Example 1
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Lambda function that takes in text and returns a Polly voice synthesis
        polly_lambda = _lambda.Function(self,
                                        'pollyHandler',
                                        runtime=_lambda.Runtime.PYTHON_3_8,
                                        code=_lambda.Code.from_asset('lambda_fns'),
                                        handler='polly.handler')

        # https://docs.aws.amazon.com/polly/latest/dg/api-permissions-reference.html
        # https://docs.aws.amazon.com/translate/latest/dg/translate-api-permissions-ref.html
        polly_policy = iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            resources=['*'],
            actions=['translate:TranslateText', 'polly:SynthesizeSpeech'])
        polly_lambda.add_to_role_policy(polly_policy)

        # defines an API Gateway Http API resource backed by our "efs_lambda" function.
        api = api_gw.HttpApi(self,
                             'Polly',
                             default_integration=api_gw.LambdaProxyIntegration(
                                 handler=polly_lambda))

        core.CfnOutput(self, 'HTTP API Url', value=api.url)
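The snippets in this collection omit their module imports. A minimal import block matching the aliases used in Example 1 might look like the following sketch (an assumption, not part of the original; early releases of the experimental CDK v1 aws_apigatewayv2 module exposed LambdaProxyIntegration directly, while later ones moved it to aws_apigatewayv2_integrations):

# Assumed imports, inferred from the aliases used in the snippet above
from aws_cdk import core
from aws_cdk import aws_iam as iam
from aws_cdk import aws_lambda as _lambda
from aws_cdk import aws_apigatewayv2 as api_gw  # experimental HTTP API module in CDK v1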
Example 2
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # DynamoDB Table
        table = dynamo_db.Table(self,
                                "Hits",
                                partition_key=dynamo_db.Attribute(
                                    name="path",
                                    type=dynamo_db.AttributeType.STRING))

        # defines an AWS Lambda resource
        dynamo_lambda = _lambda.Function(
            self,
            "DynamoLambdaHandler",
            runtime=_lambda.Runtime.NODEJS_12_X,  # execution environment
            handler="lambda.handler",  # file is "lambda", function is "handler"
            code=_lambda.Code.from_asset(
                "lambda_fns"),  # Code loaded from the lambda_fns dir
            environment={'HITS_TABLE_NAME': table.table_name})

        # grant the lambda role read/write permissions to our table
        table.grant_read_write_data(dynamo_lambda)

        # defines an API Gateway Http API resource backed by our "dynamoLambda" function.
        api = api_gw.HttpApi(self,
                             'Endpoint',
                             default_integration=api_gw.LambdaProxyIntegration(
                                 handler=dynamo_lambda))

        core.CfnOutput(self, 'HTTP API Url', value=api.url)
Example 3
    def __init__(self, scope: cdk.Construct, construct_id: str, stage: str,
                 explain_bot_lambda: _lambda.Function,
                 add_meaning_lambda: _lambda.Function, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        # Define API Gateway and HTTP API
        explain_bot_api = _apigw2.HttpApi(self, "ExplainSlackBotApi" + stage)

        self.url_output = cdk.CfnOutput(self, "Url", value=explain_bot_api.url)

        # Set up proxy integrations
        explain_bot_entity_lambda_integration = _a2int.LambdaProxyIntegration(
            handler=explain_bot_lambda, )

        # explain_bot_entity
        explain_bot_api.add_routes(
            path="/",
            methods=[_apigw2.HttpMethod.POST],
            integration=explain_bot_entity_lambda_integration,
        )

        add_meaning_lambda_integration = _a2int.LambdaProxyIntegration(
            handler=add_meaning_lambda, )

        # add_meaning_entity
        explain_bot_api.add_routes(
            path="/add_meaning",
            methods=[_apigw2.HttpMethod.ANY],
            integration=add_meaning_lambda_integration,
        )
Example 4
    def __init__(
        self,
        scope: core.Construct,
        id: str,
        memory: int = 1024,
        timeout: int = 30,
        runtime: aws_lambda.Runtime = aws_lambda.Runtime.PYTHON_3_8,
        concurrent: Optional[int] = None,
        permissions: Optional[List[iam.PolicyStatement]] = None,
        env: dict = {},
        code_dir: str = "./",
        **kwargs: Any,
    ) -> None:
        """Define stack."""
        super().__init__(scope, id, **kwargs)

        permissions = permissions or []

        lambda_env = {**DEFAULT_ENV, **env}
        lambda_env.update(
            dict(
                MOSAIC_BACKEND=settings.mosaic_backend,
                MOSAIC_HOST=settings.mosaic_host,
            ))

        lambda_function = aws_lambda.Function(
            self,
            f"{id}-lambda",
            runtime=runtime,
            code=aws_lambda.Code.from_asset(
                path=os.path.abspath(code_dir),
                bundling=core.BundlingOptions(
                    image=core.BundlingDockerImage.from_asset(
                        os.path.abspath(code_dir),
                        file="Dockerfile",
                    ),
                    command=[
                        "bash", "-c", "cp -R /var/task/. /asset-output/."
                    ],
                ),
            ),
            handler="titiler_pds.handler.handler",
            memory_size=memory,
            reserved_concurrent_executions=concurrent,
            timeout=core.Duration.seconds(timeout),
            environment=lambda_env,
        )

        for perm in permissions:
            lambda_function.add_to_role_policy(perm)

        api = apigw.HttpApi(
            self,
            f"{id}-endpoint",
            default_integration=apigw_integrations.LambdaProxyIntegration(
                handler=lambda_function),
        )
        core.CfnOutput(self, "Endpoint", value=api.url)
Example 5
    def __init__(self, scope, id, name=None, state_machine_arn=None) -> None:
        super().__init__(scope, id)
        # ==================================================
        # ================= IAM ROLE =======================
        # ==================================================
        api_role = iam.Role(
            scope=self,
            id='api_role',
            assumed_by=iam.ServicePrincipal(service='apigateway.amazonaws.com'),
        )
        api_role.add_to_policy(
            iam.PolicyStatement(
                effect=iam.Effect.ALLOW,
                actions=['states:StartSyncExecution'],
                resources=['*']
            )
        )

        # ==================================================
        # ================== API GATEWAY ===================
        # ==================================================
        api = apigw.HttpApi(
            scope=self,
            id='api',
            api_name=name,
            cors_preflight={
                "allow_headers": ["Authorization"],
                "allow_methods": [apigw.HttpMethod.POST],
                "allow_origins": ["*"],
                "max_age": core.Duration.days(10)
            }
        )

        integration = apigw.CfnIntegration(
            scope=self,
            id='integration',
            api_id=api.http_api_id,
            credentials_arn=api_role.role_arn,
            integration_type='AWS_PROXY',
            integration_subtype='StepFunctions-StartSyncExecution',
            request_parameters={
                'Input': '$request.body',
                'StateMachineArn': f'{state_machine_arn}'
            },
            payload_format_version='1.0'
        )

        apigw.CfnRoute(
            scope=self,
            id='route',
            api_id=api.http_api_id,
            route_key='POST /',
            target=f'integrations/{integration.ref}'
        )
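Because the IAM policy above grants states:StartSyncExecution, this integration only works with Express workflows (Standard state machines cannot be invoked synchronously). A hedged sketch of wiring it up from a stack, where SyncStateMachineApi stands in for the construct defined in this example:

# Hypothetical wiring inside a stack; SyncStateMachineApi is an assumed name
# for the construct above, and the Pass state is only a placeholder definition.
from aws_cdk import core, aws_stepfunctions as sfn

class ApiStack(core.Stack):
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        state_machine = sfn.StateMachine(
            self, 'ExpressStateMachine',
            definition=sfn.Pass(self, 'PassState'),
            state_machine_type=sfn.StateMachineType.EXPRESS)

        SyncStateMachineApi(self, 'SyncApi',
                            name='sync-api',
                            state_machine_arn=state_machine.state_machine_arn)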
Example 6
    def __init__(
        self,
        scope: core.Construct,
        id: str,
        memory: int = 1024,
        timeout: int = 30,
        runtime: aws_lambda.Runtime = aws_lambda.Runtime.PYTHON_3_8,
        concurrent: Optional[int] = None,
        permissions: Optional[List[iam.PolicyStatement]] = None,
        layer_arn: Optional[str] = None,
        env: dict = {},
        code_dir: str = "./",
        **kwargs: Any,
    ) -> None:
        """Define stack."""
        super().__init__(scope, id, **kwargs)

        permissions = permissions or []

        lambda_env = DEFAULT_ENV.copy()
        lambda_env.update(env)

        lambda_function = aws_lambda.Function(
            self,
            f"{id}-lambda",
            runtime=runtime,
            code=self.create_package(code_dir),
            handler="handler.handler",
            memory_size=memory,
            reserved_concurrent_executions=concurrent,
            timeout=core.Duration.seconds(timeout),
            environment=lambda_env,
        )

        for perm in permissions:
            lambda_function.add_to_role_policy(perm)

        if layer_arn:
            lambda_function.add_layers(
                aws_lambda.LayerVersion.from_layer_version_arn(
                    self,
                    layer_arn.split(":")[-2], layer_arn))

        api = apigw.HttpApi(
            self,
            f"{id}-endpoint",
            default_integration=apigw_integrations.LambdaProxyIntegration(
                handler=lambda_function),
        )
        core.CfnOutput(self, "Endpoint", value=api.url)
Example 7
    def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        # defines an AWS Lambda resource
        model_folder = os.path.dirname(os.path.realpath(__file__)) + "/../model"
        predictive_lambda = _lambda.DockerImageFunction(self, 'PredictiveLambda',
                                                        code=_lambda.DockerImageCode.from_image_asset(model_folder),
                                                        memory_size=4096,
                                                        timeout=core.Duration.seconds(15))
        # defines an API Gateway Http API resource backed by our "PredictiveLambda" function.
        api = api_gw.HttpApi(self, 'PredictiveLambdaEndpoint',
                             default_integration=integrations.LambdaProxyIntegration(handler=predictive_lambda))

        core.CfnOutput(self, 'HTTP API Url', value=api.url)
Example 8
    def __init__(self, scope: core.Construct, stack_id: str, *, api_name: str,
                 domain_name: str, functions_stacks: List[FunctionsStack],
                 subdomain: str, **kwargs):
        super().__init__(scope, stack_id, **kwargs)

        hosted_zone = route53.HostedZone.from_lookup(self,
                                                     'HostedZone',
                                                     domain_name=domain_name)

        subdomain = f'{subdomain}.{hosted_zone.zone_name}'

        certificate = acm.DnsValidatedCertificate(self,
                                                  'Certificate',
                                                  domain_name=subdomain,
                                                  hosted_zone=hosted_zone)

        self.api = apigw.HttpApi(self, 'HttpApi', api_name=api_name)

        domain_name = apigw.CfnDomainName(
            self,
            'DomainName',
            domain_name=subdomain,
            domain_name_configurations=[
                apigw.CfnDomainName.DomainNameConfigurationProperty(
                    certificate_arn=certificate.certificate_arn)
            ])

        # add an alias to the hosted zone
        route53.ARecord(self,
                        'ARecord',
                        record_name=subdomain,
                        target=route53.RecordTarget.from_alias(
                            ApiGatewayV2Domain(domain_name)),
                        zone=hosted_zone)

        mapping = apigw.CfnApiMapping(self,
                                      'ApiMapping',
                                      api_id=self.api.http_api_id,
                                      domain_name=domain_name.ref,
                                      stage='$default')

        mapping.add_depends_on(domain_name)

        for functions_stack in functions_stacks:
            self.api.add_routes(integration=apigw.LambdaProxyIntegration(
                handler=functions_stack.receiver_function),
                                methods=[functions_stack.api_method],
                                path=functions_stack.api_path)
Example 9
    def __init__(self, scope: core.Construct, stack_id: str, *, api_name: str,
                 bucket: s3.Bucket, domain_name: str, functions,
                 subdomain: str, **kwargs):
        super().__init__(scope, stack_id, **kwargs)

        hosted_zone = route53.HostedZone.from_lookup(self,
                                                     'HostedZone',
                                                     domain_name=domain_name)

        subdomain = f'{subdomain}.{hosted_zone.zone_name}'

        certificate = acm.DnsValidatedCertificate(self,
                                                  'Certificate',
                                                  domain_name=subdomain,
                                                  hosted_zone=hosted_zone)

        self.api = apigw.HttpApi(self, 'HttpApi', api_name=api_name)

        domain_name = apigw.CfnDomainName(
            self,
            'DomainName',
            domain_name=subdomain,
            domain_name_configurations=[
                apigw.CfnDomainName.DomainNameConfigurationProperty(
                    certificate_arn=certificate.certificate_arn)
            ])

        # add an alias to the hosted zone
        route53.ARecord(self,
                        'ARecord',
                        record_name=subdomain,
                        target=route53.RecordTarget.from_alias(
                            ApiGatewayV2Domain(domain_name)),
                        zone=hosted_zone)

        mapping = apigw.CfnApiMapping(self,
                                      'ApiMapping',
                                      api_id=self.api.http_api_id,
                                      domain_name=domain_name.ref,
                                      stage='$default')

        mapping.add_depends_on(domain_name)

        for function in functions:
            self.add_endpoint(bucket, function)
Example 10
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # DynamoDB Table
        table = dynamo_db.Table(self,
                                "CircuitBreakerTable",
                                partition_key=dynamo_db.Attribute(
                                    name="id",
                                    type=dynamo_db.AttributeType.STRING),
                                removal_policy=core.RemovalPolicy.DESTROY)

        # install node dependencies for lambdas
        lambda_folder = os.path.dirname(
            os.path.realpath(__file__)) + "/../lambda_fns"
        subprocess.check_call("npm i".split(),
                              cwd=lambda_folder,
                              stdout=subprocess.DEVNULL)
        subprocess.check_call("npm run build".split(),
                              cwd=lambda_folder,
                              stdout=subprocess.DEVNULL)

        # defines an AWS Lambda resource with unreliable code
        unreliable_lambda = _lambda.Function(
            self,
            "UnreliableLambdaHandler",
            runtime=_lambda.Runtime.NODEJS_12_X,
            handler="unreliable.handler",
            code=_lambda.Code.from_asset("lambda_fns"),
            # Code loaded from the lambda_fns dir
            environment={'CIRCUITBREAKER_TABLE': table.table_name})

        # grant the lambda role read/write permissions to our table
        table.grant_read_write_data(unreliable_lambda)

        # defines an API Gateway Http API resource backed by our "dynamoLambda" function.
        api = api_gw.HttpApi(
            self,
            'CircuitBreakerGateway',
            default_integration=integrations.LambdaProxyIntegration(
                handler=unreliable_lambda))

        core.CfnOutput(self, 'HTTP API Url', value=api.url)
Example 11
def add_cors_http_api(self):
    cors_preflight = _apigw2.CorsPreflightOptions(
        allow_credentials=False,
        allow_headers=['*'],
        allow_methods=[
            _apigw2.HttpMethod.GET, _apigw2.HttpMethod.HEAD,
            _apigw2.HttpMethod.OPTIONS, _apigw2.HttpMethod.POST,
            _apigw2.HttpMethod.PUT, _apigw2.HttpMethod.DELETE
        ],
        allow_origins=['*'],
    )

    http_api = _apigw2.HttpApi(
        self,
        "ApiGwId",
        api_name="HttpGateway",
        cors_preflight=cors_preflight,
    )

    return http_api
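A short usage sketch for this helper, assuming it is defined as a method on a stack class; the CorsApiStack class, the items_lambda handler, and the '/items' route are assumptions, and _a2int is the aws_apigatewayv2_integrations alias used earlier in this collection:

class CorsApiStack(core.Stack):
    # assumes add_cors_http_api (above) is also defined as a method of this class

    def __init__(self, scope: core.Construct, id: str, items_lambda, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        http_api = self.add_cors_http_api()
        http_api.add_routes(
            path='/items',
            methods=[_apigw2.HttpMethod.GET],
            integration=_a2int.LambdaProxyIntegration(handler=items_lambda))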
Example 12
    def __init__(self, scope: core.Construct, construct_id: str,
                 **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        my_api_function = _lambda.DockerImageFunction(
            self,
            'MyApiFunction',
            code=_lambda.DockerImageCode.from_image_asset('../src'),
            timeout=core.Duration.seconds(30),
            memory_size=2048)

        my_default_integration = apigv2int.LambdaProxyIntegration(
            handler=my_api_function)

        my_http_api = apigv2.HttpApi(
            self, 'MyApi', default_integration=my_default_integration)

        core.CfnOutput(self, 'MyApiUrl', value=my_http_api.api_endpoint)

        core.CfnOutput(self,
                       'MyApiFnLogGroup',
                       value=my_api_function.log_group.log_group_name)
Example 13
    def __init__(self, scope: core.Construct, id: str, *, docker_root: str,
                 **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        base_lambda = _lambda.DockerImageFunction(
            self,
            "FastAPIImageLambda",
            code=_lambda.DockerImageCode.from_image_asset(docker_root),
        )

        base_api = _apigw.HttpApi(
            self,
            "FastAPIProxyGateway",
            api_name="FastAPIProxyGateway",
            default_integration=_apigw_integration.LambdaProxyIntegration(
                handler=base_lambda),
        )

        core.CfnOutput(self,
                       "EndpointUrl",
                       value=base_api.api_endpoint,
                       export_name="fastApiUrl")
Example 14
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # EFS needs to be setup in a VPC
        vpc = ec2.Vpc(self, 'Vpc', max_azs=2)

        # Create a file system in EFS to store information
        fs = efs.FileSystem(self,
                            'FileSystem',
                            vpc=vpc,
                            removal_policy=core.RemovalPolicy.DESTROY)

        access_point = fs.add_access_point(
            'AccessPoint',
            create_acl=efs.Acl(owner_gid='1001',
                               owner_uid='1001',
                               permissions='750'),
            path="/export/lambda",
            posix_user=efs.PosixUser(gid="1001", uid="1001"))

        efs_lambda = _lambda.Function(
            self,
            'rdsProxyHandler',
            runtime=_lambda.Runtime.PYTHON_3_8,
            code=_lambda.Code.from_asset('lambda_fns'),
            handler='message_wall.lambda_handler',
            vpc=vpc,
            filesystem=_lambda.FileSystem.from_efs_access_point(
                access_point, '/mnt/msg'))

        # defines an API Gateway Http API resource backed by our "efs_lambda" function.
        api = api_gw.HttpApi(
            self,
            'EFS Lambda',
            default_integration=integrations.LambdaProxyIntegration(
                handler=efs_lambda))

        core.CfnOutput(self, 'HTTP API Url', value=api.url)
Example 15
    def __init__(self, scope: core.Construct, construct_id: str, name: str,
                 **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)
        """VPC - used in project"""
        vpc = ec2.Vpc(self, f'{name}-VPC', max_azs=2)
        """Filesystem - shared between Lambda and Streamlit - Deletes when stack gets shut down"""
        fs = efs.FileSystem(self,
                            f'{name}-FileSystem',
                            vpc=vpc,
                            removal_policy=core.RemovalPolicy.DESTROY)

        access_point = fs.add_access_point(
            'AccessPoint',
            create_acl=efs.Acl(owner_gid='1001',
                               owner_uid='1001',
                               permissions='750'),
            path="/export/lambda",
            posix_user=efs.PosixUser(gid="1001", uid="1001"))
        """Model folder that contains Lambda code"""
        model_folder = os.path.dirname(
            os.path.realpath(__file__)) + "/../model"
        lambda_handler = _lambda.DockerImageFunction(
            self,
            f'{name}-Lambda',
            code=_lambda.DockerImageCode.from_image_asset(
                model_folder),  #Uses local code to build the container
            memory_size=1024,  #Adjust to your need - 128MB to 10GB
            timeout=core.Duration.minutes(
                5),  #Adjust to your need - up to 15 mins
            vpc=vpc,
            filesystem=_lambda.FileSystem.from_efs_access_point(
                access_point, MOUNT_POINT))
        """Custom Log groups for Lambda"""
        lambda_lgs = logs.LogGroup(
            self,
            f'{name}-Lambda-LogGroup',
            log_group_name=f"/aws/lambda/{lambda_handler.function_name}",
            retention=logs.RetentionDays.ONE_WEEK,
            removal_policy=core.RemovalPolicy.DESTROY)
        """API Gateway - integrates all methods and ressources - used for Lambda invocation"""
        api = api_gw.HttpApi(
            self,
            f'{name}-ApiGw',
            default_integration=integrations.LambdaProxyIntegration(
                handler=lambda_handler))
        """""" """""" """""" """""" """""" """""" """""" """""" """"""
        #STREAMLIT RELATED START
        """""" """""" """""" """""" """""" """""" """""" """""" """"""
        '''
        cluster = ecs.Cluster(self, f"{name}-Streamlit-Cluster", vpc=vpc)
        
        ecs_task = ecs.FargateTaskDefinition(
            self,
            f'{name}-Streamlit-Task-Def',            
        )

        streamlit_container = ecs_task.add_container(
            f'{name}-Streamlit-Container',
            image=ecs.ContainerImage.from_asset('streamlit-docker'),
            essential=True,
            environment={
                'API_URL': api.url,
            },
            logging=ecs.LogDrivers.aws_logs(
                stream_prefix=f'{name}-Streamlit-Log'
            )            
        )
        
        streamlit_container.add_port_mappings(
            ecs.PortMapping(
                container_port=8501,
                host_port=8501,
                protocol=ecs.Protocol.TCP
            )
        )
        
        """Efs Volume - shared between Lambda / Streamlit"""
        ecs_task.add_volume(name=f'{name}-Efs-Volume',  
                efs_volume_configuration=ecs.EfsVolumeConfiguration(
                file_system_id=fs.file_system_id,                
        ))
        
        """Efs Mountpoint"""
        streamlit_container.add_mount_points(
            ecs.MountPoint(
                container_path="/mnt/data",
                read_only=False,
                source_volume=f'{name}-Efs-Volume'
        ))
        
       
        ecs_task.add_to_task_role_policy(
            statement=iam.PolicyStatement(
                actions=["efs:*"],
                resources=['*'],
                effect=iam.Effect.ALLOW
            )
        )
       
        """Fargate Service that hosts the Streamlit Application"""
        ecs_service = ecs_patterns.ApplicationLoadBalancedFargateService(self, f'{name}-Fargate-Service',
            cluster=cluster,            
            cpu=256,                    
            desired_count=1,            
            task_definition = ecs_task,
            memory_limit_mib=512,     
            public_load_balancer=True, 
            platform_version=ecs.FargatePlatformVersion.VERSION1_4, #https://forums.aws.amazon.com/thread.jspa?messageID=960420
            
        )  
        
        fs.connections.allow_default_port_from(
            ecs_service.service.connections)
        '''
        """""" """""" """""" """""" """""" """""" """""" """""" """"""
        #STREAMLIT RELATED END
        """""" """""" """""" """""" """""" """""" """""" """""" """"""

        core.CfnOutput(self, 'URL', value=api.url)
Example 16
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # The code that defines your stack goes here

        wsgi_function = lmb_py.PythonFunction(self,
                                              "wsgi-function",
                                              entry="./lambdas/wsgi")

        wsgi_integration = apigw_v2.LambdaProxyIntegration(
            handler=wsgi_function,
            payload_format_version=apigw_v2.PayloadFormatVersion.VERSION_1_0,
        )

        asgi_function = lmb_py.PythonFunction(
            self,
            "asgi-function",
            entry="./lambdas/asgi",
        )

        asgi_integration = apigw_v2.LambdaProxyIntegration(
            handler=asgi_function)

        self.http_api = apigw_v2.HttpApi(self,
                                         "http-api",
                                         default_integration=asgi_integration)

        self.http_api.add_routes(
            path="/wsgi",
            methods=[apigw_v2.HttpMethod.GET],
            integration=wsgi_integration,
        )

        self.http_api.add_routes(
            path="/wsgi/{proxy+}",
            methods=[apigw_v2.HttpMethod.GET],
            integration=wsgi_integration,
        )

        self.http_api_url = core.CfnOutput(self,
                                           "RestApiUrl",
                                           value=self.http_api.url)

        self.graphql_api = appsync.GraphqlApi(
            self,
            "graphql-api",
            name="notes-example-api",
            schema=appsync.Schema.from_asset("./graphql/schema.graphql"),
        )

        core.CfnOutput(self, "GraphQLUrl", value=self.graphql_api.graphql_url)

        core.CfnOutput(self, "GraphQlApiKey", value=self.graphql_api.api_key)

        graphql_handler = lmb_py.PythonFunction(
            self,
            "graphql-handler",
            entry="./lambdas/graphql",
            runtime=lmb.Runtime.PYTHON_3_8,
        )

        data_source = self.graphql_api.add_lambda_data_source(
            "lambdaDatasource", graphql_handler)

        data_source.create_resolver(type_name="Query",
                                    field_name="getNoteById")

        data_source.create_resolver(type_name="Query", field_name="listNotes")

        data_source.create_resolver(type_name="Mutation",
                                    field_name="createNote")

        data_source.create_resolver(type_name="Mutation",
                                    field_name="deleteNote")

        dynamo_table = dynamodb.Table(
            self,
            "notes-table",
            billing_mode=dynamodb.BillingMode.PAY_PER_REQUEST,
            partition_key=dynamodb.Attribute(
                name="id", type=dynamodb.AttributeType.STRING),
        )

        dynamo_table.grant_read_write_data(graphql_handler)

        graphql_handler.add_environment("NOTES_TABLE", dynamo_table.table_name)
Example 17
    def __init__(self, scope: core.Construct, construct_id: str,
                 **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        # ==================================================
        # =============== CFN PARAMETERS ===================
        # ==================================================
        project_name = core.CfnParameter(scope=self,
                                         id='SageMakerProjectName',
                                         type='String')
        model_execution_role_arn = core.CfnParameter(
            scope=self, id='ModelExecutionRoleArn', type='String')
        model_binary_location = core.CfnParameter(scope=self,
                                                  id='ModelBinaryLocation',
                                                  type='String')
        stage_name = core.CfnParameter(scope=self,
                                       id='StageName',
                                       type='String')

        name = f'{project_name.value_as_string}-{stage_name.value_as_string}'
        # ==================================================
        # ================== IAM ROLE ======================
        # ==================================================
        role = iam.Role.from_role_arn(
            scope=self,
            id='role',
            role_arn=model_execution_role_arn.value_as_string)

        # ==================================================
        # ================== ECR IMAGE =====================
        # ==================================================
        ecr_repository = ecr.Repository.from_repository_name(
            scope=self,
            id='repo',
            repository_name='<ADD YOUR CONTAINER REPO HERE>')

        ecr_image = aws_lambda.DockerImageCode.from_ecr(
            repository=ecr_repository, tag='<ADD YOUR IMAGE TAG HERE>')
        # ==================================================
        # ================ LAMBDA FUNCTION =================
        # ==================================================
        lambda_function = aws_lambda.DockerImageFunction(
            scope=self,
            id='lambda',
            function_name=name,
            code=ecr_image,
            memory_size=1024,
            role=role,
            environment={
                'MODEL_S3_URI': model_binary_location.value_as_string,
            },
            timeout=core.Duration.seconds(60))

        # ==================================================
        # ================== API GATEWAY ===================
        # ==================================================
        api = apigw.HttpApi(scope=self,
                            id='api_gateway',
                            api_name=name,
                            cors_preflight={
                                "allow_headers": ["Authorization"],
                                "allow_methods": [apigw.HttpMethod.POST],
                                "allow_origins": ["*"],
                                "max_age": core.Duration.days(10)
                            })

        integration = apigw.CfnIntegration(
            scope=self,
            id='integration',
            api_id=api.http_api_id,
            credentials_arn=role.role_arn,
            integration_type='AWS_PROXY',
            integration_uri=lambda_function.function_arn,
            integration_method='POST',
            payload_format_version='2.0')

        apigw.CfnRoute(scope=self,
                       id='route',
                       api_id=api.http_api_id,
                       route_key='POST /',
                       target=f'integrations/{integration.ref}')
Example 18
    def __init__(self, scope: cdk.Construct, construct_id: str,
                 db_stack: DatabaseStack, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)
        # Enrichment Queue
        enrichment_queue = sqs.Queue(
            self,
            "CrawlerEnrichmentQueue",
            queue_name='CrawlerEnrichmentQueue',
            retention_period=cdk.Duration.days(1),
            visibility_timeout=cdk.Duration.minutes(15))

        # Environment
        env_default = {'APP_LOGGING_LEVEL': 'ERROR'}
        env_table = {'APP_OFFERS_TABLE': db_stack.offers_table.table_name}
        env_queue_url = {'APP_OFFERS_QUEUE_URL': enrichment_queue.queue_url}

        # Base Lambda ECR image asset
        lambda_asset = ecr_assets.DockerImageAsset(self,
                                                   'CrawlerLambdaImage',
                                                   directory=os.path.join(
                                                       os.getcwd(), 'src',
                                                       'crawler'),
                                                   repository_name='crawler')

        # Crawler Lambda
        lambda_crawler = self._lambda_function_from_asset(
            lambda_asset, 'LambdaCrawler', 'lambda_handler.crawler', {
                **env_default,
                **env_table,
                **env_queue_url
            })
        rule = events.Rule(self,
                           'CrawlerCallingRule',
                           rule_name='CrawlerCallingRule',
                           schedule=events.Schedule.rate(
                               cdk.Duration.hours(1)))
        rule.add_target(targets.LambdaFunction(lambda_crawler))
        db_stack.offers_table.grant_write_data(lambda_crawler)
        enrichment_queue.grant_send_messages(lambda_crawler)

        # Enrichment Lambda
        lambda_enrichment = self._lambda_function_from_asset(
            lambda_asset, 'LambdaEnrichment', 'lambda_handler.enrichment', {
                **env_default,
                **env_table
            })
        lambda_enrichment.add_event_source(
            lambda_event_sources.SqsEventSource(enrichment_queue))
        db_stack.offers_table.grant_write_data(lambda_enrichment)

        lambda_search = self._lambda_function_from_asset(
            lambda_asset,
            'LambdaSearch',
            'lambda_handler.search', {
                **env_default,
                **env_table
            },
            reserved_concurrent_executions=10,
            timeout_minutes=1,
            memory_size=128,
            max_event_age_minutes=1)
        db_stack.offers_table.grant_read_data(lambda_search)

        personal_token = open(
            os.path.join(str(Path.home()), '.github/personal_token.txt'),
            'r').read()

        # Frontend entrypoint
        amplify_app = amplify.App(
            self,
            'CrawlerFrontend',
            app_name='CrawlerFrontend',
            auto_branch_creation=amplify.AutoBranchCreation(auto_build=True),
            source_code_provider=amplify.GitHubSourceCodeProvider(
                owner='jaswdr',
                repository='aws-cdk-crawler-frontend-example',
                oauth_token=cdk.SecretValue(personal_token)))

        # Backend entrypoint
        search_entrypoint = gateway.HttpApi(
            self,
            'CrawlerSearchApiEntrypoint',
            api_name='CrawlerSearchApiEntrypoint',
            cors_preflight=gateway.CorsPreflightOptions(
                allow_headers=['*'],
                allow_methods=[gateway.HttpMethod.GET],
                allow_origins=['*'],
                max_age=cdk.Duration.hours(2)),
            description='Crawler Search API Entrypoint')
        search_entrypoint.add_routes(
            path='/search',
            methods=[gateway.HttpMethod.GET],
            integration=gateway_integrations.LambdaProxyIntegration(
                handler=lambda_search,
                payload_format_version=gateway.PayloadFormatVersion.VERSION_2_0
            ))
        static_data_bucket = s3.Bucket(
            self,
            'CrawlerStaticDataBucket',
            versioned=True,
            removal_policy=cdk.RemovalPolicy.DESTROY,
            auto_delete_objects=True,
            bucket_name='crawler-static-data')
Example 19
    def __init__(
        self,
        scope: core.Construct,
        id: str,
        memory: int = 1024,
        timeout: int = 30,
        concurrent: Optional[int] = None,
        permissions: Optional[List[iam.PolicyStatement]] = None,
        layer_arn: Optional[str] = None,
        env: dict = {},
        code_dir: str = "./",
        **kwargs: Any,
    ) -> None:
        """Define stack."""
        super().__init__(scope, id, **kwargs)

        permissions = permissions or []

        lambda_env = DEFAULT_ENV.copy()
        lambda_env.update(env)

        lambda_function = aws_lambda.Function(
            self,
            f"{id}-lambda",
            runtime=aws_lambda.Runtime.PYTHON_3_7,
            code=self.create_package(code_dir),
            handler="handler.handler",
            memory_size=memory,
            reserved_concurrent_executions=concurrent,
            timeout=core.Duration.seconds(timeout),
            environment=lambda_env,
        )

        # # If you use the DynamoDB mosaic backend, add IAM permissions to read/put items and, if needed, create the table
        # permissions.append(
        #     iam.PolicyStatement(
        #         actions=[
        #             "dynamodb:GetItem",
        #             "dynamodb:PutItem",
        #             "dynamodb:CreateTable",
        #             "dynamodb:Scan",
        #             "dynamodb:BatchWriteItem",
        #         ],
        #         resources=[f"arn:aws:dynamodb:{self.region}:{self.account}:table/*"],
        #     )
        # )

        for perm in permissions:
            lambda_function.add_to_role_policy(perm)

        if layer_arn:
            lambda_function.add_layers(
                aws_lambda.LayerVersion.from_layer_version_arn(
                    self,
                    layer_arn.split(":")[-2], layer_arn))

        api = apigw.HttpApi(
            self,
            f"{id}-endpoint",
            default_integration=apigw.LambdaProxyIntegration(
                handler=lambda_function),
        )
        core.CfnOutput(self, "Endpoint", value=api.url)
Example 20
    def __init__(self,
                 scope: core.Construct,
                 construct_id: str,
                 cdk_env_='',
                 **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        this_dir = path.dirname(__file__)

        # Dynamo DB Tables
        dynamo_names_table = dynamodb.Table(
            self,
            'Names',
            partition_key=dynamodb.Attribute(
                name='name', type=dynamodb.AttributeType.STRING),
            sort_key=dynamodb.Attribute(name='gender',
                                        type=dynamodb.AttributeType.STRING),
            billing_mode=dynamodb.BillingMode.PAY_PER_REQUEST)
        dynamo_names_table.add_global_secondary_index(
            partition_key=dynamodb.Attribute(
                name='gender', type=dynamodb.AttributeType.STRING),
            sort_key=dynamodb.Attribute(name='uuid',
                                        type=dynamodb.AttributeType.STRING),
            index_name='bn_uuid_sort')

        # Lambda Layers
        lambda_layer_requests = lmb.LayerVersion(
            self,
            'Layer-Requests',
            code=lmb.Code.from_asset(
                path.join(this_dir, 'lambda/layers/requests.zip')),
            compatible_runtimes=[lmb.Runtime.PYTHON_3_8],
        )
        lambda_layer_simplejson = lmb.LayerVersion(
            self,
            'Layer-SimpleJSON',
            code=lmb.Code.from_asset(
                path.join(this_dir, 'lambda/layers/simplejson.zip')),
            compatible_runtimes=[lmb.Runtime.PYTHON_3_8],
        )
        lambda_layer_jinja2 = lmb.LayerVersion(
            self,
            'Layer-Jinja2',
            code=lmb.Code.from_asset(
                path.join(this_dir, 'lambda/layers/jinja2.zip')),
            compatible_runtimes=[lmb.Runtime.PYTHON_3_8],
        )

        ## Lambda - API Handler
        lambda_api_handler = lmb.Function(
            self,
            'API-Handler',
            timeout=core.Duration.seconds(360),
            memory_size=512,
            runtime=lmb.Runtime.PYTHON_3_8,
            handler='api_handler.handler',
            layers=[lambda_layer_simplejson, lambda_layer_jinja2],
            code=lmb.Code.from_asset(path.join(this_dir,
                                               'lambda/api_handler')),
            environment={'DYNAMO_DB_NAMES': dynamo_names_table.table_name})
        ### Grants
        dynamo_names_table.grant_read_write_data(lambda_api_handler)

        # APIGW
        ## Pull domain values from parameter store
        parameter_store_record_name = ssm.StringParameter.value_for_string_parameter(
            self, f'/babynames/{cdk_env_}/record_name')
        parameter_store_domain_name = ssm.StringParameter.value_for_string_parameter(
            self, f'/babynames/{cdk_env_}/domain_name')
        parameter_store_zone_id = ssm.StringParameter.value_for_string_parameter(
            self, f'/babynames/{cdk_env_}/zone_id')

        ## Import R53 Zone
        r53_zone = route53.HostedZone.from_hosted_zone_attributes(
            self,
            "R53Zone",
            zone_name=parameter_store_domain_name,
            hosted_zone_id=parameter_store_zone_id)

        ## ACM Certificate
        acm_certificate = acm.Certificate(
            self,
            "BabyNamesCertificate",
            domain_name=parameter_store_record_name,
            validation=acm.CertificateValidation.from_dns(r53_zone))

        ## APIGW Custom Domain
        apigw_baby_names_domain_name = apigw2.DomainName(
            self,
            "BabyNamesDomain",
            domain_name=parameter_store_record_name,
            certificate=acm.Certificate.from_certificate_arn(
                self, "BabyNamesCert", acm_certificate.certificate_arn))

        ## Set R53 Records
        r53_alias_target_baby_names_apigw = r53targets.ApiGatewayv2Domain(
            apigw_baby_names_domain_name)
        route53.ARecord(self,
                        "BabyNamesARecord",
                        record_name='babynames',
                        zone=r53_zone,
                        target=route53.RecordTarget.from_alias(
                            r53_alias_target_baby_names_apigw))

        ## Instantiate APIGW
        apigw_baby_names = apigw2.HttpApi(
            self,
            'BabyNames-APIGW-Http',
            default_domain_mapping=(apigw2.DefaultDomainMappingOptions(
                domain_name=apigw_baby_names_domain_name)))

        ## APIGW Integrations
        ## Lambda Integrations
        lambda_int_lambda_api_handler = apigw2int.LambdaProxyIntegration(
            handler=lambda_api_handler)

        apigw_baby_names.add_routes(path='/{name}/{gender}',
                                    methods=[apigw2.HttpMethod.GET],
                                    integration=lambda_int_lambda_api_handler)

        apigw_baby_names.add_routes(path='/{proxy+}',
                                    methods=[apigw2.HttpMethod.GET],
                                    integration=lambda_int_lambda_api_handler)
Example 21
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        account = self.account

        print("")
        print(f"   Service: {service_name}")
        print(f"   Region:  {region}")
        print(f"   Stage:   {stage}")
        print(f"   Account: {account}")
        print(f"   Stack:   {stack_name}")
        print("")

        ssm = boto3.client('ssm')

        # Environment variable mapping
        environment: dict = {'dev': {
                                     'logLevel': 'DEBUG',
                                     'dbHost': 'simple-serverless-aurora-serverless-development.cluster-cw3bjgnjhzxa.us-east-2.rds.amazonaws.com',
                                     'dbName': 'simple_serverless_dev',
                                     'vpcId': 'vpc-319daa58'
                                     },
                             'prod': {
                                      'logLevel': 'INFO',
                                      'dbHost': 'simple-serverless-aurora-serverless-production.cluster-cw3bjgnjhzxa.us-east-2.rds.amazonaws.com',
                                      'dbName': 'simple_serverless_prod',
                                      'vpcId': 'vpc-XXXXXX'
                                      }
                             }

        env_variables = {
            'STAGE': stage,
            "LOG_LEVEL": environment[stage]['logLevel']
        }

        # Create the main lambda function
        service_lambda = aws_lambda.Function(self,
                                             'LambdaFunction',
                                             runtime=aws_lambda.Runtime.PYTHON_3_8,
                                             description=service_name,
                                             code=aws_lambda.AssetCode("./dist"),
                                             function_name=service_name + "-" + stage,
                                             timeout=core.Duration.seconds(35),
                                             tracing=aws_lambda.Tracing.ACTIVE,
                                             memory_size=128,
                                             handler='lambda_function.handler',
                                             environment=env_variables)

        #
        # REST (API Gateway HTTP) stuff starts here
        #

        # How to: Import an existing HTTP API Gateway instance
        # http_api = apigatewayv2.HttpApi.from_api_id(self, id='APIGateway', http_api_id='0fdl9wlxw4')

        # How to: Create a new HTTP API Gateway instance
        http_api = apigatewayv2.HttpApi(
            self, 'APIGateway',
            api_name=f'{service_name}-api-{stage}'
        )

        integration = apigatewayv2_integrations.LambdaProxyIntegration(
            handler=service_lambda,
            payload_format_version=apigatewayv2.PayloadFormatVersion.VERSION_2_0
        )

        # How to: auto generate REST endpoints from decorators ex: @router.rest("GET", "/students").
        for route_key, endpoint in lambda_function.router.get_rest_endpoints().items():
            print(f"Creating REST endpoint for {route_key}")
            http_api.add_routes(
                path=endpoint['path'],
                methods=[apigatewayv2.HttpMethod(endpoint['method'])],
                integration=integration
            )

        #
        # Graphql (AppSync) stuff starts here
        #
        policy = iam.PolicyStatement(actions=['lambda:InvokeFunction'],
                                     resources=[service_lambda.function_arn])
        principal = iam.ServicePrincipal('appsync.amazonaws.com')
        service_role = iam.Role(self, 'service-role', assumed_by=principal)
        service_role.add_to_policy(policy)

        # How to: import an existing AppSync instance
        # graphql_api = appsync.GraphqlApi.from_graphql_api_attributes(self, 'GraphQLApi', graphql_api_id='phw4kdabqnbjzi4czy3dtbmynu')

        graphql_schema = appsync.Schema(file_path='./src/schema.graphql')
        graphql_auth_mode = appsync.AuthorizationMode(authorization_type=appsync.AuthorizationType.API_KEY)
        graphql_auth_config = appsync.AuthorizationConfig(default_authorization=graphql_auth_mode)

        graphql_api = appsync.GraphqlApi(
            self, 'GraphQLApi',
            name=f'{service_name}-api-' + stage,
            authorization_config=graphql_auth_config,
            schema=graphql_schema
        )

        datasource_name = to_camel(service_name) + "Lambda"
        lambda_data_source = appsync.LambdaDataSource(
            self, 'LambdaDataSource',
            api=graphql_api,
            name=datasource_name,
            lambda_function=service_lambda,
            service_role=service_role
        )

        # How to: auto generate GraphQL resolvers from decorators ex: @router.graphql("Query", "listStudents").
        for field_name, graphql_def in lambda_function.router.get_graphql_endpoints().items():
            print(f"Creating graphql {graphql_def['parent']} for {field_name}")
            appsync.Resolver(
                self, field_name + "Resolver",
                api=graphql_api,
                type_name=graphql_def['parent'],
                field_name=field_name,
                data_source=lambda_data_source
            )


        core.CfnOutput(self, "RestAPIUrlOutput",
                       value=http_api.url,
                       export_name=f"{stack_name}-RestApiUrl-{stage}")

        core.CfnOutput(self, "GraphQLApiIdOutput",
                       value=graphql_api.api_id,
                       export_name=f"{stack_name}-GraphqlApiId-{stage}")

        core.CfnOutput(self, "GraphQLUrlOutput",
                       value=graphql_api.graphql_url,
                       export_name=f"{stack_name}-GraphqlUrl-{stage}")

        core.CfnOutput(self, "GraphQLApiKeyOutput",
                       value=graphql_api.api_key,
                       export_name=f"{stack_name}-GraphQLApiKey-{stage}")
Example 22
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # RDS needs to be setup in a VPC
        vpc = ec2.Vpc(self, 'Vpc', max_azs=2)

        # We need this security group to add an ingress rule and allow our lambda to query the proxy
        lambda_to_proxy_group = ec2.SecurityGroup(self, 'Lambda to RDS Proxy Connection', vpc=vpc)

        # We need this security group to allow our proxy to query our MySQL Instance
        db_connection_group = ec2.SecurityGroup(self, 'Proxy to DB Connection', vpc=vpc)
        db_connection_group.add_ingress_rule(db_connection_group,ec2.Port.tcp(3306), 'allow db connection')
        db_connection_group.add_ingress_rule(lambda_to_proxy_group, ec2.Port.tcp(3306), 'allow lambda connection')

        db_credentials_secret = secrets.Secret(self, 'DBCredentialsSecret',
                                               secret_name=id+'-rds-credentials',
                                               generate_secret_string=secrets.SecretStringGenerator(
                                                   secret_string_template="{\"username\":\"syscdk\"}",
                                                   exclude_punctuation=True,
                                                   include_space=False,
                                                   generate_string_key="password"
                                               ))

        ssm.StringParameter(self, 'DBCredentialsArn',
                            parameter_name='rds-credentials-arn',
                            string_value=db_credentials_secret.secret_arn)

        # MySQL DB Instance (delete protection turned off because pattern is for learning.)
        # re-enable delete protection for a real implementation
        rds_instance = rds.DatabaseInstance(self,
                                            'DBInstance',
                                            engine=rds.DatabaseInstanceEngine.MYSQL,
                                            master_username=
                                            db_credentials_secret.secret_value_from_json('username').to_string(),
                                            master_user_password=
                                            db_credentials_secret.secret_value_from_json('password'),
                                            instance_type=
                                            ec2.InstanceType.of(ec2.InstanceClass.BURSTABLE2, ec2.InstanceSize.SMALL),
                                            vpc=vpc,
                                            removal_policy=core.RemovalPolicy.DESTROY,
                                            deletion_protection=False,
                                            security_groups=[db_connection_group])

        # Create an RDS proxy
        proxy = rds_instance.add_proxy(id+'-proxy',
                                       secret=db_credentials_secret,
                                       debug_logging=True,
                                       vpc=vpc,
                                       security_groups=[db_connection_group])

        # Workaround for bug where TargetGroupName is not set but required
        target_group = proxy.node.find_child('ProxyTargetGroup')
        target_group.add_property_override('TargetGroupName', 'default')

        rds_lambda = _lambda.Function(self, 'rdsProxyHandler',
                                      runtime=_lambda.Runtime.NODEJS_12_X,
                                      code=_lambda.Code.from_asset('lambda_fns/rds'),
                                      handler='rdsLambda.handler',
                                      vpc=vpc,
                                      security_groups=[lambda_to_proxy_group],
                                      environment={
                                          "PROXY_ENDPOINT": proxy.endpoint,
                                          "RDS_SECRET_NAME": id+'-rds-credentials'
                                      })

        db_credentials_secret.grant_read(rds_lambda)

        # defines an API Gateway Http API resource backed by our "dynamoLambda" function.
        api = api_gw.HttpApi(self, 'Endpoint',
                             default_integration=api_gw.LambdaProxyIntegration(handler=rds_lambda))

        core.CfnOutput(self, 'HTTP API Url', value=api.url)
Example 23
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Step Function Starts Here

        # The first thing we need to do is see if they are asking for pineapple on a pizza
        pineapple_check_lambda = _lambda.Function(
            self,
            "pineappleCheckLambdaHandler",
            runtime=_lambda.Runtime.NODEJS_12_X,
            handler="orderPizza.handler",
            code=_lambda.Code.from_asset("lambda_fns"),
        )

        # Step functions are built up of steps, we need to define our first step
        order_pizza = step_fn_tasks.LambdaInvoke(
            self,
            'Order Pizza Job',
            lambda_function=pineapple_check_lambda,
            input_path='$.flavour',
            result_path='$.pineappleAnalysis',
            payload_response_only=True)

        # Pizza Order failure step defined
        pineapple_detected = step_fn.Fail(self,
                                          'Sorry, We Dont add Pineapple',
                                          cause='They asked for Pineapple',
                                          error='Failed To Make Pizza')

        # If they didn't ask for pineapple, let's cook the pizza
        cook_pizza = step_fn.Succeed(self,
                                     'Lets make your pizza',
                                     output_path='$.pineappleAnalysis')

        # If they ask for a pizza with pineapple, fail. Otherwise cook the pizza
        definition = step_fn.Chain \
            .start(order_pizza) \
            .next(step_fn.Choice(self, 'With Pineapple?')
                  .when(step_fn.Condition.boolean_equals('$.pineappleAnalysis.containsPineapple', True),
                        pineapple_detected)
                  .otherwise(cook_pizza))

        state_machine = step_fn.StateMachine(
            self,
            'StateMachine',
            definition=definition,
            timeout=core.Duration.minutes(5),
            tracing_enabled=True,
            state_machine_type=step_fn.StateMachineType.EXPRESS)

        # HTTP API Definition

        # Give our gateway permissions to start synchronous Step Functions executions
        http_api_role = iam.Role(
            self,
            'HttpApiRole',
            assumed_by=iam.ServicePrincipal('apigateway.amazonaws.com'),
            inline_policies={
                "AllowSFNExec":
                iam.PolicyDocument(statements=[
                    iam.PolicyStatement(
                        actions=["states:StartSyncExecution"],
                        effect=iam.Effect.ALLOW,
                        resources=[state_machine.state_machine_arn])
                ])
            })

        api = api_gw.HttpApi(self,
                             'the_state_machine_api',
                             create_default_stage=True)

        # create an AWS_PROXY integration between the HTTP API and our Step Function
        integ = api_gw.CfnIntegration(
            self,
            'Integ',
            api_id=api.http_api_id,
            integration_type='AWS_PROXY',
            connection_type='INTERNET',
            integration_subtype='StepFunctions-StartSyncExecution',
            credentials_arn=http_api_role.role_arn,
            request_parameters={
                "Input": "$request.body",
                "StateMachineArn": state_machine.state_machine_arn
            },
            payload_format_version="1.0",
            timeout_in_millis=10000)

        api_gw.CfnRoute(self,
                        'DefaultRoute',
                        api_id=api.http_api_id,
                        route_key=api_gw.HttpRouteKey.DEFAULT.key,
                        target="integrations/" + integ.ref)

        core.CfnOutput(self, 'HTTP API URL', value=api.url)
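The default route above hands the raw request body to StartSyncExecution, so whatever is POSTed becomes the state machine input. A minimal smoke test, assuming the placeholder URL is replaced with the 'HTTP API URL' stack output, might look like this:

import json
import urllib.request

# Placeholder: substitute the 'HTTP API URL' value printed by the stack output.
api_url = "https://<api-id>.execute-api.<region>.amazonaws.com/"

# The request body is passed straight through as the Step Functions input,
# so it must contain the 'flavour' field the first task reads via input_path.
payload = json.dumps({"flavour": "pepperoni"}).encode("utf-8")
request = urllib.request.Request(
    api_url, data=payload, headers={"Content-Type": "application/json"})

with urllib.request.urlopen(request) as response:
    # The synchronous execution result is returned directly in the response body.
    print(response.read().decode())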
Example 24
    def __init__(
        self,
        scope: core.Construct,
        id: str,
        dataset_metadata_filename: str,
        dataset_metadata_generator_function_name: str,
        memory: int = 1024,
        timeout: int = 30,
        concurrent: int = 100,
        code_dir: str = "./",
        **kwargs: Any,
    ) -> None:
        """Define stack."""
        super().__init__(scope, id, **kwargs)

        # add cache
        if config.VPC_ID:
            vpc = ec2.Vpc.from_lookup(
                self,
                f"{id}-vpc",
                vpc_id=config.VPC_ID,
            )
        else:
            vpc = ec2.Vpc(self, f"{id}-vpc")

        sb_group = escache.CfnSubnetGroup(
            self,
            f"{id}-subnet-group",
            description=f"{id} subnet group",
            subnet_ids=[sb.subnet_id for sb in vpc.private_subnets],
        )

        lambda_function_security_group = ec2.SecurityGroup(self,
                                                           f"{id}-lambda-sg",
                                                           vpc=vpc)
        lambda_function_security_group.add_egress_rule(
            ec2.Peer.any_ipv4(),
            connection=ec2.Port(protocol=ec2.Protocol("ALL"),
                                string_representation=""),
            description="Allow lambda security group all outbound access",
        )

        cache_security_group = ec2.SecurityGroup(self,
                                                 f"{id}-cache-sg",
                                                 vpc=vpc)

        cache_security_group.add_ingress_rule(
            lambda_function_security_group,
            connection=ec2.Port(protocol=ec2.Protocol("ALL"),
                                string_representation=""),
            description=
            "Allow Lambda security group access to Cache security group",
        )

        cache = escache.CfnCacheCluster(
            self,
            f"{id}-cache",
            cache_node_type=config.CACHE_NODE_TYPE,
            engine=config.CACHE_ENGINE,
            num_cache_nodes=config.CACHE_NODE_NUM,
            vpc_security_group_ids=[cache_security_group.security_group_id],
            cache_subnet_group_name=sb_group.ref,
        )

        logs_access = iam.PolicyStatement(
            actions=[
                "logs:CreateLogGroup",
                "logs:CreateLogStream",
                "logs:PutLogEvents",
            ],
            resources=["*"],
        )
        ec2_network_access = iam.PolicyStatement(
            actions=[
                "ec2:CreateNetworkInterface",
                "ec2:DescribeNetworkInterfaces",
                "ec2:DeleteNetworkInterface",
            ],
            resources=["*"],
        )

        lambda_env = DEFAULT_ENV.copy()
        lambda_env.update(
            dict(
                MODULE_NAME="covid_api.main",
                VARIABLE_NAME="app",
                WORKERS_PER_CORE="1",
                LOG_LEVEL="error",
                MEMCACHE_HOST=cache.attr_configuration_endpoint_address,
                MEMCACHE_PORT=cache.attr_configuration_endpoint_port,
                DATASET_METADATA_FILENAME=dataset_metadata_filename,
                DATASET_METADATA_GENERATOR_FUNCTION_NAME=
                dataset_metadata_generator_function_name,
                PLANET_API_KEY=os.environ["PLANET_API_KEY"],
            ))

        lambda_function_props = dict(
            runtime=aws_lambda.Runtime.PYTHON_3_7,
            code=self.create_package(code_dir),
            handler="handler.handler",
            memory_size=memory,
            timeout=core.Duration.seconds(timeout),
            environment=lambda_env,
            security_groups=[lambda_function_security_group],
            vpc=vpc,
        )

        if concurrent:
            lambda_function_props[
                "reserved_concurrent_executions"] = concurrent

        lambda_function = aws_lambda.Function(self, f"{id}-lambda",
                                              **lambda_function_props)

        lambda_function.add_to_role_policy(s3_full_access_to_data_bucket)
        lambda_function.add_to_role_policy(logs_access)
        lambda_function.add_to_role_policy(ec2_network_access)

        # defines an API Gateway HTTP API resource backed by the Lambda function defined above.
        apigw.HttpApi(
            self,
            f"{id}-endpoint",
            default_integration=apigw_integrations.LambdaProxyIntegration(
                handler=lambda_function),
        )
Example 25
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # -----------------------------------------------------------------------------------------------------------
        # The Simple Webservice Logic - This is what we will be monitoring
        #
        # API GW HTTP API, Lambda Fn and DynamoDB
        # https://github.com/cdk-patterns/serverless/tree/master/the-simple-webservice
        # -----------------------------------------------------------------------------------------------------------

        # DynamoDB Table
        table = dynamo_db.Table(
            self,
            "Hits",
            partition_key=dynamo_db.Attribute(
                name="path", type=dynamo_db.AttributeType.STRING),
            billing_mode=dynamo_db.BillingMode.PAY_PER_REQUEST)

        # defines an AWS Lambda resource
        dynamo_lambda = _lambda.Function(
            self,
            "DynamoLambdaHandler",
            runtime=_lambda.Runtime.NODEJS_12_X,  # execution environment
            handler="lambda.handler",  # file is "lambda", function is "handler"
            code=_lambda.Code.from_asset(
                "lambda_fns"),  # Code loaded from the lambda dir
            environment={'HITS_TABLE_NAME': table.table_name})

        # grant the lambda role read/write permissions to our table
        table.grant_read_write_data(dynamo_lambda)

        # defines an API Gateway Http API resource backed by our "dynamoLambda" function.
        api = api_gw.HttpApi(self,
                             'HttpAPI',
                             default_integration=api_gw.LambdaProxyIntegration(
                                 handler=dynamo_lambda))

        core.CfnOutput(self, 'HTTP API Url', value=api.url)

        # -----------------------------------------------------------------------------------------------------------
        # Monitoring Logic Starts Here
        #
        # This is everything we need to understand the state of our system:
        # - custom metrics
        # - cloudwatch alarms
        # - custom cloudwatch dashboard
        # -----------------------------------------------------------------------------------------------------------

        # SNS Topic so we can hook things into our alerts e.g. email
        error_topic = sns.Topic(self, 'theBigFanTopic')

        ###
        # Custom Metrics
        ###

        api_gw_4xx_error_percentage = cloud_watch.MathExpression(
            expression="m1/m2*100",
            label="% API Gateway 4xx Errors",
            using_metrics={
                "m1":
                self.metric_for_api_gw(api.http_api_id, '4XXError',
                                       '4XX Errors', 'sum'),
                "m2":
                self.metric_for_api_gw(api.http_api_id, 'Count', '# Requests',
                                       'sum'),
            },
            period=core.Duration.minutes(5))

        # Gather the % of lambda invocations that error in past 5 mins
        lambda_error_perc = cloud_watch.MathExpression(
            expression="e / i * 100",
            label="% of invocations that errored, last 5 mins",
            using_metrics={
                "i":
                dynamo_lambda.metric(metric_name="Invocations",
                                     statistic="sum"),
                "e":
                dynamo_lambda.metric(metric_name="Errors", statistic="sum"),
            },
            period=core.Duration.minutes(5))

        # note: throttled requests are not counted in total num of invocations
        lambda_throttled_perc = cloud_watch.MathExpression(
            expression="t / (i + t) * 100",
            label="% of throttled requests, last 30 mins",
            using_metrics={
                "i":
                dynamo_lambda.metric(metric_name="Invocations",
                                     statistic="sum"),
                "t":
                dynamo_lambda.metric(metric_name="Throttles", statistic="sum"),
            },
            period=core.Duration.minutes(5))

        # UserErrors are reported at the account level rather than the table level, so we
        # merge the two metrics into one. When scoped to a single table, UserErrors should
        # always be 0, which makes this effectively a system-errors count.
        dynamo_db_total_errors = cloud_watch.MathExpression(
            expression="m1 + m2",
            label="DynamoDB Errors",
            using_metrics={
                "m1": table.metric_user_errors(),
                "m2": table.metric_system_errors(),
            },
            period=core.Duration.minutes(5))

        # Rather than have 2 alerts, let's create one aggregate metric
        dynamo_db_throttles = cloud_watch.MathExpression(
            expression="m1 + m2",
            label="DynamoDB Throttles",
            using_metrics={
                "m1":
                table.metric(metric_name="ReadThrottleEvents",
                             statistic="sum"),
                "m2":
                table.metric(metric_name="WriteThrottleEvents",
                             statistic="sum"),
            },
            period=core.Duration.minutes(5))
        ###
        # Alarms
        ###

        # Api Gateway

        # 4xx are user errors so a large volume indicates a problem
        cloud_watch.Alarm(self,
                          id="API Gateway 4XX Errors > 1%",
                          metric=api_gw_4xx_error_percentage,
                          threshold=1,
                          evaluation_periods=6,
                          datapoints_to_alarm=1,
                          treat_missing_data=cloud_watch.TreatMissingData.NOT_BREACHING) \
            .add_alarm_action(actions.SnsAction(error_topic))

        # 5xx are internal server errors so we want 0 of these
        cloud_watch.Alarm(self,
                          id="API Gateway 5XX Errors > 0",
                          metric=self.metric_for_api_gw(api_id=api.http_api_id,
                                                        metric_name="5XXError",
                                                        label="5XX Errors",
                                                        stat="p99"),
                          threshold=0,
                          period=core.Duration.minutes(5),
                          evaluation_periods=6,
                          datapoints_to_alarm=1,
                          treat_missing_data=cloud_watch.TreatMissingData.NOT_BREACHING) \
            .add_alarm_action(actions.SnsAction(error_topic))

        cloud_watch.Alarm(self,
                          id="API p99 latency alarm >= 1s",
                          metric=self.metric_for_api_gw(api_id=api.http_api_id,
                                                        metric_name="Latency",
                                                        label="API GW Latency",
                                                        stat="p99"),
                          threshold=1000,
                          period=core.Duration.minutes(5),
                          evaluation_periods=6,
                          datapoints_to_alarm=1,
                          treat_missing_data=cloud_watch.TreatMissingData.NOT_BREACHING) \
            .add_alarm_action(actions.SnsAction(error_topic))

        # Lambda

        # 2% of Dynamo Lambda invocations erroring
        cloud_watch.Alarm(self,
                          id="Dynamo Lambda 2% Error",
                          metric=lambda_error_perc,
                          threshold=2,
                          evaluation_periods=6,
                          datapoints_to_alarm=1,
                          treat_missing_data=cloud_watch.TreatMissingData.NOT_BREACHING) \
            .add_alarm_action(actions.SnsAction(error_topic))

        # 1% of Lambda invocations taking longer than 1 second
        cloud_watch.Alarm(self,
                          id="Dynamo Lambda p99 Long Duration (>1s)",
                          metric=dynamo_lambda.metric_duration(),
                          period=core.Duration.minutes(5),
                          threshold=1000,
                          evaluation_periods=6,
                          datapoints_to_alarm=1,
                          statistic="p99",
                          treat_missing_data=cloud_watch.TreatMissingData.NOT_BREACHING) \
            .add_alarm_action(actions.SnsAction(error_topic))

        # 2% of our lambda invocations are throttled
        cloud_watch.Alarm(self,
                          id="Dynamo Lambda 2% Throttled",
                          metric=lambda_throttled_perc,
                          threshold=2,
                          evaluation_periods=6,
                          datapoints_to_alarm=1,
                          treat_missing_data=cloud_watch.TreatMissingData.NOT_BREACHING) \
            .add_alarm_action(actions.SnsAction(error_topic))

        # DynamoDB

        # DynamoDB interactions are being throttled, which indicates the table is under-provisioned
        cloud_watch.Alarm(self,
                          id="DynamoDB Table Reads/Writes Throttled",
                          metric=dynamo_db_throttles,
                          threshold=1,
                          evaluation_periods=6,
                          datapoints_to_alarm=1,
                          treat_missing_data=cloud_watch.TreatMissingData.NOT_BREACHING) \
            .add_alarm_action(actions.SnsAction(error_topic))

        # There should be 0 DynamoDB errors
        cloud_watch.Alarm(self,
                          id="DynamoDB Errors > 0",
                          metric=dynamo_db_total_errors,
                          threshold=0,
                          evaluation_periods=6,
                          datapoints_to_alarm=1,
                          treat_missing_data=cloud_watch.TreatMissingData.NOT_BREACHING) \
            .add_alarm_action(actions.SnsAction(error_topic))

        dashboard = cloud_watch.Dashboard(self, id="CloudWatchDashBoard")
        dashboard.add_widgets(
            cloud_watch.GraphWidget(title="Requests",
                                    width=8,
                                    left=[
                                        self.metric_for_api_gw(
                                            api_id=api.http_api_id,
                                            metric_name="Count",
                                            label="# Requests",
                                            stat="sum")
                                    ]),
            cloud_watch.GraphWidget(
                title="API GW Latency",
                width=8,
                stacked=True,
                left=[
                    self.metric_for_api_gw(api_id=api.http_api_id,
                                           metric_name="Latency",
                                           label="API Latency p50",
                                           stat="p50"),
                    self.metric_for_api_gw(api_id=api.http_api_id,
                                           metric_name="Latency",
                                           label="API Latency p90",
                                           stat="p90"),
                    self.metric_for_api_gw(api_id=api.http_api_id,
                                           metric_name="Latency",
                                           label="API Latency p99",
                                           stat="p99")
                ]),
            cloud_watch.GraphWidget(
                title="API GW Errors",
                width=8,
                stacked=True,
                left=[
                    self.metric_for_api_gw(api_id=api.http_api_id,
                                           metric_name="4XXError",
                                           label="4XX Errors",
                                           stat="sum"),
                    self.metric_for_api_gw(api_id=api.http_api_id,
                                           metric_name="5XXError",
                                           label="5XX Errors",
                                           stat="sum")
                ]),
            cloud_watch.GraphWidget(title="Dynamo Lambda Error %",
                                    width=8,
                                    left=[lambda_error_perc]),
            cloud_watch.GraphWidget(
                title="Dynamo Lambda Duration",
                width=8,
                stacked=True,
                left=[
                    dynamo_lambda.metric_duration(statistic="p50"),
                    dynamo_lambda.metric_duration(statistic="p90"),
                    dynamo_lambda.metric_duration(statistic="p99")
                ]),
            cloud_watch.GraphWidget(title="Dynamo Lambda Throttle %",
                                    width=8,
                                    left=[lambda_throttled_perc]),
            cloud_watch.GraphWidget(
                title="DynamoDB Latency",
                width=8,
                stacked=True,
                left=[
                    table.metric_successful_request_latency(
                        dimensions={
                            "TableName": table.table_name,
                            "Operation": "GetItem"
                        }),
                    table.metric_successful_request_latency(
                        dimensions={
                            "TableName": table.table_name,
                            "Operation": "UpdateItem"
                        }),
                    table.metric_successful_request_latency(
                        dimensions={
                            "TableName": table.table_name,
                            "Operation": "PutItem"
                        }),
                    table.metric_successful_request_latency(
                        dimensions={
                            "TableName": table.table_name,
                            "Operation": "DeleteItem"
                        }),
                    table.metric_successful_request_latency(
                        dimensions={
                            "TableName": table.table_name,
                            "Operation": "Query"
                        }),
                ]),
            cloud_watch.GraphWidget(
                title="DynamoDB Consumed Read/Write Units",
                width=8,
                stacked=False,
                left=[
                    table.metric(metric_name="ConsumedReadCapacityUnits"),
                    table.metric(metric_name="ConsumedWriteCapacityUnits")
                ]),
            cloud_watch.GraphWidget(
                title="DynamoDB Throttles",
                width=8,
                stacked=True,
                left=[
                    table.metric(metric_name="ReadThrottleEvents",
                                 statistic="sum"),
                    table.metric(metric_name="WriteThrottleEvents",
                                 statistic="sum")
                ]),
        )
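The self.metric_for_api_gw helper used throughout this example is not part of the excerpt. A minimal sketch, assuming it simply builds an AWS/ApiGateway metric scoped to one HTTP API via the ApiId dimension (and reusing the cloud_watch and core aliases from the code above), could look like this:

    # Hypothetical helper: an API Gateway metric filtered to a single HTTP API.
    def metric_for_api_gw(self, api_id: str, metric_name: str, label: str,
                          stat: str = 'sum') -> cloud_watch.Metric:
        return cloud_watch.Metric(
            metric_name=metric_name,
            namespace="AWS/ApiGateway",
            dimensions={"ApiId": api_id},
            label=label,
            statistic=stat,
            period=core.Duration.minutes(5))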
Example 26
    def __init__(self, scope: core.Construct, id_: str, props,
                 **kwargs) -> None:
        super().__init__(scope, id_, **kwargs)

        namespace = props['namespace']
        htsget_refserver_ecr_repo: ecr.Repository = props['ecr_repo']
        htsget_refserver_image_tag = props['htsget_refserver_image_tag']
        cors_allowed_origins = props['cors_allowed_origins']

        # --- Query deployment env specific config from SSM Parameter Store

        cert_apse2_arn = ssm.StringParameter.from_string_parameter_name(
            self,
            "SSLCertAPSE2ARN",
            string_parameter_name="/htsget/acm/apse2_arn",
        )
        cert_apse2 = acm.Certificate.from_certificate_arn(
            self,
            "SSLCertAPSE2",
            certificate_arn=cert_apse2_arn.string_value,
        )

        hosted_zone_id = ssm.StringParameter.from_string_parameter_name(
            self, "HostedZoneID", string_parameter_name="hosted_zone_id")
        hosted_zone_name = ssm.StringParameter.from_string_parameter_name(
            self, "HostedZoneName", string_parameter_name="hosted_zone_name")

        domain_name = ssm.StringParameter.from_string_parameter_name(
            self,
            "DomainName",
            string_parameter_name="/htsget/domain",
        )

        # --- Cognito parameters are from data portal terraform stack

        cog_user_pool_id = ssm.StringParameter.from_string_parameter_name(
            self,
            "CogUserPoolID",
            string_parameter_name="/data_portal/client/cog_user_pool_id",
        )

        cog_app_client_id_stage = ssm.StringParameter.from_string_parameter_name(
            self,
            "CogAppClientIDStage",
            string_parameter_name="/data_portal/client/cog_app_client_id_stage",
        )

        cog_app_client_id_local = ssm.StringParameter.from_string_parameter_name(
            self,
            "CogAppClientIDLocal",
            string_parameter_name="/data_portal/client/cog_app_client_id_local",
        )

        # --- Query main VPC and setup Security Groups

        vpc = ec2.Vpc.from_lookup(
            self,
            "VPC",
            vpc_name="main-vpc",
            tags={
                'Stack': "networking",
            },
        )
        private_subnets = ec2.SubnetSelection(
            subnet_type=ec2.SubnetType.PRIVATE,
            availability_zones=["ap-southeast-2a"],
        )

        sg_elb = ec2.SecurityGroup(
            self,
            "ELBSecurityGroup",
            vpc=vpc,
            description=f"Security Group for ELB in {namespace} stack",
            security_group_name=f"{namespace} ELB Security Group",
            allow_all_outbound=False,
        )
        sg_elb.add_ingress_rule(peer=ec2.Peer.any_ipv4(),
                                connection=ec2.Port.tcp(80),
                                description="Allow http inbound within VPC")

        sg_ecs_service = ec2.SecurityGroup(
            self,
            "ECSServiceSecurityGroup",
            vpc=vpc,
            description=f"Security Group for ECS Service in {namespace} stack",
            security_group_name=f"{namespace} ECS Security Group",
        )
        sg_ecs_service.add_ingress_rule(
            peer=sg_elb,
            connection=ec2.Port.tcp(3000),
            description="Allow traffic from Load balancer to ECS service")

        # --- Setup ECS Fargate cluster

        config_vol = ecs.Volume(
            name="config-vol",
            host=ecs.Host(),
        )

        task_execution_role = iam.Role(
            self,
            "ecsTaskExecutionRole",
            assumed_by=iam.ServicePrincipal("ecs-tasks.amazonaws.com"))
        task_execution_role.add_to_policy(
            iam.PolicyStatement(
                actions=[
                    "s3:GetBucketLocation",
                    "s3:GetObject",
                    "s3:ListBucket",
                    "s3:ListBucketMultipartUploads",
                    "s3:ListMultipartUploadParts",
                    "s3:GetObjectTagging",
                    "s3:GetObjectVersionTagging",
                    "logs:CreateLogStream",
                    "logs:PutLogEvents",
                    "ssm:GetParameterHistory",
                    "ssm:GetParametersByPath",
                    "ssm:GetParameters",
                    "ssm:GetParameter",
                ],
                resources=["*"],
            ))
        task_execution_role.add_managed_policy(
            iam.ManagedPolicy.from_aws_managed_policy_name(
                'service-role/AmazonECSTaskExecutionRolePolicy'))

        task = ecs.FargateTaskDefinition(
            self,
            f"{namespace}-task",
            cpu=512,
            memory_limit_mib=1024,
            volumes=[config_vol],
            task_role=task_execution_role,
            execution_role=task_execution_role,
        )

        cmd_ssm = "ssm get-parameter --name '/htsget/refserver/config' --output text --query Parameter.Value"
        sidecar_container: ecs.ContainerDefinition = task.add_container(
            f"{namespace}-sidecar",
            image=ecs.ContainerImage.from_registry(
                "quay.io/victorskl/aws-cli:2.1.3"),
            essential=False,
            entry_point=[
                "/bin/bash",
                "-c",
                f"aws {cmd_ssm} > config.json",
            ],
            logging=ecs.LogDriver.aws_logs(stream_prefix=f"{namespace}", ),
        )
        sidecar_container.add_mount_points(
            ecs.MountPoint(
                container_path="/aws",
                read_only=False,
                source_volume=config_vol.name,
            ))

        main_container: ecs.ContainerDefinition = task.add_container(
            namespace,
            image=ecs.ContainerImage.from_ecr_repository(
                repository=htsget_refserver_ecr_repo,
                tag=htsget_refserver_image_tag,
            ),
            essential=True,
            command=[
                "./htsget-refserver", "-config",
                "/usr/src/app/config/config.json"
            ],
            logging=ecs.LogDriver.aws_logs(stream_prefix=f"{namespace}", ),
        )
        main_container.add_port_mappings(
            ecs.PortMapping(
                container_port=3000,
                protocol=ecs.Protocol.TCP,
            ))
        main_container.add_mount_points(
            ecs.MountPoint(
                container_path="/usr/src/app/config",
                read_only=True,
                source_volume=config_vol.name,
            ))
        main_container.add_container_dependencies(
            ecs.ContainerDependency(
                container=sidecar_container,
                condition=ecs.ContainerDependencyCondition.COMPLETE,
            ))

        cluster = ecs.Cluster(self, f"{namespace}-cluster", vpc=vpc)

        service = ecs.FargateService(
            self,
            f"{namespace}-service",
            platform_version=ecs.FargatePlatformVersion.VERSION1_4,
            task_definition=task,
            cluster=cluster,
            vpc_subnets=private_subnets,
            desired_count=1,
            security_groups=[
                sg_ecs_service,
            ],
        )

        # --- Setup Application Load Balancer in front of ECS cluster

        lb = elbv2.ApplicationLoadBalancer(
            self,
            f"{namespace}-lb",
            vpc=vpc,
            internet_facing=False,
            security_group=sg_elb,
            deletion_protection=True,
        )
        http_listener = lb.add_listener(
            "HttpLBListener",
            port=80,
        )
        health_check = elbv2.HealthCheck(interval=core.Duration.seconds(30),
                                         path="/reads/service-info",
                                         timeout=core.Duration.seconds(5))
        http_listener.add_targets(
            "LBtoECS",
            port=3000,
            protocol=elbv2.ApplicationProtocol.HTTP,
            targets=[service],
            health_check=health_check,
        )
        core.CfnOutput(self,
                       "LoadBalancerDNS",
                       value=lb.load_balancer_dns_name)

        # --- Setup APIGatewayv2 HttpApi using VpcLink private integration to ALB/ECS in private subnets

        vpc_link = apigwv2.VpcLink(self,
                                   f"{namespace}-VpcLink",
                                   vpc=vpc,
                                   security_groups=[
                                       sg_ecs_service,
                                       sg_elb,
                                   ])
        self.apigwv2_alb_integration = apigwv2i.HttpAlbIntegration(
            listener=http_listener,
            vpc_link=vpc_link,
        )
        custom_domain = apigwv2.DomainName(
            self,
            "CustomDomain",
            certificate=cert_apse2,
            domain_name=domain_name.string_value,
        )
        self.http_api = apigwv2.HttpApi(
            self,
            f"{namespace}-apigw",
            default_domain_mapping=apigwv2.DomainMappingOptions(
                domain_name=custom_domain),
            cors_preflight=apigwv2.CorsPreflightOptions(
                allow_origins=cors_allowed_origins,
                allow_headers=["*"],
                allow_methods=[
                    apigwv2.CorsHttpMethod.ANY,
                ],
                allow_credentials=True,
            ))
        core.CfnOutput(self, "ApiEndpoint", value=self.http_api.api_endpoint)

        # --- Setup DNS for the custom domain

        hosted_zone = route53.HostedZone.from_hosted_zone_attributes(
            self,
            "HostedZone",
            hosted_zone_id=hosted_zone_id.string_value,
            zone_name=hosted_zone_name.string_value,
        )
        route53.ARecord(
            self,
            "ApiCustomDomainAlias",
            zone=hosted_zone,
            record_name="htsget",
            target=route53.RecordTarget.from_alias(
                route53t.ApiGatewayv2DomainProperties(
                    regional_domain_name=custom_domain.regional_domain_name,
                    regional_hosted_zone_id=custom_domain.
                    regional_hosted_zone_id)),
        )
        core.CfnOutput(
            self,
            "HtsgetEndpoint",
            value=custom_domain.name,
        )

        cognito_authzr = apigwv2.CfnAuthorizer(
            self,
            "CognitoAuthorizer",
            api_id=self.http_api.http_api_id,
            authorizer_type="JWT",
            identity_source=[
                "$request.header.Authorization",
            ],
            name="CognitoAuthorizer",
            jwt_configuration=apigwv2.CfnAuthorizer.JWTConfigurationProperty(
                audience=[
                    cog_app_client_id_stage.string_value,
                    cog_app_client_id_local.string_value,
                ],
                issuer=
                f"https://cognito-idp.{self.region}.amazonaws.com/{cog_user_pool_id.string_value}"
            ))

        # Add catch all routes
        rt_catchall = apigwv2.HttpRoute(
            self,
            "CatchallRoute",
            http_api=self.http_api,
            route_key=apigwv2.HttpRouteKey.with_(
                path="/{proxy+}", method=apigwv2.HttpMethod.GET),
            integration=self.apigwv2_alb_integration)
        rt_catchall_cfn: apigwv2.CfnRoute = rt_catchall.node.default_child
        rt_catchall_cfn.authorizer_id = cognito_authzr.ref
        rt_catchall_cfn.authorization_type = "JWT"

        # Comment this out to opt out of setting up the experimental Passport + htsget integration
        self.setup_ga4gh_passport()
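Once deployed, the catch-all GET route only accepts requests carrying a Cognito-issued JWT. A quick check against the same /reads/service-info path the load balancer health check uses might look like the sketch below; the domain and token are placeholders.

import urllib.request

# Placeholders: the custom domain comes from the '/htsget/domain' SSM parameter,
# and the token must be a JWT issued by the Cognito user pool configured above.
request = urllib.request.Request(
    "https://<htsget-domain>/reads/service-info",
    headers={"Authorization": "Bearer <cognito-jwt>"})

with urllib.request.urlopen(request) as response:
    print(response.read().decode())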
Example 27
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        diaries_dynamodb_table = aws_dynamodb.Table(self, 'Diaries',
            partition_key=aws_dynamodb.Attribute(
                name='id',
                type=aws_dynamodb.AttributeType.STRING)
        )

        api = aws_apigatewayv2.HttpApi(
            self, 'HttpApi', api_name='AutomaticTranslationDiary',
            cors_preflight=aws_apigatewayv2.CorsPreflightOptions(
                allow_headers=['Content-Type'],
                allow_methods=[
                    aws_apigatewayv2.HttpMethod.GET,
                    aws_apigatewayv2.HttpMethod.POST,
                    aws_apigatewayv2.HttpMethod.OPTIONS
                ],
                allow_origins=['*'],
            ),
        )

        def create_function(handler: str):
            function = aws_lambda.Function(
                self, handler.replace('.', '-'),
                function_name=handler.replace('.', '-'),
                runtime=aws_lambda.Runtime.PYTHON_3_8,
                code=aws_lambda.Code.asset('lambda/src'),
                handler=handler)

            diaries_dynamodb_table.grant_read_write_data(function)
            function.add_environment(
                'DYNAMODB_NAME_DIARIES',
                diaries_dynamodb_table.table_name)
            function.add_to_role_policy(
                aws_iam.PolicyStatement(
                    resources=['*'],
                    actions=['translate:TranslateText', 'polly:SynthesizeSpeech']))

            return function

        api.add_routes(
            path='/diaries/{lang}',
            methods=[aws_apigatewayv2.HttpMethod.POST],
            integration=aws_apigatewayv2.LambdaProxyIntegration(
                handler=create_function('diary_handler.save')
            ))

        api.add_routes(
            path='/diaries',
            methods=[aws_apigatewayv2.HttpMethod.GET],
            integration=aws_apigatewayv2.LambdaProxyIntegration(
                handler=create_function('diary_handler.diaries')
            ))

        api.add_routes(
            path='/diaries/{diaryId}/speech/{lang}',
            methods=[aws_apigatewayv2.HttpMethod.GET],
            integration=aws_apigatewayv2.LambdaProxyIntegration(
                handler=create_function('diary_handler.speech')
            ))
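The diary_handler module under lambda/src is not included here. A hypothetical sketch of the speech handler, assuming it translates the diary text with Amazon Translate and voices it with Polly (the DynamoDB lookup is stubbed with a fixed string), could be:

# lambda/src/diary_handler.py -- illustrative sketch only, not the original source
import base64

import boto3

translate = boto3.client("translate")
polly = boto3.client("polly")


def speech(event, context):
    lang = event["pathParameters"]["lang"]
    # In the real handler the diary text would be read from DYNAMODB_NAME_DIARIES
    # using the {diaryId} path parameter; a fixed string keeps this sketch short.
    text = "Dear diary, today was a good day."

    translated = translate.translate_text(
        Text=text, SourceLanguageCode="auto", TargetLanguageCode=lang)

    audio = polly.synthesize_speech(
        Text=translated["TranslatedText"], OutputFormat="mp3", VoiceId="Joanna")

    return {
        "statusCode": 200,
        "headers": {"Content-Type": "audio/mpeg"},
        "isBase64Encoded": True,
        "body": base64.b64encode(audio["AudioStream"].read()).decode("utf-8"),
    }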
Example 28
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        account = self.account

        print("")
        print(f"   Service: {service_name}")
        print(f"   Region:  {region}")
        print(f"   Stage:   {stage}")
        print(f"   Account: {account}")
        print(f"   Stack:   {stack_name}")
        print("")

        ssm = boto3.client('ssm')

        # Environment variable mapping
        environment: dict = {
            'dev': {
                'logLevel': 'DEBUG'
            },
            'prod': {
                'logLevel': 'INFO'
            }
        }

        # How to: Retrieve an existing VPC instance.
        vpc_id: str = ssm.get_parameter(Name="VpcId")['Parameter']['Value']
        vpc = ec2.Vpc.from_lookup(self, 'VPC', vpc_id=vpc_id)

        private_subnet_1_id: str = ssm.get_parameter(
            Name="private-subnet-1")['Parameter']['Value']
        private_subnet_2_id: str = ssm.get_parameter(
            Name="private-subnet-2")['Parameter']['Value']
        private_subnet_3_id: str = ssm.get_parameter(
            Name="private-subnet-3")['Parameter']['Value']

        # How to: Import a value exported from another stack
        # These values are imported from the simple-database stack https://github.com/SimpleServerless/simple-database/blob/main/template.yaml#L95
        # Change these lines to the appropriate values for your project
        db_host = core.Fn.import_value(
            f"simple-serverless-database-{stage}-Host")
        db_name = core.Fn.import_value(
            f"simple-serverless-database-{stage}-Name")
        app_security_group_id = core.Fn.import_value(
            f"simple-serverless-database-{stage}-AppSGId")

        env_variables = {
            'STAGE': stage,
            "PGHOST": db_host,
            "PGPORT": "5432",
            "PGDATABASE": db_name,
            "LOG_LEVEL": environment[stage]['logLevel']
        }

        # How to: Import a security group
        app_security_group = ec2.SecurityGroup.from_security_group_id(
            self, "AppSecurityGroup", app_security_group_id)

        # Create the main lambda function
        service_lambda = aws_lambda.Function(
            self,
            'LambdaFunction',
            runtime=aws_lambda.Runtime.PYTHON_3_8,
            description=service_name,
            code=aws_lambda.AssetCode("./dist"),
            function_name=service_name + "-" + stage,
            timeout=core.Duration.seconds(35),
            tracing=aws_lambda.Tracing.ACTIVE,
            memory_size=128,
            handler='lambda_function.handler',
            vpc=vpc,
            security_groups=[app_security_group],
            environment=env_variables)

        # Add SecretsManager permissions to lambda
        service_lambda.add_to_role_policy(
            iam.PolicyStatement(
                effect=iam.Effect.ALLOW,
                actions=[
                    "secretsmanager:DescribeSecret",
                    "secretsmanager:GetSecretValue", "secretsmanager:List*"
                ],
                resources=[
                    f"arn:aws:secretsmanager:{region}:{account}:secret:simple-serverless/*"
                ]))

        # Make a wide open security group for the secrets vpc endpoint
        vpc_endpoint_sg = ec2.SecurityGroup(
            self,
            'VpcEndpointSG',
            vpc=vpc,
            allow_all_outbound=True,
            description="Secret Manager VPC Endpoint SG")

        vpc_endpoint_sg.add_ingress_rule(
            peer=ec2.Peer.any_ipv4(),
            connection=ec2.Port.tcp_range(0, 65535),
            description="all inbound",
        )

        # How to: create a VPC Endpoint to access Secrets Manager.
        # You can delete this if you're not too cheap to pay for a NAT instance.
        # This still costs $0.24 per day per AZ, so $0.72 for the three AZs we're using here.
        # This block is the only recurring cost in this stack and is the only reason I delete
        # this stack when I'm not actively working on it.
        ec2.CfnVPCEndpoint(
            self,
            'SecretsManagerVPCEndpoint',
            service_name='com.amazonaws.us-east-2.secretsmanager',
            vpc_endpoint_type='Interface',
            vpc_id=vpc_id,
            subnet_ids=[
                private_subnet_1_id, private_subnet_2_id, private_subnet_3_id
            ],
            security_group_ids=[vpc_endpoint_sg.security_group_id],
            private_dns_enabled=True)

        #
        # REST (API Gateway HTTP) stuff starts here
        #

        # How to: Import an existing HTTP API Gateway instance
        # http_api = apigatewayv2.HttpApi.from_api_id(self, id='APIGateway', http_api_id='0fdl9wlxw4')

        # How to: Create a new HTTP API Gateway instance
        http_api = apigatewayv2.HttpApi(self,
                                        'APIGateway',
                                        api_name=f'{service_name}-api-{stage}')

        integration = apigatewayv2_integrations.LambdaProxyIntegration(
            handler=service_lambda,
            payload_format_version=apigatewayv2.PayloadFormatVersion.
            VERSION_2_0)

        # How to: auto-generate REST endpoints from decorators, e.g. @router.rest("GET", "/students").
        for route_key, endpoint in lambda_function.router.get_rest_endpoints(
        ).items():
            print(f"Creating REST endpoint for {route_key}")
            http_api.add_routes(
                path=endpoint['path'],
                methods=[apigatewayv2.HttpMethod(endpoint['method'])],
                integration=integration)

        core.CfnOutput(self,
                       "RestAPIOutput",
                       value=http_api.url,
                       export_name=f"{stack_name}-RestApiUrl-{stage}")
Example 29
    def __init__(
        self,
        scope: core.Construct,
        id: str,
        secret_key: str,
        custom_domain: Optional[str] = None,
        hosted_zone_id: Optional[str] = None,
        hosted_zone_name: Optional[str] = None,
        **kwargs,
    ) -> None:
        super().__init__(scope, id, **kwargs)

        self.files_bucket = s3.Bucket(
            self,
            "files-bucket",
            bucket_name="once-shared-files",
            block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
            encryption=s3.BucketEncryption.S3_MANAGED,
            removal_policy=core.RemovalPolicy.DESTROY,
        )

        self.files_table = dynamodb.Table(
            self,
            "once-files-table",
            table_name="once-files",
            partition_key=dynamodb.Attribute(
                name="id", type=dynamodb.AttributeType.STRING),
            billing_mode=dynamodb.BillingMode.PAY_PER_REQUEST,
            removal_policy=core.RemovalPolicy.DESTROY,
        )

        self.api = apigw.HttpApi(self, "once-api", api_name="once-api")

        api_url = self.api.url
        if custom_domain is not None:
            api_url = f"https://{custom_domain}/"

        core.CfnOutput(self, "base-url", value=api_url)

        self.get_upload_ticket_function = lambda_.Function(
            self,
            "get-upload-ticket-function",
            function_name="once-get-upload-ticket",
            description="Returns a pre-signed request to share a file",
            runtime=lambda_.Runtime.PYTHON_3_7,
            code=make_python_zip_bundle(
                os.path.join(BASE_PATH, "get-upload-ticket")),
            handler="handler.on_event",
            log_retention=LOG_RETENTION,
            environment={
                "APP_URL": api_url,
                "FILES_TABLE_NAME": self.files_table.table_name,
                "FILES_BUCKET": self.files_bucket.bucket_name,
                "SECRET_KEY": secret_key,
            },
        )

        self.files_bucket.grant_put(self.get_upload_ticket_function)
        self.files_table.grant_read_write_data(self.get_upload_ticket_function)

        self.download_and_delete_function = lambda_.Function(
            self,
            "download-and-delete-function",
            function_name="once-download-and-delete",
            description=
            "Serves a file from S3 and deletes it as soon as it has been successfully transferred",
            runtime=lambda_.Runtime.PYTHON_3_7,
            code=lambda_.Code.from_asset(
                os.path.join(BASE_PATH, "download-and-delete")),
            handler="handler.on_event",
            log_retention=LOG_RETENTION,
            environment={
                "FILES_BUCKET": self.files_bucket.bucket_name,
                "FILES_TABLE_NAME": self.files_table.table_name,
            },
        )

        self.files_bucket.grant_read(self.download_and_delete_function)
        self.files_bucket.grant_delete(self.download_and_delete_function)
        self.files_table.grant_read_write_data(
            self.download_and_delete_function)

        get_upload_ticket_integration = integrations.LambdaProxyIntegration(
            handler=self.get_upload_ticket_function)
        self.api.add_routes(path="/",
                            methods=[apigw.HttpMethod.GET],
                            integration=get_upload_ticket_integration)

        download_and_delete_integration = integrations.LambdaProxyIntegration(
            handler=self.download_and_delete_function)
        self.api.add_routes(path="/{entry_id}/{filename}",
                            methods=[apigw.HttpMethod.GET],
                            integration=download_and_delete_integration)

        self.cleanup_function = lambda_.Function(
            self,
            "delete-served-files-function",
            function_name="once-delete-served-files",
            description=
            "Deletes files from S3 once they have been marked as deleted in DynamoDB",
            runtime=lambda_.Runtime.PYTHON_3_7,
            code=lambda_.Code.from_asset(
                os.path.join(BASE_PATH, "delete-served-files")),
            handler="handler.on_event",
            log_retention=LOG_RETENTION,
            environment={
                "FILES_BUCKET": self.files_bucket.bucket_name,
                "FILES_TABLE_NAME": self.files_table.table_name,
            },
        )

        self.files_bucket.grant_delete(self.cleanup_function)
        self.files_table.grant_read_write_data(self.cleanup_function)

        events.Rule(
            self,
            "once-delete-served-files-rule",
            schedule=events.Schedule.rate(core.Duration.hours(24)),
            targets=[targets.LambdaFunction(self.cleanup_function)],
        )

        if custom_domain is not None:
            self.custom_domain_stack = CustomDomainStack(
                self,
                "custom-domain",
                api=self.api,
                domain_name=custom_domain,
                hosted_zone_id=hosted_zone_id,
                hosted_zone_name=hosted_zone_name,
            )
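The handler sources under BASE_PATH are not shown. A hypothetical sketch of the get-upload-ticket handler, assuming it returns a pre-signed S3 POST (the DynamoDB bookkeeping and SECRET_KEY handling are omitted), might be:

# get-upload-ticket/handler.py -- illustrative sketch only, not the original source
import json
import os
import uuid

import boto3

s3 = boto3.client("s3")


def on_event(event, context):
    # A random key keeps uploads from colliding; the real handler also records the
    # entry in FILES_TABLE_NAME and validates the caller against SECRET_KEY.
    key = uuid.uuid4().hex
    presigned = s3.generate_presigned_post(
        Bucket=os.environ["FILES_BUCKET"], Key=key, ExpiresIn=3600)

    return {
        "statusCode": 200,
        "headers": {"Content-Type": "application/json"},
        "body": json.dumps(presigned),
    }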
Example 30
    def __init__(self, scope: core.Construct, construct_id: str,
                 **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        # Input variables

        # Domain name to redirect
        domain_name = core.CfnParameter(
            self,
            "domainName",
            type="String",
            description="Domain name to redirect",
        )

        # Here we use a specific certificate from parameter values
        cert_arn = core.CfnParameter(
            self,
            "certArn",
            type="String",
            description=
            "Certificate ARN for the redirection (has to be in us-east-1)",
        )
        # End: Input variables

        # Infra setup

        redirect_fn = _lambda.Function(
            self,
            "NCCIDRedirectLambda",
            handler="lambda-handler.handler",
            runtime=_lambda.Runtime.PYTHON_3_8,
            code=_lambda.Code.asset("lambda"),
        )

        redirect_integration = _apigw2int.LambdaProxyIntegration(
            handler=redirect_fn)

        cert = _acm.Certificate.from_certificate_arn(self, "cert",
                                                     cert_arn.value_as_string)

        http_api = _apigw2.HttpApi(
            self,
            "nccid-redirect",
            api_name="nccid-redirect",
            description="A redirection gateway.",
        )

        http_api.add_routes(path="/",
                            methods=[_apigw2.HttpMethod.GET],
                            integration=redirect_integration)

        # Strip the scheme and trailing slash from the API URL, leaving just the domain name (while keeping the CloudFormation variables intact)
        origin_target = http_api.url.replace("https://", "",
                                             1).replace("/", "")
        origin = _origins.HttpOrigin(domain_name=origin_target)
        behaviour = _cloudfront.BehaviorOptions(origin=origin)

        distribution = _cloudfront.Distribution(
            self,
            "nccid-redirect-dist",
            default_behavior=behaviour,
            certificate=cert,
            domain_names=[domain_name.value_as_string],
        )
        # An explicit dependency is required between the API Gateway and the CloudFront distribution
        distribution.node.add_dependency(http_api)

        # Outputs
        distribution_domain = core.CfnOutput(  # noqa:  F841
            self,
            "nccidRedirectDomain",
            value=distribution.distribution_domain_name,
            description=
            "The Cloudfront domain to add to the CNAME records for the redirected domain",
        )
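The Lambda code in the lambda directory is not shown. A hypothetical sketch of a handler that answers every request with a permanent redirect (the destination URL is a placeholder) could be:

# lambda/lambda-handler.py -- illustrative sketch only, not the original source
def handler(event, context):
    return {
        "statusCode": 301,
        # Placeholder destination; the real redirect target is not shown in this example.
        "headers": {"Location": "https://example.org/"},
        "body": "",
    }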