# Example #1
# 0
    def _build_api(self, *, stack):
        """Create the example AppSync GraphQL API and an API key for it.

        Stores the resulting constructs on the instance as ``self.api``
        and ``self.api_key``.
        """
        # Schema is read from the external GraphQL definition file;
        # full field-level logging is enabled on the API.
        schema = appsync.Schema.from_asset(file_path="../appsync-conf/schema.graphql")
        log_config = appsync.LogConfig(field_log_level=appsync.FieldLogLevel.ALL)

        self.api = appsync.GraphqlApi(
            stack, 'exampleapi',
            name="examplegraphqlapi",
            log_config=log_config,
            schema=schema,
        )

        # Low-level Cfn API key bound to the API created above.
        self.api_key = appsync.CfnApiKey(
            stack, 'examplegraphqlapi',
            api_id=self.api.api_id,
        )
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """Define a demo AppSync GraphQL API backed by DynamoDB and Lambda.

        Creates the API (plus an API key), a Customer DynamoDB table with
        query/mutation resolvers, and a Lambda-backed loyalty data source,
        then exports the GraphQL endpoint URL and API key as stack outputs.
        """
        super().__init__(scope, id, **kwargs)

        # Path to the GraphQL schema, resolved relative to this module file.
        schema_location = os.path.dirname(os.path.realpath(__file__)) + "/../schema/schema.graphql"

        # Create a new AppSync GraphQL API with full field-level logging.
        api = appsync.GraphqlApi(self, 'Api',
                                 name="demoapi",
                                 log_config=appsync.LogConfig(field_log_level=appsync.FieldLogLevel.ALL),
                                 schema=appsync.Schema.from_asset(schema_location)
                                 )

        # API key for the GraphQL API (low-level Cfn resource).
        api_key = appsync.CfnApiKey(self, 'the-simple-graphql-service-api-key',
                                    api_id=api.api_id
                                    )

        # Create new DynamoDB Table for Customer, keyed by string "id".
        customer_table = dynamo_db.Table(self, "CustomerTable",
                                         partition_key=dynamo_db.Attribute(name="id",
                                                                           type=dynamo_db.AttributeType.STRING)
                                         )

        # Add Customer DynamoDB as a Datasource for the Graphql API.
        customer_ds = api.add_dynamo_db_data_source('Customer', customer_table)

        # Query Resolver to get all Customers (full table scan).
        customer_ds.create_resolver(
            type_name='Query',
            field_name='getCustomers',
            request_mapping_template=appsync.MappingTemplate.dynamo_db_scan_table(),
            response_mapping_template=appsync.MappingTemplate.dynamo_db_result_list(),
        )

        # Query Resolver to get an individual Customer by their id.
        customer_ds.create_resolver(
            type_name='Query',
            field_name='getCustomer',
            request_mapping_template=appsync.MappingTemplate.dynamo_db_get_item('id', 'id'),
            response_mapping_template=appsync.MappingTemplate.dynamo_db_result_item(),
        )

        # Mutation Resolver for adding a new Customer (id auto-generated).
        customer_ds.create_resolver(
            type_name='Mutation',
            field_name='addCustomer',
            request_mapping_template=appsync.MappingTemplate.dynamo_db_put_item(
                key=appsync.PrimaryKey.partition('id').auto(),
                values=appsync.Values.projecting('customer')
            ),
            response_mapping_template=appsync.MappingTemplate.dynamo_db_result_item()
        )

        # Mutation Resolver for updating an existing Customer (caller supplies id).
        customer_ds.create_resolver(
            type_name='Mutation',
            field_name='saveCustomer',
            request_mapping_template=appsync.MappingTemplate.dynamo_db_put_item(
                key=appsync.PrimaryKey.partition('id').is_('id'),
                values=appsync.Values.projecting('customer')
            ),
            response_mapping_template=appsync.MappingTemplate.dynamo_db_result_item()
        )

        # Mutation resolver for creating a new customer along with their first order.
        customer_ds.create_resolver(
            type_name='Mutation',
            field_name='saveCustomerWithFirstOrder',
            request_mapping_template=appsync.MappingTemplate.dynamo_db_put_item(
                key=appsync.PrimaryKey.partition('order').auto().sort('customer').is_('customer.id'),
                values=appsync.Values.projecting('order').attribute('referral').is_('referral')
            ),
            response_mapping_template=appsync.MappingTemplate.dynamo_db_result_item()
        )

        # Mutation Resolver for deleting an existing Customer.
        customer_ds.create_resolver(
            type_name='Mutation',
            field_name='removeCustomer',
            request_mapping_template=appsync.MappingTemplate.dynamo_db_delete_item('id', 'id'),
            response_mapping_template=appsync.MappingTemplate.dynamo_db_result_item(),
        )

        # Defines the AWS Lambda resource backing the loyalty data source.
        loyalty_lambda = _lambda.Function(self, "LoyaltyLambdaHandler",
                                          runtime=_lambda.Runtime.NODEJS_12_X,
                                          handler="loyalty.handler",
                                          code=_lambda.Code.from_asset("lambda_fns"),
                                          )

        # Add Loyalty Lambda as a Datasource for the Graphql API.
        loyalty_ds = api.add_lambda_data_source('Loyalty', loyalty_lambda)

        # Query Resolver for getLoyaltyLevel, proxied to the loyalty Lambda.
        # (The original comment said "get all Customers" — a copy/paste error.)
        loyalty_ds.create_resolver(
            type_name='Query',
            field_name='getLoyaltyLevel',
            request_mapping_template=appsync.MappingTemplate.lambda_request(),
            response_mapping_template=appsync.MappingTemplate.lambda_result(),
        )

        # Output: GraphQL API Endpoint URL.
        core.CfnOutput(self, 'Endpoint',
                       value=api.graphql_url
                       )

        # Output: API Key value.
        core.CfnOutput(self, 'API_Key',
                       value=api_key.attr_api_key
                       )
    def __init__(self, scope: core.Construct, construct_id: str,
                 **kwargs) -> None:
        """Define the cold-start benchmark stack.

        Deploys one benchmark Lambda per (runtime, memory size) combination,
        a scheduled caller Lambda that invokes them and records results in
        DynamoDB (with an S3 backup), a daily summarizer Lambda, CloudWatch
        error alarms with SNS email notifications, an AppSync GraphQL read
        API over the results table, and an Amplify-hosted front end.
        """
        super().__init__(scope, construct_id, **kwargs)

        # Load configs from "./configurations/config.json"
        # (expects keys 'MemorySizeList' and 'AlarmNotificationEmailAddress').
        configs = {}
        with open("./configurations/config.json") as json_file:
            configs = json.load(json_file)

        # One benchmark Lambda per configured memory size, per runtime.
        # All have X-Ray tracing enabled so the caller can read cold-start
        # timings from traces. Keys in cold_start_lambdas ("RUNTIME_mem")
        # later become environment variable names on the caller.
        mem_list = configs['MemorySizeList']
        cold_start_lambdas = {}
        for mem in mem_list:
            python38_lambda = lambda_.Function(
                self,
                id="coldstart_python38_" + str(mem) + "_",
                runtime=lambda_.Runtime.PYTHON_3_8,
                handler="lambda_function.lambda_handler",
                memory_size=mem,
                tracing=lambda_.Tracing.ACTIVE,
                code=lambda_.Code.asset("./cold_start_lambdas/python38"))
            cold_start_lambdas['PYTHON38_' + str(mem)] = python38_lambda

        for mem in mem_list:
            nodejs12x_lambda = lambda_.Function(
                self,
                id="coldstart_nodejs12x" + str(mem) + "_",
                runtime=lambda_.Runtime.NODEJS_12_X,
                handler="index.handler",
                memory_size=mem,
                tracing=lambda_.Tracing.ACTIVE,
                code=lambda_.Code.asset("./cold_start_lambdas/nodejs12x"))
            cold_start_lambdas['NODEJS12X_' + str(mem)] = nodejs12x_lambda

        for mem in mem_list:
            go1x_lambda = lambda_.Function(
                self,
                id="coldstart_go1x" + str(mem) + "_",
                runtime=lambda_.Runtime.GO_1_X,
                handler="hello",
                memory_size=mem,
                tracing=lambda_.Tracing.ACTIVE,
                code=lambda_.Code.asset("./cold_start_lambdas/go1x"))
            cold_start_lambdas['GO1X_' + str(mem)] = go1x_lambda

        for mem in mem_list:
            netcore31_lambda = lambda_.Function(
                self,
                id="coldstart_netcore31" + str(mem) + "_",
                runtime=lambda_.Runtime.DOTNET_CORE_3_1,
                handler="LambdaTest::LambdaTest.LambdaHandler::handleRequest",
                tracing=lambda_.Tracing.ACTIVE,
                code=lambda_.Code.asset("./cold_start_lambdas/netcore31"),
                memory_size=mem,
            )
            cold_start_lambdas['NETCORE31_' + str(mem)] = netcore31_lambda

        for mem in mem_list:
            java11corretto_lambda = lambda_.Function(
                self,
                id="coldstart_java11corretto" + str(mem) + "_",
                runtime=lambda_.Runtime.JAVA_11,
                handler="example.Hello::handleRequest",
                memory_size=mem,
                tracing=lambda_.Tracing.ACTIVE,
                code=lambda_.Code.asset("./cold_start_lambdas/java11corretto"))
            cold_start_lambdas['JAVA11_' + str(mem)] = java11corretto_lambda

        for mem in mem_list:
            ruby27_lambda = lambda_.Function(
                self,
                id="coldstart_ruby27" + str(mem) + "_",
                runtime=lambda_.Runtime.RUBY_2_7,
                handler="lambda_function.lambda_handler",
                memory_size=mem,
                tracing=lambda_.Tracing.ACTIVE,
                code=lambda_.Code.asset("./cold_start_lambdas/ruby27"))
            cold_start_lambdas['RUBY27_' + str(mem)] = ruby27_lambda

        # Caller: invokes every benchmark Lambda and needs X-Ray read access
        # (to collect timings) plus GetFunctionConfiguration on all functions.
        cold_start_caller = lambda_.Function(
            self,
            id="cold_start_caller",
            runtime=lambda_.Runtime.PYTHON_3_8,
            handler="ColdStartCaller.lambda_handler",
            code=lambda_.Code.asset("./cold_start_lambdas/cold_start_caller"),
            timeout=core.Duration.seconds(180))
        cold_start_caller.role.add_managed_policy(
            iam_.ManagedPolicy.from_aws_managed_policy_name(
                "AWSXrayReadOnlyAccess"))
        cold_start_caller.role.add_to_policy(
            iam_.PolicyStatement(effect=iam_.Effect.ALLOW,
                                 actions=['lambda:GetFunctionConfiguration'],
                                 resources=["*"]))
        # Expose each benchmark Lambda's ARN to the caller as an env var and
        # allow the caller to invoke it.
        for lambda_name in cold_start_lambdas:
            cold_start_caller.add_environment(
                lambda_name, cold_start_lambdas[lambda_name].function_arn)
            cold_start_lambdas[lambda_name].grant_invoke(cold_start_caller)

        # DynamoDB: results table, keyed PK (string) / SK (number), with TTL.
        cold_start_table = dynamodb_.Table(
            self,
            id="cold_start_benchmark_table",
            partition_key=dynamodb_.Attribute(
                name="PK", type=dynamodb_.AttributeType.STRING),
            sort_key=dynamodb_.Attribute(name="SK",
                                         type=dynamodb_.AttributeType.NUMBER),
            time_to_live_attribute="TTL")
        cold_start_table.grant_write_data(cold_start_caller)
        cold_start_caller.add_environment('TABLE_NAME',
                                          cold_start_table.table_name)

        # S3: backup bucket; objects transition to Infrequent Access after
        # 30 days to save cost.
        life_cycle_rule = s3_.LifecycleRule(transitions=[
            s3_.Transition(storage_class=s3_.StorageClass.INFREQUENT_ACCESS,
                           transition_after=core.Duration.days(30))
        ])
        cold_start_backup_s3 = s3_.Bucket(self,
                                          "cold_start_benchmark_backup",
                                          lifecycle_rules=[life_cycle_rule])
        cold_start_backup_s3.grant_write(cold_start_caller)
        cold_start_caller.add_environment('BACKUP_BUCKET_NAME',
                                          cold_start_backup_s3.bucket_name)

        # CW event: scheduled trigger for the caller.
        # NOTE(review): minute="0,1" fires at :00 and :01 of every hour —
        # technically "twice every 1 hour" per the description, but the two
        # runs are back-to-back; confirm this is intended.
        cron_job = events_.Rule(
            self,
            "cold_start_caller_cron_job",
            description="Run cold start caller twice every 1 hour",
            schedule=events_.Schedule.cron(minute="0,1"),
            targets=[targets_.LambdaFunction(cold_start_caller)])

        # Alarm when the caller reports any Errors in a 5-minute window;
        # notification goes out by email via SNS.
        # NOTE(review): "errer" in alarm_name looks like a typo for "error";
        # renaming would replace the deployed alarm resource — confirm
        # before changing.
        errorAlarm = cloudwatch_.Alarm(
            self,
            "cold_start_caller_error_alarm",
            metric=cloudwatch_.Metric(
                metric_name="Errors",
                namespace="AWS/Lambda",
                period=core.Duration.minutes(5),
                statistic="Maximum",
                dimensions={"FunctionName": cold_start_caller.function_name}),
            evaluation_periods=1,
            datapoints_to_alarm=1,
            threshold=1,
            actions_enabled=True,
            alarm_description="Alarm when cold start caller failed",
            alarm_name="cold_start_caller_errer_alarm",
            comparison_operator=cloudwatch_.ComparisonOperator.
            GREATER_THAN_OR_EQUAL_TO_THRESHOLD,
            treat_missing_data=cloudwatch_.TreatMissingData.MISSING)
        cold_start_caller_error_alarm_topic = sns_.Topic(
            self,
            "cold_start_caller_error_alarm_topic",
            display_name="ColdStartCallerErrorAlarmTopic",
            topic_name="ColdStartCallerErrorAlarmTopic")
        cold_start_caller_error_alarm_topic.add_subscription(
            sns_subs_.EmailSubscription(
                configs['AlarmNotificationEmailAddress']))
        errorAlarm.add_alarm_action(
            cloudwatch_actions_.SnsAction(cold_start_caller_error_alarm_topic))

        # Summarizer: aggregates results in the DynamoDB table.
        cold_start_summarizer = lambda_.Function(
            self,
            id="cold_start_summarizer",
            runtime=lambda_.Runtime.PYTHON_3_8,
            handler="ColdStartSummarizer.lambda_handler",
            code=lambda_.Code.asset(
                "./cold_start_lambdas/cold_start_summarizer"),
            timeout=core.Duration.seconds(10))
        cold_start_table.grant_read_write_data(cold_start_summarizer)
        cold_start_summarizer.add_environment('TABLE_NAME',
                                              cold_start_table.table_name)

        # Setup CW event for summarizer: daily at 00:30 UTC.
        cron_job_summarizer = events_.Rule(
            self,
            "cold_start_summarizer_cron_job",
            description="Run cold start summarizer once every day",
            schedule=events_.Schedule.cron(minute='30', hour='0'),
            targets=[targets_.LambdaFunction(cold_start_summarizer)])

        # Error alarm for summarizer — mirrors the caller's alarm wiring.
        # NOTE(review): same "errer" typo in alarm_name as above.
        errorAlarm_summarizer = cloudwatch_.Alarm(
            self,
            "cold_start_summarizer_error_alarm",
            metric=cloudwatch_.Metric(metric_name='Errors',
                                      namespace='AWS/Lambda',
                                      period=core.Duration.minutes(5),
                                      statistic='Maximum',
                                      dimensions={
                                          'FunctionName':
                                          cold_start_summarizer.function_name
                                      }),
            evaluation_periods=1,
            datapoints_to_alarm=1,
            threshold=1,
            actions_enabled=True,
            alarm_description="Alarm when cold start summarizer failed",
            alarm_name="cold_start_summarizer_errer_alarm",
            comparison_operator=cloudwatch_.ComparisonOperator.
            GREATER_THAN_OR_EQUAL_TO_THRESHOLD,
            treat_missing_data=cloudwatch_.TreatMissingData.MISSING)
        cold_start_summarizer_error_alarm_topic = sns_.Topic(
            self,
            "cold_start_summarizer_error_alarm_topic",
            display_name="ColdStartSummarizerErrorAlarmTopic",
            topic_name="ColdStartSummarizerErrorAlarmTopic")
        cold_start_summarizer_error_alarm_topic.add_subscription(
            sns_subs_.EmailSubscription(
                configs['AlarmNotificationEmailAddress']))
        errorAlarm_summarizer.add_alarm_action(
            cloudwatch_actions_.SnsAction(
                cold_start_summarizer_error_alarm_topic))

        # GraphQL API: API-key-authorized read access over the results
        # table, with X-Ray enabled; key expires after one year.
        graphql_api = appsync_.GraphqlApi(
            self,
            "cold_start_benchmark_graphql_api",
            name="cold_start_benchmark_graphql_api",
            authorization_config=appsync_.AuthorizationConfig(
                default_authorization=appsync_.AuthorizationMode(
                    authorization_type=appsync_.AuthorizationType.API_KEY,
                    api_key_config=appsync_.ApiKeyConfig(
                        description="cold_start_benchmark_graphql_api_key",
                        expires=core.Expiration.after(core.Duration.days(365)),
                        name="cold_start_benchmark_graphql_api_key"))),
            schema=appsync_.Schema.from_asset(
                './cold_start_benchmark/graphql_schema/schema.graphql'),
            xray_enabled=True)
        dynamodb_data_source = graphql_api.add_dynamo_db_data_source(
            id="cold_start_dynamodb_data_source", table=cold_start_table)
        # Query resolver backed by VTL templates stored alongside the schema.
        dynamodb_data_source.create_resolver(
            field_name="listColdStartSummariesAfterTimestamp",
            type_name="Query",
            request_mapping_template=appsync_.MappingTemplate.from_file(
                './cold_start_benchmark/graphql_schema/request_mapping_template'
            ),
            response_mapping_template=appsync_.MappingTemplate.from_file(
                './cold_start_benchmark/graphql_schema/response_mapping_template'
            ))

        # Amplify front end, built from a GitHub repo (OAuth token read from
        # Secrets Manager); master branch mapped to coldstart.zzzgin.com.
        front_end_amplify_app = amplify_.App(
            self,
            "cold-start-front-end",
            app_name="cold_start_front_end",
            source_code_provider=amplify_.GitHubSourceCodeProvider(
                owner="ZzzGin",
                repository="cold-start-frontend-website",
                oauth_token=core.SecretValue.secrets_manager(
                    "zzzgin/github/token", json_field="zzzgin-github-token")))
        master_Branch = front_end_amplify_app.add_branch("master")
        domain = front_end_amplify_app.add_domain('zzzgin.com')
        domain.map_sub_domain(master_Branch, 'coldstart')
# Example #4
# 0
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """Define the service stack: one Lambda handler fronted by both an
        HTTP API Gateway (REST) and an AppSync GraphQL API.

        REST routes and GraphQL resolvers are generated from the ``@router``
        decorators collected by ``lambda_function.router``.  Endpoint URLs
        and API ids are exported as CloudFormation outputs.

        NOTE(review): this method reads module-level names defined elsewhere
        in this file (``service_name``, ``region``, ``stage``, ``stack_name``,
        ``to_camel``, ``lambda_function``) — confirm they are set before the
        stack is instantiated.
        """
        super().__init__(scope, id, **kwargs)

        account = self.account

        # Deployment banner printed during `cdk synth`/`cdk deploy`.
        print("")
        print(f"   Service: {service_name}")
        print(f"   Region:  {region}")
        print(f"   Stage:   {stage}")
        print(f"   Account: {account}")
        print(f"   Stack:   {stack_name}")
        print("")

        # (Removed: an unused `boto3.client('ssm')` was created here; it was
        # never referenced and needlessly required AWS credentials at synth.)

        # Per-stage environment variable mapping.
        # NOTE(review): the prod 'vpcId' is still the 'vpc-XXXXXX'
        # placeholder — fill in before deploying to prod.
        environment: dict = {'dev': {
                                     'logLevel': 'DEBUG',
                                     'dbHost': 'simple-serverless-aurora-serverless-development.cluster-cw3bjgnjhzxa.us-east-2.rds.amazonaws.com',
                                     'dbName': 'simple_serverless_dev',
                                     'vpcId': 'vpc-319daa58'
                                     },
                             'prod': {
                                      'logLevel': 'INFO',
                                      'dbHost': 'simple-serverless-aurora-serverless-production.cluster-cw3bjgnjhzxa.us-east-2.rds.amazonaws.com',
                                      'dbName': 'simple_serverless_prod',
                                      'vpcId': 'vpc-XXXXXX'
                                      }
                             }

        # Environment variables passed to the Lambda function.
        env_variables = {
            'STAGE': stage,
            "LOG_LEVEL": environment[stage]['logLevel']
        }

        # Create the main lambda function that serves both REST and GraphQL.
        service_lambda = aws_lambda.Function(self,
                                             'LambdaFunction',
                                             runtime=aws_lambda.Runtime.PYTHON_3_8,
                                             description=service_name,
                                             code=aws_lambda.AssetCode("./dist"),
                                             function_name=service_name + "-" + stage,
                                             timeout=core.Duration.seconds(35),
                                             tracing=aws_lambda.Tracing.ACTIVE,
                                             memory_size=128,
                                             handler='lambda_function.handler',
                                             environment=env_variables)

        #
        # REST (API Gateway HTTP) stuff starts here
        #

        # How to: Import an existing HTTP API Gateway instance
        # http_api = apigatewayv2.HttpApi.from_api_id(self, id='APIGateway', http_api_id='0fdl9wlxw4')

        # How to: Create a new HTTP API Gateway instance
        http_api = apigatewayv2.HttpApi(
            self, 'APIGateway',
            api_name=f'{service_name}-api-{stage}'
        )

        # All routes proxy to the single service Lambda (payload v2.0).
        integration = apigatewayv2_integrations.LambdaProxyIntegration(
            handler=service_lambda,
            payload_format_version=apigatewayv2.PayloadFormatVersion.VERSION_2_0
        )

        # How to: auto generate REST endpoints from decorators ex: @router.rest("GET", "/students").
        for route_key, endpoint in lambda_function.router.get_rest_endpoints().items():
            print(f"Creating REST endpoint for {route_key}")
            http_api.add_routes(
                path=endpoint['path'],
                methods=[apigatewayv2.HttpMethod(endpoint['method'])],
                integration=integration
            )

        #
        # Graphql (AppSync) stuff starts here
        #
        # Service role that lets AppSync invoke the Lambda data source.
        policy = iam.PolicyStatement(actions=['lambda:InvokeFunction'],
                                     resources=[service_lambda.function_arn])
        principal = iam.ServicePrincipal('appsync.amazonaws.com')
        service_role = iam.Role(self, 'service-role', assumed_by=principal)
        service_role.add_to_policy(policy)

        # How to: import an existing AppSync instance
        # graphql_api = appsync.GraphqlApi.from_graphql_api_attributes(self, 'GraphQLApi', graphql_api_id='phw4kdabqnbjzi4czy3dtbmynu')

        # API-key-authorized GraphQL API using the checked-in schema file.
        graphql_schema = appsync.Schema(file_path='./src/schema.graphql')
        graphql_auth_mode = appsync.AuthorizationMode(authorization_type=appsync.AuthorizationType.API_KEY)
        graphql_auth_config = appsync.AuthorizationConfig(default_authorization=graphql_auth_mode)

        graphql_api = appsync.GraphqlApi(
            self, 'GraphQLApi',
            name=f'{service_name}-api-{stage}',
            authorization_config=graphql_auth_config,
            schema=graphql_schema
        )

        datasource_name = to_camel(service_name) + "Lambda"
        lambda_data_source = appsync.LambdaDataSource(
            self, 'LambdaDataSource',
            api=graphql_api,
            name=datasource_name,
            lambda_function=service_lambda,
            service_role=service_role
        )

        # How to: auto generate GraphQL resolvers from decorators ex: @router.graphql("Query", "listStudents").
        for field_name, graphql_def in lambda_function.router.get_graphql_endpoints().items():
            print(f"Creating graphql {graphql_def['parent']} for {field_name}")
            appsync.Resolver(
                self, field_name + "Resolver",
                api=graphql_api,
                type_name=graphql_def['parent'],
                field_name=field_name,
                data_source=lambda_data_source
            )

        # Cross-stack outputs: REST URL, GraphQL id/URL/API key.
        core.CfnOutput(self, "RestAPIUrlOutput",
                       value=http_api.url,
                       export_name=f"{stack_name}-RestApiUrl-{stage}")

        core.CfnOutput(self, "GraphQLApiIdOutput",
                       value=graphql_api.api_id,
                       export_name=f"{stack_name}-GraphqlApiId-{stage}")

        core.CfnOutput(self, "GraphQLUrlOutput",
                       value=graphql_api.graphql_url,
                       export_name=f"{stack_name}-GraphqlUrl-{stage}")

        core.CfnOutput(self, "GraphQLApiKeyOutput",
                       value=graphql_api.api_key,
                       export_name=f"{stack_name}-GraphQLApiKey-{stage}")
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        ### S3 ###

        source_csv_bucket = _s3.Bucket(self,
                                       "BYODValidationSourceBucket",
                                       versioned=True)

        target_csv_bucket = _s3.Bucket(
            self,
            "BYODValidationTargetBucket",
            removal_policy=core.RemovalPolicy.RETAIN)

        webtool_bucket = _s3.Bucket(
            self,
            "WebToolBucket",
            website_index_document="index.html",
            website_error_document="index.html",
            public_read_access=True,
        )

        core.CfnOutput(self, "DVTRegion", value=self.region)
        core.CfnOutput(self,
                       "SourceS3Bucket",
                       value=source_csv_bucket.bucket_name)
        core.CfnOutput(self,
                       "TargetS3Bucket",
                       value=target_csv_bucket.bucket_name)
        core.CfnOutput(self,
                       "WebToolS3Bucket",
                       value=webtool_bucket.bucket_name)
        core.CfnOutput(self,
                       "WebToolUrl",
                       value=webtool_bucket.bucket_website_url)

        ### Stager Function ###
        stager_function = _lambda.Function(self,
                                           "StagerFunction",
                                           runtime=_lambda.Runtime.NODEJS_12_X,
                                           code=_lambda.Code.from_asset(
                                               os.path.join(
                                                   dirname, "lambda",
                                                   "stager")),
                                           handler='index.handler')

        stager_function.add_environment("REGION", self.region)
        stager_function.add_environment("SOURCE_BUCKET",
                                        source_csv_bucket.bucket_name)
        stager_function.add_environment("STAGE_BUCKET",
                                        target_csv_bucket.bucket_name)
        source_csv_bucket.grant_read(stager_function)
        target_csv_bucket.grant_put(stager_function)
        core.CfnOutput(self,
                       "StagerLambdaFunction",
                       value=stager_function.function_name)

        ### Profiling Queue
        profiling_job_queue = _sqs.Queue(self, "ProfilingJobQueue")
        core.CfnOutput(self,
                       "SQSProfileQueue",
                       value=profiling_job_queue.queue_url)

        ### Cognito ###

        userpool = _cognito.UserPool(self,
                                     "WebToolUserPool",
                                     user_pool_name="byod-webtool-userpool",
                                     self_sign_up_enabled=True,
                                     auto_verify={
                                         "email": True,
                                         "phone": False
                                     },
                                     user_verification={
                                         "email_subject":
                                         "Your verification code",
                                         "email_body":
                                         "Your verification code is {####}",
                                         "email_style":
                                         _cognito.VerificationEmailStyle.CODE
                                     },
                                     standard_attributes={
                                         "email": {
                                             "required": True,
                                             "mutable": False
                                         }
                                     },
                                     password_policy={})
        client = userpool.add_client(
            "webtool-app-client",
            auth_flows={
                "custom": True,
                "user_password": True,
                "user_srp": True,
                #"refresh_token": True
            })
        identity_pool = _cognito.CfnIdentityPool(
            self,
            "WebToolCognitoIdentityPool",
            allow_unauthenticated_identities=True)
        identity_pool.add_property_override(
            "CognitoIdentityProviders",
            [{
                "ClientId": client.user_pool_client_id,
                "ProviderName": userpool.user_pool_provider_name
            }])
        auth_role = _iam.Role(
            self,
            "CognitoAuthRole",
            assumed_by=WebIdentityPrincipal(
                "cognito-identity.amazonaws.com", {
                    "StringEquals": {
                        "cognito-identity.amazonaws.com:aud": identity_pool.ref
                    },
                    "ForAnyValue:StringLike": {
                        "cognito-identity.amazonaws.com:amr": "authenticated"
                    }
                }))
        auth_role.add_to_policy(
            PolicyStatement(effect=Effect.ALLOW,
                            actions=["s3:GetObject", "s3:PutObject"],
                            resources=["%s/*" % target_csv_bucket.bucket_arn]))

        auth_role.add_to_policy(
            PolicyStatement(effect=Effect.ALLOW,
                            actions=["lambda:invokeFunction"],
                            resources=[stager_function.function_arn]))

        auth_role.add_to_policy(
            PolicyStatement(effect=Effect.ALLOW,
                            actions=["sqs:*"],
                            resources=[profiling_job_queue.queue_arn]))

        unauth_role = _iam.Role(
            self,
            "CognitoUnauthRole",
            assumed_by=_iam.WebIdentityPrincipal(
                "cognito-identity.amazonaws.com",
                conditions={
                    "StringEquals": {
                        "cognito-identity.amazonaws.com:aud": identity_pool.ref
                    },
                    "ForAnyValue:StringLike": {
                        "cognito-identity.amazonaws.com:amr": "unauthenticated"
                    }
                }))
        identity_pool_policy = _cognito.CfnIdentityPoolRoleAttachment(
            self,
            "WebToolCognitoIdentityPoolPolicy",
            identity_pool_id=identity_pool.ref,
            roles={
                'unauthenticated': unauth_role.role_arn,
                'authenticated': auth_role.role_arn
            })
        core.CfnOutput(self, "UserPoolId", value=userpool.user_pool_id)
        core.CfnOutput(self, "IdentityPoolId", value=identity_pool.ref)
        core.CfnOutput(self, "ClientId", value=client.user_pool_client_id)
        core.CfnOutput(self,
                       "ProviderName",
                       value=userpool.user_pool_provider_name)

        ### DynamoDB ###

        validation_job_table = _dynamodb.Table(
            self,
            "ValidationJobTable",
            partition_key=_dynamodb.Attribute(
                name="id", type=_dynamodb.AttributeType.STRING))

        ## AppSync ###

        api = _appsync.GraphqlApi(
            self,
            "Api",
            name="validation-job-api",
            schema=_appsync.Schema.from_asset(
                os.path.join(dirname, "api", "schema.graphql")),
            authorization_config=AuthorizationConfig(
                default_authorization=AuthorizationMode(
                    authorization_type=AuthorizationType.USER_POOL,
                    user_pool_config=UserPoolConfig(user_pool=userpool))),
            log_config=LogConfig(exclude_verbose_content=False,
                                 field_log_level=FieldLogLevel.ALL))
        api_ds = api.add_dynamo_db_data_source("ValidationJobDataSource",
                                               validation_job_table)

        from aws_cdk.aws_appsync import MappingTemplate

        api_ds.create_resolver(
            type_name="Query",
            field_name="listJobss",
            request_mapping_template=MappingTemplate.from_file(
                os.path.join(dirname, "api", "resolvers",
                             "Query.listJobss.req.vtl")),
            response_mapping_template=MappingTemplate.from_file(
                os.path.join(dirname, "api", "resolvers",
                             "Query.listJobss.res.vtl")))

        api_ds.create_resolver(
            type_name="Query",
            field_name="getJobs",
            request_mapping_template=MappingTemplate.from_file(
                os.path.join(dirname, "api", "resolvers",
                             "Query.getJobs.req.vtl")),
            response_mapping_template=MappingTemplate.from_file(
                os.path.join(dirname, "api", "resolvers",
                             "Query.getJobs.res.vtl")))

        core.CfnOutput(self, "GraphQLEndpoint", value=api.graphql_url)

        ### SQS ###

        validation_job_queue = _sqs.Queue(self, "ValidationJobQueue")

        ### Lambda ###
        validation_trigger_function = _lambda.Function(
            self,
            "ValidationTriggerFunction",
            runtime=_lambda.Runtime.PYTHON_3_8,
            code=_lambda.Code.from_asset(
                os.path.join(dirname, "lambda", "validation_trigger")),
            handler='lambda_function.lambda_handler')

        validation_trigger_function.add_environment(
            "TABLE_NAME", validation_job_table.table_name)
        validation_trigger_function.add_environment(
            "QUEUE_URL", validation_job_queue.queue_url)

        validation_trigger_function.add_event_source(
            _S3EventSource(source_csv_bucket,
                           events=[_s3.EventType.OBJECT_CREATED]))

        source_csv_bucket.grant_read(validation_trigger_function)
        validation_job_table.grant_read_write_data(validation_trigger_function)
        validation_job_queue.grant_send_messages(validation_trigger_function)

        ### ECS Fargate ###

        validation_fargate_asset = _ecr_assets.DockerImageAsset(
            self,
            "ValidationBuildImage",
            directory=os.path.join(dirname, "fargate", "validation"))
        profiling_fargate_asset = _ecr_assets.DockerImageAsset(
            self,
            "ProfilingBuildImage",
            directory=os.path.join(dirname, "fargate", "profiling"))

        vpc = _ec2.Vpc(self, "VPC", max_azs=3)
        cluster = _ecs.Cluster(self, "ECSCluster", vpc=vpc)

        validation_fargate_service = _ecs_patterns.QueueProcessingFargateService(
            self,
            "ValidationFargateService",
            cluster=cluster,
            cpu=4096,
            memory_limit_mib=30720,
            enable_logging=True,
            image=_ecs.ContainerImage.from_docker_image_asset(
                validation_fargate_asset),
            environment={
                "TABLE_NAME": validation_job_table.table_name,
                "QUEUE_URL": validation_job_queue.queue_url,
                "SOURCE_BUCKET_NAME": source_csv_bucket.bucket_name,
                "TARGET_BUCKET_NAME": target_csv_bucket.bucket_name,
                "REGION": self.region
            },
            queue=validation_job_queue,
            max_scaling_capacity=2,
            max_healthy_percent=200,
            min_healthy_percent=66)
        validation_fargate_service.task_definition.task_role.add_managed_policy(
            _iam.ManagedPolicy.from_aws_managed_policy_name(
                "AmazonDynamoDBFullAccess"))
        validation_fargate_service.task_definition.task_role.add_managed_policy(
            _iam.ManagedPolicy.from_aws_managed_policy_name(
                "AmazonS3FullAccess"))

        profiling_fargate_service = _ecs_patterns.QueueProcessingFargateService(
            self,
            "ProfilingFargateService",
            cluster=cluster,
            cpu=4096,
            memory_limit_mib=30720,
            enable_logging=True,
            image=_ecs.ContainerImage.from_docker_image_asset(
                profiling_fargate_asset),
            environment={
                "TABLE_NAME": validation_job_table.table_name,
                "QUEUE_URL": profiling_job_queue.queue_url,
                "SOURCE_BUCKET_NAME": source_csv_bucket.bucket_name,
                "TARGET_BUCKET_NAME": target_csv_bucket.bucket_name,
                "REGION": self.region
            },
            queue=profiling_job_queue,
            max_scaling_capacity=2,
            max_healthy_percent=200,
            min_healthy_percent=66)
        profiling_fargate_service.task_definition.task_role.add_managed_policy(
            _iam.ManagedPolicy.from_aws_managed_policy_name(
                "AmazonDynamoDBFullAccess"))
        profiling_fargate_service.task_definition.task_role.add_managed_policy(
            _iam.ManagedPolicy.from_aws_managed_policy_name(
                "AmazonS3FullAccess"))
예제 #6
0
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """Provision a Cognito-authorized AppSync todo API backed by DynamoDB.

        Creates a user pool with an app client, a GraphQL API that uses the
        pool for its default authorization, two DynamoDB tables (todos and
        comments, plus a GSI on the comment table), and VTL-based resolvers
        wired to each table.
        """
        super().__init__(scope, id, **kwargs)

        # Cognito user pool: optional MFA, relaxed password policy apart
        # from the 12-character minimum length.
        user_pool = cognito.UserPool(
            scope=self,
            id="user-pool",
            mfa=cognito.Mfa.OPTIONAL,
            mfa_second_factor=cognito.MfaSecondFactor(otp=True, sms=True),
            password_policy=cognito.PasswordPolicy(
                min_length=12,
                require_lowercase=True,
                require_uppercase=False,
                require_digits=False,
                require_symbols=False,
            ),
        )

        # App client used by the customer-facing application.
        user_pool.add_client(
            id="customer-app-client",
            auth_flows=cognito.AuthFlow(
                user_password=True,
                refresh_token=True,
                user_srp=True,
            ),
        )

        # GraphQL API authorized against the user pool, with X-Ray enabled.
        api = appsync.GraphqlApi(
            scope=self,
            id="graphql-api",
            name="todo-api",
            schema=appsync.Schema.from_asset(
                file_path=os.path.join("graphQL", "schema.graphql")),
            authorization_config=appsync.AuthorizationConfig(
                default_authorization=appsync.AuthorizationMode(
                    authorization_type=appsync.AuthorizationType.USER_POOL,
                    user_pool_config=appsync.UserPoolConfig(
                        user_pool=user_pool))),
            xray_enabled=True,
        )

        # Todo items keyed by id; table is destroyed with the stack.
        todo_table = dynamodb.Table(
            scope=self,
            id="todo-table",
            removal_policy=core.RemovalPolicy.DESTROY,
            partition_key=dynamodb.Attribute(
                name="id", type=dynamodb.AttributeType.STRING),
        )

        # Comments keyed by comment id, sorted by the todo they belong to.
        comment_table = dynamodb.Table(
            scope=self,
            id="comment-table",
            removal_policy=core.RemovalPolicy.DESTROY,
            partition_key=dynamodb.Attribute(
                name="commentid", type=dynamodb.AttributeType.STRING),
            sort_key=dynamodb.Attribute(
                name="todoid", type=dynamodb.AttributeType.STRING),
        )
        # GSI so all comments for a given todo can be queried directly.
        comment_table.add_global_secondary_index(
            index_name="todoid-index",
            partition_key=dynamodb.Attribute(
                name="todoid", type=dynamodb.AttributeType.STRING),
        )

        def _vtl(template_name):
            # Every mapping template lives under graphQL/ next to the schema.
            return appsync.MappingTemplate.from_file(
                os.path.join("graphQL", template_name))

        # Todo table data source and its query/mutation resolvers.
        todo_ds = api.add_dynamo_db_data_source(
            id="todoDS",
            table=todo_table,
        )
        todo_ds.create_resolver(
            type_name="Query",
            field_name="getTodos",
            request_mapping_template=_vtl("getItemsRequest.vtl"),
            response_mapping_template=_vtl("getItemsResponse.vtl"),
        )
        todo_ds.create_resolver(
            type_name="Mutation",
            field_name="addTodo",
            request_mapping_template=_vtl("addTodoRequest.vtl"),
            response_mapping_template=_vtl("addTodoResponse.vtl"),
        )

        # Comment table data source: resolves Todo.contents and addComment.
        comment_ds = api.add_dynamo_db_data_source(
            id="commentDS",
            table=comment_table,
        )
        comment_ds.create_resolver(
            type_name="Todo",
            field_name="contents",
            request_mapping_template=_vtl("getCommentsRequest.vtl"),
            response_mapping_template=_vtl("getCommentsResponse.vtl"),
        )
        comment_ds.create_resolver(
            type_name="Mutation",
            field_name="addComment",
            request_mapping_template=_vtl("addCommentRequest.vtl"),
            response_mapping_template=_vtl("addCommentResponse.vtl"),
        )
예제 #7
0
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """Stand up an HTTP API (WSGI + ASGI Lambdas) and a GraphQL notes API.

        Exposes ``http_api``, ``http_api_url`` and ``graphql_api`` as
        attributes so other constructs can reference them.
        """
        super().__init__(scope, id, **kwargs)

        # Lambda serving the WSGI app, reached through API Gateway v2 with
        # the v1.0 payload format the WSGI adapter expects.
        wsgi_fn = lmb_py.PythonFunction(self,
                                        "wsgi-function",
                                        entry="./lambdas/wsgi")
        wsgi_integration = apigw_v2.LambdaProxyIntegration(
            handler=wsgi_fn,
            payload_format_version=apigw_v2.PayloadFormatVersion.VERSION_1_0,
        )

        # Lambda serving the ASGI app; it is the HTTP API's default target.
        asgi_fn = lmb_py.PythonFunction(self,
                                        "asgi-function",
                                        entry="./lambdas/asgi")
        asgi_integration = apigw_v2.LambdaProxyIntegration(handler=asgi_fn)

        self.http_api = apigw_v2.HttpApi(
            self, "http-api", default_integration=asgi_integration)

        # Route /wsgi and everything beneath it to the WSGI integration.
        for route_path in ("/wsgi", "/wsgi/{proxy+}"):
            self.http_api.add_routes(
                path=route_path,
                methods=[apigw_v2.HttpMethod.GET],
                integration=wsgi_integration,
            )

        self.http_api_url = core.CfnOutput(self,
                                           "RestApiUrl",
                                           value=self.http_api.url)

        # GraphQL API for the notes example.
        self.graphql_api = appsync.GraphqlApi(
            self,
            "graphql-api",
            name="notes-example-api",
            schema=appsync.Schema.from_asset("./graphql/schema.graphql"),
        )
        core.CfnOutput(self, "GraphQLUrl", value=self.graphql_api.graphql_url)
        core.CfnOutput(self, "GraphQlApiKey", value=self.graphql_api.api_key)

        # A single Lambda resolves every query and mutation in the schema.
        graphql_handler = lmb_py.PythonFunction(
            self,
            "graphql-handler",
            entry="./lambdas/graphql",
            runtime=lmb.Runtime.PYTHON_3_8,
        )
        lambda_ds = self.graphql_api.add_lambda_data_source(
            "lambdaDatasource", graphql_handler)
        for gql_type, gql_field in (
                ("Query", "getNoteById"),
                ("Query", "listNotes"),
                ("Mutation", "createNote"),
                ("Mutation", "deleteNote"),
        ):
            lambda_ds.create_resolver(type_name=gql_type,
                                      field_name=gql_field)

        # Notes live in an on-demand DynamoDB table the handler can use.
        notes_table = dynamodb.Table(
            self,
            "notes-table",
            billing_mode=dynamodb.BillingMode.PAY_PER_REQUEST,
            partition_key=dynamodb.Attribute(
                name="id", type=dynamodb.AttributeType.STRING),
        )
        notes_table.grant_read_write_data(graphql_handler)
        graphql_handler.add_environment("NOTES_TABLE", notes_table.table_name)
예제 #8
0
    def __init__(self, scope: core.Construct, construct_id: str,
                 **kwargs) -> None:
        """AppSync activity API that reuses an Amplify-managed Cognito pool.

        The user pool id is scraped out of Amplify's ``aws-exports.js`` so
        the stack can offer USER_POOL auth in addition to the default
        API-key auth.
        """
        super().__init__(scope, construct_id, **kwargs)

        # aws-exports.js is JavaScript, not JSON: slice out the object
        # literal between the first '{' and the last '}' and parse that.
        with open('../aws-exports.js') as exports_file:
            raw = exports_file.read()
        aws_exports = json.loads(raw[raw.find('{'):raw.rfind('}') + 1])
        user_pool_id = aws_exports["aws_user_pools_id"]

        # Wrap the pre-existing Amplify pool in a CDK construct reference.
        user_pool = aws_cognito.UserPool.from_user_pool_id(
            self, 'amplify_user_pool', user_pool_id)

        # API key (valid for one year) is the default auth mode; the
        # Cognito user pool is offered as an additional mode.
        api = aws_appsync.GraphqlApi(
            self,
            'ring-it-up-api',
            name='ring-it-up-api',
            schema=aws_appsync.Schema.from_asset('graphql/schema.graphql'),
            authorization_config=aws_appsync.AuthorizationConfig(
                default_authorization=aws_appsync.AuthorizationMode(
                    authorization_type=aws_appsync.AuthorizationType.API_KEY,
                    api_key_config=aws_appsync.ApiKeyConfig(
                        expires=core.Expiration.after(
                            core.Duration.days(365)))),
                additional_authorization_modes=[
                    aws_appsync.AuthorizationMode(
                        authorization_type=aws_appsync.AuthorizationType.
                        USER_POOL,
                        user_pool_config=aws_appsync.UserPoolConfig(
                            user_pool=user_pool))
                ]))

        # Surface endpoint, key and auth type for the frontend config.
        core.CfnOutput(self,
                       "aws_appsync_graphqlEndpoint",
                       value=api.graphql_url)
        core.CfnOutput(self, "aws_appsync_apiKey", value=api.api_key)
        core.CfnOutput(self,
                       "aws_appsync_authenticationType",
                       value=str(aws_appsync.AuthorizationType.API_KEY))

        # One Node.js Lambda resolves every field below.
        api_lambda = aws_lambda.Function(
            self,
            'AppSyncBlogHandler',
            runtime=aws_lambda.Runtime.NODEJS_12_X,
            handler='main.handler',
            code=aws_lambda.Code.from_asset('lambda-fns'),
            memory_size=1024)

        # Register the Lambda as the data source and attach the resolvers.
        lambda_ds = api.add_lambda_data_source('lambdaDatasource', api_lambda)
        for gql_type, gql_field in (
                ("Query", "getActivityById"),
                ("Query", "listActivities"),
                ("Mutation", "createActivity"),
                ("Mutation", "deleteActivity"),
                ("Mutation", "updateActivity"),
        ):
            lambda_ds.create_resolver(type_name=gql_type,
                                      field_name=gql_field)

        # On-demand activity table; destroyed together with the stack.
        activity_table = aws_dynamodb.Table(
            self,
            'CDKPostTable',
            removal_policy=core.RemovalPolicy.DESTROY,
            billing_mode=aws_dynamodb.BillingMode.PAY_PER_REQUEST,
            partition_key={
                "name": 'id',
                "type": aws_dynamodb.AttributeType.STRING,
            })

        # Let the resolver Lambda read/write the table and know its name.
        activity_table.grant_full_access(api_lambda)
        api_lambda.add_environment('ACTIVITY_TABLE', activity_table.table_name)
    def __init__(self, scope: core.Construct, id: str, vpc_id: str, subnet_ids, rds_secret_arn: str, **kwargs) -> None:
        """AppSync API with a Lambda RDS resolver (in-VPC) and a DynamoDB votes resolver.

        :param vpc_id: id of an existing VPC the SQL resolver Lambda runs in.
        :param subnet_ids: public subnet ids of that VPC (presumably a list
            of str — TODO confirm against the caller).
        :param rds_secret_arn: Secrets Manager ARN holding RDS credentials.
        """
        super().__init__(scope, id, **kwargs)

        # Import the existing VPC by attributes; the AZ list is hard-coded
        # to the single zone the supplied subnets live in.
        vpc = ec2.Vpc.from_vpc_attributes(self, vpc_id, vpc_id=vpc_id, 
        availability_zones= [ 'eu-west-1c'],
        public_subnet_ids= subnet_ids)

        # Provisioned-capacity vote table for the DynamoDB resolver.
        vote_table = ddb.Table(self, 'votes', 
            table_name='votes', 
            partition_key={
                "name": "productid",
                "type": ddb.AttributeType.STRING
            }, 
            # Sortkey structure is like : UP#20200902T12:34:00 - DOWN#20201030T10:45:12
            sort_key={
                "name": "votesortkey",
                "type": ddb.AttributeType.STRING
            },
            read_capacity=5, 
            write_capacity=5
        )

        # GraphQL API with full field-level logging; schema loaded from disk.
        api = appsync.GraphqlApi(self, 'example_appsync_api',
                                name="example_appsync_api",
                                log_config=appsync.LogConfig(field_log_level=appsync.FieldLogLevel.ALL),
                                schema=appsync.Schema.from_asset(file_path="../appsync-conf/schema.graphql")
                                )

        # Authentication done with API key - for development purposes only.
        appsync.CfnApiKey(self, 'examplegraphqlapi',
                                    api_id=api.api_id
                                    )

        # Security group for the SQL resolver Lambda; add it to the RDS
        # instance's inbound rules so the Lambda can reach the database.
        lambda_security_group = ec2.SecurityGroup(self, "Example-AppSyncResolverLambdaSG", 
            security_group_name="Example-AppSyncResolverLambdaSG",
            vpc=vpc,
            allow_all_outbound=True
        )

        # Node.js handler code bundled from the local directory.
        lambda_rds_code = aws_lambda.Code.asset("../lambda-rds")

        # Lambda that runs SQL against RDS; placed in public subnets of the
        # imported VPC behind the security group above.
        lambda_rds_resolver = aws_lambda.Function(self,
            "LambdaAppSyncSQLResolver",
            function_name=f"LambdaAppSyncSQLResolver",
            code=lambda_rds_code,
            handler="index.handler",
            runtime=aws_lambda.Runtime.NODEJS_12_X,
            memory_size=512,
            timeout=core.Duration.seconds(60),
            log_retention=logs.RetentionDays.ONE_MONTH,
            vpc=vpc,
            vpc_subnets={
                "subnet_type": ec2.SubnetType.PUBLIC
            },
            allow_public_subnet=True,
            security_group=lambda_security_group,
        )

        # Secret ARN passed via env so the handler can fetch DB credentials.
        lambda_rds_resolver.add_environment("SECRET_ARN", rds_secret_arn)

        # Allow the Lambda to read that one secret only.
        lambda_rds_resolver.add_to_role_policy(iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            actions=[ 'secretsmanager:GetSecretValue' ],
            resources=[ rds_secret_arn ]
        ))

        # Register the SQL Lambda as the Products data source.
        products_ds = api.add_lambda_data_source('Products', lambda_rds_resolver)

        # Resolver for Query.getProduct using local VTL mapping templates.
        products_ds.create_resolver(
            type_name='Query',
            field_name='getProduct',
            request_mapping_template=appsync.MappingTemplate.from_file("../appsync-conf/vtl/getProduct.vtl"),
            response_mapping_template=appsync.MappingTemplate.from_file("../appsync-conf/vtl/getProduct_output_template.vtl"),
        )

        # Separate Lambda resolves the vote fields on the Product type.
        lambda_dynamodb_code = aws_lambda.Code.asset("../lambda-dynamodb")

        lambda_dynamodb_votes_resolver = aws_lambda.Function(self,
            "LambdaAppSyncVotesResolver",
            function_name=f"LambdaAppSyncVotesResolver",
            code=lambda_dynamodb_code,
            handler="index.handler",
            runtime=aws_lambda.Runtime.NODEJS_12_X,
            memory_size=512,
            timeout=core.Duration.seconds(60),
        )

        # Read-only DynamoDB access on the vote table and its indexes.
        lambda_dynamodb_votes_resolver.add_to_role_policy(iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            actions=[ 
                "dynamodb:GetItem",
                "dynamodb:Query", 
            ],
            resources=[ 
                vote_table.table_arn,
                vote_table.table_arn + "/*"
            ]
        ));           

        # Lambda data source used by both vote field resolvers below.
        votes_ds = api.add_lambda_data_source('Votes', lambda_dynamodb_votes_resolver)

        # Field resolver for Product.ups (up-vote count).
        votes_ds.create_resolver(
            type_name='Product',
            field_name='ups',
            request_mapping_template=appsync.MappingTemplate.from_file("../appsync-conf/vtl/fields/votes_up.vtl"),
            response_mapping_template=appsync.MappingTemplate.from_file("../appsync-conf/vtl/fields/votes_up_output_template.vtl"),
        )

        # Field resolver for Product.downs (down-vote count).
        votes_ds.create_resolver(
            type_name='Product',
            field_name='downs',
            request_mapping_template=appsync.MappingTemplate.from_file("../appsync-conf/vtl/fields/votes_down.vtl"),
            response_mapping_template=appsync.MappingTemplate.from_file("../appsync-conf/vtl/fields/votes_down_output_template.vtl"),
        )