Example #1
    def __init__(self, scope: cdk.Construct, construct_id: str,
                 **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

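        # Amplify app connected to a GitHub repository; the OAuth token is
        # read from Secrets Manager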
        amplify_app = amplify.App(
            self,
            "newspaper-app",
            source_code_provider=amplify.GitHubSourceCodeProvider(
                owner="trey-rosius",
                repository="newspaper-app",
                oauth_token=cdk.SecretValue.secrets_manager(
                    "trainer-github-token")),
        )
        amplify_app.add_branch("master")
Example #2
    def __init__(self, scope: core.Construct, construct_id: str,
                 **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        # Load configs from "./configurations/config.json"
        configs = {}
        with open("./configurations/config.json") as json_file:
            configs = json.load(json_file)

        # Cold start test Lambdas, one per runtime and memory size
        mem_list = configs['MemorySizeList']
        cold_start_lambdas = {}
        for mem in mem_list:
            python38_lambda = lambda_.Function(
                self,
                id="coldstart_python38_" + str(mem) + "_",
                runtime=lambda_.Runtime.PYTHON_3_8,
                handler="lambda_function.lambda_handler",
                memory_size=mem,
                tracing=lambda_.Tracing.ACTIVE,
                code=lambda_.Code.asset("./cold_start_lambdas/python38"))
            cold_start_lambdas['PYTHON38_' + str(mem)] = python38_lambda

        for mem in mem_list:
            nodejs12x_lambda = lambda_.Function(
                self,
                id="coldstart_nodejs12x" + str(mem) + "_",
                runtime=lambda_.Runtime.NODEJS_12_X,
                handler="index.handler",
                memory_size=mem,
                tracing=lambda_.Tracing.ACTIVE,
                code=lambda_.Code.asset("./cold_start_lambdas/nodejs12x"))
            cold_start_lambdas['NODEJS12X_' + str(mem)] = nodejs12x_lambda

        for mem in mem_list:
            go1x_lambda = lambda_.Function(
                self,
                id="coldstart_go1x" + str(mem) + "_",
                runtime=lambda_.Runtime.GO_1_X,
                handler="hello",
                memory_size=mem,
                tracing=lambda_.Tracing.ACTIVE,
                code=lambda_.Code.asset("./cold_start_lambdas/go1x"))
            cold_start_lambdas['GO1X_' + str(mem)] = go1x_lambda

        for mem in mem_list:
            netcore31_lambda = lambda_.Function(
                self,
                id="coldstart_netcore31" + str(mem) + "_",
                runtime=lambda_.Runtime.DOTNET_CORE_3_1,
                handler="LambdaTest::LambdaTest.LambdaHandler::handleRequest",
                tracing=lambda_.Tracing.ACTIVE,
                code=lambda_.Code.asset("./cold_start_lambdas/netcore31"),
                memory_size=mem,
            )
            cold_start_lambdas['NETCORE31_' + str(mem)] = netcore31_lambda

        for mem in mem_list:
            java11corretto_lambda = lambda_.Function(
                self,
                id="coldstart_java11corretto" + str(mem) + "_",
                runtime=lambda_.Runtime.JAVA_11,
                handler="example.Hello::handleRequest",
                memory_size=mem,
                tracing=lambda_.Tracing.ACTIVE,
                code=lambda_.Code.asset("./cold_start_lambdas/java11corretto"))
            cold_start_lambdas['JAVA11_' + str(mem)] = java11corretto_lambda

        for mem in mem_list:
            ruby27_lambda = lambda_.Function(
                self,
                id="coldstart_ruby27" + str(mem) + "_",
                runtime=lambda_.Runtime.RUBY_2_7,
                handler="lambda_function.lambda_handler",
                memory_size=mem,
                tracing=lambda_.Tracing.ACTIVE,
                code=lambda_.Code.asset("./cold_start_lambdas/ruby27"))
            cold_start_lambdas['RUBY27_' + str(mem)] = ruby27_lambda

        # Caller
        cold_start_caller = lambda_.Function(
            self,
            id="cold_start_caller",
            runtime=lambda_.Runtime.PYTHON_3_8,
            handler="ColdStartCaller.lambda_handler",
            code=lambda_.Code.asset("./cold_start_lambdas/cold_start_caller"),
            timeout=core.Duration.seconds(180))
        cold_start_caller.role.add_managed_policy(
            iam_.ManagedPolicy.from_aws_managed_policy_name(
                "AWSXrayReadOnlyAccess"))
        cold_start_caller.role.add_to_policy(
            iam_.PolicyStatement(effect=iam_.Effect.ALLOW,
                                 actions=['lambda:GetFunctionConfiguration'],
                                 resources=["*"]))
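
        # Pass each test function's ARN to the caller via environment
        # variables and allow the caller to invoke it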
        for lambda_name in cold_start_lambdas:
            cold_start_caller.add_environment(
                lambda_name, cold_start_lambdas[lambda_name].function_arn)
            cold_start_lambdas[lambda_name].grant_invoke(cold_start_caller)

        # DynamoDB
        cold_start_table = dynamodb_.Table(
            self,
            id="cold_start_benchmark_table",
            partition_key=dynamodb_.Attribute(
                name="PK", type=dynamodb_.AttributeType.STRING),
            sort_key=dynamodb_.Attribute(name="SK",
                                         type=dynamodb_.AttributeType.NUMBER),
            time_to_live_attribute="TTL")
        cold_start_table.grant_write_data(cold_start_caller)
        cold_start_caller.add_environment('TABLE_NAME',
                                          cold_start_table.table_name)

        # S3
        life_cycle_rule = s3_.LifecycleRule(transitions=[
            s3_.Transition(storage_class=s3_.StorageClass.INFREQUENT_ACCESS,
                           transition_after=core.Duration.days(30))
        ])
        cold_start_backup_s3 = s3_.Bucket(self,
                                          "cold_start_benchmark_backup",
                                          lifecycle_rules=[life_cycle_rule])
        cold_start_backup_s3.grant_write(cold_start_caller)
        cold_start_caller.add_environment('BACKUP_BUCKET_NAME',
                                          cold_start_backup_s3.bucket_name)

        # CW event
        cron_job = events_.Rule(
            self,
            "cold_start_caller_cron_job",
            description="Run the cold start caller twice every hour",
            schedule=events_.Schedule.cron(minute="0,1"),
            targets=[targets_.LambdaFunction(cold_start_caller)])

        # Alarm when the caller fails; send an email notification
        errorAlarm = cloudwatch_.Alarm(
            self,
            "cold_start_caller_error_alarm",
            metric=cloudwatch_.Metric(
                metric_name="Errors",
                namespace="AWS/Lambda",
                period=core.Duration.minutes(5),
                statistic="Maximum",
                dimensions={"FunctionName": cold_start_caller.function_name}),
            evaluation_periods=1,
            datapoints_to_alarm=1,
            threshold=1,
            actions_enabled=True,
            alarm_description="Alarm when cold start caller failed",
            alarm_name="cold_start_caller_error_alarm",
            comparison_operator=cloudwatch_.ComparisonOperator.
            GREATER_THAN_OR_EQUAL_TO_THRESHOLD,
            treat_missing_data=cloudwatch_.TreatMissingData.MISSING)
        cold_start_caller_error_alarm_topic = sns_.Topic(
            self,
            "cold_start_caller_error_alarm_topic",
            display_name="ColdStartCallerErrorAlarmTopic",
            topic_name="ColdStartCallerErrorAlarmTopic")
        cold_start_caller_error_alarm_topic.add_subscription(
            sns_subs_.EmailSubscription(
                configs['AlarmNotificationEmailAddress']))
        errorAlarm.add_alarm_action(
            cloudwatch_actions_.SnsAction(cold_start_caller_error_alarm_topic))

        # Summarizer
        cold_start_summarizer = lambda_.Function(
            self,
            id="cold_start_summarizer",
            runtime=lambda_.Runtime.PYTHON_3_8,
            handler="ColdStartSummarizer.lambda_handler",
            code=lambda_.Code.asset(
                "./cold_start_lambdas/cold_start_summarizer"),
            timeout=core.Duration.seconds(10))
        cold_start_table.grant_read_write_data(cold_start_summarizer)
        cold_start_summarizer.add_environment('TABLE_NAME',
                                              cold_start_table.table_name)

        # setup CW event for summarizer
        cron_job_summarizer = events_.Rule(
            self,
            "cold_start_summarizer_cron_job",
            description="Run cold start summarizer once every day",
            schedule=events_.Schedule.cron(minute='30', hour='0'),
            targets=[targets_.LambdaFunction(cold_start_summarizer)])

        # error alarm for summarizer
        errorAlarm_summarizer = cloudwatch_.Alarm(
            self,
            "cold_start_summarizer_error_alarm",
            metric=cloudwatch_.Metric(metric_name='Errors',
                                      namespace='AWS/Lambda',
                                      period=core.Duration.minutes(5),
                                      statistic='Maximum',
                                      dimensions={
                                          'FunctionName':
                                          cold_start_summarizer.function_name
                                      }),
            evaluation_periods=1,
            datapoints_to_alarm=1,
            threshold=1,
            actions_enabled=True,
            alarm_description="Alarm when cold start summarizer failed",
            alarm_name="cold_start_summarizer_error_alarm",
            comparison_operator=cloudwatch_.ComparisonOperator.
            GREATER_THAN_OR_EQUAL_TO_THRESHOLD,
            treat_missing_data=cloudwatch_.TreatMissingData.MISSING)
        cold_start_summarizer_error_alarm_topic = sns_.Topic(
            self,
            "cold_start_summarizer_error_alarm_topic",
            display_name="ColdStartSummarizerErrorAlarmTopic",
            topic_name="ColdStartSummarizerErrorAlarmTopic")
        cold_start_summarizer_error_alarm_topic.add_subscription(
            sns_subs_.EmailSubscription(
                configs['AlarmNotificationEmailAddress']))
        errorAlarm_summarizer.add_alarm_action(
            cloudwatch_actions_.SnsAction(
                cold_start_summarizer_error_alarm_topic))

        # GraphQL API
        graphql_api = appsync_.GraphqlApi(
            self,
            "cold_start_benchmark_graphql_api",
            name="cold_start_benchmark_graphql_api",
            authorization_config=appsync_.AuthorizationConfig(
                default_authorization=appsync_.AuthorizationMode(
                    authorization_type=appsync_.AuthorizationType.API_KEY,
                    api_key_config=appsync_.ApiKeyConfig(
                        description="cold_start_benchmark_graphql_api_key",
                        expires=core.Expiration.after(core.Duration.days(365)),
                        name="cold_start_benchmark_graphql_api_key"))),
            schema=appsync_.Schema.from_asset(
                './cold_start_benchmark/graphql_schema/schema.graphql'),
            xray_enabled=True)
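
        # Expose the DynamoDB table through a Query resolver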
        dynamodb_data_source = graphql_api.add_dynamo_db_data_source(
            id="cold_start_dynamodb_data_source", table=cold_start_table)
        dynamodb_data_source.create_resolver(
            field_name="listColdStartSummariesAfterTimestamp",
            type_name="Query",
            request_mapping_template=appsync_.MappingTemplate.from_file(
                './cold_start_benchmark/graphql_schema/request_mapping_template'
            ),
            response_mapping_template=appsync_.MappingTemplate.from_file(
                './cold_start_benchmark/graphql_schema/response_mapping_template'
            ))

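        # Amplify-hosted frontend, built from GitHub and served at
        # coldstart.zzzgin.com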
        front_end_amplify_app = amplify_.App(
            self,
            "cold-start-front-end",
            app_name="cold_start_front_end",
            source_code_provider=amplify_.GitHubSourceCodeProvider(
                owner="ZzzGin",
                repository="cold-start-frontend-website",
                oauth_token=core.SecretValue.secrets_manager(
                    "zzzgin/github/token", json_field="zzzgin-github-token")))
        master_branch = front_end_amplify_app.add_branch("master")
        domain = front_end_amplify_app.add_domain('zzzgin.com')
        domain.map_sub_domain(master_branch, 'coldstart')
Example #3
    def __init__(self, scope: cdk.Construct, construct_id: str,
                 db_stack: DatabaseStack, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)
        # Enrichment Queue
        enrichment_queue = sqs.Queue(
            self,
            "CrawlerEnrichmentQueue",
            queue_name='CrawlerEnrichmentQueue',
            retention_period=cdk.Duration.days(1),
            visibility_timeout=cdk.Duration.minutes(15))

        # Environment
        env_default = {'APP_LOGGING_LEVEL': 'ERROR'}
        env_table = {'APP_OFFERS_TABLE': db_stack.offers_table.table_name}
        env_queue_url = {'APP_OFFERS_QUEUE_URL': enrichment_queue.queue_url}

        # Base Lambda ECR image asset
        lambda_asset = ecr_assets.DockerImageAsset(self,
                                                   'CrawlerLambdaImage',
                                                   directory=os.path.join(
                                                       os.getcwd(), 'src',
                                                       'crawler'),
                                                   repository_name='crawler')

        # Crawler Lambda
        lambda_crawler = self._lambda_function_from_asset(
            lambda_asset, 'LambdaCrawler', 'lambda_handler.crawler', {
                **env_default,
                **env_table,
                **env_queue_url
            })
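
        # Trigger the crawler Lambda once every hour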
        rule = events.Rule(self,
                           'CrawlerCallingRule',
                           rule_name='CrawlerCallingRule',
                           schedule=events.Schedule.rate(
                               cdk.Duration.hours(1)))
        rule.add_target(targets.LambdaFunction(lambda_crawler))
        db_stack.offers_table.grant_write_data(lambda_crawler)
        enrichment_queue.grant_send_messages(lambda_crawler)

        # Enrichment Lambda
        lambda_enrichment = self._lambda_function_from_asset(
            lambda_asset, 'LambdaEnrichment', 'lambda_handler.enrichment', {
                **env_default,
                **env_table
            })
        lambda_enrichment.add_event_source(
            lambda_event_sources.SqsEventSource(enrichment_queue))
        db_stack.offers_table.grant_write_data(lambda_enrichment)

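        # Search Lambda with capped concurrency, timeout, memory and event age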
        lambda_search = self._lambda_function_from_asset(
            lambda_asset,
            'LambdaSearch',
            'lambda_handler.search', {
                **env_default,
                **env_table
            },
            reserved_concurrent_executions=10,
            timeout_minutes=1,
            memory_size=128,
            max_event_age_minutes=1)
        db_stack.offers_table.grant_read_data(lambda_search)

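        # Read the GitHub personal access token from a local file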
        personal_token = open(
            os.path.join(str(Path.home()), '.github/personal_token.txt'),
            'r').read()

        # Frontend entrypoint
        amplify_app = amplify.App(
            self,
            'CrawlerFrontend',
            app_name='CrawlerFrontend',
            auto_branch_creation=amplify.AutoBranchCreation(auto_build=True),
            source_code_provider=amplify.GitHubSourceCodeProvider(
                owner='jaswdr',
                repository='aws-cdk-crawler-frontend-example',
                oauth_token=cdk.SecretValue(personal_token)))

        # Backend entrypoint
        search_entrypoint = gateway.HttpApi(
            self,
            'CrawlerSearchApiEntrypoint',
            api_name='CrawlerSearchApiEntrypoint',
            cors_preflight=gateway.CorsPreflightOptions(
                allow_headers=['*'],
                allow_methods=[gateway.HttpMethod.GET],
                allow_origins=['*'],
                max_age=cdk.Duration.hours(2)),
            description='Crawler Search API Entrypoint')
        search_entrypoint.add_routes(
            path='/search',
            methods=[gateway.HttpMethod.GET],
            integration=gateway_integrations.LambdaProxyIntegration(
                handler=lambda_search,
                payload_format_version=gateway.PayloadFormatVersion.VERSION_2_0
            ))
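
        # Versioned S3 bucket for the crawler's static data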
        static_data_bucket = s3.Bucket(
            self,
            'CrawlerStaticDataBucket',
            versioned=True,
            removal_policy=cdk.RemovalPolicy.DESTROY,
            auto_delete_objects=True,
            bucket_name='crawler-static-data')
Example #4
    def __init__(self, scope: cdk.Construct, construct_id: str,
                 **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

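        # Broad IAM policy statements shared by the Lambdas below
        # (these are wide open; scope them down for production)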
        S3policy = iam.PolicyStatement(actions=['s3:*'], resources=['*'])

        SQSpolicy = iam.PolicyStatement(actions=['sqs:*'], resources=['*'])

        Rekpolicy = iam.PolicyStatement(actions=['rekognition:*'],
                                        resources=['*'])

        rds_lambda_role = iam.Role(
            scope=self,
            id='cdk-lambda-role',
            assumed_by=iam.ServicePrincipal('lambda.amazonaws.com'),
            role_name='cdk-lambda-role',
            managed_policies=[
                iam.ManagedPolicy.from_aws_managed_policy_name(
                    'service-role/AWSLambdaVPCAccessExecutionRole'),
                iam.ManagedPolicy.from_aws_managed_policy_name(
                    'service-role/AWSLambdaBasicExecutionRole')
            ])
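
        # Allow publishing to SNS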
        policystatement = iam.PolicyStatement(resources=["*"],
                                              actions=["sns:Publish"],
                                              effect=iam.Effect.ALLOW)

        token = cdk.SecretValue.plain_text("")  # GitHub personal access token

        amplify_app = amp.App(
            self,
            "MyApp",
            source_code_provider=amp.GitHubSourceCodeProvider(
                owner="swen-514-614-spring2021",
                repository="term-project--team-9",
                oauth_token=token))
        amplify_app.add_branch("main")

        # S3 bucket that receives uploaded pictures
        picbucket = s3.Bucket(self,
                              "bucket1",
                              bucket_name='bucketswen614',
                              versioned=False,
                              removal_policy=cdk.RemovalPolicy.DESTROY,
                              auto_delete_objects=True)

        # Lambda that forwards uploaded object info to the FIFO queue
        fifosendfunction = _lambda.Function(
            self,
            "lambda_function1",
            code=_lambda.Code.asset(os.path.join(dirname,
                                                 "send_to_fifo_queue")),
            runtime=_lambda.Runtime.PYTHON_3_7,
            handler="lambda-handler.main",
            function_name="sendtofifoqueue")
        # Remember to add role= to the function
        fifosendfunction.add_to_role_policy(S3policy)
        fifosendfunction.add_to_role_policy(SQSpolicy)

        # Notification so the Lambda is triggered when a file is put into the bucket
        notification = s3n.LambdaDestination(fifosendfunction)
        picbucket.add_event_notification(s3.EventType.OBJECT_CREATED_PUT,
                                         notification)

        # FIFO queue for picture events
        queueP = sqs.Queue(self,
                           "Queue",
                           queue_name="picturequeue.fifo",
                           fifo=True,
                           content_based_deduplication=True)

        # Lambda triggered by messages from the queue above; detects labels with Rekognition
        function_rekognition = _lambda.Function(
            self,
            "lambda_function2",
            code=_lambda.Code.asset(
                os.path.join(dirname, "send_to_rekognition")),
            runtime=_lambda.Runtime.PYTHON_3_7,
            handler="lambda-handler.main",
            function_name="detect_labels")
        function_rekognition.add_to_role_policy(S3policy)
        function_rekognition.add_to_role_policy(SQSpolicy)
        function_rekognition.add_to_role_policy(Rekpolicy)

        event_source = function_rekognition.add_event_source(
            SqsEventSource(queueP))
        # event_source = functionbean.add_event_source(SqsEventSource(queueP))
        # event_source_id = event_source.event_source_id

        # VPC for RDS
        vpc = ec2.Vpc(self, "VPC", max_azs=2)

        # Set the DB password as a plain-text SecretValue (Secrets Manager would be safer)
        password = cdk.SecretValue.plain_text("swen614Team9")

        # Create an RDS instance running PostgreSQL
        myrds = rds.DatabaseInstance(
            self,
            "RDS",
            database_name="CCDatabase",
            engine=rds.DatabaseInstanceEngine.postgres(
                version=rds.PostgresEngineVersion.VER_12_5),
            instance_type=ec2.InstanceType.of(ec2.InstanceClass.BURSTABLE2,
                                              ec2.InstanceSize.MICRO),
            vpc=vpc,
            storage_type=rds.StorageType.GP2,
            allocated_storage=20,
            credentials=rds.Credentials.from_password('team9', password),
            vpc_subnets={"subnet_type": ec2.SubnetType.PUBLIC})

        myrds.connections.allow_default_port_from_any_ipv4('5432')

        # FIFO queue feeding writes into the database
        queueDB = sqs.Queue(self,
                            "DBQueue",
                            queue_name="dbqueue.fifo",
                            fifo=True,
                            content_based_deduplication=True)

        # lambda for DB queue
        fifoDBsendfunction = _lambda.Function(
            self,
            "lambda_function3",
            code=_lambda.Code.asset(os.path.join(dirname, "send_to_db")),
            runtime=_lambda.Runtime.PYTHON_3_7,
            handler="lambda-handler.main",
            role=rds_lambda_role,
            function_name="sendtodb",
            environment={'DB_HOST': myrds.db_instance_endpoint_address})
        fifoDBsendfunction.add_to_role_policy(SQSpolicy)

        # Attach the DB FIFO queue as an event source for the Lambda
        event_source1 = fifoDBsendfunction.add_event_source(
            SqsEventSource(queueDB))

        # lambda for DB get
        DBgetfunction = _lambda.Function(
            self,
            "lambda_function4",
            code=_lambda.Code.asset(os.path.join(dirname, "get_capacity_rds")),
            runtime=_lambda.Runtime.PYTHON_3_7,
            handler="lambda-handler.main",
            role=rds_lambda_role,
            function_name="getfromdb",
            environment={'DB_HOST': myrds.db_instance_endpoint_address})

        DBgetfunction.add_to_role_policy(policystatement)

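        # REST API fronting the DB read Lambda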
        api_gateway = apigw.LambdaRestApi(
            self,
            'Endpoint',
            handler=DBgetfunction,
        )

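        # Lambda integration for the root GET method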
        get_widgets_integration = apigw.LambdaIntegration(
            DBgetfunction,
            request_templates={"application/json": '{ "statusCode": "200" }'})

        api_gateway.root.add_method("GET", get_widgets_integration)  # GET /

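        # Stack outputs: Amplify frontend URL and RDS endpoint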
        cdk.CfnOutput(self,
                      'frontend',
                      value="https://main." + amplify_app.default_domain)
        cdk.CfnOutput(self,
                      'rdsendpoint',
                      value=myrds.db_instance_endpoint_address)
Example #5
    def __init__(self, scope: core.Construct, construct_id: str,
                 **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        # Create repo for Amplify static site
        amplify_repo = codecommit.Repository(
            self,
            'amplify-wild-rydes-repo',
            repository_name='amplify-wild-rydes',
            description='Repo for the Wild Rydes static site for Amplify')

        # Create repo for holding the code for this project
        app_repo = codecommit.Repository(
            self,
            'app-serverless-workshop-repo',
            repository_name='app-wild-rydes-serverless-workshop',
            description=
            'Repo for project from webapp.serverlessworkshops.io/staticwebhosting/overview/'
        )

        # IAM Role & Policy for Amplify
        amplify_role = iam.Role(
            self,
            'amplify-wild-rydes-role',
            role_name='amplify-wild-rydes-role',
            assumed_by=iam.ServicePrincipal('amplify.amazonaws.com'))

        # Amplify
        amplify_static_site = amplify.App(
            self,
            'amplify-wild-rydes-site',
            source_code_provider=amplify.CodeCommitSourceCodeProvider(
                repository=amplify_repo),
            description='Wild Rydes Amplify Static Site',
            role=amplify_role,
            app_name='wild-rydes-site')

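        # Register the master branch with the Amplify app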
        master = amplify_static_site.add_branch("master")

        # The policy is fairly open: I ran into issues when I deployed the Cognito
        # user pools through the Amplify CLI, since it creates a new CloudFormation
        # stack and deploys several resources
        amplify_policy = iam.Policy(
            self,
            'amplify-wild-rydes-policy',
            roles=[amplify_role],
            statements=[
                iam.PolicyStatement(effect=iam.Effect.ALLOW,
                                    actions=['codecommit:GitPull'],
                                    resources=[amplify_repo.repository_arn]),
                iam.PolicyStatement(effect=iam.Effect.ALLOW,
                                    actions=[
                                        'amplify:GetApp',
                                        'amplify:CreateBackendEnvironment',
                                        'cloudformation:*', 'cognito:*',
                                        'lambda:*', 's3:*', 'iam:*'
                                    ],
                                    resources=['*'])
            ])

        # DynamoDB
        # removal_policy=core.RemovalPolicy.DESTROY ensures the table is deleted, since this is only a lab
        # table_name must be 'Rides'; it's configured in the Node.js code that the Lambda function runs
        rides_table = ddb.Table(self,
                                'Table',
                                table_name='Rides',
                                partition_key=ddb.Attribute(
                                    name='RideId',
                                    type=ddb.AttributeType.STRING),
                                removal_policy=core.RemovalPolicy.DESTROY)

        # Lambda Functions
        request_unicorn_role = iam.Role(
            self,
            'RequestUnicornRole',
            role_name='wild-rydes-lambda-role',
            assumed_by=iam.ServicePrincipal('lambda.amazonaws.com'),
            managed_policies=[
                iam.ManagedPolicy.from_aws_managed_policy_name(
                    'service-role/AWSLambdaBasicExecutionRole')
            ])

        # Grant write access to the lambda role
        rides_table.grant_write_data(request_unicorn_role)

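        # Node.js Lambda backing the ride requests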
        request_unicorn = _lambda.Function(
            self,
            'request-unicorn',
            handler='requestUnicorn.handler',
            runtime=_lambda.Runtime.NODEJS_12_X,
            code=_lambda.AssetCode('request_unicorn'),
            role=request_unicorn_role,
            function_name='request-unicorn-wild-rydes')

        # Rest API
        ride_api_gw = apigw.RestApi(
            self,
            'wild-rydes-apigw',
            rest_api_name='WildRydes',
            endpoint_types=[apigw.EndpointType.REGIONAL])

        # APIGW Lambda Integration
        # proxy enabled for the workshop
        ride_api_gw_lambda_integration = apigw.LambdaIntegration(
            request_unicorn,
            proxy=True,
            integration_responses=[{
                'statusCode': '200',
                'responseParameters': {
                    'method.response.header.Access-Control-Allow-Origin':
                    "'*'",
                }
            }])

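        # POST /ride method backed by the request-unicorn Lambda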
        post_ride_resource = ride_api_gw.root.add_resource('ride')
        post_ride_resource_method = post_ride_resource.add_method(
            'POST',
            ride_api_gw_lambda_integration,
            method_responses=[{
                'statusCode': '200',
                'responseParameters': {
                    'method.response.header.Access-Control-Allow-Origin': True,
                }
            }])

        # This needs to be created after the Amplify site unless you create the Cognito user pool in the CDK
        # I went through the Amplify CLI to create the backend
        ride_api_gw_authorizer = apigw.CfnAuthorizer(
            self,
            'wild-rydes-apigw-authorizer',
            rest_api_id=ride_api_gw.rest_api_id,
            name='wild-rydes-apigw-authorizer',
            type='COGNITO_USER_POOLS',
            identity_source='method.request.header.name.Authorization',
            identity_validation_expression="Bearer (.*)",
            provider_arns=[
                'arn:aws:cognito-idp:us-east-1:<ACCOUNT_ID>:userpool/<USER_POOL_ID>'
            ])

        # https://github.com/aws/aws-cdk/issues/5618
        post_ride_resource_fix = post_ride_resource_method.node.find_child(
            'Resource')
        post_ride_resource_fix.add_property_override('AuthorizationType',
                                                     'COGNITO_USER_POOLS')
        post_ride_resource_fix.add_property_override(
            'AuthorizerId', {"Ref": ride_api_gw_authorizer.logical_id})

        # Enable CORS for the workshop
        post_ride_resource.add_method(
            'OPTIONS',
            apigw.MockIntegration(integration_responses=[{
                'statusCode': '200',
                'responseParameters': {
                    'method.response.header.Access-Control-Allow-Headers':
                    "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token'",
                    'method.response.header.Access-Control-Allow-Origin':
                    "'*'",
                    'method.response.header.Access-Control-Allow-Methods':
                    "'POST,OPTIONS'"
                }
            }],
                                  passthrough_behavior=apigw.
                                  PassthroughBehavior.WHEN_NO_MATCH,
                                  request_templates={
                                      "application/json":
                                      "{\"statusCode\":200}"
                                  }),
            method_responses=[{
                'statusCode': '200',
                'responseParameters': {
                    'method.response.header.Access-Control-Allow-Headers':
                    True,
                    'method.response.header.Access-Control-Allow-Methods':
                    True,
                    'method.response.header.Access-Control-Allow-Origin': True,
                }
            }])

        # Outputs
        amplify_repo_url = core.CfnOutput(
            self,
            'amplify-repo-url',
            value=amplify_repo.repository_clone_url_http)

        app_repo_url = core.CfnOutput(self,
                                      'app-repo-url',
                                      value=app_repo.repository_clone_url_http)

        amplify_default_domain = core.CfnOutput(
            self,
            'amplify-default-domain',
            value=amplify_static_site.default_domain)

        request_unicorn_apigw = core.CfnOutput(self,
                                               'request-unicorn-apigw',
                                               value=ride_api_gw.url)