Example #1
 def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
     super().__init__(scope, id, **kwargs)
     table = ddb.Table(self,
                       'WeatherData',
                       partition_key={
                           'name': 'date_part',
                           'type': ddb.AttributeType.NUMBER
                       },
                       sort_key={
                           'name': 'time_part',
                           'type': ddb.AttributeType.NUMBER
                       },
                       read_capacity=1,
                       write_capacity=1)
     main_lambda = _lambda.Function(
         self,
         'MainHandler',
         runtime=_lambda.Runtime.PYTHON_3_8,
         code=_lambda.Code.asset('lambda'),
         handler='main.handler',
         environment={'WEATHERDATA_TABLE_NAME': table.table_name},
         log_retention=logs.RetentionDays.TWO_WEEKS)
     table.grant_read_write_data(main_lambda)
     api = apigw.LambdaRestApi(self, 'MainEndpoint', handler=main_lambda)
     api.add_usage_plan('UsagePlan',
                        throttle=apigw.ThrottleSettings(rate_limit=10,
                                                        burst_limit=10))
     cloud_front = cf.CloudFrontWebDistribution(
         self,
         'Https2HttpDistribution',
         viewer_protocol_policy=cf.ViewerProtocolPolicy.ALLOW_ALL,
         geo_restriction=cf.GeoRestriction.whitelist('US'),
         origin_configs=[
             cf.SourceConfiguration(
                 custom_origin_source=cf.CustomOriginConfig(
                      # str.lstrip() strips a set of characters rather than a prefix,
                      # so it is not a safe way to drop "https://"; split the URL token instead
                      domain_name=core.Fn.select(
                          2, core.Fn.split('/', api.url)),
                      origin_protocol_policy=cf.OriginProtocolPolicy.HTTPS_ONLY,
                 ),
                 origin_path='/prod',
                 behaviors=[
                     cf.Behavior(
                         is_default_behavior=True,
                         allowed_methods=cf.CloudFrontAllowedMethods.ALL,
                          cached_methods=cf.CloudFrontAllowedCachedMethods.GET_HEAD,
                          compress=True,
                          forwarded_values=cf.CfnDistribution.ForwardedValuesProperty(
                              query_string=True)),
                 ],
             )
         ])
     core.CfnOutput(
         self,
         'HttpEndpointDomain',
         value=f'http://{cloud_front.domain_name}',
          description='CloudFront domain name that accepts requests over both HTTP and HTTPS.',
         export_name='HTTP-Endpoint')
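As written, the usage plan in Example #1 only defines throttle settings; it is never associated with an API key or with the deployed stage, and usage-plan throttling only applies to requests that present a key belonging to the plan on a stage added to the plan. A minimal sketch of the extra wiring, reworking the add_usage_plan call above (the 'ApiKey' id is made up):

      plan = api.add_usage_plan('UsagePlan',
                                throttle=apigw.ThrottleSettings(rate_limit=10,
                                                                burst_limit=10))
      key = api.add_api_key('ApiKey')  # let API Gateway generate the key value
      plan.add_api_key(key)            # the throttle applies to callers using this key
      plan.add_api_stage(stage=api.deployment_stage)  # bind the plan to the deployed stage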
Example #2
    def __init__(self, scope: core.Construct, construct_id: str,
                 **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)
        # get this files folder path
        path = pathlib.Path(__file__).parent.absolute()

        bucket = aws_s3.Bucket(scope=self,
                               id="Bucket",
                               public_read_access=True)

        badge_uploader_handler = aws_lambda.Function(
            scope=self,
            id="Handler",
            runtime=aws_lambda.Runtime.PYTHON_3_8,
            retry_attempts=0,
            memory_size=1024,
            timeout=core.Duration.seconds(60),
            code=PythonS3CodeAsset(scope=self,
                                   id='HandlerCode',
                                   work_dir=str(path /
                                                'badge_uploader_lambda'),
                                   sources=['handler.py'],
                                   runtime=aws_lambda.Runtime.PYTHON_3_8),
            handler='handler.main',
            environment={
                'BADGE_UPLOADER_BUCKET': bucket.bucket_name,
                'REGION': self.region
            })

        bucket.grant_read_write(identity=badge_uploader_handler,
                                objects_key_pattern="*.svg")

        api = aws_apigateway.LambdaRestApi(scope=self,
                                           id="Api",
                                           rest_api_name="BadgeUploaderApi",
                                           handler=badge_uploader_handler)

        api_key = api.add_api_key(id="ApiKey")

        api.add_usage_plan(id='UsagePlan',
                           api_key=api_key,
                           throttle=aws_apigateway.ThrottleSettings(
                               burst_limit=2, rate_limit=10))
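Example #2 creates the key and the plan but stops there: the plan is never bound to a stage and no method requires a key, so the burst_limit/rate_limit values have no effect yet. A rough sketch of the missing wiring, reusing the same construct names and assuming default_method_options behaves as on a plain RestApi:

        api = aws_apigateway.LambdaRestApi(
            scope=self,
            id="Api",
            rest_api_name="BadgeUploaderApi",
            handler=badge_uploader_handler,
            # require an API key on the proxied methods
            default_method_options=aws_apigateway.MethodOptions(
                api_key_required=True))

        api_key = api.add_api_key(id="ApiKey")
        usage_plan = api.add_usage_plan(id='UsagePlan',
                                        api_key=api_key,
                                        throttle=aws_apigateway.ThrottleSettings(
                                            burst_limit=2, rate_limit=10))
        usage_plan.add_api_stage(stage=api.deployment_stage)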
Example #3
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        env_name = self.node.try_get_context('env')

        #Create the SQS queue
        queue = sqs.Queue(self,
                          id=f"{env_name}-SQSQueue",
                          queue_name=f"{env_name}-queue")

        #Create the API GW service role with permissions to call SQS
        rest_api_role = iam.Role(
            self,
            id=f"{env_name}-RestAPISQSRole",
            assumed_by=iam.ServicePrincipal("apigateway.amazonaws.com"),
            managed_policies=[
                iam.ManagedPolicy.from_aws_managed_policy_name(
                    "AmazonSQSFullAccess")
            ])

        #Create an API GW Rest API
        base_api = apigw.RestApi(
            self,
            id=f'{env_name}-ApiGW',
            rest_api_name=f'{env_name}SQSTestAPI',
            api_key_source_type=apigw.ApiKeySourceType.HEADER)

        usage_api_key_value = ''.join(
            random.choice(string.ascii_uppercase + string.ascii_lowercase +
                          string.digits) for _ in range(40))

        usage_api_key = base_api.add_api_key(id=f'{env_name}-apikey',
                                             value=usage_api_key_value)
        usage_plan = base_api.add_usage_plan(id=f'{env_name}-usageplan',
                                             name=f'{env_name}-usageplan',
                                             api_key=usage_api_key,
                                             throttle=apigw.ThrottleSettings(
                                                 rate_limit=10, burst_limit=2))
        usage_plan.add_api_stage(stage=base_api.deployment_stage)

        #Create a resource named "example" on the base API
        api_resource = base_api.root.add_resource('sqstest')

        #Create API Integration Response object: https://docs.aws.amazon.com/cdk/api/latest/python/aws_cdk.aws_apigateway/IntegrationResponse.html
        integration_response = apigw.IntegrationResponse(
            status_code="200",
            response_templates={"application/json": ""},
        )

        #Create API Integration Options object: https://docs.aws.amazon.com/cdk/api/latest/python/aws_cdk.aws_apigateway/IntegrationOptions.html
        api_integration_options = apigw.IntegrationOptions(
            credentials_role=rest_api_role,
            integration_responses=[integration_response],
            request_templates={
                "application/json":
                "Action=SendMessage&MessageBody=$input.body"
            },
            passthrough_behavior=apigw.PassthroughBehavior.NEVER,
            request_parameters={
                "integration.request.header.Content-Type":
                "'application/x-www-form-urlencoded'"
            },
        )

        #Create AWS Integration Object for SQS: https://docs.aws.amazon.com/cdk/api/latest/python/aws_cdk.aws_apigateway/AwsIntegration.html
        api_resource_sqs_integration = apigw.AwsIntegration(
            service="sqs",
            integration_http_method="POST",
            # the path is "<account-id>/<queue-name>" - that is just how the URL to SQS is built
            path="{}/{}".format(core.Aws.ACCOUNT_ID, queue.queue_name),
            options=api_integration_options)

        #Create a Method Response Object: https://docs.aws.amazon.com/cdk/api/latest/python/aws_cdk.aws_apigateway/MethodResponse.html
        method_response = apigw.MethodResponse(status_code="200")

        #Add the API GW Integration to the "example" API GW Resource
        api_resource.add_method("POST",
                                api_resource_sqs_integration,
                                method_responses=[method_response],
                                api_key_required=True)

        #Creating Lambda function that will be triggered by the SQS Queue
        sqs_lambda = _lambda.Function(
            self,
            'SQSTriggerLambda',
            handler='sqs_lambda.handler',
            runtime=_lambda.Runtime.PYTHON_3_8,
            code=_lambda.Code.asset('pr_sqs_lambda'),
        )

        #Create an SQS event source for Lambda
        sqs_event_source = lambda_event_source.SqsEventSource(queue)

        #Add SQS event source to the Lambda function
        sqs_lambda.add_event_source(sqs_event_source)

        # https://67ixnggm81.execute-api.us-east-1.amazonaws.com/prod/sqstest
        region = core.Aws.REGION
        core.CfnOutput(self,
                       'api-gw-url',
                       value='https://' + base_api.rest_api_id +
                       '.execute-api.' + region +
                       '.amazonaws.com/prod/sqstest',
                       export_name='api-sqs-gw-url')
        print(f'API Key: {usage_api_key_value}')
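After deployment, every call to the sqstest resource has to carry the key printed above, because the POST method is created with api_key_required=True; API Gateway reads it from the x-api-key header. A hypothetical client-side call (URL and key values are placeholders):

import requests  # client-side code, not part of the stack

resp = requests.post(
    'https://<rest-api-id>.execute-api.<region>.amazonaws.com/prod/sqstest',
    headers={'x-api-key': '<key printed at synth time>'},
    json={'hello': 'world'})  # the body becomes the SQS message via the mapping template
print(resp.status_code)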
        """
    def __init__(self,
                 scope: core.Construct,
                 id: str,
                 is_qa_stack=False,
                 **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        def qa_maybe(id_str: str) -> str:
            return id_str if not is_qa_stack else id_str + '-qa'

        # Bucket used to deliver events
        delivery_bucket = aws_s3.Bucket(
            self,
            id=qa_maybe('my-event-storage-bucket'),
            bucket_name=qa_maybe('my-event-storage-bucket'),
            block_public_access=aws_s3.BlockPublicAccess.BLOCK_ALL)

        # ---- Below is firehose related code ----
        # Since Firehose is not yet CDK-ready, we need to do everything the old way, starting with defining roles
        role = aws_iam.Role(
            self,
            id=qa_maybe('my-firehose-delivery-role'),
            assumed_by=aws_iam.ServicePrincipal('firehose.amazonaws.com'))
        delivery_bucket.grant_write(role)

        # Anything that is not yet CDK-ready is still available as a Cfn (CloudFormation) object
        firehose = aws_kinesisfirehose.CfnDeliveryStream(
            self,
            id=qa_maybe('my-pipeline-firehose'),
            delivery_stream_name=qa_maybe('my-pipeline-firehose'),
            delivery_stream_type='DirectPut',
            s3_destination_configuration={
                'bucketArn': delivery_bucket.bucket_arn,
                'bufferingHints': {
                    # 900 seconds is the recommended setting (the maximum for Firehose)
                    'intervalInSeconds': 900,
                    'sizeInMBs': 5
                },
                'compressionFormat': 'UNCOMPRESSED',
                'prefix': 'events/',  # the folder the events will end up in
                'errorOutputPrefix': 'delivery_error/',  # folder used in case of delivery error
                'roleArn': role.role_arn
            })

        # Policy statement required for lambda to be able to put records on the firehose stream
        firehose_policy = aws_iam.PolicyStatement(
            actions=['firehose:DescribeDeliveryStream', 'firehose:PutRecord'],
            effect=aws_iam.Effect.ALLOW,
            resources=[firehose.attr_arn])

        # ---- API GW + Lambda code ----
        api_lambda = aws_lambda.Function(
            self,
            id=qa_maybe('my-api-gw-lambda'),
            runtime=aws_lambda.Runtime.PYTHON_3_8,
            code=aws_lambda.Code.asset('src/lambda_code/api_gw_lambda'),
            handler='main.handler',
            memory_size=128,
            timeout=core.Duration.seconds(5),
            environment={
                'region': self.region,
                'stream_name': firehose.delivery_stream_name
            })
        # Important to add the firehose postRecord policy to lambda otherwise there will be access errors
        api_lambda.add_to_role_policy(firehose_policy)

        # Create the lambda that will receive the data messages
        api_gw = aws_apigateway.LambdaRestApi(
            self,
            id=qa_maybe('my-api-gw'),
            handler=api_lambda,
            proxy=False,
            deploy_options=aws_apigateway.StageOptions(
                stage_name='qa' if is_qa_stack else 'prod'))

        # Add API query method
        api_gw.root.add_resource('send_data').add_method('GET',
                                                         api_key_required=True)

        # Generate an API key and add it to a usage plan
        api_key = api_gw.add_api_key(qa_maybe('MyPipelinePublicKey'))
        usage_plan = api_gw.add_usage_plan(
            id=qa_maybe('my-pipeline-usage-plan'),
            name='standard',
            api_key=api_key,
            throttle=aws_apigateway.ThrottleSettings(rate_limit=10,
                                                     burst_limit=2))

        # Add the usage plan to the API GW
        usage_plan.add_api_stage(stage=api_gw.deployment_stage)
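The stack only wires up permissions and configuration: the Lambda receives firehose:PutRecord through firehose_policy and the stream name through its environment. The handler code under src/lambda_code/api_gw_lambda is not shown; a rough sketch of what it would do with boto3 (the event parsing here is made up):

import json
import os

import boto3

firehose = boto3.client('firehose', region_name=os.environ['region'])


def handler(event, context):
    # forward the incoming query parameters to Firehose as one newline-terminated record
    record = json.dumps(event.get('queryStringParameters') or {}) + '\n'
    firehose.put_record(DeliveryStreamName=os.environ['stream_name'],
                        Record={'Data': record.encode('utf-8')})
    return {'statusCode': 200, 'body': 'ok'}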
Example #5
    def __init__(self, scope: cdk.Construct, construct_id: str, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        # The code that defines your stack goes here
        
        ap_api = aws_apigateway.RestApi(self, id='alpha-price-mule-api', deploy=False)
        
        ap_deployment = aws_apigateway.Deployment(self, id='ap-deployment', api=ap_api)
        aws_apigateway.Stage(self, id='ap-stage', deployment=ap_deployment, stage_name='Prod')
        
        ap_api.root.add_method('ANY')
        
        ap_deployment2 = aws_apigateway.Deployment(self, id='ap-deployment2', api=ap_api)
        stagename = aws_apigateway.Stage(self, id='ap-stage2', deployment=ap_deployment2, stage_name='Stage')
        
        ap_api.deployment_stage = stagename
        
        key_mule = ap_api.add_api_key(id='mule', api_key_name='mule')
        plan = ap_api.add_usage_plan(id='Usage-plan-mule', name='mule', api_key=key_mule, throttle=aws_apigateway.ThrottleSettings(rate_limit=100, burst_limit=200))
        plan.add_api_stage(api=ap_api, stage=ap_api.deployment_stage)
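Note that plan.add_api_stage only covers the stage currently assigned to deployment_stage ('Stage'); the 'Prod' stage created earlier is not governed by the plan. If it should be, capture that Stage in a variable instead of discarding it and add it to the plan as well, roughly:

        prod_stage = aws_apigateway.Stage(self, id='ap-stage', deployment=ap_deployment, stage_name='Prod')
        plan.add_api_stage(api=ap_api, stage=prod_stage)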
Example #6
    def __init__(self, scope: core.Construct, id: str, stack_log_level: str,
                 back_end_api_name: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Create Serverless Event Processor using Lambda
        # Read the Lambda function code
        try:
            with open(
                    "secure_api_with_keys/stacks/back_end/lambda_src/serverless_greeter.py",
                    mode="r") as f:
                greeter_fn_code = f.read()
        except OSError as e:
            print("Unable to read Lambda Function Code")
            raise e

        greeter_fn = _lambda.Function(
            self,
            "secureGreeterFn",
            function_name=f"greeter_fn_{id}",
            runtime=_lambda.Runtime.PYTHON_3_7,
            handler="index.lambda_handler",
            code=_lambda.InlineCode(greeter_fn_code),
            timeout=core.Duration.seconds(15),
            reserved_concurrent_executions=20,
            environment={
                "LOG_LEVEL": f"{stack_log_level}",
                "Environment": "Production",
                "ANDON_CORD_PULLED": "False",
                "RANDOM_SLEEP_ENABLED": "False"
            },
            description="Creates a simple greeter function")
        greeter_fn_version = greeter_fn.latest_version
        greeter_fn_version_alias = _lambda.Alias(
            self,
            "greeterFnAlias",
            alias_name="MystiqueAutomation",
            version=greeter_fn_version)

        # Create Custom Loggroup
        # /aws/lambda/function-name
        greeter_fn_lg = _logs.LogGroup(
            self,
            "greeterFnLoggroup",
            log_group_name=f"/aws/lambda/{greeter_fn.function_name}",
            retention=_logs.RetentionDays.ONE_WEEK,
            removal_policy=core.RemovalPolicy.DESTROY)

        # Add API GW front end for the Lambda
        back_end_01_api_stage_options = _apigw.StageOptions(
            stage_name="miztiik",
            throttling_rate_limit=10,
            throttling_burst_limit=100,
            # Log full requests/responses data
            data_trace_enabled=True,
            # Enable Detailed CloudWatch Metrics
            metrics_enabled=True,
            logging_level=_apigw.MethodLoggingLevel.INFO,
        )

        # Create API Gateway
        secure_api_with_keys_01 = _apigw.RestApi(
            self,
            "backEnd01Api",
            rest_api_name=f"{back_end_api_name}",
            deploy_options=back_end_01_api_stage_options,
            endpoint_types=[_apigw.EndpointType.REGIONAL],
            description=
            f"{GlobalArgs.OWNER}: API Best Practice Demonstration - Security for APIs with Keys"
        )

        back_end_01_api_res = secure_api_with_keys_01.root.add_resource(
            "secure")
        greeter = back_end_01_api_res.add_resource("greeter")

        greeter_method_get = greeter.add_method(
            http_method="GET",
            request_parameters={
                "method.request.header.InvocationType": True,
                "method.request.path.pkon": True
            },
            integration=_apigw.LambdaIntegration(handler=greeter_fn,
                                                 proxy=True),
            api_key_required=True)

        # Start with the API Keys
        dev_kon_api_key = _apigw.ApiKey(
            self,
            "devApiKey",
            description="The Api Key for 'Kon' Developer",
            enabled=True,
            api_key_name="Developer-Kon-Key",
            # value ="" # Leave it to AWS to create a random key for us
        )

        partner_api_key = _apigw.ApiKey(
            self,
            "partnerApiKey",
            description="The Api Key for 'Partner' Mystique Corp",
            enabled=True,
            api_key_name="Partner-Mystique-Corp-Key",
        )

        # We have API Keys to attach to Usage Plan
        # Usage plan for Developer with METHOD Level throttling
        # There seems to be a bug: the console allows rate_limit to be a float, but CDK throws an error.
        secure_api_with_keys_01_usage_plan_01 = secure_api_with_keys_01.add_usage_plan(
            "secureApiDevUsagePlan",
            name="DeveloperUsagePlan",
            api_key=dev_kon_api_key,
            api_stages=[
                _apigw.UsagePlanPerApiStage(
                    api=secure_api_with_keys_01,
                    stage=secure_api_with_keys_01.deployment_stage)
            ],
            throttle=_apigw.ThrottleSettings(burst_limit=1, rate_limit=1),
            quota=_apigw.QuotaSettings(limit=1440, period=_apigw.Period.DAY),
            description=
            "Mystique Automation: Api Security with usage plan and throttling. Usage plan for developer s kon"
        )

        # Usage plan for Partner with METHOD Level throttling
        secure_api_with_keys_01_usage_plan_02 = secure_api_with_keys_01.add_usage_plan(
            "secureApiPartnerUsagePlan",
            name="PartnerUsagePlan",
            api_key=partner_api_key,
            api_stages=[
                _apigw.UsagePlanPerApiStage(
                    api=secure_api_with_keys_01,
                    stage=secure_api_with_keys_01.deployment_stage,
                    throttle=[
                        _apigw.ThrottlingPerMethod(
                            method=greeter_method_get,
                            throttle=_apigw.ThrottleSettings(burst_limit=50,
                                                             rate_limit=5))
                    ])
            ],
            throttle=_apigw.ThrottleSettings(burst_limit=100, rate_limit=10),
            quota=_apigw.QuotaSettings(limit=864000, period=_apigw.Period.DAY),
            description=
            "Mystique Automation: Api Security with usage plan and throttling. Usage plan for Partner Mystique Corp"
        )

        # Outputs
        output_1 = core.CfnOutput(
            self,
            "SecureApiWithKeysUrl",
            value=f"{greeter.url}",
            description=
            "Use an utility like curl from the same VPC as the API to invoke it."
        )
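Both keys above let API Gateway generate their values (the value argument is deliberately omitted), so the secrets have to be read back after deployment, for example with boto3. A rough sketch, filtering by the api_key_name set above:

import boto3

apigw_client = boto3.client('apigateway')

# look the developer key up by name and include its generated secret value
response = apigw_client.get_api_keys(nameQuery='Developer-Kon-Key', includeValues=True)
for key in response['items']:
    print(key['name'], key['value'])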
Example #7
    def __init__(self, scope: core.Construct, id_: str,
                 imported_assets_config: ImportedAssetsConfiguration,
                 mongodb_config: MongoDBConfiguration,
                 access_keys_config: AccessKeysConfiguration, **kwargs):

        super().__init__(scope, id_, **kwargs)

        # LAMBDAS DEFINITIONS

        lambda_dispatch_stream = lambda_.Function(
            self,
            'DispatchStream',
            code=lambda_.AssetCode(
                'stack/lambda/dispatch_stream/1.0.0/python/dispatch_stream'),
            timeout=core.Duration.seconds(10),
            description='',
            function_name='DispatchStream',
            reserved_concurrent_executions=10,
            handler=DEFAULT_LAMBDA_HANDLER,
            runtime=DEFAULT_LAMBDA_RUNTIME,
            log_retention=DEFAULT_LAMBDA_LOG_RETENTION,
            memory_size=128,
            retry_attempts=0,
            dead_letter_queue_enabled=False)

        lambda_geocode_property = lambda_.Function(
            self,
            'GeocodeProperty',
            code=lambda_.AssetCode(
                'stack/lambda/geocode_property/1.1.3/python/geocode_property'),
            timeout=core.Duration.seconds(15),
            description='',
            function_name='GeocodeProperty',
            reserved_concurrent_executions=10,
            handler=DEFAULT_LAMBDA_HANDLER,
            runtime=DEFAULT_LAMBDA_RUNTIME,
            log_retention=DEFAULT_LAMBDA_LOG_RETENTION,
            memory_size=128,
            retry_attempts=0,
            dead_letter_queue_enabled=True)

        lambda_fetch_properties = lambda_.Function(
            self,
            'FetchProperties',
            code=lambda_.AssetCode(
                'stack/lambda/fetch_properties/1.4.0/python/fetch_properties'),
            timeout=core.Duration.seconds(10),
            description='',
            function_name='FetchProperties',
            reserved_concurrent_executions=10,
            handler=DEFAULT_LAMBDA_HANDLER,
            runtime=DEFAULT_LAMBDA_RUNTIME,
            log_retention=DEFAULT_LAMBDA_LOG_RETENTION,
            memory_size=128,
            retry_attempts=0,
            dead_letter_queue_enabled=True)

        # LAYERS DEFINITIONS

        layer_dispatch_stream = lambda_.LayerVersion(
            self,
            'DispatchStreamLibs',
            code=lambda_.Code.from_asset(
                'stack/lambda/dispatch_stream/1.0.0/'),
            description='',
            layer_version_name='DispatchStreamLibs',
            compatible_runtimes=[DEFAULT_LAMBDA_RUNTIME])

        layer_geocode_property = lambda_.LayerVersion(
            self,
            'GeocodePropertyLibs',
            code=lambda_.Code.from_asset(
                'stack/lambda/geocode_property/1.1.3/'),
            description='',
            layer_version_name='GeocodePropertyLibs',
            compatible_runtimes=[DEFAULT_LAMBDA_RUNTIME])

        layer_fetch_properties = lambda_.LayerVersion(
            self,
            'FetchPropertiesLibs',
            code=lambda_.Code.from_asset(
                'stack/lambda/fetch_properties/1.4.0/'),
            description='',
            layer_version_name='FetchPropertiesLibs',
            compatible_runtimes=[DEFAULT_LAMBDA_RUNTIME])

        # CLOUDWATCH RULES DEFINITIONS
        # -

        # SQS QUEUES DEFINITIONS
        # -

        # SNS TOPICS DEFINITIONS

        topic_new_properties = sns.Topic(self,
                                         'NewProperties',
                                         display_name='',
                                         topic_name='NewProperties')

        # API GATEWAYS
        api_gateway_graphql = api_gateway.LambdaRestApi(
            self,
            'GraphQLApi',
            handler=lambda_fetch_properties,
            rest_api_name='GraphQLApi',
            description='GraphQL API',
            cloud_watch_role=True)
        api_gateway_graphql_resource = api_gateway_graphql.root.add_resource(
            'graphql')
        api_gateway_graphql_resource.add_method('GET', api_key_required=False)
        api_gateway_graphql.add_usage_plan(
            'GraphQLUsagePlan',
            name='GraphQLUsagePlan',
            throttle=api_gateway.ThrottleSettings(rate_limit=1, burst_limit=1))

        # DYNAMODB PERMISSIONS
        lambda_dispatch_stream.add_event_source(
            event_sources.DynamoEventSource(
                table=imported_assets_config.table_property,
                starting_position=lambda_.StartingPosition.LATEST,
                batch_size=10,
                max_batching_window=core.Duration.seconds(30),
                parallelization_factor=10,
                retry_attempts=0))

        # CLOUDWATCH SCHEDULING RULES
        # -

        # SQS PERMISSIONS
        # -

        # SNS PERMISSIONS

        topic_new_properties.grant_publish(lambda_dispatch_stream)
        topic_new_properties.add_subscription(
            sns_subscriptions.LambdaSubscription(lambda_geocode_property))

        # LAYERS ASSIGNMENTS

        lambda_dispatch_stream.add_layers(layer_dispatch_stream)
        lambda_geocode_property.add_layers(layer_geocode_property)
        lambda_fetch_properties.add_layers(layer_fetch_properties)

        # ENVIRONMENT VARIABLES

        lambda_geocode_property.add_environment(key='MONGODB_URI',
                                                value=mongodb_config.uri)
        lambda_geocode_property.add_environment(key='MONGODB_DATABASE',
                                                value=mongodb_config.database)
        lambda_geocode_property.add_environment(
            key='MONGODB_COLLECTION', value=mongodb_config.collection)
        lambda_geocode_property.add_environment(
            key='API_ACCESS_TOKEN_GEOCODING',
            value=access_keys_config.geocoding)
        lambda_fetch_properties.add_environment(key='MONGODB_URI',
                                                value=mongodb_config.uri)
        lambda_fetch_properties.add_environment(key='MONGODB_DATABASE',
                                                value=mongodb_config.database)
        lambda_fetch_properties.add_environment(
            key='MONGODB_COLLECTION', value=mongodb_config.collection)
        lambda_fetch_properties.add_environment(
            key='MONGODB_MAX_PAGE_SIZE', value=mongodb_config.max_page_size)