def add_cors_options(self, apigw_resource):
    """Attach a mock OPTIONS method to *apigw_resource* so CORS preflight requests succeed."""
    # API Gateway requires header values to be single-quoted string literals.
    preflight_headers = {
        "method.response.header.Access-Control-Allow-Headers":
            "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token'",
        "method.response.header.Access-Control-Allow-Origin": "'*'",
        "method.response.header.Access-Control-Allow-Methods": "'GET,POST,PUT,DELETE,OPTIONS'",
    }
    mock_integration = apigateway.MockIntegration(
        integration_responses=[{
            "statusCode": "200",
            "responseParameters": preflight_headers,
        }],
        passthrough_behavior=apigateway.PassthroughBehavior.WHEN_NO_MATCH,
        request_templates={"application/json": '{"statusCode":200}'},
    )
    # Declare which headers the method may return (True = header is allowed).
    allowed_response_headers = {
        "method.response.header.Access-Control-Allow-Headers": True,
        "method.response.header.Access-Control-Allow-Methods": True,
        "method.response.header.Access-Control-Allow-Origin": True,
    }
    apigw_resource.add_method(
        "OPTIONS",
        mock_integration,
        method_responses=[{
            "statusCode": "200",
            "responseParameters": allowed_response_headers,
        }],
    )
def add_cors_options(api_resource):
    """Add response to OPTIONS to enable CORS on an API resource."""
    # Note: Access-Control-Allow-Credentials is deliberately not set here.
    api_resource.add_method(
        "OPTIONS",
        integration=apigateway.MockIntegration(
            integration_responses=[{
                "statusCode": "200",
                "responseParameters": {
                    "method.response.header.Access-Control-Allow-Headers":
                        "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token,X-Amz-User-Agent'",
                    "method.response.header.Access-Control-Allow-Origin": "'*'",
                    "method.response.header.Access-Control-Allow-Methods":
                        "'OPTIONS,GET,PUT,POST,DELETE'",
                },
            }],
            passthrough_behavior=apigateway.PassthroughBehavior.WHEN_NO_MATCH,
            request_templates={"application/json": '{"statusCode": 200}'},
        ),
        method_responses=[
            apigateway.MethodResponse(
                status_code="200",
                response_parameters={
                    "method.response.header.Access-Control-Allow-Headers": True,
                    "method.response.header.Access-Control-Allow-Methods": True,
                    "method.response.header.Access-Control-Allow-Origin": True,
                },
            )
        ],
    )
def add_cors_options(self, apigw_resource):
    """Wire an OPTIONS mock method onto *apigw_resource* for CORS preflight support."""
    # Header values must be wrapped in single quotes for API Gateway.
    cors_header_values = {
        'method.response.header.Access-Control-Allow-Headers':
            "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token'",
        'method.response.header.Access-Control-Allow-Origin': "'*'",
        'method.response.header.Access-Control-Allow-Methods': "'GET,OPTIONS'"
    }
    preflight_integration = aws_apigateway.MockIntegration(
        integration_responses=[{
            'statusCode': '200',
            'responseParameters': cors_header_values
        }],
        passthrough_behavior=aws_apigateway.PassthroughBehavior.WHEN_NO_MATCH,
        request_templates={"application/json": "{\"statusCode\":200}"})
    # Whitelist the CORS headers on the method response.
    exposed_headers = {
        'method.response.header.Access-Control-Allow-Headers': True,
        'method.response.header.Access-Control-Allow-Methods': True,
        'method.response.header.Access-Control-Allow-Origin': True,
    }
    apigw_resource.add_method(
        'OPTIONS',
        preflight_integration,
        method_responses=[{
            'statusCode': '200',
            'responseParameters': exposed_headers
        }],
    )
def add_cors_options(api_gateway_resource: aws_apigateway.Resource):
    """Enable CORS preflight on *api_gateway_resource* via a mock OPTIONS method.

    The integration/method response shapes come from the shared
    CDKMasterStack helpers so every resource answers preflight identically.
    """
    preflight = aws_apigateway.MockIntegration(
        integration_responses=[CDKMasterStack.get_options_integration_response()],
        passthrough_behavior=aws_apigateway.PassthroughBehavior.WHEN_NO_MATCH,
        request_templates={"application/json": "{\"statusCode\":200}"})
    api_gateway_resource.add_method(
        'OPTIONS',
        preflight,
        method_responses=[CDKMasterStack.get_options_method_response()])
def create_noddy_endpoint(resource: IResource, method: str, response: Dict):
    """Register *method* on *resource* as a mock endpoint that returns the canned *response*."""
    # The mock integration serializes the supplied payload once at synth time
    # and plays it back verbatim for every request.
    canned_integration = aws_apigateway.MockIntegration(
        request_templates={
            "application/json": json.dumps({"statusCode": 200})
        },
        integration_responses=[{
            "statusCode": "200",
            "responseTemplates": {
                "application/json": json.dumps(response)
            },
        }],
    )
    empty_model_response = {
        "statusCode": "200",
        "responseModels": {
            "application/json": aws_apigateway.Model.EMPTY_MODEL
        },
    }
    resource.add_method(method, canned_integration,
                        method_responses=[empty_model_response])
def add_cors_options(api_resource: apigateway.IResource):
    """Add a mock OPTIONS method to *api_resource* that answers CORS preflight requests."""
    # Values must be single-quoted literals per API Gateway's mapping syntax.
    preflight_header_values = {
        "method.response.header.Access-Control-Allow-Headers":
            "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token,X-Amz-User-Agent'",
        "method.response.header.Access-Control-Allow-Origin": "'*'",
        "method.response.header.Access-Control-Allow-Credentials": "'false'",
        "method.response.header.Access-Control-Allow-Methods":
            "'OPTIONS,GET,PUT,POST,DELETE'",
    }
    # NEVER: reject request bodies with unmapped content types instead of passing through.
    preflight_integration = apigateway.MockIntegration(
        integration_responses=[{
            "statusCode": "200",
            "responseParameters": preflight_header_values,
        }],
        passthrough_behavior=apigateway.PassthroughBehavior.NEVER,
        request_templates={"application/json": '{"statusCode": 200}'},
    )
    allowed_response_headers = {
        "method.response.header.Access-Control-Allow-Headers": True,
        "method.response.header.Access-Control-Allow-Methods": True,
        "method.response.header.Access-Control-Allow-Credentials": True,
        "method.response.header.Access-Control-Allow-Origin": True,
    }
    api_resource.add_method(
        "OPTIONS",
        preflight_integration,
        method_responses=[{
            "statusCode": "200",
            "responseParameters": allowed_response_headers,
        }],
    )
def enable_cors(self):
    """Attach a mock OPTIONS method to self.cdk_resource so CORS preflight calls succeed."""
    # TODO: this shares a lot of code with `handle_verb`
    # perhaps, make a helper function, that abstracts the shared code
    cors_response_values = {
        'method.response.header.Access-Control-Allow-Headers':
            "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token'",
        'method.response.header.Access-Control-Allow-Origin': "'*'",
        'method.response.header.Access-Control-Allow-Methods': "'GET,OPTIONS'"
    }
    preflight = aws_apigateway.MockIntegration(
        integration_responses=[{
            'statusCode': '200',
            'responseParameters': cors_response_values
        }],
        passthrough_behavior=aws_apigateway.PassthroughBehavior.WHEN_NO_MATCH,
        request_templates={"application/json": "{\"statusCode\":200}"},
    )
    permitted_headers = {
        'method.response.header.Access-Control-Allow-Headers': True,
        'method.response.header.Access-Control-Allow-Methods': True,
        'method.response.header.Access-Control-Allow-Origin': True,
    }
    self.cdk_resource.add_method(
        'OPTIONS',
        preflight,
        method_responses=[{
            'statusCode': '200',
            'responseParameters': permitted_headers
        }],
    )
def add_cors_options(apigw_resource):
    """Register the CORS preflight (OPTIONS) handler on *apigw_resource*."""
    # Single-quoted literals are required by API Gateway's parameter mapping.
    preflight_response = apigw_.IntegrationResponse(
        status_code="200",
        response_parameters={
            'method.response.header.Access-Control-Allow-Headers':
                "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token'",
            'method.response.header.Access-Control-Allow-Origin': "'*'",
            'method.response.header.Access-Control-Allow-Methods': "'GET,OPTIONS'"
        })
    method_response = apigw_.MethodResponse(
        status_code="200",
        response_parameters={
            'method.response.header.Access-Control-Allow-Headers': True,
            'method.response.header.Access-Control-Allow-Methods': True,
            'method.response.header.Access-Control-Allow-Origin': True,
        })
    apigw_resource.add_method(
        http_method='OPTIONS',
        integration=apigw_.MockIntegration(
            integration_responses=[preflight_response],
            passthrough_behavior=apigw_.PassthroughBehavior.WHEN_NO_MATCH,
            request_templates={"application/json": "{\"statusCode\":200}"}),
        method_responses=[method_response])
def __init__(self, scope: cdk.Construct, construct_id: str, **kwargs) -> None:
    """Build the lntipbot stack: Lambda layer/role, API Gateway endpoints,
    scheduled reddit-scanner events, backup bucket, and the bot server.

    NOTE(review): relies on several helpers defined elsewhere on this class
    (installRequirements, getIp, createVpc, createLambdaRole, createLambda,
    createWithdrawWorkflow, createTipWorkflow, createServerRole,
    createSecurityGroup, createServer, createOps) — their semantics are not
    visible here.
    """
    super().__init__(scope, construct_id, **kwargs)
    codeLocation = 'lambdas'
    # presumably builds a pip-installed layer directory from codeLocation — TODO confirm
    layerLocation = self.installRequirements(codeLocation)
    self.ip = self.getIp()
    self.vpc = self.createVpc()
    self.lambdaRole = self.createLambdaRole()
    # All bot Lambdas share one code asset and one dependency layer.
    self.lambdaCode = lambda_.Code.from_asset(codeLocation)
    self.lambdaLayer = lambda_.LayerVersion(self, 'lambdaLayer',
        code=lambda_.Code.from_asset(layerLocation),
        compatible_runtimes=[
            lambda_.Runtime.PYTHON_3_8
        ]
    )
    # Role assumed by Step Functions; only needs to invoke Lambdas.
    self.statesRole = iam.Role(self, 'statesExecutionRole',
        assumed_by=iam.ServicePrincipal('states.amazonaws.com'),
        inline_policies={
            'StatesExecutionPolicy': iam.PolicyDocument(
                statements=[
                    iam.PolicyStatement(
                        effect=iam.Effect.ALLOW,
                        actions=['lambda:InvokeFunction'],
                        resources=['*']
                    )
                ]
            )
        }
    )
    api = agw.RestApi(self, 'lntipbot',
        endpoint_types=[agw.EndpointType.REGIONAL],
        deploy_options=agw.StageOptions(
            metrics_enabled=True
        )
    )
    # /info: static 301 redirect to the bot's wiki page, cached 5 minutes.
    api.root.add_resource('info', default_integration=agw.MockIntegration(
        integration_responses=[
            agw.IntegrationResponse(
                status_code='301',
                response_parameters={
                    'method.response.header.Location': '\'https://www.reddit.com/r/LNTipBot2/wiki/index\'',
                    'method.response.header.Cache-Control': '\'max-age=300\''
                }
            )
        ],
        request_templates={
            'application/json': '{"statusCode": 301}'
        }
    )).add_method('GET', method_responses=[{
        'statusCode': '301',
        'responseParameters': {
            'method.response.header.Location': True,
            'method.response.header.Cache-Control': True
        }
    }]
    )
    # /uri and /qr are thin Lambda proxies.
    api.root.add_resource('uri', default_integration=agw.LambdaIntegration(
        self.createLambda('invoiceUriFunction', 'getURI.getURI')
    )).add_method('GET')
    api.root.add_resource('qr', default_integration=agw.LambdaIntegration(
        self.createLambda('qrFunction', 'qrEncoder.qrEncoder')
    )).add_method('GET')
    # Refresh the reddit OAuth token every 28 minutes.
    events.Rule(self, 'oauthRefreshEvent',
        schedule=events.Schedule.rate(cdk.Duration.minutes(28)),
        targets=[eventsTargets.LambdaFunction(
            self.createLambda('oauthFunction', 'redditOAuthRequester.redditOAuthRequester')
        )]
    )
    self.settledInvoiceHandler = self.createLambda('settledInvoiceHandler', 'settledInvoiceHandler.settledInvoiceHandler')
    self.createLambda('apiTest', 'lambda_function.lambda_handler')
    withdrawWorkflow = self.createWithdrawWorkflow()
    tipWorkflow = self.createTipWorkflow()
    # Scan reddit comments once a minute; concurrency capped at 1 so scans
    # never overlap, and the 55s timeout keeps each run inside the interval.
    events.Rule(self, 'redditCommentScannerEvent',
        schedule=events.Schedule.rate(cdk.Duration.minutes(1)),
        targets=[eventsTargets.LambdaFunction(
            lambda_.Function(self, 'redditCommentScanner',
                code=self.lambdaCode,
                runtime=lambda_.Runtime.PYTHON_3_8,
                handler='scanComments.scannerLoop',
                role=self.lambdaRole,
                layers=[self.lambdaLayer],
                timeout=cdk.Duration.seconds(55),
                reserved_concurrent_executions=1
            ),
            # The scanner receives the workflow ARNs in its event payload.
            event=events.RuleTargetInput.from_object({
                'tipWorkflowArn': tipWorkflow.state_machine_arn,
                'withdrawWorkflowArn': withdrawWorkflow.state_machine_arn
            })
        )]
    )
    self.backupBucket = s3.Bucket(self, 'bitcoindBackups',
        block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
        bucket_name='bitcoind-pruned-backups-lntipbot',
    )
    self.serverRole = self.createServerRole()
    self.securityGroup = self.createSecurityGroup()
    self.createServer()
    self.createOps()
def __init__(self, scope: core.Construct, id: str, props: KinesisFirehoseStackProps, **kwargs) -> None:
    """Build the click-processing pipeline: API Gateway PUT /clicks ->
    Kinesis Firehose -> enrichment Lambda -> S3 destination bucket.

    Args:
        props: carries the DynamoDB table and API Gateway references
            created by sibling stacks (shape defined elsewhere).
    """
    super().__init__(scope, id, **kwargs)
    lambda_repository = aws_codecommit.Repository(
        self,
        "ClicksProcessingLambdaRepository",
        repository_name="MythicalMysfits-ClicksProcessingLambdaRepository",
    )
    core.CfnOutput(
        self,
        "kinesisRepositoryCloneUrlHttp",
        value=lambda_repository.repository_clone_url_http,
        description="Clicks Processing Lambda Repository Clone URL HTTP",
    )
    core.CfnOutput(
        self,
        "kinesisRepositoryCloneUrlSsh",
        value=lambda_repository.repository_clone_url_ssh,
        description="Clicks Processing Lambda Repository Clone URL SSH",
    )
    # Final landing bucket for processed click records.
    clicks_destination_bucket = aws_s3.Bucket(self, "Bucket", versioned=True)
    # The enrichment Lambda only needs to read items from the mysfits table.
    lambda_function_policy = aws_iam.PolicyStatement()
    lambda_function_policy.add_actions("dynamodb:GetItem")
    lambda_function_policy.add_resources(props.table.table_arn)
    mysfits_clicks_processor = aws_lambda.Function(
        self,
        "Function",
        handler="streamProcessor.processRecord",
        runtime=aws_lambda.Runtime.PYTHON_3_7,
        description=
        "An Amazon Kinesis Firehose stream processor that enriches click records to not just include a mysfitId, but also other attributes that can be analyzed later.",
        memory_size=128,
        code=aws_lambda.Code.asset("../../lambda-streaming-processor"),
        timeout=core.Duration.seconds(30),
        initial_policy=[lambda_function_policy],
        environment={
            # TODO: this seems better than having the user copy/paste it in, but is it the best way?
            "MYSFITS_API_URL":
            "https://{}.execute-api.{}.amazonaws.com/prod/".format(
                props.api_gateway.ref, core.Aws.REGION)
        },
    )
    # Role Firehose assumes to write to S3 and invoke the processor Lambda.
    firehose_delivery_role = aws_iam.Role(
        self,
        "FirehoseDeliveryRole",
        role_name="FirehoseDeliveryRole",
        assumed_by=aws_iam.ServicePrincipal("firehose.amazonaws.com"),
        external_id=core.Aws.ACCOUNT_ID,
    )
    firehose_delivery_policy_s3_statement = aws_iam.PolicyStatement()
    firehose_delivery_policy_s3_statement.add_actions(
        "s3:AbortMultipartUpload",
        "s3:GetBucketLocation",
        "s3:GetObject",
        "s3:ListBucket",
        "s3:ListBucketMultipartUploads",
        "s3:PutObject",
    )
    # Both the bucket itself and its objects must be in the resource list.
    firehose_delivery_policy_s3_statement.add_resources(
        clicks_destination_bucket.bucket_arn)
    firehose_delivery_policy_s3_statement.add_resources(
        clicks_destination_bucket.arn_for_objects("*"))
    firehose_delivery_policy_lambda_statement = aws_iam.PolicyStatement()
    firehose_delivery_policy_lambda_statement.add_actions(
        "lambda:InvokeFunction")
    firehose_delivery_policy_lambda_statement.add_resources(
        mysfits_clicks_processor.function_arn)
    firehose_delivery_role.add_to_policy(
        firehose_delivery_policy_s3_statement)
    firehose_delivery_role.add_to_policy(
        firehose_delivery_policy_lambda_statement)
    # L1 (Cfn) delivery stream: buffer up to 60s/50MB, run the Lambda
    # processor on each batch, land raw output under the firehose/ prefix.
    mysfits_firehose_to_s3 = aws_kinesisfirehose.CfnDeliveryStream(
        self,
        "DeliveryStream",
        extended_s3_destination_configuration=aws_kinesisfirehose.
        CfnDeliveryStream.ExtendedS3DestinationConfigurationProperty(
            bucket_arn=clicks_destination_bucket.bucket_arn,
            buffering_hints=aws_kinesisfirehose.CfnDeliveryStream.
            BufferingHintsProperty(interval_in_seconds=60, size_in_m_bs=50),
            compression_format="UNCOMPRESSED",
            prefix="firehose/",
            role_arn=firehose_delivery_role.role_arn,
            processing_configuration=aws_kinesisfirehose.CfnDeliveryStream.
            ProcessingConfigurationProperty(
                enabled=True,
                processors=[
                    aws_kinesisfirehose.CfnDeliveryStream.
                    ProcessorProperty(
                        parameters=[
                            aws_kinesisfirehose.CfnDeliveryStream.
                            ProcessorParameterProperty(
                                parameter_name="LambdaArn",
                                parameter_value=mysfits_clicks_processor.
                                function_arn,
                            )
                        ],
                        type="Lambda",
                    )
                ],
            ),
        ),
    )
    # Explicit resource-based permission letting Firehose invoke the Lambda.
    aws_lambda.CfnPermission(
        self,
        "Permission",
        action="lambda:InvokeFunction",
        function_name=mysfits_clicks_processor.function_arn,
        principal="firehose.amazonaws.com",
        source_account=core.Aws.ACCOUNT_ID,
        source_arn=mysfits_firehose_to_s3.attr_arn,
    )
    # Role API Gateway assumes to put records directly into Firehose.
    click_processing_api_role = aws_iam.Role(
        self,
        "ClickProcessingApiRole",
        assumed_by=aws_iam.ServicePrincipal("apigateway.amazonaws.com"),
    )
    api_policy = aws_iam.PolicyStatement()
    api_policy.add_actions("firehose:PutRecord")
    api_policy.add_resources(mysfits_firehose_to_s3.attr_arn)
    aws_iam.Policy(
        self,
        "ClickProcessingApiPolicy",
        policy_name="api_gateway_firehose_proxy_role",
        statements=[api_policy],
        roles=[click_processing_api_role],
    )
    api = aws_apigateway.RestApi(
        self,
        "APIEndpoint",
        rest_api_name="ClickProcessing API Service",
        endpoint_types=[aws_apigateway.EndpointType.REGIONAL],
    )
    clicks = api.root.add_resource("clicks")
    # PUT /clicks: service integration straight to Firehose PutRecord —
    # the mapping template base64-encodes the request body into the record.
    clicks.add_method(
        "PUT",
        aws_apigateway.AwsIntegration(
            service="firehose",
            integration_http_method="POST",
            action="PutRecord",
            options=aws_apigateway.IntegrationOptions(
                connection_type=aws_apigateway.ConnectionType.INTERNET,
                credentials_role=click_processing_api_role,
                integration_responses=[
                    aws_apigateway.IntegrationResponse(
                        status_code="200",
                        response_templates={
                            "application/json": '{"status": "OK"}'
                        },
                        response_parameters={
                            "method.response.header.Access-Control-Allow-Headers":
                            "'Content-Type'",
                            "method.response.header.Access-Control-Allow-Methods":
                            "'OPTIONS,PUT'",
                            "method.response.header.Access-Control-Allow-Origin":
                            "'*'",
                        },
                    )
                ],
                request_parameters={
                    "integration.request.header.Content-Type":
                    "'application/x-amz-json-1.1'"
                },
                request_templates={
                    "application/json":
                    """{ "DeliveryStreamName": "%s", "Record": { "Data": "$util.base64Encode($input.json('$'))" }}"""
                    % mysfits_firehose_to_s3.ref
                },
            ),
        ),
        method_responses=[
            aws_apigateway.MethodResponse(
                status_code="200",
                response_parameters={
                    "method.response.header.Access-Control-Allow-Headers":
                    True,
                    "method.response.header.Access-Control-Allow-Methods":
                    True,
                    "method.response.header.Access-Control-Allow-Origin":
                    True,
                },
            )
        ],
    )
    # OPTIONS /clicks: mock integration answering CORS preflight requests.
    clicks.add_method(
        "OPTIONS",
        aws_apigateway.MockIntegration(
            integration_responses=[
                aws_apigateway.IntegrationResponse(
                    status_code="200",
                    response_parameters={
                        "method.response.header.Access-Control-Allow-Headers":
                        "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token,X-Amz-User-Agent'",
                        "method.response.header.Access-Control-Allow-Origin":
                        "'*'",
                        "method.response.header.Access-Control-Allow-Credentials":
                        "'false'",
                        "method.response.header.Access-Control-Allow-Methods":
                        "'OPTIONS,GET,PUT,POST,DELETE'",
                    },
                )
            ],
            passthrough_behavior=aws_apigateway.PassthroughBehavior.NEVER,
            request_templates={"application/json": '{"statusCode": 200}'},
        ),
        method_responses=[
            aws_apigateway.MethodResponse(
                status_code="200",
                response_parameters={
                    "method.response.header.Access-Control-Allow-Headers":
                    True,
                    "method.response.header.Access-Control-Allow-Methods":
                    True,
                    "method.response.header.Access-Control-Allow-Credentials":
                    True,
                    "method.response.header.Access-Control-Allow-Origin":
                    True,
                },
            )
        ],
    )
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Build the questions microservice: POST /questions -> Lambda ->
    DynamoDB, with a stream-triggered Lambda that emails the admin via SNS.
    """
    super().__init__(scope, id, **kwargs)
    lambda_repository = aws_codecommit.Repository(
        self,
        "QuestionsLambdaRepository",
        repository_name="MythicalMysfits-QuestionsLambdaRepository",
    )
    core.CfnOutput(
        self,
        "questionsRepositoryCloneUrlHTTP",
        value=lambda_repository.repository_clone_url_http,
        description="Questions Lambda Repository Clone URL HTTP",
    )
    core.CfnOutput(
        self,
        "questionsRepositoryCloneUrlSSH",
        value=lambda_repository.repository_clone_url_ssh,
        description="Questions Lambda Repository Clone URL SSH",
    )
    # NEW_IMAGE stream feeds the processing Lambda with each inserted question.
    table = aws_dynamodb.Table(
        self,
        "Table",
        table_name="MysfitsQuestionsTable",
        partition_key=aws_dynamodb.Attribute(
            name="QuestionId", type=aws_dynamodb.AttributeType.STRING),
        stream=aws_dynamodb.StreamViewType.NEW_IMAGE,
    )
    # Write-only access for the ingest Lambda.
    lambda_function_policy_statement_ddb = aws_iam.PolicyStatement()
    lambda_function_policy_statement_ddb.add_actions("dynamodb:PutItem")
    lambda_function_policy_statement_ddb.add_resources(table.table_arn)
    # X-Ray permissions shared by both Lambdas (tracing=ACTIVE below).
    lambda_function_policy_statement_xray = aws_iam.PolicyStatement()
    lambda_function_policy_statement_xray.add_actions(
        "xray:PutTraceSegments",
        "xray:PutTelemetryRecords",
        "xray:GetSamplingRules",
        "xray:GetSamplingTargets",
        "xray:GetSamplingStatisticSummaries",
    )
    lambda_function_policy_statement_xray.add_all_resources()
    mysfits_post_question = aws_lambda.Function(
        self,
        "PostQuestionFunction",
        handler="mysfitsPostQuestion.postQuestion",
        runtime=aws_lambda.Runtime.PYTHON_3_6,
        description=
        "A microservice Lambda function that receives a new question submitted to the MythicalMysfits website from a user and inserts it into a DynamoDB database table.",
        memory_size=128,
        code=aws_lambda.Code.asset(
            os.path.join("..", "..", "lambda-questions",
                         "PostQuestionsService")),
        timeout=core.Duration.seconds(30),
        initial_policy=[
            lambda_function_policy_statement_ddb,
            lambda_function_policy_statement_xray,
        ],
        tracing=aws_lambda.Tracing.ACTIVE,
    )
    topic = aws_sns.Topic(
        self,
        "Topic",
        display_name="MythicalMysfitsQuestionsTopic",
        topic_name="MythicalMysfitsQuestionsTopic",
    )
    # Admin notification address comes from the environment at synth time.
    topic.add_subscription(subs.EmailSubscription(os.environ["SNS_EMAIL"]))
    # NOTE(review): "lamdaa" typo in the variable name kept as-is (local only).
    post_question_lamdaa_function_policy_statement_sns = aws_iam.PolicyStatement(
    )
    post_question_lamdaa_function_policy_statement_sns.add_actions(
        "sns:Publish")
    post_question_lamdaa_function_policy_statement_sns.add_resources(
        topic.topic_arn)
    # Stream consumer: reads each new question from the table stream and
    # publishes it to the SNS topic (batch_size=1, from the oldest record).
    mysfits_process_question_stream = aws_lambda.Function(
        self,
        "ProcessQuestionStreamFunction",
        handler="mysfitsProcessStream.processStream",
        runtime=aws_lambda.Runtime.PYTHON_3_6,
        description=
        "An AWS Lambda function that will process all new questions posted to mythical mysfits and notify the site administrator of the question that was asked.",
        memory_size=128,
        code=aws_lambda.Code.asset(
            os.path.join("..", "..", "lambda-questions",
                         "ProcessQuestionsStream")),
        timeout=core.Duration.seconds(30),
        initial_policy=[
            post_question_lamdaa_function_policy_statement_sns,
            lambda_function_policy_statement_xray,
        ],
        tracing=aws_lambda.Tracing.ACTIVE,
        environment={"SNS_TOPIC_ARN": topic.topic_arn},
        events=[
            event.DynamoEventSource(
                table,
                starting_position=aws_lambda.StartingPosition.TRIM_HORIZON,
                batch_size=1,
            )
        ],
    )
    # Role API Gateway assumes to invoke the ingest Lambda.
    questions_api_role = aws_iam.Role(
        self,
        "QuestionsApiRole",
        assumed_by=aws_iam.ServicePrincipal("apigateway.amazonaws.com"),
    )
    api_policy = aws_iam.PolicyStatement()
    api_policy.add_actions("lambda:InvokeFunction")
    api_policy.add_resources(mysfits_post_question.function_arn)
    aws_iam.Policy(
        self,
        "QuestionsApiPolicy",
        policy_name="questions_api_policy",
        statements=[api_policy],
        roles=[questions_api_role],
    )
    questions_integration = aws_apigateway.LambdaIntegration(
        mysfits_post_question,
        credentials_role=questions_api_role,
        integration_responses=[
            aws_apigateway.IntegrationResponse(
                status_code="200",
                response_templates={
                    "application/json": '{"status": "OK"}'
                },
            )
        ],
    )
    # proxy=False: routes are declared explicitly below.
    api = aws_apigateway.LambdaRestApi(
        self,
        "APIEndpoint",
        handler=mysfits_post_question,
        options=aws_apigateway.RestApiProps(
            rest_api_name="Questions API Server"),
        proxy=False,
    )
    questions_method = api.root.add_resource("questions")
    questions_method.add_method(
        "POST",
        questions_integration,
        method_responses=[
            aws_apigateway.MethodResponse(status_code="200")
        ],
        authorization_type=aws_apigateway.AuthorizationType.NONE,
    )
    # CORS preflight for /questions.
    questions_method.add_method(
        "OPTIONS",
        aws_apigateway.MockIntegration(
            integration_responses=[
                aws_apigateway.IntegrationResponse(
                    status_code="200",
                    response_parameters={
                        "method.response.header.Access-Control-Allow-Headers":
                        "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token,X-Amz-User-Agent'",
                        "method.response.header.Access-Control-Allow-Origin":
                        "'*'",
                        "method.response.header.Access-Control-Allow-Credentials":
                        "'false'",
                        "method.response.header.Access-Control-Allow-Methods":
                        "'OPTIONS,GET,PUT,POST,DELETE'",
                    },
                )
            ],
            passthrough_behavior=aws_apigateway.PassthroughBehavior.NEVER,
            request_templates={"application/json": '{"statusCode": 200}'},
        ),
        method_responses=[
            aws_apigateway.MethodResponse(
                status_code="200",
                response_parameters={
                    "method.response.header.Access-Control-Allow-Headers":
                    True,
                    "method.response.header.Access-Control-Allow-Methods":
                    True,
                    "method.response.header.Access-Control-Allow-Credentials":
                    True,
                    "method.response.header.Access-Control-Allow-Origin":
                    True,
                },
            )
        ],
    )
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Build the recommendations stack: a SageMaker notebook instance plus a
    Lambda-backed /recommendations API that calls a SageMaker endpoint.
    """
    super().__init__(scope, id, **kwargs)
    # Note: typo of role name is copied from original workshop
    mysfits_notebook_role = aws_iam.Role(
        self,
        "MysfitsNotbookRole",
        assumed_by=aws_iam.ServicePrincipal("sagemaker.amazonaws.com"),
    )
    # Broad permissions for the notebook: SageMaker, ECR pulls, logs,
    # CloudWatch metrics, and S3 object/bucket management (all resources).
    mysfits_notebook_policy = aws_iam.PolicyStatement()
    mysfits_notebook_policy.add_actions(
        "sagemaker:*",
        "ecr:GetAuthorizationToken",
        "ecr:GetDownloadUrlForLayer",
        "ecr:BatchGetImage",
        "ecr:BatchCheckLayerAvailability",
        "cloudwatch:PutMetricData",
        "logs:CreateLogGroup",
        "logs:CreateLogStream",
        "logs:DescribeLogStreams",
        "logs:PutLogEvents",
        "logs:GetLogEvents",
        "s3:CreateBucket",
        "s3:ListBucket",
        "s3:GetBucketLocation",
        "s3:GetObject",
        "s3:PutObject",
        "s3:DeleteObject",
    )
    mysfits_notebook_policy.add_all_resources()
    # iam:PassRole restricted to passing roles to the SageMaker service.
    mysfits_notebook_pass_role_policy = aws_iam.PolicyStatement()
    mysfits_notebook_pass_role_policy.add_actions("iam:PassRole")
    mysfits_notebook_pass_role_policy.add_all_resources()
    mysfits_notebook_pass_role_policy.add_condition(
        "StringEquals", {"iam:PassedToService": "sagemaker.amazonaws.com"})
    aws_iam.Policy(
        self,
        "MysfitsNotebookPolicy",
        statements=[
            mysfits_notebook_pass_role_policy, mysfits_notebook_policy
        ],
        roles=[mysfits_notebook_role],
    )
    # NOTE(review): variable is unused afterwards; the construct still
    # registers itself with the stack through its scope (self).
    notebook_instance = aws_sagemaker.CfnNotebookInstance(
        self,
        "MythicalMysfits-SageMaker-Notebook",
        instance_type="ml.t2.medium",
        role_arn=mysfits_notebook_role.role_arn,
    )
    lambda_repository = aws_codecommit.Repository(
        self,
        "RecommendationsLambdaRepository",
        repository_name="MythicalMysfits-RecommendationsLambdaRepository",
    )
    core.CfnOutput(
        self,
        "recommandationsRepositoryCloneUrlHttp",
        value=lambda_repository.repository_clone_url_http,
        description="Recommendations Lambda Repository Clone Url HTTP",
    )
    core.CfnOutput(
        self,
        "recommandationsRepositoryCloneUrlSsh",
        value=lambda_repository.repository_clone_url_ssh,
        description="Recommendations Lambda Repository Clone Url SSH",
    )
    # The recommendations Lambda only needs to invoke SageMaker endpoints.
    recommendations_lambda_function_policy_statement = aws_iam.PolicyStatement(
    )
    recommendations_lambda_function_policy_statement.add_actions(
        "sagemaker:InvokeEndpoint")
    recommendations_lambda_function_policy_statement.add_all_resources()
    mysfits_recommendations = aws_lambda.Function(
        self,
        "Function",
        handler="recommendations.recommend",
        runtime=aws_lambda.Runtime.PYTHON_3_6,
        description="A microservice backend to a SageMaker endpoint",
        memory_size=128,
        code=aws_lambda.Code.asset(
            os.path.join("..", "..", "lambda-recommendations/service")),
        timeout=core.Duration.seconds(30),
        initial_policy=[recommendations_lambda_function_policy_statement],
    )
    # Role API Gateway assumes to invoke the Lambda.
    questions_api_role = aws_iam.Role(
        self,
        "QuestionsApiRole",
        assumed_by=aws_iam.ServicePrincipal("apigateway.amazonaws.com"),
    )
    api_policy = aws_iam.PolicyStatement()
    api_policy.add_actions("lambda:InvokeFunction")
    api_policy.add_resources(mysfits_recommendations.function_arn)
    aws_iam.Policy(
        self,
        "QuestionsApiPolicy",
        policy_name="questions_api_policy",
        statements=[api_policy],
        roles=[questions_api_role],
    )
    questions_integration = aws_apigateway.LambdaIntegration(
        mysfits_recommendations,
        credentials_role=questions_api_role,
        integration_responses=[
            aws_apigateway.IntegrationResponse(
                status_code="200",
                response_templates={
                    "application/json": '{"status": "OK"}'
                },
            )
        ],
    )
    # proxy=False: the /recommendations route is declared explicitly below.
    api = aws_apigateway.LambdaRestApi(
        self,
        "APIEndpoint",
        handler=mysfits_recommendations,
        rest_api_name="Recommendation API Service",
        proxy=False,
    )
    recommendations_method = api.root.add_resource("recommendations")
    recommendations_method.add_method(
        "POST",
        questions_integration,
        method_responses=[
            aws_apigateway.MethodResponse(
                status_code="200",
                response_parameters={
                    "method.response.header.Access-Control-Allow-Headers":
                    True,
                    "method.response.header.Access-Control-Allow-Methods":
                    True,
                    "method.response.header.Access-Control-Allow-Origin":
                    True,
                },
            )
        ],
        authorization_type=aws_apigateway.AuthorizationType.NONE,
    )
    # CORS preflight for /recommendations.
    recommendations_method.add_method(
        "OPTIONS",
        aws_apigateway.MockIntegration(
            integration_responses=[
                aws_apigateway.IntegrationResponse(
                    status_code="200",
                    response_parameters={
                        "method.response.header.Access-Control-Allow-Headers":
                        "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token,X-Amz-User-Agent'",
                        "method.response.header.Access-Control-Allow-Origin":
                        "'*'",
                        "method.response.header.Access-Control-Allow-Credentials":
                        "'false'",
                        "method.response.header.Access-Control-Allow-Methods":
                        "'OPTIONS,GET,PUT,POST,DELETE'",
                    },
                )
            ],
            passthrough_behavior=aws_apigateway.PassthroughBehavior.NEVER,
            request_templates={"application/json": '{"statusCode": 200}'},
        ),
        method_responses=[
            aws_apigateway.MethodResponse(
                status_code="200",
                response_parameters={
                    "method.response.header.Access-Control-Allow-Headers":
                    True,
                    "method.response.header.Access-Control-Allow-Methods":
                    True,
                    "method.response.header.Access-Control-Allow-Credentials":
                    True,
                    "method.response.header.Access-Control-Allow-Origin":
                    True,
                },
            )
        ],
    )
def __init__(self, scope: core.Construct, _id: str, **kwargs) -> None:
    """Build the serverless S3 migration stack: SSM config parameters,
    DynamoDB job table, SQS queues, worker/jobsender Lambdas, CloudWatch
    metric filters, dashboard and DLQ alarm.

    NOTE(review): relies on module-level configuration names defined
    elsewhere in this file (ssm_parameter_credentials, bucket_para,
    ignore_list, Des_bucket_default, JobType, alarm_email, ...).
    """
    super().__init__(scope, _id, **kwargs)
    # Setup SSM parameter of credentials, bucket_para, ignore_list
    ssm_credential_para = ssm.StringParameter.from_secure_string_parameter_attributes(
        self,
        "ssm_parameter_credentials",
        parameter_name=ssm_parameter_credentials,
        version=1)
    ssm_bucket_para = ssm.StringParameter(self, "s3bucket_serverless",
                                          string_value=json.dumps(
                                              bucket_para, indent=4))
    ssm_parameter_ignore_list = ssm.StringParameter(
        self, "s3_migrate_ignore_list", string_value=ignore_list)
    # Setup DynamoDB
    ddb_file_list = ddb.Table(self, "s3migrate_serverless",
                              partition_key=ddb.Attribute(
                                  name="Key",
                                  type=ddb.AttributeType.STRING),
                              billing_mode=ddb.BillingMode.PAY_PER_REQUEST)
    # GSI lets jobs be queried by destination bucket.
    ddb_file_list.add_global_secondary_index(
        partition_key=ddb.Attribute(name="desBucket",
                                    type=ddb.AttributeType.STRING),
        index_name="desBucket-index",
        projection_type=ddb.ProjectionType.INCLUDE,
        non_key_attributes=["desKey", "versionId"])
    # Setup SQS
    # Messages failing 60 deliveries land in the DLQ (14-day retention).
    sqs_queue_DLQ = sqs.Queue(self, "s3migrate_serverless_Q_DLQ",
                              visibility_timeout=core.Duration.minutes(15),
                              retention_period=core.Duration.days(14))
    sqs_queue = sqs.Queue(self, "s3migrate_serverless_Q",
                          visibility_timeout=core.Duration.minutes(15),
                          retention_period=core.Duration.days(14),
                          dead_letter_queue=sqs.DeadLetterQueue(
                              max_receive_count=60, queue=sqs_queue_DLQ))
    # Setup API for Lambda to get IP address (for debug networking routing purpose)
    checkip = api.RestApi(
        self,
        "lambda-checkip-api",
        cloud_watch_role=True,
        deploy=True,
        description="For Lambda get IP address",
        default_integration=api.MockIntegration(
            integration_responses=[
                api.IntegrationResponse(status_code="200",
                                        response_templates={
                                            "application/json":
                                            "$context.identity.sourceIp"
                                        })
            ],
            request_templates={"application/json": '{"statusCode": 200}'}),
        endpoint_types=[api.EndpointType.REGIONAL])
    checkip.root.add_method("GET", method_responses=[
        api.MethodResponse(
            status_code="200",
            response_models={
                "application/json": api.Model.EMPTY_MODEL
            })
    ])
    # Setup Lambda functions
    # Worker: transfers individual objects; config passed via environment.
    handler = lam.Function(self, "s3-migrate-worker",
                           code=lam.Code.asset("./lambda"),
                           handler="lambda_function_worker.lambda_handler",
                           runtime=lam.Runtime.PYTHON_3_8,
                           memory_size=1024,
                           timeout=core.Duration.minutes(15),
                           tracing=lam.Tracing.ACTIVE,
                           environment={
                               'table_queue_name': ddb_file_list.table_name,
                               'Des_bucket_default': Des_bucket_default,
                               'Des_prefix_default': Des_prefix_default,
                               'StorageClass': StorageClass,
                               'checkip_url': checkip.url,
                               'ssm_parameter_credentials':
                               ssm_parameter_credentials,
                               'JobType': JobType,
                               'MaxRetry': MaxRetry,
                               'MaxThread': MaxThread,
                               'MaxParallelFile': MaxParallelFile,
                               'JobTimeout': JobTimeout,
                               'UpdateVersionId': UpdateVersionId,
                               'GetObjectWithVersionId':
                               GetObjectWithVersionId
                           })
    # Jobsender: compares source/destination buckets and enqueues jobs.
    handler_jobsender = lam.Function(
        self, "s3-migrate-jobsender",
        code=lam.Code.asset("./lambda"),
        handler="lambda_function_jobsender.lambda_handler",
        runtime=lam.Runtime.PYTHON_3_8,
        memory_size=1024,
        timeout=core.Duration.minutes(15),
        tracing=lam.Tracing.ACTIVE,
        environment={
            'table_queue_name': ddb_file_list.table_name,
            'StorageClass': StorageClass,
            'checkip_url': checkip.url,
            'sqs_queue': sqs_queue.queue_name,
            'ssm_parameter_credentials': ssm_parameter_credentials,
            'ssm_parameter_ignore_list':
            ssm_parameter_ignore_list.parameter_name,
            'ssm_parameter_bucket': ssm_bucket_para.parameter_name,
            'JobType': JobType,
            'MaxRetry': MaxRetry,
            'JobsenderCompareVersionId': JobsenderCompareVersionId
        })
    # Allow lambda read/write DDB, SQS
    ddb_file_list.grant_read_write_data(handler)
    ddb_file_list.grant_read_write_data(handler_jobsender)
    sqs_queue.grant_send_messages(handler_jobsender)
    # SQS trigger Lambda worker
    handler.add_event_source(SqsEventSource(sqs_queue, batch_size=1))
    # Option1: Create S3 Bucket, all new objects in this bucket will be transmitted by Lambda Worker
    s3bucket = s3.Bucket(self, "s3_new_migrate")
    s3bucket.grant_read(handler)
    s3bucket.add_event_notification(s3.EventType.OBJECT_CREATED,
                                    s3n.SqsDestination(sqs_queue))
    # Option2: Allow Exist S3 Buckets to be read by Lambda functions.
    # Lambda Jobsender will scan and compare the these buckets and trigger Lambda Workers to transmit
    bucket_name = ''
    for b in bucket_para:
        # Skip when the same bucket is listed multiple times in a row.
        if bucket_name != b['src_bucket']:
            bucket_name = b['src_bucket']
            s3exist_bucket = s3.Bucket.from_bucket_name(
                self,
                bucket_name,  # the bucket name doubles as the construct id
                bucket_name=bucket_name)
            if JobType == 'PUT':
                s3exist_bucket.grant_read(handler_jobsender)
                s3exist_bucket.grant_read(handler)
            else:  # 'GET' mode
                s3exist_bucket.grant_read_write(handler_jobsender)
                s3exist_bucket.grant_read_write(handler)
    # Allow Lambda read ssm parameters
    ssm_bucket_para.grant_read(handler_jobsender)
    ssm_credential_para.grant_read(handler)
    ssm_credential_para.grant_read(handler_jobsender)
    ssm_parameter_ignore_list.grant_read(handler_jobsender)
    # Schedule cron event to trigger Lambda Jobsender per hour:
    event.Rule(self, 'cron_trigger_jobsender',
               schedule=event.Schedule.rate(core.Duration.hours(1)),
               targets=[target.LambdaFunction(handler_jobsender)])
    # TODO: Trigger event imediately, add custom resource lambda to invoke handler_jobsender
    # Create Lambda logs filter to create network traffic metric
    handler.log_group.add_metric_filter(
        "Completed-bytes",
        metric_name="Completed-bytes",
        metric_namespace="s3_migrate",
        metric_value="$bytes",
        filter_pattern=logs.FilterPattern.literal(
            '[info, date, sn, p="--->Complete", bytes, key]'))
    handler.log_group.add_metric_filter(
        "Uploading-bytes",
        metric_name="Uploading-bytes",
        metric_namespace="s3_migrate",
        metric_value="$bytes",
        filter_pattern=logs.FilterPattern.literal(
            '[info, date, sn, p="--->Uploading", bytes, key]'))
    handler.log_group.add_metric_filter(
        "Downloading-bytes",
        metric_name="Downloading-bytes",
        metric_namespace="s3_migrate",
        metric_value="$bytes",
        filter_pattern=logs.FilterPattern.literal(
            '[info, date, sn, p="--->Downloading", bytes, key]'))
    # Extract max memory used from the Lambda REPORT log line.
    handler.log_group.add_metric_filter(
        "MaxMemoryUsed",
        metric_name="MaxMemoryUsed",
        metric_namespace="s3_migrate",
        metric_value="$memory",
        filter_pattern=logs.FilterPattern.literal(
            '[head="REPORT", a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, '
            'a13, a14, a15, a16, memory, MB="MB", rest]'))
    lambda_metric_Complete = cw.Metric(namespace="s3_migrate",
                                       metric_name="Completed-bytes",
                                       statistic="Sum",
                                       period=core.Duration.minutes(1))
    lambda_metric_Upload = cw.Metric(namespace="s3_migrate",
                                     metric_name="Uploading-bytes",
                                     statistic="Sum",
                                     period=core.Duration.minutes(1))
    lambda_metric_Download = cw.Metric(namespace="s3_migrate",
                                       metric_name="Downloading-bytes",
                                       statistic="Sum",
                                       period=core.Duration.minutes(1))
    lambda_metric_MaxMemoryUsed = cw.Metric(
        namespace="s3_migrate",
        metric_name="MaxMemoryUsed",
        statistic="Maximum",
        period=core.Duration.minutes(1))
    # Count ERROR / WARNING / timeout occurrences in the worker's logs.
    handler.log_group.add_metric_filter(
        "ERROR",
        metric_name="ERROR-Logs",
        metric_namespace="s3_migrate",
        metric_value="1",
        filter_pattern=logs.FilterPattern.literal('"ERROR"'))
    handler.log_group.add_metric_filter(
        "WARNING",
        metric_name="WARNING-Logs",
        metric_namespace="s3_migrate",
        metric_value="1",
        filter_pattern=logs.FilterPattern.literal('"WARNING"'))
    # Task timed out
    handler.log_group.add_metric_filter(
        "TIMEOUT",
        metric_name="TIMEOUT-Logs",
        metric_namespace="s3_migrate",
        metric_value="1",
        filter_pattern=logs.FilterPattern.literal('"Task timed out"'))
    log_metric_ERROR = cw.Metric(namespace="s3_migrate",
                                 metric_name="ERROR-Logs",
                                 statistic="Sum",
                                 period=core.Duration.minutes(1))
    log_metric_WARNING = cw.Metric(namespace="s3_migrate",
                                   metric_name="WARNING-Logs",
                                   statistic="Sum",
                                   period=core.Duration.minutes(1))
    log_metric_TIMEOUT = cw.Metric(namespace="s3_migrate",
                                   metric_name="TIMEOUT-Logs",
                                   statistic="Sum",
                                   period=core.Duration.minutes(1))
    # Dashboard to monitor SQS and Lambda
    board = cw.Dashboard(self, "s3_migrate_serverless")
    board.add_widgets(
        cw.GraphWidget(title="Lambda-NETWORK",
                       left=[
                           lambda_metric_Download, lambda_metric_Upload,
                           lambda_metric_Complete
                       ]),
        cw.GraphWidget(title="Lambda-concurrent",
                       left=[
                           handler.metric(
                               metric_name="ConcurrentExecutions",
                               period=core.Duration.minutes(1))
                       ]),
        cw.GraphWidget(
            title="Lambda-invocations/errors/throttles",
            left=[
                handler.metric_invocations(
                    period=core.Duration.minutes(1)),
                handler.metric_errors(period=core.Duration.minutes(1)),
                handler.metric_throttles(period=core.Duration.minutes(1))
            ]),
        cw.GraphWidget(
            title="Lambda-duration",
            left=[
                handler.metric_duration(period=core.Duration.minutes(1))
            ]),
    )
    board.add_widgets(
        cw.GraphWidget(title="Lambda_MaxMemoryUsed(MB)",
                       left=[lambda_metric_MaxMemoryUsed]),
        cw.GraphWidget(title="ERROR/WARNING Logs",
                       left=[log_metric_ERROR],
                       right=[log_metric_WARNING, log_metric_TIMEOUT]),
        cw.GraphWidget(
            title="SQS-Jobs",
            left=[
                sqs_queue.metric_approximate_number_of_messages_visible(
                    period=core.Duration.minutes(1)),
                sqs_queue.
                metric_approximate_number_of_messages_not_visible(
                    period=core.Duration.minutes(1))
            ]),
        cw.SingleValueWidget(
            title="Running/Waiting and Dead Jobs",
            metrics=[
                sqs_queue.
                metric_approximate_number_of_messages_not_visible(
                    period=core.Duration.minutes(1)),
                sqs_queue.metric_approximate_number_of_messages_visible(
                    period=core.Duration.minutes(1)),
                sqs_queue_DLQ.
                metric_approximate_number_of_messages_not_visible(
                    period=core.Duration.minutes(1)),
                sqs_queue_DLQ.
                metric_approximate_number_of_messages_visible(
                    period=core.Duration.minutes(1))
            ],
            height=6))
    # Alarm for queue - DLQ
    # Fires as soon as any message appears in the DLQ (> 0 visible).
    alarm_DLQ = cw.Alarm(
        self,
        "SQS_DLQ",
        metric=sqs_queue_DLQ.metric_approximate_number_of_messages_visible(
        ),
        threshold=0,
        comparison_operator=cw.ComparisonOperator.GREATER_THAN_THRESHOLD,
        evaluation_periods=1,
        datapoints_to_alarm=1)
    alarm_topic = sns.Topic(self, "SQS queue-DLQ has dead letter")
    alarm_topic.add_subscription(
        subscription=sub.EmailSubscription(alarm_email))
    alarm_DLQ.add_alarm_action(action.SnsAction(alarm_topic))
    core.CfnOutput(self, "Dashboard",
                   value="CloudWatch Dashboard name s3_migrate_serverless")
def __init__(self, scope: cdk.Construct, construct_id: str, **kwargs) -> None:
    """Recommendations microservice stack.

    Wires together:
      * a Lambda function that invokes a SageMaker inference endpoint,
      * an IAM role/policy that lets API Gateway invoke that Lambda,
      * a non-proxy LambdaRestApi with a ``/recommendations`` resource
        exposing POST (the service) and OPTIONS (CORS pre-flight).
    """
    super().__init__(scope, construct_id, **kwargs)

    # # Create role and policy for the SageMaker notebook
    # mysfits_notebook_role = _iam.Role(
    #     self, 'MysfitsNotbookRole',
    #     assumed_by=_iam.ServicePrincipal('sagemaker.amazonaws.com')
    # )
    #
    # mysfits_notebook_policy_stm = _iam.PolicyStatement()
    # mysfits_notebook_policy_stm.add_actions('sagemaker:*',
    #                                         'ecr:GetAuthorizationToken',
    #                                         'ecr:GetDownloadUrlForLayer',
    #                                         'ecr:BatchGetImage',
    #                                         'ecr:BatchCheckLayerAvailability',
    #                                         'cloudwatch:PutMetricData',
    #                                         'logs:CreateLogGroup',
    #                                         'logs:CreateLogStream',
    #                                         'logs:DescribeLogStreams',
    #                                         'logs:PutLogEvents',
    #                                         'logs:GetLogEvents',
    #                                         's3:CreateBucket',
    #                                         's3:ListBucket',
    #                                         's3:GetBucketLocation',
    #                                         's3:GetObject',
    #                                         's3:PutObject',
    #                                         's3:DeleteObject')
    # mysfits_notebook_policy_stm.add_all_resources()
    #
    # mysfits_notebook_policy_passrole_stm = _iam.PolicyStatement()
    # mysfits_notebook_policy_passrole_stm.add_actions('iam:PassRole')
    # mysfits_notebook_policy_passrole_stm.add_all_resources()
    # mysfits_notebook_policy_passrole_stm.add_condition(
    #     'StringEquals',
    #     {
    #         'iam:PassedToService': 'sagemaker.amazonaws.com',
    #     }
    # )
    #
    # _iam.Policy(
    #     self, 'MysfitsNotebookPolicy',
    #     statements=[
    #         mysfits_notebook_policy_stm,
    #         mysfits_notebook_policy_passrole_stm
    #     ],
    #     roles=[mysfits_notebook_role]
    # )
    #
    # # Create notebook
    # notebook_instance = sagemaker.CfnNotebookInstance(
    #     self, 'MythicalMysfits-SageMaker-Notebook',
    #     instance_type='ml.t2.medium',
    #     role_arn=mysfits_notebook_role.role_arn
    # )

    # Create the recommendations lambda function with its policy for use
    # with the inference endpoint.  The policy is scoped to the single
    # action the function needs (sagemaker:InvokeEndpoint) but on all
    # resources, since the endpoint ARN is not known at synth time.
    recommendations_lambda_function_policy_stm = _iam.PolicyStatement()
    recommendations_lambda_function_policy_stm.add_actions(
        'sagemaker:InvokeEndpoint')
    recommendations_lambda_function_policy_stm.add_all_resources()

    # NOTE(review): the Python 3.6 Lambda runtime is deprecated — consider
    # upgrading the handler code and runtime together.
    mysfits_recommendations = _lambda.Function(
        self, 'RecommendationsFunction',
        handler="recommendations.recommend",
        runtime=_lambda.Runtime.PYTHON_3_6,
        description=
        'A microservice backend to invoke a SageMaker endpoint.',
        memory_size=128,
        code=_lambda.Code.asset('./lambda_recommendations/service'),
        timeout=cdk.Duration.seconds(30),
        initial_policy=[recommendations_lambda_function_policy_stm],
        # tracing=_lambda.Tracing.ACTIVE
    )

    # Create APIGateway with policy: API Gateway assumes this role when
    # invoking the Lambda through the integration below.
    recommendations_api_role = _iam.Role(
        self, 'RecommendationsApiRole',
        assumed_by=_iam.ServicePrincipal('apigateway.amazonaws.com'))

    api_policy = _iam.PolicyStatement()
    api_policy.add_actions("lambda:InvokeFunction")
    api_policy.add_resources(mysfits_recommendations.function_arn)
    api_policy.effect = _iam.Effect.ALLOW
    # Associate policy to role
    _iam.Policy(self, "RecommendationsApiPolicy",
                policy_name="recommendations_api_policy",
                statements=[api_policy],
                roles=[recommendations_api_role])

    # Non-proxy REST API; resources and methods are defined explicitly
    # below rather than forwarding every route to the Lambda.
    api = apigw.LambdaRestApi(
        self, 'APIEndpoint',
        handler=mysfits_recommendations,
        options=apigw.LambdaRestApiProps(
            rest_api_name='RecommendationsAPI',
            deploy_options=apigw.StageOptions(tracing_enabled=True),
            handler=mysfits_recommendations),
        proxy=False)

    # Create methods
    recommendations_integration = apigw.LambdaIntegration(
        mysfits_recommendations,
        credentials_role=recommendations_api_role,
        integration_responses=[
            apigw.IntegrationResponse(
                status_code='200',
                response_templates={"application/json": '{"status":"OK"}'},
                # response_parameters={
                #     "method.response.header.Access-Control-Allow-Headers": "'Content-Type'",
                #     "method.response.header.Access-Control-Allow-Methods": "'OPTIONS,POST'",
                #     "method.response.header.Access-Control-Allow-Origin": "'*'"
                # }
            )
        ],
    )

    recommendations_method = api.root.add_resource('recommendations')

    # POST /recommendations — the actual service call; CORS response
    # headers are declared so the browser accepts the response.
    recommendations_method.add_method(
        'POST',
        recommendations_integration,
        method_responses=[
            apigw.MethodResponse(
                status_code='200',
                response_parameters={
                    'method.response.header.Access-Control-Allow-Headers':
                    True,
                    'method.response.header.Access-Control-Allow-Methods':
                    True,
                    'method.response.header.Access-Control-Allow-Origin':
                    True,
                })
        ],
        authorization_type=apigw.AuthorizationType.NONE)

    # OPTIONS /recommendations — CORS pre-flight served by a mock
    # integration (no Lambda invocation); returns the allow-* headers.
    recommendations_method.add_method(
        'OPTIONS',
        integration=apigw.MockIntegration(integration_responses=[
            apigw.IntegrationResponse(
                status_code='200',
                response_parameters={
                    'method.response.header.Access-Control-Allow-Headers':
                    "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token,X-Amz-User-Agent'",
                    'method.response.header.Access-Control-Allow-Origin':
                    "'*'",
                    'method.response.header.Access-Control-Allow-Credentials':
                    "'false'",
                    'method.response.header.Access-Control-Allow-Methods':
                    "'OPTIONS,GET,PUT,POST,DELETE'",
                })
        ],
                                         passthrough_behavior=apigw.
                                         PassthroughBehavior.NEVER,
                                         request_templates={
                                             "application/json":
                                             '{"statusCode": 200}'
                                         }),
        method_responses=[
            apigw.MethodResponse(
                status_code='200',
                response_parameters={
                    'method.response.header.Access-Control-Allow-Headers':
                    True,
                    'method.response.header.Access-Control-Allow-Methods':
                    True,
                    'method.response.header.Access-Control-Allow-Credentials':
                    True,
                    'method.response.header.Access-Control-Allow-Origin':
                    True
                })
        ])
def create_api_endpoint(self):
    """Provision the HTTP entry point for the stack.

    Creates a Lambda handler that writes to the Kinesis output stream,
    fronts it with a proxy REST API, enables CORS pre-flight on the
    ``/browsers`` resource via a mock integration, and exports the
    gateway URL as a CloudFormation output.
    """
    # Lambda function backing the API; it forwards requests into the
    # stream named by OUTPUT_STREAM.
    handler_code = _lmbd.Code.asset("../api_function")
    endpoint_handler = _lmbd.Function(
        self,
        "api-function",
        function_name=f"{self.prefix}-api-function",
        handler="main.handler",
        runtime=_lmbd.Runtime.PYTHON_3_8,
        code=handler_code,
        timeout=core.Duration.seconds(5),
        memory_size=128,
        environment={"OUTPUT_STREAM": self.stream.stream_name},
    )
    self.stream.grant_read_write(endpoint_handler)

    # Proxy REST API: every route is forwarded to the handler above.
    rest_api = _apg.LambdaRestApi(
        self,
        "api_gateway",
        rest_api_name=f"{self.prefix}_api_gateway",
        handler=endpoint_handler,
        proxy=True,
    )

    # CORS pre-flight on /browsers: a mock integration answers OPTIONS
    # with the allow-* headers, so no Lambda invocation is needed.
    cors_headers = {
        "method.response.header.Access-Control-Allow-Headers":
        "'Content-Type,X-Amz-Date,Authorization,X-API-KEY, X-API-SECRET,X-Amz-Security-Token,X-Amz-User-Agent'",
        "method.response.header.Access-Control-Allow-Origin": "'*'",
        "method.response.header.Access-Control-Allow-Credentials":
        "'false'",
        "method.response.header.Access-Control-Allow-Methods":
        "'OPTIONS,GET,PUT,POST,DELETE'",
    }
    preflight = _apg.MockIntegration(
        integration_responses=[{
            "statusCode": "200",
            "responseParameters": cors_headers,
        }],
        passthrough_behavior=_apg.PassthroughBehavior.WHEN_NO_MATCH,
        request_templates={"application/json": '{"statusCode": 200}'},
    )
    browsers = rest_api.root.add_resource("browsers")
    browsers.add_method(
        "OPTIONS",
        preflight,
        method_responses=[{
            "statusCode": "200",
            # Declare each CORS header so API Gateway passes it through.
            "responseParameters": {
                header: True
                for header in (
                    "method.response.header.Access-Control-Allow-Headers",
                    "method.response.header.Access-Control-Allow-Methods",
                    "method.response.header.Access-Control-Allow-Credentials",
                    "method.response.header.Access-Control-Allow-Origin",
                )
            },
        }],
    )

    # Publish the endpoint URL so other stacks can import it.
    core.CfnOutput(
        self,
        f"{self.prefix}_url",
        value=rest_api.url,
        export_name=f"{self.prefix}_url",
    )
def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:
    """Wild Rydes serverless-workshop stack.

    Provisions:
      * two CodeCommit repos (static site + project code),
      * an Amplify app serving the static site from the first repo,
      * the ``Rides`` DynamoDB table,
      * the request-unicorn Lambda and a regional REST API fronting it
        (POST /ride with a Cognito authorizer, plus CORS OPTIONS),
      * CfnOutputs for the repo URLs, Amplify domain, and API URL.
    """
    super().__init__(scope, construct_id, **kwargs)

    # Create repo for Amplify static site
    amplify_repo = codecommit.Repository(
        self,
        'amplify-wild-rydes-repo',
        repository_name='amplify-wild-rydes',
        description='Repo for the Wild Rydes static site for Amplify')

    # Create repo for holding the code for this project
    app_repo = codecommit.Repository(
        self,
        'app-serverless-workshop-repo',
        repository_name='app-wild-rydes-serverless-workshop',
        description=
        'Repo for project from webapp.serverlessworkshops.io/staticwebhosting/overview/'
    )

    # IAM Role & Policy for Amplify
    amplify_role = iam.Role(
        self,
        'amplify-wild-rydes-role',
        role_name='amplify-wild-rydes-role',
        assumed_by=iam.ServicePrincipal('amplify.amazonaws.com'))

    # Amplify app built from the static-site repo above
    amplify_static_site = amplify.App(
        self,
        'amplify-wild-rydes-site',
        source_code_provider=amplify.CodeCommitSourceCodeProvider(
            repository=amplify_repo),
        description='Wild Rydes Amplify Static Site',
        role=amplify_role,
        app_name='wild-rydes-site')

    amplify_static_site.add_branch("master")

    # Policy is fairly open.
    # Ran into issues when I deployed the cognito user pools through the
    # amplify cli: it creates a new CloudFormation stack and deploys
    # several resources, so broad permissions are granted here.
    iam.Policy(
        self,
        'amplify-wild-rydes-policy',
        roles=[amplify_role],
        statements=[
            iam.PolicyStatement(effect=iam.Effect.ALLOW,
                                actions=['codecommit:GitPull'],
                                resources=[amplify_repo.repository_arn]),
            iam.PolicyStatement(effect=iam.Effect.ALLOW,
                                actions=[
                                    'amplify:GetApp',
                                    'amplify:CreateBackendEnvironment',
                                    'cloudformation:*', 'cognito:*',
                                    'lambda:*', 's3:*', 'iam:*'
                                ],
                                resources=['*'])
        ])

    # DynamoDB
    # removal_policy=core.RemovalPolicy.DESTROY is to ensure it is deleted
    # since this is only a lab.  table_name is required to be Rides; it is
    # configured in the nodejs code that the lambda function runs.
    rides_table = ddb.Table(self,
                            'Table',
                            table_name='Rides',
                            partition_key=ddb.Attribute(
                                name='RideId',
                                type=ddb.AttributeType.STRING),
                            removal_policy=core.RemovalPolicy.DESTROY)

    # Lambda Functions
    request_unicorn_role = iam.Role(
        self,
        'RequestUnicornRole',
        role_name='wild-rydes-lambda-role',
        assumed_by=iam.ServicePrincipal('lambda.amazonaws.com'),
        managed_policies=[
            iam.ManagedPolicy.from_aws_managed_policy_name(
                'service-role/AWSLambdaBasicExecutionRole')
        ])

    # Grant write access to the lambda role
    rides_table.grant_write_data(request_unicorn_role)

    request_unicorn = _lambda.Function(
        self,
        'request-unicorn',
        handler='requestUnicorn.handler',
        runtime=_lambda.Runtime.NODEJS_12_X,
        code=_lambda.AssetCode('request_unicorn'),
        role=request_unicorn_role,
        function_name='request-unicorn-wild-rydes')

    # Rest API
    ride_api_gw = apigw.RestApi(
        self,
        'wild-rydes-apigw',
        rest_api_name='WildRydes',
        endpoint_types=[apigw.EndpointType.REGIONAL])

    # APIGW Lambda Integration
    # proxy enabled for the workshop
    ride_api_gw_lambda_integration = apigw.LambdaIntegration(
        request_unicorn,
        proxy=True,
        integration_responses=[{
            'statusCode': '200',
            'responseParameters': {
                'method.response.header.Access-Control-Allow-Origin':
                "'*'",
            }
        }])

    post_ride_resource = ride_api_gw.root.add_resource('ride')
    post_ride_resource_method = post_ride_resource.add_method(
        'POST',
        ride_api_gw_lambda_integration,
        method_responses=[{
            'statusCode': '200',
            'responseParameters': {
                'method.response.header.Access-Control-Allow-Origin':
                True,
            }
        }])

    # This needs to be created after the Amplify site unless you create
    # the cognito userpool in the cdk.  I went through the Amplify CLI to
    # create the backend.
    # NOTE(review): identity_source is usually
    # 'method.request.header.Authorization' — the extra '.name.' segment
    # looks suspect; confirm before deploying with a real user pool ARN.
    ride_api_gw_authorizer = apigw.CfnAuthorizer(
        self,
        'wild-rydes-apigw-authorizer',
        rest_api_id=ride_api_gw.rest_api_id,
        name='wild-rydes-apigw-authorizer',
        type='COGNITO_USER_POOLS',
        identity_source='method.request.header.name.Authorization',
        identity_validation_expression="Bearer (.*)",
        provider_arns=[
            'arn:aws:cognito-idp:us-east-1:<ACCOUNT_ID>:userpool/<USER_POOL_ID>'
        ])

    # Attach the Cognito authorizer to the POST method via a property
    # override on the underlying CfnMethod:
    # https://github.com/aws/aws-cdk/issues/5618
    post_ride_resource_fix = post_ride_resource_method.node.find_child(
        'Resource')
    post_ride_resource_fix.add_property_override('AuthorizationType',
                                                 'COGNITO_USER_POOLS')
    post_ride_resource_fix.add_property_override(
        'AuthorizerId', {"Ref": ride_api_gw_authorizer.logical_id})

    # Enable CORS for the workshop: OPTIONS pre-flight served by a mock
    # integration (no Lambda invocation).
    post_ride_resource.add_method(
        'OPTIONS',
        apigw.MockIntegration(
            integration_responses=[{
                'statusCode': '200',
                'responseParameters': {
                    'method.response.header.Access-Control-Allow-Headers':
                    "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token'",
                    'method.response.header.Access-Control-Allow-Origin':
                    "'*'",
                    'method.response.header.Access-Control-Allow-Methods':
                    "'POST,OPTIONS'"
                }
            }],
            passthrough_behavior=apigw.PassthroughBehavior.WHEN_NO_MATCH,
            request_templates={"application/json": "{\"statusCode\":200}"}),
        method_responses=[{
            'statusCode': '200',
            'responseParameters': {
                'method.response.header.Access-Control-Allow-Headers':
                True,
                'method.response.header.Access-Control-Allow-Methods':
                True,
                'method.response.header.Access-Control-Allow-Origin':
                True,
            }
        }])

    # Outputs
    core.CfnOutput(self,
                   'amplify-repo-url',
                   value=amplify_repo.repository_clone_url_http)
    core.CfnOutput(self,
                   'app-repo-url',
                   value=app_repo.repository_clone_url_http)
    core.CfnOutput(self,
                   'amplify-default-domain',
                   value=amplify_static_site.default_domain)
    # BUG FIX: the original read `value=request_unicorn_apigw.url` while
    # *assigning* request_unicorn_apigw on the same line — a NameError at
    # synth time.  The intended value is the REST API's invoke URL.
    core.CfnOutput(self,
                   'request-unicorn-apigw',
                   value=ride_api_gw.url)
def __init__(self, scope: core.Construct, _id: str, **kwargs) -> None:
    """Single-Lambda S3 migration stack.

    Provisions the DynamoDB job table, SQS job queue + DLQ, a mock
    "check IP" REST API, the worker Lambda (triggered by SQS, fed by
    S3 object-created notifications), CloudWatch log metric filters,
    a monitoring dashboard, and a DLQ alarm with email notification.
    """
    super().__init__(scope, _id, **kwargs)

    # Job-tracking table, keyed by the S3 object key.
    ddb_file_list = ddb.Table(self, "ddb",
                              partition_key=ddb.Attribute(
                                  name="Key",
                                  type=ddb.AttributeType.STRING),
                              billing_mode=ddb.BillingMode.PAY_PER_REQUEST)

    # Job queue with a dead-letter queue after 100 failed receives.
    sqs_queue_DLQ = sqs.Queue(self, "sqs_DLQ",
                              visibility_timeout=core.Duration.minutes(15),
                              retention_period=core.Duration.days(14))
    sqs_queue = sqs.Queue(self, "sqs_queue",
                          visibility_timeout=core.Duration.minutes(15),
                          retention_period=core.Duration.days(14),
                          dead_letter_queue=sqs.DeadLetterQueue(
                              max_receive_count=100,
                              queue=sqs_queue_DLQ))

    # Mock REST API that simply echoes the caller's source IP; the
    # worker Lambda calls it to discover its egress address.
    checkip = api.RestApi(
        self,
        "lambda-checkip-api",
        cloud_watch_role=True,
        deploy=True,
        description="For Lambda get IP address",
        default_integration=api.MockIntegration(
            integration_responses=[
                api.IntegrationResponse(status_code="200",
                                        response_templates={
                                            "application/json":
                                            "$context.identity.sourceIp"
                                        })
            ],
            request_templates={"application/json": '{"statusCode": 200}'}),
        endpoint_types=[api.EndpointType.REGIONAL])
    checkip.root.add_method("GET",
                            method_responses=[
                                api.MethodResponse(
                                    status_code="200",
                                    response_models={
                                        "application/json":
                                        api.Model.EMPTY_MODEL
                                    })
                            ])

    # Worker Lambda.
    # NOTE(review): passing long-lived credentials through Lambda
    # environment variables exposes them in the console/API — prefer an
    # SSM SecureString parameter (as the serverless variant of this stack
    # does with ssm_parameter_credentials).
    handler = lam.Function(self, "lambdaFunction",
                           code=lam.Code.asset("./lambda"),
                           handler="lambda_function.lambda_handler",
                           runtime=lam.Runtime.PYTHON_3_8,
                           memory_size=1024,
                           timeout=core.Duration.minutes(15),
                           tracing=lam.Tracing.ACTIVE,
                           environment={
                               'table_queue_name': ddb_file_list.table_name,
                               'Des_bucket_default': Des_bucket_default,
                               'Des_prefix_default': Des_prefix_default,
                               'StorageClass': StorageClass,
                               'aws_access_key_id': aws_access_key_id,
                               'aws_secret_access_key': aws_secret_access_key,
                               'aws_access_key_region': aws_access_key_region,
                               'checkip_url': checkip.url
                           })

    ddb_file_list.grant_read_write_data(handler)
    # SQS messages drive the worker.
    handler.add_event_source(SqsEventSource(sqs_queue))

    # New objects landing in this bucket are queued for migration.
    s3bucket = s3.Bucket(self, "s3bucket")
    s3bucket.grant_read(handler)
    s3bucket.add_event_notification(s3.EventType.OBJECT_CREATED,
                                    s3n.SqsDestination(sqs_queue))

    # You can import an existing bucket and grant access to lambda
    # exist_s3bucket = s3.Bucket.from_bucket_name(self, "import_bucket",
    #                                             bucket_name="you_bucket_name")
    # exist_s3bucket.grant_read(handler)
    # But You have to add sqs as imported bucket event notification manually,
    # it doesn't support by CloudFormation.
    # A work around is to add on_cloud_trail_event for the bucket, but that
    # will trigger cloud_trail first.
    # Because the bucket is imported, you must manually create the bucket
    # event trigger to SQS, and set the SQS permission that allows this
    # bucket to trigger it.

    core.CfnOutput(self, "DynamoDB_Table", value=ddb_file_list.table_name)
    core.CfnOutput(self, "SQS_Job_Queue", value=sqs_queue.queue_name)
    core.CfnOutput(self, "SQS_Job_Queue_DLQ", value=sqs_queue_DLQ.queue_name)
    core.CfnOutput(self, "Worker_Lambda_Function",
                   value=handler.function_name)
    core.CfnOutput(self, "New_S3_Bucket", value=s3bucket.bucket_name)

    # Create Lambda logs filter to create network traffic metric.
    # Each filter parses a structured log line emitted by the worker
    # ('--->Complete|Uploading|Downloading <bytes> <key>').
    handler.log_group.add_metric_filter(
        "Complete-bytes",
        metric_name="Complete-bytes",
        metric_namespace="s3_migrate",
        metric_value="$bytes",
        filter_pattern=logs.FilterPattern.literal(
            '[info, date, sn, p="--->Complete", bytes, key]'))
    handler.log_group.add_metric_filter(
        "Uploading-bytes",
        metric_name="Uploading-bytes",
        metric_namespace="s3_migrate",
        metric_value="$bytes",
        filter_pattern=logs.FilterPattern.literal(
            '[info, date, sn, p="--->Uploading", bytes, key]'))
    handler.log_group.add_metric_filter(
        "Downloading-bytes",
        metric_name="Downloading-bytes",
        metric_namespace="s3_migrate",
        metric_value="$bytes",
        filter_pattern=logs.FilterPattern.literal(
            '[info, date, sn, p="--->Downloading", bytes, key]'))
    lambda_metric_Complete = cw.Metric(namespace="s3_migrate",
                                       metric_name="Complete-bytes",
                                       statistic="Sum",
                                       period=core.Duration.minutes(1))
    lambda_metric_Upload = cw.Metric(namespace="s3_migrate",
                                     metric_name="Uploading-bytes",
                                     statistic="Sum",
                                     period=core.Duration.minutes(1))
    lambda_metric_Download = cw.Metric(namespace="s3_migrate",
                                       metric_name="Downloading-bytes",
                                       statistic="Sum",
                                       period=core.Duration.minutes(1))

    # Count ERROR / WARNING log lines as metrics for the dashboard.
    handler.log_group.add_metric_filter(
        "ERROR",
        metric_name="ERROR-Logs",
        metric_namespace="s3_migrate",
        metric_value="1",
        filter_pattern=logs.FilterPattern.literal('"ERROR"'))
    handler.log_group.add_metric_filter(
        "WARNING",
        metric_name="WARNING-Logs",
        metric_namespace="s3_migrate",
        metric_value="1",
        filter_pattern=logs.FilterPattern.literal('"WARNING"'))
    log_metric_ERROR = cw.Metric(namespace="s3_migrate",
                                 metric_name="ERROR-Logs",
                                 statistic="Sum",
                                 period=core.Duration.minutes(1))
    log_metric_WARNING = cw.Metric(namespace="s3_migrate",
                                   metric_name="WARNING-Logs",
                                   statistic="Sum",
                                   period=core.Duration.minutes(1))

    # Dashboard to monitor SQS and Lambda
    board = cw.Dashboard(self, "s3_migrate",
                         dashboard_name="s3_migrate_serverless")

    board.add_widgets(
        cw.GraphWidget(title="Lambda-NETWORK",
                       left=[
                           lambda_metric_Download, lambda_metric_Upload,
                           lambda_metric_Complete
                       ]),
        # TODO: here monitor all lambda concurrency not just the working one.
        # Limitation from CDK.
        # Lambda now supports monitor single lambda concurrency, will change
        # this after CDK support.
        cw.GraphWidget(title="Lambda-all-concurrent",
                       left=[
                           handler.metric_all_concurrent_executions(
                               period=core.Duration.minutes(1))
                       ]),
        cw.GraphWidget(
            title="Lambda-invocations/errors/throttles",
            left=[
                handler.metric_invocations(
                    period=core.Duration.minutes(1)),
                handler.metric_errors(period=core.Duration.minutes(1)),
                handler.metric_throttles(period=core.Duration.minutes(1))
            ]),
        cw.GraphWidget(
            title="Lambda-duration",
            left=[
                handler.metric_duration(period=core.Duration.minutes(1))
            ]),
    )

    board.add_widgets(
        cw.GraphWidget(
            title="SQS-Jobs",
            left=[
                sqs_queue.metric_approximate_number_of_messages_visible(
                    period=core.Duration.minutes(1)),
                sqs_queue.
                metric_approximate_number_of_messages_not_visible(
                    period=core.Duration.minutes(1))
            ]),
        cw.GraphWidget(
            title="SQS-DeadLetterQueue",
            left=[
                sqs_queue_DLQ.
                metric_approximate_number_of_messages_visible(
                    period=core.Duration.minutes(1)),
                sqs_queue_DLQ.
                metric_approximate_number_of_messages_not_visible(
                    period=core.Duration.minutes(1))
            ]),
        cw.GraphWidget(title="ERROR/WARNING Logs",
                       left=[log_metric_ERROR],
                       right=[log_metric_WARNING]),
        cw.SingleValueWidget(
            title="Running/Waiting and Dead Jobs",
            metrics=[
                sqs_queue.
                metric_approximate_number_of_messages_not_visible(
                    period=core.Duration.minutes(1)),
                sqs_queue.metric_approximate_number_of_messages_visible(
                    period=core.Duration.minutes(1)),
                sqs_queue_DLQ.
                metric_approximate_number_of_messages_not_visible(
                    period=core.Duration.minutes(1)),
                sqs_queue_DLQ.
                metric_approximate_number_of_messages_visible(
                    period=core.Duration.minutes(1))
            ],
            height=6))

    # Alarm for queue - DLQ: any visible message in the DLQ fires it.
    alarm_DLQ = cw.Alarm(
        self,
        "SQS_DLQ",
        alarm_name="s3-migration-serverless-SQS Dead Letter Queue",
        metric=sqs_queue_DLQ.metric_approximate_number_of_messages_visible(
        ),
        threshold=0,
        comparison_operator=cw.ComparisonOperator.GREATER_THAN_THRESHOLD,
        evaluation_periods=1,
        datapoints_to_alarm=1)
    alarm_topic = sns.Topic(self, "SQS queue-DLQ has dead letter")
    alarm_topic.add_subscription(
        subscription=sub.EmailSubscription(alarm_email))
    alarm_DLQ.add_alarm_action(action.SnsAction(alarm_topic))

    # Alarm for queue empty, i.e. no visible message and no in-visible message
    # metric_all_message = cw.MathExpression(
    #     expression="a + b",
    #     label="empty_queue_expression",
    #     using_metrics={
    #         "a": sqs_queue.metric_approximate_number_of_messages_visible(),
    #         "b": sqs_queue.metric_approximate_number_of_messages_not_visible()
    #     }
    # )
    # alarm_0 = cw.Alarm(self, "SQSempty",
    #                    alarm_name="SQS queue empty-Serverless",
    #                    metric=metric_all_message,
    #                    threshold=0,
    #                    comparison_operator=cw.ComparisonOperator.LESS_THAN_OR_EQUAL_TO_THRESHOLD,
    #                    evaluation_periods=3,
    #                    datapoints_to_alarm=3,
    #                    treat_missing_data=cw.TreatMissingData.IGNORE
    #                    )
    # alarm_topic = sns.Topic(self, "SQS queue empty-Serverless")
    # alarm_topic.add_subscription(subscription=sub.EmailSubscription(alarm_email))
    # alarm_0.add_alarm_action(action.SnsAction(alarm_topic))
    # core.CfnOutput(self, "Alarm", value="CloudWatch SQS queue empty Alarm for Serverless: " + alarm_email)

    core.CfnOutput(self, "Dashboard",
                   value="CloudWatch Dashboard name s3_migrate_serverless")
    core.CfnOutput(self, "API-checkip", value=checkip.url)
def __init__(self, scope: cdk.Construct, construct_id: str, **kwargs) -> None:
    """Questions microservice stack.

    Wires together:
      * the MysfitsQuestionsTable DynamoDB table with a NEW_IMAGE stream,
      * a PostQuestion Lambda (writes questions to the table),
      * a stream-processor Lambda that publishes each new question to an
        SNS topic (email subscription to ``receiver_email``),
      * a non-proxy LambdaRestApi with a ``/questions`` resource exposing
        POST and a CORS OPTIONS method.
    """
    super().__init__(scope, construct_id, **kwargs)

    # Questions table; the stream feeds the processor Lambda below.
    table = dynamo_db.Table(self, "MysfitsQuestionsTable",
                            table_name="MysfitsQuestionsTable",
                            partition_key=dynamo_db.Attribute(
                                name="QuestionId",
                                type=dynamo_db.AttributeType.STRING),
                            stream=dynamo_db.StreamViewType.NEW_IMAGE)

    # Write-only access for the PostQuestion Lambda, scoped to the table.
    post_question_lambda_function_policy_stm_ddb = _iam.PolicyStatement()
    post_question_lambda_function_policy_stm_ddb.add_actions(
        "dynamodb:PutItem")
    post_question_lambda_function_policy_stm_ddb.add_resources(
        table.table_arn)

    # X-Ray permissions shared by both Lambdas (tracing is ACTIVE).
    lambda_function_policy_stm_xray = _iam.PolicyStatement()
    lambda_function_policy_stm_xray.add_actions(
        "xray:PutTraceSegments", "xray:PutTelemetryRecords",
        "xray:GetSamplingRules", "xray:GetSamplingTargets",
        "xray:GetSamplingStatisticSummaries")
    lambda_function_policy_stm_xray.add_all_resources()

    # Lambda processor function
    # NOTE(review): the Python 3.6 Lambda runtime is deprecated — consider
    # upgrading the handler code and runtime together.
    mysfits_post_question = _lambda.Function(
        self, 'PostQuestionFunction',
        handler="mysfitsPostQuestion.postQuestion",
        runtime=_lambda.Runtime.PYTHON_3_6,
        description=
        'A microservice Lambda function that receives a new question submitted to the MythicalMysfits'
        ' website from a user and inserts it into a DynamoDB database table.',
        memory_size=128,
        code=_lambda.Code.asset('./lambda_questions/PostQuestionsService'),
        timeout=cdk.Duration.seconds(30),
        initial_policy=[
            post_question_lambda_function_policy_stm_ddb,
            lambda_function_policy_stm_xray
        ],
        tracing=_lambda.Tracing.ACTIVE)

    # Admin-notification topic; each new question is emailed out.
    topic = sns.Topic(self, 'Topic',
                      display_name='MythicalMysfitsQuestionsTopic',
                      topic_name='MythicalMysfitsQuestionsTopic')
    topic.add_subscription(subs.EmailSubscription(receiver_email))

    # Publish permission for the stream-processor Lambda.
    post_question_lambda_function_policy_stm_sns = _iam.PolicyStatement()
    post_question_lambda_function_policy_stm_sns.add_actions("sns:Publish")
    post_question_lambda_function_policy_stm_sns.add_resources(
        topic.topic_arn)

    # Consumes the table's stream one record at a time and notifies the
    # site administrator via the SNS topic.
    mysfits_process_questions_stream = _lambda.Function(
        self, 'ProcessQuestionStreamFunction',
        handler="mysfitsProcessStream.processStream",
        runtime=_lambda.Runtime.PYTHON_3_6,
        description=
        'An AWS Lambda function that will process all new questions posted to mythical mysfits'
        ' and notify the site administrator of the question that was asked.',
        memory_size=128,
        code=_lambda.Code.asset(
            './lambda_questions/ProcessQuestionsStream'),
        timeout=cdk.Duration.seconds(30),
        initial_policy=[
            post_question_lambda_function_policy_stm_sns,
            lambda_function_policy_stm_xray
        ],
        environment={'SNS_TOPIC_ARN': topic.topic_arn},
        tracing=_lambda.Tracing.ACTIVE,
        events=[
            event.DynamoEventSource(
                table,
                starting_position=_lambda.StartingPosition.TRIM_HORIZON,
                batch_size=1)
        ])

    # API Gateway assumes this role when invoking the PostQuestion Lambda.
    questions_api_role = _iam.Role(
        self, 'QuestionsApiRole',
        assumed_by=_iam.ServicePrincipal('apigateway.amazonaws.com'))

    api_policy = _iam.PolicyStatement()
    api_policy.add_actions("lambda:InvokeFunction")
    api_policy.add_resources(mysfits_post_question.function_arn)
    api_policy.effect = _iam.Effect.ALLOW
    # Associate policy to role
    _iam.Policy(self, "QuestionsApiPolicy",
                policy_name="questions_api_policy",
                statements=[api_policy],
                roles=[questions_api_role])

    # Create API gateway
    questions_integration = apigw.LambdaIntegration(
        mysfits_post_question,
        credentials_role=questions_api_role,
        integration_responses=[
            apigw.IntegrationResponse(
                status_code='200',
                response_templates={"application/json": '{"status":"OK"}'})
        ],
    )

    # Non-proxy REST API; methods are declared explicitly below.
    api = apigw.LambdaRestApi(
        self, 'APIEndpoint',
        handler=mysfits_post_question,
        options=apigw.LambdaRestApiProps(
            rest_api_name='QuestionsAPI',
            deploy_options=apigw.StageOptions(tracing_enabled=True),
            handler=mysfits_post_question),
        proxy=False)

    questions_method = api.root.add_resource('questions')

    # POST /questions — submit a new question.
    questions_method.add_method(
        'POST', questions_integration,
        method_responses=[apigw.MethodResponse(status_code='200')],
        authorization_type=apigw.AuthorizationType.NONE)

    # OPTIONS /questions — CORS pre-flight served by a mock integration
    # (no Lambda invocation); returns the allow-* headers.
    questions_method.add_method(
        'OPTIONS',
        integration=apigw.MockIntegration(integration_responses=[
            apigw.IntegrationResponse(
                status_code='200',
                response_parameters={
                    'method.response.header.Access-Control-Allow-Headers':
                    "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token,X-Amz-User-Agent'",
                    'method.response.header.Access-Control-Allow-Origin':
                    "'*'",
                    'method.response.header.Access-Control-Allow-Credentials':
                    "'false'",
                    'method.response.header.Access-Control-Allow-Methods':
                    "'OPTIONS,GET,PUT,POST,DELETE'"
                })
        ],
                                         passthrough_behavior=apigw.
                                         PassthroughBehavior.NEVER,
                                         request_templates={
                                             "application/json":
                                             '{"statusCode": 200}'
                                         }),
        method_responses=[
            apigw.MethodResponse(
                status_code='200',
                response_parameters={
                    "method.response.header.Access-Control-Allow-Headers":
                    True,
                    "method.response.header.Access-Control-Allow-Methods":
                    True,
                    "method.response.header.Access-Control-Allow-Credentials":
                    True,
                    "method.response.header.Access-Control-Allow-Origin":
                    True
                })
        ])
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Provision the QuickSight dashboard-embed stack.

    Creates: a private S3 website bucket, an embed-URL Lambda behind an
    API Gateway (with CORS), and a CloudFront distribution fronting the
    bucket with a Lambda@Edge viewer-request authorizer.
    """
    super().__init__(scope, id, **kwargs)
    self.current_dir = os.path.dirname(__file__)

    # Static website bucket; all public access blocked (served via CloudFront only).
    self.website_bucket = s3.Bucket(
        self,
        "qs-embed-bucket",
        bucket_name=f'quicksight-embed-{core.Aws.ACCOUNT_ID}',
        block_public_access=s3.BlockPublicAccess.BLOCK_ALL)

    # Execution role for the embed-URL Lambda: CloudWatch Logs, Secrets Manager
    # read, and the QuickSight embed-URL APIs.
    self.quicksight_embed_lambda_role = iam.Role(
        self,
        'quicksight-embed-lambda-role',
        description='Role for the Quicksight dashboard embed Lambdas',
        role_name='quicksight-embed-lambda-role',
        max_session_duration=core.Duration.seconds(3600),
        assumed_by=iam.ServicePrincipal('lambda.amazonaws.com'),
        inline_policies={
            'AllowAccess':
            iam.PolicyDocument(statements=[
                iam.PolicyStatement(
                    effect=iam.Effect.ALLOW,
                    actions=[
                        'logs:CreateLogGroup', 'logs:CreateLogStream',
                        'logs:PutLogEvents'
                    ],
                    resources=[
                        f'arn:aws:logs:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:*'
                    ]),
                # FIX: the Secrets Manager IAM service prefix is
                # "secretsmanager", not "secrets" — the original action
                # "secrets:GetSecretValue" matches no AWS API, so secret
                # reads were implicitly denied.
                iam.PolicyStatement(
                    effect=iam.Effect.ALLOW,
                    actions=["secretsmanager:GetSecretValue"],
                    resources=[
                        f"arn:aws:secretsmanager:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:secret:*"
                    ]),
                iam.PolicyStatement(effect=iam.Effect.ALLOW,
                                    actions=[
                                        "quicksight:GetDashboardEmbedUrl",
                                        "quicksight:GetAuthCode"
                                    ],
                                    resources=["*"])
            ])
        })

    # Lambda that generates the QuickSight dashboard embed URL.
    # DASHBOARD_ID is a placeholder — must be replaced before deployment.
    self.quicksight_migration_lambda = _lambda.Function(
        self,
        'quicksight-migration-lambda',
        handler='quicksight_embed.lambda_handler',
        runtime=_lambda.Runtime.PYTHON_3_8,
        code=_lambda.Code.from_asset(
            os.path.join(self.current_dir, '../lambda/quicksight_embed/')),
        function_name='quicksight_embed_lambda',
        role=self.quicksight_embed_lambda_role,
        timeout=core.Duration.minutes(3),
        memory_size=512,
        environment={
            'DASHBOARD_ID': 'CHANGEME_DASHBOARD_ID',
            'QUICKSIGHT_USER_ARN':
            f'arn:aws:quicksight:us-east-1:{core.Aws.ACCOUNT_ID}:user/default/quicksight-migration-user'
        })

    # REST API in front of the Lambda (aws-solutions-constructs pattern);
    # open (NONE) auth — access is restricted by the resource policy below.
    self.apigw_lambda = ApiGatewayToLambda(
        self,
        "ApiGatewayToLambdaQSEmbed",
        existing_lambda_obj=self.quicksight_migration_lambda,
        api_gateway_props=apigw.LambdaRestApiProps(
            rest_api_name="quicksight-embed",
            handler=self.quicksight_migration_lambda,
            deploy=True,
            proxy=False,
            default_method_options=apigw.MethodOptions(
                authorization_type=apigw.AuthorizationType.NONE),
            policy=iam.PolicyDocument(statements=[
                iam.PolicyStatement(effect=iam.Effect.ALLOW,
                                    actions=['execute-api:Invoke'],
                                    resources=["execute-api:/prod/*"],
                                    principals=[iam.ArnPrincipal("*")])
            ])))

    # GET /embedurl — non-proxy integration with explicit CORS headers.
    self.embedurl = self.apigw_lambda.api_gateway.root.add_resource(
        "embedurl")
    self.embedurl.add_method(
        "GET",
        method_responses=[{
            'statusCode': '200',
            'responseParameters': {
                'method.response.header.Access-Control-Allow-Headers': True,
                'method.response.header.Access-Control-Allow-Methods': True,
                'method.response.header.Access-Control-Allow-Origin': True
            }
        }],
        integration=apigw.LambdaIntegration(
            self.quicksight_migration_lambda,
            proxy=False,
            integration_responses=[{
                'statusCode': '200',
                'responseTemplates': {
                    "application/json": ""
                },
                'responseParameters': {
                    'method.response.header.Access-Control-Allow-Headers':
                    "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token'",
                    'method.response.header.Access-Control-Allow-Origin':
                    "'*'",
                    'method.response.header.Access-Control-Allow-Methods':
                    "'GET'"
                }
            }]))

    # OPTIONS /embedurl — mock integration serving the CORS preflight.
    self.embedurl.add_method(
        'OPTIONS',
        apigw.MockIntegration(
            integration_responses=[{
                'statusCode': '200',
                'responseParameters': {
                    'method.response.header.Access-Control-Allow-Headers':
                    "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token'",
                    'method.response.header.Access-Control-Allow-Origin':
                    "'*'",
                    'method.response.header.Access-Control-Allow-Methods':
                    "'GET,OPTIONS'"
                }
            }],
            passthrough_behavior=apigw.PassthroughBehavior.WHEN_NO_MATCH,
            request_templates={"application/json": "{\"statusCode\":200}"}),
        method_responses=[{
            'statusCode': '200',
            'responseParameters': {
                'method.response.header.Access-Control-Allow-Headers': True,
                'method.response.header.Access-Control-Allow-Methods': True,
                'method.response.header.Access-Control-Allow-Origin': True
            }
        }])

    # Cloudfront Distribution for authentication
    # Role for the Lambda@Edge authorizer (logs only).
    self.embed_auth_lambda_role = iam.Role(
        self,
        'embed-auth-lambda-role',
        description=
        'Role for the Quicksight dashboard embed authentication Lambda',
        role_name='embed-auth-lambda-role',
        max_session_duration=core.Duration.seconds(3600),
        assumed_by=iam.ServicePrincipal('lambda.amazonaws.com'),
        inline_policies={
            'AllowAccess':
            iam.PolicyDocument(statements=[
                iam.PolicyStatement(
                    effect=iam.Effect.ALLOW,
                    actions=[
                        'logs:CreateLogGroup', 'logs:CreateLogStream',
                        'logs:PutLogEvents'
                    ],
                    resources=[
                        f'arn:aws:logs:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:*'
                    ])
            ])
        })

    # Lambda@Edge viewer-request authorizer (Node.js; Edge requires
    # small memory/timeout limits, hence 128 MB / 5 s).
    self.embed_auth_lambda = _lambda.Function(
        self,
        'embed-auth-lambda',
        handler='index.handler',
        description=
        "A Lambda@Edge function for QuickSight embed authentication via CloudFront Distribution",
        runtime=_lambda.Runtime.NODEJS_14_X,
        code=_lambda.Code.from_asset(
            os.path.join(self.current_dir, '../lambda/embed_auth/')),
        function_name='embed_auth_lambda',
        role=self.embed_auth_lambda_role,
        timeout=core.Duration.seconds(5),
        memory_size=128)

    # CloudFront distribution over the website bucket; the Edge lambda
    # runs on every viewer request (includeBody=True so POST bodies reach it).
    self.embed_auth_dist = cloudfront.Distribution(
        self,
        "embed-auth-dist",
        enabled=True,
        default_root_object="index.html",
        default_behavior=cloudfront.BehaviorOptions(
            origin=origins.S3Origin(self.website_bucket),
            allowed_methods=cloudfront.AllowedMethods.ALLOW_GET_HEAD,
            edge_lambdas=[{
                "functionVersion": self.embed_auth_lambda.current_version,
                "eventType": cloudfront.LambdaEdgeEventType.VIEWER_REQUEST,
                "includeBody": True
            }]))

    # Stack outputs: the embed API endpoint and the CloudFront URL.
    core.CfnOutput(self,
                   "EmbedAPIGatewayURL",
                   value=self.apigw_lambda.api_gateway.url + "embedurl?",
                   description="Embed API GW URL")
    core.CfnOutput(self,
                   "EmbedCloudFrontURL",
                   value="https://" +
                   self.embed_auth_dist.distribution_domain_name,
                   description="CloudFront Distribution URL")
def __init__(self, scope: cdk.Construct, construct_id: str,
             table: dynamo_db.Table, **kwargs) -> None:
    """Provision the click-stream processing stack.

    Pipeline: API Gateway PUT /clicks -> Kinesis Firehose (DirectPut)
    -> enrichment Lambda -> S3 destination bucket.

    :param table: DynamoDB table the enrichment Lambda reads mysfit
        attributes from (GetItem only).
    """
    super().__init__(scope, construct_id, **kwargs)

    # Bucket for the processed stream events
    # --------------------------------------
    clicks_destination_bucket = _s3.Bucket(
        self,
        'Bucket',
        versioned=False  # NOTE(review): flip to True to retain object versions
    )

    # Lambda function for processing the stream
    # -----------------------------------------
    # Policy statement for accessing the DynamoDB table
    lambda_function_policy_stm = _iam.PolicyStatement()
    lambda_function_policy_stm.add_actions('dynamodb:GetItem')
    lambda_function_policy_stm.add_resources(table.table_arn)

    # Lambda processor function
    # NOTE(review): `mysfits_api_url` is not defined in this method or its
    # parameters — presumably a module-level value; verify it resolves at
    # synth time or it will raise NameError.
    mysfits_click_processor = _lambda.Function(
        self,
        'Function',
        handler="streamProcessor.processRecord",
        runtime=_lambda.Runtime.PYTHON_3_6,
        description=
        'An Amazon Kinesis Firehose stream processor that enriches click records to not just '
        'include a mysfitId, but also other attributes that can be analyzed later.',
        memory_size=128,
        code=_lambda.Code.asset('./lambda_streaming_processor'),
        timeout=cdk.Duration.seconds(60),
        initial_policy=[lambda_function_policy_stm],
        environment={'MYSFITS_API_URL': mysfits_api_url})

    # Firehose delivery stream
    # ------------------------
    # Initialize role assumed by the Firehose service itself.
    firehose_delivery_role = _iam.Role(
        self,
        "FirehoseDeliveryRole",
        role_name='FirehoseDeliveryRole',
        assumed_by=_iam.ServicePrincipal('firehose.amazonaws.com'),
    )

    # Statement with access to S3 bucket (bucket itself + all objects in it).
    firehose_delivery_policy_s3_stm = _iam.PolicyStatement()
    firehose_delivery_policy_s3_stm.add_actions(
        "s3:AbortMultipartUpload", "s3:GetBucketLocation", "s3:GetObject",
        "s3:ListBucket", "s3:ListBucketMultipartUploads", "s3:PutObject")
    firehose_delivery_policy_s3_stm.add_resources(
        clicks_destination_bucket.bucket_arn)
    firehose_delivery_policy_s3_stm.add_resources(
        clicks_destination_bucket.arn_for_objects('*'))
    firehose_delivery_policy_s3_stm.effect = _iam.Effect.ALLOW

    # Statement with access to Lambda function (invoke the enrichment processor).
    firehose_delivery_policy_lambda_stm = _iam.PolicyStatement()
    firehose_delivery_policy_lambda_stm.add_actions("lambda:InvokeFunction")
    firehose_delivery_policy_lambda_stm.add_actions(
        "lambda:GetFunctionConfiguration")
    firehose_delivery_policy_lambda_stm.add_resources(
        mysfits_click_processor.function_arn)
    firehose_delivery_policy_lambda_stm.effect = _iam.Effect.ALLOW

    # Add policies to role
    firehose_delivery_role.add_to_policy(firehose_delivery_policy_s3_stm)
    firehose_delivery_role.add_to_policy(
        firehose_delivery_policy_lambda_stm)

    # Create delivery stream: buffers 60 s / 1 MB, pipes each record
    # through the enrichment Lambda, writes uncompressed output under
    # "firehose/" (failures under "errors/").
    mysfits_firehose_to_s3 = kinfire.CfnDeliveryStream(
        self,
        "DeliveryStream",
        delivery_stream_name="DeliveryStream",
        delivery_stream_type="DirectPut",
        extended_s3_destination_configuration=kinfire.CfnDeliveryStream.
        ExtendedS3DestinationConfigurationProperty(
            bucket_arn=clicks_destination_bucket.bucket_arn,
            buffering_hints=kinfire.CfnDeliveryStream.
            BufferingHintsProperty(interval_in_seconds=60, size_in_m_bs=1),
            compression_format="UNCOMPRESSED",
            error_output_prefix="errors/",
            prefix="firehose/",
            processing_configuration=kinfire.CfnDeliveryStream.
            ProcessingConfigurationProperty(
                enabled=True,
                processors=[
                    kinfire.CfnDeliveryStream.ProcessorProperty(
                        type="Lambda",
                        parameters=[
                            kinfire.CfnDeliveryStream.
                            ProcessorParameterProperty(
                                parameter_name="LambdaArn",
                                parameter_value=mysfits_click_processor.
                                function_arn)
                        ])
                ]),
            role_arn=firehose_delivery_role.role_arn,
        ))

    # API Gateway as proxy to the Firehose stream
    # -------------------------------------------
    # Initialize role API Gateway assumes to call firehose:PutRecord.
    click_processing_api_role = _iam.Role(
        self,
        "ClickProcessingApiRole",
        role_name="ClickProcessingApiRole",
        assumed_by=_iam.ServicePrincipal("apigateway.amazonaws.com"))
    api_policy = _iam.PolicyStatement()
    api_policy.add_actions("firehose:PutRecord")
    api_policy.add_resources(mysfits_firehose_to_s3.attr_arn)
    api_policy.effect = _iam.Effect.ALLOW
    # Associate policy to role
    _iam.Policy(self,
                "ClickProcessingApiPolicy",
                policy_name="api_gateway_firehose_proxy_role",
                statements=[api_policy],
                roles=[click_processing_api_role])

    # Create API gateway
    api = apigw.RestApi(self,
                        "APIEndpoint",
                        rest_api_name="ClickProcessingApi",
                        endpoint_types=[apigw.EndpointType.REGIONAL])

    # Add the resource endpoint and the method used to send clicks to Firehose.
    # The VTL request template wraps the incoming JSON body, base64-encoded,
    # into a Firehose PutRecord payload for this delivery stream.
    clicks = api.root.add_resource('clicks')
    clicks.add_method(
        'PUT',
        integration=apigw.AwsIntegration(
            service='firehose',
            integration_http_method='POST',
            action='PutRecord',
            options=apigw.IntegrationOptions(
                connection_type=apigw.ConnectionType.INTERNET,
                credentials_role=click_processing_api_role,
                integration_responses=[
                    apigw.IntegrationResponse(
                        status_code='200',
                        response_templates={
                            "application/json": '{"status":"OK"}'
                        },
                        response_parameters={
                            "method.response.header.Access-Control-Allow-Headers":
                            "'Content-Type'",
                            "method.response.header.Access-Control-Allow-Methods":
                            "'OPTIONS,PUT'",
                            "method.response.header.Access-Control-Allow-Origin":
                            "'*'"
                        })
                ],
                request_parameters={
                    "integration.request.header.Content-Type":
                    "'application/x-amz-json-1.1'"
                },
                request_templates={
                    "application/json":
                    "{ \"DeliveryStreamName\": \"" +
                    mysfits_firehose_to_s3.ref +
                    "\", \"Record\": { \"Data\": \"$util.base64Encode($input.json('$'))\" } }"
                },
            )),
        method_responses=[
            apigw.MethodResponse(
                status_code='200',
                response_parameters={
                    "method.response.header.Access-Control-Allow-Headers":
                    True,
                    "method.response.header.Access-Control-Allow-Methods":
                    True,
                    "method.response.header.Access-Control-Allow-Origin":
                    True
                })
        ])

    # CORS preflight for /clicks via a mock integration (no backend call).
    clicks.add_method(
        'OPTIONS',
        integration=apigw.MockIntegration(integration_responses=[
            apigw.IntegrationResponse(
                status_code='200',
                response_parameters={
                    "method.response.header.Access-Control-Allow-Headers":
                    "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token,X-Amz-User-Agent'",
                    "method.response.header.Access-Control-Allow-Origin":
                    "'*'",
                    "method.response.header.Access-Control-Allow-Credentials":
                    "'false'",
                    "method.response.header.Access-Control-Allow-Methods":
                    "'OPTIONS,GET,PUT,POST,DELETE'"
                })
        ],
                                          passthrough_behavior=apigw.
                                          PassthroughBehavior.NEVER,
                                          request_templates={
                                              "application/json":
                                              '{"statusCode": 200}'
                                          }),
        method_responses=[
            apigw.MethodResponse(
                status_code='200',
                response_parameters={
                    "method.response.header.Access-Control-Allow-Headers":
                    True,
                    "method.response.header.Access-Control-Allow-Methods":
                    True,
                    "method.response.header.Access-Control-Allow-Credentials":
                    True,
                    "method.response.header.Access-Control-Allow-Origin":
                    True
                })
        ])
def __init__(self, scope: core.Construct, id: str, redirect_handler,
             create_handler, **kwargs) -> None:
    """Provision the URL-shortener API stack.

    Exposes POST /shortenUrl (backed by *create_handler*) and
    GET /{id} (backed by *redirect_handler*), each with an OPTIONS
    mock integration for CORS preflight, then maps the API onto the
    custom domain s.cloudopz.co via ACM + Route 53.

    :param redirect_handler: Lambda resolving a short id to its target URL.
    :param create_handler: Lambda creating a new shortened URL.
    """
    super().__init__(scope, id, **kwargs)

    # REST API deployed to an explicit dev stage.
    base_api = api_gw.RestApi(
        scope=self,
        id='ShortenURLAPI',
        rest_api_name='dev-url-shortener-api',
        deploy=True,
        deploy_options=api_gw.StageOptions(stage_name='devShortenURL')
    )

    # shortenUrl
    shorten_url = base_api.root.add_resource('shortenUrl')
    # 200 (empty model, CORS origin header) and 500 (error model) responses.
    shorten_url_responses = [
        api_gw.MethodResponse(
            status_code='200',
            response_models={"application/json": api_gw.Model.EMPTY_MODEL},
            response_parameters={'method.response.header.Access-Control-Allow-Origin': True}
        ),
        api_gw.MethodResponse(
            status_code='500',
            response_models={"application/json": api_gw.Model.ERROR_MODEL},
        )
    ]
    shorten_url_lambda_integration = api_gw.LambdaIntegration(
        handler=create_handler,
        request_templates={"application/json": '{ "statusCode": "200" }'},
        integration_responses=[
            api_gw.IntegrationResponse(
                status_code='200',
                response_templates={'application/json': ''}
            )
        ]
    )
    shorten_url.add_method(
        http_method='POST',
        api_key_required=False,
        integration=shorten_url_lambda_integration,
        method_responses=shorten_url_responses
    )

    # CORS preflight for /shortenUrl — mock integration, no backend call.
    shorten_url_integration_mock = api_gw.MockIntegration(
        request_templates={"application/json": json.dumps({"statusCode": 200})},
        integration_responses=[
            api_gw.IntegrationResponse(
                status_code='200',
                response_templates={'application/json': ''},
                response_parameters={
                    "method.response.header.Access-Control-Allow-Methods": "'OPTIONS,POST'",
                    "method.response.header.Access-Control-Allow-Headers":
                        "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token'",
                    "method.response.header.Access-Control-Allow-Origin": "'*'",
                },
            )
        ],
        passthrough_behavior=api_gw.PassthroughBehavior.WHEN_NO_MATCH
    )
    # NOTE(review): local name has a typo ("reponses"); harmless but worth
    # renaming in a follow-up.
    shorten_url_mock_reponses = api_gw.MethodResponse(
        status_code='200',
        response_models={
            "application/json": api_gw.Model.EMPTY_MODEL
        },
        response_parameters={
            "method.response.header.Access-Control-Allow-Methods": True,
            "method.response.header.Access-Control-Allow-Headers": True,
            "method.response.header.Access-Control-Allow-Origin": True
        }
    )
    shorten_url.add_method(
        http_method='OPTIONS',
        integration=shorten_url_integration_mock,
        method_responses=[shorten_url_mock_reponses]
    )

    # {id}
    id_url = base_api.root.add_resource("{id}")
    id_url_responses = [
        api_gw.MethodResponse(
            status_code='200',
            response_models={"application/json": api_gw.Model.EMPTY_MODEL},
            response_parameters={'method.response.header.Access-Control-Allow-Origin': True}
        )
    ]
    id_url_lambda_integration = api_gw.LambdaIntegration(
        handler=redirect_handler,
        request_templates={"application/json": '{ "statusCode": "200" }'},
        integration_responses=[
            api_gw.IntegrationResponse(
                status_code='200',
                response_templates={'application/json': ''}
            )
        ]
    )
    # NOTE(review): the path parameter on this resource is {id}, yet the
    # declared request parameter is "method.request.path.proxy" — looks like
    # it should be "method.request.path.id"; confirm against the handler.
    id_url.add_method(
        http_method='GET',
        integration=id_url_lambda_integration,
        method_responses=id_url_responses,
        request_parameters={'method.request.path.proxy': True}
    )

    # CORS preflight for /{id}.
    id_url_integration_mock = api_gw.MockIntegration(
        request_templates={"application/json": json.dumps({"statusCode": 200})},
        integration_responses=[
            api_gw.IntegrationResponse(
                status_code='200',
                response_templates={'application/json': ''},
                response_parameters={
                    "method.response.header.Access-Control-Allow-Methods":
                        "'DELETE,GET,HEAD,OPTIONS,PATCH,POST,PUT'",
                    "method.response.header.Access-Control-Allow-Headers":
                        "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token'",
                    "method.response.header.Access-Control-Allow-Origin": "'*'",
                },
            )
        ],
        passthrough_behavior=api_gw.PassthroughBehavior.WHEN_NO_MATCH
    )
    id_url_mock_responses = api_gw.MethodResponse(
        status_code='200',
        response_models={
            "application/json": api_gw.Model.EMPTY_MODEL
        },
        response_parameters={
            "method.response.header.Access-Control-Allow-Methods": True,
            "method.response.header.Access-Control-Allow-Headers": True,
            "method.response.header.Access-Control-Allow-Origin": True
        }
    )
    id_url.add_method(
        http_method='OPTIONS',
        integration=id_url_integration_mock,
        method_responses=[id_url_mock_responses]
    )

    # Create custom domain mapping API
    # NOTE(review): hard-coded, account-specific ACM cert ARN and hosted-zone
    # id below; consider passing these in as parameters/context.
    sel_cert_arn = "arn:aws:acm:ap-northeast-2:111111111111:certificate/b1111bf5-ae1b-1f61-a111-f1d839428f5f"
    apigw_domain = base_api.add_domain_name(
        id='ShortenURLIDCustomDomain',
        certificate=_acm.Certificate.from_certificate_arn(
            scope=self,
            id="ShortenURLIDCert",
            certificate_arn=sel_cert_arn
        ),
        security_policy=api_gw.SecurityPolicy.TLS_1_2,
        domain_name='s.cloudopz.co'
    )
    dev_hosted_zone = 'A11AA1A1A1AAAA'
    hz_dev = _route53.HostedZone.from_hosted_zone_attributes(
        self,
        id="ShortenURLHostedZoneDev",
        hosted_zone_id=dev_hosted_zone,
        zone_name='cloudopz.co')
    # Alias record s.cloudopz.co -> the API Gateway custom domain.
    _route53.ARecord(
        self,
        'ShortenURLRoute53',
        record_name='s',
        zone=hz_dev,
        target=_route53.RecordTarget.from_alias(_route53_target.ApiGatewayDomain(apigw_domain))
    )