コード例 #1
0
    def __init__(self, scope: core.Construct, construct_id: str,
                 **kwargs) -> None:
        """Stack with two PythonFunctions, one consuming a shared db-utils layer."""
        super().__init__(scope, construct_id, **kwargs)

        # Single runtime shared by the layer and both functions.
        runtime = aws_lambda.Runtime.PYTHON_3_8

        # Common db-utils dependencies packaged once as a Lambda layer.
        shared_db_layer = lambda_python.PythonLayerVersion(
            self,
            "DB lib",
            entry="layers/db-utils",
            compatible_runtimes=[runtime],
        )

        # Lambda 1 depends on db_utils, provided via the layer above.
        lambda_python.PythonFunction(
            self,
            "Lambda 1",
            entry="lambdas/lambda_1",
            handler="handler",
            layers=[shared_db_layer],
            memory_size=128,
            timeout=core.Duration.seconds(10),
            runtime=runtime,
        )

        # Lambda 2 has no local dependencies but installs Numpy at deploy time.
        lambda_python.PythonFunction(
            self,
            "Lambda 2",
            entry="lambdas/lambda_2",
            handler="handler",
            memory_size=128,
            timeout=core.Duration.seconds(10),
            runtime=runtime,
        )
コード例 #2
0
    def _create_token_validation_lambda(self) -> aws_lambda.Function:
        """Build the Cognito token-validation Lambda.

        The function receives the user-pool/client ids and region through its
        environment, and is allowed to describe EC2 resources and write
        CloudWatch logs.
        """
        env = {
            "COGNITO_USER_POOL_ID": self.user_pool.user_pool_id,
            "REGION": self.context.region,
            "COGNITO_USER_POOL_CLIENT_ID":
                self.user_pool_client.user_pool_client_id,
        }
        # Broad resource scope ("*") — limited to describe/log actions.
        log_and_describe_policy = iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            actions=[
                "ec2:Describe*",
                "logs:Create*",
                "logs:PutLogEvents",
                "logs:Describe*",
            ],
            resources=["*"],
        )
        return lambda_python.PythonFunction(
            scope=self,
            id="token_validation_lambda",
            function_name=f"orbit-{self.context.name}-token-validation",
            entry=_lambda_path("token_validation"),
            index="index.py",
            handler="handler",
            runtime=aws_lambda.Runtime.PYTHON_3_8,
            timeout=Duration.seconds(5),
            environment=env,
            initial_policy=[log_and_describe_policy],
        )
コード例 #3
0
    def _create_eks_service_lambda(self) -> aws_lambda.Function:
        """Build the EKS service-handler Lambda.

        Grants read-only (List*/Describe*) access restricted to
        orbit-prefixed EKS clusters and nodegroups in this account/region.
        """
        region = self.context.region
        account = self.context.account_id
        eks_read_only = iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            actions=["eks:List*", "eks:Describe*"],
            resources=[
                f"arn:aws:eks:{region}:{account}:cluster/orbit-*",
                f"arn:aws:eks:{region}:{account}:nodegroup/orbit-*/*/*",
            ],
        )
        return lambda_python.PythonFunction(
            scope=self,
            id="eks_service_lambda",
            function_name=f"orbit-{self.context.name}-eks-service-handler",
            entry=_lambda_path("eks_service_handler"),
            index="index.py",
            handler="handler",
            runtime=aws_lambda.Runtime.PYTHON_3_8,
            timeout=Duration.seconds(5),
            environment={"REGION": region},
            initial_policy=[eks_read_only],
        )
コード例 #4
0
    def __init__(self, scope: cdk.Construct, construct_id: str,
                 **kwargs) -> None:
        """Announcements service: a DynamoDB table plus a REST API whose
        /announcements resource exposes GET (read) and POST (create) Lambdas.

        Fix: the create handler was pinned to PYTHON_3_6 while the get
        handler used PYTHON_3_8 — an unexplained inconsistency, and Python
        3.6 is deprecated for Lambda. Both handlers now share one runtime.
        """
        super().__init__(scope, construct_id, **kwargs)

        # Table keyed solely by announcement id.
        announcements_table = aws_dynamodb.Table(
            scope=self,
            id='announcements_table',
            partition_key=aws_dynamodb.Attribute(
                name='id',
                type=aws_dynamodb.AttributeType.STRING,
            ),
        )
        api = aws_apigateway.RestApi(
            scope=self,
            id='announcements_rest_api_gateway',
        )
        announcements_endpoint = api.root.add_resource('announcements')

        # Both handlers are bundled from the same 'lambda' directory and
        # share a single runtime for consistency.
        runtime = aws_lambda.Runtime.PYTHON_3_8
        get_announcements_handler = aws_lambda_python.PythonFunction(
            scope=self,
            id='GetAnnouncementHandler',
            entry='lambda',
            index='get.py',
            handler='handler',
            runtime=runtime,
        )
        create_announcement_handler = aws_lambda_python.PythonFunction(
            scope=self,
            id='CreateAnnouncementHandler',
            entry='lambda',
            index='create.py',
            handler='handler',
            runtime=runtime,  # was PYTHON_3_6 — aligned with the GET handler
        )
        # Least privilege: GET only reads, POST reads and writes.
        announcements_table.grant_read_data(get_announcements_handler)
        announcements_table.grant_read_write_data(create_announcement_handler)
        get_announcements_handler.add_environment(
            'TABLE_NAME', announcements_table.table_name)
        create_announcement_handler.add_environment(
            'TABLE_NAME', announcements_table.table_name)
        get_announcements_integration = aws_apigateway.LambdaIntegration(
            get_announcements_handler)
        create_announcements_integration = aws_apigateway.LambdaIntegration(
            create_announcement_handler)
        announcements_endpoint.add_method('GET', get_announcements_integration)
        announcements_endpoint.add_method('POST',
                                          create_announcements_integration)
コード例 #5
0
    def __init__(self, scope: core.Construct, construct_id: str,
                 **kwargs) -> None:
        """Stack containing one bundled Python Lambda from ./lambda/."""
        super().__init__(scope, construct_id, **kwargs)

        # PythonFunction bundles the entry directory (and its requirements)
        # into the deployment asset automatically.
        bundled_fn = _lambda_python.PythonFunction(
            self,
            'MyPythonLambda',
            runtime=_lambda.Runtime.PYTHON_3_8,
            entry='./lambda/')
コード例 #6
0
    def __init__(self, scope: core.Construct, construct_id: str,
                 bucket_name: str, cloudfront_distribution_id: str,
                 iam_construct: IAMConstruct,
                 secretsmanager_construct: SecretsManagerConstruct,
                 **kwargs) -> None:
        """Credential-rotation Lambda for the AWS iOS SDK CircleCI pipeline.

        All secrets/roles/buckets it needs are passed in via environment
        variables; the execution role comes from *iam_construct*.
        """
        super().__init__(scope, construct_id, **kwargs)

        self.credential_rotator = aws_lambda_python.PythonFunction(
            self,
            "credential_rotation_lambda",
            entry="cdk/credential_rotation/lambda_functions/src",
            index="handler.py",
            runtime=aws_lambda.Runtime.PYTHON_3_7,
            role=iam_construct.lambda_role,
            timeout=core.Duration.minutes(5),
            retry_attempts=1,
            description=
            "Credential rotation script for the AWS iOS SDK CircleCI pipeline",
            current_version_options=aws_lambda.VersionOptions(
                removal_policy=core.RemovalPolicy.DESTROY),
        )

        # Every configuration value the rotation script reads at runtime,
        # applied in one pass instead of eight separate call sites.
        env_entries = (
            (lambda_constants.CIRCLE_CI_IOS_SDK_API_TOKEN_ENV,
             secretsmanager_construct.circleci_aws_ios_sdk_api_key
             .secret_full_arn),
            (lambda_constants.CIRCLE_CI_IOS_SDK_SPM_API_TOKEN_ENV,
             secretsmanager_construct.circleci_aws_ios_sdk_spm_api_key
             .secret_full_arn),
            (lambda_constants.GITHUB_CREDENTIALS_SECRET_ENV,
             secretsmanager_construct.github_release_api_key.secret_full_arn),
            (lambda_constants.IAM_ROLE_ENV,
             iam_construct.circleci_release_role.role_arn),
            (lambda_constants.IAM_USERNAME_ENV,
             iam_construct.circleci_user.user_name),
            (lambda_constants.GITHUB_PROJECT_PATH_ENV,
             "aws-amplify/aws-sdk-ios"),
            (lambda_constants.RELEASE_BUCKET_NAME_ENV, bucket_name),
            (lambda_constants.RELEASE_CLOUDFRONT_DISTRIBUTION_ID_ENV,
             cloudfront_distribution_id),
        )
        for env_key, env_value in env_entries:
            self.credential_rotator.add_environment(env_key, env_value)
コード例 #7
0
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """REGIONAL HelloWorld REST API backed by one Python Lambda.

        Publishes ``self.target_api`` and ``self.resource_arn`` (the stage
        ARN of the deployed API) for consumers such as a WAF association.
        """
        super().__init__(scope, id, **kwargs)

        api_log_group = cwlogs.LogGroup(self, "HelloWorldAPILogs")

        # Stage-wide access logging plus throttling; the '/*/*' method path
        # applies to every resource and every HTTP method.
        stage_options = {
            "access_log_destination":
            apigw.LogGroupLogDestination(api_log_group),
            "access_log_format": apigw.AccessLogFormat.clf(),
            "method_options": {
                "/*/*": {
                    "throttling_rate_limit": 100,
                    "throttling_burst_limit": 200,
                }
            },
        }
        self.target_api = apigw.RestApi(
            self,
            'HelloWorldAPI',
            rest_api_name='HelloWorld',
            endpoint_types=[apigw.EndpointType.REGIONAL],
            deploy_options=stage_options)

        hello_world = py_lambda.PythonFunction(
            self,
            "HelloWorld",
            entry='thewafapigateway/lambda_fns',
            index='helloworld.py',
            handler='lambda_handler',
            description='Helloworld',
            timeout=core.Duration.seconds(60))

        # Non-proxy integration with a CORS-friendly 200 response mapping.
        hello_integration = apigw.LambdaIntegration(
            hello_world,
            proxy=False,
            integration_responses=[{
                'statusCode': '200',
                'responseParameters': {
                    'method.response.header.Access-Control-Allow-Origin':
                    "'*'",
                }
            }])
        entity = self.target_api.root.add_resource('helloworld')
        entity.add_method(
            'GET',
            hello_integration,
            method_responses=[{
                'statusCode': '200',
                'responseParameters': {
                    'method.response.header.Access-Control-Allow-Origin': True,
                }
            }])

        # NOTE(review): region is hard-coded to ap-southeast-2 rather than
        # taken from the stack — confirm this is intentional.
        self.resource_arn = f"arn:aws:apigateway:ap-southeast-2::/restapis/{self.target_api.rest_api_id}/stages/{self.target_api.deployment_stage.stage_name}"
コード例 #8
0
    def __init__(self, scope: core.Construct, construct_id: str,
                 **kwargs) -> None:
        """Stack with a single PythonFunction bundled from ./lambda.

        PythonFunction installs requirements.txt inside a build container
        automatically, so the manual ``lambda_.Function`` + Docker-bundling
        incantation it replaces is unnecessary.
        """
        super().__init__(scope, construct_id, **kwargs)

        my_test_lambda = lp.PythonFunction(self, 'MyTestLambda', entry='lambda')
コード例 #9
0
    def __init__(self, scope: cdk.Construct, construct_id: str,
                 **kwargs) -> None:
        """Doc-builder Lambda paired with the Secrets Manager secret it reads."""
        super().__init__(scope, construct_id, **kwargs)

        # Secret whose ARN is handed to the Lambda via SECRET_ID.
        mysecret = secretsmanager.Secret(self,
                                         "DocBuilderSecret",
                                         secret_name="docbuilder/bigsecret")
        doc_builder = lambda_python.PythonFunction(
            self,
            "DocBuilderLambda",
            runtime=lambda_.Runtime.PYTHON_3_8,
            entry='./resources',
            index='my_lambda.py',
            environment={'SECRET_ID': mysecret.secret_arn})
        # Let the function's execution role fetch the secret value.
        mysecret.grant_read(doc_builder.role)
コード例 #10
0
    def __init__(self, scope: core.Construct, id: str, target_table: ddb.Table, **kwargs) -> None:
        """Feed-scanner Lambda run every five minutes, reading/writing *target_table*."""
        super().__init__(scope, id, **kwargs)

        self.feed_scanner_lambda = pylambda.PythonFunction(
            self,
            "FeedScannerLambda",
            function_name="CloudsatLHR-Feed-Scanner",
            entry="lambdas/feed_scanner/",
            index="app/index.py",
            runtime=lambda_.Runtime.PYTHON_3_7,
        )

        # The scanner both updates and reads back the table.
        target_table.grant_write_data(self.feed_scanner_lambda)
        target_table.grant_read_data(self.feed_scanner_lambda)

        # Fixed five-minute polling schedule targeting the scanner.
        self.five_minute_timer = events.Rule(
            self,
            "FiveMinuteTimer",
            enabled=True,
            schedule=events.Schedule.rate(core.Duration.minutes(5)),
            targets=[targets.LambdaFunction(self.feed_scanner_lambda)],
        )
コード例 #11
0
    def __init__(self, scope: core.Construct, construct_id: str,
                 stream: aws_kinesis.IStream, **kwargs) -> None:
        """Sample device producer Lambda that writes to *stream* once a minute."""
        super().__init__(scope, construct_id, **kwargs)

        producer = aws_lambda_python.PythonFunction(
            self,
            'SampleDeviceProducer',
            entry='stacks/sample_kinesis_stream_producer/producer_lambda',
            index='app.py',
            runtime=aws_lambda.Runtime.PYTHON_3_8,
            timeout=core.Duration.seconds(30))
        stream.grant_write(producer)

        # Tell the producer which stream to write to, and trigger it on a
        # one-minute schedule.
        trigger_payload = RuleTargetInput.from_object(
            {"Stream": stream.stream_name})
        Rule(self,
             'ProducerTriggerEventRule',
             enabled=True,
             schedule=Schedule.rate(Duration.minutes(1)),
             targets=[
                 aws_events_targets.LambdaFunction(handler=producer,
                                                   event=trigger_payload)
             ])
コード例 #12
0
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """WebSocket chat backend.

        Creates a DynamoDB table of live connection ids, four route Lambdas
        ($connect, $disconnect, sendmessage, setusername) and an API Gateway
        v2 WebSocket API with one route per Lambda plus a 'prod' stage.

        Bug fixes in this revision:
        * the table read/write grants following the sendmessage and
          setusername function definitions were mistakenly issued to
          ``connect_function`` (copy-paste) — they now go to the functions
          that actually need the access;
        * the first ``execute-api`` resource ARN in the sendmessage and
          setusername policies contained a spurious ``aws:`` segment
          (``arn:aws:execute-api:aws:<region>...``) and could never match a
          real resource — corrected to the standard ARN form;
        * the CfnStage local no longer shadows the ``stage`` config string;
        * removed a stray debug ``print``.
        """
        super().__init__(scope, id, **kwargs)

        brand = 'a'
        stage = 'dev'
        tablename = 'webchat'

        # One item per live WebSocket connection, keyed by connectionId.
        connectionstable = dynamodb.Table(
            self,
            'connectionsTable',
            billing_mode=dynamodb.BillingMode.PAY_PER_REQUEST,
            removal_policy=core.RemovalPolicy.DESTROY,
            table_name=tablename,
            partition_key=dynamodb.Attribute(
                name="connectionId", type=dynamodb.AttributeType.STRING),
        )

        # Routes are selected by the 'action' field of the incoming body.
        websocketgw = apiv2.CfnApi(
            self,
            'websocket',
            name='SimpleChatWebSocket',
            protocol_type='WEBSOCKET',
            route_selection_expression='$request.body.action')

        # connect function
        connect_function = py_lambda.PythonFunction(
            self,
            "connect_function",
            entry='websocket/api_lambda/connect',  #folder
            index='connect.py',  #file
            handler='lambda_handler',  #function
            description='connect',
            environment={
                'brand': brand,
                'stage': stage,
                'CONNECTION_TABLE_NAME': tablename
            },
            timeout=core.Duration.seconds(60))
        connectionstable.grant_read_write_data(connect_function)

        connect_function_policy = iam.Policy(
            self,
            'connect_policy',
            statements=[
                iam.PolicyStatement(actions=['dynamodb:*'],
                                    resources=[connectionstable.table_arn])
            ],
            roles=[connect_function.role])

        # Allow API Gateway to invoke the route handler.
        connect_function_permission = aws_lambda.CfnPermission(
            self,
            'connectFunctionPermission',
            action='lambda:InvokeFunction',
            function_name=connect_function.function_name,
            principal='apigateway.amazonaws.com')
        connect_function_permission.add_depends_on(websocketgw)

        # disconnect function
        disconnect_function = py_lambda.PythonFunction(
            self,
            "disconnect_function",
            entry='websocket/api_lambda/disconnect',  #folder
            index='disconnect.py',  #file
            handler='lambda_handler',  #function
            description='disconnect',
            environment={
                'brand': brand,
                'stage': stage,
                'CONNECTION_TABLE_NAME': tablename
            },
            timeout=core.Duration.seconds(60))

        disconnect_function_policy = iam.Policy(
            self,
            'disconnect_policy',
            statements=[
                iam.PolicyStatement(actions=['dynamodb:*'],
                                    resources=[connectionstable.table_arn])
            ],
            roles=[disconnect_function.role])

        disconnect_function_permission = aws_lambda.CfnPermission(
            self,
            'disconnectFunctionPermission',
            action='lambda:InvokeFunction',
            function_name=disconnect_function.function_name,
            principal='apigateway.amazonaws.com')
        connectionstable.grant_read_write_data(disconnect_function)
        disconnect_function_permission.add_depends_on(websocketgw)

        # send message function
        sendmessage_function = py_lambda.PythonFunction(
            self,
            "sendmessage_function",
            entry='websocket/api_lambda/sendmessage',  #folder
            index='sendmessage.py',  #file
            handler='lambda_handler',  #function
            description='sendmessage',
            environment={
                'brand': brand,
                'stage': stage,
                'CONNECTION_TABLE_NAME': tablename
            },
            timeout=core.Duration.seconds(60))
        # Fixed: was granted to connect_function (copy-paste bug).
        connectionstable.grant_read_write_data(sendmessage_function)

        # sendmessage must also post back to connected clients via the
        # @connections management API.
        sendmessage_function_policy = iam.Policy(
            self,
            'sendmessage_policy',
            statements=[
                iam.PolicyStatement(actions=['dynamodb:*'],
                                    resources=[connectionstable.table_arn]),
                iam.PolicyStatement(
                    actions=['execute-api:ManageConnections'],
                    resources=[
                        # Fixed: dropped spurious 'aws:' segment from the ARN.
                        f'arn:aws:execute-api:{self.region}:{self.account}:{websocketgw.ref}/*',
                        f'arn:aws:execute-api:{self.region}:{self.account}:{websocketgw.ref}/prod/POST/@connections/*'
                    ],
                ),
            ],
            roles=[sendmessage_function.role])
        sendmessage_function_permission = aws_lambda.CfnPermission(
            self,
            'sendmessageFunctionPermission',
            action='lambda:InvokeFunction',
            function_name=sendmessage_function.function_name,
            principal='apigateway.amazonaws.com')
        sendmessage_function_permission.add_depends_on(websocketgw)

        # set username function
        setusername_function = py_lambda.PythonFunction(
            self,
            "setusername_function",
            entry='websocket/api_lambda/setusername',  #folder
            index='setusername.py',  #file
            handler='lambda_handler',  #function
            description='setusername',
            environment={
                'brand': brand,
                'stage': stage,
                'CONNECTION_TABLE_NAME': tablename
            },
            timeout=core.Duration.seconds(60))
        # Fixed: was granted to connect_function (copy-paste bug).
        connectionstable.grant_read_write_data(setusername_function)

        setusername_function_policy = iam.Policy(
            self,
            'setusername_policy',
            statements=[
                iam.PolicyStatement(actions=['dynamodb:*'],
                                    resources=[connectionstable.table_arn]),
                iam.PolicyStatement(
                    actions=['execute-api:ManageConnections'],
                    resources=[
                        # Fixed: dropped spurious 'aws:' segment from the ARN.
                        f'arn:aws:execute-api:{self.region}:{self.account}:{websocketgw.ref}/*',
                        f'arn:aws:execute-api:{self.region}:{self.account}:{websocketgw.ref}/prod/POST/@connections/*'
                    ],
                ),
            ],
            roles=[setusername_function.role])
        setusername_function_permission = aws_lambda.CfnPermission(
            self,
            'setusernameFunctionPermission',
            action='lambda:InvokeFunction',
            function_name=setusername_function.function_name,
            principal='apigateway.amazonaws.com')
        setusername_function_permission.add_depends_on(websocketgw)

        # Connect route
        connect_integration = apiv2.CfnIntegration(
            self,
            'ConnectIntegration',
            api_id=websocketgw.ref,
            description='Connect Integration',
            integration_type='AWS_PROXY',
            integration_uri=
            f'arn:aws:apigateway:{self.region}:lambda:path/2015-03-31/functions/{connect_function.function_arn}/invocations'
        )
        connect_route = apiv2.CfnRoute(self,
                                       'connectRoute',
                                       api_id=websocketgw.ref,
                                       route_key='$connect',
                                       authorization_type='NONE',
                                       operation_name='ConnectRoute',
                                       target='integrations/' +
                                       connect_integration.ref)

        # Disconnect route
        disconnect_integration = apiv2.CfnIntegration(
            self,
            'disConnectIntegration',
            api_id=websocketgw.ref,
            description='disConnect Integration',
            integration_type='AWS_PROXY',
            integration_uri=
            f'arn:aws:apigateway:{self.region}:lambda:path/2015-03-31/functions/{disconnect_function.function_arn}/invocations'
        )
        disconnect_route = apiv2.CfnRoute(self,
                                          'disconnectRoute',
                                          api_id=websocketgw.ref,
                                          route_key='$disconnect',
                                          authorization_type='NONE',
                                          operation_name='DisconnectRoute',
                                          target='integrations/' +
                                          disconnect_integration.ref)

        # Send route
        sendmessage_integration = apiv2.CfnIntegration(
            self,
            'sendMessageIntegration',
            api_id=websocketgw.ref,
            description='sendmessage Integration',
            integration_type='AWS_PROXY',
            integration_uri=
            f'arn:aws:apigateway:{self.region}:lambda:path/2015-03-31/functions/{sendmessage_function.function_arn}/invocations'
        )
        sendmessage_route = apiv2.CfnRoute(self,
                                           'sendRoute',
                                           api_id=websocketgw.ref,
                                           route_key='sendmessage',
                                           authorization_type='NONE',
                                           operation_name='SendRoute',
                                           target='integrations/' +
                                           sendmessage_integration.ref)

        # Set username route
        setusername_integration = apiv2.CfnIntegration(
            self,
            'setUsernameIntegration',
            api_id=websocketgw.ref,
            description='setusername Integration',
            integration_type='AWS_PROXY',
            integration_uri=
            f'arn:aws:apigateway:{self.region}:lambda:path/2015-03-31/functions/{setusername_function.function_arn}/invocations'
        )
        setusername_route = apiv2.CfnRoute(self,
                                           'setUsernameRoute',
                                           api_id=websocketgw.ref,
                                           route_key='setusername',
                                           authorization_type='NONE',
                                           operation_name='SetUsernameRoute',
                                           target='integrations/' +
                                           setusername_integration.ref)

        # Deployment must wait for every route to exist.
        deployment = apiv2.CfnDeployment(
            self,
            'Deployment',
            api_id=websocketgw.ref,
        )
        deployment.add_depends_on(sendmessage_route)
        deployment.add_depends_on(setusername_route)
        deployment.add_depends_on(connect_route)
        deployment.add_depends_on(disconnect_route)

        # NOTE(review): the stage is not linked to the deployment
        # (deployment_id commented out in the original) — confirm whether
        # the deployment should be attached here.
        prod_stage = apiv2.CfnStage(
            self,
            'stage',
            stage_name='prod',
            description='prod stage',
            # deployment_id= deployment.ref,
            api_id=websocketgw.ref,
        )

        core.CfnOutput(
            self,
            'WebSocketURI',
            value=
            f'wss://{websocketgw.ref}.execute-api.{self.region}.amazonaws.com/prod',
            description='URI of websocket')
コード例 #13
0
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # ------ Necessary Roles ------
        roles = IamRole(
            self, 'IamRoles'
        )
        

        # ------ S3 Buckets ------
        # Create Athena bucket
        athena_bucket = _s3.Bucket(self, "AthenaBucket",
            removal_policy=core.RemovalPolicy.DESTROY
        )
        # Create Forecast bucket
        forecast_bucket = _s3.Bucket(self, "FoecastBucket",
            removal_policy=core.RemovalPolicy.DESTROY
        )


        # ------ Athena ------ 
        # Config Athena query result output location
        workgroup_prop = _athena.CfnWorkGroup.WorkGroupConfigurationProperty(
            result_configuration=_athena.CfnWorkGroup.ResultConfigurationProperty(
                output_location="s3://"+athena_bucket.bucket_name
            )
        )
        # Create Athena workgroup
        athena_workgroup = _athena.CfnWorkGroup(
            self, 'ForecastGroup',
            name='ForecastGroup', 
            recursive_delete_option=True, 
            state='ENABLED', 
            work_group_configuration=workgroup_prop
        )
            
    
        # ------ SNS Topic ------
        topic = sns.Topic(
            self, 'NotificationTopic',
            display_name='StepsTopic'
        )
        # SNS email subscription. Get the email address from context value(cdk.json)
        topic.add_subscription(subs.EmailSubscription(self.node.try_get_context('my_email')))
         

        # ------ Layers ------
        shared_layer = _lambda.LayerVersion(
            self, 'LambdaLayer',
            layer_version_name='testfolderlayer',
            code=_lambda.AssetCode('shared/')
        )


        # ------ Lambdas for stepfuctions------
        create_dataset_lambda = _lambda.Function(
            self, 'CreateDataset',
            function_name='CreateDataset',
            code=_lambda.Code.asset('lambdas/createdataset/'),
            handler='dataset.lambda_handler',
            runtime=_lambda.Runtime.PYTHON_3_7,
            role=roles.lambda_role,
            timeout=core.Duration.seconds(30),
            layers=[shared_layer]
        )

        create_dataset_group_lambda = _lambda.Function(
            self, 'CreateDatasetGroup',
            function_name='CreateDatasetGroup',
            code = _lambda.Code.asset('lambdas/createdatasetgroup/'),
            handler = 'datasetgroup.lambda_handler',
            runtime = _lambda.Runtime.PYTHON_3_7,
            role=roles.lambda_role,
            layers=[shared_layer]
        )

        import_data_lambda = _lambda.Function(
            self, 'CreateDatasetImportJob',
            function_name='CreateDatasetImportJob',
            code = _lambda.Code.asset('lambdas/createdatasetimportjob/'),
            handler = 'datasetimport.lambda_handler',
            runtime = _lambda.Runtime.PYTHON_3_7,
            role=roles.lambda_role,
            environment= {
                'FORECAST_ROLE': roles.forecast_role.role_arn
            },
            layers=[shared_layer]
        )

        create_predictor_lambda = _lambda.Function(
            self, 'CreatePredictor',
            function_name='CreatePredictor',
            code = _lambda.Code.asset('lambdas/createpredictor/'),
            handler = 'predictor.lambda_handler',
            runtime = _lambda.Runtime.PYTHON_3_7,
            role=roles.lambda_role,
            layers=[shared_layer]
        )

        create_forecast_lambda = _lambda.Function(
            self, 'CreateForecast',
            function_name='CreateForecast',
            code = _lambda.Code.asset('lambdas/createforecast/'),
            handler = 'forecast.lambda_handler',
            runtime = _lambda.Runtime.PYTHON_3_7,
            role=roles.lambda_role,
            environment= {
                'EXPORT_ROLE': roles.forecast_role.role_arn
            },
            layers=[shared_layer],
            timeout=core.Duration.seconds(30)
        )

        # Deploy lambda with python dependencies from requirements.txt
        update_resources_lambda = _lambda_python.PythonFunction(
            self, 'UpdateResources',
            function_name='UpdateResources',
            entry='lambdas/updateresources/',
            index='update.py',
            handler='lambda_handler',
            runtime = _lambda.Runtime.PYTHON_3_7,
            role=roles.update_role,
            environment= {
                'ATHENA_WORKGROUP': athena_workgroup.name,
                'ATHENA_BUCKET' : athena_bucket.bucket_name
            },
            layers=[shared_layer],
            timeout=core.Duration.seconds(900)
        )
        

        notify_lambda = _lambda.Function(
            self, 'NotifyTopic',
            function_name='NotifyTopic',
            code = _lambda.Code.asset('lambdas/notify/'),
            handler = 'notify.lambda_handler',
            runtime = _lambda.Runtime.PYTHON_3_7,
            role=roles.lambda_role,
            environment= {
                'SNS_TOPIC_ARN': topic.topic_arn
            },
            layers=[shared_layer]
        )

        delete_forecast_lambda = _lambda.Function(
            self, 'DeleteForecast',
            function_name='DeleteForecast',
            code = _lambda.Code.asset('lambdas/deleteforecast/'),
            handler = 'deleteforecast.lambda_handler',
            runtime = _lambda.Runtime.PYTHON_3_7,
            role=roles.lambda_role,
            layers=[shared_layer]
        )

        delete_predctor_lambda = _lambda.Function(
            self, 'DeletePredictor',
            function_name='DeletePredictor',
            code = _lambda.Code.asset('lambdas/deletepredictor/'),
            handler = 'deletepredictor.lambda_handler',
            runtime = _lambda.Runtime.PYTHON_3_7,
            role=roles.lambda_role,
            layers=[shared_layer]
        )

        delete_importjob_lambda = _lambda.Function(
            self, 'DeleteImportJob',
            function_name='DeleteImportJob',
            code = _lambda.Code.asset('lambdas/deletedatasetimport/'),
            handler = 'deletedataset.lambda_handler',
            runtime = _lambda.Runtime.PYTHON_3_7,
            role=roles.lambda_role,
            layers=[shared_layer]
        )


        # ------ StepFunctions ------
        strategy_choice = sfn.Choice(
            self, 'Strategy-Choice'
        )

        success_state = sfn.Succeed(
            self, 'SuccessState'
        )

        failed = sfn_tasks.LambdaInvoke(
            self, 'Failed',
            lambda_function = notify_lambda,
            result_path=None
        ).next(strategy_choice)

        create_dataset_job = sfn_tasks.LambdaInvoke(
            self, 'Create-Dataset', 
            lambda_function = create_dataset_lambda,
            retry_on_service_exceptions=True,
            payload_response_only=True
        )

        self.add_retry_n_catch(create_dataset_job, failed)

        create_dataset_group_job = sfn_tasks.LambdaInvoke(
            self, 'Create-DatasetGroup', 
            lambda_function = create_dataset_group_lambda,
            payload_response_only=True
        )
        self.add_retry_n_catch(create_dataset_group_job, failed)


        import_data_job = sfn_tasks.LambdaInvoke(
            self, 'Import-Data',
            lambda_function = import_data_lambda,
            payload_response_only=True
        )
        self.add_retry_n_catch(import_data_job, failed)

        create_predictor_job = sfn_tasks.LambdaInvoke(
            self, 'Create-Predictor',
            lambda_function = create_predictor_lambda,
            payload_response_only=True
        )
        self.add_retry_n_catch(create_predictor_job, failed)

        create_forecast_job = sfn_tasks.LambdaInvoke(
            self, 'Create-Forecast',
            lambda_function = create_forecast_lambda,
            payload_response_only=True
        )
        self.add_retry_n_catch(create_forecast_job, failed)

        update_resources_job = sfn_tasks.LambdaInvoke(
            self, 'Update-Resources',
            lambda_function = update_resources_lambda,
            payload_response_only=True
        )
        self.add_retry_n_catch(update_resources_job, failed)

        notify_success = sfn_tasks.LambdaInvoke(
            self, 'Notify-Success',
            lambda_function = notify_lambda,
            payload_response_only=True
        )

        delete_forecast_job = sfn_tasks.LambdaInvoke(
            self, 'Delete-Forecast',
            lambda_function = delete_forecast_lambda,
            payload_response_only=True
        )
        self.delete_retry(delete_forecast_job)

        delete_predictor_job = sfn_tasks.LambdaInvoke(
            self, 'Delete-Predictor',
            lambda_function = delete_predctor_lambda,
            payload_response_only=True
        )
        self.delete_retry(delete_predictor_job)

        delete_import_job = sfn_tasks.LambdaInvoke(
            self, 'Delete-ImportJob',
            lambda_function = delete_importjob_lambda,
            payload_response_only=True
        )
        self.delete_retry(delete_import_job)
        
        
        definition = create_dataset_job\
            .next(create_dataset_group_job)\
            .next(import_data_job)\
            .next(create_predictor_job)\
            .next(create_forecast_job)\
            .next(update_resources_job)\
            .next(notify_success)\
            .next(strategy_choice.when(sfn.Condition.boolean_equals('$.params.PerformDelete', False), success_state)\
                                .otherwise(delete_forecast_job).afterwards())\
            .next(delete_predictor_job)\
            .next(delete_import_job)
                    
            
        deployt_state_machine = sfn.StateMachine(
            self, 'StateMachine',
            definition = definition
            # role=roles.states_execution_role
        )

        # S3 event trigger lambda
        s3_lambda = _lambda.Function(
            self, 'S3Lambda',
            function_name='S3Lambda',
            code=_lambda.Code.asset('lambdas/s3lambda/'),
            handler='parse.lambda_handler',
            runtime=_lambda.Runtime.PYTHON_3_7,
            role=roles.trigger_role,
            environment= {
                'STEP_FUNCTIONS_ARN': deployt_state_machine.state_machine_arn,
                'PARAMS_FILE': self.node.try_get_context('parameter_file')
            }
        )
        s3_lambda.add_event_source(
            event_src.S3EventSource(
                bucket=forecast_bucket,
                events=[_s3.EventType.OBJECT_CREATED],
                filters=[_s3.NotificationKeyFilter(
                    prefix='train/',
                    suffix='.csv'
                )]
            )
        )

        # CloudFormation output
        core.CfnOutput(
            self, 'StepFunctionsName',
            description='Step Functions Name',
            value=deployt_state_machine.state_machine_name
        )

        core.CfnOutput(
            self, 'ForecastBucketName',
            description='Forecast bucket name to drop you files',
            value=forecast_bucket.bucket_name
        )

        core.CfnOutput(
            self, 'AthenaBucketName',
            description='Athena bucket name to drop your files',
            value=athena_bucket.bucket_name
        )
コード例 #14
0
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """Provision an AWS Config custom rule that checks ALB tagging and,
        optionally, auto-remediates non-compliant load balancers through an
        SSM automation document.

        Reads the context keys ``external_tag_key``, ``external_tag_value``,
        ``internal_tag_key``, ``internal_tag_value`` and
        ``enforce_compliance`` (e.g. from cdk.json or ``cdk -c``).

        :param scope: parent construct (usually the CDK App).
        :param id: logical id of this stack.
        :param kwargs: forwarded to the base Stack.
        """
        super().__init__(scope, id, **kwargs)

        # Contextual variables supplied by the CDK app / CLI.
        external_tag_key = self.node.try_get_context("external_tag_key")
        external_tag_value = self.node.try_get_context("external_tag_value")
        internal_tag_key = self.node.try_get_context("internal_tag_key")
        internal_tag_value = self.node.try_get_context("internal_tag_value")
        enforce_compliance = self.node.try_get_context("enforce_compliance")

        # Config Rule Lambda function that evaluates each ALB's tags.
        lambda_function = lambda_python.PythonFunction(
            self, "ConfigRuleLambdaFunction",
            entry="./lambda",
            handler="lambda_handler",
            index="index.py",
            description="Config Rule to create tags on untagged ALB resources",
            timeout=core.Duration.seconds(10),
            tracing=lambda_.Tracing.ACTIVE,
            environment=dict(
                EXTERNAL_TAG_KEY=external_tag_key,
                EXTERNAL_TAG_VALUE=external_tag_value,
                INTERNAL_TAG_KEY=internal_tag_key,
                INTERNAL_TAG_VALUE=internal_tag_value
            )
        )

        # Allow the Lambda function to describe ELBs and read their tags.
        # The ELB Describe* APIs do not support resource-level scoping,
        # hence resources=["*"].
        lambda_function.add_to_role_policy(
            statement=iam.PolicyStatement(
                actions=[
                    "elasticloadbalancing:DescribeLoadBalancers",
                    "elasticloadbalancing:DescribeTags"
                ],
                effect=iam.Effect.ALLOW,
                resources=["*"],
                sid="AllowELBTagRead"
            )
        )

        # The Config Rule, evaluated on configuration changes.
        config_rule = config.CustomRule(
            self, "ConfigRule",
            lambda_function=lambda_function,
            configuration_changes=True,
            config_rule_name="alb-tag-enforcement",
            description="Checks if ALBs have the appropriate tag associated to them based on their ALB scheme"
        )

        # Scope the rule to only look at ELBv2
        config_rule.scope_to_resource(
            type="AWS::ElasticLoadBalancingV2::LoadBalancer")

        # For readability, define the SSM remediation document externally
        # and read it in here
        with open("./ssm/remediation_document.json", "r") as f:
            ssm_document = json.load(f)
        remediation_document = ssm.CfnDocument(
            self, "SSMRemediationDocument",
            document_type="Automation",
            content=ssm_document
        )

        # Give SSM permission to add the tag when remediation is needed
        remediation_role = iam.Role(
            self, "RemediationRole",
            assumed_by=iam.ServicePrincipal(
                service="ssm.amazonaws.com"
            ),
            description="Allow SSM to update tags on ALBs via a Config Rule remediation",
            inline_policies=dict(
                alb_read_write=iam.PolicyDocument(statements=[
                    iam.PolicyStatement(
                        actions=[
                            "elasticloadbalancing:DescribeLoadBalancers",
                            "elasticloadbalancing:DescribeTags",
                            "elasticloadbalancing:AddTags"
                        ],
                        effect=iam.Effect.ALLOW,
                        resources=["*"],
                        sid="AllowELBTagReadWrite"
                    )
                ])
            )
        )

        # The remediation parameters are identical for automatic and manual
        # remediation, so build the dict once instead of duplicating it in
        # both branches below.
        remediation_parameters = dict(
            ALBArn=dict(ResourceValue=dict(Value="RESOURCE_ID")),
            AutomationAssumeRole=dict(StaticValue=dict(
                Values=[remediation_role.role_arn])),
            ExternalTagKey=dict(StaticValue=dict(
                Values=[external_tag_key])),
            ExternalTagValue=dict(StaticValue=dict(
                Values=[external_tag_value])),
            InternalTagKey=dict(StaticValue=dict(
                Values=[internal_tag_key])),
            InternalTagValue=dict(StaticValue=dict(
                Values=[internal_tag_value]))
        )

        # If enforce compliance is true, automatically remediate
        # non-compliant resources. Otherwise, just report compliant and
        # non-compliant resources, but still allow for manual remediation.
        # str() guards against the context value being absent (None) or a
        # JSON boolean from cdk.json, both of which previously raised
        # AttributeError on .lower(); either now falls back to report-only.
        if str(enforce_compliance).lower() == "true":
            remediation_action = config.CfnRemediationConfiguration(
                self, "ConfigRemediationAction",
                automatic=True,
                config_rule_name=config_rule.config_rule_name,
                parameters=remediation_parameters,
                target_id=remediation_document.ref,
                target_type="SSM_DOCUMENT",
                maximum_automatic_attempts=3,
                retry_attempt_seconds=15
            )
        else:
            remediation_action = config.CfnRemediationConfiguration(
                self, "ConfigRemediationAction",
                automatic=False,
                config_rule_name=config_rule.config_rule_name,
                parameters=remediation_parameters,
                target_id=remediation_document.ref,
                target_type="SSM_DOCUMENT"
            )
コード例 #15
0
    def __init__(self, scope: cdk.Construct, construct_id: str, **kwargs) -> None:
        """Stack that polls the AWS "What's New" feed on a fixed schedule and
        posts new releases to Slack, using a DynamoDB table to remember which
        releases were already sent.

        :param scope: parent construct (usually the CDK App).
        :param construct_id: logical id of this stack.
        :param kwargs: forwarded to the base Stack.
        """
        super().__init__(scope, construct_id, **kwargs)

        """Default values if not specified via context variables from CLI
        logging_level = 'INFO'
        slack_webhook_secret_name = 'aws-to-slack/dev/webhooks'
        """
        # try_get_context returns None when the key is absent, so fall back
        # to the defaults documented above.
        if self.node.try_get_context('logging_level') is None:
            LOGGING_LEVEL = 'INFO'
        else:
            LOGGING_LEVEL = self.node.try_get_context('logging_level')
        if self.node.try_get_context('slack_webhook_secret_name') is None:
            WEBHOOK_SECRET_NAME = 'aws-to-slack/dev/webhooks'
        else:
            WEBHOOK_SECRET_NAME = self.node.try_get_context(
                'slack_webhook_secret_name')

        """Create CloudFormation parameters so we can easily use the
        template this CDK app generates and convert it to a SAM
        application.
        """
        # Each CfnParameter is immediately unwrapped via .value_as_string so
        # the token can be passed straight into the Lambda environment below.
        webhook_secret_name_param = cdk.CfnParameter(
            self, 'WebhookSecretName',
            description=('The name of the Secrets Manager secret '
                         'which stores the Slack webhook URL'),
            type='String',
            default=WEBHOOK_SECRET_NAME,
            allowed_pattern='[a-zA-Z0-9/_+=.@-]+',
            min_length=1,
            max_length=512
        ).value_as_string
        whats_new_rss_feed = cdk.CfnParameter(
            self, 'WhatsNewRSSFeed',
            description='The RSS feed of all AWS new releases',
            type='String',
            default=self.node.try_get_context(
                'whats_new_rss_feed')
        ).value_as_string
        whats_new_search_api = cdk.CfnParameter(
            self, 'WhatsNewSearchAPI',
            description='The search API url of new releases',
            type='String',
            default=self.node.try_get_context(
                'whats_new_search_api')
        ).value_as_string
        logging_level = cdk.CfnParameter(
            self, 'LoggingLevel',
            description='The verbosity of the logs in the Lambda function',
            type='String',
            allowed_values=['INFO', 'ERROR', 'DEBUG', 'WARN'],
            default=LOGGING_LEVEL,
        ).value_as_string

        """DynamoDB table which stores a history of messages sent"""
        # The release URL is the partition key, i.e. one item per release.
        ddb_table = dynamodb.Table(
            self, 'SlackMessageHistory',
            partition_key=dynamodb.Attribute(
                name='url', type=dynamodb.AttributeType.STRING),
            read_capacity=1,
            write_capacity=1
        )

        """Lambda function that queries the AWS What's New RSS feed
        and sends each release to Slack if it has not already been sent.
        """
        new_release_function = lambda_python.PythonFunction(
            self, 'AWSReleasesFunction',
            entry='lambda',
            handler='main',
            index='new_releases.py',
            runtime=lambda_.Runtime.PYTHON_3_8,
            description='Queries https://aws.amazon.com/new/ and sends new release info to a Slack channel via AWS Chatbot',
            environment=dict(
                WHATS_NEW_RSS_FEED=whats_new_rss_feed,
                WHATS_NEW_SEARCH_API=whats_new_search_api,
                WEBHOOK_SECRET_NAME=webhook_secret_name_param,
                DDB_TABLE=ddb_table.table_name,
                LOG_LEVEL=logging_level,
                POWERTOOLS_SERVICE_NAME='aws-to-slack'
            ),
            memory_size=512,
            tracing=lambda_.Tracing.ACTIVE,
            timeout=cdk.Duration.seconds(30),
            log_retention=logs.RetentionDays.SIX_MONTHS
        )
        """Imports the SecretsManager secret which contains the Slack webhook url(s)
        and adds read access to the Lambda execution role
        """
        slack_webhook_urls = secretsmanager.Secret.from_secret_name_v2(
            self, "SlackWebhookURLSecrets",
            secret_name=webhook_secret_name_param
        )
        slack_webhook_urls.grant_read(new_release_function.role)

        """Invoke this function every X minutes"""
        rule = events.Rule(
            self, 'AWSReleaseToSlackRule',
            description='Schedule to invoke Lambda function that sends new AWS releases to Slack',
            schedule=events.Schedule.rate(cdk.Duration.minutes(5))
        )
        rule.add_target(events_targets.LambdaFunction(new_release_function))

        """Grant the Lambda function Query and PutItem access to the DDB table"""
        ddb_table.grant(
            new_release_function,
            'dynamodb:Query',
            'dynamodb:PutItem'
        )
コード例 #16
0
    def __init__(self, scope: core.Construct, construct_id: str,
                 **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        # The code that defines your stack goes here

        repository = codecommit.Repository(
            self,
            "slackops-repository",
            repository_name="slackops-pipeline-repo",
            description="Repo for the SlackOps Pipeline Demo",
        )

        website_bucket = s3.Bucket(self,
                                   "website-bucket",
                                   removal_policy=core.RemovalPolicy.DESTROY,
                                   auto_delete_objects=True,
                                   public_read_access=True,
                                   website_index_document="index.html")

        manual_approval_topic = sns.Topic(
            self,
            "manual-approval-notification",
        )

        artifact_bucket = s3.Bucket(self,
                                    "artifact-bucket",
                                    removal_policy=core.RemovalPolicy.DESTROY)

        source_artifact = codepipeline.Artifact(artifact_name="Source")
        deployment_artifact = codepipeline.Artifact(artifact_name="Deployment")

        pipeline = codepipeline.Pipeline(
            self,
            "slackops-pipeline",
            artifact_bucket=artifact_bucket,
            stages=[
                codepipeline.StageOptions(
                    stage_name="Source",
                    actions=[
                        codepipeline_actions.CodeCommitSourceAction(
                            repository=repository,
                            branch="master",
                            output=source_artifact,
                            action_name="Source")
                    ]),
                codepipeline.StageOptions(
                    stage_name="Build",
                    actions=[
                        codepipeline_actions.CodeBuildAction(
                            input=source_artifact,
                            action_name="Build",
                            project=codebuild.PipelineProject(
                                self,
                                "build-project",
                                build_spec=codebuild.BuildSpec.
                                from_source_filename("buildspec.yml"),
                                environment=codebuild.BuildEnvironment(
                                    build_image=codebuild.LinuxBuildImage.
                                    STANDARD_5_0),
                            ),
                            outputs=[deployment_artifact])
                    ]),
                codepipeline.StageOptions(
                    stage_name=MANUAL_APPROVAL_STAGE_NAME,
                    actions=[
                        codepipeline_actions.ManualApprovalAction(
                            action_name=MANUAL_APPROVAL_ACTION_NAME,
                            additional_information=
                            "Please Approve the Deployment",
                            notification_topic=manual_approval_topic,
                        )
                    ]),
                codepipeline.StageOptions(
                    stage_name="Deploy",
                    actions=[
                        codepipeline_actions.S3DeployAction(
                            bucket=website_bucket,
                            input=deployment_artifact,
                            access_control=s3.BucketAccessControl.PUBLIC_READ,
                            action_name="deploy-to-s3")
                    ])
            ])

        # Build the API Gateway to record the approval or rejection

        rest_api = apigateway.RestApi(self,
                                      "slackops-apigw",
                                      deploy_options=apigateway.StageOptions(
                                          stage_name="prod", ))

        root_resource = rest_api.root.add_resource("v1")

        approval_resource = root_resource.add_resource("approval")

        api_gateway_role = iam.Role(self,
                                    "slackops-apigw-role",
                                    assumed_by=iam.ServicePrincipal(
                                        service="apigateway.amazonaws.com", ))
        api_gateway_role.add_to_policy(
            iam.PolicyStatement(actions=["codepipeline:PutApprovalResult"],
                                resources=[pipeline.pipeline_arn + "/*"]))

        # Double curlies to make str.format work
        mapping_template = """
#set($token = $input.params("token"))
#set($response = $input.params("response"))
{{
   "actionName": "{action_name}",
   "pipelineName": "{pipeline_name}",
   "result": {{ 
      "status": "$response",
      "summary": ""
   }},
   "stageName": "{stage_name}",
   "token": "$token"
}}
        """.format(
            action_name="approve-before-publication",
            pipeline_name=pipeline.pipeline_name,
            stage_name="Approval",
        )

        approval_integration = apigateway.AwsIntegration(
            service="codepipeline",
            action="PutApprovalResult",
            integration_http_method="POST",
            options=apigateway.IntegrationOptions(
                credentials_role=api_gateway_role,
                request_parameters={
                    "integration.request.header.x-amz-target":
                    "'CodePipeline_20150709.PutApprovalResult'",
                    "integration.request.header.Content-Type":
                    "'application/x-amz-json-1.1'",
                },
                passthrough_behavior=apigateway.PassthroughBehavior.NEVER,
                request_templates={"application/json": mapping_template},
                integration_responses=[
                    apigateway.IntegrationResponse(
                        status_code='400',
                        selection_pattern="4\d{2}",
                        response_parameters={
                            'method.response.header.error':
                            'integration.response.body'
                        }),
                    apigateway.IntegrationResponse(
                        status_code='500',
                        selection_pattern="5\d{2}",
                        response_parameters={
                            'method.response.header.error':
                            'integration.response.body'
                        }),
                    apigateway.IntegrationResponse(
                        status_code='200',
                        selection_pattern="2\d{2}",
                        response_parameters={
                            'method.response.header.response':
                            'integration.response.body'
                        }),
                ]))

        approval_method = approval_resource.add_method(
            http_method="GET",
            request_validator=apigateway.RequestValidator(
                self,
                "request-validator",
                rest_api=rest_api,
                request_validator_name="ParamValidator",
                validate_request_parameters=True),
            request_parameters={
                "method.request.querystring.token": True,
                "method.request.querystring.response":
                True,  # Approved / Rejected
            },
            method_responses=[
                apigateway.MethodResponse(
                    status_code='400',
                    response_parameters={'method.response.header.error':
                                         True}),
                apigateway.MethodResponse(
                    status_code='500',
                    response_parameters={'method.response.header.error':
                                         True}),
                apigateway.MethodResponse(
                    status_code='200',
                    response_parameters={
                        'method.response.header.response': True
                    }),
            ],
            integration=approval_integration,
        )

        # Notification mechanism

        ssm_parameter_webhook = ssm.StringParameter(
            self,
            "slackops-webhook-parameter",
            string_value="<replace-me>",
            parameter_name="/slackops/webhook-url")

        notification_lambda = _lambda.PythonFunction(
            self,
            "slackops-notification",
            entry=os.path.join(os.path.dirname(__file__), "..", "src"),
            index="index.py",
            handler="notification_handler",
            environment={
                "WEBHOOK_URL_PARAMETER": ssm_parameter_webhook.parameter_name,
                "API_ENDPOINT": rest_api.url_for_path("/v1/approval"),
            })

        notification_lambda.add_event_source(
            lambda_event_sources.SnsEventSource(topic=manual_approval_topic))

        ssm_parameter_webhook.grant_read(notification_lambda)

        # Outputs

        core.CfnOutput(self,
                       "repositoryHttps",
                       value=repository.repository_clone_url_http)

        core.CfnOutput(self,
                       "repositorySSH",
                       value=repository.repository_clone_url_ssh)

        core.CfnOutput(self,
                       "websiteUrl",
                       value=website_bucket.bucket_website_url)
コード例 #17
0
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """Clickstream analytics stack: fake click data is generated every
        minute, pushed through Kinesis to Firehose (landing in S3 / Glue)
        and to Elasticsearch via Lambda consumers.

        :param scope: parent construct (usually the CDK App).
        :param id: logical id of this stack.
        :param kwargs: forwarded to the base Stack.
        """
        super().__init__(scope, id, **kwargs)

        # Kinesis to lambda: Solutions Construct wiring a "clickstream"
        # stream to an inline Lambda that forwards records to Firehose.
        self.stream_lambda = kinesis_lambda.KinesisStreamsToLambda(
            self,
            'clickstream',
            lambda_function_props=_lambda.FunctionProps(
                runtime=_lambda.Runtime.PYTHON_3_7,
                handler='index.lambda_handler',
                code=_lambda.Code.inline(
                    get_code('send_data_to_firehose.py'))),
            kinesis_stream_props=kinesis.StreamProps(
                stream_name='clickstream',
                retention_period=core.Duration.days(1),
                shard_count=4),
            kinesis_event_source_props=lambda_sources.KinesisEventSourceProps(
                starting_position=_lambda.StartingPosition.TRIM_HORIZON,
                batch_size=1))

        # Lambda to produce data; it writes into the stream above, so it
        # needs read/write on it (granted below).
        self.produce_fake_data = _lambda.Function(
            self,
            'produce_data',
            runtime=_lambda.Runtime.PYTHON_3_7,
            timeout=core.Duration.seconds(90),
            handler='index.lambda_handler',
            code=_lambda.Code.inline(get_code('produce_data.py')),
            environment={
                'STREAM_NAME': self.stream_lambda.kinesis_stream.stream_name
            })
        self.stream_lambda.kinesis_stream.grant_read_write(
            self.produce_fake_data)

        # EventBridge to activate my function above once per minute.
        self.event_rule = events.Rule(
            self,
            'scheduledRule',
            schedule=events.Schedule.expression('rate(1 minute)'))
        self.event_rule.add_target(
            targets.LambdaFunction(self.produce_fake_data))

        # S3 Bucket acting as the data lake; contents are deleted with the
        # stack (demo setup).
        self.bucket = s3.Bucket(self,
                                'data-clicks-lake',
                                removal_policy=core.RemovalPolicy.DESTROY,
                                auto_delete_objects=True)

        # Glue database + table describing the Parquet data Firehose lands
        # under kinesis/ in the bucket. glue_column is a project helper
        # building glue.Column objects.
        self.glue_db_analytical = glue.Database(
            self,
            'analytic_clickstream',
            database_name='clickstream_db',
            location_uri=None,
        )

        self.glue_table_analytical = glue.Table(
            self,
            'analytical-table',
            table_name='analytical-table',
            columns=[
                glue_column('custid', 'int'),
                glue_column('trafficfrom', 'string'),
                glue_column('url', 'string'),
                glue_column('device', 'string'),
                glue_column('touchproduct', 'int'),
                glue_column('trans_timestamp', 'string')
            ],
            database=self.glue_db_analytical,
            data_format=glue.DataFormat.PARQUET,
            bucket=self.bucket,
            s3_prefix='kinesis/',
        )

        # Firehose delivery stream (project construct) reading the Kinesis
        # stream and writing to the bucket using the Glue schema.
        iam_role_firehose_analytical = self.create_firehose_role()
        self.bucket.grant_read_write(iam_role_firehose_analytical)

        firehose_props = FirehoseProps(
            bucket=self.bucket,
            role=iam_role_firehose_analytical,
            stream=self.stream_lambda.kinesis_stream,
            glue_db=self.glue_db_analytical,
            glue_table=self.glue_table_analytical)

        self.firehose = FirehoseLib(self, 'firehose_clickstream',
                                    firehose_props)

        # Elasticsearch domain (project construct).
        self.es_domain = ElasticsearchLib(self,
                                          'ES-clickstream-domain').es_domain

        # Lambda to send data to Elasticsearch; packaged with its own
        # requirements since it needs an ES client library.
        self.send_data_to_elasticsearch = lambda_python.PythonFunction(
            self,
            'clickstream_to_es',
            entry='./analytics_ml_flow/lambda/lambda_with_requirements/',
            handler='handler',
            timeout=core.Duration.seconds(180),
            index='Kinesis_ES.py',
            environment={
                'ES_HOST_HTTP': self.es_domain.domain_endpoint,
                'ES_INDEX': 'clickstream',
                'ES_IND_TYPE': 'transactions',
                'ES_REGION': 'us-west-2',
            })
        self.es_domain.grant_index_read_write('clickstream',
                                              self.send_data_to_elasticsearch)
        self.es_domain.grant_read_write(self.send_data_to_elasticsearch)

        # Second consumer of the same Kinesis stream, feeding ES.
        stream_source = lambda_sources.KinesisEventSource(
            self.stream_lambda.kinesis_stream,
            starting_position=_lambda.StartingPosition.TRIM_HORIZON,
            batch_size=1)

        self.stream_lambda.kinesis_stream.grant_read(
            self.send_data_to_elasticsearch)
        self.send_data_to_elasticsearch.add_event_source(stream_source)

        # Glue Crawler (project construct) cataloguing what lands in S3.
        crawler_role = self.create_crawler_permissions()
        glue_props = GlueCrawlerProps(bucket=self.bucket, role=crawler_role)
        self.glue_crawler = GlueCrawlerLib(self, 'glueCrawler', glue_props)
コード例 #18
0
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """Stack exposing WSGI and ASGI Lambda apps behind one HTTP API,
        plus an AppSync GraphQL notes API backed by a Lambda resolver and a
        DynamoDB table.

        :param scope: parent construct (usually the CDK App).
        :param id: logical id of this stack.
        :param kwargs: forwarded to the base Stack.
        """
        super().__init__(scope, id, **kwargs)

        # WSGI app Lambda and its proxy integration. Payload format 1.0 is
        # used because the WSGI adapter expects the REST-style event shape.
        wsgi_function = lmb_py.PythonFunction(self,
                                              "wsgi-function",
                                              entry="./lambdas/wsgi")

        wsgi_integration = apigw_v2.LambdaProxyIntegration(
            handler=wsgi_function,
            payload_format_version=apigw_v2.PayloadFormatVersion.VERSION_1_0,
        )

        # ASGI app Lambda; uses the default (2.0) payload format.
        asgi_function = lmb_py.PythonFunction(
            self,
            "asgi-function",
            entry="./lambdas/asgi",
        )

        asgi_integration = apigw_v2.LambdaProxyIntegration(
            handler=asgi_function)

        # One HTTP API: ASGI handles everything by default, /wsgi and
        # /wsgi/* are routed to the WSGI function.
        self.http_api = apigw_v2.HttpApi(self,
                                         "http-api",
                                         default_integration=asgi_integration)

        self.http_api.add_routes(
            path="/wsgi",
            methods=[apigw_v2.HttpMethod.GET],
            integration=wsgi_integration,
        )

        self.http_api.add_routes(
            path="/wsgi/{proxy+}",
            methods=[apigw_v2.HttpMethod.GET],
            integration=wsgi_integration,
        )

        self.http_api_url = core.CfnOutput(self,
                                           "RestApiUrl",
                                           value=self.http_api.url)

        # GraphQL notes API with a schema defined on disk.
        self.graphql_api = appsync.GraphqlApi(
            self,
            "graphql-api",
            name="notes-example-api",
            schema=appsync.Schema.from_asset("./graphql/schema.graphql"),
        )

        core.CfnOutput(self, "GraphQLUrl", value=self.graphql_api.graphql_url)

        # NOTE(review): assumes the default API-key authorization mode, so
        # api_key is populated -- verify if the auth config ever changes.
        core.CfnOutput(self, "GraphQlApiKey", value=self.graphql_api.api_key)

        # Single Lambda resolving all four Query/Mutation fields below.
        graphql_handler = lmb_py.PythonFunction(
            self,
            "graphql-handler",
            entry="./lambdas/graphql",
            runtime=lmb.Runtime.PYTHON_3_8,
        )

        data_source = self.graphql_api.add_lambda_data_source(
            "lambdaDatasource", graphql_handler)

        data_source.create_resolver(type_name="Query",
                                    field_name="getNoteById")

        data_source.create_resolver(type_name="Query", field_name="listNotes")

        data_source.create_resolver(type_name="Mutation",
                                    field_name="createNote")

        data_source.create_resolver(type_name="Mutation",
                                    field_name="deleteNote")

        # Notes storage; on-demand billing, keyed by note id.
        dynamo_table = dynamodb.Table(
            self,
            "notes-table",
            billing_mode=dynamodb.BillingMode.PAY_PER_REQUEST,
            partition_key=dynamodb.Attribute(
                name="id", type=dynamodb.AttributeType.STRING),
        )

        dynamo_table.grant_read_write_data(graphql_handler)

        graphql_handler.add_environment("NOTES_TABLE", dynamo_table.table_name)
コード例 #19 (Code example #19)
0
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """Provision a Step Functions polling workflow backed by three Lambdas.

        The state machine submits a job, then loops wait -> poll -> branch:
        a FAILED status fails the execution, SUCCEEDED fetches the final
        status, and anything else re-enters the wait state.
        """
        super().__init__(scope, id, **kwargs)

        # All three functions share one code bundle ("./lambdas/example");
        # only the construct id and handler differ.
        fns = {
            handler_name: aws_lambda_python.PythonFunction(
                self,
                cid,
                entry="./lambdas/example",
                handler=handler_name,
            )
            for cid, handler_name in (
                ("submit-status", "submit_status"),
                ("get-status", "get_status"),
                ("final-status", "final_status"),
            )
        }

        submit_job = tasks.LambdaInvoke(
            self,
            "Submit Job",
            lambda_function=fns["submit_status"],
            payload_response_only=True,
        )

        # Poll interval is data-driven: read from the state's input payload.
        wait_x = sfn.Wait(
            self,
            "Wait X Seconds",
            time=sfn.WaitTime.seconds_path("$.seconds"),
        )

        get_status = tasks.LambdaInvoke(
            self,
            "Get Job Status",
            lambda_function=fns["get_status"],
            payload_response_only=True,
        )

        job_failed = sfn.Fail(
            self,
            "Job Failed",
            cause="AWS Batch Job Failed",
            error="DescribeJob returned FAILED",
        )

        final_status = tasks.LambdaInvoke(
            self,
            "Get Final Job Status",
            lambda_function=fns["final_status"],
            payload_response_only=True,
        )

        # Branch on the reported status; unknown statuses loop back to wait.
        check_complete = sfn.Choice(self, "Job Complete?")
        check_complete.when(
            sfn.Condition.string_equals("$.status", "FAILED"), job_failed)
        check_complete.when(
            sfn.Condition.string_equals("$.status", "SUCCEEDED"), final_status)
        check_complete.otherwise(wait_x)

        definition = submit_job.next(wait_x).next(get_status).next(
            check_complete)

        sfn.StateMachine(
            self,
            "StateMachine",
            definition=definition,
            timeout=core.Duration.minutes(5),
        )
コード例 #20 (Code example #20)
0
    def _create_post_authentication_lambda(self) -> None:
        """Deploy the Cognito post-authentication Lambdas.

        Creates two functions from the same code bundle: one invoked by the
        Cognito user pool after a user authenticates, and a second that
        manages Kubernetes state using an awscli/kubectl/helm layer pulled
        from the AWS Serverless Application Repository.
        """
        k8s_layer_name = f"orbit-{self.context.name}-k8s-base-layer"

        # SAM application that publishes the kubectl/helm tooling as a
        # Lambda layer; its output ARN is consumed below.
        kubectl_layer_app = sam.CfnApplication(
            scope=self,
            id="awscli_kubectl_helm_lambda_layer_sam",
            location=sam.CfnApplication.ApplicationLocationProperty(
                application_id=(
                    "arn:aws:serverlessrepo:us-east-1:903779448426:"
                    "applications/lambda-layer-kubectl"),
                semantic_version="2.0.0",
            ),
            parameters={"LayerName": k8s_layer_name},
        )

        admin_role_arn = cast(str, self.context.toolkit.admin_role_arn)

        post_auth_fn = lambda_python.PythonFunction(
            scope=self,
            id="cognito_post_authentication_lambda",
            function_name=f"orbit-{self.context.name}-post-authentication",
            entry=_lambda_path("cognito_post_authentication"),
            index="index.py",
            handler="handler",
            runtime=aws_lambda.Runtime.PYTHON_3_7,
            timeout=Duration.seconds(300),
            role=iam.Role.from_role_arn(
                scope=self,
                id="cognito-post-auth-role",
                role_arn=admin_role_arn,
            ),
            environment={
                "REGION": self.context.region,
                "ORBIT_ENV": self.context.name,
                "ACCOUNT_ID": self.context.account_id,
            },
            memory_size=128,
        )
        # Resource policy: only this environment's user pool may invoke the
        # post-authentication hook.
        post_auth_fn.add_permission(
            id="cognito_post_auth_resource_policy",
            principal=cast(
                iam.IPrincipal,
                iam.ServicePrincipal("cognito-idp.amazonaws.com")),
            action="lambda:InvokeFunction",
            source_arn=(
                f"arn:aws:cognito-idp:{self.context.region}:"
                f"{self.context.account_id}:"
                f"userpool/{self.user_pool.user_pool_id}"),
        )

        k8s_layer = aws_lambda.LayerVersion.from_layer_version_arn(
            scope=self,
            id="K8sLambdaLayer",
            layer_version_arn=kubectl_layer_app.get_att(
                "Outputs.LayerVersionArn").to_string(),
        )

        lambda_python.PythonFunction(
            scope=self,
            id="cognito_post_authentication_k8s_lambda",
            entry=_lambda_path("cognito_post_authentication"),
            function_name=f"orbit-{self.context.name}-post-auth-k8s-manage",
            index="k8s_manage.py",
            handler="handler",
            runtime=aws_lambda.Runtime.PYTHON_3_7,
            timeout=Duration.seconds(300),
            role=iam.Role.from_role_arn(
                scope=self,
                id="cognito-post-auth-k8s-role",
                role_arn=admin_role_arn,
            ),
            environment={
                "REGION": self.context.region,
                # Layer tool locations appended so kubectl/helm/awscli
                # resolve from the mounted layer at runtime.
                "PATH": ("/var/lang/bin:/usr/local/bin:/usr/bin/:/bin:"
                         "/opt/bin:/opt/awscli:/opt/kubectl:/opt/helm"),
                "ORBIT_ENV": self.context.name,
                "ACCOUNT_ID": self.context.account_id,
                "ROLE_PREFIX": (f"/{self.context.role_prefix}/"
                                if self.context.role_prefix else "/"),
                "ORBIT_API_VERSION": "v1",
                "ORBIT_API_GROUP": "orbit.aws",
            },
            layers=[k8s_layer],
            memory_size=256,
        )
コード例 #21 (Code example #21)
0
    def __init__(self, app: core.App, id: str) -> None:
        """Provision the bgtools stack.

        Deploys a static-asset bucket behind CloudFront, a Flask app as a
        Lambda behind API Gateway (fronted by a second CloudFront
        distribution on a custom domain), and a CloudWatch dashboard.

        Reads deployment settings from ``config.json`` (SECRET_KEY and
        CERTIFICATE_ARN are required) and renders the changelog template
        from the GitHub releases feed at synth time.
        """
        super().__init__(app, id)

        with open("config.json", encoding="utf-8") as f:
            self.config = json.load(f)
        assert (
            "SECRET_KEY"
            in self.config), "Need random SECRET_KEY specified in config.json"
        assert (
            "CERTIFICATE_ARN"
            in self.config), "Need CERTIFICATE_ARN specified in config.json"

        self.lambda_dir = "assets/lambda"
        os.makedirs(os.path.join(self.lambda_dir, "templates", "generated"),
                    exist_ok=True)

        # Fetch the release history so the changelog page can be rendered
        # statically at synth time.
        r = requests.get(
            "https://api.github.com/repos/sumpfork/dominiontabs/releases")
        # Fail the synth loudly on an HTTP error instead of crashing later
        # with an opaque KeyError while reshaping the JSON payload.
        r.raise_for_status()
        changelog = [{
            "url": ch["html_url"],
            "date": dt.datetime.strptime(ch["published_at"][:10],
                                         "%Y-%m-%d").date(),
            "name": ch["name"],
            "tag": ch["tag_name"],
            "description": ch["body"],
        } for ch in r.json()]

        env = Environment(loader=FileSystemLoader("templates"),
                          autoescape=select_autoescape(["html"]))
        t = env.get_template("changelog.html.j2")
        generated_template_path = os.path.join(self.lambda_dir, "templates",
                                               "generated")
        # Start from a clean directory so stale generated templates from a
        # previous synth never ship with the Lambda asset.
        shutil.rmtree(generated_template_path)
        os.mkdir(generated_template_path)

        with open(
                os.path.join(generated_template_path, "changelog.html"),
                "w",
                encoding="utf-8",
        ) as f:
            f.write(t.render(changelog=changelog))

        static_website_bucket = s3.Bucket(
            self,
            "Dominion Divider Generator Site",
        )

        # Static assets get their own distribution; the Flask app links to
        # them via STATIC_WEB_URL below.
        cf_static_dist = cloudfront.Distribution(
            self,
            "StaticCloudfrontDist",
            default_behavior=cloudfront.BehaviorOptions(
                origin=cloudfront_origins.S3Origin(static_website_bucket)),
        )

        s3_deployment.BucketDeployment(
            self,
            "Static Files Deployment",
            sources=[s3_deployment.Source.asset("./static")],
            destination_bucket=static_website_bucket,
            destination_key_prefix="static",
        )

        flask_app = lambda_python.PythonFunction(
            self,
            "DominionDividersFlaskApp",
            entry=self.lambda_dir,
            index="lambda-handlers.py",
            handler="apig_wsgi_handler",
            environment={
                "STATIC_WEB_URL": f"https://{cf_static_dist.domain_name}",
                "FLASK_SECRET_KEY": self.config["SECRET_KEY"],
                "GA_CONFIG": self.config.get("GA_CONFIG", ""),
            },
            timeout=core.Duration.seconds(60),
            memory_size=512,
            runtime=lambda_.Runtime.PYTHON_3_8,
        )
        api = apig.LambdaRestApi(
            self,
            "bgtools-api",
            handler=flask_app,
            binary_media_types=["*/*"],
            # Byte threshold for gzip; explicit int (was the float 10e4).
            minimum_compression_size=100_000,
            deploy_options={
                "method_options": {
                    "/*/*":
                    apig.MethodDeploymentOptions(throttling_rate_limit=10,
                                                 throttling_burst_limit=20)
                }
            },
        )
        cloudfront.Distribution(
            self,
            "BGToolsCloudfrontDist",
            default_behavior=cloudfront.BehaviorOptions(
                origin=cloudfront_origins.HttpOrigin(
                    # api.url is "https://<host>/<stage>/": token index 2 of
                    # the "/" split is the host, index 3 the stage name.
                    core.Fn.select(2, core.Fn.split("/", api.url)),
                    origin_path=core.Fn.join(
                        "",
                        ["/",
                         core.Fn.select(3, core.Fn.split("/", api.url))]),
                ),
                origin_request_policy=cloudfront.OriginRequestPolicy(
                    self,
                    "OriginRequestPolicy",
                    # Forward all cookies so Flask sessions survive the CDN.
                    cookie_behavior=cloudfront.OriginRequestCookieBehavior.all(
                    ),
                ),
                allowed_methods=cloudfront.AllowedMethods.ALLOW_ALL,
            ),
            domain_names=["domdiv.bgtools.net"],
            certificate=acm.Certificate.from_certificate_arn(
                self,
                "cert",
                self.config["CERTIFICATE_ARN"],
            ),
        )

        dashboard = aws_cloudwatch.Dashboard(
            self,
            "bgtools-dashboard",
            dashboard_name="bgtools-prod",
            start="-P1D",
            period_override=aws_cloudwatch.PeriodOverride.INHERIT,
        )

        def _api_metric(metric_name, statistic, color=None):
            # One factory for the dashboard metrics: they all plot the same
            # API and stage at a 30-minute period.
            return aws_cloudwatch.Metric(
                namespace="AWS/ApiGateway",
                metric_name=metric_name,
                dimensions={
                    "ApiName": "bgtools-api",
                    "Stage": api.deployment_stage.stage_name,
                },
                period=core.Duration.minutes(amount=30),
                statistic=statistic,
                color=color,
            )

        dashboard.add_widgets(
            aws_cloudwatch.GraphWidget(
                title="API Gateway Counts",
                width=6,
                height=6,
                left=[
                    _api_metric("5XXError", "Sum", color="#d62728"),
                    _api_metric("4XXError", "Sum", color="#8c564b"),
                    _api_metric("Count", "Sum", color="#2ca02c"),
                ],
            ),
            aws_cloudwatch.GraphWidget(
                title="API Gateway Latencies",
                width=6,
                height=6,
                left=[
                    _api_metric("Latency", "Average"),
                    _api_metric("IntegrationLatency", "Average"),
                ],
            ),
        )
コード例 #22 (Code example #22)
0
    def __init__(
        self,
        scope: cdk.Construct,
        construct_id: str,
        vpc: ec2.Vpc,
        domain: sagemaker.CfnDomain,
        **kwargs,
    ) -> None:
        """Create a custom-resource provider that populates a SageMaker
        Studio user's environment from a git repository.

        The provider's Lambda runs inside *vpc* with the Studio EFS volume
        mounted (via a root access point) so it can write into home
        directories, and exposes its service token as a stack export.

        :param scope: Parent CDK construct.
        :param construct_id: Logical id for this construct.
        :param vpc: VPC that hosts the Studio domain's EFS mount targets.
        :param domain: SageMaker Studio domain whose EFS volume is mounted.
        """
        super().__init__(scope, construct_id, **kwargs)

        studio_domain_id = (domain.attr_domain_id
                            )  # cdk.Fn.import_value("StudioDomainId")

        # Get the security group associated with the EFS volume managed by
        # SageMaker Studio. Looked up at deploy time via an SDK call because
        # Studio creates this SG itself; the filter relies on Studio's
        # "security-group-for-inbound-nfs-<domain-id>" naming convention.
        get_parameter = cr.AwsCustomResource(
            self,
            "GetEfsSgId",
            on_update={  # will also be called for a CREATE event
                "service": "EC2",
                "action": "describeSecurityGroups",
                "parameters": {
                    "Filters": [
                        {"Name": "vpc-id", "Values": [vpc.vpc_id]},
                        {
                            "Name": "group-name",
                            "Values": [
                                f"security-group-for-inbound-nfs-{studio_domain_id}"
                            ],
                        },
                    ]
                },
                # Constant physical id: re-running the lookup is idempotent.
                "physical_resource_id": cr.PhysicalResourceId.of("GetEfsSgId"),
            },
            policy=cr.AwsCustomResourcePolicy.from_sdk_calls(
                resources=cr.AwsCustomResourcePolicy.ANY_RESOURCE
            ),
        )
        sg_name = get_parameter.get_response_field("SecurityGroups.0.GroupId")
        sg_efs = ec2.SecurityGroup.from_security_group_id(
            self, "SG", security_group_id=sg_name)

        # We can now retrieve a handler for the EFS volume. The file-system
        # id comes from a cross-stack export published by the domain stack.
        StudioDomainEfsId = cdk.Fn.import_value("StudioDomainEfsId")
        studio_efs = efs.FileSystem.from_file_system_attributes(
            self,
            "StudioEFS",
            file_system_id=StudioDomainEfsId,
            security_group=sg_efs)

        # Create EFS access point to enable the lambda fn to mount the EFS
        # volume. Root posix user (uid/gid 0) — presumably needed so the
        # function can write into any user's home directory; confirm.
        efs_ap = efs.AccessPoint(
            self,
            "EfsAccessPoint",
            file_system=studio_efs,
            posix_user=efs.PosixUser(gid="0", uid="0"),
        )

        # Function that takes care of setting up the user environment.
        # Git binary comes from a public third-party layer; the EFS volume
        # is mounted at /mnt/efs inside the function.
        self.lambda_fn = lambda_python.PythonFunction(
            self,
            "UserSetupLambdaFn",
            entry="populate_git_fn",
            index="populate_from_git.py",
            handler="on_event",
            vpc=vpc,
            layers=[
                lambda_.LayerVersion.from_layer_version_arn(
                    self,
                    "GitLayer",
                    layer_version_arn=
                    f"arn:aws:lambda:{self.region}:553035198032:layer:git-lambda2:8",
                ),
            ],
            filesystem=lambda_.FileSystem.from_efs_access_point(
                efs_ap, "/mnt/efs"),
            timeout=cdk.Duration.seconds(300),
            initial_policy=[
                iam.PolicyStatement(
                    effect=iam.Effect.ALLOW,
                    actions=[
                        "sagemaker:DescribeUserProfile",
                    ],
                    resources=["*"],
                )
            ],
        )

        # Custom-resource provider wrapping the setup function; consumers
        # reference it through the exported service token below.
        provider = cr.Provider(
            self,
            "Provider",
            on_event_handler=self.lambda_fn,
        )

        cdk.CfnOutput(
            self,
            "StudioUserProviderToken",
            value=provider.service_token,
            description="StudioUserProviderToken",
            export_name="StudioUserProviderToken",
        )

        self.provider = provider