Example #1
    def __init__(self, scope: core.Construct, construct_id: str,
                 **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        queue = sqs.Queue(self,
                          "sqsqueue",
                          visibility_timeout=core.Duration.seconds(300))

        topic = sns.Topic(self, "snstopic")

        topic.add_subscription(subs.SqsSubscription(queue))

        bucket = s3.Bucket(
            self,
            "s3Bucket",
            encryption=s3.BucketEncryption.KMS_MANAGED,
            block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
            removal_policy=core.RemovalPolicy.DESTROY,
        )

        bucket.add_event_notification(s3.EventType.OBJECT_CREATED_PUT,
                                      s3n.SnsDestination(topic))

        s3deploy.BucketDeployment(
            self,
            "DeployFile",
            sources=[s3deploy.Source.asset("./assets")],
            destination_bucket=bucket,
            retain_on_delete=False,
        )
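
A minimal sketch of the module context this snippet assumes (AWS CDK v1; the stack class name is hypothetical, only the aliases are taken from the code above):

# Sketch of the imports and app wiring the __init__ above relies on.
from aws_cdk import (
    core,
    aws_s3 as s3,
    aws_s3_deployment as s3deploy,
    aws_s3_notifications as s3n,
    aws_sns as sns,
    aws_sns_subscriptions as subs,
    aws_sqs as sqs,
)

class EventNotificationStack(core.Stack):  # hypothetical name
    ...  # the __init__ shown above goes here

app = core.App()
EventNotificationStack(app, "event-notification-stack")
app.synth()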

Example #2
    def __init__(
        self,
        scope: core.Construct,
        id: str,
        *,
        target_topic: aws_sns.Topic,
        access_control: typing.Optional["BucketAccessControl"] = None,
        block_public_access: typing.Optional["BlockPublicAccess"] = None,
        bucket_name: typing.Optional[str] = None,
        cors: typing.Optional[typing.List["CorsRule"]] = None,
        encryption: typing.Optional["BucketEncryption"] = None,
        encryption_key: typing.Optional[aws_kms.IKey] = None,
        lifecycle_rules: typing.Optional[typing.List["LifecycleRule"]] = None,
        metrics: typing.Optional[typing.List["BucketMetrics"]] = None,
        public_read_access: typing.Optional[bool] = None,
        removal_policy: typing.Optional[core.RemovalPolicy] = None,
        versioned: typing.Optional[bool] = None,
        website_error_document: typing.Optional[str] = None,
        website_index_document: typing.Optional[str] = None,
        website_redirect: typing.Optional["RedirectTarget"] = None,
        website_routing_rules: typing.Optional[
            typing.List["RoutingRule"]] = None
    ) -> None:
        super().__init__(scope=scope,
                         id=id,
                         access_control=access_control,
                         block_public_access=block_public_access,
                         bucket_name=bucket_name,
                         cors=cors,
                         encryption=encryption,
                         encryption_key=encryption_key,
                         lifecycle_rules=lifecycle_rules,
                         metrics=metrics,
                         public_read_access=public_read_access,
                         removal_policy=removal_policy,
                         versioned=versioned,
                         website_error_document=website_error_document,
                         website_index_document=website_index_document,
                         website_redirect=website_redirect,
                         website_routing_rules=website_routing_rules)

        if target_topic is not None:
            dest = aws_s3_notifications.SnsDestination(target_topic)
            self.add_event_notification(s3.EventType.OBJECT_CREATED, dest)
            self.add_event_notification(s3.EventType.OBJECT_REMOVED, dest)

        self.node.apply_aspect(
            forbid_bucket_with_numbers(bucket_name, type(self)))

        self.add_cors_rule(allowed_methods=[s3.HttpMethods.GET],
                           allowed_origins=['*'])
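
A hedged usage sketch for the subclass above (NotifyingBucket is an assumed name; the class statement itself is not part of the snippet):

        # Hypothetical usage inside a consuming stack's __init__;
        # NotifyingBucket stands in for the s3.Bucket subclass above.
        topic = aws_sns.Topic(self, "ObjectEventsTopic")
        bucket = NotifyingBucket(
            self,
            "NotifyingBucket",
            target_topic=topic,  # fans out OBJECT_CREATED/OBJECT_REMOVED
            encryption=s3.BucketEncryption.KMS_MANAGED,
            removal_policy=core.RemovalPolicy.DESTROY)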
Example #3
    def __init__(self, scope: core.Construct, id: str, **kwargs):
        super().__init__(scope, id, **kwargs)

        src_bucket = aws_s3.Bucket(self,
                                   id='src-bucket',
                                   removal_policy=core.RemovalPolicy.DESTROY)

        new_files_topic = sns.Topic(self, 'NewFileEventNotification')
        src_bucket.add_event_notification(aws_s3.EventType.OBJECT_CREATED,
                                          s3n.SnsDestination(new_files_topic))

        self.input_bucket_sns = new_files_topic
        self.input_bucket_arn = src_bucket.bucket_arn
        print("Input bucket: " + self.input_bucket_arn)
        print("Input bucket SNS: " + self.input_bucket_sns.topic_arn)

Example #4
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Define S3 bucket
        my_bucket = s3.Bucket(self, "ssl-s3-sns-event-raw")

        # Add filters if required
        filter1 = s3.NotificationKeyFilter(prefix="home/")

        # SNS topic
        my_sns = sns.Topic(self,
                           id="my-sns-topic",
                           display_name="my-sns-topic")

        # Create the S3 notification destination, which points to the SNS topic
        notification = notifications.SnsDestination(my_sns)

        # Link S3 and SNS
        my_bucket.add_event_notification(s3.EventType.OBJECT_CREATED,
                                         notification, filter1)

        # Create the SQS queue
        my_sqs = sqs.Queue(self, id="my-queue")

        # Create the SQS subscription to the SNS topic
        subscription = aws_sns_subscriptions.SqsSubscription(my_sqs)

        # Add the subscription to the topic
        my_sns.add_subscription(subscription)

        # Create the Lambda function
        my_lambda = _lambda.Function(self,
                                     "HelloHandler",
                                     runtime=_lambda.Runtime.PYTHON_3_7,
                                     handler="hello.handler",
                                     code=_lambda.Code.asset('lambda'))

        # Create the SNS-to-Lambda subscription
        subscription = aws_sns_subscriptions.LambdaSubscription(my_lambda)

        # Add the Lambda subscription to the topic
        my_sns.add_subscription(subscription)
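
The Lambda source itself is not part of the example; a minimal lambda/hello.py that would satisfy handler="hello.handler" might look like this (a sketch, not the original handler):

# lambda/hello.py -- hypothetical handler for the function above.
import json

def handler(event, context):
    # SNS invokes the function with one or more records; the S3 event
    # is JSON-encoded in the SNS message body.
    for record in event["Records"]:
        s3_event = json.loads(record["Sns"]["Message"])
        print(json.dumps(s3_event))
    return {"statusCode": 200}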

Example #5
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # The code that defines your stack goes here

        ###########################################################################
        # AWS SECRETS MANAGER - Templated secret
        ###########################################################################
        # templated_secret = aws_secretsmanager.Secret(self, "TemplatedSecret",
        #     generate_secret_string=aws_secretsmanager.SecretStringGenerator(
        #         secret_string_template= "{\"username\":\"cleanbox\"}",
        #         generate_string_key="password"
        #     )
        # )
        ###########################################################################
        # CUSTOM CLOUDFORMATION RESOURCE
        ###########################################################################
        # customlambda = aws_lambda.Function(self,'customconfig',
        # handler='customconfig.on_event',
        # runtime=aws_lambda.Runtime.PYTHON_3_7,
        # code=aws_lambda.Code.asset('customconfig'),
        # )

        # customlambda_statement = aws_iam.PolicyStatement(actions=["events:PutRule"], conditions=None, effect=None, not_actions=None, not_principals=None, not_resources=None, principals=None, resources=["*"], sid=None)
        # customlambda.add_to_role_policy(statement=customlambda_statement)

        # my_provider = cr.Provider(self, "MyProvider",
        #     on_event_handler=customlambda,
        #     # is_complete_handler=is_complete, # optional async "waiter"
        #     log_retention=logs.RetentionDays.SIX_MONTHS
        # )

        # CustomResource(self, 'customconfigresource', service_token=my_provider.service_token)

        ###########################################################################
        # AWS LAMBDA FUNCTIONS
        ###########################################################################
        sqs_to_elastic_cloud = aws_lambda.Function(
            self,
            'sqs_to_elastic_cloud',
            handler='sqs_to_elastic_cloud.lambda_handler',
            runtime=aws_lambda.Runtime.PYTHON_3_7,
            code=aws_lambda.Code.asset('sqs_to_elastic_cloud'),
            memory_size=4096,
            timeout=core.Duration.seconds(300),
            log_retention=logs.RetentionDays.ONE_DAY)

        sqs_to_elasticsearch_service = aws_lambda.Function(
            self,
            'sqs_to_elasticsearch_service',
            handler='sqs_to_elasticsearch_service.lambda_handler',
            runtime=aws_lambda.Runtime.PYTHON_3_7,
            code=aws_lambda.Code.asset('sqs_to_elasticsearch_service'),
            memory_size=4096,
            timeout=core.Duration.seconds(300),
            log_retention=logs.RetentionDays.ONE_DAY)

        # sqs_to_elasticsearch_service.add_environment("kinesis_firehose_name", "-")
        # sqs_to_elastic_cloud.add_environment("index_name", "-")

        ###########################################################################
        # AWS LAMBDA PERMISSIONS
        ###########################################################################
        # sqs_to_elasticsearch_service_permission = aws_lambda.Permission(*, principal, action=None, event_source_token=None, scope=None, source_account=None, source_arn=None)

        ###########################################################################
        # AMAZON S3 BUCKETS
        ###########################################################################
        access_log_bucket = aws_s3.Bucket(self, "access_log_bucket")
        kinesis_log_bucket = aws_s3.Bucket(self, "kinesis_log_bucket")

        ###########################################################################
        # LAMBDA SUPPLEMENTAL POLICIES
        ###########################################################################
        lambda_supplemental_policy_statement = aws_iam.PolicyStatement(
            effect=aws_iam.Effect.ALLOW,
            actions=["s3:Get*", "s3:Head*", "s3:List*", "firehose:*"],
            resources=["*"])

        sqs_to_elastic_cloud.add_to_role_policy(
            lambda_supplemental_policy_statement)
        sqs_to_elasticsearch_service.add_to_role_policy(
            lambda_supplemental_policy_statement)
        ###########################################################################
        # AWS SNS TOPICS
        ###########################################################################
        access_log_topic = aws_sns.Topic(self, "access_log_topic")

        ###########################################################################
        # ADD AMAZON S3 BUCKET NOTIFICATIONS
        ###########################################################################
        access_log_bucket.add_event_notification(
            aws_s3.EventType.OBJECT_CREATED,
            aws_s3_notifications.SnsDestination(access_log_topic))

        ###########################################################################
        # AWS SQS QUEUES
        ###########################################################################
        sqs_to_elasticsearch_service_queue_iqueue = aws_sqs.Queue(
            self, "sqs_to_elasticsearch_service_queue_dlq")
        sqs_to_elasticsearch_service_queue_dlq = aws_sqs.DeadLetterQueue(
            max_receive_count=10,
            queue=sqs_to_elasticsearch_service_queue_iqueue)
        sqs_to_elasticsearch_service_queue = aws_sqs.Queue(
            self,
            "sqs_to_elasticsearch_service_queue",
            visibility_timeout=core.Duration.seconds(301),
            dead_letter_queue=sqs_to_elasticsearch_service_queue_dlq)

        sqs_to_elastic_cloud_queue_iqueue = aws_sqs.Queue(
            self, "sqs_to_elastic_cloud_queue_dlq")
        sqs_to_elastic_cloud_queue_dlq = aws_sqs.DeadLetterQueue(
            max_receive_count=10, queue=sqs_to_elastic_cloud_queue_iqueue)
        sqs_to_elastic_cloud_queue = aws_sqs.Queue(
            self,
            "sqs_to_elastic_cloud_queue",
            visibility_timeout=core.Duration.seconds(301),
            dead_letter_queue=sqs_to_elastic_cloud_queue_dlq)

        ###########################################################################
        # AWS SNS TOPIC SUBSCRIPTIONS
        ###########################################################################
        access_log_topic.add_subscription(
            aws_sns_subscriptions.SqsSubscription(sqs_to_elastic_cloud_queue))
        access_log_topic.add_subscription(
            aws_sns_subscriptions.SqsSubscription(
                sqs_to_elasticsearch_service_queue))

        ###########################################################################
        # AWS LAMBDA SQS EVENT SOURCE
        ###########################################################################
        sqs_to_elastic_cloud.add_event_source(
            SqsEventSource(sqs_to_elastic_cloud_queue, batch_size=10))
        sqs_to_elasticsearch_service.add_event_source(
            SqsEventSource(sqs_to_elasticsearch_service_queue, batch_size=10))

        ###########################################################################
        # AWS ELASTICSEARCH DOMAIN
        ###########################################################################

        ###########################################################################
        # AWS ELASTICSEARCH DOMAIN ACCESS POLICY
        ###########################################################################
        this_aws_account = aws_iam.AccountPrincipal(account_id="012345678912")
        # s3_to_elasticsearch_access_logs_domain_access_policy_statement = aws_iam.PolicyStatement(
        #     principals=[this_aws_account],
        #     effect=aws_iam.Effect.ALLOW,
        #     actions=["es:*"],
        #     resources=["*"]
        #     )
        # s3_to_elasticsearch_access_logs_domain_access_policy_statement_list=[]
        # s3_to_elasticsearch_access_logs_domain_access_policy_statement_list.append(s3_to_elasticsearch_access_logs_domain_access_policy_statement)

        s3_to_elasticsearch_access_logs_domain = aws_elasticsearch.Domain(
            self,
            "s3-to-elasticsearch-access-logs-domain",
            # access_policies=s3_to_elasticsearch_access_logs_domain_access_policy_statement_list,
            version=aws_elasticsearch.ElasticsearchVersion.V7_1,
            capacity={
                "master_nodes": 3,
                "data_nodes": 4
            },
            ebs={"volume_size": 100},
            zone_awareness={"availability_zone_count": 2},
            logging={
                "slow_search_log_enabled": True,
                "app_log_enabled": True,
                "slow_index_log_enabled": True
            })

        ###########################################################################
        # AMAZON COGNITO USER POOL
        ###########################################################################
        s3_to_elasticsearch_user_pool = aws_cognito.UserPool(
            self,
            "s3-to-elasticsearch-access-logs-pool",
            sign_in_aliases=aws_cognito.SignInAliases(email=True,
                                                      username=True))

        ###########################################################################
        # AMAZON KINESIS FIREHOSE STREAM
        ###########################################################################
        # kinesis_policy_statement = aws_iam.PolicyStatement(
        #     effect=aws_iam.Effect.ALLOW,
        #     # actions=["es:*", "s3:*", "kms:*", "kinesis:*", "lambda:*"],
        #     actions=["*"],
        #     resources=["*"]
        #     )

        # kinesis_policy_document = aws_iam.PolicyDocument()
        # kinesis_policy_document.add_statements(kinesis_policy_statement)

        kinesis_firehose_stream_role = aws_iam.Role(
            self,
            "BaseVPCIAMLogRole",
            assumed_by=aws_iam.ServicePrincipal('firehose.amazonaws.com'),
            inline_policies={
                "AllowLogAccess":
                aws_iam.PolicyDocument(
                    assign_sids=False,
                    statements=[
                        # NOTE: the wildcard '*' action makes the remaining
                        # actions redundant; scope this down for production.
                        aws_iam.PolicyStatement(
                            actions=[
                                '*', 'es:*', 'logs:PutLogEvents',
                                'logs:DescribeLogGroups',
                                'logs:DescribeLogStreams'
                            ],
                            effect=aws_iam.Effect.ALLOW,
                            resources=['*'])
                    ])
            })

        RetryOptions = aws_kinesisfirehose.CfnDeliveryStream.ElasticsearchRetryOptionsProperty(
            duration_in_seconds=300)
        s3_configuration = aws_kinesisfirehose.CfnDeliveryStream.S3DestinationConfigurationProperty(
            bucket_arn=kinesis_log_bucket.bucket_arn,
            role_arn=kinesis_firehose_stream_role.role_arn)

        ElasticsearchDestinationConfiguration = aws_kinesisfirehose.CfnDeliveryStream.ElasticsearchDestinationConfigurationProperty(
            # "BufferingHints" : ElasticsearchBufferingHints,
            # "CloudWatchLoggingOptions" : CloudWatchLoggingOptions,
            # "ClusterEndpoint" : String,
            domain_arn=s3_to_elasticsearch_access_logs_domain.domain_arn,
            index_name="s3-to-elasticsearch-accesslogs",
            index_rotation_period="OneDay",
            # "ProcessingConfiguration" : ProcessingConfiguration,
            retry_options=RetryOptions,
            role_arn=kinesis_firehose_stream_role.role_arn,
            # "S3BackupMode" : String,
            s3_configuration=s3_configuration
            # "TypeName" : String
            # "VpcConfiguration" : VpcConfiguration
        )

        kinesis_firehose_stream = aws_kinesisfirehose.CfnDeliveryStream(
            self,
            "kinesis_firehose_stream",
            elasticsearch_destination_configuration=
            ElasticsearchDestinationConfiguration)

        sqs_to_elasticsearch_service.add_environment(
            "FIREHOSE_NAME", kinesis_firehose_stream.ref)
        sqs_to_elasticsearch_service.add_environment(
            "QUEUEURL", sqs_to_elasticsearch_service_queue.queue_url)
        sqs_to_elasticsearch_service.add_environment("DEBUG", "False")

        sqs_to_elastic_cloud.add_environment("ELASTICCLOUD_SECRET_NAME", "-")
        sqs_to_elastic_cloud.add_environment("ELASTIC_CLOUD_ID", "-")
        sqs_to_elastic_cloud.add_environment("ELASTIC_CLOUD_PASSWORD", "-")
        sqs_to_elastic_cloud.add_environment("ELASTIC_CLOUD_USERNAME", "-")
        sqs_to_elastic_cloud.add_environment(
            "QUEUEURL", sqs_to_elastic_cloud_queue.queue_url)
        sqs_to_elastic_cloud.add_environment("DEBUG", "False")

Example #6
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # The code that defines your stack goes here

        ###########################################################################
        # AWS SECRETS MANAGER - Templated secret
        ###########################################################################
        # templated_secret = aws_secretsmanager.Secret(self, "TemplatedSecret",
        #     generate_secret_string=aws_secretsmanager.SecretStringGenerator(
        #         secret_string_template= "{\"username\":\"cleanbox\"}",
        #         generate_string_key="password"
        #     )
        # )
        ###########################################################################
        # CUSTOM CLOUDFORMATION RESOURCE
        ###########################################################################
        # customlambda = aws_lambda.Function(self,'customconfig',
        # handler='customconfig.on_event',
        # runtime=aws_lambda.Runtime.PYTHON_3_7,
        # code=aws_lambda.Code.asset('customconfig'),
        # )

        # customlambda_statement = aws_iam.PolicyStatement(actions=["events:PutRule"], conditions=None, effect=None, not_actions=None, not_principals=None, not_resources=None, principals=None, resources=["*"], sid=None)
        # customlambda.add_to_role_policy(statement=customlambda_statement)

        # my_provider = cr.Provider(self, "MyProvider",
        #     on_event_handler=customlambda,
        #     # is_complete_handler=is_complete, # optional async "waiter"
        #     log_retention=logs.RetentionDays.SIX_MONTHS
        # )

        # CustomResource(self, 'customconfigresource', service_token=my_provider.service_token)

        ###########################################################################
        # AWS LAMBDA FUNCTIONS
        ###########################################################################
        sqs_to_elastic_cloud = aws_lambda.Function(
            self,
            'sqs_to_elastic_cloud',
            handler='sqs_to_elastic_cloud.lambda_handler',
            runtime=aws_lambda.Runtime.PYTHON_3_7,
            code=aws_lambda.Code.asset('sqs_to_elastic_cloud'),
            memory_size=4096,
            timeout=core.Duration.seconds(301),
            log_retention=logs.RetentionDays.ONE_DAY)

        sqs_to_elasticsearch_service = aws_lambda.Function(
            self,
            'sqs_to_elasticsearch_service',
            handler='sqs_to_elasticsearch_service.lambda_handler',
            runtime=aws_lambda.Runtime.PYTHON_3_7,
            code=aws_lambda.Code.asset('sqs_to_elasticsearch_service'),
            memory_size=4096,
            timeout=core.Duration.seconds(301),
            log_retention=logs.RetentionDays.ONE_DAY)

        ###########################################################################
        # AMAZON S3 BUCKETS
        ###########################################################################
        cloudtrail_log_bucket = aws_s3.Bucket(self, "cloudtrail_log_bucket")

        ###########################################################################
        # LAMBDA SUPPLEMENTAL POLICIES
        ###########################################################################
        lambda_supplemental_policy_statement = aws_iam.PolicyStatement(
            effect=aws_iam.Effect.ALLOW,
            actions=["s3:Get*", "s3:Head*", "s3:List*", "firehose:*", "es:*"],
            resources=["*"])

        sqs_to_elastic_cloud.add_to_role_policy(
            lambda_supplemental_policy_statement)
        sqs_to_elasticsearch_service.add_to_role_policy(
            lambda_supplemental_policy_statement)
        ###########################################################################
        # AWS SNS TOPICS
        ###########################################################################
        cloudtrail_log_topic = aws_sns.Topic(self, "cloudtrail_log_topic")

        ###########################################################################
        # ADD AMAZON S3 BUCKET NOTIFICATIONS
        ###########################################################################
        cloudtrail_log_bucket.add_event_notification(
            aws_s3.EventType.OBJECT_CREATED,
            aws_s3_notifications.SnsDestination(cloudtrail_log_topic))

        ###########################################################################
        # AWS SQS QUEUES
        ###########################################################################
        sqs_to_elasticsearch_service_queue_iqueue = aws_sqs.Queue(
            self, "sqs_to_elasticsearch_service_queue_dlq")
        sqs_to_elasticsearch_service_queue_dlq = aws_sqs.DeadLetterQueue(
            max_receive_count=10,
            queue=sqs_to_elasticsearch_service_queue_iqueue)
        sqs_to_elasticsearch_service_queue = aws_sqs.Queue(
            self,
            "sqs_to_elasticsearch_service_queue",
            # must be >= the consuming Lambda's 301-second timeout
            visibility_timeout=core.Duration.seconds(301),
            dead_letter_queue=sqs_to_elasticsearch_service_queue_dlq)

        sqs_to_elastic_cloud_queue_iqueue = aws_sqs.Queue(
            self, "sqs_to_elastic_cloud_queue_dlq")
        sqs_to_elastic_cloud_queue_dlq = aws_sqs.DeadLetterQueue(
            max_receive_count=10, queue=sqs_to_elastic_cloud_queue_iqueue)
        sqs_to_elastic_cloud_queue = aws_sqs.Queue(
            self,
            "sqs_to_elastic_cloud_queue",
            # must be >= the consuming Lambda's 301-second timeout
            visibility_timeout=core.Duration.seconds(301),
            dead_letter_queue=sqs_to_elastic_cloud_queue_dlq)

        ###########################################################################
        # AWS SNS TOPIC SUBSCRIPTIONS
        ###########################################################################
        cloudtrail_log_topic.add_subscription(
            aws_sns_subscriptions.SqsSubscription(sqs_to_elastic_cloud_queue))
        cloudtrail_log_topic.add_subscription(
            aws_sns_subscriptions.SqsSubscription(
                sqs_to_elasticsearch_service_queue))

        ###########################################################################
        # AWS LAMBDA SQS EVENT SOURCE
        ###########################################################################
        sqs_to_elastic_cloud.add_event_source(
            SqsEventSource(sqs_to_elastic_cloud_queue, batch_size=10))
        sqs_to_elasticsearch_service.add_event_source(
            SqsEventSource(sqs_to_elasticsearch_service_queue, batch_size=10))

        ###########################################################################
        # AWS ELASTICSEARCH DOMAIN
        ###########################################################################

        ###########################################################################
        # AWS ELASTICSEARCH DOMAIN ACCESS POLICY
        ###########################################################################
        this_aws_account = aws_iam.AccountPrincipal(account_id="012345678912")

        s3_to_elasticsearch_cloudtrail_logs_domain = aws_elasticsearch.Domain(
            self,
            "s3-to-elasticsearch-cloudtrail-logs-domain",
            version=aws_elasticsearch.ElasticsearchVersion.V7_1,
            capacity={
                "master_nodes": 3,
                "data_nodes": 4
            },
            ebs={"volume_size": 100},
            zone_awareness={"availability_zone_count": 2},
            logging={
                "slow_search_log_enabled": True,
                "app_log_enabled": True,
                "slow_index_log_enabled": True
            })

        ###########################################################################
        # AMAZON COGNITO USER POOL
        ###########################################################################
        s3_to_elasticsearch_user_pool = aws_cognito.UserPool(
            self,
            "s3-to-elasticsearch-cloudtrail-logs-pool",
            sign_in_aliases=aws_cognito.SignInAliases(email=True,
                                                      username=True))

        sqs_to_elasticsearch_service.add_environment(
            "ELASTICSEARCH_HOST",
            s3_to_elasticsearch_cloudtrail_logs_domain.domain_endpoint)
        sqs_to_elasticsearch_service.add_environment(
            "QUEUEURL", sqs_to_elasticsearch_service_queue.queue_url)
        sqs_to_elasticsearch_service.add_environment("DEBUG", "False")

        sqs_to_elastic_cloud.add_environment("ELASTICCLOUD_SECRET_NAME", "-")
        sqs_to_elastic_cloud.add_environment("ELASTIC_CLOUD_ID", "-")
        sqs_to_elastic_cloud.add_environment("ELASTIC_CLOUD_PASSWORD", "-")
        sqs_to_elastic_cloud.add_environment("ELASTIC_CLOUD_USERNAME", "-")
        sqs_to_elastic_cloud.add_environment(
            "QUEUEURL", sqs_to_elastic_cloud_queue.queue_url)
        sqs_to_elastic_cloud.add_environment("DEBUG", "False")

        ###########################################################################
        # AWS CLOUDTRAIL TRAIL
        ###########################################################################
        allevents_trail = aws_cloudtrail.Trail(
            self,
            "allevents_trail",
            bucket=cloudtrail_log_bucket,
            is_multi_region_trail=True,
            management_events=aws_cloudtrail.ReadWriteType.ALL,
            send_to_cloud_watch_logs=False)
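
Neither Lambda's source directory is included; a hedged skeleton of sqs_to_elasticsearch_service.lambda_handler, based on the queue wiring and environment variables configured above (the indexing call itself is assumed):

# sqs_to_elasticsearch_service.py -- sketch only, not the original code.
import json
import os

def lambda_handler(event, context):
    host = os.environ["ELASTICSEARCH_HOST"]
    debug = os.environ.get("DEBUG", "False") == "True"
    for record in event["Records"]:             # SQS batch (batch_size=10)
        envelope = json.loads(record["body"])   # SNS envelope
        s3_event = json.loads(envelope["Message"])
        if debug:
            print(json.dumps(s3_event))
        # ... index the event into the Elasticsearch domain at `host` ...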

Example #7
    def __init__(self, scope: core.Construct, id: str,
                 infra: RtspBaseResourcesConstruct, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Create the inventory bucket...
        self.inventories = s3.Bucket(
            self,
            'InventoryBucket',
            bucket_name='homenet-{}.rtsp-inventories.{}.virtual.world'.format(
                infra.landing_zone.zone_name,
                core.Stack.of(self).region).lower(),
            removal_policy=core.RemovalPolicy.DESTROY,
            cors=[
                s3.CorsRule(allowed_methods=[s3.HttpMethods.GET],
                            allowed_origins=['*'])
            ],
            lifecycle_rules=[
                s3.LifecycleRule(
                    id='Transition-to-IA-after-30D',
                    prefix='eufy/',
                    abort_incomplete_multipart_upload_after=core.Duration.days(
                        7),
                    transitions=[
                        s3.Transition(
                            storage_class=s3.StorageClass.INFREQUENT_ACCESS,
                            transition_after=core.Duration.days(30))
                    ])
            ])

        # Create inventory collections for the Eufy Homebases...
        infra.bucket.add_inventory(
            objects_prefix='eufy/',
            inventory_id='EufyFull-InventoryReport',
            format=s3.InventoryFormat.CSV,
            frequency=s3.InventoryFrequency.DAILY,
            include_object_versions=s3.InventoryObjectVersion.CURRENT,
            destination=s3.InventoryDestination(
                bucket=self.inventories,
                bucket_owner=core.Aws.ACCOUNT_ID,
                prefix=None))

        for base_name in ['Moonbase', 'Starbase']:
            prefix = 'eufy/{}.cameras.real.world/'.format(base_name).lower()
            infra.bucket.add_inventory(
                objects_prefix=prefix,
                inventory_id='{}-InventoryReport'.format(base_name),
                format=s3.InventoryFormat.CSV,
                frequency=s3.InventoryFrequency.DAILY,
                include_object_versions=s3.InventoryObjectVersion.CURRENT,
                destination=s3.InventoryDestination(
                    bucket=self.inventories,
                    bucket_owner=core.Aws.ACCOUNT_ID,
                    prefix=None))

        # Broadcast inventory creation events...
        self.inventoryAvailable = sns.Topic(
            self,
            'InventoryAvailable',
            display_name='HomeNet-{}-Rtsp-InventoryAvailable'.format(
                infra.landing_zone.zone_name),
            topic_name='HomeNet-{}-Rtsp-InventoryAvailable'.format(
                infra.landing_zone.zone_name))

        self.inventories.add_event_notification(
            s3.EventType.OBJECT_CREATED,
            s3n.SnsDestination(topic=self.inventoryAvailable),
            s3.NotificationKeyFilter(suffix='manifest.json'))

        # Attach debug queue to the notification
        self.inventoryAvailable.add_subscription(
            subs.SqsSubscription(
                raw_message_delivery=True,
                queue=sqs.Queue(
                    self,
                    'InventoryDebugQueue',
                    removal_policy=core.RemovalPolicy.DESTROY,
                    retention_period=core.Duration.days(7),
                    queue_name='HomeNet-{}-RtspInventoryAvailable_Debug'.
                    format(infra.landing_zone.zone_name))))

        # Subscribe the GroundTruth Manifest Generator
        groundtruth = RtspGroundTruthManifestGenerationFunction(
            self,
            'GroundTruthManifest',
            infra=infra,
            topic=self.inventoryAvailable)

        self.inventories.grant_read_write(groundtruth.function.role)

        # Create the RtspNormalizeImage S3 Object Lambda
        RtspNormalizeImageAccessPoint(scope=self,
                                      id='NormalizedImage',
                                      infra=infra)
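
Additional consumers can subscribe to the inventoryAvailable topic the same way the debug queue does; a sketch with a hypothetical processing function:

        # Sketch: a hypothetical extra consumer of inventory notifications.
        process_fn = aws_lambda.Function(
            self,
            'InventoryProcessor',
            runtime=aws_lambda.Runtime.PYTHON_3_8,
            handler='process_inventory.handler',
            code=aws_lambda.Code.from_asset('src/inventory'))
        self.inventoryAvailable.add_subscription(
            subs.LambdaSubscription(process_fn))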
Example #8
    def __init__(self,
                 scope: core.Construct,
                 id: str,
                 landing_zone: IVpcLandingZone,
                 subnet_group_name: str = 'Default',
                 **kwargs) -> None:
        super().__init__(scope, id, **kwargs)
        self.__landing_zone = landing_zone
        self.__subnet_group_name = subnet_group_name

        # Init basic resources
        self.log_group = logs.LogGroup(
            self,
            'LogGroup',
            log_group_name='/homenet/video/',
            retention=logs.RetentionDays.ONE_DAY,
            removal_policy=core.RemovalPolicy.DESTROY)

        # Add security constraints
        self.security_group = landing_zone.security_group
        self.secrets = RtspCameraSecrets(self,
                                         'Secrets',
                                         landing_zone=landing_zone)

        # Create the stateful bucket
        bucket_name = 'homenet-{}.{}.virtual.world'.format(
            'hybrid',  # landing_zone.zone_name.lower(),
            core.Stack.of(self).region)
        self.bucket = s3.Bucket(
            self,
            'Bucket',
            removal_policy=core.RemovalPolicy.RETAIN,
            bucket_name=bucket_name,
            cors=[
                s3.CorsRule(allowed_methods=[s3.HttpMethods.GET],
                            allowed_origins=['*'])
            ],
            lifecycle_rules=[
                s3.LifecycleRule(
                    id='Retain_5Years',
                    abort_incomplete_multipart_upload_after=core.Duration.days(
                        7),
                    expiration=core.Duration.days(365 * 5)),
                s3.LifecycleRule(id='Remove_CachedFiles',
                                 tag_filters={'Cached': '7d'},
                                 expiration=core.Duration.days(7))
            ])

        # Create Notification Topics for eventing
        self.frameAnalyzed = sns.Topic(
            self,
            'FrameAnalysis',
            display_name='HomeNet-{}-Rtsp-FrameAnalysis'.format(
                landing_zone.zone_name),
            topic_name='HomeNet-{}-Rtsp-FrameAnalysis'.format(
                landing_zone.zone_name))

        self.frameUploaded = sns.Topic(
            self,
            'RtspFrameUploaded',
            display_name='HomeNet-{}-Rtsp-FrameUploaded'.format(
                landing_zone.zone_name),
            topic_name='HomeNet-{}-Rtsp-FrameUploaded'.format(
                landing_zone.zone_name))

        self.bucket.add_event_notification(
            s3.EventType.OBJECT_CREATED,
            s3n.SnsDestination(topic=self.frameUploaded))

        # Setup databases
        self.time_stream = TimeStreamConstruct(self,
                                               'TimeStream',
                                               landing_zone=landing_zone)

        self.face_table = ddb.Table(
            self,
            'FaceTable',
            table_name='HomeNet-{}-FaceTable'.format(landing_zone.zone_name),
            partition_key=ddb.Attribute(name='PartitionKey',
                                        type=ddb.AttributeType.STRING),
            sort_key=ddb.Attribute(name='SortKey',
                                   type=ddb.AttributeType.STRING),
            billing_mode=ddb.BillingMode.PAY_PER_REQUEST,
            point_in_time_recovery=True)
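
The face_table above follows a generic single-table layout (PartitionKey/SortKey); a sketch of how a client might query it with boto3 (the table name and key values are assumptions, since zone_name is resolved at deploy time):

# Sketch: querying the single-table design above with boto3.
import boto3
from boto3.dynamodb.conditions import Key

table = boto3.resource('dynamodb').Table('HomeNet-Hybrid-FaceTable')
response = table.query(
    KeyConditionExpression=Key('PartitionKey').eq('camera#moonbase')
    & Key('SortKey').begins_with('face#'))
for item in response['Items']:
    print(item)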

Example #9
    def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
        super().__init__(scope, construct_id)

        # kwargs is deliberately not forwarded to super(): it carries this
        # construct's own settings (lambda_dir), not base-class properties
        lambda_dir = kwargs["lambda_dir"]

        # Note: A dead-letter queue is optional but it helps capture any failed messages
        dlq = sqs.Queue(self,
                        id="dead_letter_queue_id",
                        retention_period=Duration.days(7))
        dead_letter_queue = sqs.DeadLetterQueue(max_receive_count=1, queue=dlq)

        upload_queue = sqs.Queue(self,
                                 id="sample_queue_id",
                                 visibility_timeout=Duration.seconds(30),
                                 dead_letter_queue=dead_letter_queue)

        sqs_subscription = sns_subs.SqsSubscription(upload_queue,
                                                    raw_message_delivery=True)

        upload_event_topic = sns.Topic(self, id="sample_sns_topic_id")

        # This binds the SNS Topic to the SQS Queue
        upload_event_topic.add_subscription(sqs_subscription)

        # Note: Lifecycle Rules are optional but are included here to keep costs
        #       low by cleaning up old files or moving them to lower cost storage options
        s3_bucket = s3.Bucket(
            self,
            id="sample_bucket_id",
            block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
            versioned=True,
            lifecycle_rules=[
                s3.LifecycleRule(
                    enabled=True,
                    expiration=Duration.days(365),
                    transitions=[
                        s3.Transition(
                            storage_class=s3.StorageClass.INFREQUENT_ACCESS,
                            transition_after=Duration.days(30)),
                        s3.Transition(storage_class=s3.StorageClass.GLACIER,
                                      transition_after=Duration.days(90)),
                    ])
            ])

        # Note: If you don't specify a filter all uploads will trigger an event.
        #       Also, modifying the event type will handle other object operations
        # This binds the S3 bucket to the SNS Topic
        s3_bucket.add_event_notification(
            s3.EventType.OBJECT_CREATED_PUT,
            s3n.SnsDestination(upload_event_topic),
            s3.NotificationKeyFilter(prefix="uploads", suffix=".csv"))

        function = _lambda.Function(
            self,
            "lambda_function",
            runtime=_lambda.Runtime.PYTHON_3_9,
            handler="lambda_function.handler",
            code=_lambda.Code.from_asset(path=lambda_dir))

        # This binds the lambda to the SQS Queue
        invoke_event_source = lambda_events.SqsEventSource(upload_queue)
        function.add_event_source(invoke_event_source)

        # Examples of CloudFormation outputs
        CfnOutput(
            self,
            "UploadFileToS3Example",
            value="aws s3 cp <local-path-to-file> s3://{}/".format(
                s3_bucket.bucket_name),
            description=
            "Upload a file to S3 (using AWS CLI) to trigger the SQS chain",
        )
        CfnOutput(
            self,
            "UploadSqsQueueUrl",
            value=upload_queue.queue_url,
            description="Link to the SQS Queue triggered on S3 uploads",
        )
        CfnOutput(
            self,
            "LambdaFunctionName",
            value=function.function_name,
        )
        CfnOutput(
            self,
            "LambdaFunctionLogGroupName",
            value=function.log_group.log_group_name,
        )

Example #10
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        bucket_with_sns = s3.Bucket(self,
                                    "bucket-with-sns-integration",
                                    removal_policy=core.RemovalPolicy.DESTROY)

        bucket_with_lambda = s3.Bucket(
            self,
            "bucket-with-lambda-integration",
            removal_policy=core.RemovalPolicy.DESTROY)

        exchange_topic = sns.Topic(self, "lambda-info-topic")

        bucket_with_sns.add_event_notification(
            event=s3.EventType.OBJECT_CREATED,
            dest=s3_notifications.SnsDestination(exchange_topic))

        measurement_table = dynamodb.Table(
            self,
            "measurement-table",
            partition_key=dynamodb.Attribute(
                name="PK", type=dynamodb.AttributeType.STRING),
            sort_key=dynamodb.Attribute(name="SK",
                                        type=dynamodb.AttributeType.STRING),
            billing_mode=dynamodb.BillingMode.PAY_PER_REQUEST,
            removal_policy=core.RemovalPolicy.DESTROY)

        s3_event_generator = _lambda.Function(
            self,
            "s3-event-generator",
            code=_lambda.Code.from_asset(
                path=os.path.join(os.path.dirname(__file__), "..", "src")),
            environment={
                "BUCKET_WITH_LAMBDA": bucket_with_lambda.bucket_name,
                "BUCKET_WITH_SNS": bucket_with_sns.bucket_name,
            },
            handler="s3_event_generator.lambda_handler",
            runtime=_lambda.Runtime.PYTHON_3_8,
            timeout=core.Duration.seconds(300),
            memory_size=1024,
        )

        bucket_with_lambda.grant_write(s3_event_generator)
        bucket_with_sns.grant_write(s3_event_generator)

        measure_lambda = _lambda.Function(
            self,
            "measure-lambda",
            code=_lambda.Code.from_asset(
                path=os.path.join(os.path.dirname(__file__), "..", "src")),
            environment={"MEASUREMENT_TABLE": measurement_table.table_name},
            handler="measure_lambda.lambda_handler",
            runtime=_lambda.Runtime.PYTHON_3_8,
            timeout=core.Duration.seconds(50),
            memory_size=1024,
        )

        measurement_table.grant_read_write_data(measure_lambda)

        measure_lambda.add_event_source(
            lambda_event_sources.SnsEventSource(exchange_topic))

        measure_lambda.add_event_source(
            lambda_event_sources.S3EventSource(
                bucket=bucket_with_lambda,
                events=[s3.EventType.OBJECT_CREATED]))

        ssm.StringParameter(self,
                            "bucket-with-sns-parameter",
                            string_value=bucket_with_sns.bucket_name,
                            parameter_name=BUCKET_WITH_SNS_PARAMETER)

        ssm.StringParameter(self,
                            "bucket-with-lambda-parameter",
                            string_value=bucket_with_lambda.bucket_name,
                            parameter_name=BUCKET_WITH_LAMBDA_PARAMETER)

        ssm.StringParameter(self,
                            "measurement-table-parameter",
                            string_value=measurement_table.table_name,
                            parameter_name=MEASUREMENT_TABLE_PARAMETER)

        ssm.StringParameter(self,
                            "generator-function-name-parameter",
                            string_value=s3_event_generator.function_name,
                            parameter_name=GENERATOR_FUNCTION_NAME_PARAMETER)
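
The SSM parameters written above let tests and other stacks discover the deployed resources by name; a sketch of resolving them with boto3 (the *_PARAMETER constants are module-level names defined elsewhere in the project):

# Sketch: reading the deployed resource names back from SSM.
import boto3

ssm_client = boto3.client("ssm")

def get_parameter(name: str) -> str:
    return ssm_client.get_parameter(Name=name)["Parameter"]["Value"]

bucket_with_sns = get_parameter(BUCKET_WITH_SNS_PARAMETER)
measurement_table = get_parameter(MEASUREMENT_TABLE_PARAMETER)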