def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        self.bucket = s3.Bucket(
            self,
            'Bucket',
            bucket_name='nbachmei.rekognition.{}'.format(
                core.Stack.of(self).region),
            removal_policy=core.RemovalPolicy.DESTROY,
            lifecycle_rules=[
                s3.LifecycleRule(
                    abort_incomplete_multipart_upload_after=core.Duration.days(
                        1),
                    expiration=core.Duration.days(90))
            ])

        self.table = ddb.Table(
            self,
            'Experiments',
            table_name='Rekognition',
            partition_key=ddb.Attribute(name='PartitionKey',
                                        type=ddb.AttributeType.STRING),
            sort_key=ddb.Attribute(name='SortKey',
                                   type=ddb.AttributeType.STRING),
            billing_mode=ddb.BillingMode.PAY_PER_REQUEST,
            removal_policy=core.RemovalPolicy.DESTROY)
Example 2
    def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        # The code that defines your stack goes here
        vpc = aws_ec2.Vpc(
            self,
            "DynamodbVPC",
            max_azs=2,
            gateway_endpoints={
                "DynamoDB":
                aws_ec2.GatewayVpcEndpointOptions(
                    service=aws_ec2.GatewayVpcEndpointAwsService.DYNAMODB)
            })

        #XXX: Another way to add DynamoDB VPC Endpoint
        #dynamo_db_endpoint = vpc.add_gateway_endpoint("DynamoDB",
        #  service=aws_ec2.GatewayVpcEndpointAwsService.DYNAMODB
        #)

        ddb_table = aws_dynamodb.Table(
            self,
            "SimpleDynamoDbTable",
            table_name="SimpleTable",
            # removal_policy=cdk.RemovalPolicy.DESTROY,
            partition_key=aws_dynamodb.Attribute(
                name="pkid", type=aws_dynamodb.AttributeType.STRING),
            sort_key=aws_dynamodb.Attribute(
                name="sortkey", type=aws_dynamodb.AttributeType.NUMBER),
            time_to_live_attribute="ttl",
            billing_mode=aws_dynamodb.BillingMode.PROVISIONED,
            read_capacity=15,
            write_capacity=5,
        )
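
An illustrative write against the SimpleTable defined above, with a hypothetical item shape: once the epoch-seconds value in the "ttl" attribute passes, DynamoDB expires and deletes the item. Only the declared keys (pkid, sortkey) and the TTL attribute name come from the table definition.

import time

import boto3

table = boto3.resource("dynamodb").Table("SimpleTable")

table.put_item(Item={
    "pkid": "user#123",                       # partition key (string)
    "sortkey": 1,                             # sort key (number)
    "ttl": int(time.time()) + 7 * 24 * 3600,  # expires roughly one week from now
})
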
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        self.connections_table = dynamodb.Table(
            self,
            f"WSPlaygroundConnections",
            partition_key=dynamodb.Attribute(
                name="connection_id", type=dynamodb.AttributeType.STRING),
            billing_mode=dynamodb.BillingMode.PAY_PER_REQUEST,
            removal_policy=core.RemovalPolicy.DESTROY)

        self.records_table = dynamodb.Table(
            self,
            f"WSPlaygroundRecords",
            partition_key=dynamodb.Attribute(
                name="id", type=dynamodb.AttributeType.STRING),
            billing_mode=dynamodb.BillingMode.PAY_PER_REQUEST,
            removal_policy=core.RemovalPolicy.DESTROY)

        core.CfnOutput(self,
                       id="ConnectionsTableName",
                       value=self.connections_table.table_name)
        core.CfnOutput(self,
                       id="RecordsTableName",
                       value=self.records_table.table_name)

        self.service_role = self._create_service_role()

        chalice_dir = os.path.join(os.path.dirname(__file__), os.pardir, "ws")
        chalice_config = self._create_chalice_stage_config()

        self.chalice = Chalice(self,
                               id,
                               source_dir=chalice_dir,
                               stage_config=chalice_config)
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Create a DynamoDB table to store the GPS locations.
        table = dynamodb.Table(self,
                               "PetGpsLocations",
                               partition_key=dynamodb.Attribute(
                                   name="PetId",
                                   type=dynamodb.AttributeType.STRING),
                               sort_key=dynamodb.Attribute(
                                   name="Timestamp",
                                   type=dynamodb.AttributeType.NUMBER),
                               table_name="PetGpsLocations")

        # Create a Lambda function that will read data from the API and populate the DynamoDB table.
        lambda_function = lambda_.Function(
            self,
            "GpsProcessor",
            code=lambda_.Code.from_asset(os.path.join(os.getcwd(),
                                                      "resources")),
            handler="process-gps-datum.main",
            runtime=lambda_.Runtime.PYTHON_3_7)

        # Grant the function read/write access to the table.
        table.grant_read_write_data(lambda_function)

        # Create an API Gateway to act as a proxy for the Lambda.
        apigateway.LambdaRestApi(self,
                                 "pet-location-api",
                                 handler=lambda_function)
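
A minimal runtime sketch of what the "GpsProcessor" handler above (resources/process-gps-datum.py, entry point main) might look like; the event shape and location fields are assumptions, while the table name and its PetId/Timestamp keys come from the table definition. Note the stack does not pass the table name as an environment variable, so the sketch falls back to the hard-coded name.

import json
import os
import time

import boto3

table = boto3.resource("dynamodb").Table(os.environ.get("TABLE_NAME", "PetGpsLocations"))


def main(event, context):
    body = json.loads(event.get("body") or "{}")
    table.put_item(Item={
        "PetId": body["petId"],          # partition key (string)
        "Timestamp": int(time.time()),   # sort key (number)
        "Latitude": str(body.get("lat")),
        "Longitude": str(body.get("lon")),
    })
    return {"statusCode": 200, "body": json.dumps({"stored": True})}
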
Example 5
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        ticker = dynamodb.Attribute(
            name='Ticker',
            type=dynamodb.AttributeType.STRING,
        )

        date = dynamodb.Attribute(
            name='Date',
            type=dynamodb.AttributeType.STRING,
        )

        table = dynamodb.Table(
            self,
            'StockHistory',
            partition_key=ticker,
            sort_key=date,
            billing_mode=dynamodb.BillingMode.PAY_PER_REQUEST,
            removal_policy=core.RemovalPolicy.DESTROY,
            point_in_time_recovery=True,
        )

        index_name = 'Date-index'
        table.add_global_secondary_index(
            index_name=index_name,
            partition_key=date,
            sort_key=ticker,
            projection_type=dynamodb.ProjectionType.INCLUDE,
            non_key_attributes=['Name'])

        Importer(self, 'Importer', table=table)
        restapi = RestApi(self, 'Api', table=table, index_name=index_name)
        Website(self, 'Website', api=restapi.api)
    def __init__(self, scope: cdk.Construct, construct_id: str, stage: str,
                 explain_bot_lambda: _lambda.Function,
                 add_meaning_lambda: _lambda.Function, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        # Define dynamoDb table
        acronym_denied_table = _dynamo.Table(
            self,
            id="explainDeniedAcronymTable",
            table_name="explaindeniedacronymstable" + stage.lower(),
            partition_key=_dynamo.Attribute(name="Acronym",
                                            type=_dynamo.AttributeType.STRING),
            sort_key=_dynamo.Attribute(name="Deleted_at",
                                       type=_dynamo.AttributeType.STRING),
            removal_policy=cdk.RemovalPolicy.DESTROY,
        )

        self.table = acronym_denied_table

        # Add the table name as an environment variable
        explain_bot_lambda.add_environment("TABLE_DENIED_NAME",
                                           acronym_denied_table.table_name)
        add_meaning_lambda.add_environment("TABLE_DENIED_NAME",
                                           acronym_denied_table.table_name)

        # Give lambdas the ability to read and write to the database table
        acronym_denied_table.grant_full_access(explain_bot_lambda)
        acronym_denied_table.grant_full_access(add_meaning_lambda)
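
A hedged sketch of how the explain_bot_lambda handler might consume the TABLE_DENIED_NAME environment variable set above; the helper name and the "denied means any item exists" rule are assumptions, while the Acronym key comes from the table definition.

import os

import boto3
from boto3.dynamodb.conditions import Key

denied_table = boto3.resource("dynamodb").Table(os.environ["TABLE_DENIED_NAME"])


def is_denied(acronym: str) -> bool:
    # Any item stored under this acronym means it was denied at some point.
    response = denied_table.query(KeyConditionExpression=Key("Acronym").eq(acronym))
    return response["Count"] > 0
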
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        sender_lambda = aws_lambda.Function(
            self,
            "sms_sender_lambda",
            runtime=aws_lambda.Runtime.PYTHON_3_8,
            handler="sms_sender.handler",
            code=aws_lambda.Code.asset('in_clouds_demos/sms_sender/lambda'))

        # Api Gateway
        api = aws_apigateway.LambdaRestApi(self,
                                           "SmsSenderApi",
                                           handler=sender_lambda)
        # DynamoDb Table
        # index = phone number, secondary index = MD5 of the content?
        table = aws_dynamodb.Table(self,
                                   "message_log",
                                   partition_key=aws_dynamodb.Attribute(
                                       name="phoneNumber",
                                       type=aws_dynamodb.AttributeType.NUMBER),
                                   sort_key=aws_dynamodb.Attribute(
                                       name="messageHash",
                                       type=aws_dynamodb.AttributeType.STRING),
                                   removal_policy=core.RemovalPolicy.DESTROY)

        sender_lambda.add_environment('TABLE_NAME', table.table_name)
        table.grant_read_write_data(sender_lambda)

        sender_lambda.role.add_to_policy(
            aws_iam.PolicyStatement(actions=['sns:Publish'], resources=["*"]))

        wf = Watchful(self, "monitoring", alarm_email="*****@*****.**")
        wf.watch_scope(self)
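
One plausible shape for sms_sender.handler, tying together the sns:Publish permission, the TABLE_NAME environment variable, and the phoneNumber/messageHash keys from the stack above; the request body fields and the MD5 hashing are assumptions.

import hashlib
import json
import os

import boto3

sns = boto3.client("sns")
table = boto3.resource("dynamodb").Table(os.environ["TABLE_NAME"])


def handler(event, context):
    body = json.loads(event.get("body") or "{}")
    phone_number = int(body["phoneNumber"])
    message = body["message"]

    sns.publish(PhoneNumber=f"+{phone_number}", Message=message)
    table.put_item(Item={
        "phoneNumber": phone_number,                                       # partition key (number)
        "messageHash": hashlib.md5(message.encode("utf-8")).hexdigest(),   # sort key (string)
    })
    return {"statusCode": 200, "body": json.dumps({"sent": True})}
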
Example 8
 def create_dynamodb(self) -> None:
     '''DynamoDB Tables and Streams Event Sources
     '''
     # Single table to store Reality data (billing mode, TTL, or a global index could also be configured here or in a separate function)
     self.ddb_table_reality = aws_dynamodb.Table(
         self,
         'Reality-dev',
         partition_key=aws_dynamodb.Attribute(
             name="hash_id", type=aws_dynamodb.AttributeType.NUMBER),
         sort_key=aws_dynamodb.Attribute(
             name='actualized', type=aws_dynamodb.AttributeType.STRING),
         stream=aws_dynamodb.StreamViewType.NEW_AND_OLD_IMAGES,  # enable DynamoDB Streams for the trigger
         read_capacity=3,
         write_capacity=8,
         removal_policy=core.RemovalPolicy.DESTROY  # RemovalPolicy comes from the core module
     )
     ## Streams DB + triggers
     self.ddb_streams_reality = aws_lambda_event_sources.DynamoEventSource(
         table=self.ddb_table_reality,
         starting_position=aws_lambda.StartingPosition.LATEST,  # LATEST reads only new records; TRIM_HORIZON would replay from the oldest retained record
         batch_size=100,  # up to 100 stream records per Lambda invocation
         max_batching_window=core.Duration.seconds(60),
         # parallelization_factor could be raised (e.g. 2-5) to process more batches per shard concurrently
         retry_attempts=2,
         on_failure=aws_lambda_destinations.SqsDestination(
             self.queue_ddb_streams),
     )
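
A sketch of the kind of Lambda handler the stream event source above would be attached to; the handler name is an assumption, while the record structure (NEW_AND_OLD_IMAGES, batches of up to 100) follows from the configuration above.

def handle_reality_stream(event, context):
    for record in event["Records"]:
        event_name = record["eventName"]                 # INSERT | MODIFY | REMOVE
        keys = record["dynamodb"]["Keys"]                # e.g. {'hash_id': {'N': '...'}, 'actualized': {'S': '...'}}
        new_image = record["dynamodb"].get("NewImage")   # absent for REMOVE
        old_image = record["dynamodb"].get("OldImage")   # absent for INSERT
        print(event_name, keys, new_image is not None, old_image is not None)
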
Example 9
    def catalog(self, bucket, notification_queue):
        catalog_table = _ddb.Table(
            self,
            "CatalogTable",
            billing_mode=_ddb.BillingMode.PAY_PER_REQUEST,
            removal_policy=core.RemovalPolicy.DESTROY,
            partition_key=_ddb.Attribute(name='Source',
                                         type=_ddb.AttributeType.STRING),
            sort_key=_ddb.Attribute(name='Timestamp',
                                    type=_ddb.AttributeType.STRING))

        event_recorder = _lambda.Function(
            self,
            "EventRecorder",
            handler='lambda_function.lambda_handler',
            # https://github.com/aws/aws-cdk/issues/5491
            # pylint: disable=no-value-for-parameter
            code=_lambda.Code.asset('src/event_recorder'),
            runtime=_lambda.Runtime.PYTHON_3_7,
            log_retention=_logs.RetentionDays.ONE_MONTH,
            environment={
                'BUCKET_NAME': bucket.bucket_name,
                'TABLE_NAME': catalog_table.table_name,
                'TOPIC_SSM_PREFIX':
                "/{}/DistributionTopics/".format(self.stack_id)
            })
        bucket.grant_read(event_recorder)
        catalog_table.grant_write_data(event_recorder)
        event_recorder.add_event_source(
            _lambda_event_sources.SqsEventSource(notification_queue))
        return catalog_table, event_recorder
Example 10
    def __init__(self, app: core.App, id: str) -> None:
        super().__init__(app, id)

        first_dynamodb_name = core.CfnParameter(self,
                                                "FirstDynamodbName",
                                                type="String")
        second_dynamodb_name = core.CfnParameter(self,
                                                 "SecondDynamodbName",
                                                 type="String")
        third_dynamodb_name = core.CfnParameter(self,
                                                "ThirdDynamodbName",
                                                type="String")
        fourth_dynamodb_name = core.CfnParameter(self,
                                                 "FourthDynamodbName",
                                                 type="String")

        # create dynamo table
        _first_dynamo_table = aws_dynamodb.Table(
            self,
            "onesix",
            removal_policy=core.RemovalPolicy.DESTROY,
            table_name=first_dynamodb_name.value_as_string,
            partition_key=aws_dynamodb.Attribute(
                name="id", type=aws_dynamodb.AttributeType.STRING))

        _second_dynamo_table = aws_dynamodb.Table(
            self,
            "twofour",
            removal_policy=core.RemovalPolicy.DESTROY,
            table_name=second_dynamodb_name.value_as_string,
            partition_key=aws_dynamodb.Attribute(
                name="id", type=aws_dynamodb.AttributeType.STRING))

        _third_dynamo_table = aws_dynamodb.Table(
            self,
            "threefour",
            removal_policy=core.RemovalPolicy.DESTROY,
            table_name=third_dynamodb_name.value_as_string,
            partition_key=aws_dynamodb.Attribute(
                name="id", type=aws_dynamodb.AttributeType.STRING))
        _fourth_dynamo_table = aws_dynamodb.Table(
            self,
            "fourfour",
            removal_policy=core.RemovalPolicy.DESTROY,
            table_name=fourth_dynamodb_name.value_as_string,
            partition_key=aws_dynamodb.Attribute(
                name="id", type=aws_dynamodb.AttributeType.STRING))
        core.CfnOutput(self,
                       "_FirstdynamodbName",
                       value=_first_dynamo_table.table_name)
        core.CfnOutput(self,
                       "_SeconddynamodbName",
                       value=_second_dynamo_table.table_name)
        core.CfnOutput(self,
                       "_ThirddynamodbName",
                       value=_third_dynamo_table.table_name)
        core.CfnOutput(self,
                       "_FourthdynamodbName",
                       value=_fourth_dynamo_table.table_name)
Example 11
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)
        self.ddb_table_name = None
        self.ddb_table_arn = None

        url_shortener_ddb = ddb.Table(
            self,
            id='ShortenURLDDB',
            table_name='dev-url-shortener',
            partition_key=ddb.Attribute(name='id', type=ddb.AttributeType.STRING),
            time_to_live_attribute='expiry_date'
        )
        self.ddb_table_name = url_shortener_ddb.table_name
        self.ddb_table_arn = url_shortener_ddb.table_arn

        write_capacity_scaling = url_shortener_ddb.auto_scale_write_capacity(
            max_capacity=40000,
            min_capacity=5
        )

        write_capacity_scaling.scale_on_utilization(
            target_utilization_percent=70
        )

        read_capacity_scaling = url_shortener_ddb.auto_scale_read_capacity(
            max_capacity=40000,
            min_capacity=5
        )

        read_capacity_scaling.scale_on_utilization(
            target_utilization_percent=70
        )

        # Create the long-url-index global secondary index
        url_shortener_ddb.add_global_secondary_index(
            index_name='long-url-index',
            partition_key=ddb.Attribute(name='long_url', type=ddb.AttributeType.STRING)
        )

        global_index_write_capacity_scaling = url_shortener_ddb.auto_scale_global_secondary_index_write_capacity(
            index_name='long-url-index',
            max_capacity=40000,
            min_capacity=5
        )

        global_index_write_capacity_scaling.scale_on_utilization(
            target_utilization_percent=70
        )

        global_index_read_capacity_scaling = url_shortener_ddb.auto_scale_global_secondary_index_read_capacity(
            index_name='long-url-index',
            max_capacity=40000,
            min_capacity=5
        )

        global_index_read_capacity_scaling.scale_on_utilization(
            target_utilization_percent=70
        )
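
An illustrative lookup against the long-url-index GSI created above: before minting a new short id, check whether the long URL already has one. The function name is an assumption; the long_url and id attributes come from the table and index definitions.

import boto3
from boto3.dynamodb.conditions import Key

table = boto3.resource("dynamodb").Table("dev-url-shortener")


def find_existing_short_id(long_url: str):
    response = table.query(
        IndexName="long-url-index",
        KeyConditionExpression=Key("long_url").eq(long_url),
        Limit=1,
    )
    items = response.get("Items", [])
    return items[0]["id"] if items else None
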
Example 12
 def create_table(self):
     return dynamodb.Table(
         self,
         "server-table",
         partition_key=dynamodb.Attribute(
             name="address", type=dynamodb.AttributeType.STRING),
         sort_key=dynamodb.Attribute(name="game",
                                     type=dynamodb.AttributeType.STRING),
         billing_mode=dynamodb.BillingMode.PAY_PER_REQUEST,
     )
Example 13
 def _create_ddb_table(self):
     dynamodb_table = dynamodb.Table(
         self,
         'AppTable',
         partition_key=dynamodb.Attribute(
             name='PK', type=dynamodb.AttributeType.STRING),
         sort_key=dynamodb.Attribute(name='SK',
                                     type=dynamodb.AttributeType.STRING),
         removal_policy=cdk.RemovalPolicy.DESTROY)
     cdk.CfnOutput(self, 'AppTableName', value=dynamodb_table.table_name)
     return dynamodb_table
Example 14
 def _create_ddb_table(self, table_name):
     return ddb.Table(self,
                      table_name,
                      table_name=table_name,
                      partition_key=ddb.Attribute(
                          name="PK", type=ddb.AttributeType.STRING),
                      sort_key=ddb.Attribute(name="SK",
                                             type=ddb.AttributeType.STRING),
                      billing_mode=ddb.BillingMode.PAY_PER_REQUEST,
                      removal_policy=cdk.RemovalPolicy.RETAIN,
                      time_to_live_attribute="TTL",
                      stream=ddb.StreamViewType.NEW_AND_OLD_IMAGES)
Example 15
  def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    super().__init__(scope, id, **kwargs)

    self.auditTable = ddb.Table(self, 'AuditTable',
      table_name='AuditTable',
      partition_key=ddb.Attribute(name='PartitionKey', type=ddb.AttributeType.STRING),
      sort_key=ddb.Attribute(name='SortKey', type=ddb.AttributeType.STRING),
      billing_mode= ddb.BillingMode.PAY_PER_REQUEST,
      point_in_time_recovery=True,
      encryption= ddb.TableEncryption.AWS_MANAGED,
      # time_to_live_attribute='Expiration',  # TTL takes the attribute name as a string, not an Attribute
    )
Example 16
    def __init__(self,
                 scope: core.Construct,
                 id: builtins.str,
                 resources: FsiSharedResources,
                 subnet_group_name: str = 'Default') -> None:
        super().__init__(scope, id)
        self.__resources = resources

        # Configure the Instrument State Table
        self.instrument_table = self.add_ddb_table(
            'InstrumentTable', 'Fsi{}-Collection-Instrument'.format(
                resources.landing_zone.zone_name))

        self.query_by_symbol_index_name = 'query-by-symbol'
        self.instrument_table.add_global_secondary_index(
            partition_key=ddb.Attribute(name='symbol',
                                        type=ddb.AttributeType.STRING),
            sort_key=ddb.Attribute(name='SortKey',
                                   type=ddb.AttributeType.STRING),
            index_name=self.query_by_symbol_index_name,
            projection_type=ddb.ProjectionType.ALL)

        self.query_by_exchange_name = 'query-by-exchange'
        self.instrument_table.add_global_secondary_index(
            partition_key=ddb.Attribute(name='exchange',
                                        type=ddb.AttributeType.STRING),
            sort_key=ddb.Attribute(name='SortKey',
                                   type=ddb.AttributeType.STRING),
            index_name=self.query_by_exchange_name,
            projection_type=ddb.ProjectionType.ALL)

        # Configure the Transaction Audit Table
        self.transaction_table = self.add_ddb_table(
            'TransactionTable', 'Fsi{}-Collection-Transactions'.format(
                resources.landing_zone.zone_name))

        self.quotes_table = self.add_ddb_table(
            'QuoteHistoryTable',
            'Fsi{}-Collection-Quotes'.format(resources.landing_zone.zone_name))

        self.options_table = self.add_ddb_table(
            'OptionCache', 'Fsi{}-Collection-Options'.format(
                resources.landing_zone.zone_name))

        self.timeseries_database = ts.CfnDatabase(
            self,
            'Database',
            database_name='HomeNet-Fsi{}'.format(
                resources.landing_zone.zone_name))

        self.add_timeseries_table('Quotes')
        self.add_timeseries_table('Fundamentals')
Example 17
    def __init__(self,
                 scope: core.Construct,
                 id: str,
                 stage: Optional[str] = 'prod',
                 **kwargs) -> None:
        super().__init__(scope, id + '-' + stage, **kwargs)

        acct_table_name = id + '-accounts-table-' + stage
        acct_table = ddb.Table(self,
                               id=acct_table_name,
                               table_name=acct_table_name,
                               partition_key=ddb.Attribute(
                                   name='id', type=ddb.AttributeType.STRING),
                               billing_mode=ddb.BillingMode.PAY_PER_REQUEST)

        events_table_name = id + '-events-table-' + stage
        events_table = ddb.Table(self,
                                 id=events_table_name,
                                 table_name=events_table_name,
                                 partition_key=ddb.Attribute(
                                     name='id', type=ddb.AttributeType.STRING),
                                 billing_mode=ddb.BillingMode.PAY_PER_REQUEST,
                                 stream=ddb.StreamViewType.NEW_IMAGE)

        self._table_stream_arn = events_table.table_stream_arn

        # create our Lambda function for the bank account service
        func_name = id + '-' + stage + '-' + 'bank-accounts'
        handler = lambda_.Function(
            self,
            func_name,
            code=lambda_.Code.from_asset('bank_account_service'),
            runtime=lambda_.Runtime.NODEJS_10_X,
            handler='handler.handler',
            environment={
                'ACCOUNTS_TABLE_NAME': acct_table.table_name,
                'EVENTS_TABLE_NAME': events_table.table_name,
                'REGION': core.Aws.REGION
            })

        self._bank_account_service = handler

        gw.LambdaRestApi(self, id=stage + '-' + id, handler=handler)

        # give EventBridge permission to invoke this Lambda
        handler.add_permission(
            id='EBPermission',
            principal=iam.ServicePrincipal('events.amazonaws.com'),
            action='lambda:InvokeFunction')

        acct_table.grant_read_write_data(handler.role)
        events_table.grant_read_write_data(handler.role)
Example 18
    def create_table(self) -> aws_dynamodb.ITable:
        partition_key = aws_dynamodb.Attribute(
            name="pk", type=aws_dynamodb.AttributeType.STRING)

        sort_key = aws_dynamodb.Attribute(
            name="sk", type=aws_dynamodb.AttributeType.STRING)

        return aws_dynamodb.Table(
            self,
            "EternalGuessesTable",
            partition_key=partition_key,
            sort_key=sort_key,
            billing_mode=aws_dynamodb.BillingMode.PAY_PER_REQUEST)
Example 19
    def create_dynamodb(self) -> None:
        '''DynamoDB Tables and Event Sources
        '''
        # DynamoDB Table Attributes
        self.ddb_attr_time_to_live = 'time-to-live'

        # DynamoDB Parameters
        self.ddb_param_max_parallel_streams = 5

        # Single-table to store blog content
        self.ddb_table_blog = aws_dynamodb.Table(
            self,
            'sls-blog-dynamo-table',
            partition_key=aws_dynamodb.Attribute(
                name='id',
                type=aws_dynamodb.AttributeType.STRING,
            ),
            billing_mode=aws_dynamodb.BillingMode.PAY_PER_REQUEST,
            point_in_time_recovery=True,
            removal_policy=core.RemovalPolicy.DESTROY,
            time_to_live_attribute=self.ddb_attr_time_to_live,
            stream=aws_dynamodb.StreamViewType.NEW_AND_OLD_IMAGES,
        )

        # GSI to query blog content by item (type) and ordered by time
        self.ddb_gsi_latest = 'latest-blogs'

        self.ddb_table_blog.add_global_secondary_index(
            index_name=self.ddb_gsi_latest,
            partition_key=aws_dynamodb.Attribute(
                name='item-type',
                type=aws_dynamodb.AttributeType.STRING,
            ),
            sort_key=aws_dynamodb.Attribute(
                name='publish-timestamp',
                type=aws_dynamodb.AttributeType.NUMBER,
            ),
            projection_type=aws_dynamodb.ProjectionType.ALL,
        )

        # Generate streams from modifications to the "blog" DDB Table
        self.ddb_source_blog = aws_lambda_event_sources.DynamoEventSource(
            table=self.ddb_table_blog,
            starting_position=aws_lambda.StartingPosition.LATEST,
            batch_size=500,
            max_batching_window=core.Duration.seconds(60),
            parallelization_factor=self.ddb_param_max_parallel_streams,
            retry_attempts=2,
            on_failure=aws_lambda_destinations.SqsDestination(
                self.queue_ddb_streams_dlq),
        )
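
An illustrative query against the latest-blogs GSI defined above: fetch the newest items of one item-type by reading the publish-timestamp sort key in descending order. The item-type value and function name are assumptions; the index and attribute names come from the stack.

import boto3
from boto3.dynamodb.conditions import Key


def latest_blog_items(table_name: str, item_type: str, limit: int = 20):
    table = boto3.resource("dynamodb").Table(table_name)
    response = table.query(
        IndexName="latest-blogs",
        KeyConditionExpression=Key("item-type").eq(item_type),
        ScanIndexForward=False,  # descending publish-timestamp, newest first
        Limit=limit,
    )
    return response["Items"]
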
Example 20
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        table = dynamo.Table(
            self,
            'Table',
            table_name='subscriptions',
            partition_key=dynamo.Attribute(name='pk',
                                           type=dynamo.AttributeType.STRING),
            sort_key=dynamo.Attribute(name='sk',
                                      type=dynamo.AttributeType.STRING),
            time_to_live_attribute='ttl')

        template_name = 'EmailVerification'
        template = ses.CfnTemplate(
            self,
            'Template',
            template={
                'htmlPart': '<div><a href="{{code}}">Verify Email</a></div>',
                'subjectPart': 'Verify Email Address',
                'templateName': template_name
            })

        create_subscription = lambda_.Function(
            self,
            'CreateSubscriptionFunction',
            function_name='create_subscription',
            handler='src.main.handler',
            code=lambda_.Code.from_asset('../functions/create_subscription'),
            runtime=lambda_.Runtime.PYTHON_3_7,
            environment={
                'TEMPLATE_NAME': template_name,
                'TABLE_NAME': table.table_name
            })

        # Update Lambda Permissions
        create_subscription.add_to_role_policy(
            iam.PolicyStatement(actions=['ses:SendEmail', 'ses:SendRawEmail'],
                                resources=['*']))

        table.grant_read_write_data(create_subscription)

        # API
        api = ag.RestApi(self, 'Api', rest_api_name='email-subscriptions')

        subscriptions_resource = api.root.add_resource('subscriptions')

        subscriptions_resource.add_method('POST',
                                          integration=ag.LambdaIntegration(
                                              handler=create_subscription,
                                              proxy=True))
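
A hedged sketch of what src/main.handler in the create_subscription function above might do: record the pending subscription, then send the verification email using the SES template. The request fields, key layout, verification URL, and sender address are assumptions; TEMPLATE_NAME and TABLE_NAME come from the Lambda environment defined above.

import json
import os
import uuid

import boto3

ses = boto3.client("ses")
table = boto3.resource("dynamodb").Table(os.environ["TABLE_NAME"])


def handler(event, context):
    body = json.loads(event.get("body") or "{}")
    email = body["email"]
    code = str(uuid.uuid4())

    table.put_item(Item={"pk": f"SUBSCRIPTION#{email}", "sk": "PENDING", "code": code})
    ses.send_templated_email(
        Source=os.environ.get("SENDER_ADDRESS", "no-reply@example.com"),  # assumed sender address
        Destination={"ToAddresses": [email]},
        Template=os.environ["TEMPLATE_NAME"],
        TemplateData=json.dumps({"code": f"https://example.com/verify?code={code}"}),
    )
    return {"statusCode": 201, "body": json.dumps({"status": "pending"})}
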
 def configure_dynamo_table(self, table_name, primary_key, sort_key):
     demo_table = aws_dynamodb.Table(
         self, table_name,
         table_name= table_name,
         partition_key=aws_dynamodb.Attribute(
             name=primary_key,
             type=aws_dynamodb.AttributeType.STRING
         ),
         sort_key=aws_dynamodb.Attribute(
             name=sort_key,
             type=aws_dynamodb.AttributeType.STRING
         ),
         removal_policy=core.RemovalPolicy.DESTROY
     )
     return demo_table
    def __init__(
        self,
        scope: cdk.Construct,
        construct_id: str,
        stack_log_level: str,
        **kwargs
    ) -> None:
        super().__init__(scope, construct_id, **kwargs)

        id_prefix_str = "elasticViews"
        ddb_table_name = "elasticViewsMoviesTable_2021"

        # create dynamo table
        movies_table = _ddb.Table(
            self,
            f"{id_prefix_str}SrcDdb",
            partition_key=_ddb.Attribute(
                name="year",
                type=_ddb.AttributeType.NUMBER
            ),
            sort_key=_ddb.Attribute(
                name="title",
                type=_ddb.AttributeType.STRING
            ),
            read_capacity=50,
            write_capacity=50,
            table_name=ddb_table_name,
            removal_policy=cdk.RemovalPolicy.DESTROY
        )


        ###########################################
        ################# OUTPUTS #################
        ###########################################
        output_0 = cdk.CfnOutput(
            self,
            "AutomationFrom",
            value=f"{GlobalArgs.SOURCE_INFO}",
            description="To know more about this automation stack, check out our github page."
        )

        output_1 = cdk.CfnOutput(
            self,
            "srcMoviesDdbTable",
            value=movies_table.table_name,
            description="Source DynamoDB table for Glue Elastic Views"
        )
Example 23
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        ZachDynamoDBInstanceName = self.__class__.__name__
        ZachDynamoDBInstance = dy.Table(
            self,
            id=ZachDynamoDBInstanceName,
            table_name=ZachDynamoDBInstanceName,
            partition_key=dy.Attribute(name="UID", type=dy.AttributeType.NUMBER),
            sort_key=dy.Attribute(name="Date", type=dy.AttributeType.STRING),
            billing_mode=dy.BillingMode.PROVISIONED,
            read_capacity=3,
            write_capacity=3,
            server_side_encryption=True,
            stream=dy.StreamViewType.KEYS_ONLY,
            removal_policy=core.RemovalPolicy.DESTROY)

        core.CfnOutput(self, ZachDynamoDBInstanceName+"TableARN", value=str(ZachDynamoDBInstance.table_arn))
        core.CfnOutput(self, ZachDynamoDBInstanceName+"TableName", value=str(ZachDynamoDBInstance.table_name))
Example 24
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        table = ddb.Table(self, "DataStoreDynamoDBTable",
                          table_name="CatLHR-DataStore",
                          partition_key=ddb.Attribute(
                              name="uuid",
                              type=ddb.AttributeType.STRING
                          ),
                          sort_key=ddb.Attribute(
                              name="date",
                              type=ddb.AttributeType.STRING
                          ))

        self.table = table
Example 25
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # DynamoDB Table
        table = dynamo_db.Table(self,
                                "Hits",
                                partition_key=dynamo_db.Attribute(
                                    name="path",
                                    type=dynamo_db.AttributeType.STRING))

        # defines an AWS  Lambda resource
        dynamo_lambda = _lambda.Function(
            self,
            "DynamoLambdaHandler",
            runtime=_lambda.Runtime.NODEJS_12_X,  # execution environment
            handler="lambda.handler",  # file is "lambda", function is "handler"
            code=_lambda.Code.from_asset(
                "lambda"),  # Code loaded from the lambda dir
            environment={'HITS_TABLE_NAME': table.table_name})

        # grant the Lambda role read/write permissions to our table
        table.grant_read_write_data(dynamo_lambda)

        # defines an API Gateway REST API resource backed by our "dynamo_lambda" function.
        api_gw.LambdaRestApi(self, 'Endpoint', handler=dynamo_lambda)
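
The handler above is Node.js; as a rough Python equivalent, the usual hit-counter write is a single atomic update keyed by the request path. The "hits" attribute name is an assumption, while "path" and HITS_TABLE_NAME come from the stack above.

import os

import boto3

table = boto3.resource("dynamodb").Table(os.environ["HITS_TABLE_NAME"])


def count_hit(path: str) -> None:
    table.update_item(
        Key={"path": path},
        UpdateExpression="ADD hits :incr",   # creates the counter on first use
        ExpressionAttributeValues={":incr": 1},
    )
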
Example 26
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        self.table = aws_dynamodb.Table(
            self,
            f"{id}-table",
            partition_key=aws_dynamodb.Attribute(
                name="url", type=aws_dynamodb.AttributeType.STRING),
        )

        poller = get_lambda(self,
                            f"{id}-lambda-poller",
                            code=f"lib/stacks/{id}/lambdas",
                            handler="pagespeed_poller.handler",
                            layers=[get_layer(self, "requests_oauthlib", id)],
                            environment={
                                "DYNAMODB_TABLE":
                                self.table.table_name,
                                "GOOGLE_PAGESPEED_API_KEY":
                                env["GOOGLE_PAGESPEED_API_KEY"],
                                "GOOGLE_PAGESPEED_TARGET_URLS":
                                env["GOOGLE_PAGESPEED_TARGET_URLS"],
                            })
        self.table.grant_read_write_data(poller)

        cronjob = aws_events.Rule(
            self,
            f"{id}-scheduled-event",
            enabled=True,
            schedule=aws_events.Schedule.cron(hour="6-16", minute="30"),  # pylint: disable=no-value-for-parameter
        )
        cronjob.add_target(aws_events_targets.LambdaFunction(handler=poller))
    def __init__(self, scope: core.Construct, construct_id: str,
                 **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        state_machine_role = _iam.Role(
            self,
            "state_machine_role",
            assumed_by=_iam.ServicePrincipal("lambda.amazonaws.com"),
        )

        state_machine_role.add_to_policy(
            _iam.PolicyStatement(
                resources=["*"],
                actions=[
                    "lambda:InvokeFunction",
                    "logs:CreateLogGroup",
                    "logs:CreateLogStream",
                    "logs:PutLogEvents",
                ],
            ))

        state_machine_handler = _lambda.Function(
            self,
            'state_machine_handler',
            code=_lambda.Code.asset('lambdas'),
            environment={
                "REGION": "us-east-1",
            },
            function_name="state_machine_handler",
            handler='state_machine_handler.lambda_handler',
            role=state_machine_role,
            runtime=_lambda.Runtime.PYTHON_3_7,
            timeout=core.Duration.seconds(2),
        )

        api_gateway_definition = _apigw.RestApi(
            self,
            'api_gateway_definition',
            default_cors_preflight_options={
                "allow_origins": _apigw.Cors.ALL_ORIGINS,
                "allow_methods": _apigw.Cors.ALL_METHODS,
            },
            description="Api to engage or handler states machines.",
            rest_api_name="state_machine_api_gateway",
        )

        state_machine = _apigw.LambdaIntegration(state_machine_handler)

        state_machine_resource = api_gateway_definition.root.add_resource("sm")

        state_machine_resource.add_method("GET", state_machine)

        _dynamodb.Table(
            self,
            "solicitud_test",
            table_name="solicitud_test",
            partition_key=_dynamodb.Attribute(
                name="id", type=_dynamodb.AttributeType.STRING),
            stream=_dynamodb.StreamViewType.NEW_AND_OLD_IMAGES,
        )
    def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        queue = _sqs.Queue(self,
                           "VsamToDynamoQueue",
                           visibility_timeout=Duration.seconds(300),
                           queue_name='VsamToDynamoQueue')

        dynamoTable = _dyn.Table(
            self,
            "CLIENT",
            partition_key=_dyn.Attribute(name="CLIENT-KEY",
                                         type=_dyn.AttributeType.STRING),
            table_name="CLIENT",
        )

        # Create the Lambda function that subscribes to the SQS queue and stores each record in DynamoDB
        # The source code is in the './lambda_fns' directory
        lambda_fn = _lambda.Function(
            self,
            "SQSToDynamoFunction",
            runtime=_lambda.Runtime.PYTHON_3_9,
            handler="insertRecord.handler",
            code=_lambda.Code.from_asset("lambda_fns"),
        )

        dynamoTable.grant_write_data(lambda_fn)

        queue.grant_consume_messages(lambda_fn)
        lambda_fn.add_event_source(_event.SqsEventSource(queue))
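
A minimal sketch of what lambda_fns/insertRecord.handler could look like: consume the SQS batch and write each record into the CLIENT table. The assumption that each message body is a JSON-encoded VSAM record is illustrative; the table name and CLIENT-KEY attribute come from the stack above.

import json

import boto3

table = boto3.resource("dynamodb").Table("CLIENT")


def handler(event, context):
    for record in event["Records"]:                    # one entry per SQS message in the batch
        item = json.loads(record["body"])
        item["CLIENT-KEY"] = str(item["CLIENT-KEY"])   # partition key must be a string
        table.put_item(Item=item)
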
Example 29
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Dynamodb
        table = aws_dynamodb.Table(
            self,
            "mapping-table",
            partition_key=aws_dynamodb.Attribute(
                name="id", type=aws_dynamodb.AttributeType.STRING
            ),
        )

        # Lambda function
        function = aws_lambda.Function(
            self,
            "backend",
            runtime=aws_lambda.Runtime.PYTHON_3_7,
            handler="handler.main",
            code=aws_lambda.Code.asset("./lambda"),
        )

        # Grant permissions to our lambda function to read and write to dynamodb table
        table.grant_read_write_data(function)

        # Add dynamodb table name environment variable to be used by our lambda function
        function.add_environment("TABLE_NAME", table.table_name)

        # API to access the lambda function
        api = aws_apigateway.LambdaRestApi(self, "api", handler=function)
Example 30
    def __init__(self, scope: cdk.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Create the DynamoDB table
        partition_key = dynamodb.Attribute(name='username',
                                           type=dynamodb.AttributeType.STRING)
        self.dynamodb_table = dynamodb.Table(
            self,
            'UsersTable',
            partition_key=partition_key,
            removal_policy=cdk.RemovalPolicy.DESTROY)
        cdk.CfnOutput(self,
                      'UsersTableName',
                      value=self.dynamodb_table.table_name)

        # Create an IAM role that allows Lambda to access DynamoDB
        lambda_service_principal = iam.ServicePrincipal('lambda.amazonaws.com')
        self.api_handler_iam_role = iam.Role(
            self, 'ApiHandlerLambdaRole', assumed_by=lambda_service_principal)
        self.dynamodb_table.grant_read_write_data(self.api_handler_iam_role)

        # web_api_source_dir is the path to the Chalice application source code.
        # Chalice packages that source, generating the SAM template and the
        # ZIP archive used for the Lambda deployment.
        web_api_source_dir = os.path.join(os.path.dirname(__file__), os.pardir,
                                          os.pardir, 'web-api')
        chalice_stage_config = self._create_chalice_stage_config()

        # Create the Chalice construct
        self.chalice = Chalice(self,
                               'WebApi',
                               source_dir=web_api_source_dir,
                               stage_config=chalice_stage_config)
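
For context, a hedged sketch of what the packaged web-api Chalice application might contain; the route, environment variable name, and response shape are assumptions, while the username partition key comes from the UsersTable definition above.

import os

import boto3
from chalice import Chalice

app = Chalice(app_name="web-api")
users_table = boto3.resource("dynamodb").Table(os.environ["USERS_TABLE_NAME"])


@app.route("/users/{username}", methods=["GET"])
def get_user(username):
    response = users_table.get_item(Key={"username": username})
    return response.get("Item") or {}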