def create_dynamo_table(self):
    t = self.template
    self.dynamoTable = t.add_resource(Table(
        "moviesDBTable",
        AttributeDefinitions=[
            AttributeDefinition(AttributeName="id", AttributeType="S")
        ],
        KeySchema=[
            KeySchema(AttributeName="id", KeyType="HASH")
        ],
        ProvisionedThroughput=ProvisionedThroughput(
            ReadCapacityUnits=5,
            WriteCapacityUnits=5),
        StreamSpecification=StreamSpecification(
            StreamViewType="NEW_IMAGE")))
    t.add_output(Output(
        "TableName",
        Value=Ref(self.dynamoTable),
        Description="Table name of my new sample table"))
    t.add_output(Output(
        "StreamArn",
        Value=GetAtt(self.dynamoTable, "StreamArn")))
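# A hedged sketch, not part of the original snippet: moviesDBTable above
# exposes a NEW_IMAGE stream, and one plausible consumer is a Lambda wired up
# with troposphere's EventSourceMapping. "StreamConsumerFunction" is an
# assumed Lambda resource name, not something defined in this file.
from troposphere.awslambda import EventSourceMapping

def attach_stream_consumer(self):
    self.template.add_resource(EventSourceMapping(
        "moviesStreamMapping",
        # Same attribute the StreamArn output above exports
        EventSourceArn=GetAtt(self.dynamoTable, "StreamArn"),
        FunctionName=Ref("StreamConsumerFunction"),  # hypothetical function
        StartingPosition="TRIM_HORIZON",
        BatchSize=100,
    ))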
def add_dynamodb_terminate_db(self):
    self.terminate_dynamodb = self.template.add_resource(
        Table("terminationDB",
              TableName="terminateDB",
              TimeToLiveSpecification=TimeToLiveSpecification(
                  AttributeName="ttl",
                  Enabled=True),
              AttributeDefinitions=[
                  AttributeDefinition(AttributeName="termination",
                                      AttributeType="S"),
              ],
              KeySchema=[
                  KeySchema(AttributeName="termination", KeyType="HASH")
              ],
              ProvisionedThroughput=ProvisionedThroughput(
                  ReadCapacityUnits=2,
                  WriteCapacityUnits=1)))
def add_dynamo_db(self):
    self.dynamo_db = self.template.add_resource(Table(
        "dynamoDBTable",
        AttributeDefinitions=[
            AttributeDefinition(
                AttributeName=self.sceptre_user_data["HashKeyElementName"],
                AttributeType=self.sceptre_user_data["HashKeyElementType"]
            )
        ],
        KeySchema=[
            KeySchema(
                AttributeName=self.sceptre_user_data["HashKeyElementName"],
                KeyType="HASH"
            )
        ],
        ProvisionedThroughput=ProvisionedThroughput(
            ReadCapacityUnits=self.sceptre_user_data["ReadCapacityUnits"],
            WriteCapacityUnits=self.sceptre_user_data["WriteCapacityUnits"]
        )
    ))
def add_table(self):
    table_kwargs = {"TableName": Ref(self.table_name)}
    table_properties = self.sceptre_user_data["TableProperties"]

    # Parse the provided KeySchema
    table_kwargs.update({"KeySchema": []})
    for schema in table_properties.pop("KeySchema"):
        key_schema = KeySchema(**self.parse_key_schema(schema))
        table_kwargs["KeySchema"].append(key_schema)

    # Parse the provisioned throughput
    table_kwargs.update({
        "ProvisionedThroughput":
            ProvisionedThroughput(**table_properties["ProvisionedThroughput"])
    })

    # Parse attribute definitions. Each definition is a single-entry mapping
    # of attribute name to attribute type; use next(iter(...)) because dict
    # views are not indexable in Python 3.
    if table_properties.get("AttributeDefinitions"):
        table_kwargs.update({"AttributeDefinitions": []})
        for definition in table_properties.pop("AttributeDefinitions"):
            attribute_kwargs = {
                "AttributeName": next(iter(definition.keys())),
                "AttributeType": next(iter(definition.values()))
            }
            attribute_definition = AttributeDefinition(**attribute_kwargs)
            table_kwargs["AttributeDefinitions"].append(attribute_definition)

    # Parse the stream specification
    if table_properties.get("StreamSpecification"):
        table_kwargs.update({
            "StreamSpecification": StreamSpecification(
                **table_properties.pop("StreamSpecification"))
        })

    # Parse the time to live properties
    # if table_properties.get("TimeToLiveSpecification"):
    #     table_kwargs.update({
    #         "TimeToLiveSpecification": TimeToLiveSpecification(
    #             **table_properties.pop("TimeToLiveSpecification"))})

    # Geez that was a lot of parsing. Y u make so difficult, AWS?
    # Create the table, finally.
    dynamo_table = self.template.add_resource(
        Table("DynamoTable", **table_kwargs))
    self.template.add_output(Output("DynamoTable", Value=Ref(dynamo_table)))
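# A minimal sketch of the parse_key_schema helper that add_table above calls
# but does not define in this excerpt. The input shape (a single-entry mapping
# of attribute name to key type, e.g. {"id": "hash"}) is an assumption
# inferred from how KeySchema(**...) is invoked; adapt it to the real
# sceptre_user_data layout.
def parse_key_schema(self, schema):
    attribute_name = next(iter(schema))          # e.g. "id"
    key_type = schema[attribute_name].upper()    # "HASH" or "RANGE"
    return {"AttributeName": attribute_name, "KeyType": key_type}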
def create_dynamo_table(self):
    t = self.template
    self.dynamoTable = t.add_resource(
        Table("sampleDBTable",
              AttributeDefinitions=[
                  AttributeDefinition(AttributeName="artist",
                                      AttributeType="S"),
                  AttributeDefinition(AttributeName="album",
                                      AttributeType="S")
              ],
              KeySchema=[
                  KeySchema(AttributeName="artist", KeyType="HASH"),
                  KeySchema(AttributeName="album", KeyType="RANGE")
              ],
              ProvisionedThroughput=ProvisionedThroughput(
                  ReadCapacityUnits=5,
                  WriteCapacityUnits=5)))
    t.add_output(
        Output("TableName",
               Value=Ref(self.dynamoTable),
               Description="Table name of my new sample table"))
def create_question_table(self, table_name):
    # AttributeDefinitions may only list attributes that appear in a key
    # schema; non-key fields (question_text, question_image_url, category,
    # answers, create_date) are schemaless in DynamoDB and must not be
    # declared here, or stack creation fails.
    question_table = Table(
        table_name,
        AttributeDefinitions=[
            AttributeDefinition(AttributeName="question_id",
                                AttributeType="S")
        ],
        KeySchema=[KeySchema(AttributeName="question_id", KeyType="HASH")],
        ProvisionedThroughput=ProvisionedThroughput(
            ReadCapacityUnits=Ref(PARAMETER_READ_CAPACITY),
            WriteCapacityUnits=Ref(PARAMETER_WRITE_CAPACITY)))
    self.t.add_resource(question_table)
def create_stats_table(self, table_name):
    # As above, only key attributes belong in AttributeDefinitions; the
    # non-key fields (session_id, question_id, no_of_answers, answers,
    # create_date) are stored per item without declaration.
    stats_table = Table(
        table_name,
        AttributeDefinitions=[
            AttributeDefinition(AttributeName="stat_id", AttributeType="S")
        ],
        KeySchema=[KeySchema(AttributeName="stat_id", KeyType="HASH")],
        ProvisionedThroughput=ProvisionedThroughput(
            ReadCapacityUnits=Ref(PARAMETER_READ_CAPACITY),
            WriteCapacityUnits=Ref(PARAMETER_WRITE_CAPACITY)))
    self.t.add_resource(stats_table)
# Keyword properties replace the legacy positional Key/Projection style
# (pre-2.0 troposphere), which current releases no longer accept.
GSITable = template.add_resource(Table(
    "GSITable",
    AttributeDefinitions=[
        AttributeDefinition(AttributeName=Ref(tableIndexName),
                            AttributeType=Ref(tableIndexDataType)),
        AttributeDefinition(AttributeName=Ref(secondaryIndexHashName),
                            AttributeType=Ref(secondaryIndexHashDataType)),
        AttributeDefinition(AttributeName=Ref(secondaryIndexRangeName),
                            AttributeType=Ref(secondaryIndexRangeDataType))
    ],
    KeySchema=[
        KeySchema(AttributeName=Ref(tableIndexName), KeyType="HASH")
    ],
    ProvisionedThroughput=ProvisionedThroughput(
        ReadCapacityUnits=Ref(readunits),
        WriteCapacityUnits=Ref(writeunits)),
    GlobalSecondaryIndexes=[
        GlobalSecondaryIndex(
            IndexName="SecondaryIndex",
            KeySchema=[
                KeySchema(AttributeName=Ref(secondaryIndexHashName),
                          KeyType="HASH"),
                KeySchema(AttributeName=Ref(secondaryIndexRangeName),
                          KeyType="RANGE")
            ],
            Projection=Projection(ProjectionType="ALL"),
            ProvisionedThroughput=ProvisionedThroughput(
                ReadCapacityUnits=Ref(readunits),
                WriteCapacityUnits=Ref(writeunits)))
    ]))
Default="10", MinValue="5", MaxValue="10000", ConstraintDescription="should be between 5 and 10000" )) myDynamoDB = t.add_resource(Table( "myDynamoDBTable", AttributeDefinitions=[ AttributeDefinition( AttributeName=Ref(hashkeyname), AttributeType=Ref(hashkeytype) ), ], KeySchema=[ KeySchema( AttributeName=Ref(hashkeyname), KeyType="HASH" ) ], ProvisionedThroughput=ProvisionedThroughput( ReadCapacityUnits=Ref(readunits), WriteCapacityUnits=Ref(writeunits) ) )) t.add_output(Output( "TableName", Value=Ref(myDynamoDB), Description="Table name of the newly create DynamoDB table", ))
MaxLength="1", ConstraintDescription="S for string data, N for numeric data, or B for " "binary data")) GSITable = template.add_resource( Table("GSITable", AttributeDefinitions=[ AttributeDefinition(Ref(tableIndexName), Ref(tableIndexDataType)), AttributeDefinition(Ref(secondaryIndexHashName), Ref(secondaryIndexHashDataType)), AttributeDefinition(Ref(secondaryIndexRangeName), Ref(secondaryIndexRangeDataType)), AttributeDefinition("non_key_attribute_0", "S"), AttributeDefinition("non_key_attribute_1", "S") ], KeySchema=[Key(Ref(tableIndexName), "HASH")], ProvisionedThroughput=ProvisionedThroughput(Ref(readunits), Ref(writeunits)), GlobalSecondaryIndexes=[ GlobalSecondaryIndex( "SecondaryIndex", [ Key(Ref(secondaryIndexHashName), "HASH"), Key(Ref(secondaryIndexRangeName), "RANGE") ], Projection("INCLUDE", ["non_key_attribute_0"]), ProvisionedThroughput(Ref(readunits), Ref(writeunits))) ])) template.add_output( Output( "GSITable", Value=Ref(GSITable),
AllowedPattern="arn:aws[-a-z]*:\\S+:\\S+:\\d+:.*", Description="Kinesis StreamArn", Type="String", MinLength="37", MaxLength="1024", ConstraintDescription="must be a valid arn", )) DynamoDBTableWithKinesis = t.add_resource( Table( "DynamoDBTableWithKinesis", AttributeDefinitions=[ AttributeDefinition(AttributeName=Ref(hashkeyname), AttributeType=Ref(hashkeytype)), ], KeySchema=[KeySchema(AttributeName=Ref(hashkeyname), KeyType="HASH")], KinesisStreamSpecification=KinesisStreamSpecification( StreamArn=Ref(streamarn)), ProvisionedThroughput=ProvisionedThroughput( ReadCapacityUnits=Ref(readunits), WriteCapacityUnits=Ref(writeunits)), )) t.add_output( Output( "TableName", Value=Ref(DynamoDBTableWithKinesis), Description="Table with Kinesis Stream Specification", )) print(t.to_json())
video_table = template.add_resource(Table(
    'VideoTable',
    BillingMode='PAY_PER_REQUEST',
    AttributeDefinitions=[
        AttributeDefinition(AttributeName='videoId', AttributeType='S'),
        AttributeDefinition(AttributeName='lastModified', AttributeType='S'),
        AttributeDefinition(AttributeName='videoState', AttributeType='S'),
        AttributeDefinition(AttributeName='upvotes', AttributeType='N'),
    ],
    KeySchema=[KeySchema(
        AttributeName='videoId',
        KeyType='HASH',
    )],
    GlobalSecondaryIndexes=[
        GlobalSecondaryIndex(
            IndexName='lastModifiedInState',
            KeySchema=[
                KeySchema(AttributeName='videoState', KeyType='HASH'),
                KeySchema(AttributeName='lastModified', KeyType='RANGE'),
            ],
            Projection=Projection(ProjectionType='ALL'),
        ),
        GlobalSecondaryIndex(
            IndexName='upvotedInState',
            KeySchema=[
                KeySchema(AttributeName='videoState', KeyType='HASH'),
                KeySchema(AttributeName='upvotes', KeyType='RANGE'),
            ],
            Projection=Projection(ProjectionType='ALL'),
        ),
    ],
))
    AllocatedStorage='5',
    DBInstanceClass='db.t2.micro',
    Engine='MySQL',
    EngineVersion='5.5',
    MasterUsername=Ref(dbUser),
    MasterUserPassword=Ref(dbPassword),
    DBSubnetGroupName=Ref(dbSubnetGroup),
    VPCSecurityGroups=[Ref(instanceSecurityGroup)])

dbInstance = t.add_resource(dbInstanceParam)

dynamoDbInstance = t.add_resource(
    Table("TestDynamoDB",
          KeySchema=[KeySchema(AttributeName='GameId', KeyType="HASH")],
          AttributeDefinitions=[
              AttributeDefinition(AttributeName='GameId', AttributeType='N')
          ],
          ProvisionedThroughput=ProvisionedThroughput(
              ReadCapacityUnits=1,
              WriteCapacityUnits=1)))

redisClusterSg = t.add_resource(
    elasticache.SecurityGroup('TestRedisSG',
                              Description='redis security group'))
t.add_resource(
    elasticache.SecurityGroupIngress(
        'TestSGIngress',
        CacheSecurityGroupName=Ref(redisClusterSg),
        EC2SecurityGroupName=Ref(instanceSecurityGroup)))
t.add_resource(
Table(
    "GSITable",
    AttributeDefinitions=[
        AttributeDefinition(AttributeName=Ref(tableIndexName),
                            AttributeType=Ref(tableIndexDataType)),
        AttributeDefinition(
            AttributeName=Ref(secondaryIndexHashName),
            AttributeType=Ref(secondaryIndexHashDataType),
        ),
        AttributeDefinition(
            AttributeName=Ref(secondaryIndexRangeName),
            AttributeType=Ref(secondaryIndexRangeDataType),
        ),
    ],
    KeySchema=[
        KeySchema(AttributeName=Ref(tableIndexName), KeyType="HASH")
    ],
    ProvisionedThroughput=ProvisionedThroughput(
        ReadCapacityUnits=Ref(readunits),
        WriteCapacityUnits=Ref(writeunits)),
    GlobalSecondaryIndexes=[
        GlobalSecondaryIndex(
            IndexName="SecondaryIndex",
            KeySchema=[
                KeySchema(AttributeName=Ref(secondaryIndexHashName),
                          KeyType="HASH"),
                KeySchema(AttributeName=Ref(secondaryIndexRangeName),
                          KeyType="RANGE"),
            ],
            Projection=Projection(ProjectionType="ALL"),
            ProvisionedThroughput=ProvisionedThroughput(
                ReadCapacityUnits=Ref(readunits),
                WriteCapacityUnits=Ref(writeunits)),
        )
    ],
)
Table(
    "TestLocalSecondaryIndexTable",
    KeySchema=[
        KeySchema(AttributeName="GameId", KeyType="HASH"),
        KeySchema(AttributeName="GameType", KeyType="RANGE")
    ],
    AttributeDefinitions=[
        AttributeDefinition(AttributeName="GameId", AttributeType="N"),
        AttributeDefinition(AttributeName="GameType", AttributeType="N"),
        AttributeDefinition(AttributeName="SeasonId", AttributeType="N"),
    ],
    ProvisionedThroughput=ProvisionedThroughput(
        ReadCapacityUnits=1,
        WriteCapacityUnits=1),
    LocalSecondaryIndexes=[
        LocalSecondaryIndex(
            IndexName="LSISeasonId",
            KeySchema=[
                KeySchema(AttributeName="GameId", KeyType="HASH"),
                KeySchema(AttributeName="SeasonId", KeyType="RANGE")
            ],
            Projection=Projection(ProjectionType="ALL"))
    ]
)
def create_template():
    t = Template()
    t.add_description("The DynamoDB tables stack for CBuildCI.")

    p_config_table_name = t.add_parameter(Parameter(
        "ConfigTableName",
        Type="String",
    ))
    p_locks_table_name = t.add_parameter(Parameter(
        "LocksTableName",
        Type="String",
    ))
    p_sessions_table_name = t.add_parameter(Parameter(
        "SessionsTableName",
        Type="String",
    ))
    p_executions_table_name = t.add_parameter(Parameter(
        "ExecutionsTableName",
        Type="String",
    ))
    p_config_table_rcu = t.add_parameter(Parameter(
        "ConfigTableRCU",
        Type="Number",
        Default="5",
    ))
    p_config_table_wcu = t.add_parameter(Parameter(
        "ConfigTableWCU",
        Type="Number",
        Default="1",
    ))
    p_locks_table_rcu = t.add_parameter(Parameter(
        "LocksTableRCU",
        Type="Number",
        Default="5",
    ))
    p_locks_table_wcu = t.add_parameter(Parameter(
        "LocksTableWCU",
        Type="Number",
        Default="2",
    ))
    p_sessions_table_rcu = t.add_parameter(Parameter(
        "SessionsTableRCU",
        Type="Number",
        Default="5",
    ))
    p_sessions_table_wcu = t.add_parameter(Parameter(
        "SessionsTableWCU",
        Type="Number",
        Default="2",
    ))
    p_executions_table_rcu = t.add_parameter(Parameter(
        "ExecutionsTableRCU",
        Type="Number",
        Default="15",
    ))
    p_executions_table_wcu = t.add_parameter(Parameter(
        "ExecutionsTableWCU",
        Type="Number",
        Default="5",
    ))
    p_executions_search_indexes_rcu = t.add_parameter(Parameter(
        "ExecutionsSearchIndexesRCU",
        Type="Number",
        Default="5",
    ))
    p_executions_search_indexes_wcu = t.add_parameter(Parameter(
        "ExecutionsSearchIndexesWCU",
        Type="Number",
        Default="1",
    ))

    # Replace with custom tags if desired.
    tags = build_tags_list(t)

    t.add_resource(Table(
        "ConfigDBTable",
        DeletionPolicy="Retain",
        TableName=Ref(p_config_table_name),
        KeySchema=[
            KeySchema(KeyType="HASH", AttributeName="id"),
        ],
        AttributeDefinitions=[
            AttributeDefinition(AttributeName="id", AttributeType="S"),
        ],
        ProvisionedThroughput=ProvisionedThroughput(
            ReadCapacityUnits=Ref(p_config_table_rcu),
            WriteCapacityUnits=Ref(p_config_table_wcu)
        ),
        Tags=tags,
    ))

    t.add_resource(Table(
        "LocksTable",
        DeletionPolicy="Retain",
        TableName=Ref(p_locks_table_name),
        KeySchema=[
            KeySchema(KeyType="HASH", AttributeName="id"),
        ],
        AttributeDefinitions=[
            AttributeDefinition(AttributeName="id", AttributeType="S"),
        ],
        ProvisionedThroughput=ProvisionedThroughput(
            ReadCapacityUnits=Ref(p_locks_table_rcu),
            WriteCapacityUnits=Ref(p_locks_table_wcu)
        ),
        Tags=tags,
    ))

    t.add_resource(Table(
        "SessionsTable",
        DeletionPolicy="Retain",
        TableName=Ref(p_sessions_table_name),
        KeySchema=[
            KeySchema(KeyType="HASH", AttributeName="id"),
        ],
        AttributeDefinitions=[
            AttributeDefinition(AttributeName="id", AttributeType="S"),
        ],
        ProvisionedThroughput=ProvisionedThroughput(
            ReadCapacityUnits=Ref(p_sessions_table_rcu),
            WriteCapacityUnits=Ref(p_sessions_table_wcu)
        ),
        TimeToLiveSpecification=TimeToLiveSpecification(
            Enabled=True,
            AttributeName="ttlTime",
        ),
        Tags=tags,
    ))

    t.add_resource(Table(
        "ExecutionsTable",
        DeletionPolicy="Retain",
        TableName=Ref(p_executions_table_name),
        KeySchema=[
            KeySchema(KeyType="HASH", AttributeName="repoId"),
            KeySchema(KeyType="RANGE", AttributeName="executionId"),
        ],
        AttributeDefinitions=[
            AttributeDefinition(AttributeName="repoId", AttributeType="S"),
            AttributeDefinition(AttributeName="executionId", AttributeType="S"),
            AttributeDefinition(AttributeName="createTime", AttributeType="S"),
        ],
        ProvisionedThroughput=ProvisionedThroughput(
            ReadCapacityUnits=Ref(p_executions_table_rcu),
            WriteCapacityUnits=Ref(p_executions_table_wcu)
        ),
        Tags=tags,
        GlobalSecondaryIndexes=[
            GlobalSecondaryIndex(
                IndexName="search-repoId-createTime-index",
                KeySchema=[
                    KeySchema(KeyType="HASH", AttributeName="repoId"),
                    KeySchema(KeyType="RANGE", AttributeName="createTime"),
                ],
                Projection=Projection(
                    NonKeyAttributes=[
                        "repoId",
                        "executionId",
                        "createTime",
                        "updateTime",
                        "status",
                        "conclusion",
                        "conclusionTime",
                        "meta",
                    ],
                    ProjectionType="INCLUDE",
                ),
                ProvisionedThroughput=ProvisionedThroughput(
                    ReadCapacityUnits=Ref(p_executions_search_indexes_rcu),
                    WriteCapacityUnits=Ref(p_executions_search_indexes_wcu),
                ),
            ),
            GlobalSecondaryIndex(
                IndexName="search-repoId-executionId-index",
                KeySchema=[
                    KeySchema(KeyType="HASH", AttributeName="repoId"),
                    KeySchema(KeyType="RANGE", AttributeName="executionId"),
                ],
                Projection=Projection(
                    NonKeyAttributes=[
                        "repoId",
                        "executionId",
                        "createTime",
                        "updateTime",
                        "status",
                        "conclusion",
                        "conclusionTime",
                        "meta",
                    ],
                    ProjectionType="INCLUDE",
                ),
                ProvisionedThroughput=ProvisionedThroughput(
                    ReadCapacityUnits=Ref(p_executions_search_indexes_rcu),
                    WriteCapacityUnits=Ref(p_executions_search_indexes_wcu),
                ),
            ),
        ],
    ))

    return t
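# Hedged usage sketch: create_template() above returns a plain troposphere
# Template, so rendering it follows the same print(...to_json()) pattern the
# standalone examples in this collection use.
if __name__ == "__main__":
    print(create_template().to_json())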
for queue in [inbound, outbound, addqueue, mirrorqueue]:
    t.add_output([
        Output("{}QueueURL".format(queue.title),
               Description="{} SQS Queue URL".format(queue.title),
               Value=Ref(queue)),
        Output("{}QueueARN".format(queue.title),
               Description="ARN of {} SQS Queue".format(queue.title),
               Value=GetAtt(queue, "Arn")),
    ])

dev_db = t.add_resource(
    Table("DevNetKANStatus",
          AttributeDefinitions=[
              AttributeDefinition(AttributeName="ModIdentifier",
                                  AttributeType="S"),
          ],
          KeySchema=[KeySchema(AttributeName="ModIdentifier",
                               KeyType="HASH")],
          TableName="DevNetKANStatus",
          ProvisionedThroughput=ProvisionedThroughput(
              ReadCapacityUnits=5,
              WriteCapacityUnits=5)))
t.add_output(
    Output(
        "TableName",
        Value=Ref(dev_db),
        Description="Table name of the newly created DynamoDB table",
    ))
t.add_resource(
    PolicyType("DbDevPolicies",
               PolicyName="DbDevUsers",
from troposphere import Output, Ref, Template
from troposphere.dynamodb import (AttributeDefinition, KeySchema,
                                  ProvisionedThroughput, Table)

t = Template()
t.set_description("AWS CloudFormation template for creating db for products")

myDynamoDB = t.add_resource(
    Table("products",
          AttributeDefinitions=[
              AttributeDefinition(AttributeName="productid",
                                  AttributeType="S")
          ],
          KeySchema=[KeySchema(AttributeName="productid", KeyType="HASH")],
          ProvisionedThroughput=ProvisionedThroughput(
              ReadCapacityUnits=5,
              WriteCapacityUnits=5)))

t.add_output(Output("products", Value=Ref(myDynamoDB)))

print(t.to_json())
# Events table
video_events_table = template.add_resource(
    Table(
        'VideoEventsTable',
        BillingMode='PAY_PER_REQUEST',
        AttributeDefinitions=[
            AttributeDefinition(AttributeName='videoId', AttributeType='S'),
            AttributeDefinition(AttributeName='timestamp', AttributeType='S'),
        ],
        KeySchema=[
            KeySchema(AttributeName='videoId', KeyType='HASH'),
            KeySchema(AttributeName='timestamp', KeyType='RANGE'),
        ],
        StreamSpecification=StreamSpecification(StreamViewType='NEW_IMAGE'),
    ))

# Managed policies
lambda_managed_policy = template.add_resource(
])

#####################################################################################################################
# API Gateway
#####################################################################################################################
rest_api = t.add_resource(
    RestApi("api", Name="{}-{}".format(env_l, app_group_l)))

#####################################################################################################################
# DynamoDB table
#####################################################################################################################
myDynamoDB = t.add_resource(
    Table("myDynamoDBTable",
          TableName='counters',
          AttributeDefinitions=[
              AttributeDefinition(AttributeName='website', AttributeType='S')
          ],
          KeySchema=[KeySchema(AttributeName='website', KeyType='HASH')],
          ProvisionedThroughput=ProvisionedThroughput(
              ReadCapacityUnits=readunits,
              WriteCapacityUnits=writeunits)))

#####################################################################################################################
# Lambda
#####################################################################################################################
# Create a Lambda function that will be mapped
code = [
    "var response = require('cfn-response');",
    "exports.handler = function(event, context) {",
    " context.succeed('foobar!');",
    " return 'foobar!';",
    "};",
]
def add_dynamodb(template: Template) -> Table:
    hash_key_name = template.add_parameter(
        Parameter(
            "HashKeyElementName",
            Description="HashType PrimaryKey Name",
            Type="String",
            AllowedPattern="[a-zA-Z0-9]*",
            MinLength="1",
            MaxLength="2048",
            Default="date",
            ConstraintDescription="must contain only alphanumeric characters"))
    hash_key_type = template.add_parameter(
        Parameter("HashKeyElementType",
                  Description="HashType PrimaryKey Type",
                  Type="String",
                  Default="S",
                  AllowedPattern="[S|N]",
                  MinLength="1",
                  MaxLength="10",
                  ConstraintDescription="must be either S or N"))
    read_units = template.add_parameter(
        Parameter("ReadCapacityUnits",
                  Description="Provisioned read throughput",
                  Type="Number",
                  Default="5",
                  MinValue="5",
                  MaxValue="10000",
                  ConstraintDescription="should be between 5 and 10000"))
    write_units = template.add_parameter(
        Parameter("WriteCapacityUnits",
                  Description="Provisioned write throughput",
                  Type="Number",
                  Default="10",
                  MinValue="5",
                  MaxValue="10000",
                  ConstraintDescription="should be between 5 and 10000"))
    my_dynamodb = template.add_resource(
        Table("myDynamoDBTable",
              AttributeDefinitions=[
                  AttributeDefinition(AttributeName=Ref(hash_key_name),
                                      AttributeType=Ref(hash_key_type)),
              ],
              KeySchema=[
                  KeySchema(AttributeName=Ref(hash_key_name), KeyType="HASH")
              ],
              ProvisionedThroughput=ProvisionedThroughput(
                  ReadCapacityUnits=Ref(read_units),
                  WriteCapacityUnits=Ref(write_units))))
    template.add_output(
        Output(
            "TableName",
            Value=Ref(my_dynamodb),
            Description="Table name of the newly created DynamoDB table",
        ))
    return my_dynamodb
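# Hedged usage sketch for add_dynamodb above: it only needs an existing
# Template, so a throwaway driver like this (names illustrative) exercises it
# end to end.
if __name__ == "__main__":
    t = Template()
    add_dynamodb(t)
    print(t.to_json())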
all_videos_lambda_code_key = template.add_parameter(Parameter(
    'AllVideos',
    Type=constants.STRING,
    Default='lambda-code/admin/all_admin_videos.zip',
))
template.add_parameter_to_group(consume_events_code_key, 'Lambda Keys')
template.add_parameter_to_group(all_videos_lambda_code_key, 'Lambda Keys')

video_table = template.add_resource(Table(
    'VideoTable',
    BillingMode='PAY_PER_REQUEST',
    AttributeDefinitions=[AttributeDefinition(
        AttributeName='videoId',
        AttributeType='S',
    )],
    KeySchema=[KeySchema(
        AttributeName='videoId',
        KeyType='HASH',
    )],
))

consume_events_role = template.add_resource(Role(
    'ConsumeEventsRole',
    Path="/",
    AssumeRolePolicyDocument={
        "Version": "2012-10-17",
        "Statement": [{
            "Action": ["sts:AssumeRole"],
            "Effect": "Allow",
            "Principal": {"Service": ["lambda.amazonaws.com"]},
readunits = t.add_parameter(
    Parameter("ReadCapacityUnits",
              Description="Provisioned read throughput",
              Type="Number",
              Default="1",
              MinValue="1",
              MaxValue="1",
              ConstraintDescription="should be between 1 and 5"))
writeunits = t.add_parameter(
    Parameter("WriteCapacityUnits",
              Description="Provisioned write throughput",
              Type="Number",
              Default="1",
              MinValue="1",
              MaxValue="1",
              ConstraintDescription="should be between 1 and 5"))

t.add_resource(
    Table(
        "LambdaAppDynamoDB",
        AttributeDefinitions=[
            AttributeDefinition(AttributeName=Ref(hashkeyname),
                                AttributeType=Ref(hashkeytype))
        ],
        KeySchema=[KeySchema(AttributeName=Ref(hashkeyname),
                             KeyType="HASH")],
        ProvisionedThroughput=ProvisionedThroughput(
            ReadCapacityUnits=Ref(readunits),
            WriteCapacityUnits=Ref(writeunits))))
Description="{} SQS Queue URL".format(queue.title), Value=Ref(queue)), Output("{}QueueARN".format(queue.title), Description="ARN of {} SQS Queue".format(queue.title), Value=GetAtt(queue, "Arn")), ]) # DyanamoDB: NetKAN Status netkan_db = t.add_resource( Table( "NetKANStatus", AttributeDefinitions=[ AttributeDefinition(AttributeName="ModIdentifier", AttributeType="S"), ], KeySchema=[KeySchema(AttributeName="ModIdentifier", KeyType="HASH")], TableName="NetKANStatus", ProvisionedThroughput=ProvisionedThroughput( # The free tier allows for 25 R/W Capacity Units # 5 allocated already for dev testing ReadCapacityUnits=20, WriteCapacityUnits=20))) t.add_output( Output( "TableName", Value=Ref(netkan_db), Description="Table name of the newly create DynamoDB table", )) # Instance Role for Prod Indexing Instance to be able to
# FromEmail parameter
from_email_param = t.add_parameter(
    Parameter(
        'FromEmail',
        ConstraintDescription='Must be an email address',
        Description='Email address that is verified to send outbound SES emails',
        Type="String",
        AllowedPattern=r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)"))

## DynamoDB table to hold email addresses
email_table = t.add_resource(
    Table("emailListTable",
          AttributeDefinitions=[
              AttributeDefinition(AttributeName="email", AttributeType="S"),
          ],
          KeySchema=[KeySchema(AttributeName="email", KeyType="HASH")],
          ProvisionedThroughput=ProvisionedThroughput(
              ReadCapacityUnits=5,
              WriteCapacityUnits=5),
          TableName=dynamo_table_name))
t.add_output(
    Output(
        "emailListTableName",
        Value=Ref(email_table),
        Description="Table Name",
    ))

# Policies
allow_db_access_policy = t.add_resource(
    ManagedPolicy(
        "AllowDynamoEmailList",
Type="Number", Default="5", MinValue="5", MaxValue="10000", ConstraintDescription="should be between 5 and 10000")) segmentDynamoDBTable = t.add_resource( Table("segmentsDynamoDBTable", AttributeDefinitions=[ AttributeDefinition(AttributeName=Ref(segmentTableHashkeyName), AttributeType=Ref(segmentTableHashkeyType)), ], KeySchema=[ KeySchema(AttributeName=Ref(segmentTableHashkeyName), KeyType="HASH") ], ProvisionedThroughput=ProvisionedThroughput( ReadCapacityUnits=Ref(readunits), WriteCapacityUnits=Ref(writeunits)), PointInTimeRecoverySpecification=PointInTimeRecoverySpecification( PointInTimeRecoveryEnabled=True), Tags=Tags(app="gap-fill-evaluation", stage=Ref(stage)), TableName=Join( "-", ["SegmentsDynamoDBTable", Ref(stage)]))) segmentSetsDynamoDBTable = t.add_resource( Table("segmentSetsDynamoDBTable", AttributeDefinitions=[ AttributeDefinition( AttributeName=Ref(segmentSetsTableHashkeyName), AttributeType=Ref(segmentSetsTableHashkeyType)),
Description="Secondary Index: Range Key Data Type", Type="String", Default="S", AllowedPattern="[S|N|B]", MinLength="1", MaxLength="1", ConstraintDescription="S for string data, N for numeric data, or B for " "binary data")) myDynamoDB = template.add_resource( Table( "myDynamoDBTable", AttributeDefinitions=[ AttributeDefinition(AttributeName=Ref(hashkeyname), AttributeType=Ref(hashkeytype)), ], BillingMode=If("OnDemand", "PAY_PER_REQUEST", "PROVISIONED"), ProvisionedThroughput=If( "OnDemand", NoValue, ProvisionedThroughput(ReadCapacityUnits=Ref(readunits), WriteCapacityUnits=Ref(writeunits))), KeySchema=[KeySchema(AttributeName=Ref(hashkeyname), KeyType="HASH")])) GSITable = template.add_resource( Table( "GSITable", AttributeDefinitions=[ AttributeDefinition(AttributeName=Ref(tableIndexName), AttributeType=Ref(tableIndexDataType)), AttributeDefinition(AttributeName=Ref(secondaryIndexHashName), AttributeType=Ref(secondaryIndexHashDataType)), AttributeDefinition(AttributeName=Ref(secondaryIndexRangeName),
from troposphere import Template
from troposphere.dynamodb import (AttributeDefinition, KeySchema,
                                  ProvisionedThroughput, Table)

t = Template()

# The attribute referenced in KeySchema must also appear in
# AttributeDefinitions (the original mixed "UserName" and "UserId", which
# CloudFormation rejects), so both now use "UserId".
testDB = t.add_resource(
    Table(
        "testDynamoDBTable",
        KeySchema=[
            KeySchema(AttributeName="UserId", KeyType="HASH")
        ],
        AttributeDefinitions=[
            AttributeDefinition(AttributeName="UserId", AttributeType="N")
        ],
        ProvisionedThroughput=ProvisionedThroughput(
            ReadCapacityUnits=1,
            WriteCapacityUnits=1)))

print(t.to_json())
MinValue="5", MaxValue="10000", ConstraintDescription="should be between 5 and 10000")) writeunits = t.add_parameter( Parameter("WriteCapacityUnits", Description="Provisioned write throughput", Type="Number", Default="10", MinValue="5", MaxValue="10000", ConstraintDescription="should be between 5 and 10000")) myDynamoDB = t.add_resource( Table("myDynamoDBTable", AttributeDefinitions=[ AttributeDefinition(Ref(hashkeyname), Ref(hashkeytype)), ], KeySchema=[Key(Ref(hashkeyname), "HASH")], ProvisionedThroughput=ProvisionedThroughput(Ref(readunits), Ref(writeunits)))) t.add_output( Output( "TableName", Value=Ref(myDynamoDB), Description="Table name of the newly create DynamoDB table", )) print(t.to_json())
def _deploy_service(self, service: ff.Service):
    context = self._context_map.get_context(service.name)
    if self._aws_config.get('image_uri') is None:
        self._package_and_deploy_code(context)

    template = Template()
    template.set_version('2010-09-09')

    memory_size = template.add_parameter(
        Parameter(f'{self._lambda_resource_name(service.name)}MemorySize',
                  Type=NUMBER,
                  Default=self._aws_config.get('memory_sync', '3008')))
    timeout_gateway = template.add_parameter(
        Parameter(
            f'{self._lambda_resource_name(service.name)}GatewayTimeout',
            Type=NUMBER,
            Default='30'))
    timeout_async = template.add_parameter(
        Parameter(
            f'{self._lambda_resource_name(service.name)}AsyncTimeout',
            Type=NUMBER,
            Default='900'))

    role_title = f'{self._lambda_resource_name(service.name)}ExecutionRole'
    role = self._add_role(role_title, template)

    params = {
        'FunctionName': f'{self._service_name(service.name)}Sync',
        'Role': GetAtt(role_title, 'Arn'),
        'MemorySize': Ref(memory_size),
        'Timeout': Ref(timeout_gateway),
        'Environment': self._lambda_environment(context)
    }
    image_uri = self._aws_config.get('image_uri')
    if image_uri is not None:
        params.update({
            'Code': Code(ImageUri=image_uri),
            'PackageType': 'Image',
        })
    else:
        params.update({
            'Code': Code(S3Bucket=self._bucket, S3Key=self._code_key),
            'Runtime': 'python3.7',
            'Handler': 'handlers.main',
        })
    if self._security_group_ids and self._subnet_ids:
        params['VpcConfig'] = VPCConfig(
            SecurityGroupIds=self._security_group_ids,
            SubnetIds=self._subnet_ids)
    api_lambda = template.add_resource(
        Function(f'{self._lambda_resource_name(service.name)}Sync',
                 **params))

    route = inflection.dasherize(context.name)
    proxy_route = f'{route}/{{proxy+}}'
    template.add_resource(
        Permission(
            f'{self._lambda_resource_name(service.name)}SyncPermission',
            Action='lambda:InvokeFunction',
            FunctionName=f'{self._service_name(service.name)}Sync',
            Principal='apigateway.amazonaws.com',
            SourceArn=Join('', [
                'arn:aws:execute-api:', self._region, ':',
                self._account_id, ':',
                ImportValue(self._rest_api_reference()), '/*/*/', route, '*'
            ]),
            DependsOn=api_lambda))

    if self._adaptive_memory:
        value = '3008' if not self._adaptive_memory else '256'
        try:
            value = int(self._aws_config.get('memory_async'))
        # int(None) raises TypeError rather than ValueError, so catch both
        # to fall back to the default when memory_async is unset.
        except (TypeError, ValueError):
            pass
        memory_size = template.add_parameter(
            Parameter(
                f'{self._lambda_resource_name(service.name)}MemorySizeAsync',
                Type=NUMBER,
                Default=value))

    params = {
        'FunctionName': self._lambda_function_name(service.name, 'Async'),
        'Role': GetAtt(role_title, 'Arn'),
        'MemorySize': Ref(memory_size),
        'Timeout': Ref(timeout_async),
        'Environment': self._lambda_environment(context)
    }
    if image_uri is not None:
        params.update({
            'Code': Code(ImageUri=image_uri),
            'PackageType': 'Image',
        })
    else:
        params.update({
            'Code': Code(S3Bucket=self._bucket, S3Key=self._code_key),
            'Runtime': 'python3.7',
            'Handler': 'handlers.main',
        })
    if self._security_group_ids and self._subnet_ids:
        params['VpcConfig'] = VPCConfig(
            SecurityGroupIds=self._security_group_ids,
            SubnetIds=self._subnet_ids)
    async_lambda = template.add_resource(
        Function(self._lambda_resource_name(service.name, type_='Async'),
                 **params))

    if self._adaptive_memory:
        self._add_adaptive_memory_functions(template, context, timeout_async,
                                            role_title, async_lambda)
        # self._add_adaptive_memory_streams(template, context, async_lambda, role)

    # Timers
    for cls, _ in context.command_handlers.items():
        if cls.has_timer():
            timer = cls.get_timer()
            if timer.environment is not None and timer.environment != self._env:
                continue
            if isinstance(timer.command, str):
                timer_name = timer.command
            else:
                timer_name = timer.command.__name__

            target = Target(
                f'{self._service_name(service.name)}AsyncTarget',
                Arn=GetAtt(
                    self._lambda_resource_name(service.name, type_='Async'),
                    'Arn'),
                Id=self._lambda_resource_name(service.name, type_='Async'),
                Input=f'{{"_context": "{context.name}", "_type": "command", '
                      f'"_name": "{cls.__name__}"}}'
            )
            rule = template.add_resource(
                Rule(f'{timer_name}TimerRule',
                     ScheduleExpression=f'cron({timer.cron})',
                     State='ENABLED',
                     Targets=[target]))
            template.add_resource(
                Permission(f'{timer_name}TimerPermission',
                           Action='lambda:InvokeFunction',
                           Principal='events.amazonaws.com',
                           FunctionName=Ref(async_lambda),
                           SourceArn=GetAtt(rule, 'Arn')))

    integration = template.add_resource(
        Integration(
            self._integration_name(context.name),
            ApiId=ImportValue(self._rest_api_reference()),
            PayloadFormatVersion='2.0',
            IntegrationType='AWS_PROXY',
            IntegrationUri=Join('', [
                'arn:aws:lambda:', self._region, ':', self._account_id,
                ':function:', Ref(api_lambda),
            ]),
        ))
    template.add_resource(
        Route(f'{self._route_name(context.name)}Base',
              ApiId=ImportValue(self._rest_api_reference()),
              RouteKey=f'ANY /{route}',
              AuthorizationType='NONE',
              Target=Join('/', ['integrations', Ref(integration)]),
              DependsOn=integration))
    template.add_resource(
        Route(f'{self._route_name(context.name)}Proxy',
              ApiId=ImportValue(self._rest_api_reference()),
              RouteKey=f'ANY /{proxy_route}',
              AuthorizationType='NONE',
              Target=Join('/', ['integrations', Ref(integration)]),
              DependsOn=integration))

    # Error alarms / subscriptions
    if 'errors' in self._aws_config:
        alerts_topic = template.add_resource(
            Topic(self._alert_topic_name(service.name),
                  TopicName=self._alert_topic_name(service.name)))
        if 'email' in self._aws_config.get('errors'):
            recipients = self._aws_config.get('errors').get('email') \
                .get('recipients').split(',')
            for address in recipients:
                template.add_resource(
                    SubscriptionResource(
                        self._alarm_subscription_name(context.name),
                        Protocol='email',
                        Endpoint=address,
                        TopicArn=self._alert_topic_arn(context.name),
                        DependsOn=[alerts_topic]))

    # Queues / Topics
    subscriptions = {}
    for subscription in self._get_subscriptions(context):
        if subscription['context'] not in subscriptions:
            subscriptions[subscription['context']] = []
        subscriptions[subscription['context']].append(subscription)

    dlq = template.add_resource(
        Queue(f'{self._queue_name(context.name)}Dlq',
              QueueName=f'{self._queue_name(context.name)}Dlq',
              VisibilityTimeout=905,
              ReceiveMessageWaitTimeSeconds=20,
              MessageRetentionPeriod=1209600))
    self._queue_policy(template, dlq,
                       f'{self._queue_name(context.name)}Dlq', subscriptions)

    queue = template.add_resource(
        Queue(self._queue_name(context.name),
              QueueName=self._queue_name(context.name),
              VisibilityTimeout=905,
              ReceiveMessageWaitTimeSeconds=20,
              MessageRetentionPeriod=1209600,
              RedrivePolicy=RedrivePolicy(
                  deadLetterTargetArn=GetAtt(dlq, 'Arn'),
                  maxReceiveCount=1000),
              DependsOn=dlq))
    self._queue_policy(template, queue, self._queue_name(context.name),
                       subscriptions)

    template.add_resource(
        EventSourceMapping(
            f'{self._lambda_resource_name(context.name)}AsyncMapping',
            BatchSize=1,
            Enabled=True,
            EventSourceArn=GetAtt(queue, 'Arn'),
            FunctionName=self._lambda_function_name(service.name, 'Async'),
            DependsOn=[queue, async_lambda]))
    topic = template.add_resource(
        Topic(self._topic_name(context.name),
              TopicName=self._topic_name(context.name)))

    for context_name, list_ in subscriptions.items():
        if context_name == context.name and len(list_) > 0:
            template.add_resource(
                SubscriptionResource(
                    self._subscription_name(context_name),
                    Protocol='sqs',
                    Endpoint=GetAtt(queue, 'Arn'),
                    TopicArn=self._topic_arn(context.name),
                    FilterPolicy={
                        '_name': [x['name'] for x in list_],
                    },
                    RedrivePolicy={
                        'deadLetterTargetArn': GetAtt(dlq, 'Arn'),
                    },
                    DependsOn=[queue, dlq, topic]))
        elif len(list_) > 0:
            if context_name not in self._context_map.contexts:
                self._find_or_create_topic(context_name)
            template.add_resource(
                SubscriptionResource(
                    self._subscription_name(context.name, context_name),
                    Protocol='sqs',
                    Endpoint=GetAtt(queue, 'Arn'),
                    TopicArn=self._topic_arn(context_name),
                    FilterPolicy={'_name': [x['name'] for x in list_]},
                    RedrivePolicy={
                        'deadLetterTargetArn': GetAtt(dlq, 'Arn'),
                    },
                    DependsOn=[queue, dlq]))

    # DynamoDB Table
    ddb_table = template.add_resource(
        Table(self._ddb_resource_name(context.name),
              TableName=self._ddb_table_name(context.name),
              AttributeDefinitions=[
                  AttributeDefinition(AttributeName='pk', AttributeType='S'),
                  AttributeDefinition(AttributeName='sk', AttributeType='S'),
              ],
              BillingMode='PAY_PER_REQUEST',
              KeySchema=[
                  KeySchema(AttributeName='pk', KeyType='HASH'),
                  KeySchema(AttributeName='sk', KeyType='RANGE'),
              ],
              TimeToLiveSpecification=TimeToLiveSpecification(
                  AttributeName='TimeToLive', Enabled=True)))
    template.add_output(
        Output("DDBTable", Value=Ref(ddb_table),
               Description="Document table"))

    for cb in self._pre_deployment_hooks:
        cb(template=template, context=context, env=self._env)

    self.info('Deploying stack')
    self._s3_client.put_object(Body=template.to_json(),
                               Bucket=self._bucket,
                               Key=self._template_key)
    url = self._s3_client.generate_presigned_url(
        ClientMethod='get_object',
        Params={
            'Bucket': self._bucket,
            'Key': self._template_key
        })

    stack_name = self._stack_name(context.name)
    try:
        self._cloudformation_client.describe_stacks(StackName=stack_name)
        self._update_stack(self._stack_name(context.name), url)
    except ClientError as e:
        if f'Stack with id {stack_name} does not exist' in str(e):
            self._create_stack(self._stack_name(context.name), url)
        else:
            raise e

    for cb in self._post_deployment_hooks:
        cb(template=template, context=context, env=self._env)

    self._migrate_schema(context)
    self.info('Done')