def get_cloudformation_template(self, lambda_filename):
    """Build a SAM/CloudFormation template for this bot's Lambda function.

    Creates one serverless ``Function`` resource pointing at the packaged
    code in S3, plus one ``lambda:InvokeFunction`` Permission per Lex
    intent so the Lex service may invoke the function.

    :param lambda_filename: S3 key of the packaged Lambda ZIP inside
        ``self.s3_bucket_name``.
    :return: a ``troposphere.Template`` ready to be serialized/deployed.
    """
    # BUG FIX: Ref, AWS_REGION and AWS_ACCOUNT_ID were missing from this
    # import, causing a NameError when building the Permission SourceArn.
    from troposphere import (
        AWS_ACCOUNT_ID, AWS_REGION, GetAtt, Join, Ref, Template,
    )
    from troposphere.awslambda import Environment, Permission
    from troposphere.serverless import Function

    t = Template()
    t.add_description("Built with WavyCloud's pylexbuilder")
    # The SAM transform enables the AWS::Serverless::* resource types.
    t.add_transform('AWS::Serverless-2016-10-31')

    lambda_func = t.add_resource(
        Function(
            self.name,
            Handler='handler.index',
            Runtime=self.runtime,
            CodeUri='s3://{}/{}'.format(self.s3_bucket_name, lambda_filename),
            Policies=['AmazonDynamoDBFullAccess', 'AmazonLexFullAccess'],
            AutoPublishAlias=self.lambda_alias,
            Environment=Environment(Variables=self.environment_variables),
        ),
    )
    # Grant each Lex intent permission to invoke this function.
    for intent in self.get_all_intents():
        t.add_resource(
            Permission(
                "PermissionToLex{}".format(intent.name),
                FunctionName=GetAtt(lambda_func, "Arn"),
                Action="lambda:InvokeFunction",
                Principal="lex.amazonaws.com",
                SourceArn=Join("", [
                    'arn:aws:lex:',
                    Ref(AWS_REGION),
                    ':',
                    Ref(AWS_ACCOUNT_ID),
                    ':intent:{}:*'.format(intent.name),
                ]),
            ))
    return t
def test_transform(self):
    """A transform added via add_transform is exposed as Template.transform."""
    expected = 'AWS::Serverless-2016-10-31'
    t = Template()
    t.add_transform(expected)
    self.assertEqual(t.transform, expected)
# Converted from s3_processor located at: # https://github.com/awslabs/serverless-application-model/blob/dbc54b5d0cd31bf5cebd16d765b74aee9eb34641/examples/2016-10-31/s3_processor/template.yaml from troposphere import Template, Ref from troposphere.s3 import Bucket from troposphere.serverless import Function, S3Event t = Template() t.add_description( "A function is triggered off an upload to a bucket. It logs the content " "type of the uploaded object.") t.add_transform('AWS::Serverless-2016-10-31') s3_bucket = t.add_resource( Bucket("Bucket") ) t.add_resource( Function( "ProcessorFunction", Handler='index.handler', Runtime='nodejs4.3', CodeUri='s3://<bucket>/s3_processor.zip', Policies='AmazonS3ReadOnlyAccess', Events={ 'PhotoUpload': S3Event( 'PhotoUpload', Bucket=Ref(s3_bucket),
from troposphere import Template, Parameter, Ref, Sub, GetAtt, Output, Export, Join, AWS_STACK_NAME, apigateway, \ Equals, route53, FindInMap, AWS_REGION, serverless, constants, awslambda, cognito, kms, iam, s3 import custom_resources.ssm import custom_resources.acm import custom_resources.cognito import custom_resources.cloudformation import custom_resources.s3 import cfnutils.mappings import cfnutils.kms import cfnutils.output template = Template() custom_resources.use_custom_resources_stack_name_parameter(template) template.add_transform('AWS::Serverless-2016-10-31') param_s3_bucket_name = template.add_parameter( Parameter( "S3BucketName", Default="", Type=constants.STRING, Description="Location of the Lambda ZIP file, bucket name", )) template.set_parameter_label(param_s3_bucket_name, "Lambda S3 bucket") param_s3_key = template.add_parameter( Parameter( "S3Key", Default="", Type=constants.STRING,
class Stack(object):
    """Sceptre stack template: S3 website bucket, DynamoDB table and a
    Lambda function that writes entries to the table.

    All resource settings come from ``sceptre_user_data``; the template
    uses the SAM transform so serverless resource types are available.
    """

    def __init__(self, sceptre_user_data):
        self.template = Template()
        # Enable AWS::Serverless::* resource types (the Function below).
        self.template.add_transform("AWS::Serverless-2016-10-31")
        self.sceptre_user_data = sceptre_user_data
        self.add_s3()
        self.add_dynamo_db()
        self.add_to_ddb_lambda()

    def add_s3(self):
        """Add a public-read S3 bucket configured for static website hosting."""
        self.s3 = self.template.add_resource(Bucket(
            "S3Bucket",
            BucketName=self.sceptre_user_data["bucket_name"],
            AccessControl=PublicRead,
            WebsiteConfiguration=WebsiteConfiguration(
                IndexDocument="index.html",
                ErrorDocument="error.html",
            ),
        ))

    def add_dynamo_db(self):
        """Add a DynamoDB table with a single hash key from sceptre_user_data."""
        self.dynamo_db = self.template.add_resource(Table(
            "dynamoDBTable",
            AttributeDefinitions=[
                AttributeDefinition(
                    AttributeName=self.sceptre_user_data["HashKeyElementName"],
                    AttributeType=self.sceptre_user_data["HashKeyElementType"],
                ),
            ],
            KeySchema=[
                KeySchema(
                    AttributeName=self.sceptre_user_data["HashKeyElementName"],
                    KeyType="HASH",
                ),
            ],
            ProvisionedThroughput=ProvisionedThroughput(
                ReadCapacityUnits=self.sceptre_user_data["ReadCapacityUnits"],
                WriteCapacityUnits=self.sceptre_user_data["WriteCapacityUnits"],
            ),
        ))

    def add_to_ddb_lambda(self):
        """Add a Lambda function exposed through an API Gateway PUT event.

        NOTE(review): Code=/Events= mix awslambda.Function and
        serverless.Function properties — confirm which Function class is
        imported at the top of this file.
        """
        self.add_to_ddb_lambda_function = self.template.add_resource(Function(
            "AddToDDBFunction",
            Code=Code(
                # Inline handler source shipped to Lambda.
                # BUG FIX: the original returned the undefined name `null`;
                # Python's null value is None, so `return null` would raise
                # NameError inside the deployed function.
                ZipFile=Join("", [
                    "import cfnresponse, boto3\n",
                    "def add(event, context): \n",
                    " return None",
                ])
            ),
            Handler="index.add",
            Runtime="python3.6",
            Environment=Environment(
                Variables={
                    "TABLE_NAME": Ref(self.dynamo_db),
                }
            ),
            Events={
                "AddEntry": ApiEvent(
                    "AddEntry",
                    Path="/entry/{entryId}",
                    Method="put",
                ),
            },
        ))
import re

from troposphere import ImportValue, Join, Parameter, Ref, Template
from troposphere.s3 import Bucket, PublicRead

# CodeStar project template: declares both the SAM and CodeStar transforms.
template = Template()
template.add_version('2010-09-09')
template.add_transform(['AWS::Serverless-2016-10-31', 'AWS::CodeStar'])

project_id = template.add_parameter(
    Parameter(
        "ProjectId",
        Description=(
            "AWS CodeStar projectID used to associate new resources "
            "to team members"
        ),
        Type="String",
    )
)

# Placeholder S3 bucket resource.
template.add_resource(
    Bucket("MljsPlaceholderBucket", AccessControl=PublicRead)
)

# Print the rendered YAML, skipping any CodeUri lines.
_codeuri_pattern = re.compile(r'^\s*CodeUri:')
for rendered_line in template.to_yaml().splitlines():
    if _codeuri_pattern.search(rendered_line):
        continue
    print(rendered_line)