def template(make_public=False, **kwargs):
    """Build a CloudFormation template publishing a Lambda Layer version.

    Required kwargs: 'Runtimes' (list of compatible runtimes), 'Bucket' and
    'Key' (S3 location of the layer archive). When ``make_public`` is True a
    LayerVersionPermission granting ``lambda:GetLayerVersion`` to every AWS
    account (Principal '*') is added. Returns the populated Template.

    FIX: the permission resource used to be bound to an unused ``PERM``
    local; the binding is dropped since only the side effect matters.
    """
    required_params = ['Runtimes', 'Bucket', 'Key']
    check_params_exist(required_params, kwargs)

    template = Template()
    layer_name = template.add_parameter(
        Parameter('LayerName', Type="String", AllowedPattern="[a-zA-Z0-9-]*"))
    template.set_description('Default template for Lambda Layer version')
    template.set_transform('AWS::Serverless-2016-10-31')

    # DATE in the logical id yields a new resource (new layer version) per day.
    version = template.add_resource(
        LayerVersion(f'LayerVersion{DATE}',
                     DeletionPolicy='Retain',
                     CompatibleRuntimes=kwargs['Runtimes'],
                     Description=Sub(f'Layer ${{{layer_name.title}}}'),
                     LayerName=Ref(layer_name),
                     Content=Content(S3Bucket=kwargs['Bucket'],
                                     S3Key=kwargs['Key'])))
    if make_public:
        template.add_resource(
            LayerVersionPermission(f'LambdaVersionPermission{DATE}',
                                   DeletionPolicy='Retain',
                                   Principal='*',
                                   LayerVersionArn=Ref(version),
                                   Action='lambda:GetLayerVersion'))

    template.add_output(object_outputs(version, export=True))
    return template
def initialize_template():
    """Create a serverless Template containing the S3 "mkdir" Lambda.

    Builds an IAM role from the on-disk trust document plus a managed base
    policy and an inline S3-write policy, then registers the Lambda function
    that writes directory markers into S3. Returns the populated template.
    """
    template = Template()
    # The SAM transform enables AWS::Serverless::* resource types.
    template.set_transform("AWS::Serverless-2016-10-31")

    # Trust document allowing the Lambda service to assume the role.
    with open('policies/lambda_role_assume_role_doc.json', "r") as f:
        assume_role_doc = json.load(f)

    base_policy = lambda_basepolicy("LambdaBaseRole")
    write_policy = lambda_writeS3('LambdaWriteS3Policy')
    template.add_resource(base_policy)

    mkdir_role = Role("S3MakePathRole",
                      AssumeRolePolicyDocument=assume_role_doc,
                      ManagedPolicyArns=[Ref(base_policy)],
                      Policies=[write_policy])
    template.add_resource(mkdir_role)

    # The Lambda that actually performs the work.
    mkdir_function = Function("S3PutObjectFunction",
                              CodeUri="../lambda_repo",
                              Description="Puts Objects in S3",
                              Handler="helper.handler_mkdir",
                              Role=GetAtt(mkdir_role, "Arn"),
                              Runtime="python3.6",
                              Timeout=30)
    template.add_resource(mkdir_function)
    return template
def test_globals(self):
    """Globals may only be set once the serverless transform is present.

    FIX: the local was named ``globals``, shadowing the builtin; renamed.
    """
    template = Template()
    sam_globals = Globals()
    # Before the transform is set, set_globals must be rejected.
    with self.assertRaises(ValueError):
        template.set_globals(sam_globals)
    transform = "AWS::Serverless-2016-10-31"
    template.set_transform(transform)
    template.set_globals(sam_globals)
    self.assertEqual(template.globals, sam_globals)
    # With globals in place, switching to a non-serverless transform fails.
    with self.assertRaises(ValueError):
        template.set_transform("other_transform")
def initialize_template(self):
    """Create the base serverless template with S3 helper lambdas.

    Registers one IAM role (trust doc + base managed policy + S3 write
    policy) shared by two lambdas — one that creates objects in S3 and one
    that deletes a bucket's contents — plus a custom resource that triggers
    the delete lambda when the stack is torn down.

    Returns (template, mkdir_function_resource, delete_function_resource).
    """
    template = Template()
    # SAM transform so serverless.Function resources are accepted.
    template.set_transform("AWS::Serverless-2016-10-31")

    # Trust agreement letting Lambda assume the role below.
    with open('policies/lambda_role_assume_role_doc.json', "r") as f:
        mkdirassume_role_doc = json.load(f)

    base_policy = lambda_basepolicy("LambdaBaseRole")
    write_policy = lambda_writeS3('LambdaWriteS3Policy')
    template.add_resource(base_policy)

    mkdirrole = Role("S3MakePathRole",
                     AssumeRolePolicyDocument=mkdirassume_role_doc,
                     ManagedPolicyArns=[Ref(base_policy)],
                     Policies=[write_policy])
    mkdirrole_attached = template.add_resource(mkdirrole)

    # Environment config for both lambdas (includes the region).
    lambdaconfig = self.config['Lambda']['LambdaConfig']

    mkfunction = Function("S3PutObjectFunction",
                          CodeUri="../../protocols",
                          Description="Puts Objects in S3",
                          Handler="helper.handler_mkdir",
                          Environment=Environment(Variables=lambdaconfig),
                          Role=GetAtt(mkdirrole_attached, "Arn"),
                          Runtime="python3.6",
                          Timeout=30)
    mkfunction_attached = template.add_resource(mkfunction)

    delfunction = Function("S3DelObjectFunction",
                           CodeUri="../../protocols",
                           Description="Deletes Objects from S3",
                           Handler="helper.handler_delbucket",
                           Environment=Environment(Variables=lambdaconfig),
                           Role=GetAtt(mkdirrole_attached, "Arn"),
                           Runtime="python3.6",
                           Timeout=30)
    delfunction_attached = template.add_resource(delfunction)

    # Custom resource wired to the delete lambda; fires on stack deletion.
    delresource = CustomResource('DeleteCustomResource',
                                 ServiceToken=GetAtt(delfunction_attached,
                                                     "Arn"),
                                 BucketName=self.config['PipelineName'],
                                 DependsOn='PipelineMainBucket')
    template.add_resource(delresource)

    # Further custom-resource initializations can be appended here later.
    return template, mkfunction_attached, delfunction_attached
def create_function_template():
    """Render a SAM template with a single sample Lambda to function.yml."""
    template = Template()
    template.set_transform('AWS::Serverless-2016-10-31')

    sample_function = Function(
        title='SampleLambdaFunction',
        CodeUri='.',
        FunctionName='sample-lambda-function',
        Handler='lambda_function.lambda_handler',
        # Execution role is exported by the common-resource stack.
        Role=ImportValue(
            CommonResource.ExportName.LAMBDA_SERVICE_ROLE_ARN.value),
        Runtime='python3.7',
    )
    template.add_resource(resource=sample_function)

    with open('./function.yml', mode='w') as file:
        file.write(template.to_yaml())
def test_globals(self):
    """A fully-populated Globals serializes once the transform is set,
    while unknown or wrongly-typed sections raise ValueError."""
    template = Template()
    template.set_transform(SERVERLESS_TRANSFORM)
    template.set_globals(
        Globals(
            Function=FunctionGlobals(),
            Api=ApiGlobals(),
            HttpApi=HttpApiGlobals(),
            SimpleTable=SimpleTableGlobals(),
        ))
    # Must serialize without raising.
    template.to_json()

    # An unrecognised section name is rejected.
    with self.assertRaises(ValueError):
        Globals(Unexpected="blah")
    # A section of the wrong type is rejected.
    with self.assertRaises(ValueError):
        Globals(Function="not FunctionGlobals")
def create_template():
    """Assemble the OTM communication stack.

    Creates the DynamoDB table, IAM roles, the three record CRUD lambdas,
    the API Gateway resources/methods fronting them, a deployment, and an
    S3 bucket. Returns the populated Template.

    FIX: the bucket handle was bound to an unused ``bucket`` local; the
    call is kept for its side effect on the template.
    """
    template = Template()
    template.set_description('This stack deploys the OTM communication stack')
    template.set_version()
    template.set_transform('AWS::Serverless-2016-10-31')

    # DB
    table = dynamodb_factory.add_dynamodb(template)

    # ROLES
    role_factory.add_role(template)
    use_dynamodb_role = role_factory.add_use_dynamodb_role(template)
    apigwlambda_role = role_factory.add_apigateway_role(template)

    # LAMBDAS
    add_record_lambda = lambda_factory.add_lambda(
        template, use_dynamodb_role, "./src/lambdas/records/add",
        "addRecordLambda", table)
    get_record_lambda = lambda_factory.add_lambda(
        template, use_dynamodb_role, "./src/lambdas/records/get",
        "getRecordLambda", table)
    remove_record_lambda = lambda_factory.add_lambda(
        template, use_dynamodb_role, "./src/lambdas/records/remove",
        "removeRecordLambda", table)

    # API GATEWAY METHODS
    # NOTE(review): add->GET and get->POST looks swapped relative to REST
    # conventions — confirm against the API contract before changing.
    rest_api = apigateway_factory.add_apigateway_to_lambda(template)
    resource = apigateway_factory.add_resource(template, rest_api)
    add_record_method = apigateway_factory.add_method_to_apigateway(
        template, add_record_lambda, apigwlambda_role, rest_api, resource,
        "GET")
    get_record_method = apigateway_factory.add_method_to_apigateway(
        template, get_record_lambda, apigwlambda_role, rest_api, resource,
        "POST")
    remove_record_method = apigateway_factory.add_method_to_apigateway(
        template, remove_record_lambda, apigwlambda_role, rest_api, resource,
        "DELETE")
    apigateway_factory.add_deployment(
        template, rest_api,
        [add_record_method, get_record_method, remove_record_method])

    s3_factory.add_bucket(template)
    return template
def create_cloud_front_template():
    """Write a SAM template (service role + sample Lambda) to function.yml."""
    template = Template()
    template.set_transform('AWS::Serverless-2016-10-31')

    # Trust document: only the Lambda service may assume this role.
    trust_document = {
        "Statement": [{
            "Effect": "Allow",
            "Principal": {
                "Service": ['lambda.amazonaws.com']
            },
            "Action": ["sts:AssumeRole"]
        }]
    }
    # Inline policy granting every Lambda API action on every resource.
    inline_policy = Policy(
        PolicyName="sample-policy",
        PolicyDocument={
            "Version": "2012-10-17",
            "Statement": [{
                "Action": 'lambda:*',
                "Resource": '*',
                "Effect": "Allow"
            }]
        })
    service_role = template.add_resource(
        resource=Role(title='SampleLambdaServiceRole',
                      RoleName='sample-lambda-service-role',
                      Path='/',
                      AssumeRolePolicyDocument=trust_document,
                      Policies=[inline_policy]))

    template.add_resource(resource=Function(
        title='SampleLambdaFunction',
        AutoPublishAlias='sample',
        CodeUri='.',
        FunctionName='sample-lambda-function',
        Handler='lambda_function.lambda_handler',
        Role=GetAtt(logicalName=service_role, attrName='Arn'),
        Runtime='python3.7',
    ))

    with open('./function.yml', mode='w') as file:
        file.write(template.to_yaml())
def main():
    """Build the document-review serverless template and print it as YAML."""
    template = Template()
    template.set_transform('AWS::Serverless-2016-10-31')

    # Gather CloudFormation resources from the state machine and each task,
    # preserving the original registration order.
    resource_groups = [
        document_fsm.DocumentReviewMachine.cf_resources(),
        document_fsm.launch.cf_resources(),
        document_fsm.transition.cf_resources(),
        document_fsm.info.cf_resources(),
        document_tasks.archive_document.cf_resources(),
        document_tasks.delete_document.cf_resources(),
        document_tasks.notify_reviewer.cf_resources(),
        document_tasks.notify_uploader.cf_resources(),
        document_tasks.summarize_document.cf_resources(),
    ]
    for group in resource_groups:
        for resource in group:
            template.add_resource(resource)

    # Emit the rendered template on stdout.
    print(template.to_yaml())
def create_function_template():
    """Write a SAM template for the Lambda@Edge function to function.yml,
    exporting the function's ARN as a stack output.

    FIX: the local holding the Function resource was named ``bucket``;
    renamed to ``function`` to match what it actually is.
    """
    template = Template()
    template.set_transform('AWS::Serverless-2016-10-31')

    function = template.add_resource(resource=Function(
        title='SampleLambdaFunction',
        AutoPublishAlias='sample',
        CodeUri='.',
        FunctionName='sample-lambda-edge-function',
        Handler='lambda_function.lambda_handler',
        # Execution role exported by the common-resource stack.
        Role=ImportValue(
            CommonResource.ExportName.LAMBDA_EDGE_SERVICE_ROLE_ARN.value),
        Runtime='python3.7',
    ))
    template.add_output(output=Output(title=function.title,
                                      Value=GetAtt(function, 'Arn'),
                                      Export=Export(name=get_export_name())))

    with open('./function.yml', mode='w') as file:
        file.write(template.to_yaml())
import json import stepfunctions as sf import troposphere.iam as iam from stepfunctions.steps import Fail, Catch, ChoiceRule from troposphere import Join, serverless, s3, Sub, AWS_ACCOUNT_ID, Ref from troposphere import Template from troposphere.cloudformation import AWSCustomObject from troposphere.glue import ExecutionProperty, JobCommand, Job, Database, \ DatabaseInput, Crawler, SchemaChangePolicy, S3Target, Targets from troposphere.stepfunctions import StateMachine template = Template() template.set_version("2010-09-09") template.set_transform('AWS::Serverless-2016-10-31') template.set_description("Example") #### Internal S3 Bucket #### internal_s3_bucket = template.add_resource(s3.Bucket( "DatalakeS3Bucket", BucketName=Sub(f'${{{AWS_ACCOUNT_ID}}}-test-stepfunctions-troposphere-glue'), PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration( BlockPublicAcls=True, BlockPublicPolicy=True, IgnorePublicAcls=True, RestrictPublicBuckets=True ) )) custom_resource_empty_on_delete_execution_role = template.add_resource( iam.Role(
def test_transform(self):
    """set_transform stores the given transform string on the template."""
    expected = "AWS::Serverless-2016-10-31"
    template = Template()
    template.set_transform(expected)
    self.assertEqual(template.transform, expected)
def create_cloud_front_template():
    """Write cloudfront.yml: an S3 origin bucket locked to an origin-access
    identity, fronted by a CloudFront distribution that runs the imported
    Lambda@Edge function on viewer requests."""
    template = Template()
    template.set_transform('AWS::Serverless-2016-10-31')

    origin_bucket = template.add_resource(
        resource=Bucket(
            title='SampleOriginBucket',
            BucketName=Sub('sample-origin-bucket-${AWS::AccountId}')
        )
    )
    access_identity = template.add_resource(
        resource=CloudFrontOriginAccessIdentity(
            title='SampleOriginAccessIdentity',
            CloudFrontOriginAccessIdentityConfig=CloudFrontOriginAccessIdentityConfig(
                Comment='sample-lambda-edge'
            )
        )
    )

    # Only the origin-access identity may read objects from the bucket.
    bucket_statement = {
        'Action': 's3:GetObject',
        'Effect': 'Allow',
        'Resource': Join(delimiter='/',
                         values=[GetAtt(origin_bucket, 'Arn'), '*']),
        'Principal': {
            'CanonicalUser': GetAtt(logicalName=access_identity,
                                    attrName='S3CanonicalUserId')
        }
    }
    template.add_resource(
        resource=BucketPolicy(
            title='SampleBucketPolicy',
            Bucket=Ref(origin_bucket),
            PolicyDocument={'Statement': [bucket_statement]}
        )
    )

    # Version 8 of the exported Lambda@Edge function runs on viewer-request.
    edge_association = LambdaFunctionAssociation(
        EventType='viewer-request',
        LambdaFunctionARN=Sub([
            '${FUNCTION_ARN}:8',
            {'FUNCTION_ARN': ImportValue(get_export_name())}
        ]),
    )
    template.add_resource(
        resource=Distribution(
            title='SampleDistribution',
            DistributionConfig=DistributionConfig(
                DefaultCacheBehavior=DefaultCacheBehavior(
                    ForwardedValues=ForwardedValues(
                        QueryString=True,
                    ),
                    LambdaFunctionAssociations=[edge_association],
                    TargetOriginId=Sub('S3-${' + origin_bucket.title + '}'),
                    ViewerProtocolPolicy='redirect-to-https',
                ),
                Enabled=True,
                Origins=[
                    Origin(
                        Id=Sub('S3-${' + origin_bucket.title + '}'),
                        DomainName=Sub('${' + origin_bucket.title
                                       + '}.s3.amazonaws.com'),
                        S3OriginConfig=S3OriginConfig(
                            OriginAccessIdentity=Sub(
                                'origin-access-identity/cloudfront/${'
                                + access_identity.title + '}')
                        )
                    )
                ],
            )
        )
    )

    with open('./cloudfront.yml', mode='w') as file:
        file.write(template.to_yaml())
BUCKET_NAME_SUFFIX, BUCKET_VERSIONING_CONFIG) from troposphere.iam import Role, Policy from troposphere.awslambda import Code, MEMORY_VALUES from troposphere.events import Rule, Target import troposphere.awslambda as tropo_lambda CRAWLER_DB_NAME = 'RawDataCrawlerDB'.lower() CRAWLER_TABLES_NAME_PREFIX = 'MockDatalakeTable_'.lower() CRAWLER_NAME = 'RawDataCrawler'.lower() T = Template() T.set_version('2010-09-09') T.set_transform('AWS::Serverless-2016-10-31') T.set_description( "AWS CloudFormation Template that create three s3 buckets \ 1)Landing Zone. 2)Work Zone. 3)Gold Zone. Landing Zone is \ the place where the raw data is entered in the datalake \ while the Work Zone ist the place where a partially \ transformed or filtered data is stored and the Gold Zone \ is the place where the final processesed or transformed data\ will be placed after passing through ETL pipeline.") MemorySize = T.add_parameter(Parameter( 'LambdaMemorySize', Type=NUMBER, Description='Amount of memory to allocate to the Lambda Function', Default='128',
def initialize_template(self):
    """Return a fresh Template with the SAM transform already applied."""
    base_template = Template()
    # The transform enables AWS::Serverless::* resource types.
    base_template.set_transform("AWS::Serverless-2016-10-31")
    return base_template
# Converted from s3_processor located at:
# https://github.com/awslabs/serverless-application-model/blob/dbc54b5d0cd31bf5cebd16d765b74aee9eb34641/examples/2016-10-31/s3_processor/template.yaml

from troposphere import Template
from troposphere.serverless import DeploymentPreference, Function

template = Template()
template.set_description(
    "A function that uses the configured traffic shifting type "
    "for a canary deployment.")
template.set_transform("AWS::Serverless-2016-10-31")

# Alias "live" is auto-published; 10% of traffic shifts to the new version
# for 5 minutes before the deployment completes.
canary_function = Function(
    "Function",
    Handler="index.handler",
    Runtime="nodejs6.10",
    CodeUri="s3://<bucket>/function.zip",
    AutoPublishAlias="live",
    DeploymentPreference=DeploymentPreference(
        Enabled=True,
        Type="Canary10Percent5Minutes"),
)
template.add_resource(canary_function)

print(template.to_json())
def create_aurora_template(region, account_id):
    """Write api.yml (SAM Api + Lambda + invoke permission) and swagger.yml
    rendered from swagger_template.yml for the given region/account pair."""
    template = Template()
    template.set_transform('AWS::Serverless-2016-10-31')

    # Stack parameters with sensible defaults.
    api_name = template.add_parameter(parameter=Parameter(
        title='ApiName',
        Default='sample-api',
        Type='String',
    ))
    function_name = template.add_parameter(parameter=Parameter(
        title='FunctionName',
        Default='sample-lambda-function',
        Type='String',
    ))
    stage_name = template.add_parameter(parameter=Parameter(
        title='StageName',
        Default='prod',
        Type='String',
    ))

    # The swagger definition is referenced by literal path (a SwaggerPath
    # parameter was considered and dropped in the original).
    api = template.add_resource(resource=Api(
        title='SampleApi',
        Name=Ref(api_name),
        DefinitionUri='./swagger.yml',
        StageName=Ref(stage_name),
    ))

    path = '/sample/'
    method = 'get'
    function = template.add_resource(resource=Function(
        title='SampleLambdaFunction',
        AutoPublishAlias='sample',
        CodeUri='.',
        FunctionName=Ref(function_name),
        Handler='lambda_function.lambda_handler',
        Role=ImportValue('sample-lambda-service-role-arn'),
        Runtime='python3.7',
        Events={
            'ApiTrigger': {
                'Type': 'Api',
                'Properties': {
                    'Path': path,
                    'Method': method,
                    'RestApiId': Ref(api)
                }
            }
        }))
    # Allow API Gateway to invoke the function.
    template.add_resource(resource=Permission(
        title='SampleLambdaFunctionPermission',
        Action='lambda:InvokeFunction',
        FunctionName=Ref(function),
        Principal='apigateway.amazonaws.com'))

    # Render the swagger file by substituting the placeholders.
    with open('swagger_template.yml') as f:
        swagger_yaml = f.read()
    uri = (URI.replace('{region}', region)
              .replace('{account_id}', account_id)
              .replace('{function_name}', function_name.Default))
    swagger = (swagger_yaml.replace('{path}', path)
                           .replace('{method}', method)
                           .replace('{uri}', uri))

    with open('./api.yml', mode='w') as file:
        file.write(template.to_yaml())
    with open('./swagger.yml', mode='w') as file:
        file.write(swagger)
class UXTemplate(object):
    '''
    Handles the user-experience aspects of the CloudFormation stack: the
    affiliate's bucket, the pipeline folder within that bucket, an IAM group
    for the affiliate, and IAM users for each member of the affiliate's
    organization.

    NOTE(review): parts of this class were recovered from a redacted source
    (``******`` spans); reconstructed statements are marked below and must
    be confirmed against version history before deployment.
    '''

    def __init__(self, affiliatename, defaulttemplate=False):
        """Build a fresh template with the affiliate bucket and IAM group.

        FIX: the non-default branch used to be a bare string literal
        ('Implement me!...') — a no-op that left the object half-built.
        It now fails loudly.
        """
        if defaulttemplate is not False:
            raise NotImplementedError(
                'Loading from a default template (and exposing its resources '
                'as attributes) is not implemented yet.')

        self.template = Template()
        # Accept serverless (SAM) resource types.
        self.template.set_transform('AWS::Serverless-2016-10-31')
        self.affiliatename = affiliatename
        # TODO: check that the affiliate name is all lowercase.

        # Private bucket named after the affiliate.
        self.bucket_logname = 'UserBucket' + affiliatename
        bucket = Bucket(self.bucket_logname,
                        AccessControl='Private',
                        BucketName=affiliatename)
        self.bucket = self.template.add_resource(bucket)

        # Policy scoped to the affiliate bucket, attached to a new group.
        policy = Policy(PolicyDocument=self.customize_userpolicy(),
                        PolicyName=self.affiliatename + 'policy')
        self.group_logname = 'UserGroup' + affiliatename
        self.groupname = self.affiliatename + 'group'
        usergroup = Group(self.group_logname,
                          GroupName=self.groupname,
                          Policies=[policy])
        self.usergroup = self.template.add_resource(usergroup)

        self.users = []      # IAM user resources added so far
        self.usercount = 0   # monotonically increasing output suffix

    def customize_userpolicy(self):
        """Render the base IAM user policy for this affiliate.

        Appends full S3 access on the affiliate's bucket (and its objects),
        writes the rendered policy to policies/<name>_policy.json for
        inspection, and returns the policy document.
        """
        with open('policies/iam_user_base_policy_doc.json', 'r') as f:
            obj = json.load(f)
        obj["Statement"].append({
            'Sid': 'VisualEditor2',
            'Effect': 'Allow',
            'Action': 's3:*',
            'Resource': [
                'arn:aws:s3:::' + self.affiliatename + '/*',
                'arn:aws:s3:::' + self.affiliatename
            ]
        })
        with open('policies/' + self.affiliatename + '_policy.json',
                  'w') as fw:
            json.dump(obj, fw, indent=2)
        return obj

    def generate_user_with_creds(self, username, password=True,
                                 accesskey=True):
        """Declare an IAM user and surface credentials as stack outputs.

        FIX: the original ``assert`` was followed by a bare string literal,
        so the message was a no-op statement; it is now attached to the
        assert. FIX: the ``accesskey`` parameter was rebound to a Ref; a
        separate local is used instead.
        """
        data = {}
        assert password or accesskey, 'Must have some credentials'

        # Region suffix keeps user names unique across regions.
        user = User(self.affiliatename + 'user' + str(username),
                    UserName=Join("", [username, Ref(AWS_REGION)]))
        user_t = self.template.add_resource(user)

        if password == True:
            # Random 8-byte hex default password; reset not forced.
            ResetRequired = False
            default_password = secrets.token_hex(8)
            lp = LoginProfile(Password=default_password,
                              PasswordResetRequired=ResetRequired)
            # NOTE(review): ``data`` is populated but never read in the
            # visible source — possibly returned in the original; confirm.
            data['password'] = []
            data['password'].append({'password': default_password})
            self.template.add_output(
                Output('Password' + str(self.usercount),
                       Value=default_password,
                       Description='Default password of new user ' + username))
            user_t.LoginProfile = lp

        if accesskey == True:
            key = AccessKey('userkey' + str(self.usercount),
                            UserName=Ref(user))
            self.template.add_resource(key)
            accesskey_ref = Ref(key)
            secretkey = GetAtt(key, 'SecretAccessKey')
            self.template.add_output(
                Output('AccessKey' + str(self.usercount),
                       Value=accesskey_ref,
                       # NOTE(review): description text reconstructed from a
                       # redacted span — confirm exact wording.
                       Description='Access Key of user: ' + username))
            self.template.add_output(
                Output('SecretAccessKey' + str(self.usercount),
                       Value=secretkey,
                       # NOTE(review): reconstructed — confirm exact wording.
                       Description='Secret Key of new user: ' + username))

        # NOTE(review): bookkeeping reconstructed from a redacted span —
        # the group-addition method below depends on self.users.
        self.users.append(user_t)
        self.usercount += 1

    def add_users_to_group(self):
        """Attach every generated user to the affiliate's group.

        NOTE(review): this whole statement was redacted in the source; the
        resource type is inferred as UserToGroupAddition (iam.Group has no
        Users property) — confirm before deploying.
        """
        self.template.add_resource(
            UserToGroupAddition('Users',
                                GroupName=Ref(self.usergroup),
                                Users=[Ref(u) for u in self.users]))

    def make_folder_custom_resource(self, bucketname, pathname, dirname):
        """Add a Lambda-backed custom resource creating a folder in S3.

        NOTE(review): ``bucketname`` is accepted but unused — the custom
        resource targets ``self.affiliatename``; confirm intent.
        """
        # 1. Role for the lambda: trust doc + base policy + S3 write policy.
        with open('policies/lambda_role_assume_role_doc.json', "r") as f:
            assume_role_doc = json.load(f)
        base_policy = lambda_basepolicy("LambdaBaseRole")
        write_policy = lambda_writeS3('LambdaWriteS3Policy')
        self.template.add_resource(base_policy)
        role = Role("S3MakePathRole",
                    AssumeRolePolicyDocument=assume_role_doc,
                    ManagedPolicyArns=[Ref(base_policy)],
                    Policies=[write_policy])
        self.template.add_resource(role)

        # 2. Lambda that writes the directory marker object.
        function = Function("S3PutObjectFunction",
                            CodeUri="../lambda_repo",
                            Description="Puts Objects in S3",
                            Handler="helper.handler_mkdir",
                            Role=GetAtt(role, "Arn"),
                            Runtime="python3.6",
                            Timeout=30)
        self.template.add_resource(function)

        # 3. Custom resource that invokes the lambda at deploy time.
        foldermake = CustomResource('S3PutObject',
                                    ServiceToken=GetAtt(function, "Arn"),
                                    BucketName=self.affiliatename,
                                    Path=pathname,
                                    DirName=dirname)
        self.template.add_resource(foldermake)