def initialize_template(self):
    """Build the base serverless template plus the mkdir/delete helper lambdas.

    Returns:
        Tuple of (template, attached mkdir Function, attached delete Function).
    """
    template = Template()
    # SAM transform so troposphere.serverless Functions are valid resources.
    template.set_transform("AWS::Serverless-2016-10-31")

    # Trust document letting Lambda assume the helper role below.
    with open('policies/lambda_role_assume_role_doc.json', "r") as f:
        trust_doc = json.load(f)

    # Shared managed base policy plus inline S3 write permissions.
    managed_base = lambda_basepolicy("LambdaBaseRole")
    s3_write = lambda_writeS3('LambdaWriteS3Policy')
    template.add_resource(managed_base)

    helper_role = Role("S3MakePathRole",
                       AssumeRolePolicyDocument=trust_doc,
                       ManagedPolicyArns=[Ref(managed_base)],
                       Policies=[s3_write])
    helper_role_attached = template.add_resource(helper_role)

    # Lambda runtime configuration (includes the region).
    lambdaconfig = self.config['Lambda']['LambdaConfig']

    # Lambda that creates "directory" objects in S3.
    mkfunction_attached = template.add_resource(
        Function("S3PutObjectFunction",
                 CodeUri="../../protocols",
                 Description="Puts Objects in S3",
                 Handler="helper.handler_mkdir",
                 Environment=Environment(Variables=lambdaconfig),
                 Role=GetAtt(helper_role_attached, "Arn"),
                 Runtime="python3.6",
                 Timeout=30))

    # Lambda that removes objects from the pipeline bucket.
    delfunction_attached = template.add_resource(
        Function("S3DelObjectFunction",
                 CodeUri="../../protocols",
                 Description="Deletes Objects from S3",
                 Handler="helper.handler_delbucket",
                 Environment=Environment(Variables=lambdaconfig),
                 Role=GetAtt(helper_role_attached, "Arn"),
                 Runtime="python3.6",
                 Timeout=30))

    # Custom resource wired to the delete lambda so the bucket is emptied
    # when the stack is torn down.
    template.add_resource(
        CustomResource('DeleteCustomResource',
                       ServiceToken=GetAtt(delfunction_attached, "Arn"),
                       BucketName=self.config['PipelineName'],
                       DependsOn='PipelineMainBucket'))

    # Other custom resource initializations can be added here in the future.
    return template, mkfunction_attached, delfunction_attached
def get_cloudformation_template(self, lambda_filename):
    """Build the SAM template for this bot's lambda and its Lex permissions.

    Args:
        lambda_filename: key of the packaged lambda zip inside
            ``self.s3_bucket_name``.

    Returns:
        A troposphere ``Template`` ready to serialize/deploy.
    """
    # Imports are function-local by design; BUG FIX: Ref, AWS_REGION and
    # AWS_ACCOUNT_ID were used below but never imported, raising NameError
    # at call time.
    from troposphere import (AWS_ACCOUNT_ID, AWS_REGION, GetAtt, Join, Ref,
                             Template)
    from troposphere.awslambda import Environment, Permission
    from troposphere.serverless import Function

    t = Template()
    t.add_description("Built with WavyCloud's pylexbuilder")
    t.add_transform('AWS::Serverless-2016-10-31')

    lambda_func = t.add_resource(
        Function(
            self.name,
            Handler='handler.index',
            Runtime=self.runtime,
            CodeUri='s3://{}/{}'.format(self.s3_bucket_name, lambda_filename),
            Policies=['AmazonDynamoDBFullAccess', 'AmazonLexFullAccess'],
            AutoPublishAlias=self.lambda_alias,
            Environment=Environment(
                Variables=self.environment_variables)),
    )
    # Grant every Lex intent of this bot permission to invoke the function.
    # (The enumerate index was unused and has been dropped.)
    for intent in self.get_all_intents():
        t.add_resource(
            Permission(
                "PermissionToLex{}".format(intent.name),
                FunctionName=GetAtt(lambda_func, "Arn"),
                Action="lambda:InvokeFunction",
                Principal="lex.amazonaws.com",
                SourceArn=Join("", [
                    'arn:aws:lex:',
                    Ref(AWS_REGION), ':',
                    Ref(AWS_ACCOUNT_ID),
                    ':intent:{}:*'.format(intent.name)
                ])))
    return t
def initialize_template():
    """Create a serverless template containing the S3 mkdir helper lambda."""
    template = Template()
    # SAM transform enables troposphere.serverless resources.
    template.set_transform("AWS::Serverless-2016-10-31")

    # Trust document allowing Lambda to assume the role below.
    with open('policies/lambda_role_assume_role_doc.json', "r") as f:
        assume_role_doc = json.load(f)

    base_policy = lambda_basepolicy("LambdaBaseRole")     # shared managed policy
    write_policy = lambda_writeS3('LambdaWriteS3Policy')  # inline S3 write access
    template.add_resource(base_policy)

    role = Role("S3MakePathRole",
                AssumeRolePolicyDocument=assume_role_doc,
                ManagedPolicyArns=[Ref(base_policy)],
                Policies=[write_policy])
    template.add_resource(role)

    # The lambda that actually creates the S3 "directories".
    template.add_resource(
        Function("S3PutObjectFunction",
                 CodeUri="../lambda_repo",
                 Description="Puts Objects in S3",
                 Handler="helper.handler_mkdir",
                 Role=GetAtt(role, "Arn"),
                 Runtime="python3.6",
                 Timeout=30))
    return template
def add_to_ddb_lambda(self):
    """Register the AddToDDBFunction lambda (PUT /entry/{entryId}).

    The handler body is inlined via ``ZipFile``.  BUG FIX: the stub returned
    ``null``, which is not a Python name and would raise NameError inside the
    deployed lambda; it now returns ``None``.
    """
    self.add_to_ddb_lambda_function = self.template.add_resource(Function(
        "AddToDDBFunction",
        Code=Code(
            ZipFile=Join("", [
                "import cfnresponse, boto3\n",
                "def add(event, context): \n",
                "    return None",
            ])
        ),
        Handler="index.add",
        Runtime="python3.6",
        # Table name is injected so the handler can locate the DynamoDB table.
        Environment=Environment(
            Variables={
                "TABLE_NAME": Ref(self.dynamo_db)
            }
        ),
        Events={
            "AddEntry": ApiEvent(
                "AddEntry",
                Path="/entry/{entryId}",
                Method="put"
            )
        }
    ))
def test_s3_filter(self):
    """A Function with an S3 event restricted by prefix/suffix key rules."""
    key_filter = Filter(S3Key=S3Key(
        Rules=[
            Rules(Name="prefix", Value="upload/"),
            Rules(Name="suffix", Value=".txt"),
        ],
    ))
    upload_event = S3Event(
        'FileUpload',
        Bucket="bucket",
        Events=['s3:ObjectCreated:*'],
        Filter=key_filter)
    template = Template()
    template.add_resource(
        Function(
            "ProcessorFunction",
            Handler='process_file.handler',
            CodeUri='.',
            Runtime='python3.6',
            Policies='AmazonS3FullAccess',
            Events={'FileUpload': upload_event}))
    template.to_json()
def test_required_function(self):
    """A Function with only its required properties serializes cleanly."""
    template = Template()
    template.add_resource(
        Function("SomeHandler",
                 Handler="index.handler",
                 Runtime="nodejs",
                 CodeUri="s3://bucket/handler.zip"))
    template.to_json()
def add_submit_lambda(self):
    """Create the MainLambda function with one S3 trigger per affiliate.

    Each affiliate gets an ObjectCreated event scoped to
    ``<affiliate>/<readdir>`` with suffix ``submit.json``; readdir is the
    submit directory for user-input affiliates and the output directory
    for affiliates fed by other functions.
    """
    all_affiliates = self.config["UXData"]["Affiliates"]
    lambda_settings = self.config['Lambda']['LambdaConfig']

    # One bucket-event entry per affiliate.
    all_events = {}
    for affiliate in all_affiliates:
        affiliatename = affiliate["AffiliateName"]
        assert type(affiliate["UserInput"]) == bool, "must provide a json boolean for UserInput"
        # User input is read from the submit dir, function output from outdir.
        if affiliate["UserInput"] == True:
            readdir = lambda_settings['SUBMITDIR']
        elif affiliate["UserInput"] == False:
            readdir = lambda_settings['OUTDIR']
        aff_filter = Filter(
            'Filter' + affiliatename,
            S3Key=S3Key(
                'S3Key' + affiliatename,
                Rules=[
                    Rules('PrefixRule' + affiliatename,
                          Name='prefix',
                          Value=affiliatename + '/' + readdir),
                    Rules('SuffixRule' + affiliatename,
                          Name='suffix',
                          Value='submit.json')
                ]))
        all_events['BucketEvent' + affiliatename] = {
            'Type': 'S3',
            'Properties': {
                'Bucket': Ref('PipelineMainBucket'),
                'Events': ['s3:ObjectCreated:*'],
                'Filter': aff_filter
            }
        }

    # Expose template resources to the lambda through its environment; the
    # rest of the environment comes straight from the config file.
    lambda_settings['figlambid'] = Ref(self.figurelamb)
    lambda_settings['figlambarn'] = GetAtt(self.figurelamb, 'Arn')
    lambda_settings['cwrolearn'] = GetAtt(self.cwrole, 'Arn')

    self.template.add_resource(Function(
        'MainLambda',
        CodeUri='../../protocols',
        Runtime='python3.6',
        Handler='submit_start.handler',
        Description='Main Lambda Function for Serverless',
        MemorySize=128,
        Timeout=self.config["Lambda"]['LambdaConfig']["EXECUTION_TIMEOUT"],
        Role='arn:aws:iam::739988523141:role/lambda_dataflow',  # TODO: Create this in template
        Events=all_events,
        Environment=Environment(Variables=lambda_settings)))
def test_optional_auto_publish_alias(self):
    """AutoPublishAlias is accepted as an optional Function property."""
    template = Template()
    template.add_resource(
        Function("SomeHandler",
                 Handler="index.handler",
                 Runtime="nodejs",
                 CodeUri="s3://bucket/handler.zip",
                 AutoPublishAlias="alias"))
    template.to_json()
def test_policy_document(self):
    """Policies accepts a string, a list of strings, or an inline document."""
    policy_variants = (
        "AmazonS3ReadOnly",
        ["AmazonS3FullAccess", "AmazonDynamoDBFullAccess"],
        {
            "Statement": [{
                "Effect": "Allow",
                "Action": ["s3:GetObject", "s3:PutObject"],
                "Resource": ["arn:aws:s3:::bucket/*"],
            }]
        },
    )
    for policies in policy_variants:
        template = Template()
        template.add_resource(
            Function(
                "ProcessorFunction",
                Handler='process_file.handler',
                CodeUri='.',
                Runtime='python3.6',
                Policies=policies,
            ))
        template.to_json()
def test_s3_location(self):
    """CodeUri may be given as a structured S3Location object."""
    location = S3Location(
        Bucket="mybucket",
        Key="mykey",
    )
    template = Template()
    template.add_resource(
        Function("SomeHandler",
                 Handler="index.handler",
                 Runtime="nodejs",
                 CodeUri=location))
    template.to_json()
def add_lambda(template: Template, role: Role, code_uri: str,
               lambda_name: str, dynamodb_table: Table) -> Function:
    """Attach a python3.6 lambda named *lambda_name* to *template*.

    The handler is ``<lambda_name>.lambda_handler`` and the function receives
    the DynamoDB table name through the TABLE_NAME environment variable.
    """
    environment = Environment(
        Variables={"TABLE_NAME": Ref(dynamodb_table)})
    func = Function(lambda_name,
                    Handler=f'{lambda_name}.lambda_handler',
                    Runtime="python3.6",
                    CodeUri=code_uri,
                    Timeout=10,
                    Role=GetAtt(role, "Arn"),
                    Environment=environment)
    return template.add_resource(func)
def test_optional_deployment_preference(self):
    """DeploymentPreference is accepted alongside AutoPublishAlias."""
    template = Template()
    template.add_resource(
        Function(
            "SomeHandler",
            Handler="index.handler",
            Runtime="nodejs",
            CodeUri="s3://bucket/handler.zip",
            AutoPublishAlias="alias",
            DeploymentPreference=DeploymentPreference(Type="AllAtOnce")))
    template.to_json()
def test_tags(self):
    """Tags given as a dict-backed Tags object serialize correctly."""
    tags = Tags({
        'Tag1': 'TagValue1',
        'Tag2': 'TagValue2'
    })
    template = Template()
    template.add_resource(
        Function("SomeHandler",
                 Handler="index.handler",
                 Runtime="nodejs",
                 CodeUri="s3://bucket/handler.zip",
                 Tags=tags))
    template.to_json()
def test_DLQ(self):
    """A DeadLetterQueue of type SNS is accepted on a Function."""
    dlq = DeadLetterQueue(
        Type='SNS',
        TargetArn='arn:aws:sns:us-east-1:000000000000:SampleTopic')
    template = Template()
    template.add_resource(
        Function(
            "SomeHandler",
            Handler="index.handler",
            Runtime="nodejs",
            CodeUri="s3://bucket/handler.zip",
            DeadLetterQueue=dlq))
    template.to_json()
def add_figure_lambda(self):
    """Create the figure/logging lambda plus its CloudWatch plumbing.

    Also stores the CloudWatch events role on ``self.cwrole`` for later
    targets, and returns the attached lambda resource.
    """
    # The lambda itself; event wiring is added elsewhere, hence Events={}.
    fig_function = Function(
        'FigLambda',
        CodeUri='../../protocols',
        Runtime='python3.6',
        Handler='log.eventshandler',
        Description='Lambda Function logging start/stop for NCAP',
        MemorySize=128,
        Timeout=90,
        Role='arn:aws:iam::739988523141:role/lambda_dataflow',  # TODO: Create this in template
        Events={})
    figurelamb = self.template.add_resource(fig_function)

    # Allow CloudWatch events to invoke this lambda.
    self.template.add_resource(
        Permission('CWPermissions',
                   Action='lambda:InvokeFunction',
                   Principal='events.amazonaws.com',
                   FunctionName=Ref(figurelamb)))

    # Invoked by an unknown target, so its log group is managed explicitly.
    self.template.add_resource(
        LogGroup('FignameLogGroup',
                 LogGroupName=Sub("/aws/lambda/${FigLambda}")))

    # Role + policy letting CloudWatch events use this function as a target.
    with open('policies/cloudwatch_events_assume_role_doc.json', 'r') as f:
        cloudwatchassume_role_doc = json.load(f)
    with open('policies/cloudwatch_events_policy_doc.json', 'r') as f:
        cloudwatch_policy_doc = json.load(f)

    cloudwatchpolicy = ManagedPolicy(
        "CloudwatchBusPolicy",
        Description=Join(" ", [
            "Base Policy for all lambda function roles in",
            Ref(AWS_STACK_NAME)
        ]),
        PolicyDocument=cloudwatch_policy_doc)
    self.template.add_resource(cloudwatchpolicy)

    cwrole = Role("CloudWatchBusRole",
                  AssumeRolePolicyDocument=cloudwatchassume_role_doc,
                  ManagedPolicyArns=[Ref(cloudwatchpolicy)])
    self.cwrole = self.template.add_resource(cwrole)
    return figurelamb
def test_exactly_one_code(self):
    """Supplying both CodeUri and InlineCode must fail validation."""
    conflicting = Function(
        "SomeHandler",
        Handler="index.handler",
        Runtime="nodejs",
        CodeUri=S3Location(Bucket="mybucket", Key="mykey"),
        InlineCode="",
    )
    template = Template()
    template.add_resource(conflicting)
    with self.assertRaises(ValueError):
        template.to_json()
def create_lambda(self):
    """Add the CodeLambda function (pushes code changes for NCAP) and return it."""
    return self.template.add_resource(Function(
        'CodeLambda',
        CodeUri='../../lambda_repo',
        Runtime='python3.6',
        Handler='updateami.commithandler',
        Description='Lambda Function pushing code changes for NCAP',
        MemorySize=128,
        Timeout=90,
        Role='arn:aws:iam::739988523141:role/lambda_dataflow',  # TODO: Create this in template
        Events={}))
def create_function_template():
    """Render a minimal SAM template for the sample lambda into function.yml."""
    template = Template()
    template.set_transform('AWS::Serverless-2016-10-31')
    sample_function = Function(
        title='SampleLambdaFunction',
        CodeUri='.',
        FunctionName='sample-lambda-function',
        Handler='lambda_function.lambda_handler',
        Role=ImportValue(
            CommonResource.ExportName.LAMBDA_SERVICE_ROLE_ARN.value),
        Runtime='python3.7',
    )
    template.add_resource(resource=sample_function)
    with open('./function.yml', mode='w') as file:
        file.write(template.to_yaml())
def add_function(template,
                 name,
                 path,
                 db_access=False,
                 get=False,
                 post=False,
                 options=False,
                 timeout=None,
                 memory_size=None,
                 node_env="production"):
    """Add a nodejs API lambda to *template*, wiring HTTP events for *path*.

    db_access grants the DynamoDB full-access policy; timeout/memory_size are
    only set when provided.
    """
    function_args = {
        "Handler": "index.handler",
        "Runtime": "nodejs12.x",
        "CodeUri": "this is not really required, as it is specified in buildspec.yml",
        "Environment": Environment(Variables={
            "NODE_ENV": node_env,
            "TABLE_NAME": "seders"
        }),
        "Role": ImportValue(
            Join("-", [Ref(projectid), Ref("AWS::Region"), "LambdaTrustRole"]))
    }
    # Optional properties are added only when requested.
    if db_access:
        function_args["Policies"] = "AmazonDynamoDBFullAccess"
    if timeout:
        function_args["Timeout"] = timeout
    if memory_size:
        function_args["MemorySize"] = memory_size
    template.add_resource(
        Function(name,
                 **function_args,
                 **events(path, get=get, post=post, options=options)))
def create_cloud_front_template():
    """Render a template with a lambda service role and sample function to function.yml."""
    template = Template()
    template.set_transform('AWS::Serverless-2016-10-31')

    # Trust document: only the Lambda service may assume this role.
    assume_doc = {
        "Statement": [{
            "Effect": "Allow",
            "Principal": {
                "Service": ['lambda.amazonaws.com']
            },
            "Action": ["sts:AssumeRole"]
        }]
    }
    lambda_policy = Policy(PolicyName="sample-policy",
                           PolicyDocument={
                               "Version": "2012-10-17",
                               "Statement": [{
                                   "Action": 'lambda:*',
                                   "Resource": '*',
                                   "Effect": "Allow"
                               }]
                           })
    service_role = template.add_resource(
        resource=Role(title='SampleLambdaServiceRole',
                      RoleName='sample-lambda-service-role',
                      Path='/',
                      AssumeRolePolicyDocument=assume_doc,
                      Policies=[lambda_policy]))

    template.add_resource(resource=Function(
        title='SampleLambdaFunction',
        AutoPublishAlias='sample',
        CodeUri='.',
        FunctionName='sample-lambda-function',
        Handler='lambda_function.lambda_handler',
        Role=GetAtt(logicalName=service_role, attrName='Arn'),
        Runtime='python3.7',
    ))
    with open('./function.yml', mode='w') as file:
        file.write(template.to_yaml())
def create_function_template():
    """Render the lambda@edge sample function template and export its ARN."""
    template = Template()
    template.set_transform('AWS::Serverless-2016-10-31')
    # The resource is a Function (the original local was misleadingly
    # named "bucket").
    edge_function = template.add_resource(resource=Function(
        title='SampleLambdaFunction',
        AutoPublishAlias='sample',
        CodeUri='.',
        FunctionName='sample-lambda-edge-function',
        Handler='lambda_function.lambda_handler',
        Role=ImportValue(
            CommonResource.ExportName.LAMBDA_EDGE_SERVICE_ROLE_ARN.value),
        Runtime='python3.7',
    ))
    template.add_output(output=Output(title=edge_function.title,
                                      Value=GetAtt(edge_function, 'Arn'),
                                      Export=Export(name=get_export_name())))
    with open('./function.yml', mode='w') as file:
        file.write(template.to_yaml())
def test_s3_filter(self):
    """S3 event filters combining prefix and suffix rules serialize."""
    rules = [
        Rules(Name="prefix", Value="upload/"),
        Rules(Name="suffix", Value=".txt"),
    ]
    upload = S3Event(
        "FileUpload",
        Bucket="bucket",
        Events=["s3:ObjectCreated:*"],
        Filter=Filter(S3Key=S3Key(Rules=rules)),
    )
    template = Template()
    template.add_resource(
        Function(
            "ProcessorFunction",
            Handler="process_file.handler",
            CodeUri=".",
            Runtime="python3.6",
            Policies="AmazonS3FullAccess",
            Events={"FileUpload": upload},
        ))
    template.to_json()
def make_folder_custom_resource(self, bucketname, pathname, dirname):
    """Add a lambda-backed custom resource that creates a folder in S3.

    NOTE(review): the ``bucketname`` parameter is never used -- the custom
    resource targets ``self.affiliatename`` instead; confirm this is intended.
    """
    # 1. Role for the helper lambda: trust document + base policy + S3 write.
    with open('policies/lambda_role_assume_role_doc.json', "r") as f:
        assume_role_doc = json.load(f)
    base_policy = lambda_basepolicy("LambdaBaseRole")
    write_policy = lambda_writeS3('LambdaWriteS3Policy')
    self.template.add_resource(base_policy)
    role = Role("S3MakePathRole",
                AssumeRolePolicyDocument=assume_role_doc,
                ManagedPolicyArns=[Ref(base_policy)],
                Policies=[write_policy])
    self.template.add_resource(role)

    # 2. Lambda that performs the actual S3 put.
    function = Function("S3PutObjectFunction",
                        CodeUri="../lambda_repo",
                        Description="Puts Objects in S3",
                        Handler="helper.handler_mkdir",
                        Role=GetAtt(role, "Arn"),
                        Runtime="python3.6",
                        Timeout=30)
    self.template.add_resource(function)

    # 3. Custom resource invoking the lambda at stack create/update.
    self.template.add_resource(
        CustomResource('S3PutObject',
                       ServiceToken=GetAtt(function, "Arn"),
                       BucketName=self.affiliatename,
                       Path=pathname,
                       DirName=dirname))
# Converted from s3_processor located at:
# https://github.com/awslabs/serverless-application-model/blob/dbc54b5d0cd31bf5cebd16d765b74aee9eb34641/examples/2016-10-31/s3_processor/template.yaml

from troposphere import Template
from troposphere.serverless import Function, DeploymentPreference

t = Template()
# set_description/set_transform replace the deprecated add_* aliases
# (consistent with the other converted example in this codebase).
t.set_description("A function that uses the configured traffic shifting type "
                  "for a canary deployment.")
t.set_transform('AWS::Serverless-2016-10-31')
t.add_resource(
    Function("Function",
             Handler='index.handler',
             Runtime='nodejs6.10',
             CodeUri='s3://<bucket>/function.zip',
             AutoPublishAlias="live",
             DeploymentPreference=DeploymentPreference(
                 Enabled=True,
                 Type="Canary10Percent5Minutes")))
print(t.to_json())
AccessControl=PublicReadWrite, CorsConfiguration=BUCKET_CORS_CONFIG, VersioningConfiguration=BUCKET_VERSIONING_CONFIG, # Uncomment below line to add accelerated write # AccelerateConfiguration=BUCKET_ACCELERATION_CONFIG )) if bucket is BUCKETS[0][0]: T.add_resource(Function( 'S3CreateEventTrigger', FunctionName='S3CreateEventTrigger'.lower(), Handler='index.createEventTrigger', Runtime='python3.7', MemorySize=Ref(MemorySize), Role=GetAtt("LambdaExecutionRole", "Arn"), InlineCode=inspect.getsource(index), Timeout=Ref(Timeout), Events={ 'S3ObjectCreateEvent': S3Event( 'S3ObjectCreateEvent', Bucket=Ref(S3_BUCKET), Events=['s3:ObjectCreated:*'] ) }) ) T.add_resource( Crawler( CRAWLER_NAME, Name=CRAWLER_NAME, Role=GetAtt("LambdaExecutionRole", "Arn"), DatabaseName=CRAWLER_DB_NAME,
t.set_description( "Simple CRUD webservice. State is stored in a SimpleTable (DynamoDB) " "resource." ) t.set_transform("AWS::Serverless-2016-10-31") simple_table = t.add_resource(SimpleTable("Table")) t.add_resource( Function( "GetFunction", Handler="index.get", Runtime="nodejs4.3", CodeUri="s3://<bucket>/api_backend.zip", Policies="AmazonDynamoDBReadOnlyAccess", Environment=Environment(Variables={"TABLE_NAME": Ref(simple_table)}), Events={ "GetResource": ApiEvent( "GetResource", Path="/resource/{resourceId}", Method="get" ) }, ) ) t.add_resource( Function( "PutFunction", Handler="index.put", Runtime="nodejs4.3", CodeUri="s3://<bucket>/api_backend.zip", Policies="AmazonDynamoDBFullAccess",
t.add_description( "Simple CRUD webservice. State is stored in a SimpleTable (DynamoDB) " "resource.") t.add_transform('AWS::Serverless-2016-10-31') simple_table = t.add_resource(SimpleTable("Table")) t.add_resource( Function( "GetFunction", Handler='index.get', Runtime='nodejs4.3', CodeUri='s3://<bucket>/api_backend.zip', Policies='AmazonDynamoDBReadOnlyAccess', Environment=Environment(Variables={'TABLE_NAME': Ref(simple_table)}), Events={ 'GetResource': ApiEvent('GetResource', Path='/resource/{resourceId}', Method='get') })) t.add_resource( Function( "PutFunction", Handler='index.put', Runtime='nodejs4.3', CodeUri='s3://<bucket>/api_backend.zip', Policies='AmazonDynamoDBFullAccess', Environment=Environment(Variables={'TABLE_NAME': Ref(simple_table)}),
def create_aurora_template(region, account_id):
    """Render the sample API + lambda templates (api.yml and swagger.yml).

    Args:
        region: AWS region substituted into the API integration URI.
        account_id: AWS account id substituted into the same URI.
    """
    template = Template()
    template.set_transform('AWS::Serverless-2016-10-31')

    # Template parameters with sensible defaults.
    api_name = template.add_parameter(parameter=Parameter(
        title='ApiName',
        Default='sample-api',
        Type='String',
    ))
    function_name = template.add_parameter(parameter=Parameter(
        title='FunctionName',
        Default='sample-lambda-function',
        Type='String',
    ))
    stage_name = template.add_parameter(parameter=Parameter(
        title='StageName',
        Default='prod',
        Type='String',
    ))

    api = template.add_resource(resource=Api(
        title='SampleApi',
        Name=Ref(api_name),
        DefinitionUri='./swagger.yml',
        StageName=Ref(stage_name),
    ))

    path = '/sample/'
    method = 'get'
    function = template.add_resource(
        resource=Function(title='SampleLambdaFunction',
                          AutoPublishAlias='sample',
                          CodeUri='.',
                          FunctionName=Ref(function_name),
                          Handler='lambda_function.lambda_handler',
                          Role=ImportValue('sample-lambda-service-role-arn'),
                          Runtime='python3.7',
                          Events={
                              'ApiTrigger': {
                                  'Type': 'Api',
                                  'Properties': {
                                      'Path': path,
                                      'Method': method,
                                      'RestApiId': Ref(api)
                                  }
                              }
                          }))
    template.add_resource(
        resource=Permission(title='SampleLambdaFunctionPermission',
                            Action='lambda:InvokeFunction',
                            FunctionName=Ref(function),
                            Principal='apigateway.amazonaws.com'))

    # Fill in the swagger template with the concrete integration URI.
    with open('swagger_template.yml') as f:
        swagger_yaml = f.read()
    uri = URI.replace('{region}', region).replace('{account_id}', account_id) \
        .replace('{function_name}', function_name.Default)
    # TODO: same as the original -- path/method/uri substitution is textual.
    swagger = swagger_yaml.replace('{path}', path).replace('{method}', method).replace('{uri}', uri)

    with open('./api.yml', mode='w') as file:
        file.write(template.to_yaml())
    with open('./swagger.yml', mode='w') as file:
        file.write(swagger)
# Converted from s3_processor located at:
# https://github.com/awslabs/serverless-application-model/blob/dbc54b5d0cd31bf5cebd16d765b74aee9eb34641/examples/2016-10-31/s3_processor/template.yaml

from troposphere import Template
from troposphere.serverless import DeploymentPreference, Function

t = Template()
t.set_description("A function that uses the configured traffic shifting type "
                  "for a canary deployment.")
t.set_transform("AWS::Serverless-2016-10-31")

# Canary deployment: traffic is shifted 10% every 5 minutes.
canary_function = Function(
    "Function",
    Handler="index.handler",
    Runtime="nodejs6.10",
    CodeUri="s3://<bucket>/function.zip",
    AutoPublishAlias="live",
    DeploymentPreference=DeploymentPreference(
        Enabled=True, Type="Canary10Percent5Minutes"),
)
t.add_resource(canary_function)
print(t.to_json())
t.add_description(
    "A function is triggered off an upload to a bucket. It logs the content "
    "type of the uploaded object.")
t.add_transform('AWS::Serverless-2016-10-31')

# Bucket whose ObjectCreated events drive the processor function.
s3_bucket = t.add_resource(Bucket("Bucket"))

upload_event = S3Event(
    'PhotoUpload',
    Bucket=Ref(s3_bucket),
    Events=['s3:ObjectCreated:*'])
t.add_resource(
    Function(
        "ProcessorFunction",
        Handler='index.handler',
        Runtime='nodejs4.3',
        CodeUri='s3://<bucket>/s3_processor.zip',
        Policies='AmazonS3ReadOnlyAccess',
        Events={'PhotoUpload': upload_event}))
print(t.to_json())