def getLambda(name: str, src: Source, role: Role, stack: str, stage: str,
              env_vars: dict, config: dict) -> Function:
    """Create a Lambda Function resource from packaged source and an IAM role.

    :param name: logical base name; sanitized via toAlphanum for the resource id.
    :param src: indexable source location; src[0] is the S3 bucket, src[1] the key.
    :param role: IAM role whose Arn the function assumes.
    :param stack: stack identifier (unused in this body — kept for interface
        compatibility with callers).
    :param stage: deployment stage, appended to the function name.
    :param env_vars: mapping of environment variable names to values.
    :param config: overrides; requires "Handler", honors optional
        "MemorySize" (default 128) and "Timeout" (default 60).
    """
    code = Code(S3Bucket=src[0], S3Key=src[1])
    func_name = "".join([name, stage])
    # Use a distinct local so the env_vars parameter is not shadowed by the
    # troposphere Environment wrapper.
    environment = Environment(Variables=env_vars)
    # dict.get with a default replaces the manual "in" membership checks.
    memory = config.get("MemorySize", 128)
    timeout = config.get("Timeout", 60)
    return Function(
        toAlphanum(name),
        FunctionName=func_name,
        Handler=config["Handler"],
        Code=code,
        Role=GetAtt(role, "Arn"),
        Runtime="python3.6",
        Environment=environment,
        MemorySize=memory,
        Timeout=timeout,
    )
def make_lambda_function(*, name, lambda_params=None, role):
    """Build the troposphere Function for *name*, optionally layering extra
    properties from *lambda_params* on top of the defaults."""
    camel_case_name = get_camel_case(name)
    s3_key = make_s3_key("{}.zip".format(name))
    # Per-function object version attribute, e.g. environment.<name>_version.
    source_version = getattr(environment, "{}_version".format(name))
    function = Function(
        "{}Function".format(camel_case_name),
        FunctionName=Sub(
            "${StackName}_hyp3_${FunctionName}",
            StackName=Ref('AWS::StackName'),
            FunctionName=name
        ),
        Code=make_lambda_code(
            S3Bucket=environment.source_bucket,
            S3Key=s3_key,
            S3ObjectVersion=source_version
        ),
        Handler="lambda_function.lambda_handler",
        Role=GetAtt(role, "Arn"),
        Runtime="python3.6"
    )
    if lambda_params is not None:
        for prop, value in lambda_params.items():
            setattr(function, prop, value)
    return function
def lambda_function(**kwargs):
    """Create a Lambda Function resource with overridable defaults.

    Recognized special keyword arguments:
      * Layers: each entry is passed through filter_layer before assignment.
      * S3Bucket / S3Key: combined into a single Code property (both required).
      * Role: passed through filter_iamrole.
      * Environment: a dict is wrapped in Environment(Variables=...); an
        Environment instance is used as-is; other types are ignored.
    Any other keyword argument is set verbatim on the function.
    """
    function = Function('LambdaFunction',
                        Code=Code(S3Bucket='replace-me', S3Key='replace-me'),
                        Handler='function.lambda_handler',
                        MemorySize='256',
                        Timeout=30)
    # Handle the S3 pair once up front instead of rebuilding Code for each of
    # the two keys, and fail loudly when only one half of the pair is given.
    if 'S3Bucket' in kwargs or 'S3Key' in kwargs:
        if 'S3Bucket' not in kwargs or 'S3Key' not in kwargs:
            raise ValueError("S3Bucket and S3Key must be provided together")
        function.Code = Code(S3Bucket=kwargs.pop('S3Bucket'),
                             S3Key=kwargs.pop('S3Key'))
    for key, value in kwargs.items():
        if key == 'Layers':
            # Leftover debug print of the resolved layers removed.
            setattr(function, key, [filter_layer(layer) for layer in value])
        elif key == 'Role':
            function.Role = filter_iamrole(value)
        elif key == 'Environment':
            if isinstance(value, dict):
                setattr(function, key, Environment(Variables=value))
            elif isinstance(value, Environment):
                setattr(function, key, value)
        else:
            setattr(function, key, value)
    return function
def test_zip_file(self):
    """An inline ZipFile Code block must serialize into a valid template."""
    inline_source = Join(
        "",
        [
            "var response = require('cfn-response');",
            "exports.handler = function(event, context) {",
            " var input = parseInt(event.ResourceProperties.Input);",
            " var responseData = {Value: input * 5};",
            " response.send(" " event, context, response.SUCCESS, responseData" " );",
            "};",
        ],
    )
    function = Function(
        "AMIIDLookup",
        Handler="index.handler",
        Role=GetAtt("LambdaExecutionRole", "Arn"),
        Code=Code(ZipFile=inline_source),
        Runtime="nodejs",
        Timeout="25",
    )
    template = Template()
    template.add_resource(function)
    template.to_json()
def test_package_type_image(self):
    """Image package type with an ImageUri should validate cleanly."""
    function = Function(
        "TestFunction",
        Code=Code(ImageUri="something"),
        PackageType="Image",
        Role=GetAtt("LambdaExecutionRole", "Arn"),
    )
    function.validate()
def gen_postgis_function():
    """Build the PostGIS provisioner Lambda, wired into the network stack's
    private subnets.

    Returns:
        Function: resource with S3-hosted code, the provisioner role, and a
        VpcConfig selecting the first two imported private subnets.
    """
    # The imported comma-separated subnet list was previously built twice;
    # construct the Split expression once and reuse it for both Selects.
    private_subnets = Split(
        ",",
        ImportValue(Sub("${NetworkName}-network-vpc-PrivateSubnets"))
    )
    return Function(
        "PostGisProvisionerFunction",
        Code=Code(
            S3Bucket=Ref("BucketName"),
            S3Key=Ref("BucketKey"),
        ),
        FunctionName=Sub("${AWS::StackName}-PostGisProvisioner"),
        Handler="postgis_provisioner.lambda_handler",
        Role=GetAtt("PostgisProvisionerExecutionRole", "Arn"),
        Timeout="60",
        Runtime="python3.6",
        VpcConfig=VPCConfig(
            SecurityGroupIds=[Ref("PostGisProvisionerSg")],
            SubnetIds=[
                Select(0, private_subnets),
                Select(1, private_subnets),
            ],
        ),
    )
def __init__(self, prefix: str, lambda_under_deployment: Function) -> None:
    """
    Constructor.

    :param prefix: A prefix for deployment lambda resource names.
    :param lambda_under_deployment: An AWS Lambda function to execute deployments against.
    """
    # Inline policy: log access, code-update rights on any lambda, and full
    # S3 access for fetching deployment artifacts.
    deployment_policy_document = {
        "Version": "2012-10-17",
        "Statement": [{
            "Action": ["logs:*"],
            "Resource": "arn:aws:logs:*:*:*",
            "Effect": "Allow"
        }, {
            "Action": ["lambda:UpdateFunctionCode"],
            "Resource": "*",
            "Effect": "Allow"
        }, {
            "Action": ["s3:*"],
            "Resource": "*",
            "Effect": "Allow"
        }]
    }
    # Trust policy letting the Lambda service assume this role.
    assume_role_document = {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Action": ["sts:AssumeRole"],
                "Effect": "Allow",
                "Principal": {
                    "Service": [
                        "lambda.amazonaws.com",
                    ]
                }
            }
        ]
    }
    self.lambda_role = Role(
        prefix + "DeploymentLambdaRole",
        Path="/",
        Policies=[Policy(
            PolicyName=prefix + "DeploymentLambdaRole",
            PolicyDocument=deployment_policy_document)],
        AssumeRolePolicyDocument=assume_role_document,
    )
    self.function = Function(
        prefix + "DeploymentLambda",
        Code=Code(ZipFile=self.__read_template()),
        Handler='index.handler',
        Role=GetAtt(self.lambda_role, "Arn"),
        Runtime='python3.6',
        MemorySize='128',
        FunctionName=prefix + 'DeploymentLambda',
        Timeout='10',
        Environment=Environment(
            Variables={
                'LAMBDA_FUNCTION_NAME': Ref(lambda_under_deployment)
            }
        ),
        Description=(
            f'Deployment lambda which updates lambda under deployment function code '
            f'from an output from a ci/cd pipeline for {prefix.lower()}.'
        )
    )
def _setup_firehose_custom_resource(self):
    """Register the IAM role, policy, and Lambda backing the Firehose
    CloudFormation custom resource."""
    # Trust policy letting the Lambda service assume the execution role.
    lambda_trust_policy = Policy(
        Version="2012-10-17",
        Statement=[
            Statement(Effect=Allow,
                      Action=[AssumeRole],
                      Principal=Principal("Service", "lambda.amazonaws.com"))
        ])
    self.FirehoseLambdaCFExecRole = self.add_resource(
        Role("FirehoseLambdaCFRole",
             AssumeRolePolicyDocument=lambda_trust_policy,
             Path="/"))
    # CloudWatch Logs write access for the custom-resource lambda.
    log_statement = Statement(
        Effect=Allow,
        Action=[
            Action("logs", "CreateLogGroup"),
            Action("logs", "CreateLogStream"),
            Action("logs", "PutLogEvents"),
        ],
        Resource=["arn:aws:logs:*:*:*"])
    # Full lifecycle management of Firehose delivery streams.
    firehose_statement = Statement(
        Effect=Allow,
        Action=[
            Action("firehose", "CreateDeliveryStream"),
            Action("firehose", "DeleteDeliveryStream"),
            Action("firehose", "ListDeliveryStreams"),
            Action("firehose", "DescribeDeliveryStream"),
            Action("firehose", "UpdateDestination"),
        ],
        Resource=["*"])
    self.FirehoseLambdaPolicy = self.add_resource(
        PolicyType("FirehoseCFPolicy",
                   PolicyName="FirehoseLambdaCFRole",
                   PolicyDocument=Policy(
                       Version="2012-10-17",
                       Statement=[log_statement, firehose_statement]),
                   Roles=[Ref(self.FirehoseLambdaCFExecRole)],
                   DependsOn="FirehoseLambdaCFRole"))
    self.FirehoseCFCustomResource = self.add_resource(
        Function("FirehoseCustomResource",
                 Description=(
                     "Creates, updates, and deletes Firehose delivery streams"),
                 Runtime="python2.7",
                 Timeout=300,
                 Handler="lambda_function.lambda_handler",
                 Role=GetAtt(self.FirehoseLambdaCFExecRole, "Arn"),
                 Code=Code(
                     S3Bucket="cloudformation-custom-resources",
                     S3Key="firehose_lambda.zip",
                 ),
                 DependsOn="FirehoseCFPolicy"))
def test_package_type_invalid(self):
    """An unsupported PackageType value must raise ValueError."""
    with self.assertRaises(ValueError):
        function = Function(
            "TestFunction",
            Code=Code(ImageUri="something"),
            PackageType="Invalid",
            Role=GetAtt("LambdaExecutionRole", "Arn"),
        )
        function.validate()
def build(self) -> Function:
    """Validate the builder state and assemble the Function resource."""
    checkForNoneValues(self)
    # FunctionName is suffixed with the stack name to keep it unique per stack.
    stack_scoped_name = Sub(self._name + "${AWS::StackName}")
    runtime_environment = Environment(Variables=self._envVars)
    return Function(
        self._name,
        Code=self._code,
        Handler=self._handler,
        FunctionName=stack_scoped_name,
        MemorySize=self._memory,
        Role=GetAtt(self._role, "Arn"),
        Runtime=str(self._runtime),
        Environment=runtime_environment,
    )
def gen_lambda_function():
    """Return the CloudFormation-cleanup Lambda resource with inline code."""
    inline_code = Code(ZipFile=Join("", get_code()))
    return Function(
        "CloudformationCleanupFunction",
        Code=inline_code,
        Handler="index.lambda_handler",
        Role=GetAtt("LambdaExecutionRole", "Arn"),
        Runtime="python3.6",
        MemorySize=Ref(parameters['LambdaMemorySize']),
        Timeout=Ref(parameters['LambdaTimeout']),
        Tags=gen_tags("CloudFormation-Cleaner"),
    )
def build_function(self):
    """Add a Lambda built from sceptre_user_data (with parameterized Name and
    Role) plus an Arn output to the template."""
    template = self.template
    name = template.add_parameter(Parameter("Name", Type="String"))
    role = template.add_parameter(Parameter("Role", Type="String"))
    properties = self.sceptre_user_data
    properties["FunctionName"] = Ref(name)
    properties["Role"] = Ref(role)
    # The raw Code mapping is replaced with a troposphere Code object.
    properties["Code"] = Code(**properties["Code"])
    function = template.add_resource(Function("Function", **properties))
    template.add_output(Output("Arn", Value=GetAtt(function, "Arn")))
def _setup_s3writer_custom_resource(self):
    """Register the IAM role, policy, and Lambda backing the S3-writer
    CloudFormation custom resource."""
    # Trust policy letting the Lambda service assume the execution role.
    lambda_trust_policy = Policy(
        Version="2012-10-17",
        Statement=[
            Statement(Effect=Allow,
                      Action=[AssumeRole],
                      Principal=Principal("Service", "lambda.amazonaws.com"))
        ])
    self.S3WriterLambdaCFExecRole = self.add_resource(
        Role("S3WriterLambdaCFRole",
             AssumeRolePolicyDocument=lambda_trust_policy,
             Path="/"))
    # CloudWatch Logs write access for the custom-resource lambda.
    log_statement = Statement(
        Effect=Allow,
        Action=[
            Action("logs", "CreateLogGroup"),
            Action("logs", "CreateLogStream"),
            Action("logs", "PutLogEvents"),
        ],
        Resource=["arn:aws:logs:*:*:*"])
    # Object-level S3 access for writing managed files.
    s3_statement = Statement(
        Effect=Allow,
        Action=[
            s3.DeleteObject,
            s3.ListBucket,
            s3.PutObject,
            s3.GetObject,
        ],
        Resource=["*"])
    self.S3WriterCFPolicy = self.add_resource(
        PolicyType("S3WriterCFPolicy",
                   PolicyName="S3WriterLambdaCFRole",
                   PolicyDocument=Policy(
                       Version="2012-10-17",
                       Statement=[log_statement, s3_statement]),
                   Roles=[Ref(self.S3WriterLambdaCFExecRole)],
                   DependsOn="S3WriterLambdaCFRole"))
    self.S3WriterCFCustomResource = self.add_resource(
        Function("S3WriterCustomResource",
                 Description=(
                     "Creates, updates, and deletes S3 Files with custom content"
                 ),
                 Runtime="python2.7",
                 Timeout=300,
                 Handler="lambda_function.lambda_handler",
                 Role=GetAtt(self.S3WriterLambdaCFExecRole, "Arn"),
                 Code=Code(
                     S3Bucket="cloudformation-custom-resources",
                     S3Key="s3writer_lambda.zip",
                 ),
                 DependsOn="S3WriterCFPolicy"))
def add_lambda(self, name, s3_bucket, role):
    """Add a versioned Lambda function plus the API Gateway invocation URI
    output for it."""
    template = self.t
    key_param = template.add_parameter(Parameter(
        f"{name}KeyS3",
        Description=f"S3 key for lambda function: {name}",
        Type="String"
    ))
    version_param = template.add_parameter(Parameter(
        f"{name}ObjectVersionS3",
        Description=f"S3 object version ID for lambda function: {name}",
        Type="String"
    ))
    function = template.add_resource(Function(
        f"{name}Lambda",
        FunctionName=name,
        Handler="index.handler",
        Runtime="python3.6",
        Role=GetAtt(role, "Arn"),
        Code=Code(
            S3Bucket=Ref(s3_bucket),
            S3Key=Ref(key_param),
            S3ObjectVersion=Ref(version_param)
        )
    ))
    # Custom resource that publishes a Lambda version pinned to the S3 object.
    version = template.add_resource(CustomLambdaVersion(
        f"{name}LambdaVersion",
        ServiceToken=GetAtt(self.custom_lambda_version_lambda, "Arn"),
        FunctionName=Ref(function),
        S3ObjectVersion=Ref(version_param)
    ))
    # Fully-qualified, version-pinned invocation ARN for API Gateway.
    invocation_uri = Join('', [
        'arn:aws:apigateway:',
        Ref('AWS::Region'),
        ':lambda:path/2015-03-31/functions/arn:aws:lambda:',
        Ref('AWS::Region'),
        ':',
        Ref('AWS::AccountId'),
        ':function:',
        Ref(function),
        ':',
        GetAtt(version, "Version"),
        '/invocations',
    ])
    template.add_output(Output(
        f"{name}LambdaURI",
        Value=invocation_uri,
        Description=f"{name}LambdaURI"
    ))
def add_lambda_function(self):
    """Add the spot-termination handler Lambda, inlining its source code.

    Fix: the source file is now read inside a ``with`` block so the file
    handle is closed instead of leaking.
    """
    with open("templates/lambda_code/lambda_function.py", "r") as source_file:
        source = source_file.read()
    self.lambda_function = self.template.add_resource(
        Function(
            "SpotTerminatingSceptre",
            FunctionName="Spot_Terminate_Sceptre",
            Description=
            "Function that trigers when a spot instance is marked for termination due to outbid",
            Code=Code(ZipFile=source),
            Handler="index.lambda_handler",
            Role=GetAtt("LambdaExecutionRolemARC", "Arn"),
            Runtime="python3.6",
        ))
def __init__(self, sceptre_user_data):
    """Build a template exposing one Lambda whose properties come from
    sceptre_user_data, with Name and Role supplied as parameters."""
    template = Template()
    self.template = template
    name_param = template.add_parameter(Parameter("Name", Type="String"))
    role_param = template.add_parameter(Parameter("Role", Type="String"))
    sceptre_user_data["FunctionName"] = Ref(name_param)
    sceptre_user_data["Role"] = Ref(role_param)
    # The raw Code mapping is replaced with a troposphere Code object.
    sceptre_user_data["Code"] = Code(**sceptre_user_data["Code"])
    function = template.add_resource(Function("Function", **sceptre_user_data))
    template.add_output(Output("Arn", Value=GetAtt(function, "Arn")))
def add_resources(self):
    """Create the Lambda execution role and the runner-unregister function."""
    # Inline policy granting CloudWatch Logs write access.
    log_policy = Policy(
        PolicyName="root",
        PolicyDocument={
            "Version": "2012-10-17",
            "Statement": [{
                "Action": ["logs:*"],
                "Resource": "arn:aws:logs:*:*:*",
                "Effect": "Allow",
            }],
        },
    )
    # Trust policy letting the Lambda service assume this role.
    assume_role_document = {
        "Version": "2012-10-17",
        "Statement": [{
            "Action": ["sts:AssumeRole"],
            "Effect": "Allow",
            "Principal": {
                "Service": ["lambda.amazonaws.com"]
            },
        }],
    }
    self.lambda_execution_role = self.template.add_resource(
        Role(
            "LambdaExecutionRole",
            Path="/",
            ManagedPolicyArns=[
                "arn:aws:iam::aws:policy/AmazonSSMFullAccess",
                "arn:aws:iam::aws:policy/AutoScalingFullAccess",
            ],
            Policies=[log_policy],
            AssumeRolePolicyDocument=assume_role_document,
        ))
    with open("lambda_functions/unregister_runner.py", "r") as f:
        self.runner_unregister_code = f.read()
    self.runner_unregister_function = self.template.add_resource(
        Function(
            "RunnerUnregisterFunction",
            Code=Code(ZipFile=self.runner_unregister_code),
            Handler=Ref(self.runner_lambda_handler),
            Role=GetAtt("LambdaExecutionRole", "Arn"),
            Runtime=Ref(self.runner_lambda_runtime),
            MemorySize="128",
            Timeout="30",
        ))
def test_exclusive(self):
    """S3-based Code (no ZipFile) should serialize into valid JSON."""
    function = Function(
        "AMIIDLookup",
        Handler="index.handler",
        Role=GetAtt("LambdaExecutionRole", "Arn"),
        Code=Code(S3Bucket="lambda-functions", S3Key="amilookup.zip"),
        Runtime="nodejs",
        Timeout="25",
    )
    template = Template()
    template.add_resource(function)
    template.to_json()
def test_package_type_zip(self):
    """Zip package type with inline ZipFile code should validate cleanly."""
    inline_source = Join("", [
        "var response = require('cfn-response');",
        "exports.handler = function(event, context) {",
        " var input = parseInt(event.ResourceProperties.Input);",
        " var responseData = {Value: input * 5};",
        " response.send(" " event, context, response.SUCCESS, responseData" " );",
        "};"
    ])
    function = Function(
        "TestFunction",
        Code=Code(ZipFile=inline_source),
        Handler="index.handler",
        PackageType="Zip",
        Role=GetAtt("LambdaExecutionRole", "Arn"),
        Runtime="nodejs",
    )
    function.validate()
def build_function(self):
    """Add a Lambda whose code location comes from the S3_* entries in
    sceptre_user_data, plus an Arn output."""
    template = self.template
    name = template.add_parameter(Parameter("Name", Type="String"))
    role = template.add_parameter(Parameter("Role", Type="String"))
    properties = self.sceptre_user_data
    properties["FunctionName"] = Ref(name)
    properties["Role"] = Ref(role)
    # The S3_* entries are consumed here so they are not passed through as
    # (invalid) Function properties.
    properties["Code"] = Code(
        S3Bucket=properties.pop("S3_Bucket"),
        S3Key=properties.pop("S3_Key"),
        S3ObjectVersion=properties.pop("S3_Version"),
    )
    function = template.add_resource(Function("Function", **properties))
    template.add_output(Output("Arn", Value=GetAtt(function, "Arn")))
def create_lambda_function(self):
    """Create the DDB-Streams-to-Elasticsearch Lambda.

    Fix: the original contained the bare tokens ``ES ENDPOINT HERE`` in the
    environment mapping, which is a syntax error; the endpoint is now a
    clearly-marked placeholder string.
    """
    self.create_lambda_role()
    t = self.template
    self.lambda_fn = t.add_resource(Function(
        "Function",
        Code=Code(S3Bucket="js-test-buckett", S3Key="lambda_code.zip"),
        Description="Function that streams data from DDB Streams to ElasticSearch",
        Environment=Environment(
            Variables={
                # TODO: replace with the real Elasticsearch endpoint (or a
                # GetAtt on the ES domain resource, if one exists in this
                # stack).
                "ES_ENDPOINT": "REPLACE-WITH-ES-ENDPOINT",
                "ES_INDEX": "movies",
                "ES_DOCTYPE": "movie"
            }),
        Handler="lambda_code.handler",
        Role=GetAtt(self.LambdaExecutionRole, "Arn"),
        Runtime="nodejs6.10"
    ))
def add_version(self, title: str, lambda_function: awslambda.Function) -> awslambda.Version:
    """Create a version association with a Lambda@Edge function.

    In order to ensure different versions of the function are appropriately
    uploaded a hash based on the code of the lambda is appended to the name.
    As the code changes so will this hash value.

    Args:
        title: The name of the function in PascalCase.
        lambda_function: The Lambda function.
    """
    code_properties = lambda_function.properties["Code"].to_dict()
    # S3 key looks like "<name>-<hash>.<ext>": drop the extension, then take
    # the trailing hash segment.
    key_stem = code_properties["S3Key"].split(".")[0]
    code_hash = key_stem.split("-")[-1]
    version = awslambda.Version(title + "Ver" + code_hash,
                                FunctionName=lambda_function.ref())
    return self.template.add_resource(version)
def create_function(self, code, role, environment=None, memory='128',
                    timeout='5', description='', handler='lambda.handler',
                    name_prefix='', **kwargs):
    """Add a Python Lambda function to the template and return the resource.

    Extra keyword arguments are forwarded verbatim as Function properties.
    """
    environment_variables = self.create_environment_variables(environment)
    function = Function(
        '{0}Function'.format(name_prefix),
        Runtime=self.PYTHON_RUNTIME,
        Code=code,
        Handler=handler,
        Timeout=timeout,
        MemorySize=memory,
        Description=description,
        Role=role,
        Environment=environment_variables,
        **kwargs)
    return self.template.add_resource(function)
def create_lambda_function(self):
    """Create the streaming Lambda and export its name and Arn as outputs."""
    self.create_lambda_role()
    template = self.template
    runtime_env = Environment(
        Variables={
            "ES_ENDPOINT": GetAtt(self.es_domain, "DomainEndpoint"),
            "ES_INDEX": "movies",
            "ES_DOCTYPE": "movie"
        })
    self.lambda_fn = template.add_resource(
        Function(
            "Function",
            Code=Code(S3Bucket="js-test-buckett", S3Key="lambda_code"),
            Description=
            "Function that streams data from DDB Streams to ElasticSearch",
            Environment=runtime_env,
            Handler="index.handler",
            Role=GetAtt(self.LambdaExecutionRole, "Arn"),
            Runtime="nodejs6.10"))
    template.add_output(Output("FunctionName", Value=Ref(self.lambda_fn)))
    template.add_output(Output("FunctionArn", Value=GetAtt(self.lambda_fn, "Arn")))
def add_lambda_function(self):
    """Assemble the Lambda from sceptre_user_data and add it, plus an Arn
    output, to the template.

    Fix: removed a stray ``print`` token (Python 2 leftover) that made the
    VPC-config branch a syntax error under Python 3.
    """
    # NOTE(review): if neither CodeFilepath nor CodeInS3 is present, `code`
    # is unbound and this raises NameError — same as the original behavior.
    properties = self.sceptre_user_data["FunctionProperties"]
    if "CodeFilepath" in properties:
        file_path = properties.pop("CodeFilepath")
        code = Code(ZipFile=Join("", code_from_file(file_path)))
    elif "CodeInS3" in properties:
        s3_code = properties.pop("CodeInS3")
        code = Code(**s3_code)
    function_kwargs = {
        "FunctionName": Ref(self.name),
        "Description": Ref(self.description),
        "Handler": Ref(self.handler),
        "Role": Ref(self.iam_role),
        "Runtime": Ref(self.runtime),
        "Code": code
    }
    # Remaining user-supplied properties override/extend the defaults.
    function_kwargs.update(properties)
    if "Environment" in function_kwargs:
        function_kwargs["Environment"] = Environment(
            Variables=function_kwargs.pop("Environment"))
    if function_kwargs.pop("InVpc", False):
        function_kwargs["VpcConfig"] = VPCConfig(
            SecurityGroupIds=Ref(self.sg_ids),
            SubnetIds=Ref(self.subnet_ids)
        )
    self.function = self.template.add_resource(
        Function("LambdaFunction", **function_kwargs))
    self.template.add_output(
        Output("LambdaArn",
               Description="The ARN of the Lambda Function",
               Value=GetAtt(self.function, "Arn")))
def create_lambda(title, bucketname, filename, handlername, role_arn, environment):
    """
    Create Lambda function

    :param title: name of function
    :param bucketname: name of bucket with source file
    :param filename: path to source file
    :param handlername: handler function name
    :param role_arn: role to run under
    :param environment: runtime environment to use (Python, NodeJS, etc.)
    """
    print('Adding lambda {0} to template...'.format(title))
    return Function(
        title,
        Code=Code(S3Bucket=bucketname, S3Key=filename),
        Description='Cloudsploit',
        FunctionName=title,
        Handler=handlername,
        MemorySize=128,
        Role=role_arn,
        Runtime=environment,
        Timeout=300,
    )
def add_lambda(self, lambda_name, lambda_role_name=None, lambda_handler='index.handler'):
    """Add a Lambda function (with a default echo handler) and its execution
    role to the template."""
    self.log.info('Adding AWS Lambda Function %s with handler %s' % (lambda_name, lambda_handler))
    if lambda_role_name is None:
        self.lambda_role = self.create_lambda_role()
    else:
        self.lambda_role = self.create_lambda_role(lambda_role_name)
    self.template.add_resource(self.lambda_role)
    # Placeholder handler that echoes the event back; the real code is
    # uploaded later through a function-code update.
    default_source = Join('\n', [
        'import json',
        '',
        'def handler(event, context):',
        '\tresponse = {',
        '\t\t\'statusCode\': 200,',
        '\t\t\'headers\': {},',
        '\t\t\'body\': json.dumps(event),',
        '\t\t\'isBase64Encoded\': False',
        '\t}',
        '\treturn response'
    ])
    self.lambda_function = Function(
        lambda_name,
        Code=Code(ZipFile=default_source),
        Handler=lambda_handler,
        Runtime='python3.6',
        Role=GetAtt(self.lambda_role, 'Arn'))
    self.template.add_resource(self.lambda_function)
"Service": [ "lambda.amazonaws.com", "apigateway.amazonaws.com" ] } }] }, )) lambdaFunction = t.add_resource(Function( "lambdaFunction", FunctionName="GDACK", Code=Code( S3Bucket=Ref(lambdaBucket), S3Key=Ref(lambdaKey) ), Description="GDACK Function", Handler="index.handler", Role=GetAtt(lambdaRole, "Arn"), Runtime="python3.6", MemorySize=128, Timeout="180" )) restApi = t.add_resource(api.RestApi( "restApi", Name="GDACK" )) apiResource = t.add_resource(api.Resource( "apiResource", RestApiId=Ref(restApi),
)) function_role = t.add_resource( Role( "FunctionRole", AssumeRolePolicyDocument=Policy(Statement=[ Statement(Effect=Allow, Action=[AssumeRole], Principal=Principal("Service", ["lambda.amazonaws.com"])) ]))) aws_lambda = t.add_resource( Function("LambdaFunction", Code=Code(ZipFile="exports.handler = function(event,context){}"), Description="A function template", Handler=Ref(handler), MemorySize=Ref(memory_size), Role=GetAtt(function_role, "Arn"), Runtime="nodejs8.10", Timeout=Ref(timeout))) function_policy = t.add_resource( PolicyType("FunctionPolicy", PolicyDocument={ "Id": "FunctionPolicy", "Version": "2012-10-17", "Statement": [{ "Effect": "Allow", "Action": [
"Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "Service": ["lambda.amazonaws.com", "apigateway.amazonaws.com"] } }] }, )) # Create the Lambda function foobar_function = t.add_resource( Function( "FoobarFunction", Code=Code(ZipFile=Join("", code)), Handler="index.handler", Role=GetAtt("LambdaExecutionRole", "Arn"), Runtime="nodejs4.3", )) # Create a resource to map the lambda function to resource = t.add_resource( Resource( "FoobarResource", RestApiId=Ref(rest_api), PathPart="foobar", ParentId=GetAtt("ExampleApi", "RootResourceId"), )) # Create a Lambda API method for the Lambda resource method = t.add_resource(