def get_cloudformation_template(self, lambda_filename):
    """Build a SAM/CloudFormation template deploying this bot's Lambda.

    :param lambda_filename: S3 key of the packaged Lambda zip inside
        ``self.s3_bucket_name``.
    :return: a troposphere ``Template`` containing the serverless Function
        plus one ``lambda:InvokeFunction`` Permission per Lex intent.
    """
    # Fix: Ref and the AWS_REGION / AWS_ACCOUNT_ID pseudo-parameter
    # constants were used below but never imported, so building the
    # Permission resources raised NameError.
    from troposphere import Template, GetAtt, Join, Ref, AWS_REGION, AWS_ACCOUNT_ID
    from troposphere.awslambda import Environment
    from troposphere.awslambda import Permission
    from troposphere.serverless import Function

    t = Template()
    t.add_description("Built with WavyCloud's pylexbuilder")
    t.add_transform('AWS::Serverless-2016-10-31')
    lambda_func = t.add_resource(
        Function(
            self.name,
            Handler='handler.index',
            Runtime=self.runtime,
            CodeUri='s3://{}/{}'.format(self.s3_bucket_name, lambda_filename),
            Policies=['AmazonDynamoDBFullAccess', 'AmazonLexFullAccess'],
            AutoPublishAlias=self.lambda_alias,
            Environment=Environment(
                Variables=self.environment_variables)),
    )
    # Allow Amazon Lex to invoke the function from every intent of this bot.
    for i, intent in enumerate(self.get_all_intents()):
        t.add_resource(
            Permission(
                "PermissionToLex{}".format(intent.name),
                FunctionName=GetAtt(lambda_func, "Arn"),
                Action="lambda:InvokeFunction",
                Principal="lex.amazonaws.com",
                SourceArn=Join("", [
                    'arn:aws:lex:',
                    Ref(AWS_REGION),
                    ':',
                    Ref(AWS_ACCOUNT_ID),
                    ':intent:{}:*'.format(intent.name)
                ])))
    return t
def test_environment_variable_invalid_name(self):
    """Names that are not valid env-var identifiers must raise ValueError."""
    bad_names = ("1", "2var", "_var", "/var")
    for name in bad_names:
        with self.assertRaises(ValueError) as context:
            Environment(Variables={name: "value"})
        self.assertIn("Invalid environment variable name: %s" % name,
                      context.exception.args)
def __init__(self, prefix: str, lambda_under_deployment: Function) -> None:
    """
    Constructor.

    :param prefix: A prefix for deployment lambda resource names.
    :param lambda_under_deployment: An AWS Lambda function to execute deployments against.
    """
    # IAM role assumed by the deployment lambda.  The inline policy grants:
    #   - full CloudWatch Logs access (the lambda's own logging),
    #   - lambda:UpdateFunctionCode on any function (the actual deployment),
    #   - full S3 access (reading the deployment artifact).
    # NOTE(review): the "*" resources are broad; consider scoping them to the
    # target function and the artifact bucket.
    self.lambda_role = Role(
        prefix + "DeploymentLambdaRole",
        Path="/",
        Policies=[Policy(
            PolicyName=prefix + "DeploymentLambdaRole",
            PolicyDocument={
                "Version": "2012-10-17",
                "Statement": [{
                    "Action": ["logs:*"],
                    "Resource": "arn:aws:logs:*:*:*",
                    "Effect": "Allow"
                }, {
                    "Action": ["lambda:UpdateFunctionCode"],
                    "Resource": "*",
                    "Effect": "Allow"
                }, {
                    "Action": ["s3:*"],
                    "Resource": "*",
                    "Effect": "Allow"
                }]
            })],
        AssumeRolePolicyDocument={"Version": "2012-10-17", "Statement": [
            {
                "Action": ["sts:AssumeRole"],
                "Effect": "Allow",
                "Principal": {
                    "Service": [
                        "lambda.amazonaws.com",
                    ]
                }
            }
        ]},
    )
    # Inline (ZipFile) lambda whose source comes from a bundled template
    # file; the target function's name is passed via the environment so the
    # handler knows which function to update.
    self.function = Function(
        prefix + "DeploymentLambda",
        Code=Code(ZipFile=self.__read_template()),
        Handler='index.handler',
        Role=GetAtt(self.lambda_role, "Arn"),
        Runtime='python3.6',
        MemorySize='128',
        FunctionName=prefix + 'DeploymentLambda',
        Timeout='10',
        Environment=Environment(
            Variables={
                'LAMBDA_FUNCTION_NAME': Ref(lambda_under_deployment)
            }
        ),
        Description=(
            f'Deployment lambda which updates lambda under deployment function code '
            f'from an output from a ci/cd pipeline for {prefix.lower()}.'
        )
    )
def add_to_ddb_lambda(self):
    """Add the 'add entry' Lambda (PUT /entry/{entryId}) to the template.

    The function body is inlined via ZipFile and receives the DynamoDB
    table name through the TABLE_NAME environment variable.
    """
    self.add_to_ddb_lambda_function = self.template.add_resource(Function(
        "AddToDDBFunction",
        Code=Code(
            ZipFile=Join("", [
                "import cfnresponse, boto3\n",
                "def add(event, context): \n",
                # Fix: the inline handler returned `null`, which is not a
                # Python name and raised NameError on every invocation.
                " return None",
            ])
        ),
        Handler="index.add",
        Runtime="python3.6",
        Environment=Environment(
            Variables={
                "TABLE_NAME": Ref(self.dynamo_db)
            }
        ),
        Events={
            "AddEntry": ApiEvent(
                "AddEntry",
                Path="/entry/{entryId}",
                Method="put"
            )
        }
    ))
def test_environment_variable_invalid_name(self):
    """Names that are not valid env-var identifiers must raise ValueError."""
    for var in ['1', '2var', '_var', '/var']:
        with self.assertRaises(ValueError) as context:
            Environment(Variables={var: 'value'})
        # Fix: `x in context.exception` raises TypeError in Python 3
        # (exception instances are not iterable); check the message text.
        self.assertIn('Invalid environment variable name: %s' % var,
                      str(context.exception))
def lambda_function(**kwargs):
    """Build a troposphere Lambda Function resource from keyword overrides.

    Keyword arguments become Function properties, with special handling:
    Layers entries are resolved via filter_layer, S3Bucket/S3Key rebuild the
    Code property, Role is resolved via filter_iamrole, and Environment
    accepts either a plain dict or an Environment resource.

    :return: the configured Function resource.
    """
    function = Function('LambdaFunction',
                        Code=Code(S3Bucket='replace-me', S3Key='replace-me'),
                        Handler='function.lambda_handler',
                        MemorySize='256',
                        Timeout=30)
    for key, value in kwargs.items():
        if key == 'Layers':
            # Fix: removed leftover debug print() of the layer list.
            setattr(function, key, [filter_layer(layer) for layer in value])
        elif key in ('S3Bucket', 'S3Key'):
            # Both keys are expected together; either one rebuilds Code.
            setattr(function, 'Code',
                    Code(S3Bucket=kwargs['S3Bucket'], S3Key=kwargs['S3Key']))
        elif key == 'Role':
            setattr(function, 'Role', filter_iamrole(value))
        elif key == 'Environment':
            if isinstance(value, dict):
                setattr(function, key, Environment(Variables=value))
            elif isinstance(value, Environment):
                setattr(function, key, value)
        else:
            setattr(function, key, value)
    return function
def getLambda(name: str, src: Source, role: Role, stack: str, stage: str,
              env_vars: dict, config: dict) -> Function:
    '''
    Takes the source code and an IAM role and creates a lambda function
    '''
    # Package location: (bucket, key) tuple.
    bundle = Code(S3Bucket=src[0], S3Key=src[1])
    environment = Environment(Variables=env_vars)
    # Sizing defaults, overridable through config.
    memory = config["MemorySize"] if "MemorySize" in config else 128
    timeout = config["Timeout"] if "Timeout" in config else 60
    return Function(
        toAlphanum(name),
        FunctionName=name + stage,
        Handler=config["Handler"],
        Code=bundle,
        Role=GetAtt(role, "Arn"),
        Runtime="python3.6",
        Environment=environment,
        MemorySize=memory,
        Timeout=timeout,
    )
def test_environment_variable_reserved(self):
    """Reserved AWS variable names must be rejected with ValueError."""
    reserved = ('AWS_ACCESS_KEY',
                'AWS_ACCESS_KEY_ID',
                'AWS_LAMBDA_FUNCTION_MEMORY_SIZE')
    for var in reserved:
        with self.assertRaises(ValueError) as context:
            Environment(Variables={var: 'value'})
        self.assertIn("Lambda Function environment variables names "
                      "can't be none of",
                      context.exception.args[0])
def build(self) -> Function:
    """Validate the builder's collected state and emit the Function resource."""
    checkForNoneValues(self)
    environment = Environment(Variables=self._envVars)
    function_name = Sub(self._name + "${AWS::StackName}")
    return Function(
        self._name,
        Code=self._code,
        Handler=self._handler,
        FunctionName=function_name,
        MemorySize=self._memory,
        Role=GetAtt(self._role, "Arn"),
        Runtime=str(self._runtime),
        Environment=environment,
    )
def add_lambda(template: Template, role: Role, code_uri: str,
               lambda_name: str, dynamodb_table: Table) -> Function:
    """Attach a python3.6 serverless Function to the template.

    The handler is derived from the function name and the DynamoDB table
    name is injected via the TABLE_NAME environment variable.
    """
    environment = Environment(Variables={"TABLE_NAME": Ref(dynamodb_table)})
    function = Function(
        lambda_name,
        Handler=f'{lambda_name}.lambda_handler',
        Runtime="python3.6",
        CodeUri=code_uri,
        Timeout=10,
        Role=GetAtt(role, "Arn"),
        Environment=environment,
    )
    return template.add_resource(function)
def create_lambda_function(self):
    """Add the DDB-Streams-to-ElasticSearch Lambda to the template.

    Creates the execution role first, then registers the nodejs function
    with its ES connection details in the environment.
    """
    self.create_lambda_role()
    t = self.template
    self.lambda_fn = t.add_resource(Function(
        "Function",
        Code=Code(S3Bucket="js-test-buckett", S3Key="lambda_code.zip"),
        Description="Function that streams data from DDB Streams to ElasticSearch",
        Environment=Environment(
            Variables={
                # Fix: the original contained the bare placeholder
                # `ES ENDPOINT HERE` (a SyntaxError).  Resolve the endpoint
                # from the ES domain resource as the sibling implementation
                # does.  Assumes self.es_domain is defined -- TODO confirm.
                "ES_ENDPOINT": GetAtt(self.es_domain, "DomainEndpoint"),
                "ES_INDEX": "movies",
                "ES_DOCTYPE": "movie"
            }),
        Handler="lambda_code.handler",
        Role=GetAtt(self.LambdaExecutionRole, "Arn"),
        Runtime="nodejs6.10"
    ))
def add_versioned_lambda(
    template,
    deployment_id,
    function,
):
    """Add *function* to *template* with a fresh Version and a "latest" Alias.

    Two Version resources (A/B) are kept and alternated on the parity of
    *deployment_id*, so every deployment activates a different logical id —
    apparently to force a new Lambda version to be published each time.

    :param template: troposphere Template to add resources to.
    :param deployment_id: identifier whose parity selects version A or B.
    :param function: the Function resource (not yet added to the template).
    :return: (function, alias) pair of added resources.
    """
    # Stash the deployment id in the function's environment so the function
    # configuration changes every deployment; the variable name makes clear
    # it is not meant to be read by the function code.
    environment = function.properties.setdefault("Environment", Environment(Variables={}))
    environment.Variables["X__DO_NOT_USE__DEPLOYMENT_ID"] = deployment_id
    function = template.add_resource(function)
    # One condition holds for odd deployment ids, the other for even ones.
    (is_odd_deployment, is_even_deployment) = add_double_sided_condition(
        template,
        f"{function.title}DeploymentIdParityOdd",
        Equals(determine_parity(deployment_id), "ODD"),
    )
    version_a = template.add_resource(
        Version(
            f"{function.title}VersionA",
            FunctionName=GetAtt(function, "Arn"),
            Condition=is_odd_deployment,
        ))
    version_b = template.add_resource(
        Version(
            f"{function.title}VersionB",
            FunctionName=GetAtt(function, "Arn"),
            Condition=is_even_deployment,
        ))
    # Resolve whichever Version resource exists in this deployment.
    version_number = If(
        is_odd_deployment,
        GetAtt(version_a, "Version"),
        GetAtt(version_b, "Version"),
    )
    # Stable alias name pointing at the freshly published version.
    alias = template.add_resource(
        Alias(
            f"{function.title}Alias",
            FunctionName=GetAtt(function, "Arn"),
            FunctionVersion=version_number,
            Name="latest",
        ))
    return function, alias
def _lambda_environment(self, context: ff.Context):
    """Assemble the Environment resource for a context's Lambda function.

    Built-in variables may be overridden by the context's
    extensions.firefly_aws.environment config section.
    """
    extensions = context.config.get('extensions') or {}
    overrides = (extensions.get('firefly_aws') or {}).get('environment')
    variables = {
        'PROJECT': self._project,
        'ENV': self._env,
        'ACCOUNT_ID': self._account_id,
        'CONTEXT': context.name,
        'REGION': self._region,
        'BUCKET': self._bucket,
    }
    if overrides is not None:
        variables.update(overrides)
    return Environment('LambdaEnvironment', Variables=variables)
def add_function(template, name, path, db_access=False, get=False,
                 post=False, options=False, timeout=None, memory_size=None,
                 node_env="production"):
    """Add a nodejs12.x SAM Function wired to API Gateway events.

    Optional flags grant DynamoDB access and override timeout / memory.
    """
    function_args = {
        "Handler": "index.handler",
        "Runtime": "nodejs12.x",
        "CodeUri": "this is not really required, as it is specified in buildspec.yml",
        "Environment": Environment(Variables={
            "NODE_ENV": node_env,
            "TABLE_NAME": "seders"
        }),
        "Role": ImportValue(
            Join("-", [Ref(projectid), Ref("AWS::Region"), "LambdaTrustRole"])),
    }
    if db_access:
        function_args["Policies"] = "AmazonDynamoDBFullAccess"
    if timeout:
        function_args["Timeout"] = timeout
    if memory_size:
        function_args["MemorySize"] = memory_size
    template.add_resource(
        Function(name, **function_args,
                 **events(path, get=get, post=post, options=options)))
def add_lambda_function(self):
    """Build the Lambda Function resource from sceptre_user_data.

    FunctionProperties is expected to carry exactly one of CodeFilepath
    (inline zip built from a local file) or CodeInS3 (S3 location kwargs);
    remaining properties are passed through to the Function resource.
    Exports the function's ARN as the LambdaArn output.
    """
    if "CodeFilepath" in self.sceptre_user_data["FunctionProperties"]:
        file_path = self.sceptre_user_data["FunctionProperties"].pop(
            "CodeFilepath")
        code = Code(ZipFile=Join("", code_from_file(file_path)))
    elif "CodeInS3" in self.sceptre_user_data["FunctionProperties"]:
        s3_code = self.sceptre_user_data["FunctionProperties"].pop(
            "CodeInS3")
        code = Code(**s3_code)
    function_kwargs = {
        "FunctionName": Ref(self.name),
        "Description": Ref(self.description),
        "Handler": Ref(self.handler),
        "Role": Ref(self.iam_role),
        "Runtime": Ref(self.runtime),
        "Code": code
    }
    function_kwargs.update(self.sceptre_user_data["FunctionProperties"])
    # A raw Environment dict from user data is wrapped in the resource type.
    if "Environment" in function_kwargs:
        environment_kwargs = {
            "Variables": function_kwargs.pop("Environment")
        }
        environment = Environment(**environment_kwargs)
        function_kwargs.update({"Environment": environment})
    if function_kwargs.pop("InVpc", False):
        # Fix: a stray Python-2-style `print` statement here was a
        # SyntaxError under Python 3; removed.
        vpc_config_kwargs = {
            "SecurityGroupIds": Ref(self.sg_ids),
            "SubnetIds": Ref(self.subnet_ids)
        }
        vpc_config = VPCConfig(**vpc_config_kwargs)
        function_kwargs.update({"VpcConfig": vpc_config})
    self.function = self.template.add_resource(
        Function("LambdaFunction", **function_kwargs))
    self.template.add_output(
        Output("LambdaArn",
               Description="The ARN of the Lambda Function",
               Value=GetAtt(self.function, "Arn")))
def create_lambda_function(self):
    """Add the DDB-Streams-to-ElasticSearch Lambda plus name/ARN outputs."""
    self.create_lambda_role()
    template = self.template
    # ES connection details handed to the function via its environment.
    es_variables = {
        "ES_ENDPOINT": GetAtt(self.es_domain, "DomainEndpoint"),
        "ES_INDEX": "movies",
        "ES_DOCTYPE": "movie"
    }
    stream_function = Function(
        "Function",
        Code=Code(S3Bucket="js-test-buckett", S3Key="lambda_code"),
        Description=
        "Function that streams data from DDB Streams to ElasticSearch",
        Environment=Environment(Variables=es_variables),
        Handler="index.handler",
        Role=GetAtt(self.LambdaExecutionRole, "Arn"),
        Runtime="nodejs6.10")
    self.lambda_fn = template.add_resource(stream_function)
    template.add_output(Output("FunctionName", Value=Ref(self.lambda_fn)))
    template.add_output(Output("FunctionArn",
                               Value=GetAtt(self.lambda_fn, "Arn")))
def _lambda_environment(self, context: ff.Context):
    """Assemble the Environment resource for a context's Lambda function.

    Base variables may be extended by config overrides, the optional
    adaptive-memory flag, and a Slack error webhook from the process env.
    """
    extensions = context.config.get('extensions') or {}
    overrides = (extensions.get('firefly_aws') or {}).get('environment')
    variables = {
        'PROJECT': self._project,
        'FF_ENVIRONMENT': self._env,
        'ACCOUNT_ID': self._account_id,
        'CONTEXT': context.name,
        'REGION': self._region,
        'BUCKET': self._bucket,
        'DDB_TABLE': self._ddb_table_name(context.name),
    }
    if self._adaptive_memory is not None:
        variables['ADAPTIVE_MEMORY'] = self._adaptive_memory
    if overrides is not None:
        variables.update(overrides)
    if 'SLACK_ERROR_URL' in os.environ:
        variables['SLACK_ERROR_URL'] = os.environ.get('SLACK_ERROR_URL')
    return Environment('LambdaEnvironment', Variables=variables)
Policies=[ describe_autoscale, get_queue_attributes, put_metric_data ], AssumeRolePolicyDocument=utils.get_static_policy('lambda-policy-doc'), )) custom_metric = t.add_resource(utils.make_lambda_function( name='custom_metric', role=role, lambda_params={ "KmsKeyArn": GetAtt(kms_key, "Arn"), "Environment": Environment( Variables={ "HyP3StackName": Ref("AWS::StackName") } ), "Timeout": 60 } )) custom_metric_target = Target( "CustomMetricTarget", Arn=GetAtt(custom_metric, 'Arn'), Id="CustomMetricFunction1", Input=Sub( '{"QueueUrl":"${QueueUrl}","AutoScalingGroupName":"${AGName}","MetricName":"${MetricName}"}', QueueUrl=Ref(start_events), AGName=Ref(processing_group), MetricName=custom_metric_name
def __init__(self, utils, templatePath='./cloudformation/stage.json', description='API Gateway Stage Template for {App}-{Stage}', version='2010-09-09'):
    """Build the API Gateway stage template for the configured app/stage.

    Creates one Lambda function + role per LambdaFunctions config entry,
    API Gateway proxy resources/methods/permissions per LambdaProxies
    entry, a Deployment (logical id salted with a hash of the proxy
    resources so a changed proxy set forces a new deployment), the Stage
    itself, and finally writes the rendered template to *templatePath*.

    NOTE(review): this block was reconstructed from collapsed source; the
    placement of the path-resource/path-method creation inside the
    non-empty-path branch should be verified against the original file.

    :param utils: helper carrying the parsed config (App, Stage, Tags,
        LambdaFunctions, LambdaProxies).
    :param templatePath: where the rendered JSON template is written.
    :param description: template description, formatted with App/Stage.
    :param version: CloudFormation template format version.
    """
    # NOTE(review): super(self.__class__, self) breaks under further
    # subclassing; plain super() is preferred.
    super(self.__class__, self).__init__()
    self.utils = utils
    self.templatePath = templatePath
    appName = self.utils.config['App']
    stageName = self.utils.config['Stage']
    tags = self.utils.config['Tags']
    self.add_version(version)
    self.add_description(description.format(App=appName, Stage=stageName))
    ####################
    # Lambda Functions #
    ####################
    self.lambdaFunctions = []
    self.lambdaFunctionRoles = []
    for functionId in self.utils.config['LambdaFunctions'].keys():
        f = self.utils.config['LambdaFunctions'][functionId]
        functionName = '{App}Stage{FunctionId}Function{Stage}'.format(
            App=appName, FunctionId=functionId, Stage=stageName)
        policyName = '{App}Stage{FunctionId}FunctionPolicy{Stage}'.format(
            App=appName, FunctionId=functionId, Stage=stageName)
        roleName = '{App}Stage{FunctionId}FunctionRole{Stage}'.format(
            App=appName, FunctionId=functionId, Stage=stageName)
        # Values substituted into both policy documents and source ARNs.
        functionSubParams = {
            'FunctionName': functionName,
            'Api': ImportValue('{App}Api'.format(App=appName)),
            'Stage': stageName.lower(),
        }
        #################
        # Function Role #
        #################
        # Per-function policy document is stored as a JSON file next to the
        # lambda source and parameterised via Sub.
        with open(
                './lambda/policies/{FunctionId}.json'.format(
                    FunctionId=functionId), 'r') as functionPolicyJson:
            functionPolicyDocument = functionPolicyJson.read()
        functionPolicy = Policy(
            policyName,
            PolicyName=policyName,
            PolicyDocument=Sub(functionPolicyDocument, **functionSubParams),
        )
        functionRole = self.add_resource(
            Role(
                roleName,
                AssumeRolePolicyDocument=PolicyDocument(Statement=[
                    Statement(
                        Effect=Allow,
                        Action=[AssumeRole],
                        Principal=Principal('Service', [
                            'lambda.amazonaws.com',
                        ]),
                    )
                ], ),
                Path='/service-role/{App}/{Stage}/'.format(
                    App=appName, Stage=stageName),
                Policies=[functionPolicy],
                RoleName=roleName,
            ))
        lambdaFunction = self.add_resource(
            Function(
                functionName,
                Code=path.abspath(f['LocalCode']),
                Description='API Proxy Function for Stage: {App}-{Stage}'.format(App=appName, Stage=stageName),
                FunctionName=functionName,
                Handler=f['Handler'],
                MemorySize=f['Memory'],
                Role=GetAtt(functionRole, 'Arn'),
                Runtime=f['Runtime'],
                Timeout=f['Timeout'],
                Tags=Tags(tags),
            ))
        # Base environment; per-function config entries may extend it.
        lambdaEnvironment = {
            'APP_NAME': appName,
            'STAGE_NAME': stageName,
        }
        if f['Environment'] != None:
            lambdaEnvironment.update(f['Environment'])
        lambdaFunction.Environment = Environment(
            Variables=lambdaEnvironment, )
        # Optional per-function extras: KMS key, VPC placement, X-Ray tracing.
        if f['KmsKeyArn'] != None:
            lambdaFunction.KmsKeyArn = f['KmsKeyArn']
        if f['Vpc'] != None:
            lambdaFunction.VpcConfig = VpcConfig(
                SecurityGroupIds=f['Vpc']['SecurityGroupIds'],
                SubnetIds=f['Vpc']['SubnetIds'],
            )
        if f['Tracing'] != None:
            lambdaFunction.TracingConfig = TracingConfig(
                Mode=f['Tracing'],
            )
        self.lambdaFunctions.append(lambdaFunction)
        self.lambdaFunctionRoles.append(functionRole)
    ##################
    # Lambda Proxies #
    ##################
    self.proxyResources = []
    self.proxyMethods = []
    self.proxyMethodTitles = []
    self.proxyPermissions = []
    for resourceName in self.utils.config['LambdaProxies'].keys():
        resource = self.utils.config['LambdaProxies'][resourceName]
        resourcePath = resource['Path'].strip()
        functionName = '{App}Stage{FunctionName}Function{Stage}'.format(
            App=appName, FunctionName=resource['Function'], Stage=stageName)
        resourceSubParams = {
            'FunctionName': functionName,
            'Api': ImportValue('{App}Api'.format(App=appName)),
            'Stage': stageName.lower(),
        }
        if resourcePath == '':
            # Empty path: the {proxy+} catch-all hangs directly off the
            # imported API root.
            proxyParent = ImportValue('{App}ApiRoot'.format(App=appName))
            resourceSubParams['ResourcePath'] = '*'
        else:
            resourceSubParams['ResourcePath'] = resourcePath + '/*'
            #################
            # Path Resource #
            #################
            pathResource = self.add_resource(
                Resource(
                    '{App}Stage{ResourceName}PathResource{Stage}'.format(
                        App=appName,
                        ResourceName=resourceName,
                        Stage=stageName),
                    ParentId=ImportValue(
                        '{App}ApiRoot'.format(App=appName)),
                    PathPart=resource['Path'],
                    RestApiId=ImportValue('{App}Api'.format(App=appName)),
                ))
            self.proxyResources.append(pathResource)
            proxyParent = Ref(pathResource)
            ###############
            # Path Method #
            ###############
            pathMethod = self.add_resource(
                self.generate_proxy_method(
                    '{App}Stage{ResourceName}PathMethod{Stage}'.format(
                        App=appName,
                        ResourceName=resourceName,
                        Stage=stageName),
                    resource['Auth'],
                    resourceSubParams,
                    proxyParent,
                    ImportValue('{App}Api'.format(App=appName)),
                ))
            self.proxyMethods.append(pathMethod)
            self.proxyMethodTitles.append(pathMethod.title)
        ##################
        # Proxy Resource #
        ##################
        proxyResource = self.add_resource(
            Resource(
                '{App}Stage{ResourceName}ProxyResource{Stage}'.format(
                    App=appName,
                    ResourceName=resourceName,
                    Stage=stageName),
                ParentId=proxyParent,
                PathPart='{proxy+}',
                RestApiId=ImportValue('{App}Api'.format(App=appName)),
            ))
        self.proxyResources.append(proxyResource)
        ################
        # Proxy Method #
        ################
        proxyMethod = self.add_resource(
            self.generate_proxy_method(
                '{App}Stage{ResourceName}ProxyMethod{Stage}'.format(
                    App=appName,
                    ResourceName=resourceName,
                    Stage=stageName),
                resource['Auth'],
                resourceSubParams,
                Ref(proxyResource),
                ImportValue('{App}Api'.format(App=appName)),
            ))
        self.proxyMethods.append(proxyMethod)
        self.proxyMethodTitles.append(proxyMethod.title)
        ####################
        # Proxy Permission #
        ####################
        proxyPermission = self.add_resource(
            Permission(
                '{App}Stage{ResourceName}Permission{Stage}'.format(
                    App=appName,
                    ResourceName=resourceName,
                    Stage=stageName),
                Action='lambda:InvokeFunction',
                FunctionName=functionName,
                Principal='apigateway.amazonaws.com',
                SourceArn=Sub(
                    'arn:${AWS::Partition}:execute-api:${AWS::Region}:${AWS::AccountId}:${Api}/${Stage}/*/${ResourcePath}',
                    **resourceSubParams),
                DependsOn=[functionName],
            ))
        self.proxyPermissions.append(proxyPermission)
    #################
    # RestApi Stage #
    #################
    # Hash the proxy wiring into the Deployment's logical id so that any
    # change to it produces a new Deployment resource.
    hashComponents = [self.proxyResources, self.proxyMethods]
    deploymentHash = md5(pickle.dumps(hashComponents)).hexdigest()
    self.deployment = self.add_resource(
        Deployment(
            '{App}StageDeployment{Stage}{Hash}'.format(
                App=appName, Stage=stageName, Hash=deploymentHash),
            Description='Deployment for {App} {Stage} Stage'.format(
                App=appName, Stage=stageName),
            RestApiId=ImportValue('{App}Api'.format(App=appName)),
            DependsOn=self.proxyMethodTitles,
        ))
    self.prodStage = self.add_resource(
        Stage(
            '{App}Stage'.format(App=appName),
            DeploymentId=Ref(self.deployment),
            Description='Stage for {App} {Stage} Stage.'.format(
                App=appName, Stage=stageName, Run=self.utils.run_time),
            MethodSettings=[
                MethodSetting(
                    DataTraceEnabled=True,
                    HttpMethod='*',
                    LoggingLevel='INFO',
                    ResourcePath='/*',
                    #MetricsEnabled=True,
                ),
            ],
            RestApiId=ImportValue('{App}Api'.format(App=appName)),
            StageName=stageName.lower(),
        ))
    ##################
    # Write Template #
    ##################
    with open(templatePath, 'w') as templateFile:
        templateFile.write(self.to_json())
"arn:aws:iam::aws:policy/service-role/AWSLambdaVPCAccessExecutionRole" ], Policies=[sns_policy, sqs_policy], AssumeRolePolicyDocument=utils.get_static_policy('lambda-policy-doc'), )) scheduler = utils.make_lambda_function( name="scheduler", role=lambda_role, lambda_params={ "Environment": Environment( Variables={ 'SNS_ARN': Ref(finish_sns), 'DB_HOST': utils.get_host_address(), 'DB_USER': Ref(db_user), 'DB_PASSWORD': Ref(db_pass), 'DB_NAME': Ref(db_name), 'QUEUE_URL': Ref(start_events) }), "KmsKeyArn": GetAtt(kms_key, "Arn"), "MemorySize": 128, "Timeout": 300 }) if 'unittest' in environment.maturity: scheduler.VpcConfig = VPCConfig( SecurityGroupIds=['sg-0d8cdb7c', 'sg-72f8c803'],
request_encoding_function = template.add_resource( Function( 'RequestEncodingFunction', Description='Creates Elastic Transcoder job web formats.', Runtime='python3.7', Handler='index.handler', Role=GetAtt(request_encoding_lambda_role, 'Arn'), Code=Code( S3Bucket=ImportValue( Join('-', [Ref(core_stack), 'LambdaCodeBucket-Ref'])), S3Key=Ref(request_encoding_lambda_code_key), ), Environment=Environment( Variables={ 'VIDEO_EVENTS_TABLE': _video_events_table, 'PIPELINE_ID_PARAMETER': _pipeline_id_parameter, }), TracingConfig=TracingConfig(Mode='Active', ), )) template.add_resource( LogGroup( "RequestEncodingLambdaLogGroup", LogGroupName=Join( '/', ['/aws/lambda', Ref(request_encoding_function)]), RetentionInDays=7, )) request_encoding_topic = template.add_resource( Topic(
"Unsubscribe Endpoint. Deletes provided email to dynamo table", FunctionName="EmailUnsubscribeFunction")) # Emailer with open(emailer_src_file, "r") as file: code = file.readlines() file.close() EmailSendFunction = t.add_resource( Function( "EmailSendFunction", Code=Code(ZipFile=Join("", code)), Handler="index.daily_quote_handler", Role=GetAtt(LambdaSesExecutionRole, "Arn"), Runtime="python2.7", Environment=Environment( Variables={"email_from": Ref(from_email_param)}), Description="Sends the Daily Quote. No endpoint, runs on a schedule.", FunctionName="EmailSendFunction")) ### API GATEWAY rest_api = t.add_resource(RestApi("EmailListApi", Name="EmailListApi")) # /subscribe (POST) subscribe_resource = t.add_resource( Resource("subscribeResource", RestApiId=Ref(rest_api), PathPart="subscribe", ParentId=GetAtt("EmailListApi", "RootResourceId"))) subscribe_method = t.add_resource( Method("subscribeMethod",
efficientLearningFunction = template.add_resource(Function( FUNCTION_NAME, FunctionName=FUNCTION_NAME, Description="Lambda returns quizz questions and mastery scores ", Handler="index.handler", Role=GetAtt(lambdaExecutionRole, "Arn"), Layers=[ Ref(pythonLayer), "arn:aws:lambda:eu-west-1:580247275435:layer:LambdaInsightsExtension:1" ], Runtime="python3.7", Environment=Environment( Variables={ "S3_BUCKET": Ref(questionsBucketName), "S3_CONFIG": Ref(configFolder), "S3_FOLDER": Ref(questionsFolder) } ), MemorySize=Ref(memorySize), Timeout=Ref(timeout), Code=Code( ZipFile="$LATEST" ), )) alias = template.add_resource(Alias( "LambdaAlias", Description="Cosmos Alias", FunctionName=Ref(efficientLearningFunction), FunctionVersion="$LATEST",
AssumeRolePolicyDocument=Policy(Statement=[ Statement(Effect=Allow, Action=[AssumeRole], Principal=Principal("Service", ["lambda.amazonaws.com"])) ]))) rds_macro_lambda = t.add_resource( Function("RdsSnapShotLambdaFunction", Code=Code(S3Bucket="rds-snapshot-id-lambda", S3Key="rdsmacroinstance.zip"), Environment=Environment( Variables={ 'log_level': 'info', 'rds_snapshot_stack_name': 'mv-rds-db-stack', 'replace_with_snapshot': 'false', 'snapshot_type': '', 'snapshot_id': '', 'restore_time': '2019-09-07T23:45:00Z', 'restore_point_in_time': 'false', 'properties_to_remove': '', 'properties_to_add': '', }), Description="Function used to manipulate the dbinstance template", Handler=Ref(rds_macro_handler), MemorySize=Ref(memory_size), FunctionName=Sub("${LambdaEnv}-%s" % ugc_rds_macro_function_name), Role=GetAtt(function_role, "Arn"), Runtime="python3.7", Timeout=900)) role = Role( "RdsMacroLogRole",
def scaffold(self):
    """ Create long lived stack resources for the cluster """
    self.t.add_resource(
        Cluster("Cluster", ClusterName=self.cluster_vars['name']))
    # One "badge" security group per consumer class, each exported so other
    # stacks can attach it to reach the ECS instances.
    OUTPUT_SG = ["ALB", "DB", "Cache", "Aux"]
    for sg in OUTPUT_SG:
        tmpsg = SecurityGroup(
            "{}BadgeSg".format(sg),
            GroupDescription=
            "SG for {} to wear in order to talk to ecs instances".format(
                sg),
            VpcId=self.cluster_vars.get('vpc'))
        self.t.add_resource(tmpsg)
        self.t.add_output(
            Output("{}BadgeSg".format(sg),
                   Description="{} Security Group Badge".format(sg),
                   Export=Export(Sub("${AWS::StackName}:%sBadgeSg" % sg)),
                   Value=GetAtt(tmpsg, "GroupId")))
    # Refactor like this
    ### removing this because it's in the agent now
    add_asg_cleanup(self.t,
                    sanitize_cfn_resource_name(self.cluster_vars['name']))
    # Lambda that publishes custom cluster metrics; invoked by the
    # 1-minute CloudWatch Events rule created below.
    self.t.add_resource(
        Function("ECSMetricLambda",
                 Code=Code(S3Bucket=Sub("${S3Bucket}"),
                           S3Key=Sub("${S3Prefix}/deployment.zip")),
                 Handler="metrics.cluster_metrics.lambda_handler",
                 Role=GetAtt("CronLambdaRole", "Arn"),
                 Runtime="python3.7",
                 MemorySize=128,
                 Timeout=300,
                 Environment=Environment(
                     Variables={
                         "CLUSTER": Sub("${ClusterName}"),
                         "ASGPREFIX": Sub("${ClusterName}-asg-"),
                         "REGION": Ref("AWS::Region")
                     })))
    self.t.add_resource(
        Role("CronLambdaRole",
             AssumeRolePolicyDocument={
                 "Statement": [{
                     "Effect": "Allow",
                     "Action": "sts:AssumeRole",
                     "Principal": {
                         "Service": "lambda.amazonaws.com"
                     },
                 }]
             },
             Policies=[
                 Policy(PolicyName="logs-and-stuff",
                        PolicyDocument={
                            "Statement": [{
                                "Effect": "Allow",
                                "Action": ["logs:*"],
                                "Resource": "arn:aws:logs:*:*:*"
                            }, {
                                # NOTE(review): the AutoScaling actions below
                                # are namespaced "ec2:" but the AutoScaling
                                # API uses the "autoscaling:" prefix --
                                # verify these grants actually take effect.
                                "Effect": "Allow",
                                "Action": [
                                    "ec2:DescribeAutoScalingGroups",
                                    "ec2:UpdateAutoScalingGroup",
                                    "ecs:*",
                                    "cloudwatch:PutMetricData"
                                ],
                                "Resource": "*"
                            }]
                        })
             ]))
    # run metrics every minute
    self.t.add_resource(
        Rule(
            "CronStats",
            ScheduleExpression="rate(1 minute)",
            Description="Cron for cluster stats",
            Targets=[Target(Id="1", Arn=GetAtt("ECSMetricLambda", "Arn"))]))
    # Allow the events rule to invoke the metrics lambda.
    self.t.add_resource(
        Permission("StatPerm",
                   Action="lambda:InvokeFunction",
                   FunctionName=GetAtt("ECSMetricLambda", "Arn"),
                   Principal="events.amazonaws.com",
                   SourceArn=GetAtt("CronStats", "Arn")))
Description='Extracts video metadata using Rekognition', Runtime='python3.7', Handler='index.handler', Role=GetAtt(rekognition_role, 'Arn'), Code=Code( S3Bucket=ImportValue( Join('-', [Ref(core_stack), 'LambdaCodeBucket-Ref'])), S3Key=Ref(rekognition_code_key), ), Environment=Environment( Variables={ 'VIDEO_EVENTS_TABLE': ImportValue( Join('-', [Ref(core_stack), 'VideoEventsTable', 'Ref'])), 'REKOGNITION_UPDATES_TOPIC': Ref(rekognition_updates_topic), 'REKOGNITION_ROLE_ARN': GetAtt(rekognition_publish_role, 'Arn'), 'INPUT_BUCKET': ImportValue( Join('-', [Ref(encoding_stack), 'UploadBucket', 'Ref'])), }), TracingConfig=TracingConfig(Mode='Active', ), )) template.add_resource( LogGroup( "RekognitionFunctionLogGroup", LogGroupName=Join( '/', ['/aws/lambda', Ref(rekognition_function)]), RetentionInDays=7,
"\treturn {}" ] Civ6Notif_Lambda = Function("Civ6NotifFunction", Code=Code(ZipFile=Join("", code)), Handler="index.lambda_handler", Role=GetAtt("Civ6NotifLambdaExecutionRole", "Arn"), Runtime="python3.7", Environment=Environment( Variables={ "SendToSNS": Ref("SendToSNS"), "SendToDiscord": Ref("SendToDiscord"), "SNSTopic": Join("", [ "arn:aws:sns:", Ref("AWS::Region"), ":", Ref("AWS::AccountId"), ":", GetAtt("Civ6NotifTopic", "TopicName") ]), "DiscordWebhookURL": Ref("DiscordWebhookURL") })) Civ6Notif_LambdaPermission = Permission("Civ6LambdaPermission", Action="lambda:InvokeFunction", Principal="apigateway.amazonaws.com", FunctionName=GetAtt( "Civ6NotifFunction", "Arn")) Civ6Notif_GW_Resource = Resource(
def test_environment_variable_not_reserved(self):
    """Ordinary variable names must be accepted without error."""
    for name in ("NODE_PATH", "NODE_ENV", "FOO"):
        try:
            Environment(Variables={name: "value"})
        except ValueError:
            self.fail("Environment() raised ValueError")
def create_environment_variables(environment=None):
    """Wrap a mapping of variables in an Environment resource.

    A falsy/omitted *environment* yields an empty variable set.
    """
    return Environment(Variables=environment or {})
Principal=Principal("Service", ["lambda.amazonaws.com"])) ]))) t.add_resource( Function( "LambdaFunction", Code=Code(S3Bucket=Ref("LambdaBucket"), S3Key=Ref("S3Key")), Description= "Function used to save galileo babel notifications in a bucket", Handler="galileo_babel_s3.lambda_handler", MemorySize=Ref("LambdaMemorySize"), FunctionName=If("IsTest", "testtest-editorial-search-galileo-babel", Sub("${LambdaEnv}-editorial-search-galileo-babel")), Environment=Environment( Variables={ 'GALILEO_BABEL_LAMBDA_ENV': Sub("${LambdaEnv}"), 'BUCKET': Sub("${LambdaEnv}-editorial-search-galileo-babel") }), Role=GetAtt("LambdaExecutionRole", "Arn"), Runtime="python3.6", Tags=Tags(BBCProject="editorial-platform", BBCComponent="editorial-search-galileo-babel", BBCEnvironment=Sub("${LambdaEnv}")), Timeout=Ref("LambdaTimeout"))) t.add_resource( PolicyType( "FunctionPolicy", PolicyName="FunctionPolicy", Roles=[Ref("LambdaExecutionRole")], PolicyDocument=Policy(Statement=[