def test_s3_filter(self):
    t = Template()
    t.add_resource(
        Function(
            "ProcessorFunction",
            Handler='process_file.handler',
            CodeUri='.',
            Runtime='python3.6',
            Policies='AmazonS3FullAccess',
            Events={
                'FileUpload': S3Event(
                    'FileUpload',
                    Bucket="bucket",
                    Events=['s3:ObjectCreated:*'],
                    Filter=Filter(S3Key=S3Key(
                        Rules=[
                            Rules(Name="prefix", Value="upload/"),
                            Rules(Name="suffix", Value=".txt"),
                        ],
                    ))
                )
            }
        )
    )
    t.to_json()
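# The test above is not self-contained; a minimal sketch of the imports it
# assumes (module paths from troposphere's public API -- it is an assumption
# that this snippet targets the troposphere library):
from troposphere import Template
from troposphere.s3 import Filter, Rules, S3Key
from troposphere.serverless import Function, S3Event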
def add_submit_lambda(self):
    ## We will make event triggers for all affiliates.
    all_affiliates = self.config["UXData"]["Affiliates"]
    ## Make rule sets for each affiliate:
    all_events = {}
    for affiliate in all_affiliates:
        ## Get necessary properties:
        affiliatename = affiliate["AffiliateName"]
        ## If this is user input, read directly from the submit directory;
        ## if it is another function's output, read from the output directory.
        assert type(affiliate["UserInput"]) == bool, "must provide a json boolean for UserInput"
        if affiliate["UserInput"] == True:
            readdir = self.config['Lambda']['LambdaConfig']['SUBMITDIR']
        elif affiliate["UserInput"] == False:
            readdir = self.config['Lambda']['LambdaConfig']['OUTDIR']

        aff_filter = Filter(
            'Filter' + affiliatename,
            S3Key=S3Key(
                'S3Key' + affiliatename,
                Rules=[
                    Rules('PrefixRule' + affiliatename, Name='prefix',
                          Value=affiliatename + '/' + readdir),
                    Rules('SuffixRule' + affiliatename, Name='suffix',
                          Value='submit.json')
                ]))
        event_name = 'BucketEvent' + affiliatename
        all_events[event_name] = {
            'Type': 'S3',
            'Properties': {
                'Bucket': Ref('PipelineMainBucket'),
                'Events': ['s3:ObjectCreated:*'],
                'Filter': aff_filter
            }
        }

    ## Add all of the lambda configuration items to the runtime environment.
    ## Most of the config comes from the config file, but certain elements are
    ## passed in from the template itself.
    lambdaconfig = self.config['Lambda']['LambdaConfig']
    lambdaconfig['figlambid'] = Ref(self.figurelamb)
    lambdaconfig['figlambarn'] = GetAtt(self.figurelamb, 'Arn')
    lambdaconfig['cwrolearn'] = GetAtt(self.cwrole, 'Arn')

    ## Now add the lambda function itself:
    function = Function(
        'MainLambda',
        CodeUri='../../protocols',
        Runtime='python3.6',
        Handler='submit_start.handler',
        Description='Main Lambda Function for Serverless',
        MemorySize=128,
        Timeout=self.config["Lambda"]['LambdaConfig']["EXECUTION_TIMEOUT"],
        Role='arn:aws:iam::739988523141:role/lambda_dataflow',  ## TODO: Create this in template
        Events=all_events,
        Environment=Environment(Variables=lambdaconfig))
    self.template.add_resource(function)
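# A minimal sketch of the configuration structure the method above reads.
# Only the keys referenced in the code are shown; the values are hypothetical.
example_config = {
    "UXData": {
        "Affiliates": [
            {"AffiliateName": "labA", "UserInput": True},   # reads from SUBMITDIR
            {"AffiliateName": "labB", "UserInput": False},  # reads from OUTDIR
        ]
    },
    "Lambda": {
        "LambdaConfig": {
            "SUBMITDIR": "inputs",
            "OUTDIR": "results",
            "EXECUTION_TIMEOUT": 900,
        }
    },
}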
            )
        ]),
        LifecycleConfiguration=LifecycleConfiguration(Rules=[
            LifecycleRule(
                Id='DeleteUploadsAfterOneDay',
                Status='Enabled',
                ExpirationInDays=1,
                Prefix='upload/',
            )
        ]),
        NotificationConfiguration=NotificationConfiguration(
            TopicConfigurations=[
                TopicConfigurations(
                    Event='s3:ObjectCreated:*',
                    Filter=Filter(S3Key=S3Key(Rules=[
                        Rules(
                            Name='prefix',
                            Value='upload/',
                        )
                    ])),
                    Topic=Ref(upload_topic))
            ],
        ),
    ))

video_bucket = template.add_resource(
    Bucket(
        'VideoBucket',
        CorsConfiguration=CorsConfiguration(CorsRules=[
            CorsRules(
                AllowedOrigins=['*'],
                AllowedMethods=['GET', 'HEAD'],
                AllowedHeaders=['*'],
            )
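# A standalone sketch (assuming troposphere) of what the prefix filter above
# serializes to in the generated template. The topic ARN is a hypothetical
# placeholder standing in for Ref(upload_topic).
from troposphere.s3 import Filter, Rules, S3Key, TopicConfigurations

notification = TopicConfigurations(
    Event='s3:ObjectCreated:*',
    Filter=Filter(S3Key=S3Key(Rules=[Rules(Name='prefix', Value='upload/')])),
    Topic='arn:aws:sns:us-east-1:111122223333:upload-topic',
)
print(notification.to_dict())
# {'Event': 's3:ObjectCreated:*',
#  'Filter': {'S3Key': {'Rules': [{'Name': 'prefix', 'Value': 'upload/'}]}},
#  'Topic': 'arn:aws:sns:us-east-1:111122223333:upload-topic'}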
Type="String", Default="gziptosnappy")) """ Create S3 Bucket With a Notifications Configuration. The bucket notifies the lambda function in case there is a PUT event. """ S3Bucket = t.add_resource( Bucket("S3Bucket", BucketName=Ref(inputBucketName), NotificationConfiguration=NotificationConfiguration( LambdaConfigurations=[ LambdaConfigurations(Event="s3:ObjectCreated:*", Filter=Filter(S3Key=S3Key(Rules=[ Rules(Name="prefix", Value=Ref(inputKeyPrefix)), Rules(Name="suffix", Value=".gz") ])), Function=Ref(functionName)) ]))) BucketPermission = t.add_resource( Permission("BucketPermission", Action="lambda:InvokeFunction", FunctionName=Ref(functionName), Principal="s3.amazonaws.com", SourceAccount=Ref("AWS::AccountId"), SourceArn=Join("", ["aws:arn:s3:::", Ref(S3Bucket)]))) """ Create Lambda Function