def test_sns_to_sqs():
    """Subscribe an SQS queue to an SNS topic and verify message delivery.

    Publishes a message with a unique attribute value, then checks that the
    notification arriving in the queue carries the correct topic ARN and the
    message attribute value.
    """
    sqs_client = aws_stack.connect_to_service('sqs')
    sns_client = aws_stack.connect_to_service('sns')

    # create topic and queue
    queue_info = sqs_client.create_queue(QueueName=TEST_QUEUE_NAME_FOR_SNS)
    topic_info = sns_client.create_topic(Name=TEST_TOPIC_NAME)

    # subscribe SQS to SNS, publish message
    sns_client.subscribe(
        TopicArn=topic_info['TopicArn'],
        Protocol='sqs',
        Endpoint=aws_stack.sqs_queue_arn(TEST_QUEUE_NAME_FOR_SNS))
    test_value = short_uid()
    sns_client.publish(
        TopicArn=topic_info['TopicArn'],
        Message='test message for SQS',
        MessageAttributes={'attr1': {'DataType': 'String', 'StringValue': test_value}})

    # receive, assert, and delete message from SQS
    queue_url = queue_info['QueueUrl']
    assertions = [
        # make sure we receive the correct topic ARN in notifications
        {'TopicArn': topic_info['TopicArn']},
        # make sure the notification contains message attributes
        {'Value': test_value},
    ]
    receive_assert_delete(queue_url, assertions, sqs_client)
def test_put_event_with_content_base_rule_in_pattern(self):
    """Exercise content-based filtering operators in an EventBridge rule.

    Builds a pattern using exists/prefix/anything-but/cidr/numeric operators,
    sends a matching event (expected to be delivered to the SQS target) and a
    non-matching one (expected to be dropped).
    """
    queue_name = "queue-{}".format(short_uid())
    rule_name = "rule-{}".format(short_uid())
    target_id = "target-{}".format(short_uid())

    sqs_client = aws_stack.connect_to_service("sqs")
    queue_url = sqs_client.create_queue(QueueName=queue_name)["QueueUrl"]
    queue_arn = aws_stack.sqs_queue_arn(queue_name)

    pattern = {
        "Source": [{"exists": True}],
        "detail-type": [{"prefix": "core.app"}],
        "Detail": {
            "decription": ["this-is-event-details"],
            "amount": [200],
            "salary": [2000, 4000],
            "env": ["dev", "prod"],
            "user": ["user1", "user2", "user3"],
            "admins": ["skyli", {"prefix": "hey"}, {"prefix": "ad"}],
            "test1": [{"anything-but": 200}],
            "test2": [{"anything-but": "test2"}],
            "test3": [{"anything-but": ["test3", "test33"]}],
            "test4": [{"anything-but": {"prefix": "test4"}}],
            "ip": [{"cidr": "10.102.1.0/24"}],
            "num-test1": [{"numeric": ["<", 200]}],
            "num-test2": [{"numeric": ["<=", 200]}],
            "num-test3": [{"numeric": [">", 200]}],
            "num-test4": [{"numeric": [">=", 200]}],
            "num-test5": [{"numeric": [">=", 200, "<=", 500]}],
            "num-test6": [{"numeric": [">", 200, "<", 500]}],
            "num-test7": [{"numeric": [">=", 200, "<", 500]}],
        },
    }
    # event values chosen so every operator in the pattern above matches
    event = {
        "EventBusName": TEST_EVENT_BUS_NAME,
        "Source": "core.update-account-command",
        "DetailType": "core.app.backend",
        "Detail": json.dumps({
            "decription": "this-is-event-details",
            "amount": 200,
            "salary": 2000,
            "env": "prod",
            "user": "******",
            "admins": "admin",
            "test1": 300,
            "test2": "test22",
            "test3": "test333",
            "test4": "this test4",
            "ip": "10.102.1.100",
            "num-test1": 100,
            "num-test2": 200,
            "num-test3": 300,
            "num-test4": 200,
            "num-test5": 500,
            "num-test6": 300,
            "num-test7": 300,
        }),
    }

    self.events_client.create_event_bus(Name=TEST_EVENT_BUS_NAME)
    self.events_client.put_rule(
        Name=rule_name,
        EventBusName=TEST_EVENT_BUS_NAME,
        EventPattern=json.dumps(pattern),
    )
    self.events_client.put_targets(
        Rule=rule_name,
        EventBusName=TEST_EVENT_BUS_NAME,
        Targets=[{"Id": target_id, "Arn": queue_arn, "InputPath": "$.detail"}],
    )
    self.events_client.put_events(Entries=[event])

    def get_message(queue_url):
        resp = sqs_client.receive_message(QueueUrl=queue_url)
        return resp.get("Messages")

    # matching event: delivered, body equals the event detail (InputPath=$.detail)
    messages = retry(get_message, retries=3, sleep=1, queue_url=queue_url)
    self.assertEqual(1, len(messages))
    self.assertEqual(json.loads(event["Detail"]), json.loads(messages[0].get("Body")))

    # flip 'admins' to a value outside the pattern -> event must be dropped
    event_details = json.loads(event["Detail"])
    event_details["admins"] = "no"
    event["Detail"] = json.dumps(event_details)
    self.events_client.put_events(Entries=[event])

    messages = retry(get_message, retries=3, sleep=1, queue_url=queue_url)
    self.assertIsNone(messages)

    # clean up
    self.cleanup(TEST_EVENT_BUS_NAME, rule_name, target_id, queue_url=queue_url)
def SQS_Queue_get_cfn_attribute(self, attribute_name):
    """Patched CloudFormation attribute resolver for SQS queues.

    Answers 'Arn'/'QueueArn' locally from the queue name; every other
    attribute is delegated to the original implementation.
    """
    if attribute_name in ('Arn', 'QueueArn'):
        return aws_stack.sqs_queue_arn(queue_name=self.name)
    return SQS_Queue_get_cfn_attribute_orig(self, attribute_name)
def test_put_event_with_content_base_rule_in_pattern(self):
    """Exercise content-based filtering operators in an EventBridge rule.

    Builds a pattern with exists/prefix/anything-but/cidr/numeric operators,
    then sends one event expected to match (delivered to the SQS target) and
    one expected not to match (dropped).
    """
    queue_name = 'queue-{}'.format(short_uid())
    rule_name = 'rule-{}'.format(short_uid())
    target_id = 'target-{}'.format(short_uid())

    sqs_client = aws_stack.connect_to_service('sqs')
    queue_url = sqs_client.create_queue(QueueName=queue_name)['QueueUrl']
    queue_arn = aws_stack.sqs_queue_arn(queue_name)

    pattern = {
        'Source': [{'exists': True}],
        'detail-type': [{'prefix': 'core.app'}],
        # FIX: 'Detail' must stay a nested dict here. The whole pattern is
        # serialized exactly once via json.dumps(pattern) in put_rule() below;
        # pre-encoding this value with json.dumps() (as before) double-encodes
        # it, turning the content-based filter into a single string literal.
        'Detail': {
            'decription': ['this-is-event-details'],
            'amount': [200],
            'salary': [2000, 4000],
            'env': ['dev', 'prod'],
            'user': ['user1', 'user2', 'user3'],
            'admins': ['skyli', {'prefix': 'hey'}, {'prefix': 'ad'}],
            'test1': [{'anything-but': 200}],
            'test2': [{'anything-but': 'test2'}],
            'test3': [{'anything-but': ['test3', 'test33']}],
            'test4': [{'anything-but': {'prefix': 'test4'}}],
            'ip': [{'cidr': '10.102.1.0/24'}],
            'num-test1': [{'numeric': ['<', 200]}],
            'num-test2': [{'numeric': ['<=', 200]}],
            'num-test3': [{'numeric': ['>', 200]}],
            'num-test4': [{'numeric': ['>=', 200]}],
            'num-test5': [{'numeric': ['>=', 200, '<=', 500]}],
            'num-test6': [{'numeric': ['>', 200, '<', 500]}],
            'num-test7': [{'numeric': ['>=', 200, '<', 500]}]
        }
    }
    event = {
        'EventBusName': TEST_EVENT_BUS_NAME,
        'Source': 'core.update-account-command',
        'DetailType': 'core.app.backend',
        'Detail': json.dumps({
            'decription': 'this-is-event-details',
            'amount': 200,
            'salary': 2000,
            'env': 'prod',
            'user': ['user4', 'user3'],
            'admins': 'admin',
            'test1': 300,
            'test2': 'test22',
            'test3': 'test333',
            'test4': 'this test4',
            'ip': '10.102.1.100',
            'num-test1': 100,
            'num-test2': 200,
            'num-test3': 300,
            'num-test4': 200,
            'num-test5': 500,
            'num-test6': 300,
            'num-test7': 300
        })
    }

    self.events_client.create_event_bus(Name=TEST_EVENT_BUS_NAME)
    self.events_client.put_rule(
        Name=rule_name,
        EventBusName=TEST_EVENT_BUS_NAME,
        EventPattern=json.dumps(pattern)
    )
    self.events_client.put_targets(
        Rule=rule_name,
        EventBusName=TEST_EVENT_BUS_NAME,
        Targets=[{'Id': target_id, 'Arn': queue_arn, 'InputPath': '$.detail'}]
    )
    self.events_client.put_events(Entries=[event])

    def get_message(queue_url):
        resp = sqs_client.receive_message(QueueUrl=queue_url)
        return resp.get('Messages')

    # matching event: delivered, body equals the event detail (InputPath=$.detail)
    messages = retry(get_message, retries=3, sleep=1, queue_url=queue_url)
    self.assertEqual(len(messages), 1)
    self.assertEqual(json.loads(messages[0].get('Body')), json.loads(event['Detail']))

    # change 'admins' to a value outside the pattern -> event must be dropped
    event_details = json.loads(event['Detail'])
    event_details['admins'] = 'not_admin'
    event['Detail'] = json.dumps(event_details)
    self.events_client.put_events(Entries=[event])

    messages = retry(get_message, retries=3, sleep=1, queue_url=queue_url)
    self.assertEqual(messages, None)

    # clean up
    sqs_client.delete_queue(QueueUrl=queue_url)
    self.events_client.remove_targets(
        Rule=rule_name,
        EventBusName=TEST_EVENT_BUS_NAME,
        Ids=[target_id],
        Force=True
    )
    self.events_client.delete_rule(
        Name=rule_name,
        EventBusName=TEST_EVENT_BUS_NAME,
        Force=True
    )
    self.events_client.delete_event_bus(Name=TEST_EVENT_BUS_NAME)
def test_put_events_with_input_path_multiple(self):
    """Deliver one event to two SQS targets, one with InputPath and one without.

    The InputPath target must receive only the '$.detail' payload; the plain
    target receives the full event envelope. An event from a non-matching
    source must not be delivered at all.
    """
    queue_name = 'queue-{}'.format(short_uid())
    queue_name_1 = 'queue-{}'.format(short_uid())
    rule_name = 'rule-{}'.format(short_uid())
    target_id = 'target-{}'.format(short_uid())
    target_id_1 = 'target-{}'.format(short_uid())
    bus_name = 'bus-{}'.format(short_uid())

    sqs_client = aws_stack.connect_to_service('sqs')
    queue_url = sqs_client.create_queue(QueueName=queue_name)['QueueUrl']
    queue_arn = aws_stack.sqs_queue_arn(queue_name)
    queue_url_1 = sqs_client.create_queue(QueueName=queue_name_1)['QueueUrl']
    queue_arn_1 = aws_stack.sqs_queue_arn(queue_name_1)

    self.events_client.create_event_bus(Name=bus_name)
    self.events_client.put_rule(
        Name=rule_name,
        EventBusName=bus_name,
        EventPattern=json.dumps(TEST_EVENT_PATTERN))
    self.events_client.put_targets(
        Rule=rule_name,
        EventBusName=bus_name,
        Targets=[
            {'Id': target_id, 'Arn': queue_arn, 'InputPath': '$.detail'},
            {'Id': target_id_1, 'Arn': queue_arn_1},
        ])
    self.events_client.put_events(
        Entries=[{
            'EventBusName': bus_name,
            'Source': TEST_EVENT_PATTERN['Source'][0],
            'DetailType': TEST_EVENT_PATTERN['detail-type'][0],
            'Detail': json.dumps(TEST_EVENT_PATTERN['Detail'][0])
        }])

    def get_message(queue_url):
        resp = sqs_client.receive_message(QueueUrl=queue_url)
        return resp.get('Messages')

    # InputPath target: body is exactly the event detail
    messages = retry(get_message, retries=3, sleep=1, queue_url=queue_url)
    self.assertEqual(len(messages), 1)
    self.assertEqual(json.loads(messages[0].get('Body')), EVENT_DETAIL)

    # plain target: full envelope, detail nested under 'detail'
    messages = retry(get_message, retries=3, sleep=1, queue_url=queue_url_1)
    self.assertEqual(len(messages), 1)
    self.assertEqual(json.loads(messages[0].get('Body')).get('detail'), EVENT_DETAIL)

    # non-matching source must not be delivered
    self.events_client.put_events(
        Entries=[{
            'EventBusName': bus_name,
            'Source': 'dummySource',
            'DetailType': TEST_EVENT_PATTERN['detail-type'][0],
            'Detail': json.dumps(TEST_EVENT_PATTERN['Detail'][0])
        }])
    messages = retry(get_message, retries=3, sleep=1, queue_url=queue_url)
    self.assertEqual(messages, None)

    # clean up
    sqs_client.delete_queue(QueueUrl=queue_url)
    self.events_client.remove_targets(
        Rule=rule_name, EventBusName=bus_name, Ids=[target_id], Force=True)
    self.events_client.delete_rule(
        Name=rule_name, EventBusName=bus_name, Force=True)
    self.events_client.delete_event_bus(Name=bus_name)
def test_put_events_with_target_sqs(self):
    """Route a matching event through an EventBridge rule to an SQS target.

    Verifies put_targets reports no failed entries and that the event body
    arrives in the queue unchanged.
    """
    queue_name = 'queue-{}'.format(short_uid())
    rule_name = 'rule-{}'.format(short_uid())
    target_id = 'target-{}'.format(short_uid())

    sqs_client = aws_stack.connect_to_service('sqs')
    queue_url = sqs_client.create_queue(QueueName=queue_name)['QueueUrl']
    queue_arn = aws_stack.sqs_queue_arn(queue_name)

    self.events_client.create_event_bus(Name=TEST_EVENT_BUS_NAME)
    self.events_client.put_rule(
        Name=rule_name,
        EventBusName=TEST_EVENT_BUS_NAME,
        EventPattern=json.dumps(TEST_EVENT_PATTERN))
    rs = self.events_client.put_targets(
        Rule=rule_name,
        EventBusName=TEST_EVENT_BUS_NAME,
        Targets=[{'Id': target_id, 'Arn': queue_arn}])

    # target registration must succeed with no failed entries
    self.assertIn('FailedEntryCount', rs)
    self.assertIn('FailedEntries', rs)
    self.assertEqual(rs['FailedEntryCount'], 0)
    self.assertEqual(rs['FailedEntries'], [])

    self.events_client.put_events(
        Entries=[{
            'EventBusName': TEST_EVENT_BUS_NAME,
            'Source': TEST_EVENT_PATTERN['Source'],
            'DetailType': TEST_EVENT_PATTERN['DetailType'],
            'Detail': TEST_EVENT_PATTERN['Detail']
        }])

    def get_message(queue_url):
        resp = sqs_client.receive_message(QueueUrl=queue_url)
        return resp['Messages']

    messages = retry(get_message, retries=3, sleep=1, queue_url=queue_url)
    self.assertEqual(len(messages), 1)
    self.assertEqual(messages[0]['Body'], TEST_EVENT_PATTERN['Detail'])

    # clean up
    sqs_client.delete_queue(QueueUrl=queue_url)
    self.events_client.remove_targets(
        Rule=rule_name, EventBusName=TEST_EVENT_BUS_NAME, Ids=[target_id], Force=True)
    self.events_client.delete_rule(
        Name=rule_name, EventBusName=TEST_EVENT_BUS_NAME, Force=True)
    self.events_client.delete_event_bus(Name=TEST_EVENT_BUS_NAME)
def test_event_source_mapping_default_batch_size(self):
    """Check default and maximum batch sizes for Lambda event source mappings.

    SQS and DynamoDB mappings must default to their documented batch sizes,
    and both create/update must reject a batch size above the SQS maximum
    with InvalidParameterValueException.
    """
    function_name = 'lambda_func-{}'.format(short_uid())
    queue_name_1 = 'queue-{}-1'.format(short_uid())
    queue_name_2 = 'queue-{}-2'.format(short_uid())
    ddb_table = 'ddb_table-{}'.format(short_uid())

    testutil.create_lambda_function(
        handler_file=TEST_LAMBDA_ECHO_FILE,
        func_name=function_name,
        runtime=LAMBDA_RUNTIME_PYTHON36)

    lambda_client = aws_stack.connect_to_service('lambda')
    sqs_client = aws_stack.connect_to_service('sqs')

    queue_url_1 = sqs_client.create_queue(QueueName=queue_name_1)['QueueUrl']
    queue_arn_1 = aws_stack.sqs_queue_arn(queue_name_1)

    # default batch size for an SQS mapping
    rs = lambda_client.create_event_source_mapping(
        EventSourceArn=queue_arn_1, FunctionName=function_name)
    self.assertEqual(rs['BatchSize'], BATCH_SIZE_RANGES['sqs'][0])
    uuid = rs['UUID']

    try:
        # Update batch size with invalid value
        lambda_client.update_event_source_mapping(
            UUID=uuid,
            FunctionName=function_name,
            BatchSize=BATCH_SIZE_RANGES['sqs'][1] + 1)
        self.fail(
            'This call should not be successful as the batch size > MAX_BATCH_SIZE'
        )
    except ClientError as e:
        self.assertEqual(e.response['Error']['Code'],
                         INVALID_PARAMETER_VALUE_EXCEPTION)

    queue_url_2 = sqs_client.create_queue(QueueName=queue_name_2)['QueueUrl']
    queue_arn_2 = aws_stack.sqs_queue_arn(queue_name_2)

    try:
        # Create event source mapping with invalid batch size value
        lambda_client.create_event_source_mapping(
            EventSourceArn=queue_arn_2,
            FunctionName=function_name,
            BatchSize=BATCH_SIZE_RANGES['sqs'][1] + 1)
        self.fail(
            'This call should not be successful as the batch size > MAX_BATCH_SIZE'
        )
    except ClientError as e:
        self.assertEqual(e.response['Error']['Code'],
                         INVALID_PARAMETER_VALUE_EXCEPTION)

    # default batch size for a DynamoDB stream mapping
    table_arn = aws_stack.create_dynamodb_table(
        ddb_table, partition_key='id')['TableDescription']['TableArn']
    rs = lambda_client.create_event_source_mapping(
        EventSourceArn=table_arn, FunctionName=function_name)
    self.assertEqual(rs['BatchSize'], BATCH_SIZE_RANGES['dynamodb'][0])

    # clean up
    dynamodb_client = aws_stack.connect_to_service('dynamodb')
    dynamodb_client.delete_table(TableName=ddb_table)
    sqs_client.delete_queue(QueueUrl=queue_url_1)
    sqs_client.delete_queue(QueueUrl=queue_url_2)
    lambda_client.delete_function(FunctionName=function_name)
def test_scheduled_expression_events(self):
    """Fire a scheduled rule at SNS, Step Functions, and SQS (std + FIFO) targets.

    Spins up a local HTTP endpoint subscribed to the SNS topic, schedules a
    rule with a fixed input, and asserts that every target eventually receives
    the same payload.
    """
    class HttpEndpointListener(ProxyListener):
        # collect every HTTP POST body (SNS subscription + notifications)
        def forward_request(self, method, path, data, headers):
            event = json.loads(to_str(data))
            events.append(event)
            return 200

    local_port = get_free_tcp_port()
    proxy = start_proxy(local_port, update_listener=HttpEndpointListener())
    wait_for_port_open(local_port)

    topic_name = "topic-{}".format(short_uid())
    queue_name = "queue-{}".format(short_uid())
    fifo_queue_name = "queue-{}.fifo".format(short_uid())
    rule_name = "rule-{}".format(short_uid())
    endpoint = "{}://{}:{}".format(
        get_service_protocol(), config.LOCALSTACK_HOSTNAME, local_port)
    sm_role_arn = aws_stack.role_arn("sfn_role")
    sm_name = "state-machine-{}".format(short_uid())
    topic_target_id = "target-{}".format(short_uid())
    sm_target_id = "target-{}".format(short_uid())
    queue_target_id = "target-{}".format(short_uid())
    fifo_queue_target_id = "target-{}".format(short_uid())

    events = []
    state_machine_definition = """
    {
        "StartAt": "Hello",
        "States": {
            "Hello": {
                "Type": "Pass",
                "Result": "World",
                "End": true
            }
        }
    }
    """

    state_machine_arn = self.sfn_client.create_state_machine(
        name=sm_name,
        definition=state_machine_definition,
        roleArn=sm_role_arn)["stateMachineArn"]

    topic_arn = self.sns_client.create_topic(Name=topic_name)["TopicArn"]
    self.sns_client.subscribe(TopicArn=topic_arn, Protocol="http", Endpoint=endpoint)

    queue_url = self.sqs_client.create_queue(QueueName=queue_name)["QueueUrl"]
    fifo_queue_url = self.sqs_client.create_queue(
        QueueName=fifo_queue_name,
        Attributes={"FifoQueue": "true"})["QueueUrl"]
    queue_arn = aws_stack.sqs_queue_arn(queue_name)
    fifo_queue_arn = aws_stack.sqs_queue_arn(fifo_queue_name)

    event = {"env": "testing"}

    self.events_client.put_rule(Name=rule_name, ScheduleExpression="rate(1 minutes)")
    self.events_client.put_targets(
        Rule=rule_name,
        Targets=[
            {"Id": topic_target_id, "Arn": topic_arn, "Input": json.dumps(event)},
            {"Id": sm_target_id, "Arn": state_machine_arn, "Input": json.dumps(event)},
            {"Id": queue_target_id, "Arn": queue_arn, "Input": json.dumps(event)},
            {
                "Id": fifo_queue_target_id,
                "Arn": fifo_queue_arn,
                "Input": json.dumps(event),
                # FIFO queues require a message group id
                "SqsParameters": {"MessageGroupId": "123"},
            },
        ],
    )

    def received(q_urls):
        # state machine got executed
        executions = self.sfn_client.list_executions(
            stateMachineArn=state_machine_arn)["executions"]
        self.assertGreaterEqual(len(executions), 1)

        # http endpoint got events
        self.assertGreaterEqual(len(events), 2)
        notifications = [
            event["Message"] for event in events
            if event["Type"] == "Notification"
        ]
        self.assertGreaterEqual(len(notifications), 1)

        # get state machine execution detail
        execution_arn = executions[0]["executionArn"]
        execution_input = self.sfn_client.describe_execution(
            executionArn=execution_arn)["input"]

        # get message from queue
        all_msgs = []
        for url in q_urls:
            msgs = self.sqs_client.receive_message(QueueUrl=url).get("Messages", [])
            self.assertGreaterEqual(len(msgs), 1)
            all_msgs.append(msgs[0])

        return execution_input, notifications[0], all_msgs

    execution_input, notification, msgs_received = retry(
        received, retries=5, sleep=15, q_urls=[queue_url, fifo_queue_url])

    # every target must have received the scheduled input payload
    self.assertEqual(event, json.loads(notification))
    self.assertEqual(event, json.loads(execution_input))
    for msg_received in msgs_received:
        self.assertEqual(event, json.loads(msg_received["Body"]))

    # clean up
    proxy.stop()
    self.cleanup(
        None,
        rule_name,
        target_ids=[topic_target_id, sm_target_id],
        queue_url=queue_url,
    )
    self.sns_client.delete_topic(TopicArn=topic_arn)
    self.sfn_client.delete_state_machine(stateMachineArn=state_machine_arn)
def test_exists_filter_policy(self):
    """Verify the SNS 'exists' filter-policy operator (True and False forms).

    With exists=True, only messages carrying the 'store' attribute should be
    delivered; with exists=False, only messages without it should be.
    """
    # connect SNS topic to an SQS queue
    queue_name, queue_arn, queue_url = self._create_queue()
    filter_policy = {'store': [{'exists': True}]}

    def do_subscribe(self, filter_policy, queue_arn):
        self.sns_client.subscribe(
            TopicArn=self.topic_arn,
            Protocol='sqs',
            Endpoint=queue_arn,
            Attributes={'FilterPolicy': json.dumps(filter_policy)})

    do_subscribe(self, filter_policy, queue_arn)

    # get number of messages
    num_msgs_0 = len(
        self.sqs_client.receive_message(QueueUrl=queue_url).get('Messages', []))

    # publish message that satisfies the filter policy, assert that message is received
    message = u'This is a test message'
    self.sns_client.publish(
        TopicArn=self.topic_arn,
        Message=message,
        MessageAttributes={
            'store': {'DataType': 'Number', 'StringValue': '99'},
            'def': {'DataType': 'Number', 'StringValue': '99'}
        })
    num_msgs_1 = len(
        self.sqs_client.receive_message(
            QueueUrl=queue_url, VisibilityTimeout=0)['Messages'])
    self.assertEqual(num_msgs_1, num_msgs_0 + 1)

    # publish message that does not satisfy the filter policy, assert that message is not received
    message = u'This is a test message'
    self.sns_client.publish(
        TopicArn=self.topic_arn,
        Message=message,
        MessageAttributes={
            'attr1': {'DataType': 'Number', 'StringValue': '111'}
        })
    num_msgs_2 = len(
        self.sqs_client.receive_message(
            QueueUrl=queue_url, VisibilityTimeout=0)['Messages'])
    self.assertEqual(num_msgs_2, num_msgs_1)

    # test with exist operator set to false.
    queue_arn = aws_stack.sqs_queue_arn(TEST_QUEUE_NAME)
    filter_policy = {'store': [{'exists': False}]}
    do_subscribe(self, filter_policy, queue_arn)

    # get number of messages
    num_msgs_0 = len(
        self.sqs_client.receive_message(QueueUrl=self.queue_url).get('Messages', []))

    # publish message with the attribute and see if its getting filtered.
    message = u'This is a test message'
    self.sns_client.publish(
        TopicArn=self.topic_arn,
        Message=message,
        MessageAttributes={
            'store': {'DataType': 'Number', 'StringValue': '99'},
            'def': {'DataType': 'Number', 'StringValue': '99'}
        })
    num_msgs_1 = len(
        self.sqs_client.receive_message(
            QueueUrl=self.queue_url, VisibilityTimeout=0).get('Messages', []))
    self.assertEqual(num_msgs_1, num_msgs_0)

    # publish message that without the attribute and see if its getting filtered.
    message = u'This is a test message'
    self.sns_client.publish(
        TopicArn=self.topic_arn,
        Message=message,
        MessageAttributes={
            'attr1': {'DataType': 'Number', 'StringValue': '111'}
        })
    num_msgs_2 = len(
        self.sqs_client.receive_message(
            QueueUrl=self.queue_url, VisibilityTimeout=0).get('Messages', []))
    self.assertEqual(num_msgs_2, num_msgs_1)

    # clean up
    self.sqs_client.delete_queue(QueueUrl=queue_url)
def _create_queue(self):
    """Create a uniquely named SQS queue; return (name, arn, url)."""
    queue_name = 'queue-%s' % short_uid()
    # ARN is derived from the name, so it can be computed before creation
    queue_arn = aws_stack.sqs_queue_arn(queue_name)
    queue_url = self.sqs_client.create_queue(QueueName=queue_name)['QueueUrl']
    return queue_name, queue_arn, queue_url
def test_bucket_notifications(self):
    """End-to-end S3 bucket notification tests.

    Covers: s3->sqs notifications with prefix/suffix filter rules, reading
    back the notification configuration, single-event-type configurations,
    and s3->sns->sqs fan-out.
    """
    s3_resource = aws_stack.connect_to_resource('s3')
    s3_client = aws_stack.connect_to_service('s3')
    sqs_client = aws_stack.connect_to_service('sqs')

    # create test bucket and queue
    s3_resource.create_bucket(Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS)
    queue_info = sqs_client.create_queue(QueueName=TEST_QUEUE_NAME_FOR_S3)

    # create notification on bucket
    queue_url = queue_info['QueueUrl']
    queue_arn = aws_stack.sqs_queue_arn(TEST_QUEUE_NAME_FOR_S3)
    events = ['s3:ObjectCreated:*', 's3:ObjectRemoved:Delete']
    filter_rules = {
        'FilterRules': [
            {'Name': 'prefix', 'Value': 'testupload/'},
            {'Name': 'suffix', 'Value': 'testfile.txt'}
        ]
    }
    s3_client.put_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS,
        NotificationConfiguration={
            'QueueConfigurations': [
                {
                    'Id': 'id0001',
                    'QueueArn': queue_arn,
                    'Events': events,
                    'Filter': {'Key': filter_rules}
                },
                {
                    # Add second dummy config to fix https://github.com/localstack/localstack/issues/450
                    'Id': 'id0002',
                    'QueueArn': queue_arn,
                    'Events': [],
                    'Filter': {'Key': filter_rules}
                }
            ]
        })

    # retrieve and check notification config
    config = s3_client.get_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS)
    self.assertEqual(len(config['QueueConfigurations']), 2)
    config = [c for c in config['QueueConfigurations'] if c['Events']][0]
    self.assertEqual(events, config['Events'])
    self.assertEqual(filter_rules, config['Filter']['Key'])

    # upload file to S3 (this should NOT trigger a notification)
    test_key1 = '/testdata'
    test_data1 = b'{"test": "bucket_notification1"}'
    s3_client.upload_fileobj(
        BytesIO(test_data1), TEST_BUCKET_NAME_WITH_NOTIFICATIONS, test_key1)

    # upload file to S3 (this should trigger a notification)
    test_key2 = 'testupload/dir1/testfile.txt'
    test_data2 = b'{"test": "bucket_notification2"}'
    s3_client.upload_fileobj(
        BytesIO(test_data2), TEST_BUCKET_NAME_WITH_NOTIFICATIONS, test_key2)

    # receive, assert, and delete message from SQS
    self._receive_assert_delete(
        queue_url,
        [{'key': test_key2}, {'name': TEST_BUCKET_NAME_WITH_NOTIFICATIONS}],
        sqs_client)

    # delete notification config
    self._delete_notification_config()

    # put notification config with single event type
    event = 's3:ObjectCreated:*'
    s3_client.put_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS,
        NotificationConfiguration={
            'QueueConfigurations': [
                {'Id': 'id123456', 'QueueArn': queue_arn, 'Events': [event]}
            ]
        })
    config = s3_client.get_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS)
    config = config['QueueConfigurations'][0]
    self.assertEqual(config['Events'], [event])

    # put notification config with single event type
    event = 's3:ObjectCreated:*'
    filter_rules = {
        'FilterRules': [{'Name': 'prefix', 'Value': 'testupload/'}]
    }
    s3_client.put_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS,
        NotificationConfiguration={
            'QueueConfigurations': [
                {
                    'Id': 'id123456',
                    'QueueArn': queue_arn,
                    'Events': [event],
                    'Filter': {'Key': filter_rules}
                }
            ]
        })
    config = s3_client.get_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS)
    config = config['QueueConfigurations'][0]
    self.assertEqual(config['Events'], [event])
    self.assertEqual(filter_rules, config['Filter']['Key'])

    # upload file to S3 (this should trigger a notification)
    test_key2 = 'testupload/dir1/testfile.txt'
    test_data2 = b'{"test": "bucket_notification2"}'
    s3_client.upload_fileobj(
        BytesIO(test_data2), TEST_BUCKET_NAME_WITH_NOTIFICATIONS, test_key2)

    # receive, assert, and delete message from SQS
    self._receive_assert_delete(
        queue_url,
        [{'key': test_key2}, {'name': TEST_BUCKET_NAME_WITH_NOTIFICATIONS}],
        sqs_client)

    # delete notification config
    self._delete_notification_config()

    #
    # Tests s3->sns->sqs notifications
    #
    sns_client = aws_stack.connect_to_service('sns')
    topic_info = sns_client.create_topic(Name=TEST_S3_TOPIC_NAME)

    s3_client.put_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS,
        NotificationConfiguration={
            'TopicConfigurations': [
                {
                    'Id': 'id123',
                    'Events': ['s3:ObjectCreated:*'],
                    'TopicArn': topic_info['TopicArn']
                }
            ]
        })
    sns_client.subscribe(
        TopicArn=topic_info['TopicArn'], Protocol='sqs', Endpoint=queue_arn)

    test_key2 = 'testupload/dir1/testfile.txt'
    test_data2 = b'{"test": "bucket_notification2"}'
    s3_client.upload_fileobj(
        BytesIO(test_data2), TEST_BUCKET_NAME_WITH_NOTIFICATIONS, test_key2)

    # verify subject and records
    def verify():
        response = sqs_client.receive_message(QueueUrl=queue_url)
        for message in response['Messages']:
            snsObj = json.loads(message['Body'])
            testutil.assert_object({'Subject': 'Amazon S3 Notification'}, snsObj)
            notificationObj = json.loads(snsObj['Message'])
            testutil.assert_objects(
                [{'key': test_key2}, {'name': TEST_BUCKET_NAME_WITH_NOTIFICATIONS}],
                notificationObj['Records'])
            sqs_client.delete_message(
                QueueUrl=queue_url, ReceiptHandle=message['ReceiptHandle'])

    retry(verify, retries=PUBLICATION_RETRIES, sleep=PUBLICATION_TIMEOUT)
    self._delete_notification_config()
def test_bucket_notifications():
    """S3->SQS bucket notification test (module-level variant).

    Configures queue notifications with prefix/suffix filter rules, verifies
    the stored configuration, and checks that only matching uploads produce
    notifications.
    """
    s3_resource = aws_stack.connect_to_resource('s3')
    s3_client = aws_stack.connect_to_service('s3')
    sqs_client = aws_stack.connect_to_service('sqs')

    # create test bucket and queue
    s3_resource.create_bucket(Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS)
    queue_info = sqs_client.create_queue(QueueName=TEST_QUEUE_NAME_FOR_S3)

    # create notification on bucket
    queue_url = queue_info['QueueUrl']
    queue_arn = aws_stack.sqs_queue_arn(TEST_QUEUE_NAME_FOR_S3)
    events = ['s3:ObjectCreated:*', 's3:ObjectRemoved:Delete']
    filter_rules = {
        'FilterRules': [
            {'Name': 'prefix', 'Value': 'testupload/'},
            {'Name': 'suffix', 'Value': 'testfile.txt'}
        ]
    }
    s3_client.put_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS,
        NotificationConfiguration={
            'QueueConfigurations': [
                {
                    'Id': 'id123456',
                    'QueueArn': queue_arn,
                    'Events': events,
                    'Filter': {'Key': filter_rules}
                }
            ]
        })

    # retrieve and check notification config
    config = s3_client.get_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS)
    config = config['QueueConfigurations'][0]
    assert events == config['Events']
    assert filter_rules == config['Filter']['Key']

    # upload file to S3 (this should NOT trigger a notification)
    test_key1 = '/testdata'
    test_data1 = b'{"test": "bucket_notification1"}'
    s3_client.upload_fileobj(
        BytesIO(test_data1), TEST_BUCKET_NAME_WITH_NOTIFICATIONS, test_key1)

    # upload file to S3 (this should trigger a notification)
    test_key2 = 'testupload/dir1/testfile.txt'
    test_data2 = b'{"test": "bucket_notification2"}'
    s3_client.upload_fileobj(
        BytesIO(test_data2), TEST_BUCKET_NAME_WITH_NOTIFICATIONS, test_key2)

    # receive, assert, and delete message from SQS
    receive_assert_delete(
        queue_url,
        [{'key': test_key2}, {'name': TEST_BUCKET_NAME_WITH_NOTIFICATIONS}],
        sqs_client)

    # delete notification config
    _delete_notification_config()

    # put notification config with single event type
    event = 's3:ObjectCreated:*'
    s3_client.put_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS,
        NotificationConfiguration={
            'QueueConfigurations': [
                {'Id': 'id123456', 'QueueArn': queue_arn, 'Events': [event]}
            ]
        })
    config = s3_client.get_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS)
    config = config['QueueConfigurations'][0]
    assert config['Events'] == [event]

    # put notification config with single event type
    event = 's3:ObjectCreated:*'
    filter_rules = {
        'FilterRules': [{'Name': 'prefix', 'Value': 'testupload/'}]
    }
    s3_client.put_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS,
        NotificationConfiguration={
            'QueueConfigurations': [
                {
                    'Id': 'id123456',
                    'QueueArn': queue_arn,
                    'Events': [event],
                    'Filter': {'Key': filter_rules}
                }
            ]
        })
    config = s3_client.get_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS)
    config = config['QueueConfigurations'][0]
    assert config['Events'] == [event]
    assert filter_rules == config['Filter']['Key']

    # upload file to S3 (this should trigger a notification)
    test_key2 = 'testupload/dir1/testfile.txt'
    test_data2 = b'{"test": "bucket_notification2"}'
    s3_client.upload_fileobj(
        BytesIO(test_data2), TEST_BUCKET_NAME_WITH_NOTIFICATIONS, test_key2)

    # receive, assert, and delete message from SQS
    receive_assert_delete(
        queue_url,
        [{'key': test_key2}, {'name': TEST_BUCKET_NAME_WITH_NOTIFICATIONS}],
        sqs_client)

    # delete notification config
    _delete_notification_config()
def test_exists_filter_policy(self):
    """Verify the SNS 'exists' filter-policy operator (True and False forms).

    Uses retry loops around the message-count checks to tolerate delivery
    latency. With exists=True only messages carrying the 'store' attribute
    pass; with exists=False only messages without it do.
    """
    # connect SNS topic to an SQS queue
    queue_name, queue_arn, queue_url = self._create_queue()
    filter_policy = {"store": [{"exists": True}]}

    def do_subscribe(self, filter_policy, queue_arn):
        self.sns_client.subscribe(
            TopicArn=self.topic_arn,
            Protocol="sqs",
            Endpoint=queue_arn,
            Attributes={"FilterPolicy": json.dumps(filter_policy)},
        )

    do_subscribe(self, filter_policy, queue_arn)

    # get number of messages
    num_msgs_0 = len(
        self.sqs_client.receive_message(QueueUrl=queue_url).get("Messages", []))

    # publish message that satisfies the filter policy, assert that message is received
    message = "This is a test message"
    self.sns_client.publish(
        TopicArn=self.topic_arn,
        Message=message,
        MessageAttributes={
            "store": {"DataType": "Number", "StringValue": "99"},
            "def": {"DataType": "Number", "StringValue": "99"},
        },
    )

    def check_message1():
        num_msgs_1 = len(
            self.sqs_client.receive_message(
                QueueUrl=queue_url, VisibilityTimeout=0)["Messages"])
        self.assertEqual(num_msgs_1, num_msgs_0 + 1)
        return num_msgs_1

    num_msgs_1 = retry(check_message1,
                       retries=PUBLICATION_RETRIES,
                       sleep=PUBLICATION_TIMEOUT)

    # publish message that does not satisfy the filter policy, assert that message is not received
    message = "This is a test message"
    self.sns_client.publish(
        TopicArn=self.topic_arn,
        Message=message,
        MessageAttributes={
            "attr1": {"DataType": "Number", "StringValue": "111"}
        },
    )

    def check_message2():
        num_msgs_2 = len(
            self.sqs_client.receive_message(
                QueueUrl=queue_url, VisibilityTimeout=0)["Messages"])
        self.assertEqual(num_msgs_2, num_msgs_1)
        return num_msgs_2

    retry(check_message2,
          retries=PUBLICATION_RETRIES,
          sleep=PUBLICATION_TIMEOUT)

    # test with exist operator set to false.
    queue_arn = aws_stack.sqs_queue_arn(TEST_QUEUE_NAME)
    filter_policy = {"store": [{"exists": False}]}
    do_subscribe(self, filter_policy, queue_arn)

    # get number of messages
    num_msgs_0 = len(
        self.sqs_client.receive_message(QueueUrl=self.queue_url).get("Messages", []))

    # publish message with the attribute and see if its getting filtered.
    message = "This is a test message"
    self.sns_client.publish(
        TopicArn=self.topic_arn,
        Message=message,
        MessageAttributes={
            "store": {"DataType": "Number", "StringValue": "99"},
            "def": {"DataType": "Number", "StringValue": "99"},
        },
    )

    def check_message():
        num_msgs_1 = len(
            self.sqs_client.receive_message(
                QueueUrl=self.queue_url, VisibilityTimeout=0).get("Messages", []))
        self.assertEqual(num_msgs_1, num_msgs_0)
        return num_msgs_1

    num_msgs_1 = retry(check_message,
                       retries=PUBLICATION_RETRIES,
                       sleep=PUBLICATION_TIMEOUT)

    # publish message that without the attribute and see if its getting filtered.
    message = "This is a test message"
    self.sns_client.publish(
        TopicArn=self.topic_arn,
        Message=message,
        MessageAttributes={
            "attr1": {"DataType": "Number", "StringValue": "111"}
        },
    )

    def check_message3():
        num_msgs_2 = len(
            self.sqs_client.receive_message(
                QueueUrl=self.queue_url, VisibilityTimeout=0).get("Messages", []))
        self.assertEqual(num_msgs_2, num_msgs_1)
        return num_msgs_2

    retry(check_message3,
          retries=PUBLICATION_RETRIES,
          sleep=PUBLICATION_TIMEOUT)

    # clean up
    self.sqs_client.delete_queue(QueueUrl=queue_url)
def test_put_events_with_target_sqs_event_detail_match(self):
    """Match events on a detail field and deliver only '$.detail' to SQS.

    An event whose detail matches the pattern must arrive; one that does not
    match must leave the queue empty.
    """
    queue_name = "queue-{}".format(short_uid())
    rule_name = "rule-{}".format(short_uid())
    target_id = "target-{}".format(short_uid())
    bus_name = "bus-{}".format(short_uid())

    sqs_client = aws_stack.connect_to_service("sqs")
    queue_url = sqs_client.create_queue(QueueName=queue_name)["QueueUrl"]
    queue_arn = aws_stack.sqs_queue_arn(queue_name)

    self.events_client.create_event_bus(Name=bus_name)
    self.events_client.put_rule(
        Name=rule_name,
        EventBusName=bus_name,
        EventPattern=json.dumps({"detail": {"EventType": ["0", "1"]}}),
    )
    rs = self.events_client.put_targets(
        Rule=rule_name,
        EventBusName=bus_name,
        Targets=[{"Id": target_id, "Arn": queue_arn, "InputPath": "$.detail"}],
    )

    # target registration must succeed with no failed entries
    self.assertIn("FailedEntryCount", rs)
    self.assertIn("FailedEntries", rs)
    self.assertEqual(0, rs["FailedEntryCount"])
    self.assertEqual([], rs["FailedEntries"])

    # matching event (EventType '1' is in the pattern)
    self.events_client.put_events(
        Entries=[{
            "EventBusName": bus_name,
            "Source": TEST_EVENT_PATTERN["Source"][0],
            "DetailType": TEST_EVENT_PATTERN["detail-type"][0],
            "Detail": json.dumps({"EventType": "1"}),
        }])

    def get_messages(queue_url):
        resp = sqs_client.receive_message(QueueUrl=queue_url)
        return resp.get("Messages")

    messages = retry(get_messages, retries=3, sleep=1, queue_url=queue_url)
    self.assertEqual(1, len(messages))
    actual_event = json.loads(messages[0]["Body"])
    self.assertEqual({"EventType": "1"}, actual_event)

    # non-matching event (EventType '2' is not in the pattern)
    self.events_client.put_events(
        Entries=[{
            "EventBusName": bus_name,
            "Source": TEST_EVENT_PATTERN["Source"][0],
            "DetailType": TEST_EVENT_PATTERN["detail-type"][0],
            "Detail": json.dumps({"EventType": "2"}),
        }])

    # separate helper (defaults to []) so the empty-queue case yields length 0
    def get_messages_or_empty(queue_url):
        resp = sqs_client.receive_message(QueueUrl=queue_url)
        return resp.get("Messages", [])

    messages = retry(get_messages_or_empty, retries=3, sleep=1, queue_url=queue_url)
    self.assertEqual(0, len(messages))

    # clean up
    self.cleanup(bus_name, rule_name, target_id, queue_url=queue_url)
def test_put_events_with_target_sns(self):
    """Publish an event to a custom bus whose rule targets an SNS topic;
    the topic is subscribed to an SQS queue so delivery can be read back
    and asserted.

    NOTE(review): a test method with this exact name appears again further
    down in this file; the later definition shadows this one at
    class-creation time -- confirm which version is intended and drop the
    other.
    """
    queue_name = 'test-%s' % short_uid()
    rule_name = 'rule-{}'.format(short_uid())
    target_id = 'target-{}'.format(short_uid())
    bus_name = 'bus-{}'.format(short_uid())
    sns_client = aws_stack.connect_to_service('sns')
    sqs_client = aws_stack.connect_to_service('sqs')
    topic_name = 'topic-{}'.format(short_uid())
    topic_arn = sns_client.create_topic(Name=topic_name)['TopicArn']
    queue_url = sqs_client.create_queue(QueueName=queue_name)['QueueUrl']
    queue_arn = aws_stack.sqs_queue_arn(queue_name)
    # route topic notifications into the queue for later inspection
    sns_client.subscribe(TopicArn=topic_arn, Protocol='sqs', Endpoint=queue_arn)
    self.events_client.create_event_bus(
        Name=bus_name
    )
    self.events_client.put_rule(
        Name=rule_name,
        EventBusName=bus_name,
        EventPattern=json.dumps(TEST_EVENT_PATTERN)
    )
    rs = self.events_client.put_targets(
        Rule=rule_name,
        EventBusName=bus_name,
        Targets=[
            {
                'Id': target_id,
                'Arn': topic_arn
            }
        ]
    )
    # registering the target must not report any failures
    self.assertIn('FailedEntryCount', rs)
    self.assertIn('FailedEntries', rs)
    self.assertEqual(rs['FailedEntryCount'], 0)
    self.assertEqual(rs['FailedEntries'], [])
    self.events_client.put_events(
        Entries=[{
            'EventBusName': bus_name,
            'Source': TEST_EVENT_PATTERN['Source'][0],
            'DetailType': TEST_EVENT_PATTERN['detail-type'][0],
            'Detail': json.dumps(TEST_EVENT_PATTERN['Detail'][0])
        }]
    )

    def get_message(queue_url):
        # KeyError while the queue is still empty, which makes retry() poll again
        resp = sqs_client.receive_message(QueueUrl=queue_url)
        return resp['Messages']

    messages = retry(get_message, retries=3, sleep=1, queue_url=queue_url)
    self.assertEqual(len(messages), 1)
    # the SQS body is the SNS envelope; the actual event is in 'Message'
    actual_event = json.loads(messages[0]['Body']).get('Message')
    self.assertIsValidEvent(actual_event)
    self.assertEqual(json.loads(actual_event).get('detail'),
                     TEST_EVENT_PATTERN['Detail'][0])

    # clean up
    sqs_client.delete_queue(QueueUrl=queue_url)
    sns_client.delete_topic(TopicArn=topic_arn)
    self.events_client.remove_targets(
        Rule=rule_name,
        EventBusName=bus_name,
        Ids=[target_id],
        Force=True
    )
    self.events_client.delete_rule(
        Name=rule_name,
        EventBusName=bus_name,
        Force=True
    )
    self.events_client.delete_event_bus(
        Name=bus_name
    )
def test_put_events_with_target_sns(self):
    """Publish an event to a custom bus whose rule targets an SNS topic;
    the topic is subscribed to an SQS queue so delivery can be read back
    and asserted.

    NOTE(review): an earlier method in this class carries this same name;
    this later definition shadows it -- confirm the earlier copy can be
    removed.
    """
    queue_name = "test-%s" % short_uid()
    rule_name = "rule-{}".format(short_uid())
    target_id = "target-{}".format(short_uid())
    bus_name = "bus-{}".format(short_uid())
    sns_client = aws_stack.connect_to_service("sns")
    sqs_client = aws_stack.connect_to_service("sqs")
    topic_name = "topic-{}".format(short_uid())
    topic_arn = sns_client.create_topic(Name=topic_name)["TopicArn"]
    queue_url = sqs_client.create_queue(QueueName=queue_name)["QueueUrl"]
    queue_arn = aws_stack.sqs_queue_arn(queue_name)
    # route topic notifications into the queue for later inspection
    sns_client.subscribe(TopicArn=topic_arn, Protocol="sqs", Endpoint=queue_arn)
    self.events_client.create_event_bus(Name=bus_name)
    self.events_client.put_rule(
        Name=rule_name,
        EventBusName=bus_name,
        EventPattern=json.dumps(TEST_EVENT_PATTERN),
    )
    rs = self.events_client.put_targets(
        Rule=rule_name,
        EventBusName=bus_name,
        Targets=[{
            "Id": target_id,
            "Arn": topic_arn
        }],
    )
    # registering the target must not report any failures
    self.assertIn("FailedEntryCount", rs)
    self.assertIn("FailedEntries", rs)
    self.assertEqual(0, rs["FailedEntryCount"])
    self.assertEqual([], rs["FailedEntries"])
    self.events_client.put_events(
        Entries=[{
            "EventBusName": bus_name,
            "Source": TEST_EVENT_PATTERN["Source"][0],
            "DetailType": TEST_EVENT_PATTERN["detail-type"][0],
            "Detail": json.dumps(TEST_EVENT_PATTERN["Detail"][0]),
        }])

    def get_message(queue_url):
        # KeyError while the queue is still empty, which makes retry() poll again
        resp = sqs_client.receive_message(QueueUrl=queue_url)
        return resp["Messages"]

    messages = retry(get_message, retries=3, sleep=1, queue_url=queue_url)
    self.assertEqual(1, len(messages))
    # the SQS body is the SNS envelope; the actual event is in "Message"
    actual_event = json.loads(messages[0]["Body"]).get("Message")
    self.assertIsValidEvent(actual_event)
    self.assertEqual(TEST_EVENT_PATTERN["Detail"][0],
                     json.loads(actual_event).get("detail"))

    # clean up
    sns_client.delete_topic(TopicArn=topic_arn)
    self.cleanup(bus_name, rule_name, target_id, queue_url=queue_url)
def test_put_events_into_event_bus(self):
    """Chain two custom buses: bus1's rule targets bus2's ARN, and bus2
    delivers into an SQS queue, so an event put on bus1 lands in the queue.
    """
    queue_name = 'queue-{}'.format(short_uid())
    rule_name = 'rule-{}'.format(short_uid())
    target_id = 'target-{}'.format(short_uid())
    bus_name_1 = 'bus1-{}'.format(short_uid())
    bus_name_2 = 'bus2-{}'.format(short_uid())
    sqs_client = aws_stack.connect_to_service('sqs')
    queue_url = sqs_client.create_queue(QueueName=queue_name)['QueueUrl']
    queue_arn = aws_stack.sqs_queue_arn(queue_name)
    self.events_client.create_event_bus(
        Name=bus_name_1
    )
    resp = self.events_client.create_event_bus(
        Name=bus_name_2
    )
    # NOTE(review): the rule is only created on bus 1, yet targets are also
    # attached to the same rule name on bus 2 below -- this relies on
    # lenient behavior; on real AWS a rule must exist per bus. Confirm.
    self.events_client.put_rule(
        Name=rule_name,
        EventBusName=bus_name_1,
    )
    # bus1 forwards matching events into bus2
    self.events_client.put_targets(
        Rule=rule_name,
        EventBusName=bus_name_1,
        Targets=[
            {
                'Id': target_id,
                'Arn': resp.get('EventBusArn')
            }
        ]
    )
    # bus2 delivers into the SQS queue
    self.events_client.put_targets(
        Rule=rule_name,
        EventBusName=bus_name_2,
        Targets=[
            {
                'Id': target_id,
                'Arn': queue_arn
            }
        ]
    )
    self.events_client.put_events(
        Entries=[{
            'EventBusName': bus_name_1,
            'Source': TEST_EVENT_PATTERN['Source'][0],
            'DetailType': TEST_EVENT_PATTERN['detail-type'][0],
            'Detail': json.dumps(TEST_EVENT_PATTERN['Detail'][0])
        }]
    )

    def get_message(queue_url):
        # KeyError while the queue is still empty, which makes retry() poll again
        resp = sqs_client.receive_message(QueueUrl=queue_url)
        return resp['Messages']

    messages = retry(get_message, retries=3, sleep=1, queue_url=queue_url)
    self.assertEqual(len(messages), 1)
    actual_event = json.loads(messages[0]['Body'])
    self.assertIsValidEvent(actual_event)
    self.assertEqual(actual_event['detail'], TEST_EVENT_PATTERN['Detail'][0])

    # clean up
    sqs_client.delete_queue(QueueUrl=queue_url)
    self.events_client.remove_targets(
        Rule=rule_name,
        EventBusName=bus_name_1,
        Ids=[target_id],
        Force=True
    )
    self.events_client.remove_targets(
        Rule=rule_name,
        EventBusName=bus_name_2,
        Ids=[target_id],
        Force=True
    )
    self.events_client.delete_rule(
        Name=rule_name,
        EventBusName=bus_name_1,
        Force=True
    )
    self.events_client.delete_event_bus(
        Name=bus_name_1
    )
    self.events_client.delete_event_bus(
        Name=bus_name_2
    )
def test_put_events_with_input_path_multiple(self):
    """One rule with two SQS targets: the first has InputPath '$.detail'
    (receives just the detail document), the second receives the full
    event envelope. A non-matching event must reach neither.

    Fixes: the second queue was leaked (never deleted in cleanup), and the
    positive-path message getter returned ``resp.get("Messages")``, which
    yields ``None`` without raising, so ``retry()`` never actually retried.
    """
    queue_name = "queue-{}".format(short_uid())
    queue_name_1 = "queue-{}".format(short_uid())
    rule_name = "rule-{}".format(short_uid())
    target_id = "target-{}".format(short_uid())
    target_id_1 = "target-{}".format(short_uid())
    bus_name = "bus-{}".format(short_uid())

    sqs_client = aws_stack.connect_to_service("sqs")
    queue_url = sqs_client.create_queue(QueueName=queue_name)["QueueUrl"]
    queue_arn = aws_stack.sqs_queue_arn(queue_name)
    queue_url_1 = sqs_client.create_queue(QueueName=queue_name_1)["QueueUrl"]
    queue_arn_1 = aws_stack.sqs_queue_arn(queue_name_1)

    self.events_client.create_event_bus(Name=bus_name)
    self.events_client.put_rule(
        Name=rule_name,
        EventBusName=bus_name,
        EventPattern=json.dumps(TEST_EVENT_PATTERN),
    )
    self.events_client.put_targets(
        Rule=rule_name,
        EventBusName=bus_name,
        Targets=[
            {"Id": target_id, "Arn": queue_arn, "InputPath": "$.detail"},
            {"Id": target_id_1, "Arn": queue_arn_1},
        ],
    )

    self.events_client.put_events(
        Entries=[{
            "EventBusName": bus_name,
            "Source": TEST_EVENT_PATTERN["Source"][0],
            "DetailType": TEST_EVENT_PATTERN["detail-type"][0],
            "Detail": json.dumps(TEST_EVENT_PATTERN["Detail"][0]),
        }])

    def get_messages(queue_url):
        # KeyError while the queue is still empty, so retry() polls again
        resp = sqs_client.receive_message(QueueUrl=queue_url)
        return resp["Messages"]

    messages = retry(get_messages, retries=3, sleep=1, queue_url=queue_url)
    self.assertEqual(1, len(messages))
    # InputPath target receives only the event detail document
    self.assertEqual(EVENT_DETAIL, json.loads(messages[0].get("Body")))

    messages = retry(get_messages, retries=3, sleep=1, queue_url=queue_url_1)
    self.assertEqual(1, len(messages))
    # plain target receives the full event envelope
    self.assertEqual(EVENT_DETAIL,
                     json.loads(messages[0].get("Body")).get("detail"))

    # an event with a non-matching source must not be delivered
    self.events_client.put_events(
        Entries=[{
            "EventBusName": bus_name,
            "Source": "dummySource",
            "DetailType": TEST_EVENT_PATTERN["detail-type"][0],
            "Detail": json.dumps(TEST_EVENT_PATTERN["Detail"][0]),
        }])

    def get_messages_optional(queue_url):
        # negative check: None is the expected outcome here
        resp = sqs_client.receive_message(QueueUrl=queue_url)
        return resp.get("Messages")

    messages = retry(get_messages_optional, retries=3, sleep=1, queue_url=queue_url)
    self.assertIsNone(messages)

    # clean up (fix: the second queue was previously leaked)
    sqs_client.delete_queue(QueueUrl=queue_url_1)
    self.cleanup(bus_name, rule_name, target_id, queue_url=queue_url)
def test_scheduled_expression_events(self):
    """A scheduled rule fans out to SNS (subscribed to a local HTTP
    endpoint), a Step Functions state machine, a standard SQS queue and a
    FIFO queue; every target must receive the static JSON input.

    Fixes: cleanup previously removed only the topic and state-machine
    targets (leaking the two SQS targets) and never deleted the FIFO queue.
    """

    class HttpEndpointListener(ProxyListener):
        def forward_request(self, method, path, data, headers):
            # collect every request body the SNS http subscription delivers
            event = json.loads(to_str(data))
            events.append(event)
            return 200

    local_port = get_free_tcp_port()
    proxy = start_proxy(local_port, backend_url=None,
                        update_listener=HttpEndpointListener())
    wait_for_port_open(local_port)

    topic_name = 'topic-{}'.format(short_uid())
    queue_name = 'queue-{}'.format(short_uid())
    fifo_queue_name = 'queue-{}.fifo'.format(short_uid())
    rule_name = 'rule-{}'.format(short_uid())
    endpoint = '{}://{}:{}'.format(get_service_protocol(),
                                   config.LOCALSTACK_HOSTNAME, local_port)
    sm_role_arn = aws_stack.role_arn('sfn_role')
    sm_name = 'state-machine-{}'.format(short_uid())
    topic_target_id = 'target-{}'.format(short_uid())
    sm_target_id = 'target-{}'.format(short_uid())
    queue_target_id = 'target-{}'.format(short_uid())
    fifo_queue_target_id = 'target-{}'.format(short_uid())

    events = []
    state_machine_definition = """
    {
        "StartAt": "Hello",
        "States": {
            "Hello": {
                "Type": "Pass",
                "Result": "World",
                "End": true
            }
        }
    }
    """

    state_machine_arn = self.sfn_client.create_state_machine(
        name=sm_name, definition=state_machine_definition,
        roleArn=sm_role_arn)['stateMachineArn']

    topic_arn = self.sns_client.create_topic(Name=topic_name)['TopicArn']
    self.sns_client.subscribe(TopicArn=topic_arn, Protocol='http',
                              Endpoint=endpoint)

    queue_url = self.sqs_client.create_queue(QueueName=queue_name)['QueueUrl']
    fifo_queue_url = self.sqs_client.create_queue(
        QueueName=fifo_queue_name, Attributes={'FifoQueue': 'true'})['QueueUrl']
    queue_arn = aws_stack.sqs_queue_arn(queue_name)
    fifo_queue_arn = aws_stack.sqs_queue_arn(fifo_queue_name)

    event = {'env': 'testing'}

    # NOTE(review): AWS's canonical form is 'rate(1 minute)' (singular);
    # keeping the original string as the emulator accepts it -- confirm.
    self.events_client.put_rule(
        Name=rule_name,
        ScheduleExpression='rate(1 minutes)'
    )
    self.events_client.put_targets(
        Rule=rule_name,
        Targets=[
            {'Id': topic_target_id, 'Arn': topic_arn,
             'Input': json.dumps(event)},
            {'Id': sm_target_id, 'Arn': state_machine_arn,
             'Input': json.dumps(event)},
            {'Id': queue_target_id, 'Arn': queue_arn,
             'Input': json.dumps(event)},
            {'Id': fifo_queue_target_id, 'Arn': fifo_queue_arn,
             'Input': json.dumps(event),
             # FIFO targets require a message group id
             'SqsParameters': {'MessageGroupId': '123'}}
        ]
    )

    def received(q_urls):
        # state machine got executed
        executions = self.sfn_client.list_executions(
            stateMachineArn=state_machine_arn)['executions']
        self.assertGreaterEqual(len(executions), 1)

        # http endpoint got events (subscription confirmation + notification)
        self.assertGreaterEqual(len(events), 2)
        notifications = [event['Message'] for event in events
                         if event['Type'] == 'Notification']
        self.assertGreaterEqual(len(notifications), 1)

        # get state machine execution detail
        execution_arn = executions[0]['executionArn']
        execution_input = self.sfn_client.describe_execution(
            executionArn=execution_arn)['input']

        # get one message from each queue
        all_msgs = []
        for url in q_urls:
            msgs = self.sqs_client.receive_message(QueueUrl=url).get('Messages', [])
            self.assertGreaterEqual(len(msgs), 1)
            all_msgs.append(msgs[0])

        return execution_input, notifications[0], all_msgs

    execution_input, notification, msgs_received = retry(
        received, retries=5, sleep=15, q_urls=[queue_url, fifo_queue_url])
    self.assertEqual(json.loads(notification), event)
    self.assertEqual(json.loads(execution_input), event)
    for msg_received in msgs_received:
        self.assertEqual(json.loads(msg_received['Body']), event)

    # clean up
    proxy.stop()
    self.events_client.remove_targets(
        Rule=rule_name,
        # fix: previously only the topic and state-machine targets were
        # removed, leaking the two SQS targets
        Ids=[topic_target_id, sm_target_id, queue_target_id,
             fifo_queue_target_id],
        Force=True
    )
    self.events_client.delete_rule(Name=rule_name, Force=True)
    self.sns_client.delete_topic(TopicArn=topic_arn)
    self.sfn_client.delete_state_machine(stateMachineArn=state_machine_arn)
    self.sqs_client.delete_queue(QueueUrl=queue_url)
    # fix: the FIFO queue was previously leaked
    self.sqs_client.delete_queue(QueueUrl=fifo_queue_url)
def test_bucket_notifications():
    """End-to-end check of S3 bucket notifications: direct S3->SQS delivery
    (with event-type and prefix/suffix key filters), then S3->SNS->SQS."""
    s3_resource = aws_stack.connect_to_resource('s3')
    s3_client = aws_stack.connect_to_service('s3')
    sqs_client = aws_stack.connect_to_service('sqs')

    # create test bucket and queue
    s3_resource.create_bucket(Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS)
    queue_info = sqs_client.create_queue(QueueName=TEST_QUEUE_NAME_FOR_S3)

    # create notification on bucket
    queue_url = queue_info['QueueUrl']
    queue_arn = aws_stack.sqs_queue_arn(TEST_QUEUE_NAME_FOR_S3)
    events = ['s3:ObjectCreated:*', 's3:ObjectRemoved:Delete']
    filter_rules = {
        'FilterRules': [{
            'Name': 'prefix',
            'Value': 'testupload/'
        }, {
            'Name': 'suffix',
            'Value': 'testfile.txt'
        }]
    }
    s3_client.put_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS,
        NotificationConfiguration={
            'QueueConfigurations': [{
                'Id': 'id123456',
                'QueueArn': queue_arn,
                'Events': events,
                'Filter': {
                    'Key': filter_rules
                }
            }]
        }
    )

    # retrieve and check notification config round-trips unchanged
    config = s3_client.get_bucket_notification_configuration(Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS)
    config = config['QueueConfigurations'][0]
    assert events == config['Events']
    assert filter_rules == config['Filter']['Key']

    # upload file to S3 (this should NOT trigger a notification: the key
    # matches neither the configured prefix nor the suffix)
    test_key1 = '/testdata'
    test_data1 = b'{"test": "bucket_notification1"}'
    s3_client.upload_fileobj(BytesIO(test_data1), TEST_BUCKET_NAME_WITH_NOTIFICATIONS, test_key1)

    # upload file to S3 (this should trigger a notification)
    test_key2 = 'testupload/dir1/testfile.txt'
    test_data2 = b'{"test": "bucket_notification2"}'
    s3_client.upload_fileobj(BytesIO(test_data2), TEST_BUCKET_NAME_WITH_NOTIFICATIONS, test_key2)

    # receive, assert, and delete message from SQS
    receive_assert_delete(queue_url,
        [{'key': test_key2}, {'name': TEST_BUCKET_NAME_WITH_NOTIFICATIONS}],
        sqs_client)

    # delete notification config
    _delete_notification_config()

    # put notification config with single event type
    event = 's3:ObjectCreated:*'
    s3_client.put_bucket_notification_configuration(Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS,
        NotificationConfiguration={
            'QueueConfigurations': [{
                'Id': 'id123456',
                'QueueArn': queue_arn,
                'Events': [event]
            }]
        }
    )
    config = s3_client.get_bucket_notification_configuration(Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS)
    config = config['QueueConfigurations'][0]
    assert config['Events'] == [event]

    # put notification config with single event type and a prefix filter
    event = 's3:ObjectCreated:*'
    filter_rules = {
        'FilterRules': [{
            'Name': 'prefix',
            'Value': 'testupload/'
        }]
    }
    s3_client.put_bucket_notification_configuration(Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS,
        NotificationConfiguration={
            'QueueConfigurations': [{
                'Id': 'id123456',
                'QueueArn': queue_arn,
                'Events': [event],
                'Filter': {
                    'Key': filter_rules
                }
            }]
        }
    )
    config = s3_client.get_bucket_notification_configuration(Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS)
    config = config['QueueConfigurations'][0]
    assert config['Events'] == [event]
    assert filter_rules == config['Filter']['Key']

    # upload file to S3 (this should trigger a notification)
    test_key2 = 'testupload/dir1/testfile.txt'
    test_data2 = b'{"test": "bucket_notification2"}'
    s3_client.upload_fileobj(BytesIO(test_data2), TEST_BUCKET_NAME_WITH_NOTIFICATIONS, test_key2)

    # receive, assert, and delete message from SQS
    receive_assert_delete(queue_url,
        [{'key': test_key2}, {'name': TEST_BUCKET_NAME_WITH_NOTIFICATIONS}],
        sqs_client)

    # delete notification config
    _delete_notification_config()

    #
    # Tests s3->sns->sqs notifications
    #
    sns_client = aws_stack.connect_to_service('sns')
    topic_info = sns_client.create_topic(Name=TEST_S3_TOPIC_NAME)

    s3_client.put_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS,
        NotificationConfiguration={
            'TopicConfigurations': [
                {
                    'Id': 'id123',
                    'Events': ['s3:ObjectCreated:*'],
                    'TopicArn': topic_info['TopicArn']
                }
            ]
        })

    sns_client.subscribe(TopicArn=topic_info['TopicArn'], Protocol='sqs',
                         Endpoint=queue_arn)
    test_key2 = 'testupload/dir1/testfile.txt'
    test_data2 = b'{"test": "bucket_notification2"}'
    s3_client.upload_fileobj(BytesIO(test_data2), TEST_BUCKET_NAME_WITH_NOTIFICATIONS, test_key2)

    # verify subject and records of the SNS-wrapped notification in the queue
    response = sqs_client.receive_message(QueueUrl=queue_url)
    for message in response['Messages']:
        snsObj = json.loads(message['Body'])
        testutil.assert_object({'Subject': 'Amazon S3 Notification'}, snsObj)
        # the actual S3 notification is nested inside the SNS 'Message' field
        notificationObj = json.loads(snsObj['Message'])
        testutil.assert_objects(
            [
                {'key': test_key2},
                {'name': TEST_BUCKET_NAME_WITH_NOTIFICATIONS}
            ], notificationObj['Records'])
        sqs_client.delete_message(QueueUrl=queue_url,
                                  ReceiptHandle=message['ReceiptHandle'])

    _delete_notification_config()
def test_event_source_mapping_default_batch_size(self):
    """Event source mappings get the per-service default BatchSize, and
    values above the service maximum are rejected on both update and
    create.

    Fix: the manual ``try`` / ``self.fail`` / ``except ClientError``
    pattern is replaced by the idiomatic ``assertRaises`` context manager,
    which reports failures more clearly and is behavior-equivalent.
    """
    function_name = "lambda_func-{}".format(short_uid())
    queue_name_1 = "queue-{}-1".format(short_uid())
    queue_name_2 = "queue-{}-2".format(short_uid())
    ddb_table = "ddb_table-{}".format(short_uid())

    testutil.create_lambda_function(
        handler_file=TEST_LAMBDA_PYTHON_ECHO,
        func_name=function_name,
        runtime=LAMBDA_RUNTIME_PYTHON36,
    )

    lambda_client = aws_stack.create_external_boto_client("lambda")
    sqs_client = aws_stack.create_external_boto_client("sqs")

    queue_url_1 = sqs_client.create_queue(QueueName=queue_name_1)["QueueUrl"]
    queue_arn_1 = aws_stack.sqs_queue_arn(queue_name_1)

    rs = lambda_client.create_event_source_mapping(
        EventSourceArn=queue_arn_1, FunctionName=function_name)
    # without an explicit BatchSize the SQS default applies
    self.assertEqual(BATCH_SIZE_RANGES["sqs"][0], rs["BatchSize"])
    uuid = rs["UUID"]

    # updating with a batch size above the SQS maximum must fail
    with self.assertRaises(ClientError) as ctx:
        lambda_client.update_event_source_mapping(
            UUID=uuid,
            FunctionName=function_name,
            BatchSize=BATCH_SIZE_RANGES["sqs"][1] + 1,
        )
    self.assertEqual(INVALID_PARAMETER_VALUE_EXCEPTION,
                     ctx.exception.response["Error"]["Code"])

    queue_url_2 = sqs_client.create_queue(QueueName=queue_name_2)["QueueUrl"]
    queue_arn_2 = aws_stack.sqs_queue_arn(queue_name_2)

    # creating with a batch size above the SQS maximum must fail, too
    with self.assertRaises(ClientError) as ctx:
        lambda_client.create_event_source_mapping(
            EventSourceArn=queue_arn_2,
            FunctionName=function_name,
            BatchSize=BATCH_SIZE_RANGES["sqs"][1] + 1,
        )
    self.assertEqual(INVALID_PARAMETER_VALUE_EXCEPTION,
                     ctx.exception.response["Error"]["Code"])

    table_arn = aws_stack.create_dynamodb_table(
        ddb_table, partition_key="id")["TableDescription"]["TableArn"]
    rs = lambda_client.create_event_source_mapping(
        EventSourceArn=table_arn, FunctionName=function_name)
    # DynamoDB streams have their own (different) default batch size
    self.assertEqual(BATCH_SIZE_RANGES["dynamodb"][0], rs["BatchSize"])

    # clean up
    dynamodb_client = aws_stack.create_external_boto_client("dynamodb")
    dynamodb_client.delete_table(TableName=ddb_table)
    sqs_client.delete_queue(QueueUrl=queue_url_1)
    sqs_client.delete_queue(QueueUrl=queue_url_2)
    lambda_client.delete_function(FunctionName=function_name)