def test_disabled_event_source_mapping_with_dynamodb(self):
    """Disabling a DynamoDB event source mapping must stop further lambda invocations.

    Flow: create echo lambda + DynamoDB table, map the table to the lambda,
    put one item (expect one invocation), disable the mapping, put a second
    item (expect no new invocation), then clean up all resources.
    """
    function_name = "lambda_func-{}".format(short_uid())
    ddb_table = "ddb_table-{}".format(short_uid())
    testutil.create_lambda_function(
        handler_file=TEST_LAMBDA_PYTHON_ECHO,
        func_name=function_name,
        runtime=LAMBDA_RUNTIME_PYTHON36,
    )
    table_arn = aws_stack.create_dynamodb_table(
        ddb_table, partition_key="id")["TableDescription"]["TableArn"]
    lambda_client = aws_stack.create_external_boto_client("lambda")
    rs = lambda_client.create_event_source_mapping(
        FunctionName=function_name, EventSourceArn=table_arn)
    # UUID identifies the mapping for the later disable call
    uuid = rs["UUID"]
    dynamodb = aws_stack.connect_to_resource("dynamodb")
    table = dynamodb.Table(ddb_table)
    items = [
        {
            "id": short_uid(),
            "data": "data1"
        },
        {
            "id": short_uid(),
            "data": "data2"
        },
    ]
    table.put_item(Item=items[0])
    events = get_lambda_log_events(function_name)
    # lambda was invoked 1 time
    self.assertEqual(1, len(events[0]["Records"]))
    # disable event source mapping
    lambda_client.update_event_source_mapping(UUID=uuid, Enabled=False)
    table.put_item(Item=items[1])
    events = get_lambda_log_events(function_name)
    # lambda no longer invoked, still have 1 event
    self.assertEqual(1, len(events[0]["Records"]))
    # clean up
    dynamodb_client = aws_stack.create_external_boto_client("dynamodb")
    dynamodb_client.delete_table(TableName=ddb_table)
    lambda_client.delete_function(FunctionName=function_name)
def test_publish_message_by_target_arn(self):
    """Publishing with TopicArn and with TargetArn must both reach a lambda subscriber.

    Creates a topic and an echo lambda, subscribes the lambda, publishes once
    via `TopicArn` and once via `TargetArn`, and checks each delivery carries
    the expected `EventSubscriptionArn`.
    """
    self.unsubscribe_all_from_sns()
    topic_name = 'queue-{}'.format(short_uid())
    func_name = 'lambda-%s' % short_uid()
    topic_arn = self.sns_client.create_topic(Name=topic_name)['TopicArn']
    testutil.create_lambda_function(
        handler_file=TEST_LAMBDA_ECHO_FILE,
        func_name=func_name,
        runtime=LAMBDA_RUNTIME_PYTHON36
    )
    lambda_arn = aws_stack.lambda_function_arn(func_name)
    subscription_arn = self.sns_client.subscribe(
        TopicArn=topic_arn, Protocol='lambda', Endpoint=lambda_arn
    )['SubscriptionArn']
    self.sns_client.publish(
        TopicArn=topic_arn, Message='test_message_1', Subject='test subject'
    )
    events = get_lambda_log_events(func_name)
    # Lambda invoked 1 time
    self.assertEqual(len(events), 1)
    message = events[0]['Records'][0]
    self.assertEqual(message['EventSubscriptionArn'], subscription_arn)
    # publish again, this time addressing the topic via TargetArn
    self.sns_client.publish(
        TargetArn=topic_arn, Message='test_message_2', Subject='test subject'
    )
    events = get_lambda_log_events(func_name)
    # Lambda invoked 1 more time
    self.assertEqual(len(events), 2)
    for event in events:
        message = event['Records'][0]
        self.assertEqual(message['EventSubscriptionArn'], subscription_arn)
    # clean up
    self.sns_client.delete_topic(TopicArn=topic_arn)
    lambda_client = aws_stack.connect_to_service('lambda')
    lambda_client.delete_function(FunctionName=func_name)
def test_lambda_subscribe_sns_topic(self):
    """A lambda subscribed to an SNS topic receives the published notification with its subject."""
    function_name = '{}-{}'.format(TEST_LAMBDA_FUNCTION_PREFIX, short_uid())
    testutil.create_lambda_function(
        handler_file=TEST_LAMBDA_ECHO_FILE,
        func_name=function_name,
        runtime=LAMBDA_RUNTIME_PYTHON36,
    )

    # wire the topic to the lambda's function ARN
    topic_arn = self.sns_client.create_topic(Name=TEST_SNS_TOPIC_NAME)['TopicArn']
    self.sns_client.subscribe(
        TopicArn=topic_arn,
        Protocol='lambda',
        Endpoint=lambda_api.func_arn(function_name),
    )

    subject = '[Subject] Test subject'
    message = 'Hello world.'
    self.sns_client.publish(TopicArn=topic_arn, Subject=subject, Message=message)

    # exactly one invocation, carrying the SNS notification payload
    log_events = get_lambda_log_events(function_name)
    self.assertEqual(len(log_events), 1)
    sns_payload = log_events[0]['Records'][0]['Sns']
    self.assertIn('Subject', sns_payload)
    self.assertEqual(sns_payload['Subject'], subject)
def test_lambda_invoked_by_sqs_message_with_attributes(self):
    """SQS message attributes must be forwarded to the event-source-mapped lambda."""
    function_name = 'lambda_func-{}'.format(short_uid())
    queue_name = 'queue-{}'.format(short_uid())
    queue_url = self.client.create_queue(QueueName=queue_name)['QueueUrl']

    testutil.create_lambda_function(
        handler_file=TEST_LAMBDA_ECHO_FILE,
        func_name=function_name,
        runtime=LAMBDA_RUNTIME_PYTHON36,
    )
    lambda_client = aws_stack.connect_to_service('lambda')
    lambda_client.create_event_source_mapping(
        EventSourceArn=aws_stack.sqs_queue_arn(queue_name),
        FunctionName=function_name,
    )

    self.client.send_message(
        QueueUrl=queue_url,
        MessageBody='hello world.',
        MessageAttributes=TEST_MESSAGE_ATTRIBUTES,
    )

    # one invocation whose record carries both body and attributes
    log_events = get_lambda_log_events(function_name)
    self.assertEqual(len(log_events), 1)
    record = log_events[0]['Records'][0]
    self.assertEqual(record['body'], 'hello world.')
    self.assertIn('messageAttributes', record)
    self.assertIn('City', record['messageAttributes'])
    self.assertEqual(record['messageAttributes']['City'], TEST_MESSAGE_ATTRIBUTES['City'])

    # clean up
    self.client.delete_queue(QueueUrl=queue_url)
    lambda_client.delete_function(FunctionName=function_name)
def check_invocation():
    # Both expected log messages must be present, i.e. the lambda ran twice.
    log_entries = testutil.get_lambda_log_events(
        test_lambda_name, log_group=logs_log_group, logs_client=logs_client
    )
    assert len(log_entries) == 2
    for expected in ("test", "test 2"):
        assert expected in log_entries
def test_put_events_with_target_lambda(self):
    """An EventBridge rule on a custom bus delivers matching events to a lambda target.

    Creates a custom bus + pattern rule, attaches an echo lambda as target,
    puts a matching event, asserts the lambda received exactly that event,
    then tears the resources down.
    """
    rule_name = 'rule-{}'.format(short_uid())
    function_name = 'lambda-func-{}'.format(short_uid())
    target_id = 'target-{}'.format(short_uid())
    rs = testutil.create_lambda_function(
        handler_file=os.path.join(THIS_FOLDER, 'lambdas', 'lambda_echo.py'),
        func_name=function_name,
        runtime=LAMBDA_RUNTIME_PYTHON36)
    func_arn = rs['CreateFunctionResponse']['FunctionArn']
    self.events_client.create_event_bus(Name=TEST_EVENT_BUS_NAME)
    self.events_client.put_rule(
        Name=rule_name,
        EventBusName=TEST_EVENT_BUS_NAME,
        EventPattern=json.dumps(TEST_EVENT_PATTERN))
    rs = self.events_client.put_targets(
        Rule=rule_name,
        EventBusName=TEST_EVENT_BUS_NAME,
        Targets=[{
            'Id': target_id,
            'Arn': func_arn
        }])
    # registering the target must report zero failed entries
    self.assertIn('FailedEntryCount', rs)
    self.assertIn('FailedEntries', rs)
    self.assertEqual(rs['FailedEntryCount'], 0)
    self.assertEqual(rs['FailedEntries'], [])
    self.events_client.put_events(
        Entries=[{
            'EventBusName': TEST_EVENT_BUS_NAME,
            'Source': TEST_EVENT_PATTERN['Source'],
            'DetailType': TEST_EVENT_PATTERN['DetailType'],
            'Detail': TEST_EVENT_PATTERN['Detail']
        }])
    # Get lambda's log events
    events = get_lambda_log_events(function_name)
    self.assertEqual(len(events), 1)
    actual_event = events[0]
    self.assertIsValidEvent(actual_event)
    # compare detail payloads structurally, not as raw JSON strings
    self.assertDictEqual(
        json.loads(actual_event['detail']),
        json.loads(TEST_EVENT_PATTERN['Detail']))
    # clean up
    testutil.delete_lambda_function(function_name)
    self.events_client.remove_targets(
        Rule=rule_name,
        EventBusName=TEST_EVENT_BUS_NAME,
        Ids=[target_id],
        Force=True)
    self.events_client.delete_rule(
        Name=rule_name, EventBusName=TEST_EVENT_BUS_NAME, Force=True)
    self.events_client.delete_event_bus(Name=TEST_EVENT_BUS_NAME)
def test_lambda_invoked_by_sqs_message_with_delay_seconds(self):
    """DelaySeconds on an SQS message must delay the mapped lambda's invocation.

    Sends one delayed message, checks no logs exist at half the delay, then
    checks exactly one invocation occurred after the delay has elapsed.
    """
    function_name = 'lambda_func-{}'.format(short_uid())
    queue_name = 'queue-{}'.format(short_uid())
    delay_time = 6
    queue_url = self.client.create_queue(QueueName=queue_name)['QueueUrl']
    queue_arn = aws_stack.sqs_queue_arn(queue_name)
    testutil.create_lambda_function(
        handler_file=TEST_LAMBDA_ECHO_FILE,
        func_name=function_name,
        runtime=LAMBDA_RUNTIME_PYTHON36
    )
    lambda_client = aws_stack.connect_to_service('lambda')
    lambda_client.create_event_source_mapping(
        EventSourceArn=queue_arn,
        FunctionName=function_name
    )
    rs = self.client.send_message(
        QueueUrl=queue_url,
        MessageBody='hello world.',
        DelaySeconds=delay_time
    )
    message_id = rs['MessageId']
    # wait only half the delay: the message must not be visible yet
    time.sleep(delay_time / 2)
    # There is no log group for this lambda (lambda not invoked yet)
    log_events = get_lambda_log_events(function_name)
    self.assertEqual(len(log_events), 0)
    # After delay time, lambda invoked by sqs
    events = get_lambda_log_events(function_name, delay_time * 1.5)
    # Lambda just invoked 1 time
    self.assertEqual(len(events), 1)
    message = events[0]['Records'][0]
    self.assertEqual(message['eventSourceARN'], queue_arn)
    self.assertEqual(message['messageId'], message_id)
    # clean up
    self.client.delete_queue(QueueUrl=queue_url)
    lambda_client.delete_function(FunctionName=function_name)
def test_create_kinesis_event_source_mapping(self):
    """Records put into a mapped Kinesis stream arrive at the lambda as one batch.

    Creates the mapping, puts 10 records into a single-shard stream, and
    asserts all 10 arrive in one invocation with the standard Kinesis
    event envelope fields present.
    """
    function_name = f"lambda_func-{short_uid()}"
    stream_name = f"test-foobar-{short_uid()}"
    testutil.create_lambda_function(
        handler_file=TEST_LAMBDA_PYTHON_ECHO,
        func_name=function_name,
        runtime=LAMBDA_RUNTIME_PYTHON36,
    )
    arn = aws_stack.kinesis_stream_arn(stream_name, account_id="000000000000")
    lambda_client = aws_stack.create_external_boto_client("lambda")
    lambda_client.create_event_source_mapping(EventSourceArn=arn, FunctionName=function_name)

    def process_records(record):
        # listener callback: only asserts that records are received
        assert record

    aws_stack.create_kinesis_stream(stream_name, delete=True)
    kinesis_connector.listen_to_kinesis(
        stream_name=stream_name,
        listener_func=process_records,
        wait_until_started=True,
    )
    kinesis = aws_stack.create_external_boto_client("kinesis")
    stream_summary = kinesis.describe_stream_summary(StreamName=stream_name)
    # single shard ensures all records land in one batch
    self.assertEqual(1, stream_summary["StreamDescriptionSummary"]["OpenShardCount"])
    num_events_kinesis = 10
    kinesis.put_records(
        Records=[{
            "Data": "{}",
            "PartitionKey": "test_%s" % i
        } for i in range(0, num_events_kinesis)],
        StreamName=stream_name,
    )
    events = get_lambda_log_events(function_name)
    # all 10 records delivered in the first (single) invocation
    self.assertEqual(10, len(events[0]["Records"]))
    # every record carries the standard Kinesis event envelope fields
    self.assertIn("eventID", events[0]["Records"][0])
    self.assertIn("eventSourceARN", events[0]["Records"][0])
    self.assertIn("eventSource", events[0]["Records"][0])
    self.assertIn("eventVersion", events[0]["Records"][0])
    self.assertIn("eventName", events[0]["Records"][0])
    self.assertIn("invokeIdentityArn", events[0]["Records"][0])
    self.assertIn("awsRegion", events[0]["Records"][0])
    self.assertIn("kinesis", events[0]["Records"][0])
def check_invocation():
    # Exactly two log events must have been produced by the lambda.
    log_events = testutil.get_lambda_log_events(TEST_LAMBDA_NAME_PY3)
    self.assertEqual(len(log_events), 2)
def check_invocation():
    # Exactly two invocations should be visible in the lambda's logs.
    log_events = testutil.get_lambda_log_events(test_lambda_name)
    assert len(log_events) == 2
def get_logs():
    # At least one log event must have been emitted by the lambda.
    self.assertGreater(len(get_lambda_log_events(func_name)), 0)
def test_sqs_batch_lambda_forward(self):
    """11 SQS messages at batch size 5 must produce exactly 3 lambda invocations.

    Deploys an echo lambda bound to an SQS queue, sets the mapping's batch
    size to 5, sends 11 messages (10 in a batch call + 1 individually),
    waits until the queue drains, then asserts 3 invocations were logged.
    """
    sqs = aws_stack.connect_to_service("sqs")
    lambda_api = aws_stack.connect_to_service("lambda")
    lambda_name_queue_batch = "lambda_queue_batch-%s" % short_uid()
    # deploy test lambda connected to SQS queue
    sqs_queue_info = testutil.create_sqs_queue(lambda_name_queue_batch)
    queue_url = sqs_queue_info["QueueUrl"]
    resp = testutil.create_lambda_function(
        handler_file=TEST_LAMBDA_PYTHON_ECHO,
        func_name=lambda_name_queue_batch,
        event_source_arn=sqs_queue_info["QueueArn"],
        libs=TEST_LAMBDA_LIBS,
    )
    event_source_id = resp["CreateEventSourceMappingResponse"]["UUID"]
    lambda_api.update_event_source_mapping(UUID=event_source_id, BatchSize=5)
    messages_to_send = [
        {
            "Id": "message{:02d}".format(i),
            "MessageBody": "msgBody{:02d}".format(i),
            "MessageAttributes": {
                "CustomAttribute": {
                    "DataType": "String",
                    "StringValue": "CustomAttributeValue{:02d}".format(i),
                }
            },
        }
        for i in range(1, 12)
    ]
    # send 11 messages (which should get split into 3 batches)
    sqs.send_message_batch(QueueUrl=queue_url, Entries=messages_to_send[:10])
    sqs.send_message(
        QueueUrl=queue_url,
        MessageBody=messages_to_send[10]["MessageBody"],
        MessageAttributes=messages_to_send[10]["MessageAttributes"],
    )

    def wait_for_done():
        # Retried helper: passes only once the queue reports no pending,
        # delayed, or in-flight messages.
        attributes = sqs.get_queue_attributes(
            QueueUrl=queue_url,
            AttributeNames=[
                "ApproximateNumberOfMessages",
                "ApproximateNumberOfMessagesDelayed",
                "ApproximateNumberOfMessagesNotVisible",
            ],
        )["Attributes"]
        msg_count = int(attributes.get("ApproximateNumberOfMessages"))
        self.assertEqual(0, msg_count, "expecting queue to be empty")
        delayed_count = int(attributes.get("ApproximateNumberOfMessagesDelayed"))
        if delayed_count != 0:
            LOGGER.warning(
                "SQS delayed message count (actual/expected): %s/%s" % (delayed_count, 0)
            )
        not_visible_count = int(attributes.get("ApproximateNumberOfMessagesNotVisible"))
        if not_visible_count != 0:
            LOGGER.warning(
                "SQS messages not visible (actual/expected): %s/%s" % (not_visible_count, 0)
            )
        self.assertEqual(0, delayed_count, "no messages waiting for retry")
        self.assertEqual(0, delayed_count + not_visible_count, "no in flight messages")

    # wait for the queue to drain (max 60s)
    retry(wait_for_done, retries=12, sleep=5.0)
    events = get_lambda_log_events(lambda_name_queue_batch, 10)
    self.assertEqual(3, len(events), "expected 3 lambda invocations")
    testutil.delete_lambda_function(lambda_name_queue_batch)
    sqs.delete_queue(QueueUrl=queue_url)
def get_events():
    # Only log lines matching the records pattern count; exactly two expected.
    matching = get_lambda_log_events(
        function_name, regex_filter=r"event.*Records", logs_client=logs_client
    )
    assert len(matching) == 2
    return matching
def get_lambda_events():
    # Retried helper: fails until the lambda has logged at least one event.
    log_events = get_lambda_log_events(function_name, logs_client=logs_client)
    assert log_events
    return log_events
def assert_lambda_log_events():
    log_events = get_lambda_log_events(function_name=function_name, logs_client=logs_client)
    # lambda was invoked 1 time
    assert len(log_events[0]["Records"]) == 1
def test_disabled_event_source_mapping_with_dynamodb(
    self,
    create_lambda_function,
    lambda_client,
    dynamodb_resource,
    dynamodb_client,
    dynamodb_create_table,
    logs_client,
    dynamodbstreams_client,
    lambda_su_role,
):
    """Disabling a DynamoDB Streams event source mapping must stop lambda invocations.

    Fixture-based variant: creates a lambda and a streamed table, maps the
    stream to the lambda, waits for table+stream readiness, verifies one
    invocation for the first item, disables the mapping, and verifies the
    second item triggers no new invocation.
    """
    function_name = f"lambda_func-{short_uid()}"
    ddb_table = f"ddb_table-{short_uid()}"
    create_lambda_function(
        func_name=function_name,
        handler_file=TEST_LAMBDA_PYTHON_ECHO,
        runtime=LAMBDA_RUNTIME_PYTHON36,
        role=lambda_su_role,
    )
    latest_stream_arn = dynamodb_create_table(
        table_name=ddb_table, partition_key="id", stream_view_type="NEW_IMAGE"
    )["TableDescription"]["LatestStreamArn"]
    rs = lambda_client.create_event_source_mapping(
        FunctionName=function_name,
        EventSourceArn=latest_stream_arn,
        StartingPosition="TRIM_HORIZON",
        MaximumBatchingWindowInSeconds=1,
    )
    # UUID identifies the mapping for the later disable call
    uuid = rs["UUID"]

    def wait_for_table_created():
        # table must be ACTIVE before writes are reliable
        return (dynamodb_client.describe_table(
            TableName=ddb_table)["Table"]["TableStatus"] == "ACTIVE")

    assert poll_condition(wait_for_table_created, timeout=30)

    def wait_for_stream_created():
        # stream must be ENABLED before records flow to the mapping
        return (dynamodbstreams_client.describe_stream(
            StreamArn=latest_stream_arn)["StreamDescription"]
            ["StreamStatus"] == "ENABLED")

    assert poll_condition(wait_for_stream_created, timeout=30)

    table = dynamodb_resource.Table(ddb_table)
    items = [
        {
            "id": short_uid(),
            "data": "data1"
        },
        {
            "id": short_uid(),
            "data": "data2"
        },
    ]
    table.put_item(Item=items[0])

    def assert_events():
        events = get_lambda_log_events(function_name, logs_client=logs_client)
        # lambda was invoked 1 time
        assert 1 == len(events[0]["Records"])

    # might take some time against AWS
    retry(assert_events, sleep=3, retries=10)

    # disable event source mapping
    lambda_client.update_event_source_mapping(UUID=uuid, Enabled=False)
    table.put_item(Item=items[1])
    events = get_lambda_log_events(function_name, logs_client=logs_client)
    # lambda no longer invoked, still have 1 event
    assert 1 == len(events[0]["Records"])
def get_events():
    # Retried helper: passes once the expected number of invocations is logged.
    observed = get_lambda_log_events(function_name, logs_client=logs_client)
    assert len(observed) == expected_num_events
    return observed
def check_lambda_logs():
    # Exactly three batched invocations must be visible in the logs.
    invocations = get_lambda_log_events(lambda_name_queue_batch, 10)
    assert 3 == len(invocations), "expected 3 lambda invocations"
def test_sqs_batch_lambda_forward(self):
    """11 SQS messages at batch size 5 must produce exactly 3 lambda invocations.

    Legacy (python2.7-runtime) variant: deploys an echo lambda bound to an
    SQS queue, sets batch size 5, sends 11 messages, waits for the queue to
    drain, then asserts 3 invocations were logged.
    """
    sqs = aws_stack.connect_to_service('sqs')
    lambda_api = aws_stack.connect_to_service('lambda')
    lambda_name_queue_batch = 'lambda_queue_batch-%s' % short_uid()
    # deploy test lambda connected to SQS queue
    sqs_queue_info = testutil.create_sqs_queue(lambda_name_queue_batch)
    queue_url = sqs_queue_info['QueueUrl']
    resp = testutil.create_lambda_function(
        handler_file=TEST_LAMBDA_PYTHON_ECHO,
        func_name=lambda_name_queue_batch,
        event_source_arn=sqs_queue_info['QueueArn'],
        runtime=LAMBDA_RUNTIME_PYTHON27,
        libs=TEST_LAMBDA_LIBS)
    event_source_id = resp['CreateEventSourceMappingResponse']['UUID']
    lambda_api.update_event_source_mapping(UUID=event_source_id, BatchSize=5)
    messages_to_send = [{
        'Id': 'message{:02d}'.format(i),
        'MessageBody': 'msgBody{:02d}'.format(i),
        'MessageAttributes': {
            'CustomAttribute': {
                'DataType': 'String',
                'StringValue': 'CustomAttributeValue{:02d}'.format(i)
            }
        }
    } for i in range(1, 12)]
    # send 11 messages (which should get split into 3 batches)
    sqs.send_message_batch(QueueUrl=queue_url, Entries=messages_to_send[:10])
    sqs.send_message(
        QueueUrl=queue_url,
        MessageBody=messages_to_send[10]['MessageBody'],
        MessageAttributes=messages_to_send[10]['MessageAttributes'])

    def wait_for_done():
        # Retried helper: passes only once the queue reports no pending,
        # delayed, or in-flight messages.
        attributes = sqs.get_queue_attributes(
            QueueUrl=queue_url,
            AttributeNames=[
                'ApproximateNumberOfMessages',
                'ApproximateNumberOfMessagesDelayed',
                'ApproximateNumberOfMessagesNotVisible'
            ],
        )['Attributes']
        msg_count = int(attributes.get('ApproximateNumberOfMessages'))
        self.assertEqual(msg_count, 0, 'expecting queue to be empty')
        delayed_count = int(
            attributes.get('ApproximateNumberOfMessagesDelayed'))
        if delayed_count != 0:
            LOGGER.warning(
                'SQS delayed message count (actual/expected): %s/%s' %
                (delayed_count, 0))
        not_visible_count = int(
            attributes.get('ApproximateNumberOfMessagesNotVisible'))
        if not_visible_count != 0:
            LOGGER.warning(
                'SQS messages not visible (actual/expected): %s/%s' %
                (not_visible_count, 0))
        self.assertEqual(delayed_count, 0, 'no messages waiting for retry')
        self.assertEqual(delayed_count + not_visible_count, 0, 'no in flight messages')

    # wait for the queue to drain (max 60s)
    retry(wait_for_done, retries=12, sleep=5.0)
    events = get_lambda_log_events(lambda_name_queue_batch, 10)
    self.assertEqual(len(events), 3, 'expected 3 lambda invocations')
    testutil.delete_lambda_function(lambda_name_queue_batch)
    sqs.delete_queue(QueueUrl=queue_url)
def assert_invocations():
    # The second function must have been invoked exactly once.
    self.assertEqual(1, len(get_lambda_log_events(function_name2)))