def test_log_line_filtering_oom(mock_aio_post):
    """Only the REPORT line and the runtime-error line survive filtering."""
    # Keep logging disabled here; enabling it would retain every message,
    # interesting or not.
    mock_aio_post.return_value = aio_post_response()
    messages = [
        "START RequestId: b3c55437-3847-4230-a1ed-0e94425372e8 Version: $LATEST",
        "some garbage",
        "END RequestId: b3c55437-3847-4230-a1ed-0e94425372e8",
        "REPORT RequestId: b3c55437-3847-4230-a1ed-0e94425372e8 Duration: 245.44 ms",
        "RequestId: b3c55437-3847-4230-a1ed-0e94425372e8 Error: "
        "Runtime exited with error: signal: killed\n"
        "Runtime.ExitError\n",
    ]
    assert len(messages) == 5

    # Log entries are gzipped, base64 encoded, and wrapped in another JSON object.
    event = aws_log_events.create_aws_event(messages)
    function.lambda_handler(event, context)

    mock_aio_post.assert_called()
    posted_url = mock_aio_post.call_args[0][0]
    assert posted_url == "https://cloud-collector.newrelic.com/aws/lambda/v1"

    payload = gunzip_json_object(mock_aio_post.call_args[1]["data"])
    log_events = json.loads(payload["entry"])["logEvents"]
    assert len(log_events) == 2
    assert log_events[0]["message"].startswith("REPORT")
    assert "Error: Runtime exited" in log_events[1]["message"]

    # Note that header names are somehow lower-cased.
    headers = mock_aio_post.call_args[1]["headers"]
    assert headers["X-license-key"] == license_key
    assert headers["Content-encoding"] == "gzip"
def test_lambda_request_ids_are_extracted(mock_aio_post):
    """Each log line is tagged with the request id of its enclosing invocation."""
    mock_aio_post.return_value = aio_post_response()
    first_invocation_id = str(uuid.uuid4())
    second_invocation_id = str(uuid.uuid4())
    unexpected_request_id = str(uuid.uuid4())

    event = aws_log_events.create_aws_event([
        "START RequestId: {} Version: $LATEST".format(first_invocation_id),
        "2019-07-22T21:37:22.353Z {} Some Log Line with a random UUID".format(
            unexpected_request_id),
        "2019-07-22T21:37:22.353Z Doesn't have a RequestId",
        "END RequestId: {}".format(first_invocation_id),
        "START RequestId: {} Version: $LATEST".format(second_invocation_id),
    ])
    function.lambda_handler(event, context)

    mock_aio_post.assert_called()
    logs = gunzip_json_object(mock_aio_post.call_args[1]["data"])[0]["logs"]
    assert len(logs) == 5

    # The first four lines belong to the first invocation; the trailing START
    # switches the extracted request id to the second invocation.
    expected_ids = [first_invocation_id] * 4 + [second_invocation_id]
    for log_line, expected_id in zip(logs, expected_ids):
        assert log_line["timestamp"] == timestamp
        assert log_line["attributes"]["aws"]["lambda_request_id"] == expected_id
def test_log_line_filtering(mock_aio_post):
    """NR_LAMBDA_MONITORING, REPORT, and timeout lines survive filtering."""
    # Keep logging disabled here; enabling it would retain every message,
    # interesting or not.
    mock_aio_post.return_value = aio_post_response()
    messages = [
        "START RequestId: b3c55437-3847-4230-a1ed-0e94425372e8 Version: $LATEST",
        '[1,"NR_LAMBDA_MONITORING","H4sIAImox"]',
        "END RequestId: b3c55437-3847-4230-a1ed-0e94425372e8",
        "REPORT RequestId: b3c55437-3847-4230-a1ed-0e94425372e8 Duration: 245.44 ms",
        "2020-02-04T00:26:18.068Z b3c55437-3847-4230-a1ed-0e94425372e8 Task timed out"
        " after 3.00 seconds",
    ]
    assert len(messages) == 5

    # Log entries are gzipped, base64 encoded, and wrapped in another JSON object.
    event = aws_log_events.create_aws_event(messages)
    function.lambda_handler(event, context)

    mock_aio_post.assert_called()
    posted_url = mock_aio_post.call_args[0][0]
    assert posted_url == "https://cloud-collector.newrelic.com/aws/lambda/v1"

    payload = gunzip_json_object(mock_aio_post.call_args[1]["data"])
    log_events = json.loads(payload["entry"])["logEvents"]
    assert len(log_events) == 3
    assert log_events[0]["message"].startswith("[1,")
    assert log_events[1]["message"].startswith("REPORT")
    assert "Task timed out" in log_events[2]["message"]

    # Note that header names are somehow lower-cased.
    headers = mock_aio_post.call_args[1]["headers"]
    assert headers["X-license-key"] == license_key
    assert headers["Content-encoding"] == "gzip"
def test_big_payloads_are_split(mock_aio_post):
    """An oversized payload is split across multiple posts without losing messages."""
    mock_aio_post.side_effect = [aio_post_response(), aio_post_response()]
    message_count = 500
    messages = ["Test Message %s" % (index) for index in range(message_count)]
    assert (len(json.dumps(messages)) > function.MAX_PAYLOAD_SIZE
            ), "We do not have enough test data to force a split"
    event = aws_log_events.create_aws_event(messages)

    function.lambda_handler(event, context)

    # The payload should be split into multiple calls.
    assert mock_aio_post.call_count > 1

    # Each individual body must stay under the size limit.
    observed_messages = []
    for call in mock_aio_post.call_args_list:
        body = call[1]["data"]
        assert len(body) < function.MAX_PAYLOAD_SIZE
        for entry in gunzip_json_object(body)[0]["logs"]:
            observed_messages.append(entry["message"])

    # Across all calls, exactly the original messages must appear, in order.
    assert len(observed_messages) == message_count
    for observed, original in zip(observed_messages, messages):
        print(observed)
        print(original)
        assert observed == original
def test_big_payloads_are_split(mock_urlopen):
    """An oversized payload is split across multiple requests without losing messages."""
    mock_urlopen.return_value = urlopen_accepted_response()
    message_count = 500
    messages = ['Test Message %s' % (index) for index in range(message_count)]
    assert len(json.dumps(messages)) > function.MAX_PAYLOAD_SIZE, \
        'We do not have enough test data to force a split'
    event = aws_log_events.create_aws_event(messages)

    function.lambda_handler(event, context)

    # The payload should be split into multiple calls.
    assert mock_urlopen.call_count > 1

    # Each individual request body must stay under the size limit.
    observed_messages = []
    for call in mock_urlopen.call_args_list:
        sent_request = call[0][0]
        assert len(sent_request.data) < function.MAX_PAYLOAD_SIZE
        for entry in gunzip_json_object(sent_request.data)[0]['logs']:
            observed_messages.append(entry['message'])

    # Across all calls, exactly the original messages must appear, in order.
    assert len(observed_messages) == message_count
    for observed, original in zip(observed_messages, messages):
        print(observed)
        print(original)
        assert observed == original
def test_rds_enhanced_metrics(mock_aio_post):
    """RDS enhanced-metrics events are forwarded unfiltered to the /aws/v1 endpoint."""
    # Keep logging disabled here; enabling it would retain every message,
    # interesting or not.
    mock_aio_post.return_value = aio_post_response()
    messages = [
        "This is a RDS",
        "Enhanced metrics",
        "message with a lot of data",
    ]
    assert len(messages) == 3

    # Log entries are gzipped, base64 encoded, and wrapped in another JSON object.
    event = aws_rds_enhanced_log_events.create_aws_event(messages)
    function.lambda_handler(event, context)

    mock_aio_post.assert_called()
    posted_url = mock_aio_post.call_args[0][0]
    assert posted_url == "https://cloud-collector.newrelic.com/aws/v1"

    payload = gunzip_json_object(mock_aio_post.call_args[1]["data"])
    log_events = json.loads(payload["entry"])["logEvents"]
    assert len(log_events) == 3
    for log_event, original in zip(log_events, messages):
        assert log_event["message"] == original

    # Note that header names are somehow lower-cased.
    headers = mock_aio_post.call_args[1]["headers"]
    assert headers["X-license-key"] == license_key
    assert headers["Content-encoding"] == "gzip"
def test_when_first_call_fails_code_should_retry(mock_aio_post):
    """A single failed post is retried once and then succeeds."""
    # Fail the first call, succeed on the retry.
    mock_aio_post.side_effect = [urlopen_error_response(), aio_post_response()]
    event = aws_log_events.create_aws_event(["Test Message 1"])

    function.lambda_handler(event, context)

    assert mock_aio_post.call_count == 2
def test_when_call_always_fails_code_should_not_retry_forever(mock_urlopen):
    """When every attempt fails, the handler gives up after the retry budget."""
    # Every call fails.
    mock_urlopen.side_effect = urlopen_error_response()
    event = aws_log_events.create_aws_event(['Test Message 1'])

    # An exception is expected once the maximum number of retries is reached.
    with pytest.raises(function.MaxRetriesException):
        function.lambda_handler(event, context)
def test_when_first_two_calls_fail_code_should_retry(mock_urlopen):
    """Two failed attempts are followed by a third, successful, retry."""
    mock_urlopen.side_effect = [
        urlopen_error_response(),
        urlopen_error_response(),
        urlopen_accepted_response(),
    ]
    event = aws_log_events.create_aws_event(['Test Message 1'])

    function.lambda_handler(event, context)

    assert mock_urlopen.call_count == 3
def test_when_session_timeouts_exception_should_be_raised(mock_aio_session):
    """A session-level timeout must propagate out of the handler unchanged."""
    expected_message = "timeout_in_session"
    mock_aio_session.side_effect = asyncio.TimeoutError(expected_message)
    event = aws_log_events.create_aws_event(["Test Message 1"])

    with pytest.raises(asyncio.TimeoutError) as excinfo:
        function.lambda_handler(event, context)
        # Only reached if the handler swallowed the timeout.
        pytest.fail(
            "TimeoutError should have been raised by the ClientSession")

    assert expected_message == str(excinfo.value)
def test_id_field_is_not_added(mock_aio_post):
    """Forwarded log entries must not carry an 'id' field."""
    mock_aio_post.return_value = aio_post_response()
    event = aws_log_events.create_aws_event(["Test Message 1"])

    function.lambda_handler(event, context)

    mock_aio_post.assert_called()
    logs = gunzip_json_object(mock_aio_post.call_args[1]["data"])[0]["logs"]
    assert len(logs) == 1
    assert "id" not in logs[0]
def test_id_field_is_not_added(mock_urlopen):
    """Forwarded log entries must not carry an 'id' field."""
    mock_urlopen.return_value = urlopen_accepted_response()
    event = aws_log_events.create_aws_event(['Test Message 1'])

    function.lambda_handler(event, context)

    mock_urlopen.assert_called()
    sent_request = mock_urlopen.call_args[0][0]
    logs = gunzip_json_object(sent_request.data)[0]['logs']
    assert len(logs) == 1
    assert 'id' not in logs[0]
def test_logs_have_logstream_and_loggroup(mock_aio_post):
    """The common attributes carry the source log group and log stream."""
    mock_aio_post.return_value = aio_post_response()
    event = aws_log_events.create_aws_event(["Test Message 1"])

    function.lambda_handler(event, context)

    mock_aio_post.assert_called()
    common = gunzip_json_object(mock_aio_post.call_args[1]["data"])[0]["common"]
    aws_attributes = common["attributes"]["aws"]
    assert aws_attributes["logGroup"] == log_group_name
    assert aws_attributes["logStream"] == log_stream_name
def test_when_exception_is_thrown_it_should_be_raised(mock_aio_session):
    """An unexpected session exception must propagate out of the handler unchanged."""
    expected_message = "unexpected_exception_in_session"
    mock_aio_session.side_effect = IOError(expected_message)
    event = aws_log_events.create_aws_event(["Test Message 1"])

    with pytest.raises(IOError) as excinfo:
        function.lambda_handler(event, context)
        # Only reached if the handler swallowed the exception.
        pytest.fail(
            "An unexpected exception should have been raised by the ClientSession"
        )

    assert expected_message == str(excinfo.value)
def test_when_first_call_timeouts_code_should_retry(mock_aio_post):
    """Two timed-out posts are followed by a third, successful, retry."""
    mock_aio_post.side_effect = [
        aio_post_timeout(),
        aio_post_timeout(),
        aio_post_response(),
    ]
    event = aws_log_events.create_aws_event(["Test Message 1"])

    function.lambda_handler(event, context)

    assert mock_aio_post.call_count == 3
def test_message_fields_in_body(mock_urlopen):
    """Each forwarded log entry carries the timestamp and the original message."""
    mock_urlopen.return_value = urlopen_accepted_response()
    message = 'Test Message 1'
    event = aws_log_events.create_aws_event([message])

    function.lambda_handler(event, context)

    mock_urlopen.assert_called()
    sent_request = mock_urlopen.call_args[0][0]
    logs = gunzip_json_object(sent_request.data)[0]['logs']
    assert len(logs) == 1
    assert logs[0]['timestamp'] == timestamp
    assert logs[0]['message'] == message
def test_proper_headers_are_added(mock_urlopen):
    """The outgoing request carries the insert key, event source, and gzip headers."""
    mock_urlopen.return_value = urlopen_accepted_response()
    event = aws_log_events.create_aws_event(['Test Message 1'])

    function.lambda_handler(event, context)

    # Note that header names are somehow lower-cased.
    mock_urlopen.assert_called()
    sent_request = mock_urlopen.call_args[0][0]
    assert sent_request.get_header('X-insert-key') == logging_api_key
    assert sent_request.get_header('X-event-source') == 'logs'
    assert sent_request.get_header('Content-encoding') == 'gzip'
def test_proper_headers_are_added(mock_aio_post):
    """The outgoing post carries the license key, event source, and gzip headers."""
    mock_aio_post.return_value = aio_post_response()
    event = aws_log_events.create_aws_event(["Test Message 1"])

    function.lambda_handler(event, context)

    # Note that header names are somehow lower-cased.
    mock_aio_post.assert_called()
    headers = mock_aio_post.call_args[1]["headers"]
    assert headers["X-license-key"] == license_key
    assert headers["X-event-source"] == "logs"
    assert headers["Content-encoding"] == "gzip"
def test_that_json_array_is_not_parsed(mock_urlopen):
    """A log line that happens to be a JSON array passes through as a plain string."""
    mock_urlopen.return_value = urlopen_accepted_response()
    message = '[{"foo": "bar"}]'
    event = aws_log_events.create_aws_event([message])

    function.lambda_handler(event, context)

    mock_urlopen.assert_called()
    sent_request = mock_urlopen.call_args[0][0]
    logs = gunzip_json_object(sent_request.data)[0]['logs']
    assert len(logs) == 1
    assert logs[0]['timestamp'] == timestamp
    assert logs[0]['message'] == message
def test_message_fields_in_body(mock_aio_post):
    """Each forwarded log entry carries the timestamp and the original message."""
    mock_aio_post.return_value = aio_post_response()
    message = "Test Message 1"
    event = aws_log_events.create_aws_event([message])

    function.lambda_handler(event, context)

    mock_aio_post.assert_called()
    logs = gunzip_json_object(mock_aio_post.call_args[1]["data"])[0]["logs"]
    assert len(logs) == 1
    assert logs[0]["timestamp"] == timestamp
    assert logs[0]["message"] == message
def test_logs_have_logstream_and_loggroup(mock_urlopen):
    """The common attributes carry the source log group and log stream."""
    mock_urlopen.return_value = urlopen_accepted_response()
    event = aws_log_events.create_aws_event(['Test Message 1'])

    function.lambda_handler(event, context)

    mock_urlopen.assert_called()
    sent_request = mock_urlopen.call_args[0][0]
    attributes = gunzip_json_object(sent_request.data)[0]['common']['attributes']
    assert attributes['logGroup'] == log_group_name
    assert attributes['logStream'] == log_stream_name
def test_lambda_request_ids_are_extracted(mock_urlopen):
    """A leading UUID in a log line is extracted as the lambda request id."""
    mock_urlopen.return_value = urlopen_accepted_response()
    expected_request_id = str(uuid.uuid4())
    event = aws_log_events.create_aws_event(
        ["{} sweet message".format(expected_request_id)])

    function.lambda_handler(event, context)

    mock_urlopen.assert_called()
    sent_request = mock_urlopen.call_args[0][0]
    logs = gunzip_json_object(sent_request.data)[0]['logs']
    assert len(logs) == 1
    assert logs[0]['timestamp'] == timestamp
    assert logs[0]['attributes']['lambda_request_id'] == expected_request_id
def test_logs_have_plugin_info(mock_urlopen):
    """The common attributes identify the forwarding plugin."""
    mock_urlopen.return_value = urlopen_accepted_response()
    message = 'Test Message 1'
    event = aws_log_events.create_aws_event([message])

    function.lambda_handler(event, context)

    mock_urlopen.assert_called()
    sent_request = mock_urlopen.call_args[0][0]
    payload = gunzip_json_object(sent_request.data)
    logs = payload[0]['logs']
    assert len(logs) == 1
    assert logs[0]['timestamp'] == timestamp
    assert logs[0]['message'] == message
    assert payload[0]['common']['attributes']['plugin'] == function.PLUGIN
def test_multiple_messages(mock_aio_post):
    """All messages in one event are forwarded, preserving their order."""
    mock_aio_post.return_value = aio_post_response()
    expected_messages = ["Test Message 1", "Test Message 2", "Test Message 3"]
    event = aws_log_events.create_aws_event(expected_messages)

    function.lambda_handler(event, context)

    mock_aio_post.assert_called()
    logs = gunzip_json_object(mock_aio_post.call_args[1]["data"])[0]["logs"]
    assert len(logs) == 3
    for log_line, expected in zip(logs, expected_messages):
        assert log_line["message"] == expected
def test_multiple_messages(mock_urlopen):
    """All messages in one event are forwarded, preserving their order."""
    mock_urlopen.return_value = urlopen_accepted_response()
    expected_messages = ['Test Message 1', 'Test Message 2', 'Test Message 3']
    event = aws_log_events.create_aws_event(expected_messages)

    function.lambda_handler(event, context)

    mock_urlopen.assert_called()
    sent_request = mock_urlopen.call_args[0][0]
    logs = gunzip_json_object(sent_request.data)[0]['logs']
    assert len(logs) == 3
    for log_line, expected in zip(logs, expected_messages):
        assert log_line['message'] == expected
def test_logs_have_plugin_info(mock_aio_post):
    """The common attributes identify the forwarding plugin."""
    mock_aio_post.return_value = aio_post_response()
    message = "Test Message 1"
    event = aws_log_events.create_aws_event([message])

    function.lambda_handler(event, context)

    mock_aio_post.assert_called()
    payload = gunzip_json_object(mock_aio_post.call_args[1]["data"])
    logs = payload[0]["logs"]
    assert len(logs) == 1
    assert logs[0]["timestamp"] == timestamp
    assert logs[0]["message"] == message
    plugin = payload[0]["common"]["attributes"]["plugin"]
    assert plugin == function.LOGGING_PLUGIN_METADATA