def test_content_with_exact_len_not_trimmed():
    """A record whose serialized size equals the limit must pass through untrimmed."""
    message = "WALTHAM, Mass.--(BUSINESS WIRE)-- Software intelligence company Dynatrace (NYSE: DT)"
    saved_limit = logs_ingest.main.content_length_limit

    # arrange: set the limit to exactly the entry's serialized length
    log_entry = create_log_entry(message)
    logs_ingest.main.content_length_limit = len(json.dumps(log_entry))

    # act (always restore the module-level limit afterwards)
    try:
        parsed = parse_record(log_entry, SelfMonitoring(execution_time=datetime.utcnow()))
    finally:
        logs_ingest.main.content_length_limit = saved_limit

    # assert: content survives intact, no truncation marker
    assert parsed == {
        "cloud.provider": "Azure",
        "severity": "INFO",
        "content": '{"content": "WALTHAM, Mass.--(BUSINESS WIRE)-- Software intelligence company Dynatrace (NYSE: DT)"}'
    }
def test_self_monitoring_metrics_with_zero_values():
    """Counters that are zero (or empty) must be left out of the metric data."""
    monitoring = SelfMonitoring(execution_time=execution_time)
    monitoring.dynatrace_connectivities = [DynatraceConnectivity.Ok]
    monitoring.too_old_records = 0
    monitoring.parsing_errors = 0
    monitoring.all_requests = 1
    monitoring.processing_time = 0.0878758430480957
    monitoring.sending_time = 0.3609178066253662
    monitoring.too_long_content_size = []

    prepared = monitoring.prepare_metric_data()

    assert prepared == expected_metric_data_without_zeros_metrics
def test_all_self_monitoring_metrics():
    """With every counter populated, all metric series should be emitted."""
    monitoring = SelfMonitoring(execution_time=execution_time)
    monitoring.dynatrace_connectivities = [
        DynatraceConnectivity.Other,
        DynatraceConnectivity.Other,
        DynatraceConnectivity.TooManyRequests,
    ]
    monitoring.too_old_records = 6
    monitoring.parsing_errors = 3
    monitoring.all_requests = 3
    monitoring.processing_time = 0.0878758430480957
    monitoring.sending_time = 0.3609178066253662
    monitoring.too_long_content_size = [2000, 5000, 6000, 40000]

    prepared = monitoring.prepare_metric_data()

    assert prepared == all_expected_metric_data
def test_content_trimmed():
    """Content longer than the limit is cut to the limit and marked [TRUNCATED]."""
    limit = 100
    saved_limit = logs_ingest.main.content_length_limit

    # arrange: force a limit far below the serialized entry size
    log_entry = create_log_entry(log_message)
    logs_ingest.main.content_length_limit = limit

    # act, restoring the module-level limit regardless of outcome
    try:
        parsed = parse_record(log_entry, SelfMonitoring(execution_time=datetime.utcnow()))
    finally:
        logs_ingest.main.content_length_limit = saved_limit

    # assert: exact length and the truncation marker at the end
    trimmed = '{"content": "WALTHAM, Mass.--(BUSINESS WIRE)-- Software intelligence company Dynatrace (N[TRUNCATED]'
    assert len(parsed["content"]) == limit
    assert parsed["content"] == trimmed
def test_log_forwarder_setup():
    """parse_record must stamp records with the configured cloud.log_forwarder."""
    saved_forwarder = logs_ingest.main.cloud_log_forwarder
    logs_ingest.main.cloud_log_forwarder = "MyLogForwarderSetup"

    input_record = {
        "cloud.provider": "Azure",
        "severity": "INFO",
        "content": '{"content": "WALTHAM, Mass.--(BUSINESS WIRE)-- Software intelligence company Dynatrace (NYSE: DT)"}'
    }

    # act, restoring the module-level forwarder setting regardless of outcome
    try:
        parsed = parse_record(input_record, SelfMonitoring(execution_time=datetime.utcnow()))
    finally:
        logs_ingest.main.cloud_log_forwarder = saved_forwarder

    assert parsed['cloud.log_forwarder'] == "MyLogForwarderSetup"
def self_monitoring():
    """Build a fresh SelfMonitoring instance stamped with the current UTC time."""
    monitoring = SelfMonitoring(execution_time=datetime.utcnow())
    return monitoring
def test_trimming_attribute_values(monkeypatch: MonkeyPatchFixture):
    """Attribute values longer than the configured limit must be trimmed."""
    # monkeypatch restores the module attribute automatically after the test
    monkeypatch.setattr(main, 'attribute_value_length_limit', 4)

    parsed = main.parse_record(record, SelfMonitoring(execution_time=datetime.utcnow()))

    assert parsed == expected_output_attribute_values_trimmed
def test_default():
    """With default settings, parse_record yields the expected output record."""
    parsed = main.parse_record(record, SelfMonitoring(execution_time=datetime.utcnow()))
    assert parsed == expected_output
def test_not_known_category():
    """Records with an unrecognized category still parse to the expected shape."""
    parsed = parse_record(not_known_category_record,
                          SelfMonitoring(execution_time=datetime.utcnow()))
    assert parsed == not_known_category_expected_output
def test_function_app_logs():
    """Function-app log records parse to the expected output."""
    parsed = parse_record(function_app_logs_record,
                          SelfMonitoring(execution_time=datetime.utcnow()))
    assert parsed == function_app_logs_expected_output
def test_api_management_service():
    """API Management service records parse to the expected output."""
    parsed = parse_record(record, SelfMonitoring(execution_time=datetime.utcnow()))
    assert parsed == expected_output
def test_event_hub_namespace():
    """Event Hub namespace records parse to the expected output."""
    parsed = parse_record(record, SelfMonitoring(execution_time=datetime.utcnow()))
    assert parsed == expected_output
def test_kube_controller_manager():
    """kube-controller-manager records parse to the expected output."""
    parsed = parse_record(kube_controller_manager_record,
                          SelfMonitoring(execution_time=datetime.utcnow()))
    assert parsed == kube_controller_manager_expected_output
def test_kube_audit():
    """kube-audit records parse to the expected output."""
    parsed = parse_record(kube_audit_record,
                          SelfMonitoring(execution_time=datetime.utcnow()))
    assert parsed == kube_audit_expected_output
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import time
from datetime import datetime

from logs_ingest.dynatrace_client import send_logs
from logs_ingest.self_monitoring import SelfMonitoring

# Script for testing the sending implementation and the logs ingest endpoint
# responses; loads the Dynatrace URL and token from local.settings.json.

source_directory = os.path.dirname(os.path.realpath(__file__))
local_settings_json_path = os.path.join(source_directory, "../../local.settings.json")
# JSON is UTF-8 by specification — open with an explicit encoding instead of
# relying on the platform default, which can misdecode the settings file.
with open(local_settings_json_path, encoding="utf-8") as local_settings_json_file:
    local_settings_json = json.load(local_settings_json_file)

logs = [
    {
        "cloud.provider": "Azure",
        "timestamp": time.time(),
        # Deliberately oversized payload to exercise the endpoint's
        # content-length handling.
        "content": "TOO_LONG" * 8192
    }
    for _ in range(1)  # the loop index was unused; '_' marks it as ignored
]

send_logs(local_settings_json["Values"]["DYNATRACE_URL"],
          local_settings_json["Values"]["DYNATRACE_ACCESS_KEY"],
          logs,
          SelfMonitoring(execution_time=datetime.utcnow()))