def test_must_record_wrapped_user_exception(self, ContextMock):
    """A wrapped UserException must be re-raised unchanged, and the emitted
    commandRun metric must record the *wrapped* exception's name and the
    custom exit code."""
    ContextMock.get_current_context.return_value = self.context_mock
    expected_exception = UserException("Something went wrong", wrapped_from="CustomException")
    expected_exception.exit_code = 1235

    def real_fn():
        raise expected_exception

    with self.assertRaises(UserException) as context:
        track_command(real_fn)()
    self.assertEqual(
        context.exception,
        expected_exception,
        "Must re-raise the original exception object " "without modification",
    )

    # exitReason must reflect the wrapped exception, not "UserException"
    expected_attrs = _ignore_common_attributes({"exitReason": "CustomException", "exitCode": 1235})
    args, _ = self.telemetry_instance.emit.call_args_list[0]
    metric = args[0]
    # Use unittest assertion (bare `assert` is stripped under `python -O`)
    self.assertEqual(metric.get_metric_name(), "commandRun")
    self.assertGreaterEqual(metric.get_data().items(), expected_attrs.items())
def test_must_emit_true_warning_metric(self, secho_mock, TemplateWarningsCheckerMock, ContextMock):
    """When the warnings checker reports a warning, a templateWarning metric
    must be emitted with the warning name/count, and the message printed in
    yellow via secho."""
    ContextMock.get_current_context.return_value = self.context_mock
    template_warnings_checker_mock = TemplateWarningsCheckerMock.return_value = Mock()
    template_warnings_checker_mock.check_template_for_warning.return_value = "DummyWarningMessage"

    def real_fn():
        return True, "Dummy warning message"

    track_template_warnings(["DummyWarningName"])(real_fn)()

    expected_attrs = {
        "awsProfileProvided": False,
        "debugFlagProvided": False,
        "region": "myregion",
        "warningName": "DummyWarningName",
        "warningCount": 1,
    }
    args, _ = self.telemetry_instance.emit.call_args_list[0]
    metric = args[0]
    # Use unittest assertion (bare `assert` is stripped under `python -O`)
    self.assertEqual(metric.get_metric_name(), "templateWarning")
    self.assertGreaterEqual(metric.get_data().items(), expected_attrs.items())
    secho_mock.assert_called_with("WARNING: DummyWarningMessage", fg="yellow")
def test_must_record_any_exceptions(self, ContextMock):
    """Any non-UserException must be re-raised unchanged and recorded with
    its class name as exitReason and exit code 255."""
    ContextMock.get_current_context.return_value = self.context_mock
    expected_exception = KeyError("IO Error test")

    def real_fn():
        raise expected_exception

    with self.assertRaises(KeyError) as context:
        track_command(real_fn)()
    self.assertEqual(
        context.exception,
        expected_exception,
        "Must re-raise the original exception object " "without modification",
    )

    # Unhandled exceptions always use exit code 255
    expected_attrs = _ignore_common_attributes({"exitReason": "KeyError", "exitCode": 255})
    args, _ = self.telemetry_instance.emit.call_args_list[0]
    metric = args[0]
    # Use unittest assertion (bare `assert` is stripped under `python -O`)
    self.assertEqual(metric.get_metric_name(), "commandRun")
    self.assertGreaterEqual(metric.get_data().items(), expected_attrs.items())
def test_must_send_installed_metric_with_attributes(self, TelemetryClassMock):
    """send_installed_metric must emit an "installed" metric carrying the OS
    platform and the current telemetry-enabled flag."""
    telemetry_mock = TelemetryClassMock.return_value = Mock()
    self.gc_mock.return_value.telemetry_enabled = False

    send_installed_metric()

    args, _ = telemetry_mock.emit.call_args_list[0]
    metric = args[0]
    # Use unittest assertion (bare `assert` is stripped under `python -O`)
    self.assertEqual(metric.get_metric_name(), "installed")
    self.assertGreaterEqual(
        metric.get_data().items(),
        {"osPlatform": platform.system(), "telemetryEnabled": False}.items(),
    )
def test_must_emit_one_metric(self, ContextMock):
    """A successful command must produce exactly one emit() call, and it
    must be the commandRun metric."""
    ContextMock.get_current_context.return_value = self.context_mock

    def real_fn():
        pass

    track_command(real_fn)()

    args, _ = self.telemetry_instance.emit.call_args_list[0]
    metric = args[0]
    # Use unittest assertion (bare `assert` is stripped under `python -O`)
    self.assertEqual(metric.get_metric_name(), "commandRun")
    self.assertEqual(
        self.telemetry_instance.emit.mock_calls, [call(ANY)], "The one command metric must be sent"
    )
def test_must_emit_command_run_metric_with_sanitized_profile_value(self, ContextMock):
    """The profile name itself must never be sent; only the boolean
    awsProfileProvided flag appears in the commandRun metric."""
    ContextMock.get_current_context.return_value = self.context_mock
    self.context_mock.profile = "myprofilename"

    def real_fn():
        pass

    track_command(real_fn)()

    expected_attrs = _ignore_common_attributes({"awsProfileProvided": True})
    args, _ = self.telemetry_instance.emit.call_args_list[0]
    metric = args[0]
    # Use unittest assertion (bare `assert` is stripped under `python -O`)
    self.assertEqual(metric.get_metric_name(), "commandRun")
    self.assertGreaterEqual(metric.get_data().items(), expected_attrs.items())
def test_must_decorate_functions(self, ContextMock):
    """track_command must work as a plain decorator: it passes args/kwargs
    through, returns the wrapped function's result, and emits exactly one
    commandRun metric."""

    @track_command
    def real_fn(a, b=None):
        return "{} {}".format(a, b)

    actual = real_fn("hello", b="world")
    self.assertEqual(actual, "hello world")

    args, _ = self.telemetry_instance.emit.call_args_list[0]
    metric = args[0]
    # Use unittest assertion (bare `assert` is stripped under `python -O`)
    self.assertEqual(metric.get_metric_name(), "commandRun")
    self.assertEqual(
        self.telemetry_instance.emit.mock_calls,
        [call(ANY)],
        "The command metric must be emitted when used as a decorator",
    )
def test_must_record_function_duration(self, ContextMock):
    """The commandRun metric's "duration" attribute must be at least as
    large as the time the wrapped function actually took."""
    ContextMock.get_current_context.return_value = self.context_mock
    sleep_duration = 0.01  # 10 milliseconds

    def real_fn():
        time.sleep(sleep_duration)

    track_command(real_fn)()

    # commandRun metric should be the only call to emit; grab the metric
    # passed as its first positional argument.
    args, _ = self.telemetry_instance.emit.call_args_list[0]
    metric = args[0]
    # Use unittest assertion (bare `assert` is stripped under `python -O`)
    self.assertEqual(metric.get_metric_name(), "commandRun")
    # NOTE(review): the comparison is against the sleep duration in seconds,
    # so any millisecond-valued duration also satisfies it — confirm the
    # intended unit of "duration".
    self.assertGreaterEqual(
        metric.get_data()["duration"],
        sleep_duration,
        "Measured duration must be in milliseconds and "
        "greater than equal to the sleep duration",
    )
def test_must_emit_command_run_metric(self, ContextMock):
    """A successful run must emit a commandRun metric containing the full
    set of standard attributes (profile/debug flags, region, command name,
    duration, exit reason/code)."""
    ContextMock.get_current_context.return_value = self.context_mock

    def real_fn():
        pass

    track_command(real_fn)()

    expected_attrs = {
        "awsProfileProvided": False,
        "debugFlagProvided": False,
        "region": "myregion",
        "commandName": "fakesam local invoke",
        "duration": ANY,  # wall-clock dependent; only presence is checked
        "exitReason": "success",
        "exitCode": 0,
    }
    args, _ = self.telemetry_instance.emit.call_args_list[0]
    metric = args[0]
    # Use unittest assertion (bare `assert` is stripped under `python -O`)
    self.assertEqual(metric.get_metric_name(), "commandRun")
    self.assertGreaterEqual(metric.get_data().items(), expected_attrs.items())