def test_rules_close(self):
    config = DotDict()
    config.logger = Mock()
    config.tag = 'test.rule'
    config.action = 'apply_all_rules'
    config['RuleTestLaughable.laughable'] = 'wilma'
    config['RuleTestDangerous.dangerous'] = 'dwight'
    config.rules_list = DotDict()
    config.rules_list.class_list = [
        ('RuleTestLaughable', RuleTestLaughable, 'RuleTestLaughable'),
        ('RuleTestDangerous', RuleTestDangerous, 'RuleTestDangerous'),
    ]
    trs = transform_rules.TransformRuleSystem(config)
    trs.close()
    assert trs.rules[0].close_counter == 1
    assert trs.rules[1].close_counter == 1
def test_rules_close_if_close_method_available(self):
    config = DotDict()
    config.logger = Mock()
    config.tag = 'test.rule'
    config.action = 'apply_all_rules'
    config.rules_list = DotDict()
    config.rules_list.class_list = [
        (
            'RuleTestNoCloseMethod',
            RuleTestNoCloseMethod,
            'RuleTestNoCloseMethod'
        ),
        (
            'RuleTestDangerous',
            RuleTestDangerous,
            'RuleTestDangerous'
        ),
    ]
    trs = transform_rules.TransformRuleSystem(config)
    trs.close()
    assert len(config.logger.debug.mock_calls) == 3
    config.logger.debug.assert_any_call(
        'trying to close %s',
        'socorro.unittest.lib.test_transform_rules.'
        'RuleTestNoCloseMethod'
    )
    config.logger.debug.assert_any_call(
        'trying to close %s',
        'socorro.unittest.lib.test_transform_rules.'
        'RuleTestDangerous'
    )
def test_rules_in_config(self):
    config = DotDict()
    config.tag = 'test.rule'
    config.action = 'apply_all_rules'
    config['RuleTestLaughable.laughable'] = 'wilma'
    config['RuleTestDangerous.dangerous'] = 'dwight'
    config.rules_list = DotDict()
    config.rules_list.class_list = [
        ('RuleTestLaughable', RuleTestLaughable, 'RuleTestLaughable'),
        ('RuleTestDangerous', RuleTestDangerous, 'RuleTestDangerous'),
    ]
    trs = transform_rules.TransformRuleSystem(config)
    assert isinstance(trs.rules[0], RuleTestLaughable)
    assert isinstance(trs.rules[1], RuleTestDangerous)
    assert trs.rules[0].predicate(None)
    assert trs.rules[1].action(None)
def create_basic_fake_processor():
    """Create a fake processor configuration."""
    fake_processor = DotDict()
    fake_processor.c_signature_tool = c_signature_tool
    fake_processor.config = DotDict()
    fake_processor.processor_notes = []
    return fake_processor
def __init__(self, config=None, quit_check=None):
    if quit_check:
        self._quit_check = quit_check
    else:
        self._quit_check = self._null_quit_check
    self.rules = []
    if not config:
        config = DotDict()
    if 'chatty_rules' not in config:
        config.chatty_rules = False
    self.config = config
    if "rules_list" in config:
        self.tag = config.tag
        self.act = getattr(self, config.action)
        list_of_rules = config.rules_list.class_list
        for a_rule_class_name, a_rule_class, ns_name in list_of_rules:
            try:
                # each rule gets its own namespaced slice of the config...
                self.rules.append(
                    a_rule_class(config[ns_name])
                )
            except KeyError:
                # ...and falls back to the shared config when no such
                # namespace exists
                self.rules.append(
                    a_rule_class(config)
                )
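# A minimal sketch of the configuration shape the constructor above expects,
# assembled the same way the tests in this section build it. RuleTestLaughable
# is one of the rule test doubles those tests use; the dotted key is what the
# config[ns_name] lookup resolves, and a rule without such a namespace falls
# back to the shared config through the KeyError branch.
config = DotDict()
config.logger = Mock()
config.tag = 'test.rule'
config.action = 'apply_all_rules'
config['RuleTestLaughable.laughable'] = 'wilma'
config.rules_list = DotDict()
config.rules_list.class_list = [
    ('RuleTestLaughable', RuleTestLaughable, 'RuleTestLaughable'),
]
trs = transform_rules.TransformRuleSystem(config)
assert isinstance(trs.rules[0], RuleTestLaughable)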
def test_subprocess_fail(self, mocked_subprocess_module):
    config = self.get_basic_config()
    raw_crash = copy.copy(canonical_standard_raw_crash)
    raw_dumps = {config.dump_field: 'a_fake_dump.dump'}
    processed_crash = CDotDict()
    processed_crash.product = 'Firefox'
    processed_crash.os_name = 'Windows 386'
    processed_crash.cpu_name = 'x86'
    processed_crash.signature = 'EnterBaseline'
    processed_crash['json_dump.crashing_thread.frames'] = [
        DotDict({'not_module': 'not-a-module'}),
        DotDict({'module': 'a-module'}),
    ]
    processor_meta = self.get_basic_processor_meta()
    mocked_subprocess_handle = mocked_subprocess_module.Popen.return_value
    mocked_subprocess_handle.stdout.read.return_value = None
    mocked_subprocess_handle.wait.return_value = -1
    rule = JitCrashCategorizeRule(config)
    # the call to be tested
    rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)
    eq_(processor_meta.processor_notes, [])
    ok_(processed_crash.classifications.jit.category is None)
    eq_(processed_crash.classifications.jit.category_return_code, -1)
def __init__(self, config, quit_check=None):
    super(CrashStorageSystemForLocalFS, self).__init__(config)
    # new_config is an adapter to allow the modern configman enabled
    # file system crash storage classes to use the old style configuration
    new_config = DotDict()
    new_config.logger = config.logger
    new_config.primary = DotDict()
    new_config.primary.storage_class = FileSystemRawCrashStorage
    new_config.primary.std_fs_root = config.localFS
    new_config.primary.dump_dir_count = config.localFSDumpDirCount
    new_config.primary.dump_gid = config.localFSDumpGID
    new_config.primary.dump_permissions = config.localFSDumpPermissions
    new_config.primary.dir_permissions = config.localFSDirPermissions
    new_config.primary.json_file_suffix = config.jsonFileSuffix
    new_config.primary.dump_file_suffix = config.dumpFileSuffix
    new_config.primary.logger = config.logger
    new_config.fallback = DotDict()
    new_config.fallback.storage_class = FileSystemRawCrashStorage
    new_config.fallback.std_fs_root = config.fallbackFS
    new_config.fallback.dump_dir_count = config.fallbackDumpDirCount
    new_config.fallback.dump_gid = config.fallbackDumpGID
    new_config.fallback.dump_permissions = config.fallbackDumpPermissions
    new_config.fallback.dir_permissions = config.fallbackDirPermissions
    new_config.fallback.json_file_suffix = config.jsonFileSuffix
    new_config.fallback.dump_file_suffix = config.dumpFileSuffix
    new_config.fallback.logger = config.logger
    self.crash_storage = FallbackCrashStorage(new_config, quit_check)
def _setup_config(self):
    config = DotDict()
    config.rabbitmq_class = MagicMock()
    config.priority_queue_name = 'priority'
    config.standard_queue_name = 'standard'
    config.reprocessing_queue_name = 'reprocessing'
    return config
def testCountStackWalkerFailures_fail(self, statsd_obj):
    config = DotDict()
    config.counter_class = Mock()
    config.rule_name = 'stackwalker_timeout_kills'
    config.statsd_class = Mock()
    config.statsd_host = 'some_statsd_host'
    config.statsd_port = 3333
    config.statsd_prefix = ''
    config.active_list = ['stackwalker_timeout_kills']
    a_rule = CountStackWalkerFailures(config)
    raw_crash_mock = Mock()
    raw_dumps_mock = Mock()
    processed_crash_mock = Mock()
    proc_meta = DotDict()
    proc_meta.processor_notes = [
        'hello',
        'this is a list of notes from the processor',
        'it has information about the what the processor',
        'thought was important',
    ]
    ok_(
        not a_rule._predicate(
            raw_crash_mock,
            raw_dumps_mock,
            processed_crash_mock,
            proc_meta
        )
    )
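# A hedged sketch, inferred from the test above rather than copied from the
# shipped rule: _predicate plausibly answers True only when one of the
# processor notes mentions an entry from config.active_list, which is why a
# note list that never says 'stackwalker_timeout_kills' yields False.
def _predicate_sketch(active_list, processor_notes):
    return any(
        target in note
        for target in active_list
        for note in processor_notes
    )

assert not _predicate_sketch(
    ['stackwalker_timeout_kills'],
    ['hello', 'this is a list of notes from the processor']
)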
def test_rule_simple(self):
    fake_config = DotDict()
    fake_config.logger = Mock()
    fake_config.chatty_rules = False
    fake_config.chatty = False
    r1 = transform_rules.Rule(fake_config)
    eq_(r1.predicate(None, None, None, None), True)
    eq_(r1.action(None, None, None, None), True)
    eq_(r1.act(), (True, True))

    class BadPredicate(transform_rules.Rule):
        def _predicate(self, *args, **kwargs):
            return False

    r2 = BadPredicate(fake_config)
    eq_(r2.predicate(None, None, None, None), False)
    eq_(r2.action(None, None, None, None), True)
    eq_(r2.act(), (False, None))

    class BadAction(transform_rules.Rule):
        def _action(self, *args, **kwargs):
            return False

    r3 = BadAction(fake_config)
    eq_(r3.predicate(None, None, None, None), True)
    eq_(r3.action(None, None, None, None), False)
    eq_(r3.act(), (True, False))
def test_overlay_config_2(self):
    n = config_manager.Namespace()
    n.add_option('a')
    n.a.default = 1
    n.a.doc = 'the a'
    n.b = 17
    n.c = c = config_manager.Namespace()
    c.x = 'fred'
    c.y = 3.14159
    c.add_option('z')
    c.z.default = 99
    c.z.doc = 'the 99'
    # use a distinct name for the manager so it doesn't shadow the
    # namespace `c` built above
    cm = config_manager.ConfigurationManager(
        [n],
        use_admin_controls=False,
        #use_config_files=False,
        use_auto_help=False,
        argv_source=[]
    )
    o = {"a": 2, "c.z": 22, "c.x": 'noob', "c.y": "2.89", "n": "not here"}
    cm.overlay_config_recurse(o, ignore_mismatches=True)
    d = cm.get_config()
    e = DotDict()
    e.a = 2
    e.b = 17
    e.c = DotDict()
    e.c.x = 'noob'
    e.c.y = 2.89
    e.c.z = 22
    self.assertEqual(d, e)
def get_standard_config(self):
    config = DotDict()
    config.logger = mock.MagicMock()
    config.services = DotDict()
    config.services.services_controller = DotDict()

    class Service1(object):
        pass

    config.services.service1 = DotDict()
    config.services.service1.service_implementation_class = Service1

    class Service2(object):
        pass

    config.services.service2 = DotDict()
    config.services.service2.service_implementation_class = Service2

    config.services.services_controller.service_list = [
        ('service1', '/submit', Service1),
        ('service2', '/unsubmit', Service2),
    ]
    config.web_server = DotDict()
    self.mocked_web_server = mock.MagicMock()
    config.web_server.wsgi_server_class = mock.MagicMock(
        return_value=self.mocked_web_server
    )
    return config
def test_parse_arguments_with_class_validators(self):
    class NumberConverter(object):
        def clean(self, value):
            conv = {'one': 1, 'two': 2, 'three': 3}
            try:
                return conv[value]
            except KeyError:
                raise ValueError('No idea?!')

    # define a set of filters in which the type is not a builtin but a
    # custom validator object
    filters = [
        ("param1", 0, NumberConverter()),
    ]
    arguments = {
        "param1": "one",
    }
    params_exp = DotDict()
    params_exp.param1 = 1
    params = external_common.parse_arguments(
        filters,
        arguments,
        modern=True
    )
    assert params == params_exp

    # note that a ValueError becomes a BadArgumentError
    arguments = {
        "param1": "will cause a ValueError in NumberConverter.clean",
    }
    with pytest.raises(BadArgumentError):
        external_common.parse_arguments(filters, arguments, modern=True)
def test_incoming_job_stream_priority(self):
    config = DotDict()
    config.database_class = mock.Mock()
    config.transaction_executor_class = mock.Mock()
    config.batchJobLimit = 10
    config.logger = mock.Mock()

    class StubbedIterators(LegacyNewCrashSource):
        def _normal_jobs_iter(self):
            while True:
                yield None

        def _priority_jobs_iter(self):
            values = [
                (1, '1234', 1),
                (2, '2345', 1),
                (3, '3456', 1),
                (4, '4567', 1),
                (5, '5678', 1),
            ]
            for x in values:
                yield x

    new_crash_source = StubbedIterators(
        config,
        processor_name='victor1234'
    )
    expected = ('1234', '2345', '3456', '4567', '5678')
    for x, y in zip(new_crash_source, expected):
        self.assertEqual(x, ((y,), {}))
    self.assertEqual(len([x for x in new_crash_source]), 5)
def test_iteritems_breadth_first(self):
    d = {
        'a': {'aa': 13, 'ab': 14},
        'b': {'ba': {'baa': 0, 'bab': 1}, 'bb': {'bba': 2}},
        'c': 9,
        'd': {'dd': 2},
    }
    e = [
        ('a.aa', 13),
        ('a.ab', 14),
        ('b.ba.baa', 0),
        ('b.ba.bab', 1),
        ('b.bb.bba', 2),
        ('c', 9),
        ('d.dd', 2),
    ]
    a = [x for x in iteritems_breadth_first(d)]
    e = sorted(e)
    a = sorted(a)
    self.assertEqual(a, e)
    # try a round trip
    dd = DotDict()
    for k, v in a:
        dd.assign(k, v)
    ddkv = sorted(iteritems_breadth_first(dd))
    self.assertEqual(e, ddkv)
def test_process_crash_existing_processed_crash(self):
    cm = ConfigurationManager(
        definition_source=Processor2015.get_required_config(),
        values_source_list=[{'rule_sets': '[]'}],
    )
    config = cm.get_config()
    config.logger = Mock()
    config.processor_name = 'dwight'
    p = Processor2015(config)
    raw_crash = DotDict()
    raw_dumps = {}
    processed_crash = DotDict()
    processed_crash.processor_notes = "we've been here before; yep"
    processed_crash.started_datetime = '2014-01-01T00:00:00'
    with patch('socorro.processor.processor_2015.utc_now') as faked_utcnow:
        faked_utcnow.return_value = '2015-01-01T00:00:00'
        processed_crash = p.process_crash(
            raw_crash,
            raw_dumps,
            processed_crash
        )
    ok_(processed_crash.success)
    eq_(processed_crash.started_datetime, '2015-01-01T00:00:00')
    eq_(processed_crash.startedDateTime, '2015-01-01T00:00:00')
    eq_(processed_crash.completed_datetime, '2015-01-01T00:00:00')
    eq_(processed_crash.completeddatetime, '2015-01-01T00:00:00')
    eq_(
        processed_crash.processor_notes,
        "dwight; Processor2015; earlier processing: 2014-01-01T00:00:00; "
        "we've been here before; yep"
    )
def test_everything_we_hoped_for(self, mocked_subprocess_module):
    config = self.get_basic_config()
    raw_crash = copy.copy(canonical_standard_raw_crash)
    raw_dumps = {config.dump_field: 'a_fake_dump.dump'}
    processed_crash = DotDict()
    processed_crash.product = 'Firefox'
    processed_crash.os_name = 'Windows 386'
    processed_crash.cpu_name = 'x86'
    processed_crash.signature = 'EnterBaseline'
    processed_crash['json_dump.crashing_thread.frames'] = [
        DotDict({'not_module': 'not-a-module'}),
        DotDict({'module': 'a-module'}),
    ]
    processor_meta = get_basic_processor_meta()
    mocked_subprocess_handle = mocked_subprocess_module.Popen.return_value
    mocked_subprocess_handle.stdout.read.return_value = 'EXTRA-SPECIAL'
    mocked_subprocess_handle.wait.return_value = 0
    rule = JitCrashCategorizeRule(config)
    rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)
    assert processor_meta.processor_notes == []
    assert processed_crash.classifications.jit.category == 'EXTRA-SPECIAL'
    assert processed_crash.classifications.jit.category_return_code == 0
def setup_config(self, statsd_class):
    config = DotDict()
    config.statsd_class = statsd_class
    config.statsd_host = 'localhost'
    config.statsd_port = 3333
    config.statsd_prefix = 'prefix'
    return config
def test_rules_close_bubble_close_errors(self):
    config = DotDict()
    config.logger = Mock()
    config.tag = 'test.rule'
    config.action = 'apply_all_rules'
    config.rules_list = DotDict()
    config.rules_list.class_list = [
        (
            'TestRuleTestBrokenCloseMethod',
            TestRuleTestBrokenCloseMethod,
            'TestRuleTestBrokenCloseMethod'
        ),
    ]
    trs = transform_rules.TransformRuleSystem(config)
    assert_raises(
        AttributeError,
        trs.close
    )
    assert len(config.logger.debug.mock_calls) == 1
    config.logger.debug.assert_any_call(
        'trying to close %s',
        'collector.unittest.lib.test_transform_rules.'
        'TestRuleTestBrokenCloseMethod'
    )
def _get_mocked_config(self):
    config = DotDict()
    config.database = DotDict()
    config.database.database_class = mock.Mock()
    config.database.transaction_executor_class = mock.Mock()
    config.logger = SilentFakeLogger()
    self.config = config
def test_rule_simple(self):
    fake_config = DotDict()
    fake_config.logger = Mock()
    r1 = transform_rules.Rule(fake_config)
    assert r1.predicate(None, None, None, None) is True
    assert r1.action(None, None, None, None) is True
    assert r1.act() == (True, True)

    class BadPredicate(transform_rules.Rule):
        def _predicate(self, *args, **kwargs):
            return False

    r2 = BadPredicate(fake_config)
    assert r2.predicate(None, None, None, None) is False
    assert r2.action(None, None, None, None) is True
    assert r2.act() == (False, None)

    class BadAction(transform_rules.Rule):
        def _action(self, *args, **kwargs):
            return False

    r3 = BadAction(fake_config)
    assert r3.predicate(None, None, None, None) is True
    assert r3.action(None, None, None, None) is False
    assert r3.act() == (True, False)
def test_deleting_attributes(self):
    dd = DotDict()
    dd.name = 'peter'
    dd.age = 31
    del dd.name
    del dd.age
    self.assertEqual(dict(dd), {})
def test_convert_raw_crash_to_processed_crash_unexpected_error(self):
    config = setup_config_with_mocks()
    mocked_transform_rules_str = (
        "socorro.processor.legacy_processor.TransformRuleSystem"
    )
    with mock.patch(mocked_transform_rules_str) as m_transform_class:
        m_transform = mock.Mock()
        m_transform_class.return_value = m_transform
        m_transform.attach_mock(mock.Mock(), "apply_all_rules")
        utc_now_str = "socorro.processor.legacy_processor.utc_now"
        with mock.patch(utc_now_str) as m_utc_now:
            m_utc_now.return_value = datetime(
                2012, 5, 4, 15, 11,
                tzinfo=UTC
            )
            raw_crash = DotDict()
            raw_crash.uuid = "3bc4bcaa-b61d-4d1f-85ae-30cb32120504"
            raw_crash.submitted_timestamp = "2012-05-04T15:33:33"
            raw_dump = {"upload_file_minidump": "abcdef"}
            leg_proc = LegacyCrashProcessor(config, config.mock_quit_fn)
            started_timestamp = datetime(2012, 5, 4, 15, 10, tzinfo=UTC)
            leg_proc._log_job_start = mock.Mock(
                return_value=started_timestamp
            )
            basic_processed_crash = DotDict()
            basic_processed_crash.uuid = raw_crash.uuid
            basic_processed_crash.success = False
            basic_processed_crash.hang_type = 0
            basic_processed_crash.java_stack_trace = None
            leg_proc._create_basic_processed_crash = mock.Mock(
                return_value=basic_processed_crash
            )
            leg_proc._get_temp_dump_pathname = mock.Mock(
                return_value="/tmp/x"
            )
            leg_proc._log_job_end = mock.Mock()
            processed_crash_update_dict = DotDict()
            processed_crash_update_dict.success = True
            leg_proc._do_breakpad_stack_dump_analysis = mock.Mock(
                side_effect=Exception("nobody expects the spanish "
                                      "inquisition")
            )
            # here's the call being tested
            processed_crash = leg_proc.convert_raw_crash_to_processed_crash(
                raw_crash,
                raw_dump
            )
            self.assertEqual(1, leg_proc._log_job_end.call_count)
            leg_proc._log_job_end.assert_called_with(
                datetime(2012, 5, 4, 15, 11, tzinfo=UTC),
                False,
                raw_crash.uuid
            )
            e = {
                "processor_notes": "testing_processor:2012; unrecoverable "
                                   "processor error",
                "completeddatetime": datetime(2012, 5, 4, 15, 11, tzinfo=UTC),
                "success": False,
                "uuid": raw_crash.uuid,
                "hang_type": 0,
                "java_stack_trace": None,
                "additional_minidumps": [],
            }
            self.assertEqual(e, processed_crash)
            leg_proc._statistics.assert_has_calls(
                [
                    mock.call.incr("jobs"),
                    mock.call.incr("restarts"),
                    mock.call.incr("errors"),
                ],
                any_order=True
            )
def _create_minimal_processed_crash(self):
    processed_crash = DotDict()
    processed_crash.processor = DotDict()
    processed_crash.processor.name = self.config.processor_name
    processed_crash.processor.notes = []
    #processed_crash.classifications = DotDict()
    processed_crash.signature = ''
    return processed_crash
def _analyze_header(self, ooid, dump_analysis_line_iterator,
                    submitted_timestamp, processor_notes):
    for x in zip(xrange(5), dump_analysis_line_iterator):
        pass
    dump_analysis_line_iterator.next()
    processed_crash_update = DotDict()
    processed_crash_update.crashedThread = 17
    processed_crash_update.os_name = "Windows NT"
    return processed_crash_update
def test_constructor1(self):
    config = DotDict()
    config.quit_on_empty_queue = False
    tm = TaskManager(config)
    assert tm.config == config
    assert tm.task_func == default_task_func
    assert tm.quit is False
def get_standard_config(self):
    config = DotDict()
    config.search_root = None
    config.dump_suffix = '.dump'
    config.dump_field = "upload_file_minidump"
    config.logger = mock.MagicMock()
    return config
def _add_argument_from_original_source(self, qualified_name, option):
    argparse_foreign_data = option.foreign_data.argparse
    if argparse_foreign_data.flags.subcommand:
        # this argument represents a subcommand, so we must set up the
        # subparsers
        self.subcommand = argparse_foreign_data
        self.subparser_orignal_args = argparse_foreign_data.subparsers
        self.subcommand_configman_option = option
    else:
        new_arguments = DotDict()
        new_arguments.args = argparse_foreign_data.args
        new_arguments.kwargs = copy.copy(argparse_foreign_data.kwargs)
        new_arguments.qualified_name = qualified_name
        new_arguments.owning_subparser_name = \
            argparse_foreign_data.owning_subparser_name
        if new_arguments.args == (qualified_name.split('.')[-1],):
            new_arguments.args = (qualified_name,)
        elif 'dest' in new_arguments.kwargs:
            if new_arguments.kwargs['dest'] != qualified_name:
                new_arguments.kwargs['dest'] = qualified_name
        else:
            new_arguments.kwargs['dest'] = qualified_name
        try:
            new_arguments.kwargs['dest'] = \
                new_arguments.kwargs['dest'].replace('$', '')
        except KeyError:
            # there was no 'dest' key, so we can ignore this error
            pass
        self.arguments_for_building_argparse.append(new_arguments)
def test_key_errors(self):
    dd = DotDict()
    try:
        dd.name
        raise AssertionError("should have raised KeyError")
    except KeyError:
        pass
    #self.assertRaises(KeyError, getattr(dd, 'name'))
    self.assertEqual(dd.get('age'), None)
    self.assertEqual(dd.get('age', 0), 0)
def test__ack_crash(self):
    config = self._setup_config()
    connection = Mock()
    ack_token = DotDict()
    ack_token.delivery_tag = 1
    crash_id = 'some-crash-id'
    crash_queue = RabbitMQCrashQueue(config)
    crash_queue._ack_crash(connection, crash_id, ack_token)
    connection.channel.basic_ack.assert_called_once_with(delivery_tag=1)
def test_constructor(self):
    faked_connection_object = Mock()
    config = DotDict()
    conn = Connection(
        config,
        faked_connection_object
    )
    assert conn.config is config
    assert conn.connection is faked_connection_object
    faked_connection_object.channel.assert_called_once_with()
    assert (
        faked_connection_object.channel.return_value
        .queue_declare.call_count == 3
    )
    expected = [
        call(queue='socorro.normal', durable=True),
        call(queue='socorro.priority', durable=True),
        call(queue='socorro.reprocessing', durable=True),
    ]
    assert (
        faked_connection_object.channel.return_value
        .queue_declare.call_args_list == expected
    )
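# A sketch of the Connection.__init__ behavior the assertions above pin down;
# this is reconstructed from the test, not copied from the implementation,
# and the class name is a placeholder.
class ConnectionSketch(object):
    def __init__(self, config, connection):
        self.config = config
        self.connection = connection
        # one channel, three durable queue declarations
        channel = self.connection.channel()
        for queue_name in ('socorro.normal',
                           'socorro.priority',
                           'socorro.reprocessing'):
            channel.queue_declare(queue=queue_name, durable=True)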
def test_external_fails(self, mocked_subprocess_module):
    config = self.get_basic_config()
    raw_crash = copy.copy(canonical_standard_raw_crash)
    raw_dumps = {config.dump_field: 'a_fake_dump.dump'}
    processed_crash = DotDict()
    processor_meta = get_basic_processor_meta()
    mocked_subprocess_handle = mocked_subprocess_module.Popen.return_value
    mocked_subprocess_handle.stdout.read.return_value = '{}'
    mocked_subprocess_handle.wait.return_value = 124
    rule = ExternalProcessRule(config)
    rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)
    assert processed_crash.bogus_command_result == {}
    assert processed_crash.bogus_command_return_code == 124
    assert processor_meta.processor_notes == []
def test_get_raw_crash(self):
    config = self.get_standard_config()
    sub_walker = SubmitterFileSystemWalkerSource(config)
    raw = ('{"name":"Gabi", '
           '"submitted_timestamp":"%d"}' % time.time())
    fake_raw_crash = DotDict(json.loads(raw))
    mocked_get_raw_crash = mock.Mock(return_value=fake_raw_crash)
    sub_walker.get_raw_crash = mocked_get_raw_crash
    path_tuple = [
        '6611a662-e70f-4ba5-a397-69a3a2121129.dump',
        '6611a662-e70f-4ba5-a397-69a3a2121129.flash1.dump',
        '6611a662-e70f-4ba5-a397-69a3a2121129.flash2.dump',
    ]
    raw_crash = sub_walker.get_raw_crash(path_tuple)
    ok_(isinstance(raw_crash, DotDict))
    eq_(raw_crash['name'], 'Gabi')
def test_basic_postgres_save_processed_success_2(self):
    config = DotDict()
    config.database_class = mock.MagicMock()
    config.transaction_executor_class = \
        TransactionExecutorWithInfiniteBackoff
    config.redactor_class = mock.Mock()
    config.backoff_delays = [1]
    config.wait_log_interval = 10
    config.logger = mock.Mock()
    mocked_database_connection_source = config.database_class.return_value
    mocked_connection = (
        mocked_database_connection_source.return_value
        .__enter__.return_value
    )
    mocked_cursor = \
        mocked_connection.cursor.return_value.__enter__.return_value
    fetch_all_returns = [
        ((666,),),
        None,
        ((23,),),
    ]

    def fetch_all_func(*args):
        result = fetch_all_returns.pop(0)
        return result

    mocked_cursor.fetchall = fetch_all_func
    # the call to be tested
    crashstorage = PostgreSQLCrashStorage(config)
    crashstorage.save_processed(a_processed_crash)
    eq_(mocked_database_connection_source.call_count, 1)
    eq_(mocked_cursor.execute.call_count, 6)
    # check correct fragments
    sql_fragments = [
        "UPDATE reports_20120402",
        'select id from plugins',
        'insert into plugins',
        'delete from plugins_reports_20120402',
        'insert into plugins_reports_20120402',
    ]
    for a_call, a_fragment in zip(mocked_cursor.execute.call_args_list,
                                  sql_fragments):
        ok_(a_fragment in a_call[0][0])
def test_transform_polystorage_error_with_sentry_configured_failing(
    self, is_enabled, mock_get_hub, caplogpp
):
    caplogpp.set_level('DEBUG')
    mock_hub = mock.MagicMock()
    # mock this to throw an error if it gets called, because it
    # shouldn't get called
    mock_hub.capture_exception.side_effect = ValueError('sentry error')
    mock_get_hub.return_value = mock_hub
    # set up the processor and mock .save_raw_and_processed() to raise
    # an exception
    config = self.get_standard_config()
    pa = ProcessorApp(config)
    pa._setup_source_and_destination()
    pa.source.get_raw_crash.return_value = DotDict({'raw': 'crash'})
    pa.source.get_raw_dumps_as_files.return_value = {}
    first_exc_info = (NameError, NameError('waldo'), 'fake tb 1')
    second_exc_info = (AssertionError, AssertionError(False), 'fake tb 2')
    expected_exception = PolyStorageError()
    expected_exception.exceptions.append(first_exc_info)
    expected_exception.exceptions.append(second_exc_info)
    pa.destination.save_raw_and_processed.side_effect = expected_exception
    # make sure the PolyStorageError is raised and not the error from
    # .capture_exception()
    with pytest.raises(PolyStorageError):
        pa.transform('mycrashid')
    # assert calls to the logger -- one set for each of the errors in
    # the PolyStorageError
    expected = [
        ('Unable to report error with Sentry', WHATEVER),
        ('Sentry DSN is not configured and an exception happened', None),
        ('Exception occurred', first_exc_info),
        ('error in processing or saving crash mycrashid', None),
        ('Unable to report error with Sentry', WHATEVER),
        ('Sentry DSN is not configured and an exception happened', None),
        ('Exception occurred', second_exc_info),
        ('error in processing or saving crash mycrashid', None),
    ]
    actual = [(rec.message, rec.exc_info) for rec in caplogpp.records]
    assert actual == expected
def test_transform_polystorage_error_with_raven_configured_successful(
    self, mock_raven
):
    captured_exceptions = []  # shared list the mock closes over

    def mock_capture_exception(exc_info=None):
        captured_exceptions.append(exc_info)
        return 'someidentifier'

    raven_mock_client = mock.MagicMock()
    raven_mock_client.captureException.side_effect = mock_capture_exception
    mock_raven.Client.return_value = raven_mock_client
    config = self.get_standard_config(
        sentry_dsn='https://[email protected]/project'
    )
    pa = ProcessorApp(config)
    pa._setup_source_and_destination()
    pa.source.get_raw_crash.return_value = DotDict({'raw': 'crash'})
    pa.source.get_raw_dumps_as_files.return_value = {}

    def mocked_save_raw_and_processed(*_):
        exception = PolyStorageError()
        exception.exceptions.append(NameError('waldo'))
        exception.exceptions.append(AssertionError(False))
        raise exception

    pa.destination.save_raw_and_processed.side_effect = (
        mocked_save_raw_and_processed
    )
    # the important thing is that this is the exception that is raised
    # and not something from the raven error handling
    with pytest.raises(PolyStorageError):
        pa.transform('mycrashid')
    config.logger.info.assert_called_with(
        'Error captured in Sentry! Reference: someidentifier'
    )
    assert len(captured_exceptions) == 2
    captured_exception, captured_exception_2 = captured_exceptions
    assert captured_exception.__class__ == NameError
    assert captured_exception.message == 'waldo'
    assert captured_exception_2.__class__ == AssertionError
    assert captured_exception_2.message is False
def get_basic_config(self):
    config = CDotDict()
    config.logger = Mock()
    config.chatty = True
    config.dump_field = 'upload_file_minidump'
    config.stackwalk_command_line = (
        'timeout -s KILL 30 $minidump_stackwalk_pathname '
        '--raw-json $rawfilePathname $dumpfilePathname '
        '$processor_symbols_pathname_list 2>/dev/null'
    )
    config.minidump_stackwalk_pathname = '/bin/stackwalker'
    config.processor_symbols_pathname_list = (
        '/mnt/socorro/symbols/symbols_ffx,'
        '/mnt/socorro/symbols/symbols_sea,'
        '/mnt/socorro/symbols/symbols_tbrd,'
        '/mnt/socorro/symbols/symbols_sbrd,'
        '/mnt/socorro/symbols/symbols_os'
    )
    config.symbol_cache_path = '/mnt/socorro/symbols'
    return config
def test_save_raw_crash_no_legacy(self):
    config = self._setup_config()
    config.filter_on_legacy_processing = False
    crash_store = RabbitMQCrashStorage(config)

    with patch(
        'socorro.external.rabbitmq.crashstorage.retry'
    ) as retry_mock:
        # test for "legacy_processing" missing from crash
        crash_store.save_raw_crash(
            raw_crash=DotDict(),
            dumps=DotDict(),
            crash_id='crash_id'
        )
        retry_mock.assert_called_with(
            crash_store.rabbitmq,
            crash_store.quit_check,
            crash_store._save_raw_crash,
            crash_id='crash_id'
        )

    with patch(
        'socorro.external.rabbitmq.crashstorage.retry'
    ) as retry_mock:
        # test for normal save
        raw_crash = DotDict()
        raw_crash.legacy_processing = 0
        crash_store.save_raw_crash(
            raw_crash=raw_crash,
            dumps=DotDict(),
            crash_id='crash_id'
        )
        retry_mock.assert_called_with(
            crash_store.rabbitmq,
            crash_store.quit_check,
            crash_store._save_raw_crash,
            crash_id='crash_id'
        )

    with patch(
        'socorro.external.rabbitmq.crashstorage.retry'
    ) as retry_mock:
        # test for save without regard to "legacy_processing" value
        raw_crash = DotDict()
        raw_crash.legacy_processing = 5
        crash_store.save_raw_crash(
            raw_crash=raw_crash,
            dumps=DotDict(),
            crash_id='crash_id'
        )
        retry_mock.assert_called_with(
            crash_store.rabbitmq,
            crash_store.quit_check,
            crash_store._save_raw_crash,
            crash_id='crash_id'
        )
def _execute_external_process(self, crash_id, command_pathname,
                              command_line, processor_meta):
    output, return_code = execute_external_process(
        command_pathname=command_pathname,
        command_line=command_line,
        processor_meta=processor_meta,
        interpret_output=self._interpret_output,
    )
    if not isinstance(output, Mapping):
        msg = "MDSW produced unexpected output: %s" % str(output)[:20]
        processor_meta.processor_notes.append(msg)
        self.logger.warning(msg + " (%s)" % crash_id)
        output = {}
    stackwalker_data = DotDict()
    stackwalker_data.json_dump = output
    stackwalker_data.mdsw_return_code = return_code
    stackwalker_data.mdsw_status_string = output.get(
        "status", "unknown error")
    stackwalker_data.success = stackwalker_data.mdsw_status_string == "OK"
    self.metrics.incr(
        "run",
        tags=[
            "outcome:%s" % ("success" if stackwalker_data.success else "fail"),
            "exitcode:%s" % return_code,
        ],
    )
    if return_code == 124:
        msg = "MDSW timeout (SIGKILL)"
        processor_meta.processor_notes.append(msg)
        self.logger.warning(msg + " (%s)" % crash_id)
    elif return_code != 0 or not stackwalker_data.success:
        msg = "MDSW failed with %s: %s" % (
            return_code,
            stackwalker_data.mdsw_status_string,
        )
        # subprocess.Popen with shell=False returns negative exit codes
        # where the number is the signal that got kicked up
        if return_code == -6:
            msg = msg + " (SIGABRT)"
        processor_meta.processor_notes.append(msg)
        self.logger.warning(msg + " (%s)" % crash_id)
    return stackwalker_data, return_code
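# A hedged sketch of an _interpret_output hook compatible with the method
# above: it has to turn the subprocess's stdout into a Mapping, or return
# something the isinstance(output, Mapping) guard rejects. The note wording
# mirrors the ExternalProcessRule tests in this section; the exact signature
# is an assumption, not the shipped code.
import json

def _interpret_output_sketch(fp, processor_meta, command_pathname):
    data = fp.read()
    try:
        return json.loads(data)
    except ValueError as x:
        # matches the note format asserted elsewhere in this section:
        # '<command> output failed in json: <error>'
        processor_meta.processor_notes.append(
            '%s output failed in json: %s' % (command_pathname, x)
        )
        return {}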
def test_rules_close_bubble_close_errors(self):
    config = DotDict()
    config.logger = Mock()
    config.tag = 'test.rule'
    config.action = 'apply_all_rules'
    config.rules_list = DotDict()
    config.rules_list.class_list = [
        ('RuleTestBrokenCloseMethod',
         RuleTestBrokenCloseMethod,
         'RuleTestBrokenCloseMethod'),
    ]
    trs = transform_rules.TransformRuleSystem(config)
    assert_raises(AttributeError, trs.close)
    assert len(config.logger.debug.mock_calls) == 1
    config.logger.debug.assert_any_call(
        'trying to close %s',
        'socorro.unittest.lib.test_transform_rules.'
        'RuleTestBrokenCloseMethod'
    )
def get_standard_config(self):
    config = DotDict()
    config.logger = mock.MagicMock()
    config.throttler = mock.MagicMock()
    config.collector = DotDict()
    config.collector.collector_class = BreakpadCollector
    config.collector.dump_id_prefix = 'bp-'
    config.collector.dump_field = 'dump'
    config.collector.accept_submitted_crash_id = False
    config.collector.accept_submitted_legacy_processing = False
    config.crash_storage = mock.MagicMock()
    return config
def test_rules_in_config(self):
    config = DotDict()
    config.chatty_rules = False
    config.chatty = False
    config.tag = 'test.rule'
    config.action = 'apply_all_rules'
    config['TestRuleTestLaughable.laughable'] = 'wilma'
    config['TestRuleTestDangerous.dangerous'] = 'dwight'
    config.rules_list = DotDict()
    config.rules_list.class_list = [
        ('TestRuleTestLaughable',
         TestRuleTestLaughable,
         'TestRuleTestLaughable'),
        ('TestRuleTestDangerous',
         TestRuleTestDangerous,
         'TestRuleTestDangerous')
    ]
    trs = transform_rules.TransformRuleSystem(config)
    ok_(isinstance(trs.rules[0], TestRuleTestLaughable))
    ok_(isinstance(trs.rules[1], TestRuleTestDangerous))
    ok_(trs.rules[0].predicate(None))
    ok_(trs.rules[1].action(None))
def test_rules_close(self):
    config = DotDict()
    config.chatty_rules = False
    config.chatty = False
    config.tag = 'test.rule'
    config.action = 'apply_all_rules'
    config['TestRuleTestLaughable.laughable'] = 'wilma'
    config['TestRuleTestDangerous.dangerous'] = 'dwight'
    config.rules_list = DotDict()
    config.rules_list.class_list = [
        ('TestRuleTestLaughable',
         TestRuleTestLaughable,
         'TestRuleTestLaughable'),
        ('TestRuleTestDangerous',
         TestRuleTestDangerous,
         'TestRuleTestDangerous')
    ]
    trs = transform_rules.TransformRuleSystem(config)
    trs.close()
    eq_(trs.rules[0].close_counter, 1)
    eq_(trs.rules[1].close_counter, 1)
def test_rule_exceptions_send_to_sentry_with_crash_id(self, mock_raven):
    def mock_capture_exception():
        return 'someidentifier'

    client = MagicMock()
    extras = []

    def mock_context_merge(context):
        extras.append(context['extra'])

    def mock_Client(**config):
        client.config = config
        client.context.merge.side_effect = mock_context_merge
        client.captureException.side_effect = mock_capture_exception
        return client

    mock_raven.Client.side_effect = mock_Client
    fake_config = DotDict()
    fake_config.logger = Mock()
    fake_config.chatty_rules = False
    fake_config.chatty = False
    fake_config.sentry = DotDict()
    fake_config.sentry.dsn = (
        'https://*****:*****@sentry.example.com/01'
    )

    class BadPredicate(transform_rules.Rule):
        def _predicate(self, *args, **kwargs):
            raise NameError("highwater")

    p = BadPredicate(fake_config)
    raw_crash = {'uuid': 'ABC123'}
    assert p.predicate(raw_crash) is False
    fake_config.logger.info.assert_called_with(
        'Error captured in Sentry! Reference: someidentifier'
    )
    # when the client was created and the extra context merged, we
    # expect it to have included both a tag and the crash_id
    assert len(extras) == 1
    assert extras[0]['tag'] == 'predicate'
    assert extras[0]['crash_id'] == 'ABC123'
class FakeStorageSource(object): def __init__(self, config, namespace=""): self.store = DotDict({ "1234": DotDict({ "ooid": "1234", "Product": "FireSquid", "Version": "1.0" }), "1235": DotDict({ "ooid": "1235", "Product": "ThunderRat", "Version": "1.0" }), "1236": DotDict({ "ooid": "1236", "Product": "Caminimal", "Version": "1.0" }), "1237": DotDict({ "ooid": "1237", "Product": "Fennicky", "Version": "1.0" }), }) self.number_of_close_calls = 0 def get_raw_crash(self, ooid): return self.store[ooid] def get_raw_dumps(self, ooid): return {"upload_file_minidump": "this is a fake dump"} def new_crashes(self): for k in self.store.keys(): yield k def close(self): self.number_of_close_calls += 1
class FakeStorageSource(object):
    def __init__(self, config, namespace='', quit_check_callback=None):
        self.store = DotDict({
            '1234': DotDict({
                'ooid': '1234',
                'Product': 'FireSquid',
                'Version': '1.0'
            }),
            '1235': DotDict({
                'ooid': '1235',
                'Product': 'ThunderRat',
                'Version': '1.0'
            }),
            '1236': DotDict({
                'ooid': '1236',
                'Product': 'Caminimal',
                'Version': '1.0'
            }),
            '1237': DotDict({
                'ooid': '1237',
                'Product': 'Fennicky',
                'Version': '1.0'
            }),
        })
        self.number_of_close_calls = 0

    def get_raw_crash(self, ooid):
        return self.store[ooid]

    def get_raw_dumps(self, ooid):
        return {'upload_file_minidump': 'this is a fake dump'}

    def new_crashes(self):
        for k in self.store.keys():
            yield k

    def close(self):
        self.number_of_close_calls += 1
def test_external_fails_2(self, mocked_subprocess_module):
    config = self.get_basic_config()
    raw_crash = copy.copy(canonical_standard_raw_crash)
    raw_dumps = {config.dump_field: 'a_fake_dump.dump'}
    processed_crash = DotDict()
    processor_meta = get_basic_processor_meta()
    mocked_subprocess_handle = mocked_subprocess_module.Popen.return_value
    # this data will fail in json.loads and throw an error
    mocked_subprocess_handle.stdout.read.return_value = '{ff'
    mocked_subprocess_handle.wait.return_value = -1
    rule = ExternalProcessRule(config)
    rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)
    assert processed_crash.bogus_command_result == {}
    assert processed_crash.bogus_command_return_code == -1
    assert (
        'bogus_command output failed in json: Expecting property name'
        in processor_meta.processor_notes[0]
    )
def test_transform_polystorage_error_without_raven_configured(self):
    config = self.get_standard_config()
    pa = ProcessorApp(config)
    pa._setup_source_and_destination()
    pa.source.get_raw_crash.return_value = DotDict({'raw': 'crash'})
    pa.source.get_raw_dumps_as_files.return_value = {}

    def mocked_save_raw_and_processed(*_):
        exception = PolyStorageError()
        exception.exceptions.append(NameError('waldo'))
        raise exception

    pa.destination.save_raw_and_processed.side_effect = (
        mocked_save_raw_and_processed
    )
    # the important thing is that this is the exception that is raised
    # and not something from the raven error handling
    assert_raises(PolyStorageError, pa.transform, 'mycrashid')
    config.logger.warning.assert_called_with(
        'Raven DSN is not configured and an exception happened'
    )
def __init__(self, config, quit_check_callback=None):
    """instantiate all the subordinate crashstorage instances

    parameters:
        config - a configman dot dict holding configuration information
        quit_check_callback - a function to be called periodically during
                              long running operations

    instance variables:
        self.storage_namespaces - the list of the namespaces in which the
                                  subordinate instances are stored
        self.stores - instances of the subordinate crash stores
    """
    super(PolyCrashStorage, self).__init__(config, quit_check_callback)
    self.storage_namespaces = (
        config.storage_classes.subordinate_namespace_names
    )
    self.stores = ConfigmanDotDict()
    for a_namespace in self.storage_namespaces:
        self.stores[a_namespace] = config[a_namespace].crashstorage_class(
            config[a_namespace],
            quit_check_callback
        )
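# A sketch of the configuration shape the constructor above walks, with
# hypothetical namespace names and a mock standing in for a real crashstorage
# class: every name listed under storage_classes.subordinate_namespace_names
# must also exist as a config namespace carrying a crashstorage_class.
config = ConfigmanDotDict()
config.storage_classes = ConfigmanDotDict()
config.storage_classes.subordinate_namespace_names = ['store0', 'store1']
for ns in config.storage_classes.subordinate_namespace_names:
    config[ns] = ConfigmanDotDict()
    config[ns].crashstorage_class = Mock()  # stands in for a real class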
def test_save_raw_crash_normal(self):
    config = self._setup_config()
    crash_store = RabbitMQCrashStorage(config)

    with patch(
        'socorro.external.rabbitmq.crashstorage.retry'
    ) as retry_mock:
        # test for "legacy_processing" missing from crash
        crash_store.save_raw_crash(
            raw_crash=DotDict(),
            dumps=DotDict(),
            crash_id='crash_id'
        )
        assert not retry_mock.called

    with patch(
        'socorro.external.rabbitmq.crashstorage.retry'
    ) as retry_mock:
        # test for normal save
        raw_crash = DotDict()
        raw_crash.legacy_processing = 0
        crash_store.save_raw_crash(
            raw_crash=raw_crash,
            dumps=DotDict(),
            crash_id='crash_id'
        )
        retry_mock.assert_called_with(
            crash_store.rabbitmq,
            crash_store.quit_check,
            crash_store._save_raw_crash,
            crash_id='crash_id'
        )

    with patch(
        'socorro.external.rabbitmq.crashstorage.retry'
    ) as retry_mock:
        # test for save rejection because of "legacy_processing"
        raw_crash = DotDict()
        raw_crash.legacy_processing = 5
        crash_store.save_raw_crash(
            raw_crash=raw_crash,
            dumps=DotDict(),
            crash_id='crash_id'
        )
        assert not retry_mock.called
def test_transform_success(self):
    config = self.get_standard_config()
    pa = ProcessorApp(config)
    pa._setup_source_and_destination()
    fake_raw_crash = DotDict()
    mocked_get_raw_crash = mock.Mock(return_value=fake_raw_crash)
    pa.source.get_raw_crash = mocked_get_raw_crash
    fake_dump = 'fake dump'
    mocked_get_raw_dump = mock.Mock(return_value=fake_dump)
    pa.source.get_raw_dump = mocked_get_raw_dump
    mocked_convert_raw_crash_to_processed_crash = mock.Mock(return_value=7)
    pa.processor.convert_raw_crash_to_processed_crash = \
        mocked_convert_raw_crash_to_processed_crash
    pa.destination.save_processed = mock.Mock()
    pa.transform(17)
    pa.source.get_raw_crash.assert_called_with(17)
    pa.source.get_raw_dump.assert_called_with(17)
    pa.processor.convert_raw_crash_to_processed_crash.assert_called_with(
        fake_raw_crash,
        fake_dump
    )
    pa.destination.save_processed.assert_called_with(7)
def add_parser(self, *args, **kwargs):
    """Each time a subparser action is used to create a new parser
    object, we must save the original args & kwargs. In a later phase of
    configman, we'll need to reproduce the subparsers exactly, without
    resorting to copying. We save the args & kwargs in the 'foreign_data'
    section of the configman option that corresponds with the subparser
    action."""
    command_name = args[0]
    new_kwargs = kwargs.copy()
    new_kwargs['configman_subparsers_option'] = self._configman_option
    new_kwargs['subparser_name'] = command_name
    subparsers = self._configman_option.foreign_data.argparse.subparsers
    a_subparser = super(ConfigmanSubParsersAction, self).add_parser(
        *args,
        **new_kwargs
    )
    subparsers[command_name] = DotDict({
        "args": args,
        "kwargs": new_kwargs,
        "subparser": a_subparser
    })
    return a_subparser
def test_transform_polystorage_error_with_raven_configured_failing(
    self, mock_raven
):
    def mock_capture_exception(exc_info=None):
        raise ValueError('Someone is wrong on the Internet')

    raven_mock_client = mock.MagicMock()
    raven_mock_client.captureException.side_effect = mock_capture_exception
    mock_raven.Client.return_value = raven_mock_client
    config = self.get_standard_config(
        sentry_dsn='https://[email protected]/project'
    )
    pa = ProcessorApp(config)
    pa._setup_source_and_destination()
    pa.source.get_raw_crash.return_value = DotDict({'raw': 'crash'})
    pa.source.get_raw_dumps_as_files.return_value = {}

    def mocked_save_raw_and_processed(*_):
        exception = PolyStorageError()
        exception.exceptions.append(NameError('waldo'))
        exception.exceptions.append(AssertionError(False))
        raise exception

    pa.destination.save_raw_and_processed.side_effect = (
        mocked_save_raw_and_processed
    )
    # the important thing is that this is the exception that is raised
    # and not something from the raven error handling
    assert_raises(
        PolyStorageError,
        pa.transform,
        'mycrashid'
    )
    config.logger.error.assert_called_with(
        'Unable to report error with Raven',
        exc_info=True
    )
def test_redact(self):
    d = DotDict()
    # these keys survive redaction
    d['a.b.c'] = 11
    d['sensitive.x'] = 2
    d['not_url'] = 'not a url'
    # these keys do not survive redaction
    d['url'] = 'http://very.embarassing.com'
    d['email'] = '*****@*****.**'
    d['user_id'] = '3333'
    d['exploitability'] = 'yep'
    d['json_dump.sensitive'] = 22
    d['upload_file_minidump_flash1.json_dump.sensitive'] = 33
    d['upload_file_minidump_flash2.json_dump.sensitive'] = 44
    d['upload_file_minidump_browser.json_dump.sensitive.exploitable'] = 55
    d['upload_file_minidump_browser.json_dump.sensitive.secret'] = 66
    d['memory_info'] = {'incriminating_memory': 'call the FBI'}
    ok_('json_dump' in d)
    config = DotDict()
    config.forbidden_keys = Redactor.required_config.forbidden_keys.default
    expected_surviving_keys = [
        'a',
        'sensitive',
        'not_url',
        'json_dump',
        'upload_file_minidump_flash1',
        'upload_file_minidump_flash2',
        'upload_file_minidump_browser',
    ]
    expected_surviving_keys.sort()
    redactor = Redactor(config)
    redactor(d)
    actual_surviving_keys = [x for x in d.keys()]
    actual_surviving_keys.sort()
    eq_(
        len(actual_surviving_keys),
        len(expected_surviving_keys)
    )
    eq_(
        actual_surviving_keys,
        expected_surviving_keys
    )
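# A hedged sketch of the redaction pass the test above exercises, assuming
# forbidden_keys arrives as an iterable of dotted key paths and that this
# DotDict supports dotted deletion; the real Redactor's traversal may differ.
def redact_sketch(crash, forbidden_keys):
    for dotted_key in forbidden_keys:
        try:
            del crash[dotted_key]
        except KeyError:
            # a forbidden key that is absent from this crash is fine
            pass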
def _fake_unredacted_processed_crash(self): d = self._fake_processed_crash() # these keys do not survive redaction d["url"] = "http://very.embarassing.com" d["email"] = "*****@*****.**" d["user_id"] = "3333" d["exploitability"] = "yep" d.json_dump = DotDict() d.json_dump.sensitive = 22 d.upload_file_minidump_flash1 = DotDict() d.upload_file_minidump_flash1.json_dump = DotDict() d.upload_file_minidump_flash1.json_dump.sensitive = 33 d.upload_file_minidump_flash2 = DotDict() d.upload_file_minidump_flash2.json_dump = DotDict() d.upload_file_minidump_flash2.json_dump.sensitive = 33 d.upload_file_minidump_browser = DotDict() d.upload_file_minidump_browser.json_dump = DotDict() d.upload_file_minidump_browser.json_dump.sensitive = DotDict() d.upload_file_minidump_browser.json_dump.sensitive.exploitable = 55 d.upload_file_minidump_browser.json_dump.sensitive.secret = 66 return d
def test_module_on_stack_top(self, mocked_subprocess_module): rule = self.build_rule() raw_crash = copy.copy(canonical_standard_raw_crash) raw_dumps = {rule.dump_field: "a_fake_dump.dump"} processed_crash = DotDict() processed_crash.product = "Firefox" processed_crash.os_name = "Windows NT" processed_crash.cpu_name = "x86" processed_crash.signature = "EnterBaseline" processed_crash["json_dump.crashing_thread.frames"] = [ DotDict({"module": "a-module"}), DotDict({"not_module": "not-a-module"}), ] processor_meta = get_basic_processor_meta() mocked_subprocess_handle = mocked_subprocess_module.Popen.return_value mocked_subprocess_handle.stdout.read.return_value = "EXTRA-SPECIAL" mocked_subprocess_handle.wait.return_value = 0 rule.act(raw_crash, raw_dumps, processed_crash, processor_meta) assert "classifications.jit.category" not in processed_crash assert "classifications.jit.category_return_code" not in processed_crash