def test_predicate_no_crashing_thread(self):
    rule = self.build_rule()
    processed_crash = DotDict({
        "product": "Firefox",
        "os_name": "Windows NT",
        "cpu_name": "x86",
        "signature": "EnterBaseline",
        # No "crashing_thread" key
        "json_dump": {},
    })

    assert rule.predicate({}, {}, processed_crash, {}) is True
def test_rules_close_if_close_method_available(self):
    config = DotDict()
    config.logger = Mock()
    config.tag = 'test.rule'
    config.action = 'apply_all_rules'
    config.rules_list = DotDict()
    config.rules_list.class_list = [
        (
            'RuleTestNoCloseMethod',
            RuleTestNoCloseMethod,
            'RuleTestNoCloseMethod'
        ),
        (
            'RuleTestDangerous',
            RuleTestDangerous,
            'RuleTestDangerous'
        )
    ]
    trs = transform_rules.TransformRuleSystem(config)
    trs.close()

    assert len(config.logger.debug.mock_calls) == 3
    config.logger.debug.assert_any_call(
        'trying to close %s',
        'socorro.unittest.lib.test_transform_rules.'
        'RuleTestNoCloseMethod'
    )
    config.logger.debug.assert_any_call(
        'trying to close %s',
        'socorro.unittest.lib.test_transform_rules.'
        'RuleTestDangerous'
    )
def __init__(self, config):
    rabbitconfig = DotDict()
    rabbitconfig.host = config['rabbitMQHost']
    rabbitconfig.port = config['rabbitMQPort']
    rabbitconfig.virtual_host = config['rabbitMQVirtualhost']
    rabbitconfig.rabbitmq_user = config['rabbitMQUsername']
    rabbitconfig.rabbitmq_password = config['rabbitMQPassword']
    rabbitconfig.standard_queue_name = config['rabbitMQStandardQueue']
    rabbitconfig.priority_queue_name = config['rabbitMQPriorityQueue']
    rabbitconfig.rabbitmq_connection_wrapper_class = Connection
    self.context = ConnectionContext(config=rabbitconfig)
def test_constructor1(self):
    config = DotDict()
    config.number_of_threads = 1
    config.maximum_queue_size = 1
    ttm = ThreadedTaskManager(config)
    try:
        assert ttm.config == config
        assert ttm.task_func == default_task_func
        assert not ttm.quit
    finally:
        # we got threads to join
        ttm._kill_worker_threads()
def _setup_config(self):
    config = DotDict()
    self.transaction_executor = MagicMock()
    config.transaction_executor_class = self.transaction_executor
    config.logger = Mock()
    config.rabbitmq_class = ConnectionContext
    config.routing_key = 'socorro.reprocessing'
    config.filter_on_legacy_processing = True
    config.forbidden_keys = ''
    config.redactor_class = Redactor
    config.throttle = 100
    return config
def test_rules_in_config(self):
    config = DotDict()
    config.chatty_rules = False
    config.chatty = False
    config.tag = 'test.rule'
    config.action = 'apply_all_rules'
    config['TestRuleTestLaughable.laughable'] = 'wilma'
    config['TestRuleTestDangerous.dangerous'] = 'dwight'
    config.rules_list = DotDict()
    config.rules_list.class_list = [
        (
            'TestRuleTestLaughable',
            TestRuleTestLaughable,
            'TestRuleTestLaughable'
        ),
        (
            'TestRuleTestDangerous',
            TestRuleTestDangerous,
            'TestRuleTestDangerous'
        )
    ]
    trs = transform_rules.TransformRuleSystem(config)

    ok_(isinstance(trs.rules[0], TestRuleTestLaughable))
    ok_(isinstance(trs.rules[1], TestRuleTestDangerous))
    ok_(trs.rules[0].predicate(None))
    ok_(trs.rules[1].action(None))
def test_get_values(self):
    new_proxy = ApplicationDefaultsProxy()
    vs = ValueSource(new_proxy)
    eq_(vs.get_values(None, None, dict), {})
    eq_(vs.get_values(None, None, DotDict), DotDict())

    new_proxy.str_to_application_class(
        'collector.unittest.app.test_for_application_defaults.SomeApp'
    )
    eq_(
        vs.get_values(None, None, dict),
        {
            'alpha': 17,
            'beta': 23,
        }
    )
    ok_(isinstance(vs.get_values(None, None, DotDict), DotDict))
def test_rules_close(self):
    config = DotDict()
    config.chatty_rules = False
    config.chatty = False
    config.tag = 'test.rule'
    config.action = 'apply_all_rules'
    config['TestRuleTestLaughable.laughable'] = 'wilma'
    config['TestRuleTestDangerous.dangerous'] = 'dwight'
    config.rules_list = DotDict()
    config.rules_list.class_list = [
        (
            'TestRuleTestLaughable',
            TestRuleTestLaughable,
            'TestRuleTestLaughable'
        ),
        (
            'TestRuleTestDangerous',
            TestRuleTestDangerous,
            'TestRuleTestDangerous'
        )
    ]
    trs = transform_rules.TransformRuleSystem(config)
    trs.close()

    eq_(trs.rules[0].close_counter, 1)
    eq_(trs.rules[1].close_counter, 1)
def _analyze_frames(self, hang_type, java_stack_trace,
                    make_modules_lower_case, dump_analysis_line_iterator,
                    submitted_timestamp, crashed_thread, processor_notes):
    # consume (at most) the first five lines of the dump analysis output
    for x in zip(xrange(5), dump_analysis_line_iterator):
        pass
    return DotDict({
        "signature": 'signature',
        "truncated": False,
        "topmost_filenames": 'topmost_sourcefiles',
    })
def get_standard_config(self):
    config = DotDict()

    config.logger = mock.MagicMock()
    config.throttler = mock.MagicMock()
    config.collector_class = BreakpadCollector2015
    config.dump_id_prefix = 'bp-'
    config.dump_field = 'dump'
    config.accept_submitted_crash_id = False
    config.accept_submitted_legacy_processing = False
    config.checksum_method = hashlib.md5

    config.storage = DotDict()
    config.storage.crashstorage_class = mock.MagicMock()

    config.throttler = DotDict()
    config.throttler.throttler_class = mock.MagicMock()

    return config
def process(self, crash_ids):
    if not isinstance(crash_ids, (list, tuple)):
        crash_ids = [crash_ids]

    success = bool(crash_ids)

    for crash_id in crash_ids:
        if not self.save_raw_crash(
            DotDict({'legacy_processing': 0}),
            [],
            crash_id
        ):
            success = False

    return success
def get_standard_config(self):
    config = DotDict()

    config.source = DotDict()
    mocked_source_crashstorage = mock.Mock()
    mocked_source_crashstorage.id = 'mocked_source_crashstorage'
    config.source.crashstorage_class = mock.Mock(
        return_value=mocked_source_crashstorage
    )

    config.destination = DotDict()
    mocked_destination_crashstorage = mock.Mock()
    mocked_destination_crashstorage.id = 'mocked_destination_crashstorage'
    config.destination.crashstorage_class = mock.Mock(
        return_value=mocked_destination_crashstorage
    )

    config.processor = DotDict()
    mocked_processor = mock.Mock()
    mocked_processor.id = 'mocked_processor'
    config.processor.processor_class = mock.Mock(
        return_value=mocked_processor
    )

    config.queue = DotDict()
    config.queue.crashqueue_class = FakeCrashQueue

    config.companion_process = DotDict()
    mocked_companion_process = mock.Mock()
    config.companion_process.companion_class = mock.Mock(
        return_value=mocked_companion_process
    )

    return config
def test_post(self, req_mock):
    config = DotDict({
        'urls': 'http://example.com/submit,http://2.example.com/submit',
        'logger': mock.MagicMock(),
        'redactor_class': mock.MagicMock(),
    })
    bpd = BreakpadPOSTDestination(config)

    raw_crash = DotDict({'Product': 'Firefox'})
    dumps = {}
    crash_id = create_new_ooid()

    # Set up the request mock to return what Antenna returns
    response_text = 'CrashID=bp-%s\n' % crash_id
    req_mock.post('http://example.com/submit', text=response_text)
    req_mock.post('http://2.example.com/submit', text=response_text)

    # Run the method in question
    bpd.save_raw_crash_with_file_dumps(raw_crash, dumps, crash_id)

    # Verify what happened with requests.post
    assert req_mock.call_count == 2
    req_history = req_mock.request_history

    assert req_history[0].method == 'POST'
    assert req_history[0].url == 'http://example.com/submit'

    assert req_history[1].method == 'POST'
    assert req_history[1].url == 'http://2.example.com/submit'

    # Generating the payload involves some random-string bits in poster, so
    # we can't do a string compare, which makes it hard to verify that the
    # posted data was correct. Instead, we check that some expected strings
    # made it through and assume that's probably good.
    history_0_text = str(req_history[0].text)
    assert 'Content-Disposition: form-data; name="Product"' in history_0_text
    assert 'Firefox' in history_0_text

    # Assert the same payload was sent to both urls
    history_1_text = str(req_history[1].text)
    assert history_0_text == history_1_text
def test_POST_reject_browser_with_hangid(
    self, mocked_web, mocked_webapi, mocked_utc_now, mocked_time
):
    config = self.get_standard_config()
    c = BreakpadCollector(config)

    rawform = DotDict()
    rawform[u'\u0000ProductName'] = 'FireSquid'
    rawform.Version = '99'
    rawform.dump = DotDict({'value': 'fake dump', 'file': 'faked file'})
    rawform.some_field = '23'
    rawform.some_other_field = ObjectWithValue('XYZ')
    rawform.HangID = 'xyz'
    rawform.ProcessType = 'browser'

    form = DotDict(rawform)
    form.dump = rawform.dump.value

    erc = DotDict()
    erc.ProductName = 'FireSquid'
    erc.Version = '99'
    erc.some_field = '23'
    erc.some_other_field = 'XYZ'
    erc.legacy_processing = ACCEPT
    erc.throttle_rate = None
    erc.timestamp = 3.0
    erc.submitted_timestamp = '2012-05-04T15:10:00'
    erc.type_tag = 'bp'
    erc = dict(erc)

    mocked_web.ctx.configure_mock(env={'CONTENT_LENGTH': 1000})
    mocked_webapi.rawinput.return_value = rawform
    mocked_utc_now.return_value = datetime(2012, 5, 4, 15, 10)
    mocked_time.time.return_value = 3.0
    c.throttler.throttle.return_value = (IGNORE, None)
    r = c.POST()
    eq_(r, "Unsupported=1\n")
    ok_(not c.crash_storage.save_raw_crash.call_count)

    # Verify metrics were captured and .capture_stats() was called.
    config.metrics.capture_stats.assert_called_with(
        {'collector.crash_report_size_rejected_uncompressed': 1000}
    )
def get_standard_config(self):
    config = DotDict()

    config.source = DotDict()
    mocked_source_crashstorage = mock.Mock()
    mocked_source_crashstorage.id = 'mocked_source_crashstorage'
    config.source.crashstorage_class = mock.Mock(
        return_value=mocked_source_crashstorage
    )

    config.destination = DotDict()
    mocked_destination_crashstorage = mock.Mock()
    mocked_destination_crashstorage.id = 'mocked_destination_crashstorage'
    config.destination.crashstorage_class = mock.Mock(
        return_value=mocked_destination_crashstorage
    )

    config.producer_consumer = DotDict()
    mocked_producer_consumer = mock.Mock()
    mocked_producer_consumer.id = 'mocked_producer_consumer'
    config.producer_consumer.producer_consumer_class = mock.Mock(
        return_value=mocked_producer_consumer
    )
    config.producer_consumer.number_of_threads = float(1)

    config.new_crash_source = DotDict()
    config.new_crash_source.new_crash_source_class = None

    config.submitter = DotDict()
    config.submitter.delay = 0
    config.submitter.dry_run = False
    config.number_of_submissions = "all"

    config.logger = mock.MagicMock()

    return config
def test_redact(self):
    d = DotDict()
    # these keys survive redaction
    d["a.b.c"] = 11
    d["sensitive.x"] = 2
    d["not_url"] = "not a url"
    # these keys do not survive redaction
    d["url"] = "http://very.embarassing.com"
    d["email"] = ("*****@*****.**", )
    d["user_id"] = "3333"
    d["exploitability"] = "yep"
    d["json_dump.sensitive"] = 22
    d["upload_file_minidump_flash1.json_dump.sensitive"] = 33
    d["upload_file_minidump_flash2.json_dump.sensitive"] = 44
    d["upload_file_minidump_browser.json_dump.sensitive.exploitable"] = 55
    d["upload_file_minidump_browser.json_dump.sensitive.secret"] = 66
    d["memory_info"] = {"incriminating_memory": "call the FBI"}

    assert "json_dump" in d

    config = DotDict()
    config.forbidden_keys = Redactor.required_config.forbidden_keys.default

    expected_surviving_keys = [
        "a",
        "sensitive",
        "not_url",
        "json_dump",
        "upload_file_minidump_flash1",
        "upload_file_minidump_flash2",
        "upload_file_minidump_browser",
    ]
    expected_surviving_keys.sort()

    redactor = Redactor(config)
    redactor(d)

    actual_surviving_keys = [x for x in d.keys()]
    actual_surviving_keys.sort()

    assert actual_surviving_keys == expected_surviving_keys
def testCountStackWalkerTimeoutKills_success(self, statsd_obj):
    config = DotDict()
    config.counter_class = Mock()
    config.rule_name = 'stackwalker_timeout_kills'
    config.statsd_class = Mock()
    config.statsd_host = 'some_statsd_host'
    config.statsd_port = 3333
    config.statsd_prefix = ''
    config.active_list = ['stackwalker_timeout_kills']
    a_rule = CountStackWalkerTimeoutKills(config)

    raw_crash_mock = Mock()
    raw_dumps_mock = Mock()
    processed_crash_mock = Mock()
    proc_meta = DotDict()
    proc_meta.processor_notes = [
        'hello',
        'this is a list of notes from the processor',
        'it has information about the what the processor',
        'thought was important',
        'like, maybe, SIGKILL of the stackwalker',
        'or other such things.'
    ]

    assert a_rule._predicate(
        raw_crash_mock,
        raw_dumps_mock,
        processed_crash_mock,
        proc_meta
    )

    a_rule._action(
        raw_crash_mock,
        raw_dumps_mock,
        processed_crash_mock,
        proc_meta
    )

    a_rule.counter._incr.assert_called_once_with(
        'stackwalker_timeout_kills'
    )
def test_rule_exceptions_send_to_sentry_with_crash_id(self, mock_raven):

    def mock_capture_exception():
        return 'someidentifier'

    client = MagicMock()
    extras = []

    def mock_context_merge(context):
        extras.append(context['extra'])

    def mock_Client(**config):
        client.config = config
        client.context.merge.side_effect = mock_context_merge
        client.captureException.side_effect = mock_capture_exception
        return client

    mock_raven.Client.side_effect = mock_Client

    fake_config = DotDict()
    fake_config.logger = Mock()
    fake_config.chatty_rules = False
    fake_config.chatty = False
    fake_config.sentry = DotDict()
    fake_config.sentry.dsn = (
        'https://*****:*****@sentry.example.com/01'
    )

    class BadPredicate(transform_rules.Rule):
        def _predicate(self, *args, **kwargs):
            raise NameError("highwater")

    p = BadPredicate(fake_config)
    raw_crash = {'uuid': 'ABC123'}
    eq_(p.predicate(raw_crash), False)
    fake_config.logger.info.assert_called_with(
        'Error captured in Sentry! Reference: someidentifier'
    )

    # When the client was created and the extra context merged, we can
    # expect that it included a tag and a crash_id
    assert len(extras) == 1
    eq_(extras[0]['tag'], 'predicate')
    eq_(extras[0]['crash_id'], 'ABC123')
def get_standard_config(self):
    config = DotDict()

    config.logger = mock.MagicMock()

    config.services = DotDict()
    config.services.services_controller = DotDict()

    class Service1(object):
        pass

    config.services.service1 = DotDict()
    config.services.service1.service_implementation_class = Service1

    class Service2(object):
        pass

    config.services.service2 = DotDict()
    config.services.service2.service_implementation_class = Service2

    config.services.services_controller.service_list = [
        ('service1', '/submit', Service1),
        ('service2', '/unsubmit', Service2),
    ]

    config.web_server = DotDict()
    self.mocked_web_server = mock.MagicMock()
    config.web_server.wsgi_server_class = mock.MagicMock(
        return_value=self.mocked_web_server
    )

    return config
def _setup_config(self):
    config = DotDict()
    config.host = 'localhost'
    config.virtual_host = '/'
    config.port = '5672'
    config.rabbitmq_user = '******'
    config.rabbitmq_password = '******'
    config.standard_queue_name = 'dwight'
    config.priority_queue_name = 'wilma'
    config.reprocessing_queue_name = 'betty'
    config.rabbitmq_connection_wrapper_class = Connection
    config.executor_identity = lambda: 'MainThread'
    return config
def test_legacy_new_crash_source_basics(self):
    m_transaction_executor_class = mock.Mock()

    config = DotDict()
    database = mock.Mock()
    config.database_class = mock.Mock(return_value=database)
    config.transaction_executor_class = m_transaction_executor_class
    config.batchJobLimit = 10

    LegacyNewCrashSource(config, processor_name='dwight-1234')

    eq_(m_transaction_executor_class.call_count, 1)
    m_transaction_executor_class.assert_called_with(config, database, None)
def test_transform_success(self):
    config = self.get_standard_config()
    pa = ProcessorApp(config)
    pa._setup_source_and_destination()

    fake_raw_crash = DotDict()
    mocked_get_raw_crash = mock.Mock(return_value=fake_raw_crash)
    pa.source.get_raw_crash = mocked_get_raw_crash

    fake_dump = {'upload_file_minidump': 'fake_dump_TEMPORARY.dump'}
    mocked_get_raw_dumps_as_files = mock.Mock(return_value=fake_dump)
    pa.source.get_raw_dumps_as_files = mocked_get_raw_dumps_as_files

    fake_processed_crash = DotDict()
    mocked_get_unredacted_processed = mock.Mock(
        return_value=fake_processed_crash
    )
    pa.source.get_unredacted_processed = mocked_get_unredacted_processed

    mocked_process_crash = mock.Mock(return_value=7)
    pa.processor.process_crash = mocked_process_crash
    pa.destination.save_processed = mock.Mock()
    finished_func = mock.Mock()

    patch_path = 'socorro.processor.processor_app.os.unlink'
    with mock.patch(patch_path) as mocked_unlink:
        # the call being tested
        pa.transform(17, finished_func)

    # test results
    mocked_unlink.assert_called_with('fake_dump_TEMPORARY.dump')
    pa.source.get_raw_crash.assert_called_with(17)
    pa.processor.process_crash.assert_called_with(
        fake_raw_crash,
        fake_dump,
        fake_processed_crash
    )
    pa.destination.save_raw_and_processed.assert_called_with(
        fake_raw_crash, None, 7, 17
    )
    assert finished_func.call_count == 1
def test_POST_reject_browser_with_hangid(self):
    config = self.get_standard_config()
    c = Collector(config)

    rawform = DotDict()
    rawform.ProductName = 'FireFloozy'
    rawform.Version = '99'
    rawform.dump = DotDict({'value': 'fake dump', 'file': 'faked file'})
    rawform.some_field = '23'
    rawform.some_other_field = ObjectWithValue('XYZ')
    rawform.HangID = 'xyz'
    rawform.ProcessType = 'browser'

    form = DotDict(rawform)
    form.dump = rawform.dump.value

    erc = DotDict()
    erc.ProductName = 'FireFloozy'
    erc.Version = '99'
    erc.some_field = '23'
    erc.some_other_field = 'XYZ'
    erc.legacy_processing = ACCEPT
    erc.timestamp = 3.0
    erc.submitted_timestamp = '2012-05-04T15:10:00'
    erc = dict(erc)

    with nested(
        mock.patch('socorro.collector.wsgicollector.web'),
        mock.patch('socorro.collector.wsgicollector.web.webapi'),
        mock.patch('socorro.collector.wsgicollector.utc_now'),
        mock.patch('socorro.collector.wsgicollector.time')
    ) as (mocked_web, mocked_webapi, mocked_utc_now, mocked_time):
        mocked_web.input.return_value = form
        mocked_webapi.rawinput.return_value = rawform
        mocked_utc_now.return_value = datetime(2012, 5, 4, 15, 10)
        mocked_time.time.return_value = 3.0
        c.legacy_throttler.throttle.return_value = IGNORE
        r = c.POST()
        self.assertEqual(r, "Unsupported=1\n")
        self.assertFalse(self.crash_storage.save_raw.call_count)
def test_everything_we_hoped_for(self):
    config = get_basic_config()

    raw_crash = copy.copy(canonical_standard_raw_crash)
    raw_dumps = {}
    processed_crash = DotDict()
    processed_crash.json_dump = copy.copy(canonical_stackwalker_output)
    processor_meta = get_basic_processor_meta()

    rule = CrashingThreadRule(config)
    rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)

    assert processed_crash.crashedThread == 0
def test_blocking_start_with_quit_on_empty(self):
    config = DotDict()
    config.idle_delay = 1
    config.quit_on_empty_queue = True

    tm = TaskManager(
        config,
        task_func=Mock()
    )

    waiting_func = Mock()

    tm.blocking_start(waiting_func=waiting_func)

    assert tm.task_func.call_count == 10
    assert waiting_func.call_count == 0
def get_standard_config(self):
    config = DotDict()

    mocked_source_implementation = mock.Mock()
    mocked_source_implementation.quit_check_callback = None
    config.source_implementation = mock.Mock(
        return_value=mocked_source_implementation
    )
    config.sql = (
        'select uuid from jobs order by queueddatetime DESC limit 1000'
    )

    config.logger = mock.MagicMock()

    return config
def setup_config_with_mocks():
    config = DotDict()
    config.mock_quit_fn = mock.Mock()
    config.logger = mock.Mock()
    config.transaction = mock.Mock()
    config.transaction_executor_class = mock.Mock(
        return_value=config.transaction
    )
    config.database = mock.Mock()
    config.database_class = mock.Mock(return_value=config.database)

    config.stackwalk_command_line = (
        '$minidump_stackwalk_pathname -m $dumpfilePathname '
        '$processor_symbols_pathname_list 2>/dev/null'
    )
    config.minidump_stackwalk_pathname = '/bin/mdsw'
    config.symbol_cache_path = '/symbol/cache'
    config.processor_symbols_pathname_list = '"/a/a" "/b/b" "/c/c"'

    config.c_signature = DotDict()
    config.c_signature.c_signature_tool_class = mock.Mock()
    config.java_signature = DotDict()
    config.java_signature.java_signature_tool_class = mock.Mock()

    return config
def test_predicate_no_crashing_thread(self):
    rule = self.build_rule()
    processed_crash = DotDict({
        'product': 'Firefox',
        'os_name': 'Windows NT',
        'cpu_name': 'x86',
        'signature': 'EnterBaseline',
        # No "crashing_thread" key
        'json_dump': {},
    })

    assert rule.predicate({}, {}, processed_crash, {}) is True
def str_to_application_class(self, an_app_key):
    """a configman compatible str_to_* converter"""
    try:
        app_class = str_to_python_object(self.apps[an_app_key])
    except KeyError:
        app_class = str_to_python_object(an_app_key)
    try:
        self.application_defaults = DotDict(
            app_class.get_application_defaults()
        )
    except AttributeError:
        # no get_application_defaults, skip this step
        pass
    return app_class
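# A minimal usage sketch, not part of the original module: it shows how a
# str_to_* converter like str_to_application_class above can be handed to
# configman through the from_string_converter parameter of add_option. The
# option name 'application', its doc string, and the proxy instance below
# are illustrative assumptions; only Namespace.add_option and
# from_string_converter come from configman's documented API.
from configman import Namespace

application_defaults_proxy = ApplicationDefaultsProxy()  # assumed in scope

definition_source = Namespace()
definition_source.add_option(
    'application',
    doc='the fully qualified classname of the app to run (assumed option)',
    default=None,
    from_string_converter=application_defaults_proxy.str_to_application_class,
)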
def add_namespace(self, name, a_namespace):
    """as we build up argparse, the actions that define a subparser are
    translated into configman options.  Each of those options must be
    tagged with the name of the subparser to which it corresponds."""
    # save a local copy of the namespace
    self.namespaces[name] = a_namespace
    # iterate through the namespace, branding each of the options with the
    # name of the subparser to which they belong
    for k in a_namespace.keys_breadth_first():
        an_option = a_namespace[k]
        if not an_option.foreign_data:
            an_option.foreign_data = DotDict()
        an_option.foreign_data['argparse.owning_subparser_name'] = name
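# A small illustrative sketch, assuming the DotDict mapping API used above:
# once add_namespace has branded each option's foreign_data, the owning
# subparser can be recovered later by walking a namespace. The helper name
# options_for_subparser is hypothetical and not part of the original code.
def options_for_subparser(a_namespace, subparser_name):
    # yield the keys of all options tagged with the given subparser name
    for k in a_namespace.keys_breadth_first():
        an_option = a_namespace[k]
        if (
            an_option.foreign_data
            and an_option.foreign_data.get('argparse.owning_subparser_name')
            == subparser_name
        ):
            yield k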