  def test_existing_unreproducible_testcase(self):
    """Test existing unreproducible testcase."""
    crashes = [self._make_crash('c1'), self._make_crash('c2')]
    self.mock.test_for_reproducibility.return_value = False

    existing_testcase = data_types.Testcase()
    existing_testcase.crash_stacktrace = 'existing'
    existing_testcase.crash_type = crashes[0].crash_type
    existing_testcase.crash_state = crashes[0].crash_state
    existing_testcase.security_flag = crashes[0].security_flag
    existing_testcase.one_time_crasher_flag = True
    existing_testcase.job_type = 'existing_job'
    existing_testcase.timestamp = datetime.datetime.now()
    existing_testcase.project_name = 'some_project'
    existing_testcase.put()

    new_crash_count, known_crash_count, groups = fuzz_task.process_crashes(
        crashes=crashes,
        context=fuzz_task.Context(
            project_name='some_project',
            bot_name='bot',
            job_type='job',
            fuzz_target=data_types.FuzzTarget(engine='engine', binary='binary'),
            redzone=111,
            platform_id='platform',
            crash_revision=1234,
            fuzzer_name='fuzzer',
            window_argument='win_args',
            fuzzer_metadata={},
            testcases_metadata={},
            timeout_multiplier=1,
            test_timeout=2,
            thread_wait_timeout=3,
            data_directory='/data'))
    self.assertEqual(0, new_crash_count)
    self.assertEqual(2, known_crash_count)

    self.assertEqual(1, len(groups))
    self.assertEqual(2, len(groups[0].crashes))
    self.assertFalse(groups[0].is_new())
    self.assertEqual(crashes[0].crash_type, groups[0].main_crash.crash_type)
    self.assertEqual(crashes[0].crash_state, groups[0].main_crash.crash_state)
    self.assertEqual(crashes[0].security_flag,
                     groups[0].main_crash.security_flag)

    testcases = list(data_types.Testcase.query())
    self.assertEqual(1, len(testcases))
    self.assertEqual('existing', testcases[0].crash_stacktrace)
  def test_create_many_groups(self, project_name):
    """Test creating many groups."""
    self.mock.get_project_name.return_value = project_name
    self.mock.insert.return_value = {'insertErrors': [{'index': 0}]}

    invalid_crash = self._make_crash('e1', state='error1')
    invalid_crash.error = 'error'

    # TODO(metzman): Add a separate test for strategies.
    r2_stacktrace = 'r2\ncf::fuzzing_strategies: value_profile\n'

    crashes = [
        self._make_crash('r1', state='reproducible1'),
        self._make_crash(r2_stacktrace, state='reproducible1'),
        self._make_crash('r3', state='reproducible1'),
        self._make_crash('r4', state='reproducible2'),
        self._make_crash('u1', state='unreproducible1'),
        self._make_crash('u2', state='unreproducible2'),
        self._make_crash('u3', state='unreproducible2'),
        self._make_crash('u4', state='unreproducible3'),
        invalid_crash,
    ]

    self.mock.test_for_reproducibility.side_effect = [
        False,  # For r1. It returns False, so r1 is demoted.
        True,  # For r2. It returns True, so r2 becomes primary for its group.
        True,  # For r4.
        False,  # For u1.
        False,  # For u2.
        False,  # For u3.
        False,  # For u4.
    ]

    new_crash_count, known_crash_count, groups = fuzz_task.process_crashes(
        crashes=crashes,
        context=fuzz_task.Context(
            project_name=project_name,
            bot_name='bot',
            job_type='job',
            fuzz_target=data_types.FuzzTarget(engine='engine', binary='binary'),
            redzone=111,
            platform_id='platform',
            crash_revision=1234,
            fuzzer_name='fuzzer',
            window_argument='win_args',
            fuzzer_metadata={},
            testcases_metadata={},
            timeout_multiplier=1,
            test_timeout=2,
            thread_wait_timeout=3,
            data_directory='/data'))
    self.assertEqual(5, new_crash_count)
    self.assertEqual(3, known_crash_count)

    self.assertEqual(5, len(groups))
    self.assertEqual([
        'reproducible1', 'reproducible2', 'unreproducible1', 'unreproducible2',
        'unreproducible3'
    ], [group.main_crash.crash_state for group in groups])
    self.assertEqual([True, True, True, True, True],
                     [group.is_new() for group in groups])
    self.assertEqual([3, 1, 1, 2, 1],
                     [len(group.crashes) for group in groups])

    testcases = list(data_types.Testcase.query())
    self.assertEqual(5, len(testcases))
    self.assertSetEqual(
        set([r2_stacktrace, 'r4', 'u1', 'u2', 'u4']),
        set(t.crash_stacktrace for t in testcases))

    self.assertSetEqual(
        set([
            '{"fuzzing_strategies": ["value_profile"]}', None, None, None, None
        ]), set(t.additional_metadata for t in testcases))

    # There's one invalid_crash. And r2 is a reproducible crash, so r3 doesn't
    # invoke archive_testcase_in_blobstore. Therefore,
    # archive_testcase_in_blobstore is called `len(crashes) - 2` times.
    self.assertEqual(
        len(crashes) - 2,
        self.mock.archive_testcase_and_dependencies_in_gcs.call_count)

    # Check that only the desired testcases were saved.
    actual_crash_infos = [group.main_crash.crash_info for group in groups]
    if project_name != 'chromium':
      expected_crash_infos = [None] * len(actual_crash_infos)
    else:
      expected_saved_crash_info = crash_uploader.CrashReportInfo(
          product='Chrome_' + environment.platform().lower().capitalize(),
          version='this.is.fake.ver',
          serialized_crash_stack_frames='f00df00d')
      expected_crash_infos = [
          expected_saved_crash_info,  # r2 is the main crash for group r1,r2,r3.
          expected_saved_crash_info,  # r4 is the main crash for its own group.
          None,  # u1 is not reproducible.
          None,  # u2, u3 are not reproducible.
          None,  # u4 is not reproducible.
      ]

    self.assertEqual(len(expected_crash_infos), len(actual_crash_infos))
    for expected, actual in zip(expected_crash_infos, actual_crash_infos):
      if not expected:
        self.assertIsNone(actual)
        continue

      self.assertEqual(expected.product, actual.product)
      self.assertEqual(expected.version, actual.version)
      self.assertEqual(expected.serialized_crash_stack_frames,
                       actual.serialized_crash_stack_frames)

    def _make_big_query_json(crash, reproducible_flag, new_flag, testcase_id):
      return {
          'crash_type': crash.crash_type,
          'crash_state': crash.crash_state,
          'created_at': 987,
          'platform': 'platform',
          'crash_time_in_ms': int(crash.crash_time * 1000),
          'parent_fuzzer_name': 'engine',
          'fuzzer_name': 'engine_binary',
          'job_type': 'job',
          'security_flag': crash.security_flag,
          'reproducible_flag': reproducible_flag,
          'revision': '1234',
          'new_flag': new_flag,
          'project': project_name,
          'testcase_id': testcase_id
      }

    def _get_testcase_id(crash):
      rows = list(
          data_types.Testcase.query(
              data_types.Testcase.crash_type == crash.crash_type,
              data_types.Testcase.crash_state == crash.crash_state,
              data_types.Testcase.security_flag == crash.security_flag))
      if not rows:
        return None
      return str(rows[0].key.id())

    # Calls to write 5 groups of crashes to BigQuery.
    self.assertEqual(5, self.mock.insert.call_count)
    self.mock.insert.assert_has_calls([
        mock.call(mock.ANY, [
            big_query.Insert(
                _make_big_query_json(crashes[0], True, False, None),
                '%s:bot:987:0' % crashes[0].key),
            big_query.Insert(
                _make_big_query_json(crashes[1], True, True,
                                     _get_testcase_id(crashes[1])),
                '%s:bot:987:1' % crashes[0].key),
            big_query.Insert(
                _make_big_query_json(crashes[2], True, False, None),
                '%s:bot:987:2' % crashes[0].key)
        ]),
        mock.call(mock.ANY, [
            big_query.Insert(
                _make_big_query_json(crashes[3], True, True,
                                     _get_testcase_id(crashes[3])),
                '%s:bot:987:0' % crashes[3].key)
        ]),
        mock.call(mock.ANY, [
            big_query.Insert(
                _make_big_query_json(crashes[4], False, True,
                                     _get_testcase_id(crashes[4])),
                '%s:bot:987:0' % crashes[4].key)
        ]),
        mock.call(mock.ANY, [
            big_query.Insert(
                _make_big_query_json(crashes[5], False, True,
                                     _get_testcase_id(crashes[5])),
                '%s:bot:987:0' % crashes[5].key),
            big_query.Insert(
                _make_big_query_json(crashes[6], False, False, None),
                '%s:bot:987:1' % crashes[5].key)
        ]),
        mock.call(mock.ANY, [
            big_query.Insert(
                _make_big_query_json(crashes[7], False, True,
                                     _get_testcase_id(crashes[7])),
                '%s:bot:987:0' % crashes[7].key)
        ]),
    ])