def _GetSameStepFromBuild(self, master_name, builder_name, build_number,
                          step_name, http_client):
  """Downloads swarming test results for a step from previous build."""
  step = WfStep.Get(master_name, builder_name, build_number, step_name)

  if step and step.isolated and step.log_data:
    # Test level log has been saved for this step.
    return step

  # Sends request to swarming server for isolated data.
  step_isolated_data = swarming_util.GetIsolatedDataForStep(
      master_name, builder_name, build_number, step_name, http_client)

  if not step_isolated_data:  # pragma: no cover
    return None

  result_log = swarming_util.RetrieveShardedTestResultsFromIsolatedServer(
      step_isolated_data, http_client)

  if (not result_log or not result_log.get('per_iteration_data') or
      result_log['per_iteration_data'] == 'invalid'):  # pragma: no cover
    return None

  step = WfStep.Create(master_name, builder_name, build_number, step_name)
  step.isolated = True
  self._InitiateTestLevelFirstFailureAndSaveLog(result_log, step)

  return step
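# Illustrative helper (an assumption, not part of the original source) that
# captures the validity guard _GetSameStepFromBuild applies above and that
# _StartTestLevelCheckForFirstFailure repeats later in this section: a
# sharded result log is only usable when it carries real per_iteration_data.
def _HasValidPerIterationData(result_log):
  return bool(result_log and result_log.get('per_iteration_data') and
              result_log['per_iteration_data'] != 'invalid')


assert not _HasValidPerIterationData(None)
assert not _HasValidPerIterationData({'per_iteration_data': 'invalid'})
assert _HasValidPerIterationData({'per_iteration_data': [{'Test.A': []}]})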
def GetIsolatedDataForFailedBuild(master_name, builder_name, build_number,
                                  failed_steps, http_client):
  """Checks failed step_names in the swarming log for the build.

  Searches each failed step_name to identify swarming/non-swarming tests and
  keeps track of isolated data for each failed swarming step.
  """
  data = ListSwarmingTasksDataByTags(master_name, builder_name, build_number,
                                     http_client)
  if not data:
    return False

  tag_name = 'stepname'
  build_isolated_data = defaultdict(list)
  for item in data:
    if item['failure'] and not item['internal_failure']:
      # Only retrieve test results from tasks that failed, excluding
      # internal infrastructure failures.
      swarming_step_name = GetTagValue(item['tags'], tag_name)
      if swarming_step_name in failed_steps and item.get('outputs_ref'):
        isolated_data = _GenerateIsolatedData(item['outputs_ref'])
        build_isolated_data[swarming_step_name].append(isolated_data)

  new_steps = []
  for step_name in build_isolated_data:
    failed_steps[step_name]['list_isolated_data'] = (
        build_isolated_data[step_name])

    # Create a WfStep object for each failed step.
    step = WfStep.Create(master_name, builder_name, build_number, step_name)
    step.isolated = True
    new_steps.append(step)

  ndb.put_multi(new_steps)
  return True
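# Swarming task tags are 'key:value' strings, so GetTagValue (used above) can
# be sketched as below. This is an illustrative stand-in, not the production
# helper.
def _GetTagValueSketch(tags, tag_name):
  prefix = tag_name + ':'
  for tag in tags:
    if tag.startswith(prefix):
      return tag[len(prefix):]
  return None


assert _GetTagValueSketch(['os:Linux', 'stepname:abc_test'],
                          'stepname') == 'abc_test'
assert _GetTagValueSketch(['os:Linux'], 'stepname') is None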
def testFailureLogFetched(self):
  master_name = 'm'
  builder_name = 'b 1'
  build_number = 123
  step_name = 'compile'

  step_url = CreateStdioLogUrl(master_name, builder_name, build_number,
                               step_name)

  step_log = WfStep.Create(master_name, builder_name, build_number, step_name)
  step_log.log_data = 'Log has been successfully fetched!'
  step_log.put()

  self.mock_current_user(user_email='*****@*****.**', is_admin=True)

  response = self.test_app.get('/failure-log',
                               params={'url': step_url, 'format': 'json'})

  expected_response = {
      'master_name': 'm',
      'builder_name': 'b 1',
      'build_number': 123,
      'step_name': 'compile',
      'step_logs': 'Log has been successfully fetched!'
  }

  self.assertEquals(200, response.status_int)
  self.assertEquals(expected_response, response.json_body)
def testGetSignalFromStepLog(self, _):
  master_name = 'm'
  builder_name = 'b'
  build_number = 123
  step_name = 'abc_test'

  # Mock both stdio log and gtest json results to test whether Findit will
  # go to the step log first when both logs exist.
  self.MockGetGtestJsonResult()

  self._CreateAndSaveWfAnanlysis(master_name, builder_name, build_number)

  pipeline = ExtractSignalPipeline(FAILURE_INFO)
  signals = pipeline.run(FAILURE_INFO)

  step = WfStep.Get(master_name, builder_name, build_number, step_name)

  expected_files = {
      'a/b/u2s1.cc': [567],
      'a/b/u3s2.cc': [110]
  }

  self.assertIsNotNone(step)
  self.assertIsNotNone(step.log_data)
  self.assertEqual(expected_files, signals['abc_test']['files'])
def UpdateSwarmingSteps(master_name, builder_name, build_number, failed_steps,
                        http_client):
  """Updates swarming steps based on swarming task data.

  Searches each failed step_name to identify swarming/non-swarming steps and
  updates failed swarming steps with isolated data. Also creates and saves
  swarming steps in the datastore.
  """
  build_isolated_data = swarming.GetIsolatedDataForFailedStepsInABuild(
      master_name, builder_name, build_number, failed_steps, http_client)

  if not build_isolated_data:
    return False

  new_steps = []
  for step_name in build_isolated_data:
    failed_steps[step_name].list_isolated_data = (
        IsolatedDataList.FromSerializable(build_isolated_data[step_name]))

    # Create a WfStep object for each failed step.
    step = WfStep.Create(master_name, builder_name, build_number, step_name)
    step.isolated = True
    new_steps.append(step)

  ndb.put_multi(new_steps)
  return True
def testGetSignalFromStepLogFlaky(self):
  master_name = 'm'
  builder_name = 'b'
  build_number = 124
  step_name = 'abc_test'

  failure_info = {
      'master_name': 'm',
      'builder_name': 'b',
      'build_number': 124,
      'failed': True,
      'chromium_revision': 'a_git_hash',
      'failed_steps': {
          'abc_test': {
              'last_pass': 123,
              'current_failure': 124,
              'first_failure': 124,
          }
      }
  }

  self.MockGetStdiolog(master_name, builder_name, build_number, step_name)
  self.MockGetGtestJsonResult()

  pipeline = ExtractSignalPipeline()
  signals = pipeline.run(failure_info)

  step = WfStep.Get(master_name, builder_name, build_number, step_name)

  self.assertIsNotNone(step)
  self.assertIsNotNone(step.log_data)
  self.assertEqual('flaky', step.log_data)
  self.assertEqual({}, signals['abc_test']['files'])
def HandleGet(self):
  """Fetch the log of a failed step as a JSON result."""
  url = self.request.get('url', '')
  step_info = buildbot.ParseStepUrl(url)
  if not step_info:
    return BaseHandler.CreateError(
        'Url "%s" is not pointing to a step.' % url, 501)

  master_name, builder_name, build_number, step_name = step_info

  step = WfStep.Get(master_name, builder_name, build_number, step_name)
  if not step:
    return BaseHandler.CreateError('No failure log available.', 404)

  failure_log = self._GetFormattedJsonLogIfSwarming(step)

  data = {
      'master_name': master_name,
      'builder_name': builder_name,
      'build_number': build_number,
      'step_name': step_name,
      'step_logs': failure_log,
  }
  return {'template': 'failure_log.html', 'data': data}
def testBackwardTraverseBuildsWhenGettingTestLevelFailureInfo(
    self, mock_fun, *_):
  master_name = 'm'
  builder_name = 'b'
  build_number = 221
  step_name = 'abc_test'
  failed_steps = {
      'abc_test': {
          'current_failure': 223,
          'first_failure': 223,
          'supported': True,
          'list_isolated_data': [{
              'isolatedserver': 'https://isolateserver.appspot.com',
              'namespace': 'default-gzip',
              'digest': 'isolatedhashabctest-223'
          }]
      }
  }
  builds = {
      '221': {
          'blame_list': ['commit1'],
          'chromium_revision': 'commit1'
      },
      '222': {
          'blame_list': ['commit2'],
          'chromium_revision': 'commit2'
      },
      '223': {
          'blame_list': ['commit3', 'commit4'],
          'chromium_revision': 'commit4'
      }
  }
  failure_info = {
      'master_name': master_name,
      'builder_name': builder_name,
      'build_number': build_number,
      'failed_steps': failed_steps,
      'builds': builds
  }
  failure_info = TestFailureInfo.FromSerializable(failure_info)

  expected_failed_steps = failed_steps
  expected_failed_steps['abc_test']['tests'] = None
  expected_failed_steps['abc_test']['last_pass'] = None

  step = WfStep.Create(master_name, builder_name, build_number, step_name)
  step.isolated = True
  step.put()

  ci_test_failure.CheckFirstKnownFailureForSwarmingTests(
      master_name, builder_name, build_number, failure_info)

  mock_fun.assert_called_once_with(
      master_name, builder_name, build_number, step_name,
      TestFailedStep.FromSerializable(failed_steps[step_name]),
      ['223', '222', '221'], None)
def _SaveIsolatedResultToStep(master_name, builder_name, build_number,
                              step_name, failed_test_log):
  """Parses the json data and saves all the reliable failures to the step."""
  step = (WfStep.Get(master_name, builder_name, build_number, step_name) or
          WfStep.Create(master_name, builder_name, build_number, step_name))

  step.isolated = True
  step.log_data = (
      json.dumps(failed_test_log)
      if failed_test_log else constants.FLAKY_FAILURE_LOG)

  try:
    step.put()
  except (BadRequestError, RequestTooLargeError) as e:
    # The log is too large to save as a single datastore entity;
    # store a placeholder instead so the download is not retried.
    step.isolated = True
    step.log_data = constants.TOO_LARGE_LOG
    logging.warning('Failed to save data in %s/%s/%d/%s: %s' %
                    (master_name, builder_name, build_number, step_name,
                     e.message))
    step.put()
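# Illustrative sketch (not production code) of the three log_data shapes
# _SaveIsolatedResultToStep can persist: a JSON dump of per-test logs, the
# flaky marker when there is no reliable failure, or the too-large marker
# when the datastore rejects the entity. The tests in this section compare
# log_data against the literal 'flaky', so that value is assumed for
# constants.FLAKY_FAILURE_LOG here.
import json

def _EncodeLogData(failed_test_log, flaky_marker='flaky'):
  # An empty/None log means every failure was flaky; otherwise store JSON.
  return json.dumps(failed_test_log) if failed_test_log else flaky_marker


assert _EncodeLogData({}) == 'flaky'
assert _EncodeLogData(None) == 'flaky'
assert _EncodeLogData({'Suite.Test': 'bG9n'}) == '{"Suite.Test": "bG9n"}'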
def testExtractSignalsForTestsFlaky(self):
  master_name = 'm'
  builder_name = 'b'
  build_number = 223

  failure_info = {
      'master_name': master_name,
      'builder_name': builder_name,
      'build_number': build_number,
      'failed': True,
      'chromium_revision': 'a_git_hash',
      'failed_steps': {
          'abc_test': {
              'last_pass': 221,
              'current_failure': 223,
              'first_failure': 222,
              'tests': {
                  'Unittest2.Subtest1': {
                      'current_failure': 223,
                      'first_failure': 222,
                      'last_pass': 221
                  },
                  'Unittest3.Subtest2': {
                      'current_failure': 223,
                      'first_failure': 222,
                      'last_pass': 221
                  }
              }
          }
      }
  }

  step = WfStep.Create(master_name, builder_name, build_number, 'abc_test')
  step.isolated = True
  step.log_data = 'flaky'
  step.put()

  expected_signals = {
      'abc_test': {
          'files': {},
          'keywords': {},
          'tests': {}
      }
  }

  self._CreateAndSaveWfAnanlysis(master_name, builder_name, build_number)

  pipeline = ExtractSignalPipeline()
  signals = pipeline.run(failure_info)

  self.assertEqual(expected_signals, signals)
def testCheckFirstKnownFailureForSwarmingTestsFoundFlaky(self, mock_module):
  master_name = 'm'
  builder_name = 'b'
  build_number = 221
  step_name = 'abc_test'
  failed_steps = {
      'abc_test': {
          'current_failure': 221,
          'first_failure': 221,
          'list_isolated_data': [{
              'isolatedserver': 'https://isolateserver.appspot.com',
              'namespace': 'default-gzip',
              'digest': 'isolatedhashabctest-223'
          }]
      }
  }
  builds = {
      '221': {
          'blame_list': ['commit1'],
          'chromium_revision': 'commit1'
      },
      '222': {
          'blame_list': ['commit2'],
          'chromium_revision': 'commit2'
      },
      '223': {
          'blame_list': ['commit3', 'commit4'],
          'chromium_revision': 'commit4'
      }
  }
  expected_failed_steps = failed_steps

  step = WfStep.Create(master_name, builder_name, build_number, step_name)
  step.isolated = True
  step.put()

  mock_module.GetIsolatedDataForFailedBuild.return_value = True
  mock_module.RetrieveShardedTestResultsFromIsolatedServer.return_value = (
      json.loads(
          self._GetSwarmingData('isolated-plain',
                                'm_b_223_abc_test_flaky.json')))

  pipeline = DetectFirstFailurePipeline()
  pipeline._CheckFirstKnownFailureForSwarmingTests(
      master_name, builder_name, build_number, failed_steps, builds)

  self.assertEqual(expected_failed_steps, failed_steps)
def _GetTestLevelLogForAStep(master_name, builder_name, build_number,
                             step_name, http_client):
  """Downloads swarming test results for a step from a build and returns logs
  for failed tests.

  Returns:
    A dict of failure logs for each failed test.
  """
  step = WfStep.Get(master_name, builder_name, build_number, step_name)

  if (step and step.isolated and step.log_data and
      step.log_data != constants.TOO_LARGE_LOG):
    # Test level log has been saved for this step.
    try:
      if step.log_data == constants.FLAKY_FAILURE_LOG:
        return {}
      return json.loads(step.log_data)
    except ValueError:
      logging.error('log_data %s of step %s/%s/%d/%s is not json loadable.' %
                    (step.log_data, master_name, builder_name, build_number,
                     step_name))
      return None

  # Sends request to swarming server for isolated data.
  step_isolated_data = swarming.GetIsolatedDataForStep(
      master_name, builder_name, build_number, step_name, http_client)

  if not step_isolated_data:
    logging.warning('Failed to get step_isolated_data for build %s/%s/%d/%s.' %
                    (master_name, builder_name, build_number, step_name))
    return None

  result_log = swarmed_test_util.RetrieveShardedTestResultsFromIsolatedServer(
      step_isolated_data, http_client)
  test_results = test_results_util.GetTestResultObject(result_log)

  if not test_results:
    logging.warning(
        'Failed to get swarming test results for build %s/%s/%d/%s.' %
        (master_name, builder_name, build_number, step_name))
    return None

  failed_test_log, _ = (
      test_results_service.GetFailedTestsInformationFromTestResult(
          test_results))
  return failed_test_log
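# Hypothetical caller sketch: per _GetTestLevelLogForAStep above, the return
# value is tri-state -- None on any error, {} when all failures in the step
# were flaky, and a dict of per-test logs otherwise -- so callers must check
# for None explicitly rather than relying on truthiness.
def _ClassifyTestLevelLog(failed_test_log):
  if failed_test_log is None:
    return 'error'     # Download or parse failed; the caller may retry.
  if not failed_test_log:
    return 'flaky'     # No reliable failure to analyze.
  return 'failures'    # Maps test name -> failure log.


assert _ClassifyTestLevelLog(None) == 'error'
assert _ClassifyTestLevelLog({}) == 'flaky'
assert _ClassifyTestLevelLog({'Suite.Test': 'log'}) == 'failures'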
def testWfStepStdioLogNotDownloadedYet(self, _):
  master_name = 'm'
  builder_name = 'b'
  build_number = 123
  step_name = 'abc_test'

  self._CreateAndSaveWfAnanlysis(master_name, builder_name, build_number)

  pipeline = ExtractSignalPipeline(FAILURE_INFO)
  pipeline.start()
  self.execute_queued_tasks()

  step = WfStep.Get(master_name, builder_name, build_number, step_name)
  self.assertIsNotNone(step)
def testWfStepStdioLogNotDownloadedYet(self):
  master_name = 'm'
  builder_name = 'b'
  build_number = 123
  step_name = 'abc_test'

  self.MockGetStdiolog(master_name, builder_name, build_number, step_name)

  pipeline = ExtractSignalPipeline(self.FAILURE_INFO)
  pipeline.start()
  self.execute_queued_tasks()

  step = WfStep.Get(master_name, builder_name, build_number, step_name)
  self.assertIsNotNone(step)
def testGetLogForTheSameStepFromBuildNotJsonLoadable(self):
  master_name = 'm'
  builder_name = 'b'
  build_number = 121
  step_name = 'atest'

  step = WfStep.Create(master_name, builder_name, build_number, step_name)
  step.isolated = True
  step.log_data = 'log'
  step.put()

  self.assertIsNone(
      ci_test_failure._GetTestLevelLogForAStep(master_name, builder_name,
                                               build_number, step_name, None))
def testWfStepStdioLogAlreadyDownloaded(self, _):
  master_name = 'm'
  builder_name = 'b'
  build_number = 123
  step_name = 'abc_test'
  step = WfStep.Create(master_name, builder_name, build_number, step_name)
  step.log_data = ABC_TEST_FAILURE_LOG
  step.put()

  self._CreateAndSaveWfAnanlysis(master_name, builder_name, build_number)

  pipeline = ExtractSignalPipeline(FAILURE_INFO)
  signals = pipeline.run(FAILURE_INFO)

  self.assertEqual(FAILURE_SIGNALS, signals)
def testUpdateFirstFailureOnTestLevelFlaky(self):
  master_name = 'm'
  builder_name = 'b'
  build_number = 223
  step_name = 'abc_test'
  failed_step = {
      'current_failure': 223,
      'first_failure': 221,
      'supported': True,
      'tests': {
          'Unittest2.Subtest1': {
              'current_failure': 223,
              'first_failure': 223,
              'last_pass': 223,
              'base_test_name': 'Unittest2.Subtest1'
          }
      }
  }
  failed_step = TestFailedStep.FromSerializable(failed_step)
  step = WfStep.Create(master_name, builder_name, 222, step_name)
  step.isolated = True
  step.log_data = 'flaky'
  step.put()

  ci_test_failure._UpdateFirstFailureOnTestLevel(
      master_name, builder_name, build_number, step_name, failed_step,
      [223, 222, 221], FinditHttpClient())

  expected_failed_step = {
      'current_failure': 223,
      'first_failure': 223,
      'last_pass': 222,
      'supported': True,
      'list_isolated_data': None,
      'tests': {
          'Unittest2.Subtest1': {
              'current_failure': 223,
              'first_failure': 223,
              'last_pass': 222,
              'base_test_name': 'Unittest2.Subtest1'
          }
      }
  }
  self.assertEqual(expected_failed_step, failed_step.ToSerializable())
def _StartTestLevelCheckForFirstFailure(self, master_name, builder_name,
                                        build_number, step_name, failed_step,
                                        http_client):
  """Downloads test results and initiates first failure info at test level."""
  list_isolated_data = failed_step['list_isolated_data']
  result_log = swarming_util.RetrieveShardedTestResultsFromIsolatedServer(
      list_isolated_data, http_client)

  if (not result_log or not result_log.get('per_iteration_data') or
      result_log['per_iteration_data'] == 'invalid'):  # pragma: no cover
    return False

  step = WfStep.Get(master_name, builder_name, build_number, step_name)

  return self._InitiateTestLevelFirstFailureAndSaveLog(
      result_log, step, failed_step)
def testGetFormattedJsonLogIfSwarming(self):
  master_name = 'm'
  builder_name = 'b'
  build_number = 123
  step_name = 'browser_test'

  step_url = CreateStdioLogUrl(master_name, builder_name, build_number,
                               step_name)

  step_log = WfStep.Create(master_name, builder_name, build_number, step_name)
  step_log.isolated = True
  step_log.log_data = (
      '{"Unittest2.Subtest1": "RVJST1I6eF90ZXN0LmNjOjEyMzQKYS9iL3Uy'
      'czEuY2M6NTY3OiBGYWlsdXJlCkVSUk9SOlsyXTogMjU5NDczNTAwMCBib2dvLW1pY3Jv'
      'c2Vjb25kcwpFUlJPUjp4X3Rlc3QuY2M6MTIzNAphL2IvdTJzMS5jYzo1Njc6IE'
      'ZhaWx1cmUK", '
      '"Unittest3.Subtest2": "YS9iL3UzczIuY2M6MTEwOiBGYWlsdXJlCg=="}')
  step_log.put()

  self.mock_current_user(user_email='*****@*****.**', is_admin=True)

  response = self.test_app.get('/failure-log',
                               params={'url': step_url, 'format': 'json'})

  expected_response = {
      'master_name': 'm',
      'builder_name': 'b',
      'build_number': 123,
      'step_name': 'browser_test',
      'step_logs': ('{\n "Unittest2.Subtest1": "ERROR:x_test.cc:1234'
                    '\n a/b/u2s1.cc:567: Failure\n '
                    'ERROR:[2]: 2594735000 bogo-microseconds\n '
                    'ERROR:x_test.cc:1234\n a/b/u2s1.cc:567: Failure'
                    '\n ", \n "Unittest3.Subtest2": '
                    '"a/b/u3s2.cc:110: Failure\n "\n}')
  }

  self.assertEquals(200, response.status_int)
  self.assertEquals(expected_response, response.json_body)
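# Self-contained check (illustrative; written for Python 2 to match the
# codebase): for isolated steps, the per-test values in WfStep.log_data are
# base64-encoded failure logs. Decoding the Unittest3.Subtest2 value from the
# test above recovers the plain-text log that the handler pretty-prints.
import base64

assert (base64.b64decode('YS9iL3UzczIuY2M6MTEwOiBGYWlsdXJlCg==') ==
        'a/b/u3s2.cc:110: Failure\n')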
def testUpdateFirstFailureOnTestLevelFlaky(self):
  master_name = 'm'
  builder_name = 'b'
  build_number = 223
  step_name = 'abc_test'
  failed_step = {
      'current_failure': 223,
      'first_failure': 221,
      'tests': {
          'Unittest2.Subtest1': {
              'current_failure': 223,
              'first_failure': 223,
              'last_pass': 223,
              'base_test_name': 'Unittest2.Subtest1'
          }
      }
  }
  step = WfStep.Create(master_name, builder_name, 222, step_name)
  step.isolated = True
  step.log_data = 'flaky'
  step.put()

  pipeline = DetectFirstFailurePipeline()
  pipeline._UpdateFirstFailureOnTestLevel(
      master_name, builder_name, build_number, step_name, failed_step,
      HttpClientAppengine())

  expected_failed_step = {
      'current_failure': 223,
      'first_failure': 223,
      'last_pass': 222,
      'tests': {
          'Unittest2.Subtest1': {
              'current_failure': 223,
              'first_failure': 223,
              'last_pass': 222,
              'base_test_name': 'Unittest2.Subtest1'
          }
      }
  }
  self.assertEqual(expected_failed_step, failed_step)
def testWfStepStdioLogAlreadyDownloaded(self):
  master_name = 'm'
  builder_name = 'b'
  build_number = 123
  step_name = 'abc_test'
  step = WfStep.Create(master_name, builder_name, build_number, step_name)
  step.log_data = self.ABC_TEST_FAILURE_LOG
  step.put()

  step_log_url = buildbot.CreateStdioLogUrl(master_name, builder_name,
                                            build_number, step_name)
  with self.mock_urlfetch() as urlfetch:
    urlfetch.register_handler(step_log_url, 'If used, test should fail!')

  pipeline = ExtractSignalPipeline(self.FAILURE_INFO)
  signals = pipeline.run(self.FAILURE_INFO)

  self.assertEqual(self.FAILURE_SIGNALS, signals)
def testSaveLogToStepLogTooBig(self):
  master_name = 'm'
  builder_name = 'b'
  build_number = 250
  step_name = 'atest'

  original_step_put = WfStep.put
  calls = []

  def MockNdbTransaction(func, **options):
    # Fail the first put() to simulate an oversized entity; let the
    # fallback put() with the placeholder log go through.
    if len(calls) < 1:
      calls.append(1)
      raise datastore_errors.BadRequestError('log_data is too long')
    return original_step_put(func, **options)

  self.mock(WfStep, 'put', MockNdbTransaction)

  ci_test_failure._SaveIsolatedResultToStep(master_name, builder_name,
                                            build_number, step_name, {})
  step = WfStep.Get(master_name, builder_name, build_number, step_name)
  self.assertEqual(step.log_data, constants.TOO_LARGE_LOG)
def testCompileStepSignalFromCachedStepLog(self):
  master_name = 'm'
  builder_name = 'b'
  build_number = 123
  step_name = 'compile'

  step = WfStep.Create(master_name, builder_name, build_number, step_name)
  step.log_data = _NINJA_OUTPUT_JSON
  step.put()

  self._CreateAndSaveWfAnanlysis(master_name, builder_name, build_number)

  signals = extract_compile_signal.ExtractSignalsForCompileFailure(
      CompileFailureInfo.FromSerializable(_COMPILE_FAILURE_INFO), None)

  expected_failed_edges = [{
      'output_nodes': ['a/b.o'],
      'rule': 'CXX',
      'dependencies': ['b.h', 'b.c']
  }]
  self.assertEqual(expected_failed_edges, signals['compile']['failed_edges'])
def testGetSignalFromStepLogInvalid(self):
  master_name = 'm'
  builder_name = 'b'
  build_number = 125
  step_name = 'abc_test'

  failure_info = {
      'master_name': 'm',
      'builder_name': 'b',
      'build_number': 125,
      'failed': True,
      'chromium_revision': 'a_git_hash',
      'failed_steps': {
          'abc_test': {
              'last_pass': 124,
              'current_failure': 125,
              'first_failure': 125,
          }
      }
  }

  self.MockGetStdiolog(master_name, builder_name, build_number, step_name)
  self.MockGetGtestJsonResult()

  pipeline = ExtractSignalPipeline()
  signals = pipeline.run(failure_info)

  step = WfStep.Get(master_name, builder_name, build_number, step_name)

  expected_files = {
      'content/common/gpu/media/v4l2_video_encode_accelerator.cc': [306]
  }

  self.assertIsNotNone(step)
  self.assertIsNotNone(step.log_data)
  self.assertEqual(expected_files, signals['abc_test']['files'])
def testAnalyzeSwarmingTestResultsInitiateLastPassForTests(self):
  json_data = json.loads(
      self._GetSwarmingData('isolated-plain', 'm_b_223_abc_test.json'))

  step = WfStep.Create('m', 'b', 223, 'abc_test')
  step.isolated = True
  step.put()

  failed_step = {
      'current_failure': 223,
      'first_failure': 221,
      'tests': {}
  }

  pipeline = DetectFirstFailurePipeline()
  pipeline._InitiateTestLevelFirstFailureAndSaveLog(
      json_data, step, failed_step)

  expected_failed_step = {
      'current_failure': 223,
      'first_failure': 221,
      'tests': {
          'Unittest2.Subtest1': {
              'current_failure': 223,
              'first_failure': 223,
              'base_test_name': 'Unittest2.Subtest1'
          },
          'Unittest3.Subtest2': {
              'current_failure': 223,
              'first_failure': 223,
              'base_test_name': 'Unittest3.Subtest2'
          }
      }
  }
  self.assertEqual(expected_failed_step, failed_step)
def ExtractSignalsForCompileFailure(failure_info, http_client):
  signals = {}

  master_name = failure_info.master_name
  builder_name = failure_info.builder_name
  build_number = failure_info.build_number
  step_name = 'compile'

  if step_name not in (failure_info.failed_steps or {}):
    logging.debug(
        'No compile failure found when extracting signals for failed '
        'build %s/%s/%d', master_name, builder_name, build_number)
    return signals

  if not failure_info.failed_steps[step_name].supported:
    # Bail out if the step is not supported.
    logging.info('Findit could not analyze compile failure for master %s.',
                 master_name)
    return signals

  failure_log = None

  # 1. Tries to get stored failure log from step.
  step = (WfStep.Get(master_name, builder_name, build_number, step_name) or
          WfStep.Create(master_name, builder_name, build_number, step_name))
  if step.log_data:
    failure_log = step.log_data
  else:
    # 2. Tries to get ninja_output as failure log.
    from_ninja_output = False
    use_ninja_output_log = (
        waterfall_config.GetDownloadBuildDataSettings().get(
            'use_ninja_output_log'))
    if use_ninja_output_log:
      failure_log = step_util.GetWaterfallBuildStepLog(
          master_name, builder_name, build_number, step_name, http_client,
          'json.output[ninja_info]')
      from_ninja_output = True

    if not failure_log:
      # 3. Tries to get stdout log for compile step.
      from_ninja_output = False
      failure_log = extract_signal.GetStdoutLog(
          master_name, builder_name, build_number, step_name, http_client)

    try:
      if not failure_log:
        raise extract_signal.FailedToGetFailureLogError(
            'Failed to pull failure log (stdio or ninja output) of step %s of'
            ' %s/%s/%d' % (step_name, master_name, builder_name, build_number))
    except extract_signal.FailedToGetFailureLogError:
      return {}

    # Save step log in datastore and avoid downloading again during retry.
    step.log_data = extract_signal.ExtractStorablePortionOfLog(
        failure_log, from_ninja_output)

    try:
      step.put()
    except Exception as e:  # pragma: no cover
      # Sometimes, the step log is too large to save in datastore.
      logging.exception(e)

  signals[step_name] = extractors.ExtractSignal(
      master_name,
      builder_name,
      step_name,
      test_name=None,
      failure_log=failure_log).ToDict()

  extract_signal.SaveSignalInAnalysis(master_name, builder_name, build_number,
                                      signals)

  return signals
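# Minimal sketch (illustrative, not the production code path) of the
# three-tier log lookup ExtractSignalsForCompileFailure performs above: a
# cached datastore copy wins, then the structured ninja output, then the raw
# stdout of the compile step.
def _PickCompileLog(cached_log, ninja_log, stdout_log):
  for log in (cached_log, ninja_log, stdout_log):
    if log:
      return log
  return None


assert _PickCompileLog('cached', None, 'stdout') == 'cached'
assert _PickCompileLog(None, None, 'stdout') == 'stdout'
assert _PickCompileLog(None, None, None) is None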
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""An example of using the Remote API to query the datastore on live App
Engine."""

import os
import sys

# Append the path of the Findit root directory to import remote_api.
_FINDIT_DIR = os.path.join(
    os.path.dirname(__file__), os.path.pardir, os.path.pardir)
sys.path.insert(0, _FINDIT_DIR)

# During importing, sys.path will be set up appropriately.
from local_libs import remote_api  # pylint: disable=W

# Set up the Remote API to use services on the live App Engine.
remote_api.EnableFinditRemoteApi()

from model.wf_step import WfStep

step = WfStep.Get('chromium.memory', 'Linux ASan Tests (sandboxed)', 11413,
                  'browser_tests')

with open('/tmp/step.log', 'w') as f:
  f.write(step.log_data)
def run(self, failure_info):
  """Extracts failure signals from failed steps.

  Args:
    failure_info (dict): Output of pipeline DetectFirstFailurePipeline.run().

  Returns:
    A dict like below:
    {
      'step_name1': waterfall.failure_signal.FailureSignal.ToDict(),
      ...
    }
  """
  signals = {}
  if not failure_info['failed'] or not failure_info['chromium_revision']:
    # Bail out if no failed step or no chromium revision.
    return signals

  # Bail out on infra failure.
  if failure_info.get('failure_type') == failure_type.INFRA:
    return signals

  master_name = failure_info['master_name']
  builder_name = failure_info['builder_name']
  build_number = failure_info['build_number']

  for step_name in failure_info.get('failed_steps', []):
    if not waterfall_config.StepIsSupportedForMaster(step_name, master_name):
      # Bail out if the step is not supported.
      continue

    step = WfStep.Get(master_name, builder_name, build_number, step_name)

    if step and step.log_data:
      failure_log = step.log_data
    else:
      # TODO: do test-level analysis instead of step-level.
      # TODO: Use swarming test result instead of archived gtest results.
      gtest_result = buildbot.GetGtestResultLog(master_name, builder_name,
                                                build_number, step_name)
      if gtest_result:
        failure_log = _GetReliableTestFailureLog(gtest_result)

      if gtest_result is None or failure_log == 'invalid':
        if not lock_util.WaitUntilDownloadAllowed(
            master_name):  # pragma: no cover
          raise pipeline.Retry('Failed to pull log of step %s of master %s' %
                               (step_name, master_name))
        try:
          failure_log = buildbot.GetStepLog(master_name, builder_name,
                                            build_number, step_name,
                                            self.HTTP_CLIENT)
        except ResponseTooLargeError:  # pragma: no cover.
          logging.exception('Log of step "%s" is too large for urlfetch.',
                            step_name)
          # If the stdio log of a step is too large, we don't want to pull it
          # again in next run, because that might lead to DDoS to the master.
          # TODO: Use archived stdio logs in Google Storage instead.
          failure_log = 'Stdio log is too large for urlfetch.'

      if not failure_log:  # pragma: no cover
        raise pipeline.Retry('Failed to pull stdio of step %s of master %s' %
                             (step_name, master_name))

      # Save step log in datastore and avoid downloading again during retry.
      if not step:  # pragma: no cover
        step = WfStep.Create(master_name, builder_name, build_number,
                             step_name)
      step.log_data = _ExtractStorablePortionOfLog(failure_log)

      try:
        step.put()
      except Exception as e:  # pragma: no cover
        # Sometimes, the step log is too large to save in datastore.
        logging.exception(e)

    # TODO: save result in datastore?
    if step.isolated:
      try:
        json_failure_log = (
            json.loads(failure_log) if failure_log != 'flaky' else {})
      except ValueError:  # pragma: no cover
        json_failure_log = {}
        logging.warning('failure_log %s is not valid JSON.' % failure_log)

      signals[step_name] = {'tests': {}}
      step_signal = FailureSignal()

      for test_name, test_failure_log in json_failure_log.iteritems():
        signals[step_name]['tests'][test_name] = extractors.ExtractSignal(
            master_name, builder_name, step_name, test_name,
            base64.b64decode(test_failure_log)).ToDict()

        # Save signals in test failure log to step level.
        step_signal.MergeFrom(signals[step_name]['tests'][test_name])

      signals[step_name]['files'] = step_signal.files
      signals[step_name]['keywords'] = step_signal.keywords
    else:
      signals[step_name] = extractors.ExtractSignal(
          master_name, builder_name, step_name, None, failure_log).ToDict()

  return signals
def testTestLevelFailedInfo(self, mock_fn):
  master_name = 'm'
  builder_name = 'b'
  build_number = 223

  self._CreateAndSaveWfAnanlysis(master_name, builder_name, build_number,
                                 analysis_status.RUNNING)

  # Mock data for retrieving data from swarming server for a build.
  self._MockUrlFetchWithSwarmingData(master_name, builder_name, 223)
  mock_fn.side_effect = [
      self._GetBuildData(master_name, builder_name, 223),
      self._GetBuildData(master_name, builder_name, 222),
      self._GetBuildData(master_name, builder_name, 221),
      self._GetBuildData(master_name, builder_name, 220)
  ]

  for n in xrange(223, 219, -1):  # pragma: no branch.
    # Setup build data for builds:
    if n == 220:
      break

    # Mock data for retrieving data from swarming server for a single step.
    self._MockUrlFetchWithSwarmingData(master_name, builder_name, n,
                                       'abc_test')

    # Mock data for retrieving hash to output.json from isolated server.
    isolated_data = {
        'isolatedserver': 'https://isolateserver.appspot.com',
        'namespace': {
            'namespace': 'default-gzip'
        },
        'digest': 'isolatedhashabctest-%d' % n
    }
    self._MockUrlfetchWithIsolatedData(isolated_data, build_number=n)

    # Mock data for retrieving url to output.json from isolated server.
    file_hash_data = {
        'isolatedserver': 'https://isolateserver.appspot.com',
        'namespace': {
            'namespace': 'default-gzip'
        },
        'digest': 'abctestoutputjsonhash-%d' % n
    }
    self._MockUrlfetchWithIsolatedData(file_hash_data, build_number=n)

    # Mock data for downloading output.json from isolated server.
    self._MockUrlfetchWithIsolatedData(
        None,
        ('https://isolateserver.storage.googleapis.com/default-gzip/'
         'm_b_%d_abc_test' % n),
        '%s_%s_%d_%s.json' % (master_name, builder_name, n, 'abc_test'))

  step_221 = WfStep.Create(master_name, builder_name, 221, 'abc_test')
  step_221.isolated = True
  step_221.log_data = (
      '{"Unittest3.Subtest3": "YS9iL3UzczIuY2M6MTEwOiBGYWlsdXJlCg=="}')
  step_221.put()

  pipeline = DetectFirstFailurePipeline()
  failure_info = pipeline.run(master_name, builder_name, build_number)

  expected_failed_steps = {
      'abc_test': {
          'current_failure': 223,
          'first_failure': 222,
          'last_pass': 221,
          'list_isolated_data': [{
              'isolatedserver': 'https://isolateserver.appspot.com',
              'namespace': 'default-gzip',
              'digest': 'isolatedhashabctest-223'
          }],
          'tests': {
              'Unittest2.Subtest1': {
                  'current_failure': 223,
                  'first_failure': 222,
                  'last_pass': 221,
                  'base_test_name': 'Unittest2.Subtest1'
              },
              'Unittest3.Subtest2': {
                  'current_failure': 223,
                  'first_failure': 222,
                  'last_pass': 221,
                  'base_test_name': 'Unittest3.Subtest2'
              }
          }
      }
  }

  expected_step_log_data = {
      223: ('{"Unittest2.Subtest1": "RVJST1I6eF90ZXN0LmNjOjEyMzRcbmEvYi91Mn'
            'MxLmNjOjU2NzogRmFpbHVyZVxuRVJST1I6WzJdOiAyNTk0NzM1MDAwIGJvZ28tb'
            'Wljcm9zZWNvbmRzXG5FUlJPUjp4X3Rlc3QuY2M6MTIzNAphL2IvdTJzMS5jYzo1'
            'Njc6IEZhaWx1cmUK", '
            '"Unittest3.Subtest2": "YS9iL3UzczIuY2M6MTEwOiBGYWlsdXJlCg=="}'),
      222: ('{"Unittest2.Subtest1": "RVJST1I6eF90ZXN0LmNjOjEyMzRcbmEvYi91Mn'
            'MxLmNjOjU2NzogRmFpbHVyZVxuRVJST1I6WzJdOiAyNTk0NzM1MDAwIGJvZ28tb'
            'Wljcm9zZWNvbmRzXG5FUlJPUjp4X3Rlc3QuY2M6MTIzNAphL2IvdTJzMS5jYzo1'
            'Njc6IEZhaWx1cmUK", '
            '"Unittest3.Subtest2": "YS9iL3UzczIuY2M6MTEwOiBGYWlsdXJlCg=="}'),
      221: '{"Unittest3.Subtest3": "YS9iL3UzczIuY2M6MTEwOiBGYWlsdXJlCg=="}'
  }

  for n in xrange(223, 220, -1):
    step = WfStep.Get(master_name, builder_name, n, 'abc_test')
    self.assertIsNotNone(step)
    self.assertTrue(step.isolated)
    self.assertEqual(expected_step_log_data[n], step.log_data)

  self.assertEqual(expected_failed_steps, failure_info['failed_steps'])
def run(self, failure_info):
  """Extracts failure signals from failed steps.

  Args:
    failure_info (dict): Output of pipeline DetectFirstFailurePipeline.run().

  Returns:
    A dict like below:
    {
      'step_name1': waterfall.failure_signal.FailureSignal.ToDict(),
      ...
    }
  """
  signals = {}
  if not failure_info['failed'] or not failure_info['chromium_revision']:
    # Bail out if no failed step or no chromium revision.
    return signals

  master_name = failure_info['master_name']
  builder_name = failure_info['builder_name']
  build_number = failure_info['build_number']

  for step_name in failure_info.get('failed_steps', []):
    step = WfStep.Get(master_name, builder_name, build_number, step_name)

    if step and step.log_data:
      failure_log = step.log_data
    else:
      # TODO: do test-level analysis instead of step-level.
      gtest_result = buildbot.GetGtestResultLog(master_name, builder_name,
                                                build_number, step_name)
      if gtest_result:
        failure_log = self._GetReliableTestFailureLog(gtest_result)

      if gtest_result is None or failure_log == 'invalid':
        if not lock_util.WaitUntilDownloadAllowed(
            master_name):  # pragma: no cover
          raise pipeline.Retry('Failed to pull log of step %s of master %s' %
                               (step_name, master_name))
        try:
          failure_log = buildbot.GetStepStdio(master_name, builder_name,
                                              build_number, step_name,
                                              self.HTTP_CLIENT)
        except ResponseTooLargeError:  # pragma: no cover.
          logging.exception('Log of step "%s" is too large for urlfetch.',
                            step_name)
          # If the stdio log of a step is too large, we don't want to pull it
          # again in next run, because that might lead to DDoS to the master.
          # TODO: Use archived stdio logs in Google Storage instead.
          failure_log = 'Stdio log is too large for urlfetch.'

      if not failure_log:  # pragma: no cover
        raise pipeline.Retry('Failed to pull stdio of step %s of master %s' %
                             (step_name, master_name))

      # Save step log in datastore and avoid downloading again during retry.
      if not step:  # pragma: no cover
        step = WfStep.Create(master_name, builder_name, build_number,
                             step_name)
      step.log_data = self._ExtractStorablePortionOfLog(failure_log)

      try:
        step.put()
      except Exception as e:  # pragma: no cover
        # Sometimes, the step log is too large to save in datastore.
        logging.exception(e)

    # TODO: save result in datastore?
    signals[step_name] = extractors.ExtractSignal(
        master_name, builder_name, step_name, None, failure_log).ToDict()

  return signals