Example #1
def _StartTestLevelCheckForFirstFailure(master_name, builder_name,
                                        build_number, step_name, failed_step,
                                        http_client):
    """Downloads test results and initiates first failure info at test level."""
    list_isolated_data = failed_step.list_isolated_data
    list_isolated_data = (list_isolated_data.ToSerializable()
                          if list_isolated_data else [])
    result_log = swarmed_test_util.RetrieveShardedTestResultsFromIsolatedServer(
        list_isolated_data, http_client)

    test_results_object = test_results_util.GetTestResultObject(result_log)

    if not test_results_object or not step_util.IsStepSupportedByFindit(
            test_results_object,
            step_util.GetCanonicalStepName(master_name, builder_name,
                                           build_number, step_name),
            master_name):
        return False

    failed_test_log, reliable_failed_tests = (
        test_results_service.GetFailedTestsInformationFromTestResult(
            test_results_object))

    _SaveIsolatedResultToStep(master_name, builder_name, build_number,
                              step_name, failed_test_log)
    return _InitiateTestLevelFirstFailure(reliable_failed_tests, failed_step)
Example #2
    def testRetrieveShardedTestResultsFromIsolatedServer(self, mock_data, _):
        isolated_data = [{
            'digest': 'shard1_isolated',
            'namespace': 'default-gzip',
            'isolatedserver': 'isolated_server'
        }, {
            'digest': 'shard2_isolated',
            'namespace': 'default-gzip',
            'isolatedserver': 'isolated_server'
        }]

        mock_data.side_effect = [
            (json.dumps({
                'all_tests': ['test1', 'test2'],
                'per_iteration_data': [{
                    'test1': [{
                        'output_snippet': '[ RUN ] test1.\\r\\n',
                        'output_snippet_base64': 'WyBSVU4gICAgICBdIEFjY291bnRUcm',
                        'status': 'SUCCESS'
                    }]
                }]
            }), 200),
            (json.dumps({
                'all_tests': ['test1', 'test2'],
                'per_iteration_data': [{
                    'test2': [{
                        'output_snippet': '[ RUN ] test2.\\r\\n',
                        'output_snippet_base64': 'WyBSVU4gICAgICBdIEFjY291bnRUcm',
                        'status': 'SUCCESS'
                    }]
                }]
            }), 200)
        ]
        result = swarmed_test_util.RetrieveShardedTestResultsFromIsolatedServer(
            isolated_data, None)
        expected_result = {
            'all_tests': ['test1', 'test2'],
            'per_iteration_data': [{
                'test1': [{
                    'output_snippet': '[ RUN ] test1.\\r\\n',
                    'output_snippet_base64': 'WyBSVU4gICAgICBdIEFjY291bnRUcm',
                    'status': 'SUCCESS'
                }],
                'test2': [{
                    'output_snippet': '[ RUN ] test2.\\r\\n',
                    'output_snippet_base64': 'WyBSVU4gICAgICBdIEFjY291bnRUcm',
                    'status': 'SUCCESS'
                }]
            }]
        }

        self.assertEqual(expected_result, result)
Example #3
    def testRetrieveShardedTestResultsFromIsolatedServerFailed(
            self, mock_data, _):
        isolated_data = [{
            'digest': 'shard1_isolated',
            'namespace': 'default-gzip',
            'isolatedserver': 'isolated_server'
        }]
        mock_data.return_value = (None, 404)

        result = swarmed_test_util.RetrieveShardedTestResultsFromIsolatedServer(
            isolated_data, None)

        self.assertIsNone(result)
Example #4
def _GetTestLevelLogForAStep(master_name, builder_name, build_number,
                             step_name, http_client):
    """Downloads swarming test results for a step from a build and returns logs
    for failed tests.

  Returns:
    A dict of failure logs for each failed test.
  """

    step = WfStep.Get(master_name, builder_name, build_number, step_name)

    if (step and step.isolated and step.log_data
            and step.log_data != constants.TOO_LARGE_LOG):
        # Test level log has been saved for this step.
        try:
            if step.log_data == constants.FLAKY_FAILURE_LOG:
                return {}
            return json.loads(step.log_data)
        except ValueError:
            logging.error(
                'log_data %s of step %s/%s/%d/%s is not json loadable.' %
                (step.log_data, master_name, builder_name, build_number,
                 step_name))
            return None

    # Sends request to swarming server for isolated data.
    step_isolated_data = swarming.GetIsolatedDataForStep(
        master_name, builder_name, build_number, step_name, http_client)

    if not step_isolated_data:
        logging.warning(
            'Failed to get step_isolated_data for build %s/%s/%d/%s.' %
            (master_name, builder_name, build_number, step_name))
        return None

    result_log = swarmed_test_util.RetrieveShardedTestResultsFromIsolatedServer(
        step_isolated_data, http_client)
    test_results = test_results_util.GetTestResultObject(result_log)

    if not test_results:
        logging.warning(
            'Failed to get swarming test results for build %s/%s/%d/%s.' %
            (master_name, builder_name, build_number, step_name))
        return None

    failed_test_log, _ = (
        test_results_service.GetFailedTestsInformationFromTestResult(
            test_results))
    return failed_test_log
Example #5
    def testRetrieveShardedTestResultsFromIsolatedServerOneShard(
            self, mock_data, _):
        isolated_data = [{
            'digest': 'shard1_isolated',
            'namespace': 'default-gzip',
            'isolatedserver': 'isolated_server'
        }]
        data_json = {'all_tests': ['test'], 'per_iteration_data': []}
        data_str = json.dumps(data_json)
        mock_data.return_value = (data_str, 200)

        result = swarmed_test_util.RetrieveShardedTestResultsFromIsolatedServer(
            isolated_data, None)

        self.assertEqual(data_json, result)
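
A note on the test methods in Examples #2, #3 and #5: they take mock_data and _ parameters, but the decorators that supply those mocks are not shown, so the methods presumably live on a unittest.TestCase subclass wrapped in two mock.patch decorators. The sketch below rebuilds that scaffolding around Example #5; the class name, import paths and patch targets are assumptions introduced here purely for illustration, not the project's actual code.

# Sketch only: the patched helper names and module paths are hypothetical
# stand-ins for whatever function inside swarmed_test_util fetches a single
# shard's output from the isolated server.
import json
import unittest
from unittest import mock  # the original code base may use the standalone mock package

from services import swarmed_test_util  # assumed import path


class ShardedTestResultsScaffoldingTest(unittest.TestCase):

    # mock.patch decorators are applied bottom-up, so the lower decorator's
    # mock arrives as the first extra argument (mock_data) and the upper one
    # as the second, unused argument (_), matching the signatures above.
    @mock.patch('services.swarmed_test_util._OtherPatchedHelper')  # hypothetical
    @mock.patch('services.swarmed_test_util._FetchShardOutput')  # hypothetical
    def testSingleShardResultIsReturnedAsIs(self, mock_data, _):
        isolated_data = [{
            'digest': 'shard1_isolated',
            'namespace': 'default-gzip',
            'isolatedserver': 'isolated_server'
        }]
        data_json = {'all_tests': ['test'], 'per_iteration_data': []}
        # The patched helper is expected to return one (json_content,
        # http_status) tuple per shard; a 200 status with a JSON body counts
        # as a successful fetch (see Examples #2 and #5).
        mock_data.return_value = (json.dumps(data_json), 200)

        result = swarmed_test_util.RetrieveShardedTestResultsFromIsolatedServer(
            isolated_data, None)

        # With a single shard, the merged result equals that shard's data.
        self.assertEqual(data_json, result)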
Example #6
    def testRetrieveShardedTestResultsFromIsolatedServerNoLog(self):
        self.assertEqual(
            [],
            swarmed_test_util.RetrieveShardedTestResultsFromIsolatedServer(
                [], None))
Example #7
def ExtractSignalsForTestFailure(failure_info, http_client):
    signals = {}

    master_name = failure_info.master_name
    builder_name = failure_info.builder_name
    build_number = failure_info.build_number
    failed_steps = failure_info.failed_steps or {}

    for step_name in failed_steps:
        failure_log = None
        if not failed_steps[step_name].supported:
            # Bail out if the step is not supported.
            continue

        # 1. Tries to get stored failure log from step.
        step = (WfStep.Get(master_name, builder_name, build_number, step_name)
                or WfStep.Create(master_name, builder_name, build_number,
                                 step_name))
        if step.log_data and step.log_data != constants.TOO_LARGE_LOG:
            failure_log = step.log_data
        else:
            json_formatted_log = True
            # 2. Gets test results.
            list_isolated_data = failed_steps[step_name].list_isolated_data
            list_isolated_data = (list_isolated_data.ToSerializable()
                                  if list_isolated_data else [])
            merged_test_results = (
                swarmed_test_util.RetrieveShardedTestResultsFromIsolatedServer(
                    list_isolated_data, http_client))
            if merged_test_results:
                test_results = test_results_util.GetTestResultObject(
                    merged_test_results)
                if test_results:
                    failure_log, _ = (
                        test_results_service
                        .GetFailedTestsInformationFromTestResult(test_results))
                    failure_log = (
                        json.dumps(failure_log)
                        if failure_log else constants.FLAKY_FAILURE_LOG)
                else:
                    failure_log = constants.WRONG_FORMAT_LOG

            if not merged_test_results or failure_log in [
                    constants.INVALID_FAILURE_LOG, constants.WRONG_FORMAT_LOG
            ]:
                # 3. Gets stdout log.
                json_formatted_log = False
                failure_log = extract_signal.GetStdoutLog(
                    master_name, builder_name, build_number, step_name,
                    http_client)

            try:
                if not failure_log:
                    raise extract_signal.FailedToGetFailureLogError(
                        'Failed to pull failure log (stdio or ninja output) of step %s of'
                        ' %s/%s/%d' %
                        (step_name, master_name, builder_name, build_number))
            except extract_signal.FailedToGetFailureLogError:
                return {}

            # Save step log in datastore and avoid downloading again during retry.
            if step.log_data != constants.TOO_LARGE_LOG:
                step.log_data = extract_signal.ExtractStorablePortionOfLog(
                    failure_log, json_formatted_log)
            step.isolated = step.isolated or json_formatted_log

            try:
                step.put()
            except Exception as e:  # pragma: no cover
                # Sometimes, the step log is too large to save in datastore.
                logging.exception(e)

        if step.isolated:
            try:
                json_failure_log = (json.loads(failure_log) if
                                    failure_log != constants.FLAKY_FAILURE_LOG
                                    else {})
            except ValueError:
                json_failure_log = {}
                logging.warning('failure_log %s is not valid JSON.' %
                                failure_log)

            signals[step_name] = {'tests': {}}
            step_signal = FailureSignal()

            for test_name, test_failure_log in json_failure_log.iteritems():
                signals[step_name]['tests'][
                    test_name] = extractors.ExtractSignal(
                        master_name, builder_name, step_name, test_name,
                        base64.b64decode(test_failure_log)).ToDict()

                # Save signals in test failure log to step level.
                step_signal.MergeFrom(signals[step_name]['tests'][test_name])

            signals[step_name]['files'] = step_signal.files
        else:
            signals[step_name] = extractors.ExtractSignal(
                master_name, builder_name, step_name, None,
                failure_log).ToDict()

    return signals
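
For reference, the signals dict returned by ExtractSignalsForTestFailure in Example #7 has roughly the shape sketched below: one entry per supported failed step, with per-test signals under 'tests' and merged file-level hints under 'files' for isolated steps, and a single step-level signal dict otherwise. The step and test names are made up, and the fields inside each entry come from FailureSignal.ToDict(), which is not part of these examples, so they are shown only illustratively.

# Illustrative shape only; step/test names are hypothetical and the per-entry
# fields depend on FailureSignal.ToDict().
signals = {
    'browser_tests': {  # isolated step: per-test signals plus merged files
        'tests': {
            'SuiteName.TestName': {
                'files': {'chrome/browser/foo.cc': [123]},  # assumed ToDict() field
                # ...other FailureSignal fields
            },
        },
        # File-level signals merged from every failed test in the step
        # (step_signal.files in the code above).
        'files': {'chrome/browser/foo.cc': [123]},
    },
    'some_other_tests': {  # non-isolated step: ExtractSignal(...).ToDict() directly
        'files': {'base/bar.cc': [45]},  # assumed ToDict() field
        # ...other FailureSignal fields
    },
}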