def setUp(self):
  super(GTestProgressReporterTest, self).setUp()
  self._mock_timer = simple_mock.MockTimer(gtest_progress_reporter)
  self._output_stream = base_test_results_unittest.TestOutputStream()
  self._reporter = gtest_progress_reporter.GTestProgressReporter(
      self._output_stream)
def testOutputSkipInformation(self):
  test_page_set = _MakePageSet()
  self._reporter = gtest_progress_reporter.GTestProgressReporter(
      self._output_stream, output_skipped_tests_summary=True)
  results = page_test_results.PageTestResults(
      progress_reporter=self._reporter)
  results.WillRunPage(test_page_set.pages[0])
  self._mock_timer.SetTime(0.007)
  results.AddValue(skip.SkipValue(
      test_page_set.pages[0], 'Page skipped for testing reason'))
  results.DidRunPage(test_page_set.pages[0])
  results.PrintSummary()
  expected = ('[ RUN      ] http://www.foo.com/\n'
              '===== SKIPPING TEST http://www.foo.com/:'
              ' Page skipped for testing reason =====\n'
              '[       OK ] http://www.foo.com/ (7 ms)\n'
              '[  PASSED  ] 1 test.\n'
              '\n'
              'Skipped pages:\n'
              'http://www.foo.com/\n'
              '\n')
  self.assertEquals(expected, ''.join(self._output_stream.output_data))
def _GetProgressReporter(output_skipped_tests_summary, suppress_gtest_report):
  if suppress_gtest_report:
    return progress_reporter.ProgressReporter()

  return gtest_progress_reporter.GTestProgressReporter(
      sys.stdout, output_skipped_tests_summary=output_skipped_tests_summary)
def CreateResults(metadata, options):
  """
  Args:
    options: Contains the options specified in AddResultsOptions.
  """
  # TODO(chrishenry): This logic prevents us from having multiple
  # OutputFormatters. We should have an output_file per OutputFormatter.
  # Maybe we should have --output-dir instead of --output-file?
  if options.output_format == 'html' and not options.output_file:
    options.output_file = os.path.join(util.GetBaseDir(), 'results.html')
  elif options.output_format == 'json' and not options.output_file:
    options.output_file = os.path.join(util.GetBaseDir(), 'results.json')

  if hasattr(options, 'output_file') and options.output_file:
    output_file = os.path.expanduser(options.output_file)
    open(output_file, 'a').close()  # Create file if it doesn't exist.
    output_stream = open(output_file, 'r+')
  else:
    output_stream = sys.stdout
  if not hasattr(options, 'output_format'):
    options.output_format = _OUTPUT_FORMAT_CHOICES[0]
  if not hasattr(options, 'output_trace_tag'):
    options.output_trace_tag = ''

  output_formatters = []
  output_skipped_tests_summary = True
  reporter = None
  if options.output_format == 'none':
    pass
  elif options.output_format == 'csv':
    output_formatters.append(csv_output_formatter.CsvOutputFormatter(
        output_stream))
  elif options.output_format == 'buildbot':
    output_formatters.append(buildbot_output_formatter.BuildbotOutputFormatter(
        output_stream, trace_tag=options.output_trace_tag))
  elif options.output_format == 'gtest':
    # TODO(chrishenry): This is here to not change the output of
    # gtest. Let's try enabling skipped tests summary for gtest test
    # results too (in a separate patch), and see if we break anything.
    output_skipped_tests_summary = False
  elif options.output_format == 'html':
    # TODO(chrishenry): We show buildbot output so that users can grep
    # through the results easily without needing to open the html
    # file. Another option for this is to output the results directly
    # in gtest-style results (via some sort of progress reporter),
    # as we plan to enable gtest-style output for all output formatters.
    output_formatters.append(buildbot_output_formatter.BuildbotOutputFormatter(
        sys.stdout, trace_tag=options.output_trace_tag))
    output_formatters.append(html_output_formatter.HtmlOutputFormatter(
        output_stream, metadata, options.reset_results,
        options.upload_results, options.browser_type,
        options.results_label, trace_tag=options.output_trace_tag))
  elif options.output_format == 'json':
    output_formatters.append(
        json_output_formatter.JsonOutputFormatter(output_stream, metadata))
  else:
    # Should never be reached. The parser enforces the choices.
    raise Exception('Invalid --output-format "%s". Valid choices are: %s'
                    % (options.output_format,
                       ', '.join(_OUTPUT_FORMAT_CHOICES)))

  if options.suppress_gtest_report:
    reporter = progress_reporter.ProgressReporter()
  else:
    reporter = gtest_progress_reporter.GTestProgressReporter(
        sys.stdout,
        output_skipped_tests_summary=output_skipped_tests_summary)

  return page_test_results.PageTestResults(
      output_formatters=output_formatters, progress_reporter=reporter)
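

# Illustrative sketch only, not part of the original module: one way
# CreateResults might be driven from a small script. It assumes that
# AddResultsOptions (referenced in the docstring above) registers the
# options CreateResults reads (--output-format, --output-file, etc.) on an
# optparse parser; the function name and 'metadata' argument here are just
# placeholders for whatever metadata object the caller already has.
def _ExampleCreateResults(metadata, argv=None):
  import optparse
  parser = optparse.OptionParser()
  AddResultsOptions(parser)  # Assumed helper that adds the results options.
  options, _ = parser.parse_args(argv)
  # CreateResults fills in defaults for any missing attributes
  # (output_format, output_trace_tag) and opens the output stream itself.
  return CreateResults(metadata, options)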