def testRunPageWithStartupUrl(self):
  """Checks that a page's startup_url is applied via a browser restart.

  The runner must copy the page's startup_url into browser_options and
  take the single-page customization path (i.e. restart the browser).
  """
  ps = page_set.PageSet()
  expectations = test_expectations.TestExpectations()
  page = page_module.Page('file://blank.html', ps,
                          base_dir=util.GetUnittestDataDir())
  page.startup_url = 'about:blank'
  ps.pages.append(page)

  class Measurement(page_measurement.PageMeasurement):
    def __init__(self):
      super(Measurement, self).__init__()
      self.browser_restarted = False

    def CustomizeBrowserOptionsForSinglePage(self, ps, options):
      # Record that the single-page customization hook fired so the test
      # can assert the restart path was taken.
      self.browser_restarted = True
      super(Measurement, self).CustomizeBrowserOptionsForSinglePage(
          ps, options)

    def MeasurePage(self, page, tab, results):
      pass

  options = options_for_unittests.GetCopy()
  options.page_repeat = 2
  options.output_format = 'none'
  # Skip silently when no browser is available in this environment.
  if not browser_finder.FindBrowser(options):
    return
  test = Measurement()
  SetUpPageRunnerArguments(options)
  page_runner.Run(test, ps, expectations, options)
  self.assertEquals('about:blank', options.browser_options.startup_url)
  self.assertTrue(test.browser_restarted)
def testRunPageWithStartupUrl(self):
  """Checks that a page's startup_url is applied via a browser restart.

  The runner must copy the page's startup_url into browser_options and
  take the single-page customization path (i.e. restart the browser).
  """
  ps = page_set.PageSet()
  expectations = test_expectations.TestExpectations()
  page = page_module.Page(
      'file://blank.html', ps, base_dir=util.GetUnittestDataDir(),
      startup_url='about:blank')
  ps.pages.append(page)

  class Measurement(page_test.PageTest):
    def __init__(self):
      super(Measurement, self).__init__()
      self.browser_restarted = False

    def CustomizeBrowserOptionsForSinglePage(self, ps, options):
      # Record that the single-page customization hook fired so the test
      # can assert the restart path was taken.
      self.browser_restarted = True
      super(Measurement, self).CustomizeBrowserOptionsForSinglePage(
          ps, options)

    def ValidateAndMeasurePage(self, page, tab, results):
      pass

  options = options_for_unittests.GetCopy()
  options.page_repeat = 2
  options.output_formats = ['none']
  options.suppress_gtest_report = True
  # Skip silently when no browser is available in this environment.
  if not browser_finder.FindBrowser(options):
    return
  test = Measurement()
  SetUpStoryRunnerArguments(options)
  results = results_options.CreateResults(EmptyMetadataForTest(), options)
  story_runner.Run(test, ps, expectations, options, results)
  self.assertEquals('about:blank', options.browser_options.startup_url)
  self.assertTrue(test.browser_restarted)
def runCredentialsTest(self,  # pylint: disable=R0201
                       credentials_backend):
  """Runs a page that requires credentials using |credentials_backend|.

  Writes a temporary credentials file, installs the backend through a
  page test, and runs the page set.

  Returns:
    True if the page test body actually executed, False otherwise.
  """
  ps = page_set.PageSet()
  expectations = test_expectations.TestExpectations()
  page = page_module.Page(
      'file://blank.html', ps, base_dir=util.GetUnittestDataDir())
  page.credentials = "test"
  ps.pages.append(page)

  # Mutable cell so the nested RunTest can report back to this scope.
  did_run = [False]

  f = None
  try:
    # delete=False: the file must survive the `with` so the runner can
    # read it by path; it is removed explicitly in the finally clause.
    with tempfile.NamedTemporaryFile(delete=False) as f:
      f.write(SIMPLE_CREDENTIALS_STRING)
      ps.credentials_path = f.name

    class TestThatInstallsCredentialsBackend(page_test.PageTest):
      def __init__(self, credentials_backend):
        super(TestThatInstallsCredentialsBackend, self).__init__('RunTest')
        self._credentials_backend = credentials_backend

      def DidStartBrowser(self, browser):
        browser.credentials.AddBackend(self._credentials_backend)

      def RunTest(self, page, tab, results):  # pylint: disable=W0613,R0201
        did_run[0] = True

    test = TestThatInstallsCredentialsBackend(credentials_backend)
    options = options_for_unittests.GetCopy()
    options.output_format = 'none'
    page_runner.Run(test, ps, expectations, options)
  finally:
    # Guard against NamedTemporaryFile itself failing, in which case `f`
    # was never bound and there is nothing to remove.
    if f is not None:
      os.remove(f.name)
  return did_run[0]
def RunMeasurement(self, measurement, ps,
                   expectations=None,
                   options=None):
  """Runs a measurement against a pageset, returning the rows its outputs.

  Args:
    measurement: the measurement (page test) to run.
    ps: the page set to run against.
    expectations: optional TestExpectations. A fresh, empty set is created
        per call when omitted; the previous def-time default was a single
        instance shared by every call (mutable-default pitfall).
    options: optional browser options; defaults to a unittest copy.
  """
  if expectations is None:
    expectations = test_expectations.TestExpectations()
  if options is None:
    options = options_for_unittests.GetCopy()
  assert options
  temp_parser = options.CreateParser()
  page_runner.AddCommandLineArgs(temp_parser)
  measurement.AddCommandLineArgs(temp_parser)
  measurement.SetArgumentDefaults(temp_parser)
  # Back-fill parser defaults for any option the caller did not set.
  defaults = temp_parser.get_default_values()
  for k, v in defaults.__dict__.items():
    if hasattr(options, k):
      continue
    setattr(options, k, v)

  measurement.CustomizeBrowserOptions(options)
  options.output_file = None
  options.output_format = 'none'
  options.output_trace_tag = None
  page_runner.ProcessCommandLineArgs(temp_parser, options)
  measurement.ProcessCommandLineArgs(temp_parser, options)
  return page_runner.Run(measurement, ps, expectations, options)
def GenerateProfiles(profile_creator_class, profile_creator_name, options):
  """Runs |profile_creator_class| and publishes the profile it generates.

  The profile is recorded into a temporary directory and only copied to
  options.output_dir/<profile_creator_name> if every page succeeds.

  Returns:
    0 on success, 1 if any page failed.
  """
  expectations = test_expectations.TestExpectations()
  test = profile_creator_class()

  # Record into a scratch directory so a failed run never clobbers a
  # previously generated profile.
  temp_output_directory = tempfile.mkdtemp()
  options.output_profile_path = temp_output_directory

  results = results_options.CreateResults(
      benchmark.BenchmarkMetadata(test.__class__.__name__), options)
  page_runner.Run(test, test.page_set, expectations, options, results)

  if results.failures:
    logging.warning('Some pages failed.')
    logging.warning('Failed pages:\n%s',
                    '\n'.join(results.pages_that_failed))
    return 1

  # Everything is a-ok, move results to final destination.
  generated_profiles_dir = os.path.abspath(options.output_dir)
  if not os.path.exists(generated_profiles_dir):
    os.makedirs(generated_profiles_dir)
  out_path = os.path.join(generated_profiles_dir, profile_creator_name)
  # Replace any stale profile from an earlier run.
  if os.path.exists(out_path):
    shutil.rmtree(out_path)
  # _IsPseudoFile filters out entries that cannot be copied as plain files
  # (defined elsewhere in this file).
  shutil.copytree(temp_output_directory, out_path, ignore=_IsPseudoFile)
  shutil.rmtree(temp_output_directory)
  sys.stderr.write("SUCCESS: Generated profile copied to: '%s'.\n" % out_path)
  return 0
def CreateExpectations(cls, ps):  # pylint: disable=W0613
  """Get the expectations this test will run with.

  By default, it will create an empty expectations set; |ps| is unused
  here. Override to generate custom expectations (e.g. per-page skips).
  """
  return test_expectations.TestExpectations()
def Record(self, results):
  """Records a fresh WPR archive for this page set into |results|."""
  archive_info = self._page_set.wpr_archive_info
  assert archive_info, (
      'Pageset archive_data_file path must be specified.')
  archive_info.AddNewTemporaryRecording()
  self._record_page_test.CustomizeBrowserOptions(self._options)
  story_runner.Run(self._record_page_test,
                   self._page_set,
                   test_expectations.TestExpectations(),
                   self._options,
                   results)
def testBrowserBeforeLaunch(self):
  """Verifies WillStartBrowser and DidStartBrowser fire, in order."""
  story_set = page_set.PageSet()
  expectations = test_expectations.TestExpectations()
  story_set.pages.append(page_module.Page(
      'file://blank.html', story_set, base_dir=util.GetUnittestDataDir()))

  class TestBeforeLaunch(page_test.PageTest):
    def __init__(self, test_method_name, action_name_to_run=''):
      super(TestBeforeLaunch, self).__init__(
          test_method_name, action_name_to_run, False)
      self._did_call_will_start = False
      self._did_call_did_start = False

    def WillStartBrowser(self, browser):
      self._did_call_will_start = True
      # TODO(simonjam): Test that the profile is available.

    def DidStartBrowser(self, browser):
      assert self._did_call_will_start
      self._did_call_did_start = True

    def RunTest(self, page, tab, results):  # pylint: disable=W0613,R0201
      assert self._did_call_did_start

  test = TestBeforeLaunch('RunTest')
  options = options_for_unittests.GetCopy()
  options.output_format = 'none'
  SetUpPageRunnerArguments(options)
  page_runner.Run(test, story_set, expectations, options)
def testOneTab(self):
  """A second tab opened at startup should be gone by test time."""
  story_set = page_set.PageSet()
  expectations = test_expectations.TestExpectations()
  story_set.pages.append(page_module.Page(
      'file://blank.html', story_set, base_dir=util.GetUnittestDataDir()))

  class TestOneTab(page_test.PageTest):
    def __init__(self, test_method_name, action_name_to_run='',
                 needs_browser_restart_after_each_page=False):
      super(TestOneTab, self).__init__(
          test_method_name, action_name_to_run,
          needs_browser_restart_after_each_page)
      self._browser = None

    def DidStartBrowser(self, browser):
      self._browser = browser
      if self._browser.supports_tab_control:
        self._browser.tabs.New()

    def RunTest(self, page, tab, results):  # pylint: disable=W0613,R0201
      if not self._browser.supports_tab_control:
        logging.warning(
            'Browser does not support tab control, skipping test')
        return
      assert len(self._browser.tabs) == 1

  test = TestOneTab('RunTest')
  options = options_for_unittests.GetCopy()
  options.output_format = 'none'
  SetUpPageRunnerArguments(options)
  page_runner.Run(test, story_set, expectations, options)
def testRunPageWithProfilingFlag(self):
  """Running with profiler='trace' should emit a trace file for the page."""
  story_set = page_set.PageSet()
  expectations = test_expectations.TestExpectations()
  story_set.pages.append(page_module.Page(
      'file://blank.html', story_set, base_dir=util.GetUnittestDataDir()))

  class Measurement(page_test.PageTest):
    pass

  options = options_for_unittests.GetCopy()
  options.output_formats = ['none']
  options.suppress_gtest_report = True
  options.reset_results = None
  options.upload_results = None
  options.results_label = None
  options.output_dir = tempfile.mkdtemp()
  options.profiler = 'trace'
  try:
    SetUpUserStoryRunnerArguments(options)
    results = results_options.CreateResults(EmptyMetadataForTest(), options)
    user_story_runner.Run(Measurement(), story_set, expectations, options,
                          results)
    self.assertEquals(1, len(GetSuccessfulPageRuns(results)))
    self.assertEquals(0, len(results.failures))
    self.assertEquals(0, len(results.all_page_specific_values))
    expected_trace = os.path.join(options.output_dir, 'blank_html.json')
    self.assertTrue(os.path.isfile(expected_trace))
  finally:
    # Always clean up the scratch output directory.
    shutil.rmtree(options.output_dir)
def testBrowserBeforeLaunch(self):
  """WillStartBrowser then DidStartBrowser must both fire before the page."""
  story_set = page_set.PageSet()
  expectations = test_expectations.TestExpectations()
  story_set.pages.append(page_module.Page(
      'file://blank.html', story_set, base_dir=util.GetUnittestDataDir()))

  class TestBeforeLaunch(page_test.PageTest):
    def __init__(self):
      super(TestBeforeLaunch, self).__init__()
      self._did_call_will_start = False
      self._did_call_did_start = False

    def WillStartBrowser(self, platform):
      self._did_call_will_start = True
      # TODO(simonjam): Test that the profile is available.

    def DidStartBrowser(self, browser):
      assert self._did_call_will_start
      self._did_call_did_start = True

    def ValidatePage(self, *_):
      assert self._did_call_did_start

  test = TestBeforeLaunch()
  options = options_for_unittests.GetCopy()
  options.output_formats = ['none']
  options.suppress_gtest_report = True
  SetUpUserStoryRunnerArguments(options)
  results = results_options.CreateResults(EmptyMetadataForTest(), options)
  user_story_runner.Run(test, story_set, expectations, options, results)
def testUserAgent(self):
  """The tablet shared page state must spoof the tablet user agent."""
  story_set = page_set.PageSet()
  expectations = test_expectations.TestExpectations()
  story_set.pages.append(page_module.Page(
      'file://blank.html', story_set, base_dir=util.GetUnittestDataDir(),
      shared_page_state_class=shared_page_state.SharedTabletPageState))

  class TestUserAgent(page_test.PageTest):
    def ValidateAndMeasurePage(self, _1, tab, _2):
      actual_user_agent = tab.EvaluateJavaScript('window.navigator.userAgent')
      expected_user_agent = user_agent.UA_TYPE_MAPPING['tablet']
      assert actual_user_agent.strip() == expected_user_agent
      # This is so we can check later that the test actually made it into
      # this function. Previously it was timing out before even getting
      # here, which should fail, but since it skipped all the asserts, it
      # slipped by.
      self.hasRun = True  # pylint: disable=W0201

  test = TestUserAgent()
  options = options_for_unittests.GetCopy()
  options.output_formats = ['none']
  options.suppress_gtest_report = True
  SetUpStoryRunnerArguments(options)
  results = results_options.CreateResults(EmptyMetadataForTest(), options)
  story_runner.Run(test, story_set, expectations, options, results)
  self.assertTrue(hasattr(test, 'hasRun') and test.hasRun)
def testNeedsBrowserRestartAfterEachPage(self):
  """With restart-per-page enabled, each page gets its own browser launch."""
  self.CaptureFormattedException()
  story_set = page_set.PageSet()
  expectations = test_expectations.TestExpectations()
  for _ in range(2):
    story_set.pages.append(page_module.Page(
        'file://blank.html', story_set, base_dir=util.GetUnittestDataDir()))

  class Test(page_test.PageTest):
    def __init__(self, *args, **kwargs):
      super(Test, self).__init__(*args, **kwargs)
      self.browser_starts = 0

    def DidStartBrowser(self, *args):
      super(Test, self).DidStartBrowser(*args)
      self.browser_starts += 1

    def ValidateAndMeasurePage(self, page, tab, results):
      pass

  options = options_for_unittests.GetCopy()
  options.output_formats = ['none']
  options.suppress_gtest_report = True
  test = Test(needs_browser_restart_after_each_page=True)
  SetUpStoryRunnerArguments(options)
  results = results_options.CreateResults(EmptyMetadataForTest(), options)
  story_runner.Run(test, story_set, expectations, options, results)
  # Two pages ran, and each one forced a fresh browser.
  self.assertEquals(2, len(GetSuccessfulPageRuns(results)))
  self.assertEquals(2, test.browser_starts)
  self.assertFormattedExceptionIsEmpty()
def testWebPageReplay(self):
  """Pages in the example-domain set should be served from the WPR archive."""
  story_set = example_domain.ExampleDomainPageSet()
  expectations = test_expectations.TestExpectations()
  body = []

  class TestWpr(page_test.PageTest):
    def ValidateAndMeasurePage(self, _, tab, __):
      body.append(tab.EvaluateJavaScript('document.body.innerText'))

  test = TestWpr()
  options = options_for_unittests.GetCopy()
  options.output_formats = ['none']
  options.suppress_gtest_report = True
  SetUpUserStoryRunnerArguments(options)
  results = results_options.CreateResults(EmptyMetadataForTest(), options)
  user_story_runner.Run(test, story_set, expectations, options, results)

  self.longMessage = True
  self.assertIn('Example Domain', body[0],
                msg='URL: %s' % story_set.pages[0].url)
  self.assertIn('Example Domain', body[1],
                msg='URL: %s' % story_set.pages[1].url)
  self.assertEquals(2, len(GetSuccessfulPageRuns(results)))
  self.assertEquals(0, len(results.failures))
def _RunPageTestThatRaisesAppCrashException(self, test, max_failures):
  """Runs |test| over five pages whose navigation always crashes the app.

  Returns the results object so callers can assert on failure counts.
  """
  class TestPage(page_module.Page):
    def RunNavigateSteps(self, _):
      raise exceptions.AppCrashException

  story_set = page_set.PageSet()
  for _ in range(5):
    story_set.AddUserStory(TestPage(
        'file://blank.html', story_set, base_dir=util.GetUnittestDataDir()))

  expectations = test_expectations.TestExpectations()
  options = options_for_unittests.GetCopy()
  options.output_formats = ['none']
  options.suppress_gtest_report = True
  SetUpUserStoryRunnerArguments(options)
  results = results_options.CreateResults(EmptyMetadataForTest(), options)
  user_story_runner.Run(test, story_set, expectations, options, results,
                        max_failures=max_failures)
  return results
def TestUseLiveSitesFlag(self, options, expect_from_archive):
  """Checks whether a page is served from WPR, per |expect_from_archive|."""
  story_set = page_set.PageSet(file_path=util.GetUnittestDataDir(),
                               archive_data_file='data/archive_blank.json')
  story_set.pages.append(page_module.Page(
      'file://blank.html', story_set, base_dir=story_set.base_dir))
  expectations = test_expectations.TestExpectations()

  class ArchiveTest(page_measurement.PageMeasurement):
    def __init__(self):
      super(ArchiveTest, self).__init__()
      self.is_page_from_archive = False
      self.archive_path_exist = True

    def WillNavigateToPage(self, page, tab):
      self.archive_path_exist = (page.archive_path
                                 and os.path.isfile(page.archive_path))
      # A live WPR server implies the page is replayed from the archive.
      self.is_page_from_archive = (
          tab.browser._wpr_server is not None)  # pylint: disable=W0212

    def MeasurePage(self, _, __, results):
      pass

  test = ArchiveTest()
  page_runner.Run(test, story_set, expectations, options)
  if expect_from_archive and not test.archive_path_exist:
    logging.warning('archive path did not exist, asserting that page '
                    'is from archive is skipped.')
    return
  self.assertEquals(expect_from_archive, test.is_page_from_archive)
def RunMeasurement(self, measurement, ps,
                   expectations=None,
                   options=None):
  """Runs a measurement against a pageset, returning the rows its outputs.

  Args:
    measurement: the measurement (page test) to run.
    ps: the page set to run against.
    expectations: optional TestExpectations. A fresh, empty set is created
        per call when omitted; the previous def-time default was a single
        instance shared by every call (mutable-default pitfall).
    options: optional browser options; defaults to a unittest copy.

  Returns:
    The results object produced by the run.
  """
  if expectations is None:
    expectations = test_expectations.TestExpectations()
  if options is None:
    options = options_for_unittests.GetCopy()
  assert options
  temp_parser = options.CreateParser()
  story_runner.AddCommandLineArgs(temp_parser)
  # Back-fill parser defaults for any option the caller did not set.
  defaults = temp_parser.get_default_values()
  for k, v in defaults.__dict__.items():
    if hasattr(options, k):
      continue
    setattr(options, k, v)

  measurement.CustomizeBrowserOptions(options.browser_options)
  options.output_file = None
  options.output_formats = ['none']
  options.suppress_gtest_report = True
  options.output_trace_tag = None
  story_runner.ProcessCommandLineArgs(temp_parser, options)
  results = results_options.CreateResults(EmptyMetadataForTest(), options)
  story_runner.Run(measurement, ps, expectations, options, results)
  return results
def testHandlingOfTestThatRaisesWithNonFatalUnknownExceptions(self):
  """A non-fatal unknown exception fails one page; the run continues."""
  story_set = page_set.PageSet()
  expectations = test_expectations.TestExpectations()
  for _ in range(2):
    story_set.pages.append(page_module.Page(
        'file://blank.html', story_set, base_dir=util.GetUnittestDataDir()))

  class ExpectedException(Exception):
    pass

  class Test(page_test.PageTest):
    def __init__(self, *args):
      super(Test, self).__init__(*args)
      self.run_count = 0

    def ValidatePage(self, *_):
      # Raise only on the very first page; subsequent pages succeed.
      old_run_count = self.run_count
      self.run_count += 1
      if old_run_count == 0:
        raise ExpectedException()

  options = options_for_unittests.GetCopy()
  options.output_format = 'none'
  test = Test()
  SetUpPageRunnerArguments(options)
  results = page_runner.Run(test, story_set, expectations, options)
  self.assertEquals(2, test.run_count)
  self.assertEquals(1, len(results.successes))
  self.assertEquals(1, len(results.failures))
def Run(self, options):
  """Installs the configured extensions into a profile and sanity-checks it.

  Raises:
    Exception: if an expected profile file is empty or any page failed.
  """
  # Installing extensions requires that the profile directory exist before
  # the browser is launched.
  if not options.browser_options.profile_dir:
    options.browser_options.profile_dir = tempfile.mkdtemp()
  options.browser_options.disable_default_apps = False
  self._PrepareExtensionInstallFiles(options.browser_options.profile_dir)

  expectations = test_expectations.TestExpectations()
  # NOTE(review): metadata is built from `profile_creator.__class__.__name__`
  # (a module-level name) rather than `self` — confirm this is intentional.
  results = results_options.CreateResults(
      benchmark.BenchmarkMetadata(profile_creator.__class__.__name__),
      options)
  extension_page_test = _ExtensionPageTest()
  extension_page_test._expected_extension_count = len(
      self._extensions_to_install)
  user_story_runner.Run(extension_page_test, extension_page_test._page_set,
                        expectations, options, results)

  self._CleanupExtensionInstallFiles()

  # Check that files on this list exist and have content.
  expected_files = [
      os.path.join('Default', 'Network Action Predictor')]
  for filename in expected_files:
    filename = os.path.join(options.output_profile_path, filename)
    if not os.path.getsize(filename) > 0:
      raise Exception("Profile not complete: %s is zero length." % filename)

  if results.failures:
    logging.warning('Some pages failed.')
    logging.warning('Failed pages:\n%s',
                    '\n'.join(map(str, results.pages_that_failed)))
    raise Exception('ExtensionsProfileCreator failed.')
def testOneTab(self):
  """A second tab opened at startup must be gone by validation time."""
  story_set = page_set.PageSet()
  expectations = test_expectations.TestExpectations()
  story_set.pages.append(page_module.Page(
      'file://blank.html', story_set, base_dir=util.GetUnittestDataDir()))

  class TestOneTab(page_test.PageTest):
    def __init__(self):
      super(TestOneTab, self).__init__()
      self._browser = None

    def DidStartBrowser(self, browser):
      self._browser = browser
      if self._browser.supports_tab_control:
        self._browser.tabs.New()

    def ValidatePage(self, *_):
      if not self._browser.supports_tab_control:
        logging.warning(
            'Browser does not support tab control, skipping test')
        return
      assert len(self._browser.tabs) == 1

  test = TestOneTab()
  options = options_for_unittests.GetCopy()
  options.output_format = 'none'
  SetUpPageRunnerArguments(options)
  page_runner.Run(test, story_set, expectations, options)
def testCleanUpPage(self):
  """CleanUpAfterPage must run even when page validation raises."""
  story_set = page_set.PageSet()
  expectations = test_expectations.TestExpectations()
  story_set.pages.append(page_module.Page(
      'file://blank.html', story_set, base_dir=util.GetUnittestDataDir()))

  class Test(page_test.PageTest):
    def __init__(self):
      super(Test, self).__init__()
      self.did_call_clean_up = False

    def ValidatePage(self, *_):
      raise Exception('Intentional failure')

    def CleanUpAfterPage(self, page, tab):
      self.did_call_clean_up = True

  test = Test()
  options = options_for_unittests.GetCopy()
  options.output_format = 'none'
  SetUpPageRunnerArguments(options)
  page_runner.Run(test, story_set, expectations, options)
  assert test.did_call_clean_up
def testBrowserBeforeLaunch(self):
  """WillStartBrowser then DidStartBrowser must both fire before the page."""
  story_set = page_set.PageSet()
  expectations = test_expectations.TestExpectations()
  story_set.pages.append(page_module.Page(
      'file://blank.html', story_set, base_dir=util.GetUnittestDataDir()))

  class TestBeforeLaunch(page_test.PageTest):
    def __init__(self):
      super(TestBeforeLaunch, self).__init__()
      self._did_call_will_start = False
      self._did_call_did_start = False

    def WillStartBrowser(self, browser):
      self._did_call_will_start = True
      # TODO(simonjam): Test that the profile is available.

    def DidStartBrowser(self, browser):
      assert self._did_call_will_start
      self._did_call_did_start = True

    def ValidatePage(self, *_):
      assert self._did_call_did_start

  test = TestBeforeLaunch()
  options = options_for_unittests.GetCopy()
  options.output_format = 'none'
  SetUpPageRunnerArguments(options)
  page_runner.Run(test, story_set, expectations, options)
def testRetryOnBrowserCrash(self):
  """A BrowserGoneException on the first attempt triggers a clean retry."""
  story_set = page_set.PageSet()
  expectations = test_expectations.TestExpectations()
  story_set.pages.append(page_module.Page(
      'file://blank.html', story_set, base_dir=util.GetUnittestDataDir()))

  class CrashyMeasurement(page_measurement.PageMeasurement):
    has_crashed = False

    def MeasurePage(self, *_):
      # Crash exactly once; the retry should then succeed.
      if not self.has_crashed:
        self.has_crashed = True
        raise exceptions.BrowserGoneException()

  options = options_for_unittests.GetCopy()
  options.output_format = 'csv'
  SetUpPageRunnerArguments(options)
  results = page_runner.Run(CrashyMeasurement(), story_set, expectations,
                            options)
  self.assertEquals(1, len(results.successes))
  self.assertEquals(0, len(results.failures))
  self.assertEquals(0, len(results.errors))
def testUserAgent(self):
  """A page set with user_agent_type 'tablet' must spoof the tablet UA."""
  story_set = page_set.PageSet()
  expectations = test_expectations.TestExpectations()
  story_set.pages.append(page_module.Page(
      'file://blank.html', story_set, base_dir=util.GetUnittestDataDir()))
  story_set.user_agent_type = 'tablet'

  class TestUserAgent(page_test.PageTest):
    def ValidatePage(self, _1, tab, _2):
      actual_user_agent = tab.EvaluateJavaScript(
          'window.navigator.userAgent')
      expected_user_agent = user_agent.UA_TYPE_MAPPING['tablet']
      assert actual_user_agent.strip() == expected_user_agent
      # This is so we can check later that the test actually made it into
      # this function. Previously it was timing out before even getting
      # here, which should fail, but since it skipped all the asserts, it
      # slipped by.
      self.hasRun = True  # pylint: disable=W0201

  test = TestUserAgent()
  options = options_for_unittests.GetCopy()
  options.output_format = 'none'
  SetUpPageRunnerArguments(options)
  page_runner.Run(test, story_set, expectations, options)
  self.assertTrue(hasattr(test, 'hasRun') and test.hasRun)
def testCleanUpPage(self):
  """CleanUpAfterPage must run even when validation raises intentionally."""
  story_set = page_set.PageSet()
  expectations = test_expectations.TestExpectations()
  story_set.pages.append(page_module.Page(
      'file://blank.html', story_set, base_dir=util.GetUnittestDataDir()))

  class Test(page_test.PageTest):
    def __init__(self):
      super(Test, self).__init__()
      self.did_call_clean_up = False

    def ValidatePage(self, *_):
      raise exceptions.IntentionalException

    def CleanUpAfterPage(self, page, tab):
      self.did_call_clean_up = True

  test = Test()
  options = options_for_unittests.GetCopy()
  options.output_formats = ['none']
  options.suppress_gtest_report = True
  SetUpPageRunnerArguments(options)
  results = results_options.CreateResults(EmptyMetadataForTest(), options)
  page_runner.Run(test, story_set, expectations, options, results)
  assert test.did_call_clean_up
def testOneTab(self):
  """An extra tab created in DidStartBrowser is closed before validation."""
  story_set = page_set.PageSet()
  expectations = test_expectations.TestExpectations()
  story_set.pages.append(page_module.Page(
      'file://blank.html', story_set, base_dir=util.GetUnittestDataDir()))

  class TestOneTab(page_test.PageTest):
    def __init__(self):
      super(TestOneTab, self).__init__()
      self._browser = None

    def DidStartBrowser(self, browser):
      self._browser = browser
      self._browser.tabs.New()

    def ValidatePage(self, *_):
      assert len(self._browser.tabs) == 1

  test = TestOneTab()
  options = options_for_unittests.GetCopy()
  options.output_formats = ['none']
  options.suppress_gtest_report = True
  SetUpPageRunnerArguments(options)
  results = results_options.CreateResults(EmptyMetadataForTest(), options)
  page_runner.Run(test, story_set, expectations, options, results)
def testDiscardFirstResult(self):
  """Measurements with discard_first_result drop each page's first result.

  Exercises several page_repeat/pageset_repeat combinations and checks the
  surviving success and value counts.
  """
  ps = page_set.PageSet()
  expectations = test_expectations.TestExpectations()
  ps.pages.append(page_module.Page(
      'file://blank.html', ps, base_dir=util.GetUnittestDataDir()))
  ps.pages.append(page_module.Page(
      'file://blank.html', ps, base_dir=util.GetUnittestDataDir()))

  class Measurement(page_test.PageTest):
    @property
    def discard_first_result(self):
      return True

    def ValidateAndMeasurePage(self, page, _, results):
      results.AddValue(string.StringValue(page, 'test', 't', page.url))

  options = options_for_unittests.GetCopy()
  options.output_formats = ['none']
  options.suppress_gtest_report = True
  options.reset_results = None
  options.upload_results = None
  options.results_label = None

  # Single run per page: every result is the "first" and gets discarded.
  options.page_repeat = 1
  options.pageset_repeat = 1
  SetUpPageRunnerArguments(options)
  results = results_options.CreateResults(EmptyMetadataForTest(), options)
  page_runner.Run(Measurement(), ps, expectations, options, results)
  self.assertEquals(0, len(GetSuccessfulPageRuns(results)))
  self.assertEquals(0, len(results.failures))
  self.assertEquals(0, len(results.all_page_specific_values))

  # Two passes over the set: one result per page survives the discard.
  options.page_repeat = 1
  options.pageset_repeat = 2
  SetUpPageRunnerArguments(options)
  results = results_options.CreateResults(EmptyMetadataForTest(), options)
  page_runner.Run(Measurement(), ps, expectations, options, results)
  self.assertEquals(2, len(GetSuccessfulPageRuns(results)))
  self.assertEquals(0, len(results.failures))
  self.assertEquals(2, len(results.all_page_specific_values))

  # Two repeats per page: again one result per page survives.
  options.page_repeat = 2
  options.pageset_repeat = 1
  SetUpPageRunnerArguments(options)
  results = results_options.CreateResults(EmptyMetadataForTest(), options)
  page_runner.Run(Measurement(), ps, expectations, options, results)
  self.assertEquals(2, len(GetSuccessfulPageRuns(results)))
  self.assertEquals(0, len(results.failures))
  self.assertEquals(2, len(results.all_page_specific_values))

  # Same single-run discard behavior with the html output formatter.
  options.output_formats = ['html']
  options.suppress_gtest_report = True
  options.page_repeat = 1
  options.pageset_repeat = 1
  SetUpPageRunnerArguments(options)
  results = results_options.CreateResults(EmptyMetadataForTest(), options)
  page_runner.Run(Measurement(), ps, expectations, options, results)
  self.assertEquals(0, len(GetSuccessfulPageRuns(results)))
  self.assertEquals(0, len(results.failures))
  self.assertEquals(0, len(results.all_page_specific_values))
def testRetryOnBrowserCrash(self):
  """Values recorded before a browser crash are discarded; the retry counts."""
  story_set = page_set.PageSet()
  expectations = test_expectations.TestExpectations()
  story_set.pages.append(page_module.Page(
      'file://blank.html', story_set, base_dir=util.GetUnittestDataDir()))

  class CrashyMeasurement(page_test.PageTest):
    has_crashed = False

    def ValidateAndMeasurePage(self, page, tab, results):
      # This value should be discarded on the first run when the
      # browser crashed.
      results.AddValue(
          string.StringValue(page, 'test', 't', str(self.has_crashed)))
      if not self.has_crashed:
        self.has_crashed = True
        raise exceptions.BrowserGoneException(tab.browser)

  options = options_for_unittests.GetCopy()
  options.output_formats = ['csv']
  options.suppress_gtest_report = True
  SetUpPageRunnerArguments(options)
  results = results_options.CreateResults(EmptyMetadataForTest(), options)
  page_runner.Run(CrashyMeasurement(), story_set, expectations, options,
                  results)
  # One successful run, and only the post-crash ('True') value remains.
  self.assertEquals(1, len(GetSuccessfulPageRuns(results)))
  self.assertEquals(0, len(results.failures))
  self.assertEquals(1, len(results.all_page_specific_values))
  self.assertEquals(
      'True', results.all_page_specific_values[0].GetRepresentativeString())
def _testMaxFailuresOptionIsRespectedAndOverridable(
    self, num_failing_user_stories, runner_max_failures,
    options_max_failures, expected_num_failures):
  """Runs always-failing stories and checks how many run before the cap.

  options.max_failures (when set) should override the runner's
  max_failures argument; stories past the cap must never run.
  """
  # Minimal SharedState that just runs the current story in-process.
  class SimpleSharedState(
      shared_state.SharedState):
    _fake_platform = FakePlatform()
    _current_user_story = None

    @property
    def platform(self):
      return self._fake_platform

    def WillRunUserStory(self, story):
      self._current_user_story = story

    def RunUserStory(self, results):
      self._current_user_story.Run()

    def DidRunUserStory(self, results):
      pass

    def GetTestExpectationAndSkipValue(self, expectations):
      return 'pass', None

    def TearDownState(self):
      pass

  # Story that records it ran and then always fails.
  class FailingUserStory(user_story.UserStory):
    def __init__(self):
      super(FailingUserStory, self).__init__(
          shared_state_class=SimpleSharedState,
          is_local=True)
      self.was_run = False

    def Run(self):
      self.was_run = True
      raise page_test.Failure

  self.SuppressExceptionFormatting()
  uss = user_story_set.UserStorySet()
  for _ in range(num_failing_user_stories):
    uss.AddUserStory(FailingUserStory())

  options = _GetOptionForUnittest()
  options.output_formats = ['none']
  options.suppress_gtest_report = True
  # The command-line option, when present, takes precedence over the
  # runner's max_failures argument.
  if options_max_failures:
    options.max_failures = options_max_failures

  results = results_options.CreateResults(EmptyMetadataForTest(), options)
  story_runner.Run(
      DummyTest(), uss, test_expectations.TestExpectations(),
      options, results, max_failures=runner_max_failures)
  self.assertEquals(0, GetNumberOfSuccessfulPageRuns(results))
  self.assertEquals(expected_num_failures, len(results.failures))
  # Stories past the failure cap must not have been run at all.
  for ii, story in enumerate(uss.user_stories):
    self.assertEqual(story.was_run, ii < expected_num_failures)
def setUp(self):
  """Captures stdout and builds fresh options/expectations/results fixtures."""
  # Redirect stdout into a buffer so tests can inspect runner output; the
  # real stream is kept in actual_stdout (presumably restored in tearDown,
  # which is not visible in this chunk).
  self.fake_stdout = StringIO.StringIO()
  self.actual_stdout = sys.stdout
  sys.stdout = self.fake_stdout
  self.options = _GetOptionForUnittest()
  self.expectations = test_expectations.TestExpectations()
  self.results = results_options.CreateResults(
      EmptyMetadataForTest(), self.options)
  # Set by tests that stub out story_runner logging.
  self._story_runner_logging_stub = None