def baseTest(self, mockInitDependencyManager, test_filter, failures, successes):
  """Runs SimpleTest through browser_test_runner and checks the JSON results.

  Args:
    mockInitDependencyManager: mock of the dependency-manager initializer;
        asserted to have been called with the client configs ['a', 'b', 'c'].
    test_filter: string forwarded as the --test-filter argument.
    failures: expected list under the 'failures' key of the JSON output.
    successes: expected list under the 'successes' key of the JSON output.
  """
  options = browser_test_runner.TestRunOptions()
  # Suppress printing out information for passing tests.
  options.verbosity = 0
  config = project_config.ProjectConfig(
      top_level_dir=os.path.join(util.GetTelemetryDir(), 'examples'),
      client_configs=['a', 'b', 'c'],
      benchmark_dirs=[
          os.path.join(util.GetTelemetryDir(), 'examples', 'browser_tests')]
  )
  # delete=False and an immediate close() so the runner can reopen the file
  # by name (Windows forbids a second open of an open NamedTemporaryFile).
  temp_file = tempfile.NamedTemporaryFile(delete=False)
  temp_file.close()
  temp_file_name = temp_file.name
  try:
    browser_test_runner.Run(
        config, options,
        ['SimpleTest',
         '--write-abbreviated-json-results-to=%s' % temp_file_name,
         '--test-filter=%s' % test_filter])
    mockInitDependencyManager.assert_called_with(['a', 'b', 'c'])
    with open(temp_file_name) as f:
      test_result = json.load(f)
    # assertEqual, not the deprecated assertEquals alias (removed in 3.12).
    self.assertEqual(test_result['failures'], failures)
    self.assertEqual(test_result['successes'], successes)
    self.assertEqual(test_result['valid'], True)
  finally:
    os.remove(temp_file_name)
def testSimpleIntegrationUnittest(self, mockInitDependencyManager):
  """Runs simple_integration_unittest and verifies its abbreviated JSON.

  Checks the exact expected failure/success partition and that the browser
  was restarted the expected number of times.

  Args:
    mockInitDependencyManager: mock injected by the enclosing patch
        decorator; unused directly here.
  """
  options = browser_test_runner.TestRunOptions()
  # Suppress printing out information for passing tests.
  options.verbosity = 0
  config = gpu_project_config.CONFIG
  # delete=False and an immediate close() so the runner can reopen the file
  # by name (Windows forbids a second open of an open NamedTemporaryFile).
  temp_file = tempfile.NamedTemporaryFile(delete=False)
  temp_file.close()
  temp_file_name = temp_file.name
  try:
    browser_test_runner.Run(config, options, [
        'simple_integration_unittest',
        '--write-abbreviated-json-results-to=%s' % temp_file_name
    ])
    with open(temp_file_name) as f:
      test_result = json.load(f)
    # assertEqual, not the deprecated assertEquals alias (removed in 3.12).
    self.assertEqual(test_result['failures'],
                     ['unexpected_error', 'unexpected_failure'])
    self.assertEqual(test_result['successes'],
                     ['expected_failure', 'expected_flaky'])
    self.assertEqual(test_result['valid'], True)
    # It might be nice to be more precise about the order of operations
    # with these browser restarts, but this is at least a start.
    self.assertEqual(SimpleIntegrationUnittest._num_browser_starts, 5)
  finally:
    os.remove(temp_file_name)
def baseShardingTest(self, total_shards, shard_index, failures, successes):
  """Runs SimpleShardingTest on one shard and checks the JSON results.

  Args:
    total_shards: int forwarded as --total-shards.
    shard_index: int forwarded as --shard-index.
    failures: expected list under the 'failures' key of the JSON output.
    successes: expected list under the 'successes' key of the JSON output.
  """
  options = browser_test_runner.TestRunOptions()
  # Suppress printing out information for passing tests.
  options.verbosity = 0
  config = project_config.ProjectConfig(
      top_level_dir=os.path.join(util.GetTelemetryDir(), 'examples'),
      client_configs=['a', 'b', 'c'],
      benchmark_dirs=[
          os.path.join(util.GetTelemetryDir(), 'examples', 'browser_tests')]
  )
  # delete=False and an immediate close() so the runner can reopen the file
  # by name (Windows forbids a second open of an open NamedTemporaryFile).
  temp_file = tempfile.NamedTemporaryFile(delete=False)
  temp_file.close()
  temp_file_name = temp_file.name
  try:
    browser_test_runner.Run(
        config, options,
        ['SimpleShardingTest',
         '--write-abbreviated-json-results-to=%s' % temp_file_name,
         '--total-shards=%d' % total_shards,
         '--shard-index=%d' % shard_index])
    with open(temp_file_name) as f:
      test_result = json.load(f)
    # assertEqual, not the deprecated assertEquals alias (removed in 3.12).
    self.assertEqual(test_result['failures'], failures)
    self.assertEqual(test_result['successes'], successes)
    self.assertEqual(test_result['valid'], True)
  finally:
    os.remove(temp_file_name)
def testJsonOutputFormat(self, mockInitDependencyManager):
  """Runs SimpleTest and verifies the abbreviated JSON output format.

  Asserts the exact fully-qualified failure names and that the result file
  is marked valid.

  Args:
    mockInitDependencyManager: mock of the dependency-manager initializer;
        asserted to have been called with the client configs ['a', 'b', 'c'].
  """
  options = browser_test_runner.TestRunOptions()
  config = project_config.ProjectConfig(
      top_level_dir=os.path.join(util.GetTelemetryDir(), 'examples'),
      client_configs=['a', 'b', 'c'],
      benchmark_dirs=[
          os.path.join(util.GetTelemetryDir(), 'examples', 'browser_tests')
      ])
  # delete=False and an immediate close() so the runner can reopen the file
  # by name (Windows forbids a second open of an open NamedTemporaryFile).
  temp_file = tempfile.NamedTemporaryFile(delete=False)
  temp_file.close()
  temp_file_name = temp_file.name
  try:
    browser_test_runner.Run(config, options, [
        'SimpleTest',
        '--write-abbreviated-json-results-to=%s' % temp_file_name
    ])
    mockInitDependencyManager.assert_called_with(['a', 'b', 'c'])
    with open(temp_file_name) as f:
      test_result = json.load(f)
    # assertEqual, not the deprecated assertEquals alias (removed in 3.12).
    self.assertEqual(test_result['failures'], [
        'browser_tests.simple_numeric_test.SimpleTest.multiplier_simple_2',
        'browser_tests.simple_numeric_test.SimpleTest.add_1_and_2',
        'browser_tests.simple_numeric_test.SimpleTest.add_7_and_3',
        'browser_tests.simple_numeric_test.SimpleTest.testSimple'
    ])
    self.assertEqual(test_result['valid'], True)
  finally:
    os.remove(temp_file_name)
def main():
  """Runs the example browser tests, forwarding command-line arguments.

  Returns:
    The exit code produced by browser_test_runner.Run.
  """
  run_options = browser_test_runner.TestRunOptions()
  this_dir = os.path.dirname(__file__)
  project = project_config.ProjectConfig(
      top_level_dir=this_dir,
      benchmark_dirs=[os.path.join(this_dir, 'browser_tests')])
  return browser_test_runner.Run(project, run_options, sys.argv[1:])
def main():
  """Runs the GPU browser tests and postprocesses the abbreviated JSON.

  Returns:
    The exit code from browser_test_runner.Run, suitable for sys.exit().
  """
  options = browser_test_runner.TestRunOptions()
  rest_args = sys.argv[1:]
  retval = browser_test_runner.Run(gpu_project_config.CONFIG, options,
                                   rest_args)
  # Postprocess the outputted JSON to trim all of the prefixes from
  # the test names, to keep them as similar to the old form as
  # possible -- and keep them from getting crazily long.
  parser = argparse.ArgumentParser(description='Temporary argument parser')
  parser.add_argument('--write-abbreviated-json-results-to',
                      metavar='FILENAME',
                      action='store',
                      help=('Full path for json results'))
  # parse_known_args ignores the runner's own flags instead of erroring.
  option, _ = parser.parse_known_args(rest_args)
  if option.write_abbreviated_json_results_to:
    PostprocessJSON(option.write_abbreviated_json_results_to)
  # Bug fix: retval was previously computed but never returned, so a caller
  # doing sys.exit(main()) always exited 0 even when tests failed. The
  # sibling main() entry points in this project do return the runner's code.
  return retval
def _RunIntegrationTest(self, test_name, failures, successes):
  """Runs one integration test suite and checks its abbreviated JSON output.

  Args:
    test_name: name of the test suite to pass to browser_test_runner.Run.
    failures: expected list under the 'failures' key of the JSON output.
    successes: expected list under the 'successes' key of the JSON output.
  """
  options = browser_test_runner.TestRunOptions()
  # Suppress printing out information for passing tests.
  options.verbosity = 0
  config = gpu_project_config.CONFIG
  # delete=False and an immediate close() so the runner can reopen the file
  # by name (Windows forbids a second open of an open NamedTemporaryFile).
  temp_file = tempfile.NamedTemporaryFile(delete=False)
  temp_file.close()
  temp_file_name = temp_file.name
  try:
    browser_test_runner.Run(
        config, options,
        [test_name,
         '--write-abbreviated-json-results-to=%s' % temp_file_name])
    with open(temp_file_name) as f:
      test_result = json.load(f)
    # assertEqual, not the deprecated assertEquals alias (removed in 3.12).
    self.assertEqual(test_result['failures'], failures)
    self.assertEqual(test_result['successes'], successes)
    self.assertEqual(test_result['valid'], True)
  finally:
    os.remove(temp_file_name)
def main():
  """Runs the GPU browser tests, forwarding command-line arguments.

  Returns:
    The exit code produced by browser_test_runner.Run.
  """
  run_options = browser_test_runner.TestRunOptions()
  cli_args = sys.argv[1:]
  return browser_test_runner.Run(gpu_project_config.CONFIG, run_options,
                                 cli_args)