def create_runner(self, args=None, driver_class=TestDriver):
    """Build a PerfTestsRunner wired to a TestPort with canned perf-test directories.

    Args:
        args: command-line argument list for PerfTestsRunner (defaults to no args).
        driver_class: class instantiated by the port for every create_driver call.

    Returns:
        A (runner, test_port) tuple.
    """
    # Fresh list per call — a mutable default would be shared across calls.
    args = [] if args is None else args
    options, _ = PerfTestsRunner._parse_args(args)
    test_port = TestPort(host=MockHost(), options=options)
    test_port.create_driver = lambda worker_number=None, no_timeout=False: driver_class()
    runner = PerfTestsRunner(args=args, port=test_port)
    # Pre-create the directories the runner scans for perf tests.
    for directory in ('inspector', 'Bindings', 'Parser'):
        runner._host.filesystem.maybe_make_directory(runner._base_path, directory)
    return runner, test_port
def create_runner(self, args=None, driver_class=TestDriver):
    """Build a PerfTestsRunner wired to a TestPort with canned perf-test directories.

    Args:
        args: command-line argument list for PerfTestsRunner (defaults to no args).
        driver_class: class instantiated by the port for every create_driver call.

    Returns:
        A (runner, test_port) tuple.
    """
    # Fresh list per call — a mutable default would be shared across calls.
    args = [] if args is None else args
    options, _ = PerfTestsRunner._parse_args(args)
    test_port = TestPort(host=MockHost(), options=options)
    test_port.create_driver = lambda worker_number=None, no_timeout=False: driver_class()
    runner = PerfTestsRunner(args=args, port=test_port)
    # Pre-create the directories the runner scans for perf tests.
    for directory in ('inspector', 'Bindings', 'Parser'):
        runner._host.filesystem.maybe_make_directory(runner._base_path, directory)
    return runner, test_port
def create_runner(self, buildbot_output=None, args=None, regular_output=None):
    """Build a PerfTestsRunner capturing output into ArrayStreams.

    Args:
        buildbot_output: stream for buildbot-facing output (ArrayStream by default).
        args: command-line argument list for PerfTestsRunner (defaults to no args).
        regular_output: stream for regular output (ArrayStream by default).

    Returns:
        The configured PerfTestsRunner.
    """
    buildbot_output = buildbot_output or array_stream.ArrayStream()
    regular_output = regular_output or array_stream.ArrayStream()
    # Fresh list per call — a mutable default would be shared across calls.
    args = [] if args is None else args
    options, _ = PerfTestsRunner._parse_args(args)
    test_port = TestPort(host=MockHost(), options=options)
    test_port.create_driver = lambda worker_number=None, no_timeout=False: MainTest.TestDriver()
    runner = PerfTestsRunner(regular_output, buildbot_output, args=args, port=test_port)
    # Pre-create the directories the runner scans for perf tests.
    for directory in ('inspector', 'Bindings', 'Parser'):
        runner._host.filesystem.maybe_make_directory(runner._base_path, directory)
    return runner
def create_runner(self, buildbot_output=None, args=None, regular_output=None, driver_class=TestDriver):
    """Build a PerfTestsRunner capturing output into ArrayStreams.

    Args:
        buildbot_output: stream for buildbot-facing output (ArrayStream by default).
        args: command-line argument list for PerfTestsRunner (defaults to no args).
        regular_output: stream for regular output (ArrayStream by default).
        driver_class: class instantiated by the port for every create_driver call.

    Returns:
        The configured PerfTestsRunner.
    """
    buildbot_output = buildbot_output or array_stream.ArrayStream()
    regular_output = regular_output or array_stream.ArrayStream()
    # Fresh list per call — a mutable default would be shared across calls.
    args = [] if args is None else args
    options, _ = PerfTestsRunner._parse_args(args)
    test_port = TestPort(host=MockHost(), options=options)
    test_port.create_driver = lambda worker_number=None, no_timeout=False: driver_class()
    runner = PerfTestsRunner(regular_output, buildbot_output, args=args, port=test_port)
    # Pre-create the directories the runner scans for perf tests.
    for directory in ('inspector', 'Bindings', 'Parser'):
        runner._host.filesystem.maybe_make_directory(runner._base_path, directory)
    return runner
def create_runner(self, args=None, driver_class=TestDriver):
    """Build a PerfTestsRunner whose output JSON can be read back via load_output_json.

    Args:
        args: command-line argument list for PerfTestsRunner (defaults to no args).
        driver_class: class instantiated by the port for every create_driver call.

    Returns:
        A (runner, test_port) tuple; runner.load_output_json parses the JSON the
        runner wrote to its output path.
    """
    # Fresh list per call — a mutable default would be shared across calls.
    args = [] if args is None else args
    options, _ = PerfTestsRunner._parse_args(args)
    test_port = TestPort(host=MockHost(), options=options)
    test_port.create_driver = lambda worker_number=None, no_timeout=False: driver_class()
    runner = PerfTestsRunner(args=args, port=test_port)
    filesystem = runner._host.filesystem
    # Pre-create the directories the runner scans for perf tests.
    for directory in ("inspector", "Bindings", "Parser"):
        filesystem.maybe_make_directory(runner._base_path, directory)
    runner.load_output_json = lambda: json.loads(filesystem.read_text_file(runner._output_json_path()))
    return runner, test_port
class KeyCompareTest(unittest.TestCase):
    """Checks that TestPort.test_key sorts test paths in natural order."""

    def setUp(self):
        self._port = TestPort(MockSystemHost())

    def assert_cmp(self, x, y, result):
        """Assert that comparing the keys of x and y yields the expected sign."""
        key_x = self._port.test_key(x)
        key_y = self._port.test_key(y)
        self.assertEqual(cmp(key_x, key_y), result)

    def test_test_key(self):
        cases = [
            ('/a', '/a', 0),
            ('/a', '/b', -1),
            ('/a2', '/a10', -1),           # numeric segments compare numerically
            ('/a2/foo', '/a10/foo', -1),
            ('/a/foo11', '/a/foo2', 1),
            ('/ab', '/a/a/b', -1),
            ('/a/a/b', '/ab', 1),
            ('/foo-bar/baz', '/foo/baz', -1),
        ]
        for x, y, expected in cases:
            self.assert_cmp(x, y, expected)
class NaturalCompareTest(unittest.TestCase):
    """Checks that TestPort._natural_sort_key orders strings with embedded numbers."""

    def setUp(self):
        self._port = TestPort(MockSystemHost())

    def assert_cmp(self, x, y, result):
        """Assert that comparing the natural sort keys of x and y yields the expected sign."""
        key_x = self._port._natural_sort_key(x)
        key_y = self._port._natural_sort_key(y)
        self.assertEqual(cmp(key_x, key_y), result)

    def test_natural_compare(self):
        cases = [
            ('a', 'a', 0),
            ('ab', 'a', 1),
            ('a', 'ab', -1),
            ('', '', 0),
            ('', 'ab', -1),
            ('1', '2', -1),
            ('2', '1', 1),
            ('1', '10', -1),              # digit runs compare numerically
            ('2', '10', -1),
            ('foo_1.html', 'foo_2.html', -1),
            ('foo_1.1.html', 'foo_2.html', -1),
            ('foo_1.html', 'foo_10.html', -1),
            ('foo_2.html', 'foo_10.html', -1),
            ('foo_23.html', 'foo_10.html', 1),
            ('foo_23.html', 'foo_100.html', -1),
        ]
        for x, y, expected in cases:
            self.assert_cmp(x, y, expected)
class KeyCompareTest(unittest.TestCase):
    """Checks that TestPort.test_key sorts test paths in natural order."""

    def setUp(self):
        self._port = TestPort(MockSystemHost())

    def assert_cmp(self, x, y, result):
        """Assert that comparing the keys of x and y yields the expected sign."""
        key_x = self._port.test_key(x)
        key_y = self._port.test_key(y)
        self.assertEqual(cmp(key_x, key_y), result)

    def test_test_key(self):
        cases = [
            ("/a", "/a", 0),
            ("/a", "/b", -1),
            ("/a2", "/a10", -1),           # numeric segments compare numerically
            ("/a2/foo", "/a10/foo", -1),
            ("/a/foo11", "/a/foo2", 1),
            ("/ab", "/a/a/b", -1),
            ("/a/a/b", "/ab", 1),
            ("/foo-bar/baz", "/foo/baz", -1),
        ]
        for x, y, expected in cases:
            self.assert_cmp(x, y, expected)
def make_port(self, executive=None, with_tests=False, **kwargs):
    """Create a port backed by a MockSystemHost.

    Args:
        executive: optional executive to install on the mock host.
        with_tests: when true, seed the mock filesystem with unit tests and
            return a TestPort; otherwise return a plain Port.
        **kwargs: forwarded to the port constructor.
    """
    host = MockSystemHost()
    if executive:
        host.executive = executive
    if not with_tests:
        return Port(host, **kwargs)
    add_unit_tests_to_mock_filesystem(host.filesystem)
    return TestPort(host, **kwargs)
def create_runner(self, args=None):
    """Build a PerfTestsRunner wired to a TestPort with canned perf-test directories.

    Args:
        args: command-line argument list for PerfTestsRunner (defaults to no args).

    Returns:
        A (runner, test_port) tuple.
    """
    # Fresh list per call — a mutable default would be shared across calls.
    args = [] if args is None else args
    options, _ = PerfTestsRunner._parse_args(args)
    test_port = TestPort(host=MockHost(), options=options)
    runner = PerfTestsRunner(args=args, port=test_port)
    # Pre-create the directories the runner scans for perf tests.
    for directory in ('inspector', 'Bindings', 'Parser'):
        runner._host.filesystem.maybe_make_directory(runner._base_path, directory)
    return runner, test_port
def test_read_stdin_path(self):
    """A 'StdinPath:' header in the server output populates the block's stdin_path."""
    port = TestPort(MockHost())
    driver = BrowserTestDriver(port, 0, pixel_tests=True)
    server_lines = ['StdinPath: /foo/bar', '#EOF']
    driver._server_process = MockServerProcess(lines=server_lines)
    block = driver._read_block(0)
    self.assertEqual(block.stdin_path, '/foo/bar')
    driver._stdin_directory = None
def run_test(self, failures=None, files=None):
    """Write a test result with empty driver outputs and return the files written.

    Args:
        failures: optional list of failure objects to record.
        files: optional initial contents for the mock filesystem.

    Returns:
        The mock filesystem's written_files mapping.
    """
    host = MockSystemHost()
    host.filesystem.files = files or {}
    port = TestPort(host=host, port_name='test-mac-mac10.11', options=optparse.Values())
    # Both actual and expected outputs are empty: the failures list alone
    # drives what gets written.
    empty_actual = DriverOutput(text='', image=None, image_hash=None, audio=None)
    empty_expected = DriverOutput(text='', image=None, image_hash=None, audio=None)
    write_test_result(host.filesystem, port, '/tmp', 'foo.html',
                      empty_actual, empty_expected, failures or [])
    return host.filesystem.written_files
def get_shards(self, num_workers, fully_parallel, test_list=None, max_locked_shards=1):
    """Shard the given tests (or self.test_list) and return the shards."""
    port = TestPort(MockSystemHost())
    self.sharder = Sharder(port.split_test, max_locked_shards)
    tests = test_list or self.test_list
    inputs = [self.get_test_input(test) for test in tests]
    return self.sharder.shard_tests(inputs, num_workers, fully_parallel)
class NaturalCompareTest(unittest.TestCase):
    """Checks that TestPort._natural_sort_key orders strings with embedded numbers."""

    def setUp(self):
        self._port = TestPort(MockSystemHost())

    def assert_cmp(self, x, y, result):
        """Assert that comparing the natural sort keys of x and y yields the expected sign."""
        key_x = self._port._natural_sort_key(x)
        key_y = self._port._natural_sort_key(y)
        self.assertEqual(cmp(key_x, key_y), result)

    def test_natural_compare(self):
        cases = [
            ("a", "a", 0),
            ("ab", "a", 1),
            ("a", "ab", -1),
            ("", "", 0),
            ("", "ab", -1),
            ("1", "2", -1),
            ("2", "1", 1),
            ("1", "10", -1),              # digit runs compare numerically
            ("2", "10", -1),
            ("foo_1.html", "foo_2.html", -1),
            ("foo_1.1.html", "foo_2.html", -1),
            ("foo_1.html", "foo_10.html", -1),
            ("foo_2.html", "foo_10.html", -1),
            ("foo_23.html", "foo_10.html", 1),
            ("foo_23.html", "foo_100.html", -1),
        ]
        for x, y, expected in cases:
            self.assert_cmp(x, y, expected)
class NaturalCompareTest(unittest.TestCase):
    """Checks that TestPort._natural_sort_key orders strings with embedded numbers."""

    def setUp(self):
        self._port = TestPort(MockSystemHost())

    def assert_cmp(self, x, y, result):
        """Assert that comparing the natural sort keys of x and y yields the expected sign."""
        key_x = self._port._natural_sort_key(x)
        key_y = self._port._natural_sort_key(y)
        self.assertEqual(cmp(key_x, key_y), result)

    def test_natural_compare(self):
        cases = [
            ('a', 'a', 0),
            ('ab', 'a', 1),
            ('a', 'ab', -1),
            ('', '', 0),
            ('', 'ab', -1),
            ('1', '2', -1),
            ('2', '1', 1),
            ('1', '10', -1),              # digit runs compare numerically
            ('2', '10', -1),
            ('foo_1.html', 'foo_2.html', -1),
            ('foo_1.1.html', 'foo_2.html', -1),
            ('foo_1.html', 'foo_10.html', -1),
            ('foo_2.html', 'foo_10.html', -1),
            ('foo_23.html', 'foo_10.html', 1),
            ('foo_23.html', 'foo_100.html', -1),
        ]
        for x, y, expected in cases:
            self.assert_cmp(x, y, expected)
def test_parse_reftest_list(self):
    """_parse_reftest_list groups (relation, reference) pairs per test, skipping
    blank lines and '#' comments."""
    port = TestPort(MockHost())
    reftest_lines = [
        "== test.html test-ref.html",
        "",
        "# some comment",
        "!= test-2.html test-notref.html # more comments",
        "== test-3.html test-ref.html",
        "== test-3.html test-ref2.html",
        "!= test-3.html test-notref.html",
    ]
    port.host.filesystem.files['bar/reftest.list'] = "\n".join(reftest_lines)
    parsed = Port._parse_reftest_list(port.host.filesystem, 'bar')
    expected = {
        'bar/test.html': [('==', 'bar/test-ref.html')],
        'bar/test-2.html': [('!=', 'bar/test-notref.html')],
        'bar/test-3.html': [('==', 'bar/test-ref.html'),
                            ('==', 'bar/test-ref2.html'),
                            ('!=', 'bar/test-notref.html')],
    }
    self.assertEqual(parsed, expected)
def _create_and_run_perfalizer(self, commands_to_fail=None):
    """Run a PerfalizerTask against a mocked tool and return its log messages.

    Args:
        commands_to_fail: names of webkit-patch commands that should simulate
            failure ('run-perf-tests' makes the perf run fail; 'results-page'
            suppresses writing the results page). Defaults to none.

    Returns:
        The list of messages the task logged.
    """
    # Fresh list per call — a mutable default would be shared across calls.
    commands_to_fail = commands_to_fail or []
    tool = MockTool()
    patch = tool.bugs.fetch_attachment(10000)
    logs = []

    def logger(message):
        logs.append(message)

    def run_webkit_patch(args):
        # Simulate a scripted failure for any configured command name.
        if args[0] in commands_to_fail:
            raise ScriptError

    def run_perf_test(build_path, description):
        self.assertTrue(description == 'without 10000' or description == 'with 10000')
        if 'run-perf-tests' in commands_to_fail:
            return -1
        if 'results-page' not in commands_to_fail:
            tool.filesystem.write_text_file(
                tool.filesystem.join(build_path, 'PerformanceTestResults.html'),
                'results page')
        return 0

    perfalizer = PerfalizerTask(tool, patch, logger)
    perfalizer._port = TestPort(tool)
    perfalizer.run_webkit_patch = run_webkit_patch
    perfalizer._run_perf_test = run_perf_test
    capture = OutputCapture()
    capture.capture_output()
    # Any configured failure must make the task report failure overall.
    if commands_to_fail:
        self.assertFalse(perfalizer.run())
    else:
        self.assertTrue(perfalizer.run())
    capture.restore_output()
    return logs
def test_find_with_skipped_directories(self):
    """Tests living under a skipped resources directory are not collected."""
    port = TestPort()
    found = port.tests('userscripts')
    self.assertTrue('userscripts/resources/iframe.html' not in found)
def test_find_one_test(self):
    """An exact test path resolves to exactly one test."""
    port = TestPort(MockHost())
    found = port.tests(['failures/expected/image.html'])
    self.assertEqual(len(found), 1)
def setUp(self):
    # Create a fresh TestPort backed by a mock host for each test case.
    self._port = TestPort(MockSystemHost())
def test_find_glob(self):
    """A glob pattern matches every test whose name fits it."""
    port = TestPort(MockHost())
    found = port.tests(['failures/expected/im*'])
    self.assertEqual(len(found), 2)
def test_find_with_skipped_directories(self):
    """Tests living under a skipped resources directory are not collected."""
    port = TestPort(MockHost())
    found = port.tests('userscripts')
    self.assertTrue('userscripts/resources/iframe.html' not in found)
def get(self, port_name=None):
    """Return a TestPort for the given port name, bound to this factory's host."""
    return TestPort(host=self._host, port_name=port_name)
def test_find_glob(self):
    """A glob pattern matches every test whose name fits it."""
    port = TestPort()
    found = port.tests(['failures/expected/im*'])
    self.assertEqual(len(found), 2)
def test_find_with_skipped_directories_2(self):
    """Asking directly for a skipped resources directory yields no tests."""
    port = TestPort(MockHost())
    found = port.tests(['userscripts/resources'])
    self.assertEqual(found, set([]))
def test_find_with_skipped_directories_2(self):
    """Asking directly for a skipped resources directory yields no tests."""
    port = TestPort()
    found = port.tests(['userscripts/resources'])
    self.assertEqual(found, set([]))
def test_find_no_paths_specified(self):
    """With no paths given, the port discovers every available test."""
    port = TestPort()
    layout_tests_dir = port.layout_tests_dir()
    found = port.tests([])
    self.assertNotEqual(len(found), 0)
def test_find_one_test(self):
    """An exact test path resolves to exactly one test."""
    port = TestPort()
    found = port.tests(['failures/expected/image.html'])
    self.assertEqual(len(found), 1)
def test_find_no_paths_specified(self):
    """With no paths given, the port discovers every available test."""
    port = TestPort(MockHost())
    layout_tests_dir = port.layout_tests_dir()
    found = port.tests([])
    self.assertNotEqual(len(found), 0)