def create_runner(self, args=None, driver_class=TestDriver):
    """Build a PerfTestsRunner wired to a TestPort with stub drivers.

    Args:
        args: optional list of command-line arguments for the runner.
        driver_class: class instantiated whenever the port creates a driver.

    Returns:
        A (runner, test_port) tuple.
    """
    # A literal [] default is shared across calls (mutable-default pitfall);
    # use a None sentinel instead.
    args = args or []
    options, parsed_args = PerfTestsRunner._parse_args(args)
    test_port = TestPort(host=MockHost(), options=options)
    test_port.create_driver = lambda worker_number=None, no_timeout=False: driver_class()
    runner = PerfTestsRunner(args=args, port=test_port)
    # Pre-create the directories the runner scans for performance tests.
    runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspector')
    runner._host.filesystem.maybe_make_directory(runner._base_path, 'Bindings')
    runner._host.filesystem.maybe_make_directory(runner._base_path, 'Parser')
    return runner, test_port
def test_touched_but_skipped_test(self):
    """Touched tests that the expectations file marks [ Skip ] are excluded."""
    host = MockHost()
    port = TestPort(host)

    # Skip test1 and test3 via the expectations file.
    expectations_dict = OrderedDict()
    expectations_dict['expectations'] = 'test1.html [ Skip ]\ntest3.html [ Skip ]\n'
    port.expectations_dict = lambda **kwargs: expectations_dict
    port.test_exists = lambda test: True

    host.filesystem.write_text_file('/test.checkout/LayoutTests/test2.html', 'This is a test to be runned')
    host.filesystem.write_text_file('/test.checkout/LayoutTests/test3.html', 'This is a test to be skipped')

    paths = [
        'LayoutTests/test0.html',
        'LayoutTests/test1.html',
        'LayoutTests/test2-expected.txt',
        'LayoutTests/test3-expected.txt',
    ]
    finder_options = optparse.Values({'skipped': 'always', 'skip_failing_tests': False, 'http': True})
    touched_tests = MockLayoutTestFinder(port, finder_options).find_touched_tests(paths)

    # Only the non-skipped tests survive.
    self.assertEqual(sorted(touched_tests), sorted(['test0.html', 'test2.html']))
class KeyCompareTest(unittest.TestCase):
    """Checks that Port.test_key orders test paths the way we expect."""

    def setUp(self):
        self._port = TestPort(MockSystemHost())

    def assert_cmp(self, x, y, result):
        # Compare the sortable keys produced for the two paths.
        self.assertEqual(cmp(self._port.test_key(x), self._port.test_key(y)), result)

    def test_test_key(self):
        for left, right, expected in (
            ('/a', '/a', 0),
            ('/a', '/b', -1),
            ('/a2', '/a10', -1),
            ('/a2/foo', '/a10/foo', -1),
            ('/a/foo11', '/a/foo2', 1),
            ('/ab', '/a/a/b', -1),
            ('/a/a/b', '/ab', 1),
            ('/foo-bar/baz', '/foo/baz', -1),
        ):
            self.assert_cmp(left, right, expected)
class NaturalCompareTest(unittest.TestCase):
    """Checks the natural ("foo_2" before "foo_10") sort key of the port."""

    def setUp(self):
        self._port = TestPort(MockSystemHost())

    def assert_cmp(self, x, y, result):
        # Compare the natural-sort keys rather than the raw strings.
        self.assertEqual(
            cmp(self._port._natural_sort_key(x), self._port._natural_sort_key(y)), result)

    def test_natural_compare(self):
        for left, right, expected in (
            ('a', 'a', 0),
            ('ab', 'a', 1),
            ('a', 'ab', -1),
            ('', '', 0),
            ('', 'ab', -1),
            ('1', '2', -1),
            ('2', '1', 1),
            ('1', '10', -1),
            ('2', '10', -1),
            ('foo_1.html', 'foo_2.html', -1),
            ('foo_1.1.html', 'foo_2.html', -1),
            ('foo_1.html', 'foo_10.html', -1),
            ('foo_2.html', 'foo_10.html', -1),
            ('foo_23.html', 'foo_10.html', 1),
            ('foo_23.html', 'foo_100.html', -1),
        ):
            self.assert_cmp(left, right, expected)
class KeyCompareTest(unittest.TestCase):
    """Verifies the relative ordering produced by Port.test_key."""

    # (lhs, rhs, expected cmp result) triples covering equal, numeric-run
    # and path-separator ordering.
    _CASES = [
        ("/a", "/a", 0),
        ("/a", "/b", -1),
        ("/a2", "/a10", -1),
        ("/a2/foo", "/a10/foo", -1),
        ("/a/foo11", "/a/foo2", 1),
        ("/ab", "/a/a/b", -1),
        ("/a/a/b", "/ab", 1),
        ("/foo-bar/baz", "/foo/baz", -1),
    ]

    def setUp(self):
        self._port = TestPort(MockSystemHost())

    def assert_cmp(self, x, y, result):
        key = self._port.test_key
        self.assertEqual(cmp(key(x), key(y)), result)

    def test_test_key(self):
        for lhs, rhs, expected in self._CASES:
            self.assert_cmp(lhs, rhs, expected)
def get_shards(self, num_workers, fully_parallel, test_list=None):
    """Shard test_list (or self.test_list when omitted) and return the shards."""
    port = TestPort(MockSystemHost())
    self.sharder = Sharder(port.split_test)
    names = test_list or self.test_list
    inputs = [self.get_test_input(name) for name in names]
    return self.sharder.shard_tests(inputs, num_workers, fully_parallel)
def make_port(self, executive=None, with_tests=False, port_name=None, **kwargs):
    """Build a port for testing.

    Returns a TestPort with the unit-test fixture filesystem when
    with_tests is true, otherwise a plain Port named port_name (or
    'baseport').
    """
    host = MockSystemHost()
    if executive:
        host.executive = executive
    if not with_tests:
        return Port(host, port_name or 'baseport', **kwargs)
    add_unit_tests_to_mock_filesystem(host.filesystem)
    return TestPort(host, **kwargs)
def create_runner(self, args=None):
    """Build a PerfTestsRunner over a TestPort for testing.

    Args:
        args: optional list of command-line arguments for the runner.

    Returns:
        A (runner, test_port) tuple.
    """
    # A literal [] default is shared across calls (mutable-default pitfall);
    # use a None sentinel instead.
    args = args or []
    options, parsed_args = PerfTestsRunner._parse_args(args)
    test_port = TestPort(host=MockHost(), options=options)
    runner = PerfTestsRunner(args=args, port=test_port)
    # Pre-create the directories the runner scans for performance tests.
    runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspector')
    runner._host.filesystem.maybe_make_directory(runner._base_path, 'Bindings')
    runner._host.filesystem.maybe_make_directory(runner._base_path, 'Parser')
    return runner, test_port
def touched_files(self, touched_files, fs=None):
    """Map a list of touched file paths to tests.

    Returns a (filesystem, touched_tests) pair; fs, when supplied (truthy),
    replaces the mock host's filesystem.
    """
    host = MockHost()
    if not fs:
        fs = host.filesystem
    else:
        host.filesystem = fs
    port = TestPort(host)
    finder_options = optparse.Values({'skipped': 'always', 'skip_failing_tests': False, 'http': True})
    finder = MockLayoutTestFinder(port, finder_options)
    return (fs, finder.find_touched_tests(touched_files))
def __init__(self, *args, **kwargs):
    """Extend MockHost with a stub port, status server, and IRC fields."""
    MockHost.__init__(self, *args, **kwargs)
    # The port reports a fixed name so tests can match on it.
    port = TestPort(self)
    port.name = lambda: "MockPort"
    self._port = port
    self.status_server = MockStatusServer()
    # IRC connection is created lazily; only the password is stubbed here.
    self._irc = None
    self.irc_password = "******"
    self.wakeup_event = threading.Event()
class NaturalCompareTest(unittest.TestCase):
    """Exercises the port's natural sort key on mixed text/number names."""

    def setUp(self):
        self._port = TestPort(MockSystemHost())

    def assert_cmp(self, x, y, result):
        key = self._port._natural_sort_key
        self.assertEqual(cmp(key(x), key(y)), result)

    def test_natural_compare(self):
        cases = [
            ('a', 'a', 0),
            ('ab', 'a', 1),
            ('a', 'ab', -1),
            ('', '', 0),
            ('', 'ab', -1),
            ('1', '2', -1),
            ('2', '1', 1),
            ('1', '10', -1),
            ('2', '10', -1),
            ('foo_1.html', 'foo_2.html', -1),
            ('foo_1.1.html', 'foo_2.html', -1),
            ('foo_1.html', 'foo_10.html', -1),
            ('foo_2.html', 'foo_10.html', -1),
            ('foo_23.html', 'foo_10.html', 1),
            ('foo_23.html', 'foo_100.html', -1),
        ]
        for lhs, rhs, expected in cases:
            self.assert_cmp(lhs, rhs, expected)
class NaturalCompareTest(unittest.TestCase):
    """Checks numeric-aware ("natural") ordering of test names."""

    # (lhs, rhs, expected cmp result) covering empty strings, pure
    # numbers, and embedded numeric runs in filenames.
    _CASES = (
        ("a", "a", 0),
        ("ab", "a", 1),
        ("a", "ab", -1),
        ("", "", 0),
        ("", "ab", -1),
        ("1", "2", -1),
        ("2", "1", 1),
        ("1", "10", -1),
        ("2", "10", -1),
        ("foo_1.html", "foo_2.html", -1),
        ("foo_1.1.html", "foo_2.html", -1),
        ("foo_1.html", "foo_10.html", -1),
        ("foo_2.html", "foo_10.html", -1),
        ("foo_23.html", "foo_10.html", 1),
        ("foo_23.html", "foo_100.html", -1),
    )

    def setUp(self):
        self._port = TestPort(MockSystemHost())

    def assert_cmp(self, x, y, result):
        self.assertEqual(
            cmp(self._port._natural_sort_key(x), self._port._natural_sort_key(y)),
            result)

    def test_natural_compare(self):
        for lhs, rhs, expected in self._CASES:
            self.assert_cmp(lhs, rhs, expected)
def _create_and_run_perfalizer(self, commands_to_fail=None):
    """Run a PerfalizerTask with stubbed commands and return its log messages.

    Args:
        commands_to_fail: optional list of command names that should fail
            when the perfalizer invokes them.

    Returns:
        The list of messages the perfalizer passed to its logger.
    """
    # A literal [] default is shared across calls (mutable-default pitfall);
    # use a None sentinel instead.
    commands_to_fail = commands_to_fail or []
    tool = MockTool()
    patch = tool.bugs.fetch_attachment(10000)
    logs = []

    def logger(message):
        logs.append(message)

    def run_webkit_patch(args):
        # Simulate a webkit-patch subcommand failure for listed commands.
        if args[0] in commands_to_fail:
            raise ScriptError

    def run_perf_test(build_path, description):
        self.assertTrue(description == 'without 10000' or description == 'with 10000')
        if 'run-perf-tests' in commands_to_fail:
            return -1
        # Unless results-page generation is meant to fail, fake the output file.
        if 'results-page' not in commands_to_fail:
            tool.filesystem.write_text_file(
                tool.filesystem.join(build_path, 'PerformanceTestResults.html'), 'results page')
        return 0

    perfalizer = PerfalizerTask(tool, patch, logger)
    perfalizer._port = TestPort(tool)
    perfalizer.run_webkit_patch = run_webkit_patch
    perfalizer._run_perf_test = run_perf_test

    capture = OutputCapture()
    capture.capture_output()
    if commands_to_fail:
        self.assertFalse(perfalizer.run())
    else:
        self.assertTrue(perfalizer.run())
    capture.restore_output()
    return logs
def setUp(self):
    # Give each test a fresh TestPort backed by a mock system host.
    self._port = TestPort(MockSystemHost())
def make_finder(self):
    """Build a LayoutTestFinder over a mock checkout populated with unit tests."""
    host = MockHost(create_stub_repository_files=True)
    add_unit_tests_to_mock_filesystem(host.filesystem)
    port = TestPort(host)
    return LayoutTestFinder(port, None)