    def get_shards(self, num_workers, fully_parallel, test_list=None):
        port = TestPort(MockSystemHost())
        self.sharder = Sharder(port.split_test)
        test_list = test_list or self.test_list
        return self.sharder.shard_tests(
            [self.get_test_input(test) for test in test_list], num_workers,
            fully_parallel)
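
A minimal usage sketch for the helper above, assuming it lives in a unittest.TestCase that also defines self.test_list and self.get_test_input; the test name and the unpacking of shard_tests's result into locked/unlocked lists are assumptions, not taken from the original source.

    def test_get_shards_sketch(self):
        # Hypothetical test: ask for two workers without full parallelism.
        # The (locked, unlocked) return shape of Sharder.shard_tests is assumed here.
        locked, unlocked = self.get_shards(num_workers=2, fully_parallel=False)
        # Each shard groups one or more tests, so there cannot be more shards
        # than tests in the input list.
        self.assertLessEqual(len(locked) + len(unlocked), len(self.test_list))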
Example #2
    def make_port(self, executive=None, with_tests=False, port_name=None, **kwargs):
        host = MockSystemHost()
        if executive:
            host.executive = executive
        if with_tests:
            add_unit_tests_to_mock_filesystem(host.filesystem)
            return TestPort(host, **kwargs)
        return Port(host, port_name or 'baseport', **kwargs)
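
A possible test around this factory, assuming the enclosing TestCase; the test name is hypothetical and the assertions simply follow the two branches shown above.

    def test_make_port_sketch(self):
        # With with_tests=True the helper returns a TestPort backed by the mock
        # filesystem that add_unit_tests_to_mock_filesystem populated.
        self.assertIsInstance(self.make_port(with_tests=True), TestPort)
        # Otherwise a plain Port named 'baseport' (or the given port_name) comes back.
        self.assertIsInstance(self.make_port(), Port)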
Example #3
    def create_runner(self, args=[]):
        options, parsed_args = PerfTestsRunner._parse_args(args)
        test_port = TestPort(host=MockHost(), options=options)
        runner = PerfTestsRunner(args=args, port=test_port)
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspector')
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'Bindings')
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'Parser')
        return runner, test_port
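
One way a test might consume this helper, assuming the enclosing TestCase; the test name is hypothetical, and isdir/join on the mock filesystem are assumed to behave like their real-filesystem counterparts.

    def test_create_runner_sketch(self):
        # The helper pre-creates the standard perf-test directories on the
        # runner's mock filesystem, so they should exist afterwards.
        runner, test_port = self.create_runner()
        fs = runner._host.filesystem
        for name in ('inspector', 'Bindings', 'Parser'):
            self.assertTrue(fs.isdir(fs.join(runner._base_path, name)))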
Example #4
    def touched_files(self, touched_files, fs=None):
        host = MockHost()
        if fs:
            host.filesystem = fs
        else:
            fs = host.filesystem
        port = TestPort(host)
        return (fs, MockLayoutTestFinder(port, optparse.Values({'skipped': 'always', 'skip_failing_tests': False, 'http': True})).find_touched_tests(touched_files))
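
A shape-check sketch for the helper above, assuming the enclosing TestCase; the test name and the expectation that find_touched_tests returns a list are assumptions.

    def test_touched_files_sketch(self):
        # The helper hands back the MockFileSystem it used together with
        # whatever find_touched_tests reported for the given paths.
        fs, touched = self.touched_files(['LayoutTests/test0.html'])
        self.assertIsNotNone(fs)
        self.assertIsInstance(touched, list)  # assumed return type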
Example #5
    def __init__(self, *args, **kwargs):
        MockHost.__init__(self, *args, **kwargs)

        self._port = TestPort(self)
        self._port.name = lambda: "MockPort"
        self.status_server = MockStatusServer()

        self._irc = None
        self.irc_password = "******"
        self.wakeup_event = threading.Event()
Example #6
    def create_runner(self, args=[], driver_class=TestDriver):
        options, parsed_args = PerfTestsRunner._parse_args(args)
        test_port = TestPort(host=MockHost(), options=options)
        test_port.create_driver = lambda worker_number=None, no_timeout=False: driver_class()

        runner = PerfTestsRunner(args=args, port=test_port)
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspector')
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'Bindings')
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'Parser')

        return runner, test_port
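
A brief sketch of the driver_class hook shown above, assuming the enclosing TestCase and that TestDriver can be subclassed and constructed with no arguments; the test and subclass names are hypothetical.

    def test_create_runner_custom_driver_sketch(self):
        class _CustomDriver(TestDriver):
            pass
        runner, test_port = self.create_runner(driver_class=_CustomDriver)
        # The patched create_driver ignores its arguments and instantiates the
        # supplied driver class, so the port now produces _CustomDriver objects.
        self.assertIsInstance(test_port.create_driver(), _CustomDriver)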
Example #7
    def test_touched_but_skipped_test(self):
        host = MockHost()
        port = TestPort(host)

        expectations_dict = OrderedDict()
        expectations_dict['expectations'] = 'test1.html [ Skip ]\ntest3.html [ Skip ]\n'
        port.expectations_dict = lambda **kwargs: expectations_dict
        port.test_exists = lambda test: True

        paths = ['LayoutTests/test0.html', 'LayoutTests/test1.html', 'LayoutTests/test2-expected.txt', 'LayoutTests/test3-expected.txt']
        host.filesystem.write_text_file('/test.checkout/LayoutTests/test2.html', 'This is a test to be run')
        host.filesystem.write_text_file('/test.checkout/LayoutTests/test3.html', 'This is a test to be skipped')

        touched_tests = MockLayoutTestFinder(port, optparse.Values({'skipped': 'always', 'skip_failing_tests': False, 'http': True})).find_touched_tests(paths)
        self.assertEqual(sorted(touched_tests), sorted(['test0.html', 'test2.html']))
Example #8
    def _create_and_run_perfalizer(self, commands_to_fail=[]):
        tool = MockTool()
        patch = tool.bugs.fetch_attachment(10000)

        logs = []

        def logger(message):
            logs.append(message)

        def run_webkit_patch(args):
            # Stub for the perfalizer's webkit-patch invocations: fail only the
            # commands the caller asked to fail.
            if args[0] in commands_to_fail:
                raise ScriptError

        def run_perf_test(build_path, description):
            # Stub perf-test run: succeed (and write a fake results page)
            # unless the caller requested a failure.
            self.assertIn(description, ('without 10000', 'with 10000'))
            if 'run-perf-tests' in commands_to_fail:
                return -1
            if 'results-page' not in commands_to_fail:
                tool.filesystem.write_text_file(
                    tool.filesystem.join(build_path,
                                         'PerformanceTestResults.html'),
                    'results page')
            return 0

        perfalizer = PerfalizerTask(tool, patch, logger)
        perfalizer._port = TestPort(tool)
        perfalizer.run_webkit_patch = run_webkit_patch
        perfalizer._run_perf_test = run_perf_test

        capture = OutputCapture()
        capture.capture_output()

        if commands_to_fail:
            self.assertFalse(perfalizer.run())
        else:
            self.assertTrue(perfalizer.run())

        capture.restore_output()

        return logs
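
An illustrative caller for the helper above, assuming the enclosing TestCase; the test name is hypothetical and nothing beyond the helper's own return value is relied on.

    def test_perfalizer_success_sketch(self):
        # With no commands forced to fail, run() succeeds inside the helper and
        # the messages passed to logger() come back for inspection.
        logs = self._create_and_run_perfalizer()
        self.assertIsInstance(logs, list)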
Example #9
    def setUp(self):
        self._port = TestPort(MockSystemHost())
Example #10
    def make_finder(self):
        host = MockHost(create_stub_repository_files=True)
        add_unit_tests_to_mock_filesystem(host.filesystem)
        port = TestPort(host)
        return LayoutTestFinder(port, None)
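
A minimal shape check for the factory above, assuming the enclosing TestCase; the test name is hypothetical and only the helper's own return value is inspected.

    def test_make_finder_sketch(self):
        # The helper wires a LayoutTestFinder to a TestPort whose mock
        # filesystem already contains the stub unit tests.
        finder = self.make_finder()
        self.assertIsInstance(finder, LayoutTestFinder)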