def create_runner(self, args=[]):
    options, parsed_args = PerfTestsRunner._parse_args(args)
    test_port = TestPort(host=MockHost(), options=options)
    runner = PerfTestsRunner(args=args, port=test_port)
    runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspector')
    runner._host.filesystem.maybe_make_directory(runner._base_path, 'Bindings')
    runner._host.filesystem.maybe_make_directory(runner._base_path, 'Parser')
    return runner, test_port

def create_runner(self, args=[], driver_class=TestDriver):
    options, _ = PerfTestsRunner._parse_args(args)
    test_port = TestPort(host=MockHost(), options=options)
    test_port.create_driver = lambda worker_number=None, no_timeout=False: driver_class()
    runner = PerfTestsRunner(args=args, port=test_port)
    runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspector')
    runner._host.filesystem.maybe_make_directory(runner._base_path, 'Bindings')
    runner._host.filesystem.maybe_make_directory(runner._base_path, 'Parser')
    return runner, test_port

def create_runner(self, args=[], driver_class=TestDriver):
    options, parsed_args = PerfTestsRunner._parse_args(args)
    test_port = TestPort(host=MockHost(), options=options)
    test_port.create_driver = lambda worker_number=None, no_timeout=False: driver_class()
    runner = PerfTestsRunner(args=args, port=test_port)
    runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspector')
    runner._host.filesystem.maybe_make_directory(runner._base_path, 'Bindings')
    runner._host.filesystem.maybe_make_directory(runner._base_path, 'Parser')
    filesystem = runner._host.filesystem
    runner.load_output_json = lambda: json.loads(filesystem.read_text_file(runner._output_json_path()))
    return runner, test_port

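# Usage sketch for the helper above (added for illustration, not part of the
# original test file). It assumes create_runner() is a method on a
# unittest.TestCase subclass in this module, that PerfTestsRunner.run() returns
# 0 on success (consistent with the sys.exit(PerfTestsRunner(...).run()) entry
# points further below), and that the '--output-json-path' value is purely
# illustrative.
def test_run_writes_output_json_sketch(self):
    runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json'])
    self.assertEqual(runner.run(), 0)
    # load_output_json() is the lambda patched onto the runner above; it
    # re-parses the JSON that the run wrote into the mock filesystem.
    self.assertTrue(runner.load_output_json())
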
def create_runner(self, buildbot_output=None, args=[], regular_output=None):
    buildbot_output = buildbot_output or array_stream.ArrayStream()
    regular_output = regular_output or array_stream.ArrayStream()
    options, parsed_args = PerfTestsRunner._parse_args(args)
    test_port = TestPort(host=MockHost(), options=options)
    test_port.create_driver = lambda worker_number=None, no_timeout=False: MainTest.TestDriver()
    runner = PerfTestsRunner(regular_output, buildbot_output, args=args, port=test_port)
    runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspector')
    runner._host.filesystem.maybe_make_directory(runner._base_path, 'Bindings')
    runner._host.filesystem.maybe_make_directory(runner._base_path, 'Parser')
    return runner

def test_parse_args(self):
    runner = self.create_runner()
    options, args = PerfTestsRunner._parse_args([
        '--verbose',
        '--build-directory=folder42',
        '--platform=platform42',
        '--builder-name', 'webkit-mac-1',
        '--build-number=56',
        '--time-out-ms=42',
        '--output-json-path=a/output.json',
        '--source-json-path=a/source.json',
        '--test-results-server=somehost',
        '--debug', 'an_arg'])
    self.assertEqual(options.build, True)
    self.assertEqual(options.verbose, True)
    self.assertEqual(options.help_printing, None)
    self.assertEqual(options.build_directory, 'folder42')
    self.assertEqual(options.platform, 'platform42')
    self.assertEqual(options.builder_name, 'webkit-mac-1')
    self.assertEqual(options.build_number, '56')
    self.assertEqual(options.time_out_ms, '42')
    self.assertEqual(options.configuration, 'Debug')
    self.assertEqual(options.print_options, None)
    self.assertEqual(options.output_json_path, 'a/output.json')
    self.assertEqual(options.source_json_path, 'a/source.json')
    self.assertEqual(options.test_results_server, 'somehost')

def test_parse_args(self):
    runner, port = self.create_runner()
    options, args = PerfTestsRunner._parse_args([
        '--build-directory=folder42',
        '--platform=platform42',
        '--builder-name', 'webkit-mac-1',
        '--build-number=56',
        '--time-out-ms=42',
        '--no-show-results',
        '--reset-results',
        '--output-json-path=a/output.json',
        '--slave-config-json-path=a/source.json',
        '--test-results-server=somehost',
        '--debug'])
    self.assertEqual(options.build, True)
    self.assertEqual(options.build_directory, 'folder42')
    self.assertEqual(options.platform, 'platform42')
    self.assertEqual(options.builder_name, 'webkit-mac-1')
    self.assertEqual(options.build_number, '56')
    self.assertEqual(options.time_out_ms, '42')
    self.assertEqual(options.configuration, 'Debug')
    self.assertEqual(options.show_results, False)
    self.assertEqual(options.reset_results, True)
    self.assertEqual(options.output_json_path, 'a/output.json')
    self.assertEqual(options.slave_config_json_path, 'a/source.json')
    self.assertEqual(options.test_results_server, 'somehost')

def test_parse_args(self):
    runner, port = self.create_runner()
    options, args = PerfTestsRunner._parse_args([
        '--build-directory=folder42',
        '--platform=platform42',
        '--builder-name', 'webkit-mac-1',
        '--build-number=56',
        '--time-out-ms=42',
        '--no-show-results',
        '--reset-results',
        '--output-json-path=a/output.json',
        '--slave-config-json-path=a/source.json',
        '--test-results-server=somehost',
        '--additional-drt-flag=--enable-threaded-parser',
        '--additional-drt-flag=--awesomesauce',
        '--repeat=5',
        '--test-runner-count=5',
        '--debug',
        '--no-timeout'])
    self.assertTrue(options.build)
    self.assertEqual(options.build_directory, 'folder42')
    self.assertEqual(options.platform, 'platform42')
    self.assertEqual(options.builder_name, 'webkit-mac-1')
    self.assertEqual(options.build_number, '56')
    self.assertEqual(options.time_out_ms, '42')
    self.assertEqual(options.configuration, 'Debug')
    self.assertFalse(options.show_results)
    self.assertTrue(options.reset_results)
    self.assertEqual(options.output_json_path, 'a/output.json')
    self.assertEqual(options.slave_config_json_path, 'a/source.json')
    self.assertEqual(options.test_results_server, 'somehost')
    self.assertEqual(options.additional_drt_flag, ['--enable-threaded-parser', '--awesomesauce'])
    self.assertEqual(options.repeat, 5)
    self.assertEqual(options.test_runner_count, 5)
    self.assertEqual(options.no_timeout, True)

def test_parse_args(self):
    runner, port = self.create_runner()
    options, args = PerfTestsRunner._parse_args([
        '--build-directory=folder42',
        '--platform=platform42',
        '--builder-name', 'webkit-mac-1',
        '--build-number=56',
        '--time-out-ms=42',
        '--no-show-results',
        '--reset-results',
        '--output-json-path=a/output.json',
        '--slave-config-json-path=a/source.json',
        '--test-results-server=somehost',
        '--additional-drt-flag=--enable-threaded-parser',
        '--additional-drt-flag=--awesomesauce',
        '--repeat=5',
        '--test-runner-count=5',
        '--debug'])
    self.assertTrue(options.build)
    self.assertEqual(options.build_directory, 'folder42')
    self.assertEqual(options.platform, 'platform42')
    self.assertEqual(options.builder_name, 'webkit-mac-1')
    self.assertEqual(options.build_number, '56')
    self.assertEqual(options.time_out_ms, '42')
    self.assertEqual(options.configuration, 'Debug')
    self.assertFalse(options.show_results)
    self.assertTrue(options.reset_results)
    self.assertEqual(options.output_json_path, 'a/output.json')
    self.assertEqual(options.slave_config_json_path, 'a/source.json')
    self.assertEqual(options.test_results_server, 'somehost')
    self.assertEqual(options.additional_drt_flag, ['--enable-threaded-parser', '--awesomesauce'])
    self.assertEqual(options.repeat, 5)
    self.assertEqual(options.test_runner_count, 5)

def test_parse_args(self): runner, port = self.create_runner() options, args = PerfTestsRunner._parse_args( [ "--build-directory=folder42", "--platform=platform42", "--builder-name", "webkit-mac-1", "--build-number=56", "--time-out-ms=42", "--output-json-path=a/output.json", "--source-json-path=a/source.json", "--test-results-server=somehost", "--debug", ] ) self.assertEqual(options.build, True) self.assertEqual(options.build_directory, "folder42") self.assertEqual(options.platform, "platform42") self.assertEqual(options.builder_name, "webkit-mac-1") self.assertEqual(options.build_number, "56") self.assertEqual(options.time_out_ms, "42") self.assertEqual(options.configuration, "Debug") self.assertEqual(options.output_json_path, "a/output.json") self.assertEqual(options.source_json_path, "a/source.json") self.assertEqual(options.test_results_server, "somehost")
def test_default_args(self):
    options, _ = PerfTestsRunner._parse_args([])
    self.assertTrue(options.build)
    self.assertEqual(options.time_out_ms, 600 * 1000)
    self.assertTrue(options.generate_results)
    self.assertTrue(options.show_results)
    self.assertTrue(options.use_skipped_list)
    self.assertEqual(options.repeat, 1)
    self.assertEqual(options.test_runner_count, DEFAULT_TEST_RUNNER_COUNT)

def create_runner(self, args=[], driver_class=TestDriver):
    options, parsed_args = PerfTestsRunner._parse_args(args)
    test_port = TestPort(host=MockHost(), options=options)
    test_port.create_driver = lambda worker_number=None, no_timeout=False: driver_class()
    runner = PerfTestsRunner(args=args, port=test_port)
    runner._host.filesystem.maybe_make_directory(runner._base_path, "inspector")
    runner._host.filesystem.maybe_make_directory(runner._base_path, "Bindings")
    runner._host.filesystem.maybe_make_directory(runner._base_path, "Parser")
    return runner, test_port

def test_default_args(self):
    runner, port = self.create_runner()
    options, args = PerfTestsRunner._parse_args([])
    self.assertTrue(options.build)
    self.assertEqual(options.time_out_ms, 600 * 1000)
    self.assertTrue(options.generate_results)
    self.assertTrue(options.show_results)
    self.assertTrue(options.use_skipped_list)
    self.assertEqual(options.repeat, 1)
    self.assertEqual(options.test_runner_count, -1)

def create_runner(self, buildbot_output=None, args=[], regular_output=None, driver_class=TestDriver):
    buildbot_output = buildbot_output or StringIO.StringIO()
    regular_output = regular_output or StringIO.StringIO()
    options, parsed_args = PerfTestsRunner._parse_args(args)
    test_port = TestPort(host=MockHost(), options=options)
    test_port.create_driver = lambda worker_number=None, no_timeout=False: driver_class()
    runner = PerfTestsRunner(regular_output, buildbot_output, args=args, port=test_port)
    runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspector')
    runner._host.filesystem.maybe_make_directory(runner._base_path, 'Bindings')
    runner._host.filesystem.maybe_make_directory(runner._base_path, 'Parser')
    return runner

def test_parse_args(self): runner, port = self.create_runner() options, args = PerfTestsRunner._parse_args( [ "--build-directory=folder42", "--platform=platform42", "--builder-name", "webkit-mac-1", "--build-number=56", "--time-out-ms=42", "--no-show-results", "--reset-results", "--output-json-path=a/output.json", "--slave-config-json-path=a/source.json", "--test-results-server=somehost", "--additional-drt-flag=--enable-threaded-parser", "--additional-drt-flag=--awesomesauce", "--repeat=5", "--test-runner-count=5", "--debug", ] ) self.assertTrue(options.build) self.assertEqual(options.build_directory, "folder42") self.assertEqual(options.platform, "platform42") self.assertEqual(options.builder_name, "webkit-mac-1") self.assertEqual(options.build_number, "56") self.assertEqual(options.time_out_ms, "42") self.assertEqual(options.configuration, "Debug") self.assertFalse(options.show_results) self.assertTrue(options.reset_results) self.assertEqual(options.output_json_path, "a/output.json") self.assertEqual(options.slave_config_json_path, "a/source.json") self.assertEqual(options.test_results_server, "somehost") self.assertEqual(options.additional_drt_flag, ["--enable-threaded-parser", "--awesomesauce"]) self.assertEqual(options.repeat, 5) self.assertEqual(options.test_runner_count, 5)
def create_runner(self, buildbot_output=None):
    buildbot_output = buildbot_output or array_stream.ArrayStream()
    regular_output = array_stream.ArrayStream()
    return PerfTestsRunner('', regular_output, buildbot_output, args=[])

# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Run Inspector's perf tests in perf mode."""

import logging
import sys

from webkitpy.performance_tests.perftestsrunner import PerfTestsRunner

_log = logging.getLogger(__name__)

if '__main__' == __name__:
    logging.basicConfig(level=logging.INFO, format="%(message)s")
    sys.exit(PerfTestsRunner(args=['inspector']).run())

# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Run Inspector's perf tests in perf mode."""

import logging
import sys

from webkitpy.performance_tests.perftestsrunner import PerfTestsRunner

_log = logging.getLogger(__name__)

if '__main__' == __name__:
    logging.basicConfig(level=logging.INFO, format="%(message)s")
    sys.exit(PerfTestsRunner('inspector').run())
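
# Invocation note (an assumption, not taken from the sources above): in a
# WebKit checkout this entry point is exposed as
# Tools/Scripts/run-inspector-perf-tests.py, so it is typically run as, e.g.,
#     Tools/Scripts/run-inspector-perf-tests.py --debug --platform=<port-name>
# where the flags are the ones accepted by PerfTestsRunner._parse_args in the
# tests above.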