Example #1
def test_can_construct_runner_object_minimum_parameters(self):
    # Constructing the benchmark_runner object with only the minimum
    # required parameters should succeed. BenchmarkRunner,
    # FakeBenchmarkRunner, default_platform() and default_browser() are
    # provided by the surrounding test module.
    plan_list = BenchmarkRunner.available_plans()
    build_dir = os.path.abspath(os.curdir)
    runner = FakeBenchmarkRunner(plan_list[0], False, 1, build_dir,
                                 "/tmp/testOutput.txt", default_platform(),
                                 default_browser(), None)
    self.assertTrue(runner.execute())
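
FakeBenchmarkRunner is evidently a test double used so the constructor path can be exercised without launching a real browser. A minimal sketch of such a stub, assuming the parent constructor accepts the positional arguments in the order shown at the call site above (the signature is inferred from that call, not confirmed by the source):

# Hypothetical test double; argument names are illustrative.
class FakeBenchmarkRunner(BenchmarkRunner):

    def __init__(self, plan, local_copy, count_override, build_dir,
                 output_file, platform, browser, browser_path):
        super(FakeBenchmarkRunner, self).__init__(
            plan, local_copy, count_override, build_dir, output_file,
            platform, browser, browser_path)

    def execute(self):
        # Skip the real benchmark run and report success, so the test
        # only exercises object construction.
        return True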
Example #2
# argparse is from the standard library; BrowserDriverFactory,
# WebServerBenchmarkRunner, benchmark_runner_subclasses, default_platform()
# and default_browser() are provided by the surrounding module.
import argparse


def parse_args():
    parser = argparse.ArgumentParser(
        description='Automate the browser-based performance benchmarks')
    # browserperfdash specific arguments.
    parser.add_argument(
        '--config-file',
        dest='config_file',
        default=None,
        required=True,
        help='Configuration file for sending the results to the '
             'performance dashboard server(s).')
    parser.add_argument('--browser-version',
                        dest='browser_version',
                        default=None,
                        required=True,
                        help='A string that identifies the browser version.')
    # arguments shared with run-benchmark.
    parser.add_argument(
        '--build-directory',
        dest='buildDir',
        help='Path to the browser executable. e.g. WebKitBuild/Release/')
    parser.add_argument('--platform',
                        dest='platform',
                        default=default_platform(),
                        choices=BrowserDriverFactory.available_platforms())
    parser.add_argument('--browser',
                        dest='browser',
                        default=default_browser(),
                        choices=BrowserDriverFactory.available_browsers())
    parser.add_argument(
        '--driver',
        default=WebServerBenchmarkRunner.name,
        choices=benchmark_runner_subclasses.keys(),
        help='Use the specified benchmark driver. Defaults to %s.' %
        WebServerBenchmarkRunner.name)
    parser.add_argument(
        '--local-copy',
        dest='localCopy',
        help='Path to a local copy of the benchmark. '
             'e.g. PerformanceTests/SunSpider/')
    parser.add_argument('--count',
                        dest='countOverride',
                        type=int,
                        help='Number of times to run the benchmark. e.g. 5')
    # Exactly one of --plan / --allplans must be given.
    mutual_group = parser.add_mutually_exclusive_group(required=True)
    mutual_group.add_argument(
        '--plan',
        dest='plan',
        help='Benchmark plan to run. e.g. speedometer, jetstream')
    mutual_group.add_argument(
        '--allplans',
        action='store_true',
        help='Run all available benchmark plans sequentially')
    args = parser.parse_args()
    return args
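
A short sketch of driving this parser; the browserperfdash-benchmark script name and the flag values below are illustrative assumptions, not taken from the snippet:

import sys

# Simulate a command line: --config-file and --browser-version are
# required, and exactly one of --plan / --allplans must be present.
sys.argv = ['browserperfdash-benchmark',
            '--config-file', 'dashboard.config',
            '--browser-version', '2.30.1',
            '--plan', 'speedometer']
args = parse_args()
print(args.plan)           # 'speedometer'
print(args.countOverride)  # None unless --count was given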