def run_once(self, host, args):
    """Run a set of telemetry benchmarks.

    @param host: Host machine where test is run.
    @param args: A dictionary of the arguments that were passed to this
                 test.

    @returns None.
    """
    self._host = host
    host_board = host.get_board().split(':')[1]

    if not (host_board in LLVM_BOARDS or host_board in GCC_BOARDS):
        raise error.TestFail(
                'This test cannot be run on board %s' % host_board)

    self._parse_args(args)

    if self._minimal_telemetry:
        self._run_tests_minimal_telemetry()
    else:
        self._telemetry_runner = telemetry_runner.TelemetryRunner(
                self._host, self._local, telemetry_on_dut=False)

        for benchmark_info in TELEMETRY_AFDO_BENCHMARKS:
            benchmark = benchmark_info[0]
            args = () if len(benchmark_info) == 1 else benchmark_info[1]
            try:
                self._run_test_with_retry(benchmark, *args)
            except error.TestBaseException:
                if not self._ignore_failures:
                    raise
                logging.info('Ignoring failure from benchmark %s.',
                             benchmark)
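# The loop above assumes TELEMETRY_AFDO_BENCHMARKS is a sequence of tuples
# whose first element is the benchmark name and whose optional second element
# is a tuple of extra arguments forwarded to _run_test_with_retry.  A minimal
# sketch of that shape (the benchmark names and arguments below are
# placeholders, not the actual AFDO benchmark set):
TELEMETRY_AFDO_BENCHMARKS = (
    ('octane',),                           # benchmark with no extra arguments
    ('page_cycler_v2.typical_25',          # benchmark plus one extra argument
     ('--story-filter=example',)),
)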
def run_once(self, host=None, args={}):
    """Run the telemetry scrolling benchmark.

    @param host: host we are running telemetry on.
    @param args: dictionary of test arguments; the 'local' key controls
                 whether telemetry is run from a local checkout.
    """
    logging.info('Checking sysfs')
    self._check_sysfs(host)

    local = args.get('local') == 'True'
    telemetry = telemetry_runner.TelemetryRunner(
            host, local, telemetry_on_dut=False)

    logging.info('Starting test')
    results_idle = self._run_telemetry(host, telemetry, True)
    results_noidle = self._run_telemetry(host, telemetry, False)

    # Score is the regression in the percentage of smooth frames caused by
    # enabling CPU idle.
    logging.info('Processing results')
    results = self._compare_results(results_idle, results_noidle)
    self.write_perf_keyval(results)
    if not results['passed']:
        raise error.TestFail('Enabling CPU idle significantly regresses '
                             'scrolling performance.')
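# _check_sysfs, _run_telemetry and _compare_results are helpers defined
# elsewhere in this test.  A minimal sketch of the comparison step, assuming
# each result dict exposes a 'percent_smooth' value and that a fixed threshold
# decides pass/fail (the key names and threshold are illustrative, not the
# real implementation):
MAX_ALLOWED_REGRESSION_PERCENT = 3.0  # illustrative threshold

def _compare_results(self, results_idle, results_noidle):
    """Return perf keyvals plus a pass/fail verdict for the regression."""
    regression = (results_noidle['percent_smooth'] -
                  results_idle['percent_smooth'])
    return {
        'percent_smooth_idle': results_idle['percent_smooth'],
        'percent_smooth_noidle': results_noidle['percent_smooth'],
        'percent_smooth_regression': regression,
        'passed': int(regression <= MAX_ALLOWED_REGRESSION_PERCENT),
    }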
def run_once(self, host=None, benchmark=None, args={}):
    """Run a telemetry benchmark.

    @param host: hostname (IP address) to run the telemetry benchmark on.
    @param benchmark: telemetry benchmark test to run.
    @param args: dictionary of test arguments; the 'local' key controls
                 whether telemetry is run from a local checkout.
    """
    local = args.get("local") == "True"
    telemetry = telemetry_runner.TelemetryRunner(host, local)
    telemetry.run_telemetry_benchmark(benchmark, perf_value_writer=self)
def run_once(self, host=None):
    """Run the telemetry scrolling action tests.

    @param host: host we are running telemetry on.
    """
    telemetry = telemetry_runner.TelemetryRunner(host)
    result = telemetry.run_telemetry_test('ScrollingActionTest')
    logging.debug('Telemetry completed with a status of: %s with output:'
                  ' %s', result.status, result.output)
def run_once(self, host=None, test=None, args={}):
    """Run a GPU telemetry test.

    @param host: host we are running telemetry on.
    @param test: telemetry test we want to run.
    @param args: dictionary of test arguments; the 'local' key controls
                 whether telemetry is run from a local checkout.
    """
    local = args.get("local") == "True"
    telemetry = telemetry_runner.TelemetryRunner(host, local)
    result = telemetry.run_gpu_integration_test(test)
    logging.debug('Telemetry completed with a status of: %s with output:'
                  ' %s', result.status, result.output)
def run_once(self, host=None, benchmark=None, args={}):
    """Run a telemetry benchmark.

    @param host: hostname (IP address) to run the telemetry benchmark on.
    @param benchmark: telemetry benchmark test to run.
    @param args: dictionary of test arguments. Recognized keys are 'local',
                 'telemetry_on_dut', 'extra_args' and 'pageset_repeat'.
    """
    local = args.get("local") == "True"

    optional = {}
    telemetry_on_dut = args.get("telemetry_on_dut")
    if telemetry_on_dut:
        optional["telemetry_on_dut"] = telemetry_on_dut == "True"
    telemetry = telemetry_runner.TelemetryRunner(host, local, **optional)

    perf_value_writer = self
    extra_args = args.get("extra_args", [])
    repeat = args.get("pageset_repeat")
    if repeat is not None:
        extra_args.append('--pageset-repeat=%s' % repeat)

    telemetry.run_telemetry_benchmark(benchmark, perf_value_writer,
                                      *extra_args)
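# A common way to invoke a runner like the one above from an autotest
# server-side control file is to parse the command-line args into a dict and
# forward it to run_test.  The names 'args', 'hosts', 'job', 'machines' and
# 'parallel_simple' are globals provided by the control-file environment; the
# test name and benchmark below are illustrative assumptions, not taken from
# this code:
from autotest_lib.client.common_lib import utils

args_dict = utils.args_to_dict(args)

def run(machine):
    host = hosts.create_host(machine)
    job.run_test('telemetry_Benchmarks',
                 host=host,
                 benchmark='octane',
                 args=args_dict)

parallel_simple(run, machines)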