def _run_tests_minimal_telemetry(self):
    """Run the benchmarks using the minimal support from Telemetry.

    The benchmarks are run using a client side autotest test. This test
    will control Chrome directly using the chrome.Chrome support and it
    will ask Chrome to display the benchmark pages directly instead of
    using the "page sets" and "measurements" support from Telemetry.
    In this way we avoid using Telemetry benchmark support which is not
    stable on ChromeOS yet.
    """
    AFDO_GENERATE_CLIENT_TEST = 'telemetry_AFDOGenerateClient'

    # We don't want to "inherit" the profiler settings for this test
    # into the client test. Doing so will end up in two instances of
    # the profiler (perf) being executed at the same time.
    # Filed a feature request about this. See crbug/342958.

    # Save the current settings for profilers.
    saved_profilers = self.job.profilers
    saved_default_profile_only = self.job.default_profile_only

    # Reset the state of the profilers so the client test starts clean.
    self.job.default_profile_only = False
    self.job.profilers = profilers.profilers(self.job)

    # Execute the client side test.
    client_at = autotest.Autotest(self._host)
    client_at.run_test(AFDO_GENERATE_CLIENT_TEST, args='')

    # Restore the settings for the profilers.
    # BUG FIX: the original assigned to 'self.job.profiler' (singular),
    # which only created a stray attribute and left the saved profilers
    # un-restored on 'self.job.profilers'.
    self.job.default_profile_only = saved_default_profile_only
    self.job.profilers = saved_profilers
def __init__(self, control, args, resultdir, label, user, machines,
             client=False, parse_job='',
             ssh_user='******', ssh_port=22, ssh_pass='',
             group_name='', tag='',
             control_filename=SERVER_CONTROL_FILENAME):
    """
    Create a server side job object.

    @param control: The pathname of the control file.
    @param args: Passed to the control file.
    @param resultdir: Where to throw the results.
    @param label: Description of the job.
    @param user: Username for the job (email address).
    @param client: True if this is a client-side control file.
    @param parse_job: string, if supplied it is the job execution tag that
            the results will be passed through to the TKO parser with.
    @param ssh_user: The SSH username.  [root]
    @param ssh_port: The SSH port number.  [22]
    @param ssh_pass: The SSH passphrase, if needed.
    @param group_name: If supplied, this will be written out as
            host_group_name in the keyvals file for the parser.
    @param tag: The job execution tag from the scheduler.  [optional]
    @param control_filename: The filename where the server control file
            should be written in the results directory.
    """
    super(base_server_job, self).__init__(resultdir=resultdir)
    # NOTE(review): removed unused local 'path' (os.path.dirname(__file__))
    # that was assigned but never read in this version of __init__.

    self.control = control
    self._uncollected_log_file = os.path.join(self.resultdir,
                                              'uncollected_logs')
    debugdir = os.path.join(self.resultdir, 'debug')
    if not os.path.exists(debugdir):
        os.mkdir(debugdir)

    # Fall back to the invoking user when no explicit user was supplied.
    if user:
        self.user = user
    else:
        self.user = getpass.getuser()

    self.args = args
    self.machines = machines
    self._client = client
    self.warning_loggers = set()
    self.warning_manager = warning_manager()
    self._ssh_user = ssh_user
    self._ssh_port = ssh_port
    self._ssh_pass = ssh_pass
    self.tag = tag
    self.last_boot_tag = None
    self.hosts = set()
    self.drop_caches = False
    self.drop_caches_between_iterations = False
    self._control_filename = control_filename

    self.logging = logging_manager.get_logging_manager(
            manage_stdout_and_stderr=True, redirect_fds=True)
    subcommand.logging_manager_object = self.logging

    self.sysinfo = sysinfo.sysinfo(self.resultdir)
    self.profilers = profilers.profilers(self)

    job_data = {'label': label, 'user': user,
                'hostname': ','.join(machines),
                'drone': platform.node(),
                'status_version': str(self._STATUS_VERSION),
                'job_started': str(int(time.time()))}
    if group_name:
        job_data['host_group_name'] = group_name

    # only write these keyvals out on the first job in a resultdir
    if 'job_started' not in utils.read_keyval(self.resultdir):
        job_data.update(get_site_job_data(self))
        utils.write_keyval(self.resultdir, job_data)

    self._parse_job = parse_job
    # The TKO parser is only usable for single-machine jobs with a tag.
    self._using_parser = (self._parse_job and len(machines) <= 1)
    self.pkgmgr = packages.PackageManager(
        self.autodir, run_function_dargs={'timeout': 600})
    self.num_tests_run = 0
    self.num_tests_failed = 0

    self._register_subcommand_hooks()

    # these components aren't usable on the server
    self.bootloader = None
    self.harness = None

    # set up the status logger
    self._indenter = status_indenter()
    self._logger = base_job.status_logger(
        self, self._indenter, 'status.log', 'status.log',
        record_hook=server_job_record_hook(self))
def __init__(
    self,
    control,
    args,
    resultdir,
    label,
    user,
    machines,
    client=False,
    parse_job="",
    ssh_user="******",
    ssh_port=22,
    ssh_pass="",
    group_name="",
    tag="",
    control_filename=SERVER_CONTROL_FILENAME,
):
    """
    Create a server side job object.

    @param control: The pathname of the control file.
    @param args: Passed to the control file.
    @param resultdir: Where to throw the results.
    @param label: Description of the job.
    @param user: Username for the job (email address).
    @param client: True if this is a client-side control file.
    @param parse_job: string, if supplied it is the job execution tag that
            the results will be passed through to the TKO parser with.
    @param ssh_user: The SSH username.  [root]
    @param ssh_port: The SSH port number.  [22]
    @param ssh_pass: The SSH passphrase, if needed.
    @param group_name: If supplied, this will be written out as
            host_group_name in the keyvals file for the parser.
    @param tag: The job execution tag from the scheduler.  [optional]
    @param control_filename: The filename where the server control file
            should be written in the results directory.
    """
    super(base_server_job, self).__init__(resultdir=resultdir)
    # NOTE(review): removed unused local 'path' (os.path.dirname(__file__))
    # that was assigned but never read in this version of __init__.

    self.control = control
    self._uncollected_log_file = os.path.join(self.resultdir, "uncollected_logs")
    debugdir = os.path.join(self.resultdir, "debug")
    if not os.path.exists(debugdir):
        os.mkdir(debugdir)

    # Fall back to the invoking user when no explicit user was supplied.
    if user:
        self.user = user
    else:
        self.user = getpass.getuser()

    self.args = args
    self.machines = machines
    self._client = client
    self.warning_loggers = set()
    self.warning_manager = warning_manager()
    self._ssh_user = ssh_user
    self._ssh_port = ssh_port
    self._ssh_pass = ssh_pass
    self.tag = tag
    self.last_boot_tag = None
    self.hosts = set()
    self.drop_caches = False
    self.drop_caches_between_iterations = False
    self._control_filename = control_filename

    self.logging = logging_manager.get_logging_manager(
        manage_stdout_and_stderr=True, redirect_fds=True
    )
    subcommand.logging_manager_object = self.logging

    self.sysinfo = sysinfo.sysinfo(self.resultdir)
    self.profilers = profilers.profilers(self)

    job_data = {
        "label": label,
        "user": user,
        "hostname": ",".join(machines),
        "drone": platform.node(),
        "status_version": str(self._STATUS_VERSION),
        "job_started": str(int(time.time())),
    }
    if group_name:
        job_data["host_group_name"] = group_name

    # only write these keyvals out on the first job in a resultdir
    if "job_started" not in utils.read_keyval(self.resultdir):
        job_data.update(get_site_job_data(self))
        utils.write_keyval(self.resultdir, job_data)

    self._parse_job = parse_job
    # The TKO parser is only usable for single-machine jobs with a tag.
    self._using_parser = self._parse_job and len(machines) <= 1
    self.pkgmgr = packages.PackageManager(
        self.autodir, run_function_dargs={"timeout": 600}
    )
    self.num_tests_run = 0
    self.num_tests_failed = 0

    self._register_subcommand_hooks()

    # these components aren't usable on the server
    self.bootloader = None
    self.harness = None

    # set up the status logger
    self._indenter = status_indenter()
    self._logger = base_job.status_logger(
        self, self._indenter, "status.log", "status.log",
        record_hook=server_job_record_hook(self)
    )
def __init__(self, control, args, resultdir, label, user, machines, client=False, parse_job='', ssh_user='******', ssh_port=22, ssh_pass='', group_name='', tag=''): """ Create a server side job object. @param control: The pathname of the control file. @param args: Passed to the control file. @param resultdir: Where to throw the results. @param label: Description of the job. @param user: Username for the job (email address). @param client: True if this is a client-side control file. @param parse_job: string, if supplied it is the job execution tag that the results will be passed through to the TKO parser with. @param ssh_user: The SSH username. [root] @param ssh_port: The SSH port number. [22] @param ssh_pass: The SSH passphrase, if needed. @param group_name: If supplied, this will be written out as host_group_name in the keyvals file for the parser. @param tag: The job execution tag from the scheduler. [optional] """ path = os.path.dirname(__file__) self.autodir = os.path.abspath(os.path.join(path, '..')) self.serverdir = os.path.join(self.autodir, 'server') self.testdir = os.path.join(self.serverdir, 'tests') self.site_testdir = os.path.join(self.serverdir, 'site_tests') self.tmpdir = os.path.join(self.serverdir, 'tmp') self.conmuxdir = os.path.join(self.autodir, 'conmux') self.clientdir = os.path.join(self.autodir, 'client') self.toolsdir = os.path.join(self.autodir, 'client/tools') if control: self.control = self._load_control_file(control) else: self.control = '' self.resultdir = resultdir self.uncollected_log_file = None if resultdir: self.uncollected_log_file = os.path.join(resultdir, 'uncollected_logs') self.debugdir = os.path.join(resultdir, 'debug') if not os.path.exists(resultdir): os.mkdir(resultdir) if not os.path.exists(self.debugdir): os.mkdir(self.debugdir) self.label = label self.user = user self.args = args self.machines = machines self.client = client self.record_prefix = '' self.warning_loggers = set() self.warning_manager = warning_manager() 
self.ssh_user = ssh_user self.ssh_port = ssh_port self.ssh_pass = ssh_pass self.tag = tag self.run_test_cleanup = True self.last_boot_tag = None self.hosts = set() self.drop_caches_between_iterations = False self.logging = logging_manager.get_logging_manager( manage_stdout_and_stderr=True, redirect_fds=True) subcommand.logging_manager_object = self.logging if resultdir: self.sysinfo = sysinfo.sysinfo(self.resultdir) self.profilers = profilers.profilers(self) if not os.access(self.tmpdir, os.W_OK): try: os.makedirs(self.tmpdir, 0700) except os.error, e: # Thrown if the directory already exists, which it may. pass