results repository. """ file_contents = '\n'.join(lines) + '\n' if paired_with_process: drone = self._get_drone_for_process(paired_with_process) on_results_repository = False else: drone = self._results_drone on_results_repository = True full_path = self.absolute_path( file_path, on_results_repository=on_results_repository) drone.queue_call('write_to_file', full_path, file_contents) SiteDroneManager = utils.import_site_class( __file__, 'autotest_lib.scheduler.site_drone_manager', 'SiteDroneManager', BaseDroneManager) class DroneManager(SiteDroneManager): pass _the_instance = None def instance(): if _the_instance is None: _set_instance(DroneManager()) return _the_instance
from autotest_lib.client.common_lib import utils, base_packages

# Resolve the site-specific package manager implementation when one is
# installed; otherwise fall back to the stock BasePackageManager.
SitePackageManager = utils.import_site_class(
    __file__,
    "autotest_lib.client.common_lib.site_packages",
    "SitePackageManager",
    base_packages.BasePackageManager)


class PackageManager(SitePackageManager):
    """Concrete package manager: the site override if present, else the base."""
    pass
# NOTE(review): flush_all_buffers/close read as methods of an output-handling
# class whose header falls outside this chunk; reformatted in place.
def flush_all_buffers(self):
    """Drain any buffered partial line, then process logs and flush output."""
    if self.leftover:
        self._process_line(self.leftover)
        self.leftover = ""
    self._process_logs()
    self.flush()

def close(self):
    """Flush all pending output before closing."""
    self.flush_all_buffers()


# Resolve site-specific subclasses when a site_autotest module is installed;
# otherwise the base implementations are used directly.
SiteAutotest = client_utils.import_site_class(
    __file__, "autotest_lib.server.site_autotest", "SiteAutotest",
    BaseAutotest)

_SiteRun = client_utils.import_site_class(
    __file__, "autotest_lib.server.site_autotest", "_SiteRun", _BaseRun)


class Autotest(SiteAutotest):
    pass


class _Run(_SiteRun):
    pass
action="store_true", dest="install_in_tmpdir", default=False, help=("by default install autotest clients in " "a temporary directory"), ) self.parser.add_option( "--collect-crashinfo", action="store_true", dest="collect_crashinfo", default=False, help="just run crashinfo collection", ) def parse_args(self): self.options, self.args = self.parser.parse_args() if self.options.args: self.args += self.options.args.split() site_autoserv_parser = utils.import_site_class( __file__, "autotest_lib.server.site_autoserv_parser", "site_autoserv_parser", base_autoserv_parser ) class autoserv_parser(site_autoserv_parser): pass # create the one and only one instance of autoserv_parser autoserv_parser = autoserv_parser()
#!/usr/bin/python
try:
    import autotest.common as common
except ImportError:
    import common

import logging

from autotest_lib.client.common_lib import global_config, utils
from autotest_lib.scheduler import drone_utility


class BaseResultsArchiver(object):
    """Copies finished job results to the configured archive host."""

    def archive_results(self, path):
        """Push the results under *path* to SCHEDULER.results_host.

        Does nothing when no results_host is configured or when it is
        'localhost' (the results are already where they belong).
        """
        dest_host = global_config.global_config.get_config_value(
            'SCHEDULER', 'results_host', default=None)
        # Archiving disabled, or the archive host is this machine.
        if not dest_host or dest_host == 'localhost':
            return
        # rsync-style semantics: a trailing slash copies directory contents.
        if not path.endswith('/'):
            path += '/'
        logging.info('Archiving %s to %s', path, dest_host)
        sender = drone_utility.DroneUtility()
        sender.sync_send_file_to(dest_host, path, path, can_fail=True)


# Site deployments may override the archiver; fall back to the base class.
ResultsArchiver = utils.import_site_class(
    __file__,
    'autotest_lib.scheduler.site_archive_results',
    'SiteResultsArchiver',
    BaseResultsArchiver)
from autotest_lib.client.common_lib import utils
from autotest_lib.client.bin import base_sysinfo

# Pick the site-specific sysinfo implementation when available, falling back
# to the stock base_sysinfo class otherwise.
sysinfo = utils.import_site_class(
    __file__,
    "autotest_lib.client.bin.site_sysinfo",
    "site_sysinfo",
    base_sysinfo.base_sysinfo)

# Convenience re-exports of the data-structure stubs from base_sysinfo.
logfile = base_sysinfo.logfile
command = base_sysinfo.command
eligible_hosts_in_group = [ self._hosts_available[id] for id in eligible_host_ids_in_group ] # So that they show up in a sane order when viewing the job. eligible_hosts_in_group.sort( cmp=scheduler_models.Host.cmp_for_sort) # Limit ourselves to scheduling the atomic group size. if len(eligible_hosts_in_group) > max_hosts: eligible_hosts_in_group = eligible_hosts_in_group[:max_hosts] # Remove the selected hosts from our cached internal state # of available hosts in order to return the Host objects. host_list = [] for host in eligible_hosts_in_group: hosts_in_label.discard(host.id) self._hosts_available.pop(host.id) host_list.append(host) return host_list return [] site_host_scheduler = utils.import_site_class( __file__, 'autotest_lib.scheduler.site_host_scheduler', 'site_host_scheduler', BaseHostScheduler) class HostScheduler(site_host_scheduler): pass
continue eligible_hosts_in_group = [self._hosts_available[id] for id in eligible_host_ids_in_group] # So that they show up in a sane order when viewing the job. eligible_hosts_in_group.sort(cmp=scheduler_models.Host.cmp_for_sort) # Limit ourselves to scheduling the atomic group size. if len(eligible_hosts_in_group) > max_hosts: eligible_hosts_in_group = eligible_hosts_in_group[:max_hosts] # Remove the selected hosts from our cached internal state # of available hosts in order to return the Host objects. host_list = [] for host in eligible_hosts_in_group: hosts_in_label.discard(host.id) self._hosts_available.pop(host.id) host_list.append(host) return host_list return [] site_host_scheduler = utils.import_site_class( __file__, 'autotest_lib.scheduler.site_host_scheduler', 'site_host_scheduler', BaseHostScheduler) class HostScheduler(site_host_scheduler): pass
import os, sys, subprocess, logging from autotest_lib.client.common_lib import utils, error from autotest_lib.server import utils as server_utils from autotest_lib.server.hosts import remote SiteHost = utils.import_site_class(__file__, "autotest_lib.server.hosts.site_host", "SiteHost", remote.RemoteHost) class SerialHost(SiteHost): DEFAULT_REBOOT_TIMEOUT = SiteHost.DEFAULT_REBOOT_TIMEOUT def _initialize(self, conmux_server=None, conmux_attach=None, console_log="console.log", *args, **dargs): super(SerialHost, self)._initialize(*args, **dargs) self.__logger = None self.__console_log = console_log self.conmux_server = conmux_server self.conmux_attach = self._get_conmux_attach(conmux_attach) @classmethod def _get_conmux_attach(cls, conmux_attach=None): if conmux_attach:
time.sleep(30) # kill it utils.signal_program(monitor_db.PID_FILE_PREFIX) def handle_sigterm(signum, frame): logging.info('Caught SIGTERM') kill_monitor() utils.delete_pid_file_if_exists(monitor_db.WATCHER_PID_FILE_PREFIX) sys.exit(1) signal.signal(signal.SIGTERM, handle_sigterm) SiteMonitorProc = utils.import_site_class( __file__, 'autotest_lib.scheduler.site_monitor_db_watcher', 'SiteMonitorProc', object) class MonitorProc(SiteMonitorProc): def __init__(self, do_recovery=False): args = [monitor_db_path] if do_recovery: args.append("--recover-hosts") args.append(results_dir) kill_monitor() scheduler_config = scheduler_logging_config.SchedulerLoggingConfig log_name = scheduler_config.get_log_name() os.environ['AUTOTEST_SCHEDULER_LOG_NAME'] = log_name scheduler_log_dir = scheduler_config.get_server_log_dir()
'%s\n%s\n%s' % (pickled_input, separator, traceback.format_exc(), separator)) def _parse_args(args): parser = argparse.ArgumentParser( description='Local drone process manager.') parser.add_argument('--call_time', help='Time this process was invoked from the master', default=None, type=float) return parser.parse_args(args) SiteDroneUtility = utils.import_site_class( __file__, 'autotest_lib.scheduler.site_drone_utility', 'SiteDroneUtility', BaseDroneUtility) class DroneUtility(SiteDroneUtility): pass def return_data(data): print pickle.dumps(data) def main(): logging_manager.configure_logging( drone_logging_config.DroneLoggingConfig()) with timer.get_client('decode'):
# SSP uses privileged containers, sudo access is required. If # the process can't run sudo command without password, SSP can't # work properly. sudo command option -n will avoid user input. # If password is required, the command will fail and raise # AutoservRunError exception. self._host.run('sudo -n ls "%s"' % base_container) self._support_ssp = True except (error.AutoservRunError, error.AutotestHostRunError): # Local drone raises AutotestHostRunError, while remote drone # raises AutoservRunError. self._support_ssp = False return self._support_ssp SiteDrone = utils.import_site_class(__file__, 'autotest_lib.scheduler.site_drones', '_SiteAbstractDrone', _BaseAbstractDrone) class _AbstractDrone(SiteDrone): pass class _LocalDrone(_AbstractDrone): def __init__(self, timestamp_remote_calls=True): super(_LocalDrone, self).__init__(timestamp_remote_calls=timestamp_remote_calls) self.hostname = 'localhost' self._host = local_host.LocalHost() self._drone_utility = drone_utility.DroneUtility()
"for ssh connections")) self.parser.add_option("--install-in-tmpdir", action="store_true", dest="install_in_tmpdir", default=False, help=("by default install autotest clients in " "a temporary directory")) self.parser.add_option("--collect-crashinfo", action="store_true", dest="collect_crashinfo", default=False, help="just run crashinfo collection") self.parser.add_option("--control-filename", action="store", type="string", default=None, help=("filename to use for the server control " "file in the results directory")) def parse_args(self): self.options, self.args = self.parser.parse_args() if self.options.args: self.args += self.options.args.split() site_autoserv_parser = utils.import_site_class( __file__, "autotest_lib.server.site_autoserv_parser", "site_autoserv_parser", base_autoserv_parser) class autoserv_parser(site_autoserv_parser): pass # create the one and only one instance of autoserv_parser autoserv_parser = autoserv_parser()
for start, end in disabled_intervals: if timestamp >= start and (end is None or timestamp < end): return False return True def disable_warnings(self, warning_type, current_time_func=time.time): """As of now, disables all further warnings of this type.""" intervals = self.disabled_warnings.setdefault(warning_type, []) if not intervals or intervals[-1][1] is not None: intervals.append((int(current_time_func()), None)) def enable_warnings(self, warning_type, current_time_func=time.time): """As of now, enables all further warnings of this type.""" intervals = self.disabled_warnings.get(warning_type, []) if intervals and intervals[-1][1] is None: intervals[-1] = (intervals[-1][0], int(current_time_func())) # load up site-specific code for generating site-specific job data get_site_job_data = utils.import_site_function( __file__, "autotest_lib.server.site_server_job", "get_site_job_data", _get_site_job_data_dummy) site_server_job = utils.import_site_class( __file__, "autotest_lib.server.site_server_job", "site_server_job", base_server_job) class server_job(site_server_job): pass
import os, sys, subprocess, logging from autotest_lib.client.common_lib import utils, error from autotest_lib.server import utils as server_utils from autotest_lib.server.hosts import remote SiteHost = utils.import_site_class( __file__, "autotest_lib.server.hosts.site_host", "SiteHost", remote.RemoteHost) class SerialHost(SiteHost): DEFAULT_REBOOT_TIMEOUT = SiteHost.DEFAULT_REBOOT_TIMEOUT def _initialize(self, conmux_server=None, conmux_attach=None, console_log="console.log", *args, **dargs): super(SerialHost, self)._initialize(*args, **dargs) self.__logger = None self.__console_log = console_log self.conmux_server = conmux_server self.conmux_attach = self._get_conmux_attach(conmux_attach) @classmethod def _get_conmux_attach(cls, conmux_attach=None): if conmux_attach: return conmux_attach
# NOTE(review): flush/flush_all_buffers/close read as methods of an
# output-handling class whose header falls outside this chunk; reformatted
# in place.
def flush(self):
    """Force any buffered stdout data out immediately."""
    sys.stdout.flush()

def flush_all_buffers(self):
    """Drain any buffered partial line, then process logs and flush output."""
    if self.leftover:
        self._process_line(self.leftover)
        self.leftover = ""
    self._process_logs()
    self.flush()

def close(self):
    """Flush all pending output before closing."""
    self.flush_all_buffers()


# Resolve site-specific subclasses when a site_autotest module is installed;
# otherwise the base implementations are used directly.
SiteAutotest = client_utils.import_site_class(
    __file__, "autotest_lib.server.site_autotest", "SiteAutotest",
    BaseAutotest)

_SiteRun = client_utils.import_site_class(
    __file__, "autotest_lib.server.site_autotest", "_SiteRun", _BaseRun)


class Autotest(SiteAutotest):
    pass


class _Run(_SiteRun):
    pass
if timestamp >= start and (end is None or timestamp < end): return False return True def disable_warnings(self, warning_type, current_time_func=time.time): """As of now, disables all further warnings of this type.""" intervals = self.disabled_warnings.setdefault(warning_type, []) if not intervals or intervals[-1][1] is not None: intervals.append((int(current_time_func()), None)) def enable_warnings(self, warning_type, current_time_func=time.time): """As of now, enables all further warnings of this type.""" intervals = self.disabled_warnings.get(warning_type, []) if intervals and intervals[-1][1] is None: intervals[-1] = (intervals[-1][0], int(current_time_func())) # load up site-specific code for generating site-specific job data get_site_job_data = utils.import_site_function( __file__, "autotest_lib.server.site_server_job", "get_site_job_data", _get_site_job_data_dummy ) site_server_job = utils.import_site_class( __file__, "autotest_lib.server.site_server_job", "site_server_job", base_server_job ) class server_job(site_server_job): pass
if entry.status not in allowed_hqe_statuses: raise scheduler_lib.SchedulerError( '%s attempting to start entry with invalid status %s: ' '%s' % (class_name, entry.status, entry)) invalid_host_status = (allowed_host_statuses is not None and entry.host.status not in allowed_host_statuses) if invalid_host_status: raise scheduler_lib.SchedulerError( '%s attempting to start on queue entry with invalid ' 'host status %s: %s' % (class_name, entry.host.status, entry)) SiteAgentTask = utils.import_site_class( __file__, 'autotest_lib.scheduler.site_monitor_db', 'SiteAgentTask', BaseAgentTask) class AgentTask(SiteAgentTask): pass class TaskWithJobKeyvals(object): """AgentTask mixin providing functionality to help with job keyval files.""" _KEYVAL_FILE = 'keyval' def _format_keyval(self, key, value): return '%s=%s' % (key, value) def _keyval_path(self):
# Copyright 2009 Google Inc. Released under the GPL v2

"""Convenience module exposing every available host type.

Implementation details:
Callers should 'import hosts' rather than importing each available host
module individually.
"""

from autotest_lib.client.common_lib import utils
import base_classes

# A site deployment may provide its own SiteHost; otherwise the stock
# base_classes.Host is used.
Host = utils.import_site_class(
    __file__,
    "autotest_lib.client.common_lib.hosts.site_host",
    "SiteHost",
    base_classes.Host)