action="store_true", dest="collect_crashinfo", default=False, help="just run crashinfo collection", ) self.parser.add_option( "--control-filename", action="store", type="string", default=None, help=("filename to use for the server control " "file in the results directory"), ) def parse_args(self): self.options, self.args = self.parser.parse_args() if self.options.args: self.args += self.options.args.split() site_autoserv_parser = utils.import_site_class( __file__, "autotest.server.site_autoserv_parser", "site_autoserv_parser", base_autoserv_parser ) class autoserv_parser(site_autoserv_parser): pass # create the one and only one instance of autoserv_parser autoserv_parser = autoserv_parser()
# Copyright 2009 Google Inc. Released under the GPL v2

"""This is a convenience module to import all available types of hosts.

Implementation details:
You should 'import hosts' instead of importing every available host module.
"""

from autotest.client.shared import utils
import base_classes

# Prefer a site-specific Host class when one is installed; otherwise use the
# generic base implementation.
Host = utils.import_site_class(
    __file__, "autotest.client.shared.hosts.site_host", "SiteHost",
    base_classes.Host)
try:
    import autotest.common as common  # pylint: disable=W0611
except ImportError:
    import common  # pylint: disable=W0611

import logging

from autotest.client.shared import utils
from autotest.client.shared.settings import settings
from autotest.scheduler import drone_utility


class BaseResultsArchiver(object):

    """Copies finished job results to the configured results host."""

    def archive_results(self, path):
        """Sync *path* to the SCHEDULER.results_host, if one is configured.

        :param path: results directory to archive; a trailing '/' is appended
                     so the directory contents are synced.
        """
        results_host = settings.get_value('SCHEDULER', 'results_host',
                                          default=None)
        # Archiving disabled or local: nothing to do.
        if not results_host or results_host == 'localhost':
            return

        if not path.endswith('/'):
            path += '/'

        logging.info('Archiving %s to %s', path, results_host)
        utility = drone_utility.DroneUtility()
        # can_fail=True: archiving is best-effort and must not fail the job.
        utility.sync_send_file_to(results_host, path, path, can_fail=True)


# Allow a site-specific archiver implementation to override the base one.
ResultsArchiver = utils.import_site_class(
    __file__, 'autotest.scheduler.site_archive_results',
    'SiteResultsArchiver', BaseResultsArchiver)
continue eligible_hosts_in_group = [self._hosts_available[id] for id in eligible_host_ids_in_group] # So that they show up in a sane order when viewing the job. eligible_hosts_in_group.sort(cmp=scheduler_models.Host.cmp_for_sort) # Limit ourselves to scheduling the atomic group size. if len(eligible_hosts_in_group) > max_hosts: eligible_hosts_in_group = eligible_hosts_in_group[:max_hosts] # Remove the selected hosts from our cached internal state # of available hosts in order to return the Host objects. host_list = [] for host in eligible_hosts_in_group: hosts_in_label.discard(host.id) self._hosts_available.pop(host.id) host_list.append(host) return host_list return [] site_host_scheduler = utils.import_site_class( __file__, 'autotest.scheduler.site_host_scheduler', 'site_host_scheduler', BaseHostScheduler) class HostScheduler(site_host_scheduler): pass
# Not enough eligible hosts in this atomic group label. continue eligible_hosts_in_group = [self._hosts_available[id] for id in eligible_host_ids_in_group] # So that they show up in a sane order when viewing the job. eligible_hosts_in_group.sort(cmp=scheduler_models.Host.cmp_for_sort) # Limit ourselves to scheduling the atomic group size. if len(eligible_hosts_in_group) > max_hosts: eligible_hosts_in_group = eligible_hosts_in_group[:max_hosts] # Remove the selected hosts from our cached internal state # of available hosts in order to return the Host objects. host_list = [] for host in eligible_hosts_in_group: hosts_in_label.discard(host.id) self._hosts_available.pop(host.id) host_list.append(host) return host_list return [] site_host_scheduler = utils.import_site_class( __file__, "autotest.scheduler.site_host_scheduler", "site_host_scheduler", BaseHostScheduler ) class HostScheduler(site_host_scheduler): pass
#!/usr/bin/python
try:
    import autotest.common as common
except ImportError:
    import common

import logging

from autotest.client.shared import global_config, utils
from autotest.scheduler import drone_utility


class BaseResultsArchiver(object):

    """Copies finished job results to the configured results host."""

    def archive_results(self, path):
        """Sync *path* to the SCHEDULER.results_host, if one is configured.

        :param path: results directory to archive; a trailing '/' is appended
                     so the directory contents are synced.
        """
        results_host = global_config.global_config.get_config_value(
            'SCHEDULER', 'results_host', default=None)
        # Archiving disabled or local: nothing to do.
        if not results_host or results_host == 'localhost':
            return

        if not path.endswith('/'):
            path += '/'

        logging.info('Archiving %s to %s', path, results_host)
        utility = drone_utility.DroneUtility()
        # can_fail=True: archiving is best-effort and must not fail the job.
        utility.sync_send_file_to(results_host, path, path, can_fail=True)


# Allow a site-specific archiver implementation to override the base one.
ResultsArchiver = utils.import_site_class(
    __file__, 'autotest.scheduler.site_archive_results',
    'SiteResultsArchiver', BaseResultsArchiver)
from autotest.client.shared import utils
from autotest.client import base_sysinfo

# Use a site-specific sysinfo implementation when one is installed.
sysinfo = utils.import_site_class(__file__,
                                  "autotest.client.site_sysinfo",
                                  "site_sysinfo",
                                  base_sysinfo.base_sysinfo)

# pull in some data structure stubs from base_sysinfo, for convenience
logfile = base_sysinfo.logfile
command = base_sysinfo.command
import os import sys import subprocess import logging from autotest.client.shared import utils, error from autotest.server import utils as server_utils from autotest.server.hosts import remote SiteHost = utils.import_site_class(__file__, "autotest.server.hosts.site_host", "SiteHost", remote.RemoteHost) class SerialHost(SiteHost): DEFAULT_REBOOT_TIMEOUT = SiteHost.DEFAULT_REBOOT_TIMEOUT def _initialize(self, conmux_server=None, conmux_attach=None, console_log="console.log", *args, **dargs): super(SerialHost, self)._initialize(*args, **dargs) self.__logger = None self.__console_log = console_log self.conmux_server = conmux_server self.conmux_attach = self._get_conmux_attach(conmux_attach) @classmethod
time.sleep(30) # kill it utils.signal_program(monitor_db.PID_FILE_PREFIX) def handle_sigterm(signum, frame): logging.info('Caught SIGTERM') kill_monitor() utils.delete_pid_file_if_exists(monitor_db.WATCHER_PID_FILE_PREFIX) sys.exit(1) signal.signal(signal.SIGTERM, handle_sigterm) SiteMonitorProc = utils.import_site_class( __file__, 'autotest.scheduler.site_monitor_db_watcher', 'SiteMonitorProc', object) class MonitorProc(SiteMonitorProc): def __init__(self, do_recovery=False): args = [monitor_db_path] if do_recovery: args.append("--recover-hosts") args.append(results_dir) kill_monitor() scheduler_config = scheduler_logging_config.SchedulerLoggingConfig log_name = scheduler_config.get_log_name() os.environ['AUTOTEST_SCHEDULER_LOG_NAME'] = log_name
import os, sys, subprocess, logging

from autotest.client.shared import utils, error
from autotest.server import utils as server_utils
from autotest.server.hosts import remote

# Use a site-specific host class when one is installed.
SiteHost = utils.import_site_class(
    __file__, "autotest.server.hosts.site_host", "SiteHost",
    remote.RemoteHost)


class SerialHost(SiteHost):

    # presumably this host drives a serial console via conmux — TODO confirm
    DEFAULT_REBOOT_TIMEOUT = SiteHost.DEFAULT_REBOOT_TIMEOUT

    def _initialize(self, conmux_server=None, conmux_attach=None,
                    console_log="console.log", *args, **dargs):
        """Set up conmux console parameters.

        :param conmux_server: conmux server to use, or None for the default.
        :param conmux_attach: path to the conmux-attach helper, or None to
                              have _get_conmux_attach locate one.
        :param console_log: filename for the captured console log.
        """
        super(SerialHost, self)._initialize(*args, **dargs)

        self.__logger = None
        self.__console_log = console_log
        self.conmux_server = conmux_server
        self.conmux_attach = self._get_conmux_attach(conmux_attach)

    @classmethod
    def _get_conmux_attach(cls, conmux_attach=None):
        # An explicitly supplied path wins.
        if conmux_attach:
            return conmux_attach
        # NOTE(review): this method continues past the end of this chunk.
from autotest.client.shared import utils
from autotest.client import base_sysinfo

# Use a site-specific sysinfo implementation when one is installed.
sysinfo = utils.import_site_class(__file__,
                                  "autotest.client.site_sysinfo",
                                  "site_sysinfo",
                                  base_sysinfo.base_sysinfo)

# pull in some data structure stubs from base_sysinfo, for convenience
logfile = base_sysinfo.logfile
command = base_sysinfo.command
# Copyright 2009 Google Inc. Released under the GPL v2

"""This is a convenience module to import all available types of hosts.

Implementation details:
You should 'import hosts' instead of importing every available host module.
"""

from autotest.client.shared import utils
from . import base_classes

# Prefer a site-specific Host class when one is installed; otherwise use the
# generic base implementation.
Host = utils.import_site_class(__file__,
                               "autotest.client.shared.hosts.site_host",
                               "SiteHost", base_classes.Host)
def flush_all_buffers(self): if self.leftover: self._process_line(self.leftover) self.leftover = "" self._process_logs() self.flush() def close(self): self.flush_all_buffers() SiteAutotest = client_utils.import_site_class( __file__, "autotest.server.site_autotest", "SiteAutotest", BaseAutotest) _SiteRun = client_utils.import_site_class( __file__, "autotest.server.site_autotest", "_SiteRun", _BaseRun) class Autotest(SiteAutotest): pass class _Run(_SiteRun): pass
"a temporary directory")) self.parser.add_option("--collect-crashinfo", action="store_true", dest="collect_crashinfo", default=False, help="just run crashinfo collection") self.parser.add_option("--control-filename", action="store", type="string", default=None, help=("filename to use for the server control " "file in the results directory")) def parse_args(self): self.options, self.args = self.parser.parse_args() if self.options.args: self.args += self.options.args.split() site_autoserv_parser = utils.import_site_class( __file__, "autotest.server.site_autoserv_parser", "site_autoserv_parser", base_autoserv_parser) class autoserv_parser(site_autoserv_parser): pass # create the one and only one instance of autoserv_parser autoserv_parser = autoserv_parser()
def flush(self): sys.stdout.flush() def flush_all_buffers(self): if self.leftover: self._process_line(self.leftover) self.leftover = "" self._process_logs() self.flush() def close(self): self.flush_all_buffers() SiteAutotest = client_utils.import_site_class(__file__, "autotest.server.site_autotest", "SiteAutotest", BaseAutotest) _SiteRun = client_utils.import_site_class(__file__, "autotest.server.site_autotest", "_SiteRun", _BaseRun) class Autotest(SiteAutotest): pass class _Run(_SiteRun): pass
# kill it utils.signal_program(monitor_db.PID_FILE_PREFIX) def handle_sigterm(signum, frame): logging.info('Caught SIGTERM') kill_monitor() utils.delete_pid_file_if_exists(monitor_db.WATCHER_PID_FILE_PREFIX) sys.exit(1) signal.signal(signal.SIGTERM, handle_sigterm) SiteMonitorProc = utils.import_site_class( __file__, 'autotest.scheduler.site_monitor_db_watcher', 'SiteMonitorProc', object) class MonitorProc(SiteMonitorProc): def __init__(self, do_recovery=False): args = [monitor_db_path] if do_recovery: args.append("--recover-hosts") args.append(results_dir) kill_monitor() scheduler_config = scheduler_logging_config.SchedulerLoggingConfig log_name = scheduler_config.get_log_name() os.environ['AUTOTEST_SCHEDULER_LOG_NAME'] = log_name
# kill it utils.signal_program(monitor_db.PID_FILE_PREFIX) def handle_sigterm(signum, frame): logging.info("Caught SIGTERM") kill_monitor() utils.delete_pid_file_if_exists(monitor_db.WATCHER_PID_FILE_PREFIX) sys.exit(1) signal.signal(signal.SIGTERM, handle_sigterm) SiteMonitorProc = utils.import_site_class( __file__, "autotest.scheduler.site_monitor_db_watcher", "SiteMonitorProc", object ) class MonitorProc(SiteMonitorProc): def __init__(self, do_recovery=False): args = [monitor_db_path] if do_recovery: args.append("--recover-hosts") args.append(results_dir) kill_monitor() scheduler_config = scheduler_logging_config.SchedulerLoggingConfig log_name = scheduler_config.get_log_name() os.environ["AUTOTEST_SCHEDULER_LOG_NAME"] = log_name scheduler_log_dir = scheduler_config.get_server_log_dir()
return True def disable_warnings(self, warning_type, current_time_func=time.time): """As of now, disables all further warnings of this type.""" intervals = self.disabled_warnings.setdefault(warning_type, []) if not intervals or intervals[-1][1] is not None: intervals.append((int(current_time_func()), None)) def enable_warnings(self, warning_type, current_time_func=time.time): """As of now, enables all further warnings of this type.""" intervals = self.disabled_warnings.get(warning_type, []) if intervals and intervals[-1][1] is None: intervals[-1] = (intervals[-1][0], int(current_time_func())) # load up site-specific code for generating site-specific job data get_site_job_data = utils.import_site_function(__file__, "autotest.server.site_server_job", "get_site_job_data", _get_site_job_data_dummy) site_server_job = utils.import_site_class( __file__, "autotest.server.site_server_job", "site_server_job", base_server_job) class server_job(site_server_job): pass
from autotest.client.shared import utils, base_packages

# Use a site-specific package manager when one is installed; otherwise fall
# back to the generic base implementation.
SitePackageManager = utils.import_site_class(
    __file__, "autotest.client.shared.site_packages", "SitePackageManager",
    base_packages.BasePackageManager)


class PackageManager(SitePackageManager):
    pass