def _post_record_init(self, control, options, drop_caches):
    """
    Perform job initialization not required by self.record().

    Runs after the status-recording machinery is usable: wires up cache
    dropping, packaging, sysinfo collection, managed logging, profilers
    and the local host list, then emits the initial START record for a
    fresh (non-continuation) job and starts the harness.

    @param control: Path of the control file driving this job; stored on
            self.control after the original file is archived.
    @param options: Parsed options object; attributes read here are
            cont, hostname, args, user and log.
    @param drop_caches: Passed through to self._init_drop_caches().
    """
    self._init_drop_caches(drop_caches)
    self._init_packages()

    self.sysinfo = sysinfo.sysinfo(self.resultdir)
    self._load_sysinfo_state()

    if not options.cont:
        # Fresh job: ensure the download area exists and archive a copy
        # of the control file next to the results.
        download = os.path.join(self.testdir, 'download')
        if not os.path.exists(download):
            os.mkdir(download)
        shutil.copyfile(self.control,
                        os.path.join(self.resultdir, 'control'))

    self.control = control

    # Redirect stdout/stderr (and the underlying fds) through the managed
    # logging setup before anything below produces output.
    self.logging = logging_manager.get_logging_manager(
            manage_stdout_and_stderr=True, redirect_fds=True)
    self.logging.start_logging()

    self.profilers = profilers.profilers(self)

    self.machines = [options.hostname]
    self.machine_dict_list = [{'hostname' : options.hostname}]
    # Client side tests should always run the same whether or not they are
    # running in the lab.
    self.in_lab = False
    self.hosts = set([local_host.LocalHost(hostname=options.hostname)])

    self.args = []
    if options.args:
        self.args = self._parse_args(options.args)

    if options.user:
        self.user = options.user
    else:
        self.user = getpass.getuser()

    self.sysinfo.log_per_reboot_data()

    if not options.cont:
        # Only a brand-new job opens the status log with a START record;
        # a continuation resumes the existing log.
        self.record('START', None, None)

    self.harness.run_start()

    if options.log:
        self.enable_external_logging()

    # These counters / warning hooks are not maintained by this job type;
    # they are deliberately set to None rather than 0 / empty.
    self.num_tests_run = None
    self.num_tests_failed = None

    self.warning_loggers = None
    self.warning_manager = None
def _post_record_init(self, control, options, drop_caches, extra_copy_cmdline):
    """
    Perform job initialization not required by self.record().

    Variant that additionally sets up the config object, the bootloader
    and the kernel command-line arguments copied to installed kernels.

    @param control: Path of the control file driving this job; stored on
            self.control after the original file is archived.
    @param options: Parsed options object; attributes read here are
            cont, hostname, args, user and log.
    @param drop_caches: Passed through to self._init_drop_caches().
    @param extra_copy_cmdline: Passed through to self._init_cmdline();
            extra /proc/cmdline parameters to copy to installed kernels.
    """
    self._init_drop_caches(drop_caches)
    self._init_packages()

    self.sysinfo = sysinfo.sysinfo(self.resultdir)
    self._load_sysinfo_state()

    if not options.cont:
        # Fresh job: ensure the download area exists and archive a copy
        # of the control file next to the results.
        download = os.path.join(self.testdir, "download")
        if not os.path.exists(download):
            os.mkdir(download)
        shutil.copyfile(self.control, os.path.join(self.resultdir, "control"))

    self.control = control

    # Start managed logging (stdout/stderr/fd redirection) before anything
    # below produces output.
    self.logging = logging_manager.get_logging_manager(manage_stdout_and_stderr=True, redirect_fds=True)
    self.logging.start_logging()

    self._config = config.config(self)
    self.profilers = profilers.profilers(self)
    # Bootloader must exist before the LocalHost below references it.
    self._init_bootloader()

    self.machines = [options.hostname]
    self.hosts = set([local_host.LocalHost(hostname=options.hostname, bootloader=self.bootloader)])

    self.args = []
    if options.args:
        # NOTE(review): plain whitespace split here; other variants use
        # self._parse_args() — confirm which tokenization is intended.
        self.args = options.args.split()

    if options.user:
        self.user = options.user
    else:
        self.user = getpass.getuser()

    self.sysinfo.log_per_reboot_data()

    if not options.cont:
        # Only a brand-new job opens the status log with a START record.
        self.record("START", None, None)

    self.harness.run_start()

    if options.log:
        self.enable_external_logging()

    self._init_cmdline(extra_copy_cmdline)

    # These counters / warning hooks are not maintained by this job type;
    # they are deliberately set to None rather than 0 / empty.
    self.num_tests_run = None
    self.num_tests_failed = None

    self.warning_loggers = None
    self.warning_manager = None
def __init__(self, control, args, resultdir, label, user, machines,
             client=False, parse_job='',
             ssh_user='******', ssh_port=22, ssh_pass='',
             group_name='', tag='',
             control_filename=SERVER_CONTROL_FILENAME):
    """
    Create a server side job object.

    @param control: The pathname of the control file.
    @param args: Passed to the control file.
    @param resultdir: Where to throw the results.
    @param label: Description of the job.
    @param user: Username for the job (email address).
    @param machines: List of machine hostnames this job runs against
            (joined into the 'hostname' keyval).
    @param client: True if this is a client-side control file.
    @param parse_job: string, if supplied it is the job execution tag
            that the results will be passed through to the TKO parser
            with.
    @param ssh_user: The SSH username.  [root]
    @param ssh_port: The SSH port number.  [22]
    @param ssh_pass: The SSH passphrase, if needed.
    @param group_name: If supplied, this will be written out as
            host_group_name in the keyvals file for the parser.
    @param tag: The job execution tag from the scheduler.  [optional]
    @param control_filename: The filename where the server control file
            should be written in the results directory.
    """
    super(base_server_job, self).__init__(resultdir=resultdir)

    path = os.path.dirname(__file__)
    self.control = control
    self._uncollected_log_file = os.path.join(self.resultdir,
                                              'uncollected_logs')
    debugdir = os.path.join(self.resultdir, 'debug')
    if not os.path.exists(debugdir):
        os.mkdir(debugdir)

    if user:
        self.user = user
    else:
        self.user = getpass.getuser()

    self.args = args
    self.machines = machines
    self._client = client
    self.warning_loggers = set()
    self.warning_manager = warning_manager()
    self._ssh_user = ssh_user
    self._ssh_port = ssh_port
    self._ssh_pass = ssh_pass
    self.tag = tag
    self.last_boot_tag = None
    self.hosts = set()
    self.drop_caches = False
    self.drop_caches_between_iterations = False
    self._control_filename = control_filename

    # Redirect stdout/stderr through managed logging and make the manager
    # visible to the subcommand module before any subcommands are forked.
    self.logging = logging_manager.get_logging_manager(
            manage_stdout_and_stderr=True, redirect_fds=True)
    subcommand.logging_manager_object = self.logging

    self.sysinfo = sysinfo.sysinfo(self.resultdir)
    self.profilers = profilers.profilers(self)

    job_data = {'label': label, 'user': user,
                'hostname': ','.join(machines),
                'drone': platform.node(),
                'status_version': str(self._STATUS_VERSION),
                'job_started': str(int(time.time()))}
    if group_name:
        job_data['host_group_name'] = group_name

    # only write these keyvals out on the first job in a resultdir
    if 'job_started' not in utils.read_keyval(self.resultdir):
        job_data.update(get_site_job_data(self))
        utils.write_keyval(self.resultdir, job_data)

    self._parse_job = parse_job
    # TKO parsing is only used for single-machine jobs with a parse tag.
    self._using_parser = (self._parse_job and len(machines) <= 1)
    self.pkgmgr = packages.PackageManager(
        self.autodir, run_function_dargs={'timeout': 600})
    self.num_tests_run = 0
    self.num_tests_failed = 0

    self._register_subcommand_hooks()

    # these components aren't usable on the server
    self.bootloader = None
    self.harness = None

    # set up the status logger
    self._indenter = status_indenter()
    self._logger = base_job.status_logger(
        self, self._indenter, 'status.log', 'status.log',
        record_hook=server_job_record_hook(self))
def _post_record_init(self, control, options, drop_caches, extra_copy_cmdline):
    """
    Perform job initialization not required by self.record().

    Older variant: also computes the autodir-relative directory layout,
    restores persisted job state (cleanup flag, boot tag, job tag, disk
    monitor rate), creates the results/package/analysis directories for a
    fresh job, and sets up config, the local host, the bootloader and the
    copied kernel command line.

    @param control: Path of the control file driving this job.
    @param options: Parsed options object; attributes read here are
            cont, tag, hostname, autoserv_user and log.
    @param drop_caches: Passed through to self._init_drop_caches().
    @param extra_copy_cmdline: Passed through to self._init_cmdline().
    """
    # Directory layout is derived from self.autodir, which must already
    # be set by the caller.
    self.bindir = os.path.join(self.autodir, 'bin')
    self.libdir = os.path.join(self.autodir, 'lib')
    self.testdir = os.path.join(self.autodir, 'tests')
    self.configdir = os.path.join(self.autodir, 'config')
    self.site_testdir = os.path.join(self.autodir, 'site_tests')
    self.profdir = os.path.join(self.autodir, 'profilers')
    self.toolsdir = os.path.join(self.autodir, 'tools')

    self._init_drop_caches(drop_caches)
    self._init_packages()

    # Restore persisted per-job state (survives continuations/reboots).
    self.run_test_cleanup = self.get_state("__run_test_cleanup",
                                           default=True)

    self.sysinfo = sysinfo.sysinfo(self.resultdir)
    self._load_sysinfo_state()

    self.last_boot_tag = self.get_state("__last_boot_tag", default=None)
    self.tag = self.get_state("__job_tag", default=None)

    if not options.cont:
        # Fresh job: create the package/results/download/analysis areas
        # and archive a copy of the control file next to the results.
        if not os.path.exists(self.pkgdir):
            os.mkdir(self.pkgdir)

        results = os.path.join(self.autodir, 'results')
        if not os.path.exists(results):
            os.mkdir(results)

        download = os.path.join(self.testdir, 'download')
        if not os.path.exists(download):
            os.mkdir(download)

        os.makedirs(os.path.join(self.resultdir, 'analysis'))

        shutil.copyfile(self.control,
                        os.path.join(self.resultdir, 'control'))

    self.control = control
    self.jobtag = options.tag

    # Start managed logging before anything below produces output.
    self.logging = logging_manager.get_logging_manager(
            manage_stdout_and_stderr=True, redirect_fds=True)
    self.logging.start_logging()

    self.config = config.config(self)
    self.profilers = profilers.profilers(self)
    self.host = local_host.LocalHost(hostname=options.hostname)
    self.autoserv_user = options.autoserv_user
    self._init_bootloader()
    self.sysinfo.log_per_reboot_data()

    if not options.cont:
        # A brand-new job opens the status log and one indent level.
        self.record('START', None, None)
        self._increment_group_level()

    self.harness.run_start()

    if options.log:
        self.enable_external_logging()

    # load the max disk usage rate - default to no monitoring
    self.max_disk_usage_rate = self.get_state('__monitor_disk',
                                              default=0.0)

    self._init_cmdline(extra_copy_cmdline)
def __init__(
    self,
    control,
    args,
    resultdir,
    label,
    user,
    machines,
    client=False,
    parse_job="",
    ssh_user="******",
    ssh_port=22,
    ssh_pass="",
    group_name="",
    tag="",
    control_filename=SERVER_CONTROL_FILENAME,
):
    """
    Create a server side job object.

    @param control: The pathname of the control file.
    @param args: Passed to the control file.
    @param resultdir: Where to throw the results.
    @param label: Description of the job.
    @param user: Username for the job (email address).
    @param machines: List of machine hostnames this job runs against
            (joined into the 'hostname' keyval).
    @param client: True if this is a client-side control file.
    @param parse_job: string, if supplied it is the job execution tag
            that the results will be passed through to the TKO parser
            with.
    @param ssh_user: The SSH username.  [root]
    @param ssh_port: The SSH port number.  [22]
    @param ssh_pass: The SSH passphrase, if needed.
    @param group_name: If supplied, this will be written out as
            host_group_name in the keyvals file for the parser.
    @param tag: The job execution tag from the scheduler.  [optional]
    @param control_filename: The filename where the server control file
            should be written in the results directory.
    """
    super(base_server_job, self).__init__(resultdir=resultdir)

    path = os.path.dirname(__file__)
    self.control = control
    self._uncollected_log_file = os.path.join(self.resultdir, "uncollected_logs")
    debugdir = os.path.join(self.resultdir, "debug")
    if not os.path.exists(debugdir):
        os.mkdir(debugdir)

    if user:
        self.user = user
    else:
        self.user = getpass.getuser()

    self.args = args
    self.machines = machines
    self._client = client
    self.warning_loggers = set()
    self.warning_manager = warning_manager()
    self._ssh_user = ssh_user
    self._ssh_port = ssh_port
    self._ssh_pass = ssh_pass
    self.tag = tag
    self.last_boot_tag = None
    self.hosts = set()
    self.drop_caches = False
    self.drop_caches_between_iterations = False
    self._control_filename = control_filename

    # Redirect stdout/stderr through managed logging and make the manager
    # visible to the subcommand module before any subcommands are forked.
    self.logging = logging_manager.get_logging_manager(manage_stdout_and_stderr=True, redirect_fds=True)
    subcommand.logging_manager_object = self.logging

    self.sysinfo = sysinfo.sysinfo(self.resultdir)
    self.profilers = profilers.profilers(self)

    job_data = {
        "label": label,
        "user": user,
        "hostname": ",".join(machines),
        "drone": platform.node(),
        "status_version": str(self._STATUS_VERSION),
        "job_started": str(int(time.time())),
    }
    if group_name:
        job_data["host_group_name"] = group_name

    # only write these keyvals out on the first job in a resultdir
    if "job_started" not in utils.read_keyval(self.resultdir):
        job_data.update(get_site_job_data(self))
        utils.write_keyval(self.resultdir, job_data)

    self._parse_job = parse_job
    # TKO parsing is only used for single-machine jobs with a parse tag.
    self._using_parser = self._parse_job and len(machines) <= 1
    self.pkgmgr = packages.PackageManager(self.autodir, run_function_dargs={"timeout": 600})
    self.num_tests_run = 0
    self.num_tests_failed = 0

    self._register_subcommand_hooks()

    # these components aren't usable on the server
    self.bootloader = None
    self.harness = None

    # set up the status logger
    self._indenter = status_indenter()
    self._logger = base_job.status_logger(
        self, self._indenter, "status.log", "status.log", record_hook=server_job_record_hook(self)
    )
def __init__(self, control, args, resultdir, label, user, machines,
             client=False, parse_job='',
             ssh_user='******', ssh_port=22, ssh_pass='',
             group_name='', tag=''):
    """
    Create a server side job object.

    NOTE(review): this variant is Python 2 code (`except os.error, e`,
    octal literal 0700) — do not port syntax piecemeal.

    @param control: The pathname of the control file.
    @param args: Passed to the control file.
    @param resultdir: Where to throw the results.
    @param label: Description of the job.
    @param user: Username for the job (email address).
    @param machines: List of machine hostnames this job runs against.
    @param client: True if this is a client-side control file.
    @param parse_job: string, if supplied it is the job execution tag
            that the results will be passed through to the TKO parser
            with.
    @param ssh_user: The SSH username.  [root]
    @param ssh_port: The SSH port number.  [22]
    @param ssh_pass: The SSH passphrase, if needed.
    @param group_name: If supplied, this will be written out as
            host_group_name in the keyvals file for the parser.
    @param tag: The job execution tag from the scheduler.  [optional]
    """
    # Directory layout relative to this file's location.
    path = os.path.dirname(__file__)
    self.autodir = os.path.abspath(os.path.join(path, '..'))
    self.serverdir = os.path.join(self.autodir, 'server')
    self.testdir = os.path.join(self.serverdir, 'tests')
    self.site_testdir = os.path.join(self.serverdir, 'site_tests')
    self.tmpdir = os.path.join(self.serverdir, 'tmp')
    self.conmuxdir = os.path.join(self.autodir, 'conmux')
    self.clientdir = os.path.join(self.autodir, 'client')
    self.toolsdir = os.path.join(self.autodir, 'client/tools')

    if control:
        self.control = self._load_control_file(control)
    else:
        self.control = ''

    self.resultdir = resultdir
    self.uncollected_log_file = None
    if resultdir:
        # Results requested: create the result/debug directories and
        # record where uncollected logs will live.
        self.uncollected_log_file = os.path.join(resultdir,
                                                 'uncollected_logs')
        self.debugdir = os.path.join(resultdir, 'debug')

        if not os.path.exists(resultdir):
            os.mkdir(resultdir)
        if not os.path.exists(self.debugdir):
            os.mkdir(self.debugdir)

    self.label = label
    self.user = user
    self.args = args
    self.machines = machines
    self.client = client
    self.record_prefix = ''
    self.warning_loggers = set()
    self.warning_manager = warning_manager()
    self.ssh_user = ssh_user
    self.ssh_port = ssh_port
    self.ssh_pass = ssh_pass
    self.tag = tag
    self.run_test_cleanup = True
    self.last_boot_tag = None
    self.hosts = set()
    self.drop_caches_between_iterations = False

    # Redirect stdout/stderr through managed logging and make the manager
    # visible to the subcommand module before any subcommands are forked.
    self.logging = logging_manager.get_logging_manager(
            manage_stdout_and_stderr=True, redirect_fds=True)
    subcommand.logging_manager_object = self.logging

    if resultdir:
        # sysinfo collection only makes sense when there is a resultdir.
        self.sysinfo = sysinfo.sysinfo(self.resultdir)
    self.profilers = profilers.profilers(self)

    if not os.access(self.tmpdir, os.W_OK):
        try:
            os.makedirs(self.tmpdir, 0700)
        except os.error, e:
            # Thrown if the directory already exists, which it may.
            pass
def _post_record_init(self, control, options, drop_caches, extra_copy_cmdline):
    """
    Perform job initialization not required by self.record().

    Variant that also sets up the config object, the bootloader (passed
    to the LocalHost) and the kernel command-line arguments copied to
    installed kernels.

    @param control: Path of the control file driving this job; stored on
            self.control after the original file is archived.
    @param options: Parsed options object; attributes read here are
            cont, hostname, args, user and log.
    @param drop_caches: Passed through to self._init_drop_caches().
    @param extra_copy_cmdline: Passed through to self._init_cmdline().
    """
    self._init_drop_caches(drop_caches)
    self._init_packages()

    self.sysinfo = sysinfo.sysinfo(self.resultdir)
    self._load_sysinfo_state()

    if not options.cont:
        # Fresh job: ensure the download area exists and archive a copy
        # of the control file next to the results.
        download = os.path.join(self.testdir, 'download')
        if not os.path.exists(download):
            os.mkdir(download)
        shutil.copyfile(self.control,
                        os.path.join(self.resultdir, 'control'))

    self.control = control

    # Start managed logging (stdout/stderr/fd redirection) before
    # anything below produces output.
    self.logging = logging_manager.get_logging_manager(
            manage_stdout_and_stderr=True, redirect_fds=True)
    self.logging.start_logging()

    self._config = config.config(self)
    self.profilers = profilers.profilers(self)
    # Bootloader must exist before the LocalHost below references it.
    self._init_bootloader()

    self.machines = [options.hostname]
    self.hosts = set([
        local_host.LocalHost(hostname=options.hostname,
                             bootloader=self.bootloader)
    ])

    self.args = []
    if options.args:
        self.args = self._parse_args(options.args)

    if options.user:
        self.user = options.user
    else:
        self.user = getpass.getuser()

    self.sysinfo.log_per_reboot_data()

    if not options.cont:
        # Only a brand-new job opens the status log with a START record.
        self.record('START', None, None)

    self.harness.run_start()

    if options.log:
        self.enable_external_logging()

    self._init_cmdline(extra_copy_cmdline)

    # These counters / warning hooks are not maintained by this job type;
    # they are deliberately set to None rather than 0 / empty.
    self.num_tests_run = None
    self.num_tests_failed = None

    self.warning_loggers = None
    self.warning_manager = None
def __init__(self, control, options, drop_caches=True,
             extra_copy_cmdline=None):
    """
    Prepare a client side job object.

    NOTE(review): this variant appears to be Python 2 era code (bare
    `except:`, old autotest layout) — treat it as legacy.

    @param control: The control file (pathname of).
    @param options: an object which includes:
            jobtag: The job tag string (eg "default").
            cont: If this is the continuation of this job.
            harness_type: An alternative server harness.  [None]
            use_external_logging: If true, the enable_external_logging
                  method will be called during construction.  [False]
    @param drop_caches: If true, utils.drop_caches() is called before and
            between all tests.  [True]
    @param extra_copy_cmdline: list of additional /proc/cmdline arguments to
            copy from the running kernel to all the installed kernels with
            this job
    """
    # Directory layout is rooted at $AUTODIR from the environment.
    self.autodir = os.environ['AUTODIR']
    self.bindir = os.path.join(self.autodir, 'bin')
    self.libdir = os.path.join(self.autodir, 'lib')
    self.testdir = os.path.join(self.autodir, 'tests')
    self.configdir = os.path.join(self.autodir, 'config')
    self.site_testdir = os.path.join(self.autodir, 'site_tests')
    self.profdir = os.path.join(self.autodir, 'profilers')
    self.tmpdir = os.path.join(self.autodir, 'tmp')
    self.toolsdir = os.path.join(self.autodir, 'tools')
    self.resultdir = os.path.join(self.autodir, 'results', options.tag)

    if not os.path.exists(self.resultdir):
        os.makedirs(self.resultdir)

    if not options.cont:
        self._cleanup_results_dir()

    logging_manager.configure_logging(
            client_logging_config.ClientLoggingConfig(),
            results_dir=self.resultdir,
            verbose=options.verbose)
    logging.info('Writing results to %s', self.resultdir)

    self.drop_caches_between_iterations = False
    self.drop_caches = drop_caches
    if self.drop_caches:
        logging.debug("Dropping caches")
        utils.drop_caches()

    self.control = os.path.realpath(control)
    self._is_continuation = options.cont
    # Persistent job state lives next to the control file.
    self.state_file = self.control + '.state'
    self.current_step_ancestry = []
    self.next_step_index = 0
    self.testtag = ''
    self._test_tag_prefix = ''
    self._load_state()

    self.pkgmgr = packages.PackageManager(
        self.autodir, run_function_dargs={'timeout':3600})
    self.pkgdir = os.path.join(self.autodir, 'packages')
    # Restore persisted per-job state (survives continuations/reboots).
    self.run_test_cleanup = self.get_state("__run_test_cleanup",
                                           default=True)

    self.sysinfo = sysinfo.sysinfo(self.resultdir)
    self._load_sysinfo_state()

    self.last_boot_tag = self.get_state("__last_boot_tag", default=None)
    self.tag = self.get_state("__job_tag", default=None)

    if not options.cont:
        """
        Don't cleanup the tmp dir (which contains the lockfile)
        in the constructor, this would be a problem for multiple
        jobs starting at the same time on the same client. Instead
        do the delete at the server side. We simply create the tmp
        directory here if it does not already exist.
        """
        if not os.path.exists(self.tmpdir):
            os.mkdir(self.tmpdir)

        if not os.path.exists(self.pkgdir):
            os.mkdir(self.pkgdir)

        results = os.path.join(self.autodir, 'results')
        if not os.path.exists(results):
            os.mkdir(results)

        download = os.path.join(self.testdir, 'download')
        if not os.path.exists(download):
            os.mkdir(download)

        os.makedirs(os.path.join(self.resultdir, 'analysis'))

        # Archive a copy of the control file next to the results.
        shutil.copyfile(self.control,
                        os.path.join(self.resultdir, 'control'))

    self.control = control
    self.jobtag = options.tag
    self.log_filename = self.DEFAULT_LOG_FILENAME

    # Start managed logging before anything below produces output.
    self.logging = logging_manager.get_logging_manager(
            manage_stdout_and_stderr=True, redirect_fds=True)
    self.logging.start_logging()

    self._init_group_level()

    self.config = config.config(self)
    self.harness = harness.select(options.harness, self)
    self.profilers = profilers.profilers(self)

    try:
        tool = self.config_get('boottool.executable')
        self.bootloader = boottool.boottool(tool)
    except:
        # NOTE(review): deliberately best-effort — boottool may be
        # unconfigured — but the bare except also hides real errors;
        # consider narrowing the exception type.
        pass

    self.sysinfo.log_per_reboot_data()

    if not options.cont:
        # A brand-new job opens the status log and one indent level.
        self.record('START', None, None)
        self._increment_group_level()

    self.harness.run_start()

    if options.log:
        self.enable_external_logging()

    # load the max disk usage rate - default to no monitoring
    self.max_disk_usage_rate = self.get_state('__monitor_disk',
                                              default=0.0)

    copy_cmdline = set(['console'])
    if extra_copy_cmdline is not None:
        copy_cmdline.update(extra_copy_cmdline)

    # extract console= and other args from cmdline and add them into the
    # base args that we use for all kernels we install
    cmdline = utils.read_one_line('/proc/cmdline')
    kernel_args = []
    for karg in cmdline.split():
        for param in copy_cmdline:
            # Match either the bare parameter name or "param=value".
            if karg.startswith(param) and \
                (len(param) == len(karg) or karg[len(param)] == '='):
                kernel_args.append(karg)
    self.config_set('boot.default_args', ' '.join(kernel_args))