def new_parser_harness(results_dirpath):
    """Ensure sane environment and create new parser with wrapper.

    Args:
      results_dirpath: str; Path to job results directory

    Returns:
      ParserHarness;

    Raises:
      BadResultsDirectoryError; If results dir does not exist or is malformed.
    """
    # Refuse to proceed without a results directory at all.
    if not path.exists(results_dirpath):
        raise BadResultsDirectoryError

    # The job-level keyval tells us which status log format to expect.
    job_keyval = utils.read_keyval(path.join(results_dirpath, KEYVAL))
    status_version = job_keyval[STATUS_VERSION]

    parser = status_lib.parser(status_version)
    job = parser.make_job(results_dirpath)

    # A results dir without a status log is considered malformed.
    status_log = path.join(results_dirpath, 'status.log')
    if not path.exists(status_log):
        raise BadResultsDirectoryError

    return ParserHarness(parser, job, job_keyval, status_version, status_log)
def read_keyval(dir):
    """Read and merge the keyval files from dir up to its top-level job dir.

    Keyvals read later in the walk (higher-level directories) override
    values from lower-level ones, with the single exception of "hostname".
    """
    dir = os.path.normpath(dir)
    # If no top-level job dir can be found, treat dir itself as the top.
    top_dir = tko_utils.find_toplevel_job_dir(dir) or dir
    assert dir.startswith(top_dir)

    keyval = {}
    while True:
        try:
            level_keyval = utils.read_keyval(dir)
            # HACK: "hostname" is the one key where the lower-level
            # (already merged) value must win, so drop it from this
            # higher-level keyval before merging.
            if "hostname" in level_keyval and "hostname" in keyval:
                del level_keyval["hostname"]
            keyval.update(level_keyval)
        except IOError:
            pass  # an unreadable keyval file is simply skipped
        if dir == top_dir:
            return keyval
        # Walk one directory level up; never past the filesystem root.
        assert dir != "/"
        dir = os.path.dirname(dir)
def new_parser_harness(results_dirpath):
    """Ensure sane environment and create new parser with wrapper.

    Args:
      results_dirpath: str; Path to job results directory

    Returns:
      ParserHarness;

    Raises:
      BadResultsDirectoryError; If results dir does not exist or is malformed.
    """
    if not path.exists(results_dirpath):
        raise BadResultsDirectoryError

    # Determine which parser version to use from the job-level keyval.
    keyval_path = path.join(results_dirpath, KEYVAL)
    job_keyval = utils.read_keyval(keyval_path)
    version = job_keyval[STATUS_VERSION]

    parser = status_lib.parser(version)
    job = parser.make_job(results_dirpath)

    # The status log must exist for the directory to be parseable.
    status_log_filepath = path.join(results_dirpath, 'status.log')
    if not path.exists(status_log_filepath):
        raise BadResultsDirectoryError

    return ParserHarness(
            parser, job, job_keyval, version, status_log_filepath)
def parse_host_keyval(job_dir, hostname):
    """Return the host keyval for hostname, or {} if none can be found."""
    # The "real" job dir may be higher up in the directory tree.
    top_dir = tko_utils.find_toplevel_job_dir(job_dir)
    if not top_dir:
        # No top-level job dir means no host keyvals to read.
        return {}

    # The keyval lives at <top_dir>/host_keyvals/<hostname>, if it exists.
    keyval_path = os.path.join(top_dir, "host_keyvals", hostname)
    if not os.path.isfile(keyval_path):
        return {}
    return utils.read_keyval(keyval_path)
def _pull_sysinfo_keyval(self, host, outputdir, mytest):
    """Pulls sysinfo and keyval data from the client.

    @param host: Remote host object supporting get_file().
    @param outputdir: Remote directory holding the test's output.
    @param mytest: Local test object whose outputdir/keyval are updated.
    """
    # pull the sysinfo data back on to the server
    host.get_file(os.path.join(outputdir, "sysinfo"), mytest.outputdir)

    # pull the keyval data back into the local one
    fd, path = tempfile.mkstemp(dir=self.job.tmpdir)
    os.close(fd)
    try:
        host.get_file(os.path.join(outputdir, "keyval"), path)
        keyval = utils.read_keyval(path)
    finally:
        # Always clean up the temp file, even if the transfer or the
        # keyval parse fails — the original leaked it on exception.
        os.remove(path)
    mytest.write_test_keyval(keyval)
def __init__(self, control, args, resultdir, label, user, machines,
             client=False, parse_job='',
             ssh_user='******', ssh_port=22, ssh_pass='',
             group_name='', tag='',
             control_filename=SERVER_CONTROL_FILENAME):
    """
    Create a server side job object.

    @param control: The pathname of the control file.
    @param args: Passed to the control file.
    @param resultdir: Where to throw the results.
    @param label: Description of the job.
    @param user: Username for the job (email address).
    @param machines: The list of machine hostnames involved in the job.
    @param client: True if this is a client-side control file.
    @param parse_job: string, if supplied it is the job execution tag that
            the results will be passed through to the TKO parser with.
    @param ssh_user: The SSH username.  [root]
    @param ssh_port: The SSH port number.  [22]
    @param ssh_pass: The SSH passphrase, if needed.
    @param group_name: If supplied, this will be written out as
            host_group_name in the keyvals file for the parser.
    @param tag: The job execution tag from the scheduler.  [optional]
    @param control_filename: The filename where the server control file
            should be written in the results directory.
    """
    # NOTE(review): the ssh_user default is literally '******' here while
    # the docstring says [root] — this looks like a scrubbed value; confirm
    # the intended default before relying on it.
    super(base_server_job, self).__init__(resultdir=resultdir)

    path = os.path.dirname(__file__)
    self.control = control
    # Marker file listing logs not yet collected from the machines.
    self._uncollected_log_file = os.path.join(self.resultdir,
                                              'uncollected_logs')
    debugdir = os.path.join(self.resultdir, 'debug')
    if not os.path.exists(debugdir):
        os.mkdir(debugdir)

    # Fall back to the invoking user when none was supplied.
    if user:
        self.user = user
    else:
        self.user = getpass.getuser()

    self.args = args
    self.machines = machines
    self._client = client
    self.warning_loggers = set()
    self.warning_manager = warning_manager()
    self._ssh_user = ssh_user
    self._ssh_port = ssh_port
    self._ssh_pass = ssh_pass
    self.tag = tag
    self.last_boot_tag = None
    self.hosts = set()
    self.drop_caches = False
    self.drop_caches_between_iterations = False
    self._control_filename = control_filename

    # Route stdout/stderr through the logging manager for the whole job.
    self.logging = logging_manager.get_logging_manager(
            manage_stdout_and_stderr=True, redirect_fds=True)
    subcommand.logging_manager_object = self.logging

    self.sysinfo = sysinfo.sysinfo(self.resultdir)
    self.profilers = profilers.profilers(self)

    # Job-level keyval data handed to the TKO parser.
    job_data = {'label' : label, 'user' : user,
                'hostname' : ','.join(machines),
                'drone' : platform.node(),
                'status_version' : str(self._STATUS_VERSION),
                'job_started' : str(int(time.time()))}
    if group_name:
        job_data['host_group_name'] = group_name

    # only write these keyvals out on the first job in a resultdir
    if 'job_started' not in utils.read_keyval(self.resultdir):
        job_data.update(get_site_job_data(self))
        utils.write_keyval(self.resultdir, job_data)

    self._parse_job = parse_job
    # Live parsing is only supported for single-machine jobs.
    self._using_parser = (self._parse_job and len(machines) <= 1)
    self.pkgmgr = packages.PackageManager(
        self.autodir, run_function_dargs={'timeout':600})
    self.num_tests_run = 0
    self.num_tests_failed = 0

    self._register_subcommand_hooks()

    # these components aren't usable on the server
    self.bootloader = None
    self.harness = None

    # set up the status logger
    self._indenter = status_indenter()
    self._logger = base_job.status_logger(
        self, self._indenter, 'status.log', 'status.log',
        record_hook=server_job_record_hook(self))
def test_accesses_directories_through_keyval_file(self):
    # When handed a directory, read_keyval should open the contained
    # "keyval" file rather than the directory itself.
    os.path.isdir.expect_call("dir").and_return(True)
    self._create_test_file("dir/keyval", "")
    utils.read_keyval("dir")
    # Verify the mocked calls happened exactly in the expected order.
    self.god.check_playback()
def test_accesses_files_directly(self):
    # When handed a path that is not a directory, read_keyval should
    # open that path itself.
    os.path.isdir.expect_call("file").and_return(False)
    self._create_test_file("file", "")
    utils.read_keyval("file")
    # Verify the mocked calls happened exactly in the expected order.
    self.god.check_playback()
def test_returns_empty_when_file_doesnt_exist(self):
    # A missing keyval file should yield an empty dict, not an error.
    os.path.isdir.expect_call("file").and_return(False)
    os.path.exists.expect_call("file").and_return(False)
    self.assertEqual({}, utils.read_keyval("file"))
    # Verify the mocked calls happened exactly in the expected order.
    self.god.check_playback()
def read_keyval(self, contents):
    # Test helper: write `contents` to a mocked "file" path, parse it
    # with utils.read_keyval, and return the resulting dict.
    os.path.isdir.expect_call("file").and_return(False)
    self._create_test_file("file", contents)
    keyval = utils.read_keyval("file")
    # Verify the mocked calls happened exactly in the expected order.
    self.god.check_playback()
    return keyval
def load_attributes(keyval_path):
    """Load the test attributes into a dictionary from a test keyval path.

    Does not assume that the path actually exists; returns {} when it
    does not.
    """
    if os.path.exists(keyval_path):
        return utils.read_keyval(keyval_path)
    return {}