def get_file(self, source, dest, delete_dest=False, preserve_perm=True,
             preserve_symlinks=False):
    """
    Copy files from the remote host to a local path.

    Directories will be copied recursively.
    If a source component is a directory with a trailing slash,
    the content of the directory will be copied, otherwise, the
    directory itself and its content will be copied. This
    behavior is similar to that of the program 'rsync'.

    Args:
        source: either
            1) a single file or directory, as a string
            2) a list of one or more (possibly mixed)
               files or directories
        dest: a file or a directory (if source contains a
              directory or more than one element, you must
              supply a directory dest)
        delete_dest: if this is true, the command will also clear
                     out any old files at dest that are not in the
                     source
        preserve_perm: tells get_file() to try to preserve the sources
                       permissions on files and dirs
        preserve_symlinks: try to preserve symlinks instead of
                           transforming them into files/dirs on copy

    Raises:
        AutoservRunError: the scp command failed
    """
    if isinstance(source, basestring):
        source = [source]
    dest = os.path.abspath(dest)

    try:
        remote_source = self._encode_remote_paths(source)
        local_dest = utils.sh_escape(dest)
        rsync = self._make_rsync_cmd([remote_source], local_dest,
                                     delete_dest, preserve_symlinks)
        utils.run(rsync)
    except error.CmdError as e:
        # Surface the rsync failure (it was previously commented out),
        # so the fall back to scp can be diagnosed from the logs.
        logging.warning("warning: rsync failed with: %s", e)
        logging.info("attempting to copy with scp instead")

        # scp has no equivalent to --delete, just drop the entire dest dir
        if delete_dest and os.path.isdir(dest):
            shutil.rmtree(dest)
            os.mkdir(dest)

        remote_source = self._make_rsync_compatible_source(source, False)
        if remote_source:
            # _make_rsync_compatible_source() already did the escaping
            remote_source = self._encode_remote_paths(remote_source,
                                                      escape=False)
            local_dest = utils.sh_escape(dest)
            scp = self._make_scp_cmd([remote_source], local_dest)
            try:
                utils.run(scp)
            except error.CmdError as e:
                raise error.AutoservRunError(e.args[0], e.args[1])
def quickmerge(moblab):
    """Transfer over a subset of Autotest directories.

    Quickmerge allows developers to do basic editting of tests and test
    libraries on their workstation without requiring them to emerge and
    cros deploy the autotest-server package.

    @param moblab: MoblabHost representing the MobLab being used to launch
                   the testing.
    """
    autotest_rootdir = os.path.dirname(
            os.path.dirname(os.path.realpath(__file__)))

    # We use rsync -R to copy a bunch of sources in a single run, adding a
    # dot to pinpoint the relative path root.
    cmd = ['rsync', '-aR', '--exclude', '*.pyc']

    ssh_cmd = ('ssh -o StrictHostKeyChecking=no '
               '-o UserKnownHostsFile=/dev/null')
    if int(moblab.port) != 22:
        ssh_cmd = '%s -p %s' % (ssh_cmd, moblab.port)
    cmd += ['-e', ssh_cmd]

    cmd.extend(os.path.join(autotest_rootdir, '.', path)
               for path in _QUICKMERGE_LIST)
    cmd.append('moblab@%s:%s' % (moblab.hostname,
                                 moblab_host.AUTOTEST_INSTALL_DIR))

    utils.run(cmd, timeout=240)
def _run_tast(self):
    """Runs the tast command locally to perform testing against the DUT.

    @raises error.TestFail if the tast command fails or times out (but not
        if individual tests fail).
    """
    cmd = [
        self._tast_path,
        '-verbose',
        '-logtime=false',
        'run',
        '-build=false',
        '-resultsdir=' + self.resultsdir,
        '-remotebundledir=' + self._remote_bundle_dir,
        '-remotedatadir=' + self._remote_data_dir,
        '-remoterunner=' + self._remote_test_runner_path,
        self._host.hostname,
    ]
    cmd.extend(self._test_exprs)

    logging.info('Running ' +
                 ' '.join([utils.sh_quote_word(a) for a in cmd]))
    try:
        utils.run(cmd,
                  ignore_status=False,
                  timeout=self._EXEC_TIMEOUT_SEC,
                  stdout_tee=utils.TEE_TO_LOGS,
                  stderr_tee=utils.TEE_TO_LOGS,
                  stderr_is_expected=True,
                  stdout_level=logging.INFO,
                  stderr_level=logging.ERROR)
    # CmdTimeoutError subclasses CmdError, so the more specific timeout
    # clause must come first or it is unreachable.
    except error.CmdTimeoutError as e:
        raise error.TestFail('Got timeout while running tast: %s' % str(e))
    except error.CmdError as e:
        raise error.TestFail('Failed to run tast: %s' % str(e))
def _unzip(self, filename):
    """Unzip the file.

    The destination directory name will be the stem of filename.
    E.g., _unzip('foo/bar/baz.zip') will create directory at
    'foo/bar/baz', and then will inflate zip's content under the
    directory. If here is already a directory at the stem, that directory
    will be used.

    @param filename: Path to the zip archive.
    @return Path to the inflated directory.
    """
    destination = os.path.splitext(filename)[0]
    if os.path.isdir(destination):
        logging.info('Skipping unzip %s, reusing content of %s',
                     filename, destination)
        return destination

    # Inflate into a staging directory first, so a failed unzip never
    # leaves a half-populated destination behind.
    staging = tempfile.mkdtemp(dir=os.path.dirname(filename))
    logging.info('Begin unzip %s', filename)
    try:
        utils.run('unzip', args=('-d', staging, filename))
    except:
        logging.error('Failed unzip, cleaning up.')
        # Clean up just created files.
        shutil.rmtree(staging, ignore_errors=True)
        raise
    logging.info('End unzip %s', filename)

    try:
        os.renames(staging, destination)
    except:
        logging.error('Failed rename, cleaning up.')
        shutil.rmtree(destination, ignore_errors=True)
        shutil.rmtree(staging, ignore_errors=True)
        raise
    return destination
def _log_java_version(self):
    """Quick sanity and spew of java version installed on the server."""
    # Emit `java -version` into the autotest logs; ignore_status=False
    # makes a missing/broken java installation fail loudly here.
    utils.run(
            'java',
            args=('-version',),
            ignore_status=False,
            verbose=True,
            stdout_tee=utils.TEE_TO_LOGS,
            stderr_tee=utils.TEE_TO_LOGS)
def send_file(self, source, dest, delete_dest=False,
              preserve_symlinks=False):
    """
    Copy files from a local path to the remote host.

    Directories will be copied recursively.
    If a source component is a directory with a trailing slash,
    the content of the directory will be copied, otherwise, the
    directory itself and its content will be copied. This
    behavior is similar to that of the program 'rsync'.

    Args:
        source: either
            1) a single file or directory, as a string
            2) a list of one or more (possibly mixed)
               files or directories
        dest: a file or a directory (if source contains a
              directory or more than one element, you must
              supply a directory dest)
        delete_dest: if this is true, the command will also clear
                     out any old files at dest that are not in the
                     source
        preserve_symlinks: controls if symlinks on the source will be
                           copied as such on the destination or
                           transformed into the referenced
                           file/directory

    Raises:
        AutoservRunError: the scp command failed
    """
    if isinstance(source, basestring):
        source = [source]
    remote_dest = self._encode_remote_paths([dest])

    try:
        local_sources = [utils.sh_escape(path) for path in source]
        rsync = self._make_rsync_cmd(local_sources, remote_dest,
                                     delete_dest, preserve_symlinks)
        utils.run(rsync)
    except error.CmdError as e:
        logging.warning("Command rsync failed with: %s", e)
        logging.info("Attempting to copy with scp instead")

        # scp has no equivalent to --delete, just drop the entire dest dir
        if delete_dest:
            is_dir = self.run("ls -d %s/" % dest,
                              ignore_status=True).exit_status == 0
            if is_dir:
                cmd = "rm -rf %s && mkdir %s"
                # Use the plain remote path here: remote_dest is the
                # ssh-encoded "user@host:path" form, which is only valid
                # as an scp endpoint, not inside a remote shell command.
                cmd %= (dest, dest)
                self.run(cmd)

        local_sources = self._make_rsync_compatible_source(source, True)
        if local_sources:
            scp = self._make_scp_cmd(local_sources, remote_dest)
            try:
                utils.run(scp)
            except error.CmdError as e:
                raise error.AutoservRunError(e.args[0], e.args[1])
def send_file(self, source, dest, delete_dest=False, preserve_symlinks=False): """ Copy files from a local path to the remote host. Directories will be copied recursively. If a source component is a directory with a trailing slash, the content of the directory will be copied, otherwise, the directory itself and its content will be copied. This behavior is similar to that of the program 'rsync'. Args: source: either 1) a single file or directory, as a string 2) a list of one or more (possibly mixed) files or directories dest: a file or a directory (if source contains a directory or more than one element, you must supply a directory dest) delete_dest: if this is true, the command will also clear out any old files at dest that are not in the source preserve_symlinks: controls if symlinks on the source will be copied as such on the destination or transformed into the referenced file/directory Raises: AutoservRunError: the scp command failed """ logging.debug('send_file. source: %s, dest: %s, delete_dest: %s,' 'preserve_symlinks:%s', source, dest, delete_dest, preserve_symlinks) # Start a master SSH connection if necessary. self.start_master_ssh() if isinstance(source, basestring): source = [source] remote_dest = self._encode_remote_paths([dest]) local_sources = [utils.sh_escape(path) for path in source] if not local_sources: raise error.TestError('source |%s| yielded an empty list' % ( source)) if any([local_source.find('\x00') != -1 for local_source in local_sources]): raise error.TestError('one or more sources include NUL char') # If rsync is disabled or fails, try scp. try_scp = True if self.use_rsync(): logging.debug('Using Rsync.') try: rsync = self._make_rsync_cmd(local_sources, remote_dest, delete_dest, preserve_symlinks) utils.run(rsync) try_scp = False except error.CmdError, e: logging.warning("trying scp, rsync failed: %s", e)
def _run_command(moblab, test_name, test_args, do_quickmerge):
    """Runs the test launch script.

    @param moblab: MoblabHost representing the MobLab being used for
                   testing.
    @param test_name: The name of the test to run.
    @param test_args: Dictionary of test arguments.
    @param do_quickmerge: If False, pass the --no_quickmerge flag.
    """
    launch_cmd = _get_command(moblab, test_name, test_args,
                              do_quickmerge, False)
    # Stream the launcher's output straight through to our own stdio.
    utils.run(launch_cmd, stdout_tee=sys.stdout, stderr_tee=sys.stderr)
def _umount_partition(self, mount_point):
    """
    Unmounts the mount at the given mount point.

    Also deletes the mount point directory.  Does not raise an exception
    if the mount point does not exist or the mount fails.

    @param mount_point: Path of the mount point to unmount and remove.
    """
    if os.path.exists(mount_point):
        # ignore_status=True keeps a failed umount from raising CmdError,
        # matching the documented best-effort contract of this cleanup
        # helper.  (If the filesystem is still mounted, os.rmdir below
        # may still raise OSError.)
        utils.run("sudo umount -d %s" % mount_point, ignore_status=True)
        os.rmdir(mount_point)
def get_file(self, source, dest, delete_dest=False, preserve_perm=True, preserve_symlinks=False): """ Copy files from the remote host to a local path. Directories will be copied recursively. If a source component is a directory with a trailing slash, the content of the directory will be copied, otherwise, the directory itself and its content will be copied. This behavior is similar to that of the program 'rsync'. Args: source: either 1) a single file or directory, as a string 2) a list of one or more (possibly mixed) files or directories dest: a file or a directory (if source contains a directory or more than one element, you must supply a directory dest) delete_dest: if this is true, the command will also clear out any old files at dest that are not in the source preserve_perm: tells get_file() to try to preserve the sources permissions on files and dirs preserve_symlinks: try to preserve symlinks instead of transforming them into files/dirs on copy Raises: AutoservRunError: the scp command failed """ # Start a master SSH connection if necessary. self.start_master_ssh() if isinstance(source, basestring): source = [source] dest = os.path.abspath(dest) # If rsync is disabled or fails, try scp. try_scp = True if self.use_rsync(): try: remote_source = self._encode_remote_paths(source) local_dest = utils.sh_escape(dest) rsync = self._make_rsync_cmd([remote_source], local_dest, delete_dest, preserve_symlinks) utils.run(rsync) try_scp = False except error.CmdError, e: logging.warn("trying scp, rsync failed: %s" % e)
def _log_version(self):
    """Runs the tast command locally to log its version."""
    try:
        utils.run([self._tast_path, '-version'],
                  timeout=self._EXEC_TIMEOUT_SEC,
                  stdout_tee=utils.TEE_TO_LOGS,
                  stderr_tee=utils.TEE_TO_LOGS,
                  stderr_is_expected=True,
                  stdout_level=logging.INFO,
                  stderr_level=logging.ERROR)
    except error.CmdError as e:
        # Lazy %-style logging args instead of eager '%' formatting.
        logging.error('Failed to log tast version: %s', e)
def run_once(self, host):
    """Runs platform ImageLoader tests.

    @param host: Host object representing the DUT under test.
    """
    self.host = host
    self.autotest_client = autotest.Autotest(self.host)

    # Download sample production signed components for simulated updates
    # from Google Storage. This needs to be done by a server test as the
    # client is unable to access Google Storage.
    version1 = '/tmp/prod_signed_23.0.0.207.tar.gz'
    version2 = '/tmp/prod_signed_24.0.0.186.tar.gz'
    downloads = (
        ('gs://chromeos-localmirror-private/'
         'testing/components/prod_signed_23.0.0.207.tar.gz', version1),
        ('gs://chromeos-localmirror-private/'
         'testing/components/prod_signed_24.0.0.186.tar.gz', version2),
    )
    try:
        for gs_url, local_path in downloads:
            utils.run('gsutil',
                      args=('cp', gs_url, local_path),
                      timeout=300,
                      ignore_status=False,
                      verbose=True,
                      stderr_is_expected=False,
                      ignore_timeout=False)
    except error.CmdTimeoutError:
        raise error.TestError('Slow network')
    except error.CmdError:
        raise error.TestError('Lack of Google Storage access permissions.')

    # Push the tarballs to the DUT and unpack them there.
    self.host.send_file(version1, version1)
    self.host.send_file(version2, version2)
    self.host.run('tar xvf "%s" -C "%s"' % (version1, '/home/chronos'))
    self.host.run('tar xvf "%s" -C "%s"' % (version2, '/home/chronos'))

    version1_unpack = '/home/chronos/prod_signed_23.0.0.207'
    version2_unpack = '/home/chronos/prod_signed_24.0.0.186'
    self.host.run('chmod -R 0755 "%s"' % (version1_unpack))
    self.host.run('chmod -R 0755 "%s"' % (version2_unpack))

    # Run the actual test (installing and verifying component updates on
    # the client.
    self._run_client_test(version1_unpack, version2_unpack)

    self.host.run('rm -rf "%s" "%s" "%s" "%s"' %
                  (version1, version2, version1_unpack, version2_unpack))
def send_file(self, source, dest, delete_dest=False, preserve_symlinks=False): """ Copy files from a local path to the remote host. Directories will be copied recursively. If a source component is a directory with a trailing slash, the content of the directory will be copied, otherwise, the directory itself and its content will be copied. This behavior is similar to that of the program 'rsync'. Args: source: either 1) a single file or directory, as a string 2) a list of one or more (possibly mixed) files or directories dest: a file or a directory (if source contains a directory or more than one element, you must supply a directory dest) delete_dest: if this is true, the command will also clear out any old files at dest that are not in the source preserve_symlinks: controls if symlinks on the source will be copied as such on the destination or transformed into the referenced file/directory Raises: AutoservRunError: the scp command failed """ # Start a master SSH connection if necessary. self.start_master_ssh() if isinstance(source, basestring): source_is_dir = os.path.isdir(source) source = [source] remote_dest = self._encode_remote_paths([dest]) # If rsync is disabled or fails, try scp. try_scp = True if self.use_rsync(): try: local_sources = [utils.sh_escape(path) for path in source] rsync = self._make_rsync_cmd(local_sources, remote_dest, delete_dest, preserve_symlinks) utils.run(rsync) try_scp = False except error.CmdError, e: logging.warn("trying scp, rsync failed: %s" % e)
def _modify_file(self, path, func):
    """
    Modifies a file as the root user.

    @param path: The path to the file to modify.
    @param func: A function that will be invoked with a single argument
        (the current contents of the file, or None if the file does not
        exist) and which should return the new contents.
    """
    if os.path.exists(path):
        contents = utils.system_output("sudo cat %s" % path)
    else:
        # Missing file: pass None through to func exactly once below.
        # (Previously func(None) was computed here and then fed back
        # into func again, applying the transformation twice.)
        contents = None
    utils.run("sudo dd of=%s" % path, stdin=func(contents))
def _make_factory_package(self, factory_test_image, test_image):
    """
    Makes the factory package.

    @param factory_test_image: Image passed as --factory to the
        make-factory-package script.
    @param test_image: Image passed as --release to the script.
    """
    # Create a pseudo-HWID-updater that merely sets the HWID to "vm" or
    # "servo" so that the appropriate test list will run. (This gets run
    # by the factory install shim.)
    hwid_updater = os.path.join(self.tmpdir, "hwid_updater.sh")
    with open(hwid_updater, "w") as f:
        f.write(_HWID_UPDATER_SH_TEMPLATE % self.get_hwid_cfg())

    script = os.path.join(self.src_root(), _MAKE_FACTORY_PACKAGE_PATH)
    utils.run("%s --factory=%s --release=%s "
              "--firmware_updater=none --hwid_updater=%s " %
              (script, factory_test_image, test_image, hwid_updater))
def initialize(self, host=None, adb_dir=_ADB_DIR_M,
               sdk_tools_dir=_SDK_TOOLS_DIR_M):
    """Sets up the tools and binary bundles for the test.

    @param host: Host object for the DUT.
    @param adb_dir: Bundle location of the adb binaries to install.
    @param sdk_tools_dir: Bundle location of the SDK tools to install.
    """
    logging.info('Hostname: %s', host.hostname)
    self._host = host
    self._install_paths = []

    # Tests in the lab run within individual lxc container instances.
    cache_root = (_TRADEFED_CACHE_CONTAINER if utils.is_in_container()
                  else _TRADEFED_CACHE_LOCAL)

    # TODO(ihf): reevaluate this again when we run out of memory. We could
    # for example use 32 bit java on the first run but not during retries.
    # b/62895114. If select_32bit_java gets deleted for good also remove
    # it from the base image.
    # Try to save server memory (crbug.com/717413).
    # select_32bit_java()

    # Quick sanity check and spew of java version installed on the server.
    utils.run('java',
              args=('-version',),
              ignore_status=False,
              verbose=True,
              stdout_tee=utils.TEE_TO_LOGS,
              stderr_tee=utils.TEE_TO_LOGS)

    # The content of the cache survives across jobs.
    self._safe_makedirs(cache_root)
    self._tradefed_cache = os.path.join(cache_root, 'cache')
    self._tradefed_cache_lock = os.path.join(cache_root, 'lock')

    # The content of the install location does not survive across jobs
    # and is isolated (by using a unique path) against other autotest
    # instances. This is not needed for the lab, but if somebody wants to
    # run multiple TradedefTest instance.
    self._tradefed_install = tempfile.mkdtemp(prefix=_TRADEFED_PREFIX)

    # Under lxc the cache is shared between multiple autotest/tradefed
    # instances. We need to synchronize access to it. All binaries are
    # installed through the (shared) cache into the local (unshared)
    # lxc/autotest instance storage.
    # If clearing the cache it must happen before all downloads.
    self._clear_download_cache_if_needed()

    # Set permissions (rwxr-xr-x) to the executable binaries.
    permission = (stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP
                  | stat.S_IROTH | stat.S_IXOTH)
    self._install_files(adb_dir, _ADB_FILES, permission)
    self._install_files(sdk_tools_dir, _SDK_TOOLS_FILES, permission)
def scp_telemetry_results(self, client_ip, dut):
    """Copy telemetry results from dut.

    @param client_ip: The ip address of the DUT
    @param dut: The autotest host object representing DUT.

    @returns status code for scp command.
    """
    remote = dut.hostname if dut else client_ip
    src = 'root@%s:%s/results-chart.json' % (remote,
                                             DUT_CHROME_RESULTS_DIR)
    command = ' '.join([
        'scp',
        telemetry_runner.DUT_SCP_OPTIONS,
        RSA_KEY,
        '-v',
        src,
        self.resultsdir,
    ])
    logging.debug('Retrieving Results: %s', command)
    try:
        exit_code = utils.run(
                command,
                timeout=TELEMETRY_TIMEOUT_MINS * 60).exit_status
    except Exception as e:
        logging.error('Failed to retrieve results: %s', e)
        raise
    logging.debug('command return value: %d', exit_code)
    return exit_code
def scp_telemetry_results(self, client_ip, dut, file, host_dir):
    """Copy telemetry results from dut.

    @param client_ip: The ip address of the DUT
    @param dut: The autotest host object representing DUT.
    @param file: The file to copy from DUT.
    @param host_dir: The directory on host to put the file .

    @returns status code for scp command.
    """
    remote = dut.hostname if dut else client_ip
    src = 'root@%s:%s/%s' % (remote, DUT_CHROME_RESULTS_DIR, file)
    command = ' '.join(
            ['scp', DUT_SCP_OPTIONS, RSA_KEY, '-v', src, host_dir])
    logging.debug('Retrieving Results: %s', command)
    try:
        exit_code = utils.run(
                command,
                timeout=WAIT_FOR_CMD_TIMEOUT_SECS).exit_status
    except Exception as e:
        logging.error('Failed to retrieve results: %s', e)
        raise
    logging.debug('command return value: %d', exit_code)
    return exit_code
def _run(self, command, timeout, ignore_status, stdout, stderr,
         connect_timeout, env, options, stdin, args):
    """Helper function for run()."""
    ssh_cmd = self.ssh_command(connect_timeout, options)
    env = "export %s;" % env if env.strip() else ""
    for arg in args:
        command += ' "%s"' % utils.sh_escape(arg)
    full_cmd = '%s "%s %s"' % (ssh_cmd, env, utils.sh_escape(command))
    result = utils.run(full_cmd, timeout, True, stdout, stderr,
                       verbose=False, stdin=stdin,
                       stderr_is_expected=ignore_status)

    # The error messages will show up in band (indistinguishable from
    # stuff sent through the SSH connection), so we have the remote
    # computer echo the message "Connected." before running any command.
    # Since the following 2 errors have to do with connecting, it's safe
    # to do these checks.
    if result.exit_status == 255:
        timeout_pattern = (r'^ssh: connect to host .* port .*: '
                           r'Connection timed out\r$')
        if re.search(timeout_pattern, result.stderr):
            raise error.AutoservSSHTimeout("ssh timed out", result)
        if "Permission denied." in result.stderr:
            raise error.AutoservSshPermissionDeniedError(
                    "ssh permission denied", result)
    if not ignore_status and result.exit_status > 0:
        raise error.AutoservRunError("command execution error", result)

    return result
def correct_results_folder_permission(results):
    """Make sure the results folder has the right permission settings.

    For tests running with server-side packaging, the results folder has
    the owner of root. This must be changed to the user running the
    autoserv process, so parsing job can access the results folder.
    TODO(dshi): crbug.com/459344 Remove this function when test container
    can be unprivileged container.

    @param results: Path to the results folder.
    """
    if not results:
        return

    # Hand ownership to the uid/gid of the autoserv process itself.
    for chown_cmd in ('sudo -n chown -R %s "%s"' % (os.getuid(), results),
                      'sudo -n chgrp -R %s "%s"' % (os.getgid(), results)):
        utils.run(chown_cmd)
def _unzip(self, filename):
    """Unzip the file.

    The destination directory name will be the stem of filename.
    E.g., _unzip('foo/bar/baz.zip') will create directory at
    'foo/bar/baz', and then will inflate zip's content under the
    directory. If here is already a directory at the stem, that directory
    will be used.

    @param filename: Path to the zip archive.
    @return Path to the inflated directory.
    """
    destination = os.path.splitext(filename)[0]
    # Reuse a previously inflated copy when it already exists.
    if not os.path.isdir(destination):
        self._safe_makedirs(destination)
        utils.run('unzip', args=('-d', destination, filename))
    return destination
def _run_tast(self, subcommand, extra_subcommand_args, timeout_sec,
              log_stdout=False):
    """Runs the tast command locally to e.g. list available tests or
    perform testing against the DUT.

    @param subcommand: Subcommand to pass to the tast executable, e.g.
        'run' or 'list'.
    @param extra_subcommand_args: List of additional subcommand
        arguments.
    @param timeout_sec: Integer timeout for the command in seconds.
    @param log_stdout: If true, write stdout to log.

    @returns client.common_lib.utils.CmdResult object describing the
        result.

    @raises error.TestFail if the tast command fails or times out.
    """
    cmd = [
        self._tast_path,
        '-verbose=true',
        '-logtime=false',
        subcommand,
        '-build=false',
        '-remotebundledir=' + self._remote_bundle_dir,
        '-remotedatadir=' + self._remote_data_dir,
        '-remoterunner=' + self._remote_test_runner_path,
        '-sshretries=%d' % self._SSH_CONNECT_RETRIES,
    ]
    cmd.extend(extra_subcommand_args)
    cmd.append('%s:%d' % (self._host.hostname, self._host.port))
    cmd.extend(self._test_exprs)

    logging.info('Running ' +
                 ' '.join([utils.sh_quote_word(a) for a in cmd]))
    try:
        return utils.run(
                cmd,
                ignore_status=False,
                timeout=timeout_sec,
                stdout_tee=(utils.TEE_TO_LOGS if log_stdout else None),
                stderr_tee=utils.TEE_TO_LOGS,
                stderr_is_expected=True,
                stdout_level=logging.INFO,
                stderr_level=logging.ERROR)
    # CmdTimeoutError subclasses CmdError, so the timeout clause must be
    # listed first or it can never be reached.
    except error.CmdTimeoutError as e:
        raise error.TestFail('Got timeout while running tast: %s' % str(e))
    except error.CmdError as e:
        # The tast command's output generally ends with a line describing
        # the error that was encountered; include it in the first line of
        # the TestFail exception. Fall back to stderr if stdout is empty
        # (as is the case with the "list" subcommand, which uses stdout
        # to print test data).
        get_last_line = lambda s: s.strip().split('\n')[-1].strip()
        last_line = (get_last_line(e.result_obj.stdout) or
                     get_last_line(e.result_obj.stderr))
        msg = (' (last line: %s)' % last_line) if last_line else ''
        raise error.TestFail('Failed to run tast%s: %s' % (msg, str(e)))
def get_results(self, resultsdir):
    """Copies results from the DUT to a local results directory.

    Copy the tarball over to the results folder, untar, and delete the
    tarball if everything was successful. This will effectively place
    all the logs relevant to factory testing in the job's results folder.

    @param resultsdir: The directory in which to untar the contents of
        the tarball factory_bug generates.
    """
    logging.info('Getting results logs for test_list.')

    # factory_bug reports the tarball path on stderr.
    try:
        factory_bug_log = self._host.run('factory_bug').stderr
    except error.CmdError as e:
        logging.error('Could not execute factory_bug: %s', e)
        return

    try:
        remote_tar = re.match(self.FACTORY_BUG_RE,
                              factory_bug_log).groups(1)[0]
    except (IndexError, AttributeError):
        logging.error('could not collect logs for factory results, '
                      'factory bug returned %s', factory_bug_log)
        return

    local_tar = os.path.join(resultsdir, os.path.basename(remote_tar))
    try:
        self._host.get_file(remote_tar, local_tar)
    except error.AutoservRunError as e:
        logging.error('Failed to pull back the results tarball: %s', e)
        return

    try:
        utils.run(self.UNTAR_COMMAND % (local_tar, resultsdir))
    except error.CmdError as e:
        logging.error('Failed to untar the results tarball: %s', e)
        return
    finally:
        # Drop the tarball whether or not untarring succeeded.
        if os.path.exists(local_tar):
            os.remove(local_tar)
def _run(self, *args, **kwargs):
    """Executes the given command line.

    To support SDK tools, such as adb or aapt, this adds _install_paths
    to the extra_paths. Before invoking this, ensure _install_files() has
    been called.
    """
    paths = kwargs.get('extra_paths', []) + self._install_paths
    kwargs['extra_paths'] = paths
    return utils.run(*args, **kwargs)
def _mount_partition(self, image, index):
    """
    Mounts a partition of an image temporarily using loopback.

    The partition will be automatically unmounted when the test exits.

    @param image: The image to mount.
    @param index: The partition number to mount.

    @return: The mount point.
    """
    mount_point = os.path.join(self.tmpdir, "%s_%d" % (image, index))
    if not os.path.exists(mount_point):
        os.makedirs(mount_point)

    # cgpt reports the partition start (-b) and size (-s) in 512-byte
    # sectors; convert both to bytes for the loop mount options.
    cgpt_cmd = "cgpt show -i %d %s" % (index, image)
    offset = 512 * int(utils.system_output(cgpt_cmd + " -b"))
    size = 512 * int(utils.system_output(cgpt_cmd + " -s"))

    utils.run("sudo mount -o rw,loop,offset=%d,sizelimit=%d %s %s" %
              (offset, size, image, mount_point))
    self.cleanup_tasks.append(lambda: self._umount_partition(mount_point))
    return mount_point
def install(self, host, label='autotest', default=False, kernel_args='',
            install_vmlinux=True):
    """
    Install a kernel on the remote host.

    This will also invoke the guest's bootloader to set this kernel as
    the default kernel if default=True.

    Args:
        host: the host on which to install the kernel
        label: bootloader label to install the kernel under (max 15
            chars)
        default: if True, make this kernel the default boot entry
        kernel_args: extra kernel command-line arguments to pass to the
            bootloader
        install_vmlinux: if True, also extract and install the
            uncompressed vmlinux image

    Raises:
        AutoservError: no package has yet been obtained. Call
            RPMKernel.get() with a .rpm package.
    """
    # Single-line message strings: the old backslash-continued literals
    # embedded runs of indentation whitespace in the error text.
    if len(label) > 15:
        raise error.AutoservError(
                "label for kernel is too long (> 15 chars): %s" % label)
    if self.source_material is None:
        raise error.AutoservError(
                "A kernel must first be specified via get()")
    rpm = self.source_material

    remote_tmpdir = host.get_tmp_dir()
    remote_rpm = os.path.join(remote_tmpdir, os.path.basename(rpm))
    rpm_package = utils.run('/usr/bin/rpm -q -p %s' % rpm).stdout
    vmlinuz = self.get_image_name()
    host.send_file(rpm, remote_rpm)
    # Remove any previously installed copy before forcing the install.
    host.run('rpm -e ' + rpm_package, ignore_status=True)
    host.run('rpm --force -i ' + remote_rpm)

    # Copy over the uncompressed image if there is one
    if install_vmlinux:
        vmlinux = self.get_vmlinux_name()
        host.run('cd /;rpm2cpio %s | cpio -imuv .%s' %
                 (remote_rpm, vmlinux))
        host.run('ls ' + vmlinux)  # Verify

    host.bootloader.remove_kernel(label)
    host.bootloader.add_kernel(vmlinuz, label, args=kernel_args,
                               default=default)

    if kernel_args:
        host.bootloader.add_args(label, kernel_args)
    if not default:
        host.bootloader.boot_once(label)
def get_version(self):
    """Get the version of the kernel to be installed.

    Returns:
        The version string, as would be returned
        by 'make kernelrelease'.

    Raises:
        AutoservError: no package has yet been obtained. Call
            RPMKernel.get() with a .rpm package.
    """
    if self.source_material is None:
        # Single-line message: the old backslash-continued literal
        # embedded a run of indentation spaces in the error text.
        raise error.AutoservError(
                "A kernel must first be specified via get()")

    # Query the RPM header and pull the Version field out of the output.
    retval = utils.run('rpm -qpi %s | grep Version | awk \'{print($3);}\''
                       % utils.sh_escape(self.source_material))
    return retval.stdout.strip()
def get_image_name(self):
    """Get the name of the kernel image to be installed.

    Returns:
        The full path to the kernel image file as it will be
        installed on the host.

    Raises:
        AutoservError: no package has yet been obtained. Call
            RPMKernel.get() with a .rpm package.
    """
    if self.source_material is None:
        # Single-line message: the old backslash-continued literal
        # embedded a run of indentation spaces in the error text.
        raise error.AutoservError(
                "A kernel must first be specified via get()")

    # Escape the package path for the shell, matching get_version().
    vmlinuz = utils.run('rpm -q -l -p %s | grep /boot/vmlinuz'
                        % utils.sh_escape(self.source_material)
                        ).stdout.strip()
    return vmlinuz
def get_version(self):
    """Get the version of the kernel to be installed.

    Returns:
        The version string, as would be returned
        by 'make kernelrelease'.

    Raises:
        AutoservError: no package has yet been obtained. Call
            DEBKernel.get() with a .deb package.
    """
    if self.source_material is None:
        raise error.AutoservError("A kernel must first be "
                                  "specified via get()")

    # dpkg-deb -f extracts a single control field from the package.
    deb_path = utils.sh_escape(self.source_material)
    result = utils.run('dpkg-deb -f "%s" version' % deb_path)
    return result.stdout.strip()
def get_version(self):
    """Get the version of the kernel to be installed.

    Returns:
        The version string, as would be returned
        by 'make kernelrelease'.

    Raises:
        AutoservError: no package has yet been obtained. Call
            DEBKernel.get() with a .deb package.
    """
    if self.source_material is None:
        raise error.AutoservError("A kernel must first be "
                                  "specified via get()")

    # Read the "version" control field straight out of the .deb.
    version_cmd = ('dpkg-deb -f "%s" version'
                   % utils.sh_escape(self.source_material))
    return utils.run(version_cmd).stdout.strip()
def get_initrd_name(self):
    """Get the name of the initrd file to be installed.

    Returns:
        The full path to the initrd file as it will be
        installed on the host. If the package includes no initrd
        file, None is returned

    Raises:
        AutoservError: no package has yet been obtained. Call
            RPMKernel.get() with a .rpm package.
    """
    if self.source_material is None:
        # Single-line message: the old backslash-continued literal
        # embedded a run of indentation spaces in the error text.
        raise error.AutoservError(
                "A kernel must first be specified via get()")

    # Escape the package path for the shell, matching get_version().
    res = utils.run('rpm -q -l -p %s | grep /boot/initrd'
                    % utils.sh_escape(self.source_material),
                    ignore_status=True)
    # grep exits non-zero when the package carries no initrd file.
    if res.exit_status:
        return None
    return res.stdout.strip()
def _start_servod(machine):
    """Try to start servod in moblab if it's not already running or
    running with different board or port.

    @param machine: Name of the dut used for test.
    """
    if not utils.is_moblab():
        return

    logging.debug('Trying to start servod.')
    try:
        afe = frontend.AFE()
        board = server_utils.get_board_from_afe(machine, afe)
        hosts = afe.get_hosts(hostname=machine)
        servo_host = hosts[0].attributes.get('servo_host', None)
        servo_port = hosts[0].attributes.get('servo_port', 9999)
        # PEP 8 `not in`; logging.warning replaces the deprecated
        # logging.warn alias.
        if servo_host not in ['localhost', '127.0.0.1']:
            logging.warning('Starting servod is aborted. The dut\'s '
                            'servo_host attribute is not set to '
                            'localhost.')
            return
    except (urllib2.HTTPError, urllib2.URLError):
        # Ignore error if RPC failed to get board
        logging.error('Failed to get board name from AFE. Start servod '
                      'is aborted')
        return

    try:
        pid = utils.run('pgrep servod').stdout
        cmd_line = utils.run('ps -fp %s' % pid).stdout
        if ('--board %s' % board in cmd_line and
                '--port %s' % servo_port in cmd_line):
            logging.debug('Servod is already running with given board '
                          'and port. There is no need to restart servod.')
            return
        logging.debug('Servod is running with different board or port. '
                      'Stopping existing servod.')
        utils.run('sudo stop servod')
    except error.CmdError:
        # servod is not running.
        pass

    try:
        utils.run(START_SERVOD_CMD % (board, servo_port))
        logging.debug('Servod is started')
    except error.CmdError as e:
        logging.error('Servod failed to be started, error: %s', e)
def _run(self, command, timeout, ignore_status, stdout, stderr,
         connect_timeout, env, options, stdin, args):
    """Helper function for run()."""
    ssh_cmd = self.ssh_command(connect_timeout, options)
    if env.strip():
        env = "export %s;" % env
    else:
        env = ""
    # Append each extra argument, individually quoted and escaped.
    remote_command = command + ''.join(
            ' "%s"' % utils.sh_escape(arg) for arg in args)
    full_cmd = '%s "%s %s"' % (ssh_cmd, env,
                               utils.sh_escape(remote_command))
    result = utils.run(full_cmd, timeout, True, stdout, stderr,
                       verbose=False, stdin=stdin,
                       stderr_is_expected=ignore_status)

    # The error messages will show up in band (indistinguishable
    # from stuff sent through the SSH connection), so we have the
    # remote computer echo the message "Connected." before running
    # any command.  Since the following 2 errors have to do with
    # connecting, it's safe to do these checks.
    if result.exit_status == 255:
        if re.search(
                r'^ssh: connect to host .* port .*: '
                r'Connection timed out\r$', result.stderr):
            raise error.AutoservSSHTimeout("ssh timed out", result)
        if "Permission denied." in result.stderr:
            msg = "ssh permission denied"
            raise error.AutoservSshPermissionDeniedError(msg, result)
    if not ignore_status and result.exit_status > 0:
        raise error.AutoservRunError("command execution error", result)

    return result
def _ensure_deps(dut, test_name):
    """
    Ensure the dependencies are locally available on DUT.

    Runs the host-side fetch_benchmark_deps.py script to download the
    telemetry test's DEPs, then copies each downloaded file onto the DUT.

    @param dut: The autotest host object representing DUT.
    @param test_name: Name of the telemetry test.

    @raises error.TestFail: if fetching the DEPs fails, a fetched DEP is
        missing on disk, or copying a DEP to the DUT fails.
    """
    # Get DEPs using host's telemetry.
    chrome_root_dir = _find_chrome_root_dir()
    format_string = ('python %s/tools/perf/fetch_benchmark_deps.py %s')
    command = format_string % (chrome_root_dir, test_name)
    logging.info('Getting DEPs: %s', command)
    stdout = StringIO.StringIO()
    stderr = StringIO.StringIO()
    try:
        # Return value is unused; the script's stdout (list of DEP paths)
        # is captured via the tee stream instead.
        utils.run(command, stdout_tee=stdout, stderr_tee=stderr)
    except error.CmdError:
        logging.debug('Error occurred getting DEPs: %s\n %s\n',
                      stdout.getvalue(), stderr.getvalue())
        raise error.TestFail('Error occurred while getting DEPs.')

    # Download DEPs to DUT.
    # send_file() relies on rsync over ssh. Couldn't be better.
    stdout_str = stdout.getvalue()
    stdout.close()
    stderr.close()
    # Each whitespace-separated token of the script's output is a DEP path
    # relative to the chrome root.
    for dep in stdout_str.split():
        src = os.path.join(chrome_root_dir, dep)
        dst = os.path.join(CLIENT_CHROME_ROOT, dep)
        if not os.path.isfile(src):
            raise error.TestFail('Error occurred while saving DEPs.')
        logging.info('Copying: %s -> %s', src, dst)
        try:
            dut.send_file(src, dst)
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are no longer swallowed; any transfer failure is still reported
            # as a test failure.
            raise error.TestFail('Error occurred while sending DEPs to dut.\n')
if try_scp: # scp has no equivalent to --delete, just drop the entire dest dir if delete_dest: is_dir = self.run("ls -d %s/" % dest, ignore_status=True).exit_status == 0 if is_dir: cmd = "rm -rf %s && mkdir %s" cmd %= (dest, dest) self.run(cmd) local_sources = self._make_rsync_compatible_source(source, True) if local_sources: scp = self._make_scp_cmd(local_sources, remote_dest) try: utils.run(scp) except error.CmdError, e: raise error.AutoservRunError(e.args[0], e.args[1]) def ssh_ping(self, timeout=60): try: self.run("true", timeout=timeout, connect_timeout=timeout) except error.AutoservSSHTimeout: msg = "Host (ssh) verify timed out (timeout = %d)" % timeout raise error.AutoservSSHTimeout(msg) except error.AutoservSshPermissionDeniedError: #let AutoservSshPermissionDeniedError be visible to the callers raise except error.AutoservRunError, e: # convert the generic AutoservRunError into something more
class BaseAutotest(installable_object.InstallableObject):
    """
    This class represents the Autotest program.

    Autotest is used to run tests automatically and collect the results.
    It also supports profilers.

    Implementation details:
    This is a leaf class in an abstract class hierarchy, it must
    implement the unimplemented methods in parent classes.
    """

    def __init__(self, host=None):
        """Create an Autotest wrapper, optionally bound to a default host.

        @param host: Default Host instance used by install() when no host is
            passed explicitly.
        """
        self.host = host
        # Tracks whether get() has fetched the source material yet.
        self.got = False
        # Tracks whether the client has been installed on a host.
        self.installed = False
        self.serverdir = utils.get_server_dir()
        super(BaseAutotest, self).__init__()

    # Class-wide flag: when True, get_install_dir() returns a unique temp
    # directory instead of a standard install location.
    install_in_tmpdir = False

    @classmethod
    def set_install_in_tmpdir(cls, flag):
        """
        Sets a flag that controls whether or not Autotest should by
        default be installed in a "standard" directory (e.g.
        /home/autotest, /usr/local/autotest) or a temporary directory.
        """
        cls.install_in_tmpdir = flag

    @classmethod
    def get_client_autodir_paths(cls, host):
        """Return the list of candidate client install paths from the
        global AUTOSERV/client_autodir_paths config value.

        @param host: Unused here; kept for interface symmetry with the
            other host-aware classmethods.
        """
        return global_config.global_config.get_config_value(
                'AUTOSERV', 'client_autodir_paths', type=list)

    @classmethod
    def get_installed_autodir(cls, host):
        """
        Find where the Autotest client is installed on the host.

        @returns an absolute path to an installed Autotest client root.
        @raises AutodirNotFoundError if no Autotest installation can be found.
        """
        autodir = host.get_autodir()
        if autodir:
            logging.debug('Using existing host autodir: %s', autodir)
            return autodir

        # NOTE(review): this deliberately(?) uses Autotest. rather than cls.
        # — subclass overrides of get_client_autodir_paths would be bypassed;
        # confirm this is intended.
        for path in Autotest.get_client_autodir_paths(host):
            try:
                autotest_binary = os.path.join(path, 'bin', 'autotest')
                # An executable bin/autotest plus a writable root counts as a
                # usable existing installation.
                host.run('test -x %s' % utils.sh_escape(autotest_binary))
                host.run('test -w %s' % utils.sh_escape(path))
                logging.debug('Found existing autodir at %s', path)
                return path
            except error.AutoservRunError:
                logging.debug('%s does not exist on %s', autotest_binary,
                              host.hostname)
        raise AutodirNotFoundError

    @classmethod
    def get_install_dir(cls, host):
        """
        Determines the location where autotest should be installed on
        host. If self.install_in_tmpdir is set, it will return a unique
        temporary directory that autotest can be installed in.
        Otherwise, looks for an existing installation to use; if none
        is found, looks for a usable directory in the global config
        client_autodir_paths.
        """
        try:
            install_dir = cls.get_installed_autodir(host)
        except AutodirNotFoundError:
            install_dir = cls._find_installable_dir(host)

        if cls.install_in_tmpdir:
            return host.get_tmp_dir(parent=install_dir)
        return install_dir

    @classmethod
    def _find_installable_dir(cls, host):
        """Return the first configured candidate path that can be created
        and written on host.

        @raises error.AutoservInstallError if no candidate path is usable.
        """
        client_autodir_paths = cls.get_client_autodir_paths(host)
        for path in client_autodir_paths:
            try:
                host.run('mkdir -p %s' % utils.sh_escape(path))
                host.run('test -w %s' % utils.sh_escape(path))
                return path
            except error.AutoservRunError:
                logging.debug('Failed to create %s', path)
        raise error.AutoservInstallError(
                'Unable to find a place to install Autotest; tried %s' %
                ', '.join(client_autodir_paths))

    def get_fetch_location(self):
        """Return the configured package fetch repos, reversed so the
        highest-priority repo comes first."""
        c = global_config.global_config
        repos = c.get_config_value("PACKAGES", 'fetch_location', type=list,
                                   default=[])
        repos.reverse()
        return repos

    def install(self, host=None, autodir=None):
        """Install autotest on host using the default install modes."""
        self._install(host=host, autodir=autodir)

    def install_full_client(self, host=None, autodir=None):
        """Install autotest without autoserv- or packaging-based modes
        (i.e. a full standalone client install)."""
        self._install(host=host, autodir=autodir, use_autoserv=False,
                      use_packaging=False)

    def install_no_autoserv(self, host=None, autodir=None):
        """Install autotest without the autoserv-dependent install modes."""
        self._install(host=host, autodir=autodir, use_autoserv=False)

    def _install_using_packaging(self, host, autodir):
        """Install the autotest client via the packaging system.

        @raises error.PackageInstallError if no fetch repos are configured
            or the package install fails.
        """
        repos = self.get_fetch_location()
        if not repos:
            raise error.PackageInstallError("No repos to install an "
                                            "autotest client from")
        pkgmgr = packages.PackageManager(autodir, hostname=host.hostname,
                                         repo_urls=repos,
                                         do_locking=False,
                                         run_function=host.run,
                                         run_function_dargs=dict(timeout=600))
        # The packages dir is used to store all the packages that
        # are fetched on that client. (for the tests,deps etc.
        # too apart from the client)
        pkg_dir = os.path.join(autodir, 'packages')
        # clean up the autodir except for the packages directory
        host.run('cd %s && ls | grep -v "^packages$"'
                 ' | xargs rm -rf && rm -rf .[^.]*' % autodir)
        pkgmgr.install_pkg('autotest', 'client', pkg_dir, autodir,
                           preserve_install_dir=True)
        self.installed = True

    def _install_using_send_file(self, host, autodir):
        """Install a lightweight client by copying everything except the
        heavyweight test/dep/profiler directories, then recreating those
        directories empty on the host."""
        dirs_to_exclude = set(["tests", "site_tests", "deps", "profilers"])
        light_files = [os.path.join(self.source_material, f)
                       for f in os.listdir(self.source_material)
                       if f not in dirs_to_exclude]
        host.send_file(light_files, autodir, delete_dest=True)

        # create empty dirs for all the stuff we excluded
        commands = []
        for path in dirs_to_exclude:
            abs_path = os.path.join(autodir, path)
            abs_path = utils.sh_escape(abs_path)
            commands.append("mkdir -p '%s'" % abs_path)
            # __init__.py keeps the empty dir importable as a package.
            commands.append("touch '%s'/__init__.py" % abs_path)
        host.run(';'.join(commands))

    def _install(self, host=None, autodir=None, use_autoserv=True,
                 use_packaging=True):
        """
        Install autotest.  If get() was not called previously, an
        attempt will be made to install from the autotest svn
        repository.

        @param host A Host instance on which autotest will be installed
        @param autodir Location on the remote host to install to
        @param use_autoserv Enable install modes that depend on the client
            running with the autoserv harness
        @param use_packaging Enable install modes that use the packaging system

        @exception AutoservError if a tarball was not specified and
            the target host does not have svn installed in its path
        """
        if not host:
            host = self.host
        if not self.got:
            self.get()
        host.wait_up(timeout=30)
        host.setup()
        logging.info("Installing autotest on %s", host.hostname)

        # set up the autotest directory on the remote machine
        if not autodir:
            autodir = self.get_install_dir(host)
        logging.info('Using installation dir %s', autodir)
        host.set_autodir(autodir)
        host.run('mkdir -p %s' % utils.sh_escape(autodir))

        # make sure there are no files in $AUTODIR/results
        results_path = os.path.join(autodir, 'results')
        host.run('rm -rf %s/*' % utils.sh_escape(results_path),
                 ignore_status=True)

        # Fetch the autotest client from the nearest repository
        if use_packaging:
            try:
                self._install_using_packaging(host, autodir)
                return
            except (error.PackageInstallError, error.AutoservRunError,
                    global_config.ConfigError), e:
                # Packaging is best-effort; fall through to the other
                # install methods below.
                logging.info("Could not install autotest using the packaging "
                             "system: %s. Trying other methods", e)

        # try to install from file or directory
        if self.source_material:
            c = global_config.global_config
            supports_autoserv_packaging = c.get_config_value(
                    "PACKAGES", "serve_packages_from_autoserv", type=bool)
            # Copy autotest recursively
            if supports_autoserv_packaging and use_autoserv:
                self._install_using_send_file(host, autodir)
            else:
                host.send_file(self.source_material, autodir,
                               delete_dest=True)
            logging.info("Installation of autotest completed")
            self.installed = True
            return

        # if that fails try to install using svn
        if utils.run('which svn').exit_status:
            # NOTE(review): host.name looks like a typo for host.hostname
            # (used everywhere else in this class) — confirm.
            raise error.AutoservError('svn not found on target machine: %s' %
                                      host.name)
        try:
            host.run('svn checkout %s %s' % (AUTOTEST_SVN, autodir))
        except error.AutoservRunError, e:
            # Fall back to the HTTP mirror if the primary svn URL fails.
            host.run('svn checkout %s %s' % (AUTOTEST_HTTP, autodir))
commands.append("mkdir -p '%s'" % abs_path) host.run(';'.join(commands)) else: host.send_file(self.source_material, autodir, delete_dest=True) else: # Copy autotest via tarball e_msg = 'Installation method not yet implemented!' raise NotImplementedError(e_msg) logging.info("Installation of autotest completed") self.installed = True self.lightweight = lightweight return # if that fails try to install using svn if utils.run('which svn').exit_status: raise error.AutoservError('svn not found on target machine: %s' % host.name) try: host.run('svn checkout %s %s' % (AUTOTEST_SVN, autodir)) except error.AutoservRunError, e: host.run('svn checkout %s %s' % (AUTOTEST_HTTP, autodir)) logging.info("Installation of autotest completed") self.installed = True self.lightweight = lightweight def uninstall(self, host=None): """ Uninstall (i.e. delete) autotest. Removes the autotest client install from the specified host.
def run(self, command, timeout=None, ignore_status=False): return utils.run(r'%s' % (utils.sh_escape(command)), timeout, ignore_status)