def copy_files_to(self, host_path, guest_path, nic_index=0, limit="",
                  verbose=False, timeout=COPY_FILES_TIMEOUT,
                  username=None, password=None):
    """
    Transfer files to the remote host(guest).

    :param host_path: Host path
    :param guest_path: Guest path
    :param nic_index: The index of the NIC to connect to.
    :param limit: Speed limit of file transfer.
    :param verbose: If True, log some stats using logging.debug (RSS only)
    :param timeout: Time (seconds) before giving up on doing the remote
            copy.
    :param username: Login name; falls back to params["username"].
    :param password: Login password; falls back to params["password"].
    """
    error.context("sending file(s) to '%s'" % self.name)
    # Fall back to the credentials configured in params when the caller
    # supplied none.
    username = username or self.params.get("username", "")
    password = password or self.params.get("password", "")
    client = self.params.get("file_transfer_client")
    address = self.get_address(nic_index)
    port = self.get_port(int(self.params.get("file_transfer_port")))
    log_filename = ("transfer-%s-to-%s-%s.log"
                    % (self.name, address,
                       utils_misc.generate_random_string(4)))
    remote.copy_files_to(address, client, username, password, port,
                         host_path, guest_path, limit, log_filename,
                         verbose, timeout)
    utils_misc.close_log_file(log_filename)
def copy_files_to(self, host_path, guest_path, nic_index=0, limit="",
                  verbose=False, timeout=COPY_FILES_TIMEOUT,
                  username=None, password=None):
    """
    Transfer files to the remote host(guest).

    :param host_path: Host path
    :param guest_path: Guest path
    :param nic_index: The index of the NIC to connect to.
    :param limit: Speed limit of file transfer.
    :param verbose: If True, log some stats using logging.debug (RSS only)
    :param timeout: Time (seconds) before giving up on doing the remote
            copy.
    :param username: Login name; falls back to params["username"].
    :param password: Login password; falls back to params["password"].
    """
    error.context("sending file(s) to '%s'" % self.name)
    if not username:
        username = self.params.get("username", "")
    if not password:
        password = self.params.get("password", "")
    client = self.params.get("file_transfer_client")
    address = self.get_address(nic_index)
    port = self.get_port(int(self.params.get("file_transfer_port")))
    log_filename = ("transfer-%s-to-%s-%s.log"
                    % (self.name, address,
                       utils_misc.generate_random_string(4)))
    try:
        remote.copy_files_to(address, client, username, password, port,
                             host_path, guest_path, limit, log_filename,
                             verbose, timeout)
    finally:
        # Close the transfer log even when the copy fails, so the log
        # file handle is not leaked on error.
        utils_misc.close_log_file(log_filename)
def sync_directories(self):
    """
    Synchronize the directories between the local and remote machines

    :returns: True if any files needed to be copied; False otherwise.

    Does not support symlinks.
    """
    def get_local_hashes(path):
        """
        Create a dict of the hashes of all files in path on the local
        machine.

        :param path: Path to search
        """
        def hash_file(file_name):
            """
            Calculate hex-encoded hash of a file

            :param file_name: File to hash
            """
            h = hashlib.sha1()
            # Use a context manager so the file handle is always closed
            # (the original leaked it); read in chunks to bound memory.
            with open(file_name, mode='rb') as f:
                while True:
                    buf = f.read(4096)
                    if not buf:
                        break
                    h.update(buf)
            return h.hexdigest()

        def visit(arg, dir_name, file_names):
            """
            Callback function to calculate and store hashes

            :param arg: Tuple with base path and the hash that will
                    contain the results.
            :param dir_name: Current directory
            :param file_names: File names in the current directory
            """
            (base_path, result) = arg
            for file_name in file_names:
                path = os.path.join(dir_name, file_name)
                if os.path.isfile(path):
                    result[os.path.relpath(path, base_path)] = hash_file(path)

        result = {}
        os.path.walk(path, visit, (path, result))
        return result

    def get_remote_hashes(path, session, linesep):
        """
        Create a dict of the hashes of all files in path on the remote
        machine.

        :param path: Path to search
        :param session: Session object to use
        :param linesep: Line separation string for the remote system
        """
        cmd = 'test \! -d %s || find %s -type f | xargs sha1sum' % (path,
                                                                    path)
        status, output = session.cmd_status_output(cmd)
        if status != 0:
            raise BuildError("Unable to get hashes of remote files: '%s'"
                             % output)
        result = {}
        # Output is "<sum>  <filename><linesep><sum>  <filename>..."
        for line in output.split(linesep):
            if re.match("^[a-f0-9]{32,} [^ ].*$", line):
                (h, f) = line.split(None, 1)
                result[os.path.relpath(f, path)] = h
        return result

    def list_recursive_dirnames(path):
        """
        List all directories that exist in path on the local machine

        :param path: Path to search
        """
        def visit(arg, dir_name, file_names):
            """
            Callback function to list all directories

            :param arg: Tuple with base path and the list that will
                    contain the results.
            :param dir_name: Current directory
            :param file_names: File names in the current directory
            """
            (base_path, result) = arg
            for file_name in file_names:
                path = os.path.join(dir_name, file_name)
                if os.path.isdir(path):
                    result.append(os.path.relpath(path, base_path))

        result = []
        os.path.walk(path, visit, (path, result))
        return result

    remote_hashes = get_remote_hashes(self.full_build_path, self.session,
                                      self.linesep)
    local_hashes = get_local_hashes(self.source)

    # A file needs transferring when it is absent remotely or its hash
    # differs from the local copy.
    to_transfer = []
    for rel_path in local_hashes.keys():
        rhash = remote_hashes.get(rel_path)
        if rhash is None or rhash != local_hashes[rel_path]:
            to_transfer.append(rel_path)

    need_build = False
    if to_transfer:
        logging.info("Need to copy files to %s on target",
                     self.full_build_path)
        need_build = True

        # Create all directories up front so individual file copies
        # never hit a missing parent.
        dirs = list_recursive_dirnames(self.source)
        if dirs:
            dirs_text = " ".join(dirs)
            fmt_arg = (self.full_build_path, self.full_build_path,
                       dirs_text)
            cmd = 'mkdir -p %s && cd %s && mkdir -p %s' % fmt_arg
        else:
            cmd = 'mkdir -p %s' % self.full_build_path
        status, output = self.session.cmd_status_output(cmd)
        if status != 0:
            raise BuildError("Unable to create remote directories: '%s'"
                             % output)

        # Copy files
        for file_name in to_transfer:
            local_path = os.path.join(self.source, file_name)
            remote_path = os.path.join(self.full_build_path, file_name)
            remote.copy_files_to(self.address, self.file_transfer_client,
                                 self.username, self.password,
                                 self.file_transfer_port, local_path,
                                 remote_path)
    else:
        logging.info("Directory %s on target already up-to-date",
                     self.full_build_path)
    return need_build
def sync_directories(self):
    """
    Synchronize the directories between the local and remote machines

    :returns: True if any files needed to be copied; False otherwise.

    Does not support symlinks.
    """
    def get_local_hashes(path):
        """
        Create a dict of the hashes of all files in path on the local
        machine.

        :param path: Path to search
        """
        def hash_file(file_name):
            """
            Calculate hex-encoded hash of a file

            :param file_name: File to hash
            """
            h = hashlib.sha1()
            # Use a context manager so the file handle is always closed
            # (the original leaked it); read in chunks to bound memory.
            with open(file_name, mode='rb') as f:
                while True:
                    buf = f.read(4096)
                    if not buf:
                        break
                    h.update(buf)
            return h.hexdigest()

        def visit(arg, dir_name, file_names):
            """
            Callback function to calculate and store hashes

            :param arg: Tuple with base path and the hash that will
                    contain the results.
            :param dir_name: Current directory
            :param file_names: File names in the current directory
            """
            (base_path, result) = arg
            for file_name in file_names:
                path = os.path.join(dir_name, file_name)
                if os.path.isfile(path):
                    result[os.path.relpath(path, base_path)] = hash_file(path)

        result = {}
        os.path.walk(path, visit, (path, result))
        return result

    def get_remote_hashes(path, session, linesep):
        """
        Create a dict of the hashes of all files in path on the remote
        machine.

        :param path: Path to search
        :param session: Session object to use
        :param linesep: Line separation string for the remote system
        """
        cmd = 'test \! -d %s || find %s -type f | xargs sha1sum' % (path,
                                                                    path)
        status, output = session.cmd_status_output(cmd)
        if status != 0:
            raise BuildError("Unable to get hashes of remote files: '%s'"
                             % output)
        result = {}
        # Output is "<sum>  <filename><linesep><sum>  <filename>..."
        for line in output.split(linesep):
            if re.match("^[a-f0-9]{32,} [^ ].*$", line):
                (h, f) = line.split(None, 1)
                result[os.path.relpath(f, path)] = h
        return result

    def list_recursive_dirnames(path):
        """
        List all directories that exist in path on the local machine

        :param path: Path to search
        """
        def visit(arg, dir_name, file_names):
            """
            Callback function to list all directories

            :param arg: Tuple with base path and the list that will
                    contain the results.
            :param dir_name: Current directory
            :param file_names: File names in the current directory
            """
            (base_path, result) = arg
            for file_name in file_names:
                path = os.path.join(dir_name, file_name)
                if os.path.isdir(path):
                    result.append(os.path.relpath(path, base_path))

        result = []
        os.path.walk(path, visit, (path, result))
        return result

    remote_hashes = get_remote_hashes(self.full_build_path, self.session,
                                      self.linesep)
    local_hashes = get_local_hashes(self.source)

    # A file needs transferring when it is absent remotely or its hash
    # differs from the local copy.
    to_transfer = []
    for rel_path in local_hashes.keys():
        rhash = remote_hashes.get(rel_path)
        if rhash is None or rhash != local_hashes[rel_path]:
            to_transfer.append(rel_path)

    need_build = False
    if to_transfer:
        logging.info("Need to copy files to %s on target",
                     self.full_build_path)
        need_build = True

        # Create all directories up front so individual file copies
        # never hit a missing parent.
        dirs = list_recursive_dirnames(self.source)
        if dirs:
            dirs_text = " ".join(dirs)
            fmt_arg = (self.full_build_path, self.full_build_path,
                       dirs_text)
            cmd = 'mkdir -p %s && cd %s && mkdir -p %s' % fmt_arg
        else:
            cmd = 'mkdir -p %s' % self.full_build_path
        status, output = self.session.cmd_status_output(cmd)
        if status != 0:
            raise BuildError("Unable to create remote directories: '%s'"
                             % output)

        # Copy files
        # NOTE(review): another copy of this method passes
        # self.file_transfer_port here instead of self.port — confirm
        # which attribute this class actually defines for file transfer.
        for file_name in to_transfer:
            local_path = os.path.join(self.source, file_name)
            remote_path = os.path.join(self.full_build_path, file_name)
            remote.copy_files_to(self.address, self.file_transfer_client,
                                 self.username, self.password, self.port,
                                 local_path, remote_path)
    else:
        logging.info("Directory %s on target already up-to-date",
                     self.full_build_path)
    return need_build