def copy_file(self):
    self.mkdir_basepath()

    src = self.src_path
    dest = self.dest_path

    if self.dest_isFile():
        unix_out('cp %s %s.saved' % (dest, dest))

    unix_out('umask 077')
    unix_out('cp %s %s' % (src, dest))

    if not synctool_lib.DRY_RUN:
        old_umask = os.umask(077)

        if synctool_param.BACKUP_COPIES:
            if self.dest_isFile():
                verbose(' saving %s as %s.saved' % (dest, dest))
                try:
                    shutil.copy2(dest, '%s.saved' % dest)
                except:
                    stderr('failed to save %s as %s.saved' % (dest, dest))

        verbose(' cp %s %s' % (src, dest))
        try:
            shutil.copy2(src, dest)    # copy file and stats
        except:
            stderr('failed to copy %s to %s' % (self.print_src(), dest))

        os.umask(old_umask)
    else:
        if self.dest_isFile() and synctool_param.BACKUP_COPIES:
            verbose(' saving %s as %s.saved' % (dest, dest))

        verbose(dryrun_msg(' cp %s %s' % (src, dest)))

def run_remote_copy(nodes, files):
    """copy files[] to nodes[]"""

    if not synctool_param.SCP_CMD:
        stderr("%s: error: scp_cmd has not been defined in %s" %
            (os.path.basename(sys.argv[0]), synctool_param.CONF_FILE))
        sys.exit(-1)

    scp_cmd_arr = shlex.split(synctool_param.SCP_CMD)

    if SCP_OPTIONS:
        scp_cmd_arr.extend(shlex.split(SCP_OPTIONS))

    for node in nodes:
        if node == synctool_param.NODENAME:
            verbose("skipping node %s" % node)
            nodes.remove(node)
            break

    scp_cmd_arr.extend(files)

    files_str = string.join(files)    # this is used only for printing

    synctool_lib.run_parallel(master_scp, worker_scp,
        (nodes, scp_cmd_arr, files_str), len(nodes))

def single_files(filename):
    '''check/update a single file'''
    '''returns (True, path_in_synctree) if file is different'''

    if not filename:
        stderr('missing filename')
        return (False, None)

    (obj, err) = synctool_overlay.find_terse(synctool_overlay.OV_OVERLAY, filename)
    if err == synctool_overlay.OV_FOUND_MULTIPLE:
        # multiple source possible
        # possibilities have already been printed
        sys.exit(1)

    if err == synctool_overlay.OV_NOT_FOUND:
        stderr('%s is not in the overlay tree' % filename)
        return (False, None)

    verbose('checking against %s' % obj.print_src())

    changed = obj.compare_files()
    if not changed:
        stdout('%s is up to date' % filename)
        terse(synctool_lib.TERSE_OK, filename)
        unix_out('# %s is up to date\n' % obj.print_dest())

    return (changed, obj.src_path)

def get_latest_version_and_checksum():
    '''get latest version and checksum by downloading the LATEST.txt versioning file'''

    verbose('accessing URL %s' % VERSION_CHECKING_URL)

    try:
        opener = urllib.FancyURLopener({})
        f = opener.open(VERSION_CHECKING_URL)
        data = f.read()
        f.close()
    except:
        stderr('error accessing the file at %s' % VERSION_CHECKING_URL)
        return None

    if data[0] == '<':
        # we got an HTML page back instead of the plain text versioning file
        stderr('error accessing the file at %s' % VERSION_CHECKING_URL)
        return None

    data = string.strip(data)

    # format of the data in LATEST.txt is:
    # <version> <MD5 checksum>
    arr = string.split(data)
    if len(arr) != 2:
        return None

    return (arr[0], arr[1])

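# Illustrative sketch (not part of the original module): LATEST.txt is
# expected to hold a single line of the form "<version> <MD5 checksum>",
# so parsing a sample line would look like this. The sample values below
# are made up; only the parsing mirrors the function above.
def _example_parse_latest_line():
    data = '  5.2  0123456789abcdef0123456789abcdef  \n'
    arr = string.split(string.strip(data))
    if len(arr) != 2:
        return None
    return (arr[0], arr[1])    # -> ('5.2', '0123456789abcdef0123456789abcdef')
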
def checksum_file(filename):
    '''compute MD5 checksum of a file'''

    try:
        f = open(filename, 'r')
    except IOError, (err, reason):
        stderr('error: failed to open %s : %s' % (filename, reason))
        raise

    # read the file in blocks and feed them to the MD5 digest
    # (hashlib is assumed to be imported at module level; the block size
    # is an arbitrary but reasonable choice)
    crc = hashlib.md5()
    while True:
        data = f.read(16 * 1024)
        if not data:
            break
        crc.update(data)

    f.close()
    return crc.hexdigest()

def run_local_synctool():
    if not synctool_param.SYNCTOOL_CMD:
        stderr('%s: error: synctool_cmd has not been defined in %s' %
            (os.path.basename(sys.argv[0]), synctool_param.CONF_FILE))
        sys.exit(-1)

    cmd_arr = shlex.split(synctool_param.SYNCTOOL_CMD) + PASS_ARGS

    synctool_lib.run_with_nodename(cmd_arr, synctool_param.NODENAME)

def ping_nodes(nodes):
    '''ping nodes in parallel'''
    '''nodes is a list of interfaces, really'''

    if not synctool_param.PING_CMD:
        stderr('%s: error: ping_cmd has not been defined in %s' %
            (os.path.basename(sys.argv[0]), synctool_param.CONF_FILE))
        sys.exit(-1)

    synctool_lib.run_parallel(master_ping, worker_ping, nodes, len(nodes))

def hard_delete_file(self):
    file = self.dest_path

    unix_out('rm -f %s' % file)

    if not synctool_lib.DRY_RUN:
        verbose(' os.unlink(%s)' % file)
        try:
            os.unlink(file)
        except OSError, reason:
            stderr('failed to delete %s : %s' % (file, reason))

def set_permissions(self):
    file = self.dest_path
    mode = self.src_statbuf.mode

    unix_out('chmod 0%o %s' % (mode & 07777, file))

    if not synctool_lib.DRY_RUN:
        verbose(' os.chmod(%s, %04o)' % (file, mode & 07777))
        try:
            os.chmod(file, mode & 07777)
        except OSError, reason:
            stderr('failed to chmod %04o %s : %s' % (mode & 07777, file, reason))

def worker_ping(rank, nodes):
    '''ping a single node'''

    node = nodes[rank]
    nodename = NODESET.get_nodename_from_interface(node)

    packets_received = 0

    # execute ping command and show output with the nodename
    cmd = '%s %s' % (synctool_param.PING_CMD, node)
    cmd_arr = shlex.split(cmd)

    f = synctool_lib.popen(cmd_arr)
    if not f:
        stderr('failed to run command %s' % cmd_arr[0])
        return

    while True:
        line = f.readline()
        if not line:
            break

        line = string.strip(line)

        #
        # argh, we have to parse output here
        # ping says something like:
        # "2 packets transmitted, 0 packets received, 100.0% packet loss" on BSD
        # "2 packets transmitted, 0 received, 100.0% packet loss, time 1001ms" on Linux
        #
        arr = string.split(line)
        if len(arr) > 3 and arr[1] == 'packets' and arr[2] == 'transmitted,':
            try:
                packets_received = int(arr[3])
            except ValueError:
                pass

            break

        # some ping implementations say "hostname is alive"
        # or "hostname is unreachable"
        elif len(arr) == 3 and arr[1] == 'is':
            if arr[2] == 'alive':
                packets_received = 100
            elif arr[2] == 'unreachable':
                packets_received = -1

    f.close()

    if packets_received > 0:
        print '%s: up' % nodename
    else:
        print '%s: not responding' % nodename

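# Illustrative sketch (not part of the original module): the same parsing
# rules as worker_ping() above, applied to a single line, to show how the
# packet count is derived from the two ping output styles. The sample
# lines in the trailing comments are made up but follow the formats
# mentioned in the comments above.
def _example_parse_ping_line(line):
    arr = string.split(string.strip(line))
    if len(arr) > 3 and arr[1] == 'packets' and arr[2] == 'transmitted,':
        try:
            return int(arr[3])    # number of packets received
        except ValueError:
            return 0
    if len(arr) == 3 and arr[1] == 'is':
        if arr[2] == 'alive':
            return 100
        if arr[2] == 'unreachable':
            return -1
    return 0

# _example_parse_ping_line('2 packets transmitted, 2 received, 0% packet loss')  -> 2
# _example_parse_ping_line('node1 is alive')                                     -> 100
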
def set_owner(self):
    file = self.dest_path
    uid = self.src_statbuf.uid
    gid = self.src_statbuf.gid

    unix_out("chown %s.%s %s" % (self.src_ascii_uid(), self.src_ascii_gid(), file))

    if not synctool_lib.DRY_RUN:
        verbose(" os.chown(%s, %d, %d)" % (file, uid, gid))
        try:
            os.chown(file, uid, gid)
        except OSError, reason:
            stderr("failed to chown %s.%s %s : %s" %
                (self.src_ascii_uid(), self.src_ascii_gid(), file, reason))

def package_manager():
    '''return instance of SyncPkg installer class'''

    detected = False

    if not synctool_param.PACKAGE_MANAGER:
        detect_installer()

        if not synctool_param.PACKAGE_MANAGER:
            stderr('failed to detect package management system')
            stderr('please configure it in synctool.conf')
            sys.exit(1)

        detected = True

    for mgr in synctool_param.KNOWN_PACKAGE_MANAGERS:
        if synctool_param.PACKAGE_MANAGER == mgr:
            short_mgr = string.replace(mgr, '-', '')

            # load the module
            module = __import__('synctool_pkg_%s' % short_mgr)

            # find the package manager class
            pkgclass = getattr(module, 'SyncPkg%s' % string.capitalize(short_mgr))

            # instantiate the class
            return pkgclass()

    if detected:
        stderr('package manager %s is not supported yet' % synctool_param.PACKAGE_MANAGER)
    else:
        stderr("unknown package manager defined: '%s'" % synctool_param.PACKAGE_MANAGER)

    sys.exit(1)

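# Illustrative sketch (not part of the original module): the naming
# convention that package_manager() relies on. A configured manager name
# such as 'apt-get' (used here only as an example) has its dashes stripped
# and maps to module 'synctool_pkg_aptget' and class 'SyncPkgAptget'.
def _example_pkg_names(mgr):
    short_mgr = string.replace(mgr, '-', '')
    module_name = 'synctool_pkg_%s' % short_mgr
    class_name = 'SyncPkg%s' % string.capitalize(short_mgr)
    return (module_name, class_name)

# _example_pkg_names('apt-get')  -> ('synctool_pkg_aptget', 'SyncPkgAptget')
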
def save_dir(self):
    if not synctool_param.BACKUP_COPIES:
        return

    path = self.dest_path

    unix_out('mv %s %s.saved' % (path, path))

    if not synctool_lib.DRY_RUN:
        verbose('moving %s to %s.saved' % (path, path))
        try:
            os.rename(path, '%s.saved' % path)
        except OSError, reason:
            stderr('failed to move directory to %s.saved : %s' % (path, reason))

def make_dir(self):
    self.mkdir_basepath()

    path = self.dest_path

    unix_out('umask 077')
    unix_out('mkdir %s' % path)

    if not synctool_lib.DRY_RUN:
        old_umask = os.umask(077)

        verbose(' os.mkdir(%s)' % path)
        try:
            os.mkdir(path)
        except OSError, reason:
            stderr('failed to make directory %s : %s' % (path, reason))

        os.umask(old_umask)

def reference(filename):
    '''show which source file in the repository synctool chooses to use'''

    if not filename:
        stderr('missing filename')
        return

    (obj, err) = synctool_overlay.find_terse(synctool_overlay.OV_OVERLAY, filename)
    if err == synctool_overlay.OV_FOUND_MULTIPLE:
        # multiple source possible
        # possibilities have already been printed
        sys.exit(1)

    if err == synctool_overlay.OV_NOT_FOUND:
        stderr('%s is not in the overlay tree' % filename)
        return

    print obj.print_src()

def erase_saved(self):
    dest = self.dest_path

    stat_saved_path = synctool_stat.SyncStat('%s.saved' % dest)

    if synctool_lib.ERASE_SAVED and stat_saved_path.exists() and not stat_saved_path.isDir():
        terse(synctool_lib.TERSE_DELETE, '%s.saved' % dest)
        unix_out('rm %s.saved' % dest)

        if synctool_lib.DRY_RUN:
            stdout(dryrun_msg('erase %s.saved' % dest, 'erase'))
        else:
            stdout('erase %s.saved' % dest)
            verbose(' os.unlink(%s.saved)' % dest)
            try:
                os.unlink('%s.saved' % dest)
            except OSError, reason:
                stderr('failed to delete %s : %s' % (dest, reason))

def delete_file(self):
    file = self.dest_path

    if not synctool_lib.DRY_RUN:
        if synctool_param.BACKUP_COPIES:
            unix_out('mv %s %s.saved' % (file, file))

            verbose('moving %s to %s.saved' % (file, file))
            try:
                os.rename(file, '%s.saved' % file)
            except OSError, reason:
                stderr('failed to move file to %s.saved : %s' % (file, reason))
        else:
            unix_out('rm %s' % file)

            verbose(' os.unlink(%s)' % file)
            try:
                os.unlink(file)
            except OSError, reason:
                stderr('failed to delete %s : %s' % (file, reason))

def run_dsh(remote_cmd_arr):
    '''run remote command to a set of nodes using ssh (param ssh_cmd)'''

    nodes = NODESET.interfaces()
    if nodes == None or len(nodes) <= 0:
        print 'no valid nodes specified'
        sys.exit(1)

    if not synctool_param.SSH_CMD:
        stderr('%s: error: ssh_cmd has not been defined in %s' %
            (os.path.basename(sys.argv[0]), synctool_param.CONF_FILE))
        sys.exit(-1)

    ssh_cmd_arr = shlex.split(synctool_param.SSH_CMD)

    if SSH_OPTIONS:
        ssh_cmd_arr.extend(shlex.split(SSH_OPTIONS))

    synctool_lib.run_parallel(master_ssh, worker_ssh,
        (nodes, ssh_cmd_arr, remote_cmd_arr), len(nodes))

def run_remote_pkg(nodes):
    if not nodes:
        return

    if not synctool_param.SSH_CMD:
        stderr('%s: error: ssh_cmd has not been defined in %s' %
            (os.path.basename(sys.argv[0]), synctool_param.CONF_FILE))
        sys.exit(-1)

    if not synctool_param.PKG_CMD:
        stderr('%s: error: pkg_cmd has not been defined in %s' %
            (os.path.basename(sys.argv[0]), synctool_param.CONF_FILE))
        sys.exit(-1)

    # prepare remote synctool_pkg command
    ssh_cmd_arr = shlex.split(synctool_param.SSH_CMD)
    pkg_cmd_arr = shlex.split(synctool_param.PKG_CMD)
    pkg_cmd_arr.extend(PASS_ARGS)

    # run in parallel
    synctool_lib.run_parallel(master_pkg, worker_pkg,
        (nodes, ssh_cmd_arr, pkg_cmd_arr), len(nodes))

def download():
    '''download latest version'''

    # ugly globals because of callback function
    global DOWNLOAD_FILENAME, DOWNLOAD_BYTES

    tup = get_latest_version_and_checksum()
    if not tup:
        return 1

    (version, checksum) = tup

    filename = 'synctool-%s.tar.gz' % version
    download_url = DOWNLOAD_URL + filename

    DOWNLOAD_FILENAME = make_local_filename_for_version(version)
    DOWNLOAD_BYTES = 0

    try:
        opener = urllib.FancyURLopener({})
        opener.retrieve(download_url, DOWNLOAD_FILENAME, download_progress)
    except:
        if DOWNLOAD_BYTES:
            print

        stderr('failed to download file %s' % download_url)
        return 1
    else:
        print

    #
    # compute and compare MD5 checksums
    # sadly, there is no easy way to do this 'live' while downloading,
    # because the download callback does not see the downloaded data blocks
    #
    downloaded_sum = checksum_file(DOWNLOAD_FILENAME)
    if downloaded_sum != checksum:
        stderr('ERROR: checksum failed for %s' % DOWNLOAD_FILENAME)
        return 1

    return 0

def stat(self, path):
    '''get the stat() information for a pathname'''

    if not path:
        self.entry_exists = False
        self.mode = self.uid = self.gid = self.size = None
        return

    try:
        statbuf = os.lstat(path)
    except OSError, err:
        # could be something stupid like "Permission denied" ...
        # although synctool should be run as root
        if err.errno != errno.ENOENT:
            # "No such file or directory" is a valid error
            # when the destination is missing
            stderr('error: stat(%s) failed: %s' % (path, err.strerror))

        self.entry_exists = False
        self.mode = self.uid = self.gid = self.size = None
    else:
        # stat succeeded: record mode, ownership and size
        self.entry_exists = True
        self.mode = statbuf.st_mode
        self.uid = statbuf.st_uid
        self.gid = statbuf.st_gid
        self.size = statbuf.st_size

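# Illustrative sketch (not part of the original module): typical use of
# SyncStat as it appears elsewhere in this code (see erase_saved() and
# run_command()): construct it with a path, then query the cached result.
# The classification strings below are made up for this example.
def _example_classify_path(path):
    st = synctool_stat.SyncStat(path)
    if not st.exists():
        return 'missing'
    if st.isDir():
        return 'directory'
    if st.isExec():
        return 'executable'
    return 'other entry'
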
def run_command_in_dir(dest_dir, cmd):
    '''change directory to dest_dir, and run the shell command'''

    verbose(' os.chdir(%s)' % dest_dir)
    unix_out('cd %s' % dest_dir)

    cwd = os.getcwd()

    # if dry run, the target directory may not exist yet
    # (mkdir has not been called for real, for a dry run)
    if synctool_lib.DRY_RUN:
        run_command(cmd)

        verbose(' os.chdir(%s)' % cwd)
        unix_out('cd %s' % cwd)
        unix_out('')
        return

    try:
        os.chdir(dest_dir)
    except OSError, reason:
        stderr('error changing directory to %s: %s' % (dest_dir, reason))
    else:
        # chdir succeeded: run the command, then change back to the
        # previous working directory (mirrors the dry-run branch above)
        run_command(cmd)

        verbose(' os.chdir(%s)' % cwd)
        unix_out('cd %s' % cwd)
        unix_out('')

        try:
            os.chdir(cwd)
        except OSError, reason:
            stderr('error changing directory to %s: %s' % (cwd, reason))

def run_remote_synctool(nodes):
    if not nodes:
        return

    if not synctool_param.RSYNC_CMD:
        stderr('%s: error: rsync_cmd has not been defined in %s' %
            (os.path.basename(sys.argv[0]), synctool_param.CONF_FILE))
        sys.exit(-1)

    if not synctool_param.SSH_CMD:
        stderr('%s: error: ssh_cmd has not been defined in %s' %
            (os.path.basename(sys.argv[0]), synctool_param.CONF_FILE))
        sys.exit(-1)

    if not synctool_param.SYNCTOOL_CMD:
        stderr('%s: error: synctool_cmd has not been defined in %s' %
            (os.path.basename(sys.argv[0]), synctool_param.CONF_FILE))
        sys.exit(-1)

    # prepare rsync command
    if not OPT_SKIP_RSYNC:
        rsync_cmd_arr = shlex.split(synctool_param.RSYNC_CMD)
        rsync_cmd_arr.append('%s/' % synctool_param.MASTERDIR)
    else:
        rsync_cmd_arr = None

    # prepare remote synctool command
    ssh_cmd_arr = shlex.split(synctool_param.SSH_CMD)
    synctool_cmd_arr = shlex.split(synctool_param.SYNCTOOL_CMD)
    synctool_cmd_arr.extend(PASS_ARGS)

    # run in parallel
    synctool_lib.run_parallel(master_synctool, worker_synctool,
        (nodes, rsync_cmd_arr, ssh_cmd_arr, synctool_cmd_arr), len(nodes))

def ov_perror(errorcode, src_path):
    '''print error message for source path'''

    if errorcode >= 0:
        # this is not an error but a valid group number
        return

    if errorcode == OV_NOT_MY_GROUP:
        # this is not an error but a normal condition
        return

    if errorcode == OV_NO_GROUP_EXT:
        if synctool_param.TERSE:
            terse(synctool_lib.TERSE_ERROR, 'no group on %s' % src_path)
        else:
            stderr('no underscored group extension on %s, skipped' %
                synctool_lib.prettypath(src_path))

    elif errorcode == OV_UNKNOWN_GROUP:
        if synctool_param.TERSE:
            terse(synctool_lib.TERSE_ERROR, 'invalid group on %s' % src_path)
        else:
            stderr('unknown group on %s, skipped' % synctool_lib.prettypath(src_path))

def symlink_file(self, oldpath):
    self.mkdir_basepath()

    # note that old_path is the readlink() of the self.src_path
    newpath = self.dest_path

    if self.dest_exists():
        unix_out('mv %s %s.saved' % (newpath, newpath))

    #
    # actually, if we want the ownership of the symlink to be correct,
    # we should do setuid() here
    # matching ownerships of symbolic links is not yet implemented
    #

    # linux makes all symlinks mode 0777, but some other platforms do not
    umask_mode = synctool_param.SYMLINK_MODE ^ 0777

    unix_out('umask %03o' % umask_mode)
    unix_out('ln -s %s %s' % (oldpath, newpath))

    if not synctool_lib.DRY_RUN:
        if self.dest_exists():
            verbose('saving %s as %s.saved' % (newpath, newpath))
            try:
                os.rename(newpath, '%s.saved' % newpath)
            except OSError, reason:
                stderr('failed to save %s as %s.saved : %s' % (newpath, newpath, reason))
                terse(synctool_lib.TERSE_FAIL, 'save %s.saved' % newpath)

        old_umask = os.umask(umask_mode)

        verbose(' os.symlink(%s, %s)' % (oldpath, newpath))
        try:
            os.symlink(oldpath, newpath)
        except OSError, reason:
            stderr('failed to create symlink %s -> %s : %s' % (newpath, oldpath, reason))
            terse(synctool_lib.TERSE_FAIL, 'link %s' % newpath)

        os.umask(old_umask)    # restore the umask, as in copy_file() and make_dir()

def run_command(cmd):
    '''run a shell command'''

    if cmd[0] != '/':
        # if relative path, use scriptdir
        cmd = synctool_param.SCRIPT_DIR + '/' + cmd

    # a command can have arguments
    arr = shlex.split(cmd)
    cmdfile = arr[0]

    stat = synctool_stat.SyncStat(cmdfile)

    if not stat.exists():
        stderr('error: command %s not found' % synctool_lib.prettypath(cmdfile))
        return

    if not stat.isExec():
        stderr("warning: file '%s' is not executable" % synctool_lib.prettypath(cmdfile))
        return

    # run the shell command
    synctool_lib.shell_command(cmd)

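# Illustrative sketch (not part of the original module): how run_command()
# resolves a relative command name against SCRIPT_DIR before splitting off
# the arguments. The command name below is made up.
def _example_resolve_command(cmd):
    if cmd[0] != '/':
        cmd = synctool_param.SCRIPT_DIR + '/' + cmd
    return shlex.split(cmd)[0]    # path to the executable itself

# _example_resolve_command('fix_hosts.sh --verbose')
#     -> SCRIPT_DIR + '/fix_hosts.sh'
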
def single_task(filename):
    '''run a single task'''

    if not filename:
        stderr('missing task filename')
        return

    task_script = filename
    if task_script[0] != '/':
        # trick to make find() work for tasks, too
        task_script = '/' + task_script

    (obj, err) = synctool_overlay.find_terse(synctool_overlay.OV_TASKS, task_script)
    if err == synctool_overlay.OV_FOUND_MULTIPLE:
        # multiple source possible
        # possibilities have already been printed
        sys.exit(1)

    if err == synctool_overlay.OV_NOT_FOUND:
        stderr("no such task '%s'" % filename)
        return

    run_command(obj.src_path)
    unix_out('')

def overlay_pass2(filelist, filedict):
    '''do pass #2 of 2; create dictionary of destination paths from list
    Each element in the dictionary is an instance of OverlayEntry'''

    for entry in filelist:
        if filedict.has_key(entry.dest_path):
            entry2 = filedict[entry.dest_path]

            if entry.groupnum < entry2.groupnum:
                # this group is more important, so override it
                del filedict[entry.dest_path]
                entry2 = None

            # duplicate paths are a problem, unless they are directories ...
            elif (not (entry.src_isDir() and entry2.src_isDir())) and \
                entry.groupnum == entry2.groupnum:
                if synctool_param.TERSE:
                    synctool_lib.terse(synctool_lib.TERSE_ERROR,
                        'duplicate source paths in repository for:')
                    synctool_lib.terse(synctool_lib.TERSE_ERROR, entry.src_path)
                    synctool_lib.terse(synctool_lib.TERSE_ERROR, entry2.src_path)
                else:
                    stderr('error: duplicate source paths in repository for:\n'
                        'error: %s\n'
                        'error: %s\n' %
                        (synctool_lib.prettypath(entry.src_path),
                        synctool_lib.prettypath(entry2.src_path)))

                continue

            else:
                # this group is less important, skip it
                continue

        # add or update filedict
        filedict[entry.dest_path] = entry

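# Illustrative sketch (not part of the original module): a hypothetical
# stand-in entry class showing how overlay_pass2() resolves two candidate
# sources for the same destination; the lower group number (the more
# specific group) wins. The paths and group extensions below are made up.
class _ExampleOverlayEntry(object):
    def __init__(self, src_path, dest_path, groupnum, isdir=False):
        self.src_path = src_path
        self.dest_path = dest_path
        self.groupnum = groupnum
        self._isdir = isdir

    def src_isDir(self):
        return self._isdir

# entries = [_ExampleOverlayEntry('overlay/etc/motd._all', '/etc/motd', 5),
#            _ExampleOverlayEntry('overlay/etc/motd._web', '/etc/motd', 1)]
# filedict = {}
# overlay_pass2(entries, filedict)
# filedict['/etc/motd'].src_path  -> 'overlay/etc/motd._web'  (group 1 beats group 5)
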
def diff_files(filename):
    '''display a diff of the file'''

    if not synctool_param.DIFF_CMD:
        stderr('error: diff_cmd is undefined in %s' % synctool_param.CONF_FILE)
        return

    synctool_lib.DRY_RUN = True    # be sure that it doesn't do any updates

    (obj, err) = synctool_overlay.find_terse(synctool_overlay.OV_OVERLAY, filename)
    if err == synctool_overlay.OV_FOUND_MULTIPLE:
        # multiple source possible
        # possibilities have already been printed
        sys.exit(1)

    if err == synctool_overlay.OV_NOT_FOUND:
        return

    if synctool_lib.UNIX_CMD:
        unix_out('%s %s %s' % (synctool_param.DIFF_CMD, obj.dest_path, obj.src_path))
    else:
        verbose('%s %s %s' % (synctool_param.DIFF_CMD, obj.dest_path, obj.print_src()))

        sys.stdout.flush()
        sys.stderr.flush()

        if use_subprocess:
            cmd_arr = shlex.split(synctool_param.DIFF_CMD)
            cmd_arr.append(obj.dest_path)
            cmd_arr.append(obj.src_path)
            subprocess.Popen(cmd_arr, shell=False)
        else:
            os.system('%s %s %s' % (synctool_param.DIFF_CMD, obj.dest_path, obj.src_path))

        sys.stdout.flush()
        sys.stderr.flush()

def option_combinations(opt_diff, opt_single, opt_reference, opt_tasks, opt_upload,
    opt_suffix, opt_fix):
    '''some combinations of command-line options don't make sense;
    alert the user and abort'''

    if opt_upload and (opt_diff or opt_single or opt_reference or opt_tasks):
        stderr("the --upload option can not be combined with --diff, --single, --ref, or --tasks")
        sys.exit(1)

    if opt_suffix and not opt_upload:
        stderr("option --suffix can only be used together with --upload")
        sys.exit(1)

    if opt_diff and (opt_single or opt_reference or opt_tasks or opt_fix):
        stderr("option --diff can not be combined with --single, --ref, --tasks, or --fix")
        sys.exit(1)

    if opt_reference and (opt_single or opt_tasks or opt_fix):
        stderr("option --reference can not be combined with --single, --tasks, or --fix")
        sys.exit(1)