def overlay_callback(obj):
    '''compare one overlay entry against its target and
    trigger the post-script when the target was updated'''

    verbose('checking %s' % obj.print_src())

    if obj.compare_files():
        run_post(obj.src_path, obj.dest_path)
def single_files(filename):
    '''check/update a single file

    Returns a tuple (changed, src_path); changed is True when the
    file differs from the repository copy'''

    if not filename:
        stderr('missing filename')
        return (False, None)

    (obj, err) = synctool_overlay.find_terse(synctool_overlay.OV_OVERLAY, filename)

    if err == synctool_overlay.OV_FOUND_MULTIPLE:
        # multiple sources are possible;
        # the possibilities have already been printed
        sys.exit(1)

    if err == synctool_overlay.OV_NOT_FOUND:
        stderr('%s is not in the overlay tree' % filename)
        return (False, None)

    verbose('checking against %s' % obj.print_src())

    is_different = obj.compare_files()
    if not is_different:
        stdout('%s is up to date' % filename)
        terse(synctool_lib.TERSE_OK, filename)
        unix_out('# %s is up to date\n' % obj.print_dest())

    return (is_different, obj.src_path)
def run_remote_copy(nodes, files):
    """copy files[] to nodes[]"""

    if not synctool_param.SCP_CMD:
        stderr("%s: error: scp_cmd has not been defined in %s" % (os.path.basename(sys.argv[0]), synctool_param.CONF_FILE))
        sys.exit(-1)

    scp_cmd_arr = shlex.split(synctool_param.SCP_CMD)
    if SCP_OPTIONS:
        scp_cmd_arr.extend(shlex.split(SCP_OPTIONS))

    # the local node does not scp to itself; drop it from the node list
    local_node = synctool_param.NODENAME
    if local_node in nodes:
        verbose("skipping node %s" % local_node)
        nodes.remove(local_node)

    scp_cmd_arr.extend(files)

    # printable representation of the file list
    files_str = string.join(files)

    synctool_lib.run_parallel(master_scp, worker_scp, (nodes, scp_cmd_arr, files_str), len(nodes))
def get_latest_version_and_checksum():
    '''get latest version and checksum by downloading
    the LATEST.txt versioning file

    Returns a tuple (version, md5_checksum), or None on any error'''

    verbose('accessing URL %s' % VERSION_CHECKING_URL)

    try:
        opener = urllib.FancyURLopener({})
        f = opener.open(VERSION_CHECKING_URL)
        data = f.read()
        f.close()
    except IOError:
        # FancyURLopener raises IOError on network/HTTP failure;
        # the previous bare 'except:' also swallowed KeyboardInterrupt
        # and SystemExit, which must propagate
        stderr('error accessing the file at %s' % VERSION_CHECKING_URL)
        return None

    # an empty body would crash the data[0] check below;
    # a body starting with '<' is an HTML error page, not LATEST.txt
    if not data or data[0] == '<':
        stderr('error accessing the file at %s' % VERSION_CHECKING_URL)
        return None

    data = string.strip(data)

    # format of the data in LATEST.txt is:
    # <version> <MD5 checksum>
    arr = string.split(data)
    if len(arr) != 2:
        return None

    return (arr[0], arr[1])
def master_ping(rank, nodes): nodename = NODESET.get_nodename_from_interface(nodes[rank]) if nodename == synctool_param.NODENAME: print '%s: up' % nodename return verbose('pinging %s' % nodename) unix_out('%s %s' % (synctool_param.PING_CMD, nodes[rank]))
def master_pkg(rank, args):
    '''master side of parallel synctool-pkg:
    only displays what the worker runs'''

    (nodes, ssh_cmd_arr, pkg_cmd_arr) = args
    iface = nodes[rank]
    nodename = NODESET.get_nodename_from_interface(iface)

    verbose('running synctool-pkg on node %s' % nodename)
    unix_out('%s %s %s' % (string.join(ssh_cmd_arr), iface, string.join(pkg_cmd_arr)))
def hard_delete_file(self): file = self.dest_path unix_out('rm -f %s' % file) if not synctool_lib.DRY_RUN: verbose(' os.unlink(%s)' % file) try: os.unlink(file) except OSError, reason: stderr('failed to delete %s : %s' % (file, reason))
def set_permissions(self): file = self.dest_path mode = self.src_statbuf.mode unix_out('chmod 0%o %s' % (mode & 07777, file)) if not synctool_lib.DRY_RUN: verbose(' os.chmod(%s, %04o)' % (file, mode & 07777)) try: os.chmod(file, mode & 07777) except OSError, reason: stderr('failed to chmod %04o %s : %s' % (mode & 07777, file, reason))
def set_owner(self): file = self.dest_path uid = self.src_statbuf.uid gid = self.src_statbuf.gid unix_out("chown %s.%s %s" % (self.src_ascii_uid(), self.src_ascii_gid(), file)) if not synctool_lib.DRY_RUN: verbose(" os.chown(%s, %d, %d)" % (file, uid, gid)) try: os.chown(file, uid, gid) except OSError, reason: stderr("failed to chown %s.%s %s : %s" % (self.src_ascii_uid(), self.src_ascii_gid(), file, reason))
def master_synctool(rank, args):
    '''master side of running synctool on a remote node:
    only displays (and logs) the rsync and ssh commands being run'''

    (nodes, rsync_cmd_arr, ssh_cmd_arr, synctool_cmd_arr) = args
    node = nodes[rank]
    nodename = NODESET.get_nodename_from_interface(node)

    # 'is not None' is the correct identity test; '!= None' invokes
    # (potentially overloaded) equality comparison
    if rsync_cmd_arr is not None:
        verbose('running rsync $masterdir/ to node %s' % nodename)
        unix_out('%s %s:%s/' % (string.join(rsync_cmd_arr), node, synctool_param.MASTERDIR))

    verbose('running synctool on node %s' % nodename)
    unix_out('%s %s %s' % (string.join(ssh_cmd_arr), node, string.join(synctool_cmd_arr)))
def mkdir_basepath(self):
    '''call mkdir -p if the destination directory does not exist yet'''

    if synctool_lib.DRY_RUN:
        return

    basedir = os.path.dirname(self.dest_path)

    # nothing to do when the parent directory is already there
    if synctool_stat.SyncStat(basedir).exists():
        return

    verbose('making directory %s' % synctool_lib.prettypath(basedir))
    unix_out('mkdir -p %s' % basedir)
    synctool_lib.mkdir_p(basedir)
def copy_file(self): self.mkdir_basepath() src = self.src_path dest = self.dest_path if self.dest_isFile(): unix_out('cp %s %s.saved' % (dest, dest)) unix_out('umask 077') unix_out('cp %s %s' % (src, dest)) if not synctool_lib.DRY_RUN: old_umask = os.umask(077) if synctool_param.BACKUP_COPIES: if self.dest_isFile(): verbose(' saving %s as %s.saved' % (dest, dest)) try: shutil.copy2(dest, '%s.saved' % dest) except: stderr('failed to save %s as %s.saved' % (dest, dest)) verbose(' cp %s %s' % (src, dest)) try: shutil.copy2(src, dest) # copy file and stats except: stderr('failed to copy %s to %s' % (self.print_src(), dest)) os.umask(old_umask) else: if self.dest_isFile() and synctool_param.BACKUP_COPIES: verbose(' saving %s as %s.saved' % (dest, dest)) verbose(dryrun_msg(' cp %s %s' % (src, dest)))
def master_ssh(rank, args):
    '''master side of parallel ssh: log the remote command
    that the worker executes on nodes[rank]'''

    (nodes, ssh_cmd_arr, remote_cmd_arr) = args
    target = nodes[rank]
    cmd_str = string.join(remote_cmd_arr)

    if target == synctool_param.NODENAME:
        # local node: the command runs directly, without ssh
        verbose('running %s' % cmd_str)
        unix_out(cmd_str)
        return

    verbose('running %s to %s %s' % (os.path.basename(ssh_cmd_arr[0]), NODESET.get_nodename_from_interface(target), cmd_str))
    unix_out('%s %s %s' % (string.join(ssh_cmd_arr), target, cmd_str))
def save_dir(self): if not synctool_param.BACKUP_COPIES: return path = self.dest_path unix_out('mv %s %s.saved' % (path, path)) if not synctool_lib.DRY_RUN: verbose('moving %s to %s.saved' % (path, path)) try: os.rename(path, '%s.saved' % path) except OSError, reason: stderr('failed to move directory to %s.saved : %s' % (path, reason))
def erase_saved(self): dest = self.dest_path stat_saved_path = synctool_stat.SyncStat('%s.saved' % dest) if synctool_lib.ERASE_SAVED and stat_saved_path.exists() and not stat_saved_path.isDir(): terse(synctool_lib.TERSE_DELETE, '%s.saved' % dest) unix_out('rm %s.saved' % dest) if synctool_lib.DRY_RUN: stdout(dryrun_msg('erase %s.saved' % dest, 'erase')) else: stdout('erase %s.saved' % dest) verbose(' os.unlink(%s.saved)' % dest) try: os.unlink('%s.saved' % dest) except OSError, reason: stderr('failed to delete %s : %s' % (dest, reason))
def make_dir(self): self.mkdir_basepath() path = self.dest_path unix_out('umask 077') unix_out('mkdir %s' % path) if not synctool_lib.DRY_RUN: old_umask = os.umask(077) verbose(' os.mkdir(%s)' % path) try: os.mkdir(path) except OSError, reason: stderr('failed to make directory %s : %s' % (path, reason)) os.umask(old_umask)
def delete_file(self):
    '''remove the destination file; when BACKUP_COPIES is set,
    rename it to <file>.saved instead of deleting it'''

    file = self.dest_path

    # NOTE(review): unlike sibling methods (e.g. hard_delete_file, save_dir),
    # this method emits no unix_out() during a dry run at all -- the
    # unix_out() calls are inside the DRY_RUN guard. Confirm whether that
    # asymmetry is intended before changing it.
    if not synctool_lib.DRY_RUN:
        if synctool_param.BACKUP_COPIES:
            # keep a backup: rename the file to <file>.saved
            unix_out('mv %s %s.saved' % (file, file))
            verbose('moving %s to %s.saved' % (file, file))
            try:
                os.rename(file, '%s.saved' % file)
            except OSError, reason:
                stderr('failed to move file to %s.saved : %s' % (file, reason))
        else:
            # no backup copies configured: really delete the file
            unix_out('rm %s' % file)
            verbose(' os.unlink(%s)' % file)
            try:
                os.unlink(file)
            except OSError, reason:
                stderr('failed to delete %s : %s' % (file, reason))
def run_command_in_dir(dest_dir, cmd):
    '''change directory to dest_dir, and run the shell command'''

    # NOTE(review): this view of the function appears truncated; the
    # non-dry-run path (running the command after a successful chdir and
    # changing back to cwd) continues elsewhere in the file. Do not edit
    # the logic here without seeing the full function.

    verbose(' os.chdir(%s)' % dest_dir)
    unix_out('cd %s' % dest_dir)

    # remember where we came from, to chdir back afterwards
    cwd = os.getcwd()

    # if dry run, the target directory may not exist yet
    # (mkdir has not been called for real, for a dry run)
    if synctool_lib.DRY_RUN:
        run_command(cmd)

        verbose(' os.chdir(%s)' % cwd)
        unix_out('cd %s' % cwd)
        unix_out('')
        return

    try:
        os.chdir(dest_dir)
    except OSError, reason:
        stderr('error changing directory to %s: %s' % (dest_dir, reason))
def symlink_file(self, oldpath): self.mkdir_basepath() # note that old_path is the readlink() of the self.src_path newpath = self.dest_path if self.dest_exists(): unix_out('mv %s %s.saved' % (newpath, newpath)) # # actually, if we want the ownership of the symlink to be correct, # we should do setuid() here # matching ownerships of symbolic links is not yet implemented # # linux makes all symlinks mode 0777, but some other platforms do not umask_mode = synctool_param.SYMLINK_MODE ^ 0777 unix_out('umask %03o' % umask_mode) unix_out('ln -s %s %s' % (oldpath, newpath)) if not synctool_lib.DRY_RUN: if self.dest_exists(): verbose('saving %s as %s.saved' % (newpath, newpath)) try: os.rename(newpath, '%s.saved' % newpath) except OSError, reason: stderr('failed to save %s as %s.saved : %s' % (newpath, newpath, reason)) terse(synctool_lib.TERSE_FAIL, 'save %s.saved' % newpath) old_umask = os.umask(umask_mode) verbose(' os.symlink(%s, %s)' % (oldpath, newpath)) try: os.symlink(oldpath, newpath) except OSError, reason: stderr('failed to create symlink %s -> %s : %s' % (newpath, oldpath, reason)) terse(synctool_lib.TERSE_FAIL, 'link %s' % newpath)
def master_scp(rank, args):
    '''master side of a parallel scp:
    only displays (and logs) the command that copies the files'''

    (nodes, scp_cmd_arr, files_str) = args
    node = nodes[rank]
    nodename = NODESET.get_nodename_from_interface(node)

    # compose the scp target once; the printed command varies only in
    # whether DESTDIR and SCP_OPTIONS are set
    if DESTDIR:
        verbose("copying %s to %s:%s" % (files_str, nodename, DESTDIR))
        target = '%s:%s' % (node, DESTDIR)
    else:
        verbose("copying %s to %s" % (files_str, nodename))
        target = '%s:' % node

    if SCP_OPTIONS:
        unix_out("%s %s %s %s" % (synctool_param.SCP_CMD, SCP_OPTIONS, files_str, target))
    else:
        unix_out("%s %s %s" % (synctool_param.SCP_CMD, files_str, target))
def diff_files(filename):
    '''display a diff of the file'''

    if not synctool_param.DIFF_CMD:
        stderr('error: diff_cmd is undefined in %s' % synctool_param.CONF_FILE)
        return

    # force dry-run mode globally so the lookup below cannot apply updates
    synctool_lib.DRY_RUN = True        # be sure that it doesn't do any updates

    (obj, err) = synctool_overlay.find_terse(synctool_overlay.OV_OVERLAY, filename)
    if err == synctool_overlay.OV_FOUND_MULTIPLE:
        # multiple source possible
        # possibilities have already been printed
        sys.exit(1)

    if err == synctool_overlay.OV_NOT_FOUND:
        return

    if synctool_lib.UNIX_CMD:
        # shell-script output mode: only print the diff command
        unix_out('%s %s %s' % (synctool_param.DIFF_CMD, obj.dest_path, obj.src_path))
    else:
        verbose('%s %s %s' % (synctool_param.DIFF_CMD, obj.dest_path, obj.print_src()))

        # flush our own output before the child writes to the same streams
        sys.stdout.flush()
        sys.stderr.flush()

        # use_subprocess is a module-level capability flag (older Pythons
        # lack the subprocess module); fall back to os.system()
        if use_subprocess:
            cmd_arr = shlex.split(synctool_param.DIFF_CMD)
            cmd_arr.append(obj.dest_path)
            cmd_arr.append(obj.src_path)
            subprocess.Popen(cmd_arr, shell=False)
        else:
            os.system('%s %s %s' % (synctool_param.DIFF_CMD, obj.dest_path, obj.src_path))

        sys.stdout.flush()
        sys.stderr.flush()
def clean(self):
    '''log the cache-cleanup action'''
    verbose("cleaning up caches")
def interfaces(self):
    '''return list of interfaces of relevant nodes

    Returns None when a nonexistent node or group was named,
    [] when the final node selection is empty'''

    # explicitly named nodes are exempt from exclusion/ignore filtering below
    explicit_includes = self.nodelist[:]

    # by default, work on all nodes
    if not self.nodelist and not self.grouplist:
        self.nodelist = synctool_config.get_all_nodes()

    # check if the nodes exist at all; the user could have given bogus names
    all_nodes = synctool_config.get_all_nodes()
    for node in self.nodelist:
        if not node in all_nodes:
            stderr("no such node '%s'" % node)
            return None

    if self.grouplist:
        # check if the groups exist at all
        all_groups = synctool_config.make_all_groups()
        for group in self.grouplist:
            if not group in all_groups:
                stderr("no such group '%s'" % group)
                return None

        self.nodelist.extend(synctool_config.get_nodes_in_groups(self.grouplist))

    if self.exclude_groups:
        self.exclude_nodes.extend(synctool_config.get_nodes_in_groups(self.exclude_groups))

    for node in self.exclude_nodes:
        # remove excluded nodes, if not explicitly included
        if node in self.nodelist and not node in explicit_includes:
            self.nodelist.remove(node)

    if len(self.nodelist) <= 0:
        return []

    ifaces = []

    for node in self.nodelist:
        # skip nodes that are in an ignored group, unless named explicitly
        if node in synctool_param.IGNORE_GROUPS and not node in explicit_includes:
            verbose('node %s is ignored' % node)
            continue

        groups = synctool_config.get_groups(node)
        do_continue = False

        for group in groups:
            if group in synctool_param.IGNORE_GROUPS:
                verbose('group %s is ignored' % group)
                do_continue = True
                break

        if do_continue:
            continue

        iface = synctool_config.get_node_interface(node)
        # remember which node each interface belongs to
        self.namemap[iface] = node

        if not iface in ifaces:    # make sure we do not have duplicates
            ifaces.append(iface)

    return ifaces
def remove(self, pkgs):
    '''log the removal of the given list of packages'''
    verbose("removing packages: %s" % string.join(pkgs))
def upload(interface, upload_filename, upload_suffix=None):
    '''copy a file from a node into the overlay/ tree'''

    if not synctool_param.SCP_CMD:
        stderr('%s: error: scp_cmd has not been defined in %s' % (os.path.basename(sys.argv[0]), synctool_param.CONF_FILE))
        sys.exit(-1)

    if upload_filename[0] != '/':
        stderr('error: the filename to upload must be an absolute path')
        sys.exit(-1)

    trimmed_upload_fn = upload_filename[1:]        # remove leading slash

    import synctool_overlay

    # make the known groups lists
    synctool_config.remove_ignored_groups()
    synctool_param.MY_GROUPS = synctool_config.get_my_groups()
    synctool_param.ALL_GROUPS = synctool_config.make_all_groups()

    if upload_suffix and not upload_suffix in synctool_param.ALL_GROUPS:
        stderr("no such group '%s'" % upload_suffix)
        sys.exit(-1)

    # shadow DRY_RUN because that var can not be used correctly here
    if '-f' in PASS_ARGS or '--fix' in PASS_ARGS:
        dry_run = False
    else:
        dry_run = True
        if not synctool_lib.QUIET:
            stdout('DRY RUN, not uploading any files')
            terse(synctool_lib.TERSE_DRYRUN, 'not uploading any files')

    node = NODESET.get_nodename_from_interface(interface)

    # pretend that the current node is now the given node;
    # this is needed for find() to find the most optimal reference for the file
    orig_NODENAME = synctool_param.NODENAME
    synctool_param.NODENAME = node
    synctool_config.insert_group(node, node)

    orig_MY_GROUPS = synctool_param.MY_GROUPS[:]
    synctool_param.MY_GROUPS = synctool_config.get_my_groups()

    # see if file is already in the repository
    (obj, err) = synctool_overlay.find_terse(synctool_overlay.OV_OVERLAY, upload_filename)
    if err == synctool_overlay.OV_FOUND_MULTIPLE:
        # multiple source possible
        # possibilities have already been printed
        sys.exit(1)

    if err == synctool_overlay.OV_NOT_FOUND:
        # no source path found
        if string.find(upload_filename, '...') >= 0:
            stderr("%s is not in the repository, don't know what to map this path to\n"
                "Please give the full path instead of a terse path, or touch the source file\n"
                "in the repository first and try again" % os.path.basename(upload_filename))
            sys.exit(1)

        # it wasn't a terse path, throw a source path together
        # This picks the first overlay dir as default source, which may not be correct
        # but it is a good guess
        repos_filename = os.path.join(synctool_param.OVERLAY_DIRS[0], trimmed_upload_fn)
        if upload_suffix:
            repos_filename = repos_filename + '._' + upload_suffix
        else:
            repos_filename = repos_filename + '._' + node        # use _nodename as default suffix
    else:
        if upload_suffix:
            # remove the current group suffix an add the specified suffix to the filename
            arr = string.split(obj.src_path, '.')
            if len(arr) > 1 and arr[-1][0] == '_':
                repos_filename = string.join(arr[:-1], '.')
            # NOTE(review): if the condition above is False, repos_filename is
            # used before assignment here (NameError); confirm intended
            # behavior against the full source before fixing
            repos_filename = repos_filename + '._' + upload_suffix
        else:
            repos_filename = obj.src_path

    # restore the real node identity that was shadowed above
    synctool_param.NODENAME = orig_NODENAME
    synctool_param.MY_GROUPS = orig_MY_GROUPS

    verbose('%s:%s uploaded as %s' % (node, upload_filename, repos_filename))
    terse(synctool_lib.TERSE_UPLOAD, repos_filename)
    unix_out('%s %s:%s %s' % (synctool_param.SCP_CMD, interface, upload_filename, repos_filename))

    if dry_run:
        stdout('would be uploaded as %s' % synctool_lib.prettypath(repos_filename))
    else:
        # first check if the directory in the repository exists
        repos_dir = os.path.dirname(repos_filename)
        stat = synctool_stat.SyncStat(repos_dir)
        if not stat.exists():
            verbose('making directory %s' % synctool_lib.prettypath(repos_dir))
            unix_out('mkdir -p %s' % repos_dir)
            synctool_lib.mkdir_p(repos_dir)

        # make scp command array
        scp_cmd_arr = shlex.split(synctool_param.SCP_CMD)
        scp_cmd_arr.append('%s:%s' % (interface, upload_filename))
        scp_cmd_arr.append(repos_filename)

        synctool_lib.run_with_nodename(scp_cmd_arr, NODESET.get_nodename_from_interface(interface))

        if os.path.isfile(repos_filename):
            stdout('uploaded %s' % synctool_lib.prettypath(repos_filename))
def detect_installer():
    '''Attempt to detect the operating system and package system
    Returns instance of a SyncPkg installer class'''

    # NOTE(review): despite the docstring, this function only sets
    # synctool_param.PACKAGE_MANAGER and returns None; confirm the
    # docstring's claim against the caller.
    #
    # attempt a best effort at detecting OSes for the purpose of
    # choosing a package manager
    # It's probably not 100% fool-proof, but as said, it's a best effort
    #
    # Problems:
    # - there are too many platforms and too many Linux distros
    # - there are too many different packaging systems
    # - there are RedHat variants that all have /etc/redhat-release but
    #   use different package managers
    # - SuSE has three (!) package managers that are all in use
    #   and it seems to be by design (!?)
    # - I've seen apt-get work with dpkg, and I've seen apt-get work with rpm
    # - MacOS X has no 'standard' software packaging (the App store??)
    #   There are ports, fink, brew. I prefer 'brew'
    # - The *BSDs have both pkg_add and ports
    # - FreeBSD has freebsd-update to upgrade packages
    #

    platform = os.uname()[0]

    if platform == 'Linux':
        verbose('detected platform Linux')

        stat = synctool_stat.SyncStat()

        # use release file to detect Linux distro,
        # and choose package manager based on that
        for (release_file, pkgmgr) in LINUX_PACKAGE_MANAGERS:
            stat.stat(release_file)
            if stat.exists():
                verbose('detected %s' % release_file)
                verbose('choosing package manager %s' % pkgmgr)
                synctool_param.PACKAGE_MANAGER = pkgmgr
                return

        stderr('unknown Linux distribution')

    elif platform == 'Darwin':        # assume MacOS X
        verbose('detected platform MacOS X')

        # some people like port
        # some people like fink
        # I like homebrew
        verbose('choosing package manager brew')
        synctool_param.PACKAGE_MANAGER = 'brew'

    elif platform in ('NetBSD', 'OpenBSD', 'FreeBSD'):
        verbose('detected platform %s' % platform)

        # choose bsdpkg
        # I know there are ports, but you can 'make' those easily in *BSD
        # or maybe ports will be a separate module in the future
        verbose('choosing package manager bsdpkg')
        synctool_param.PACKAGE_MANAGER = 'bsdpkg'

    # platforms that are not supported yet, but I would like to support
    # or well, most of them
    # Want to know more OSes? See the source of autoconf's config.guess
    elif platform in ('4.4BSD', '4.3bsd', 'BSD/OS', 'SunOS', 'AIX', 'OSF1',
        'HP-UX', 'HI-UX', 'IRIX', 'UNICOS', 'UNICOS/mp', 'ConvexOS',
        'Minix', 'Windows_95', 'Windows_NT', 'CYGWIN', 'MinGW', 'LynxOS',
        'UNIX_System_V', 'BeOS', 'TOPS-10', 'TOPS-20'):
        verbose('detected platform %s' % platform)
        stderr('synctool package management under %s is not yet supported' % platform)

    else:
        stderr("unknown platform '%s'" % platform)
def update(self):
    '''log the package-database update action'''
    verbose("updating package database")
def upgrade(self):
    '''log the package-upgrade action'''
    verbose("upgrading packages")
def main():
    '''entry point: parse options, set up identity/groups, then
    dispatch to the requested action (diff/tasks/reference/single
    files or a full overlay run)'''

    action = get_options()

    synctool_config.add_myhostname()

    if synctool_param.NODENAME == None:
        stderr('unable to determine my nodename, please check %s' % synctool_param.CONF_FILE)
        sys.exit(1)

    if synctool_param.NODENAME in synctool_param.IGNORE_GROUPS:
        stderr('%s: node %s is disabled in the config file' % (synctool_param.CONF_FILE, synctool_param.NODENAME))
        sys.exit(1)

    synctool_config.remove_ignored_groups()

    synctool_param.MY_GROUPS = synctool_config.get_my_groups()
    synctool_param.ALL_GROUPS = synctool_config.make_all_groups()

    if synctool_lib.UNIX_CMD:
        # emitting a shell script: write an identifying header
        t = time.localtime(time.time())

        unix_out('#')
        unix_out('# script generated by synctool on %04d/%02d/%02d %02d:%02d:%02d' % (t[0], t[1], t[2], t[3], t[4], t[5]))
        unix_out('#')
        unix_out('# NODENAME=%s' % synctool_param.NODENAME)
        unix_out('# HOSTNAME=%s' % synctool_param.HOSTNAME)
        unix_out('# MASTERDIR=%s' % synctool_param.MASTERDIR)
        unix_out('# SYMLINK_MODE=0%o' % synctool_param.SYMLINK_MODE)
        unix_out('#')

        if not synctool_lib.DRY_RUN:
            unix_out('# NOTE: --fix specified, applying updates')
            unix_out('#')

        unix_out('')
    else:
        if not synctool_lib.QUIET:
            verbose('my nodename: %s' % synctool_param.NODENAME)
            verbose('my hostname: %s' % synctool_param.HOSTNAME)
            verbose('masterdir: %s' % synctool_param.MASTERDIR)
            verbose('symlink_mode: 0%o' % synctool_param.SYMLINK_MODE)

            if synctool_param.LOGFILE != None and not synctool_lib.DRY_RUN:
                verbose('logfile: %s' % synctool_param.LOGFILE)

            verbose('')

            if synctool_lib.DRY_RUN:
                stdout('DRY RUN, not doing any updates')
                terse(synctool_lib.TERSE_DRYRUN, 'not doing any updates')
            else:
                stdout('--fix specified, applying changes')
                terse(synctool_lib.TERSE_FIXING, ' applying changes')

            verbose('')

    synctool_lib.openlog()

    # export identity to child processes (.post scripts etc.)
    os.putenv('SYNCTOOL_NODENAME', synctool_param.NODENAME)
    os.putenv('SYNCTOOL_MASTERDIR', synctool_param.MASTERDIR)

    if action == ACTION_DIFF:
        for file in SINGLE_FILES:
            diff_files(file)

    elif action == ACTION_RUN_TASKS:
        if SINGLE_FILES:
            for single_file in SINGLE_FILES:
                single_task(single_file)
        else:
            run_tasks()

    elif action == ACTION_REFERENCE:
        for file in SINGLE_FILES:
            reference(file)

    elif SINGLE_FILES:
        # check/update only the named files; run post-scripts for changes
        for single_file in SINGLE_FILES:
            (changed, src) = single_files(single_file)
            if changed:
                run_post(src, single_file)

        run_post_on_directories()
    else:
        # full run over the whole overlay tree
        overlay_files()
        delete_files()
        run_post_on_directories()
        always_run()

    unix_out('# EOB')

    synctool_lib.closelog()
if synctool_lib.DRY_RUN: run_command(cmd) verbose(' os.chdir(%s)' % cwd) unix_out('cd %s' % cwd) unix_out('') return try: os.chdir(dest_dir) except OSError, reason: stderr('error changing directory to %s: %s' % (dest_dir, reason)) else: run_command(cmd) verbose(' os.chdir(%s)' % cwd) unix_out('cd %s' % cwd) unix_out('') try: os.chdir(cwd) except OSError, reason: stderr('error changing directory to %s: %s' % (cwd, reason)) def run_post(src, dest): '''run any on_update or .post script commands for destination path''' global DIR_CHANGED stat = synctool_stat.SyncStat(dest)