def single_files(filename):
    '''check/update a single file

    returns (True, path_in_synctree) if file is different'''

    if not filename:
        stderr('missing filename')
        return (False, None)

    # locate the source for this destination path in the overlay tree
    (entry, errcode) = synctool_overlay.find_terse(synctool_overlay.OV_OVERLAY, filename)

    if errcode == synctool_overlay.OV_FOUND_MULTIPLE:
        # multiple sources are possible;
        # the possibilities have already been printed
        sys.exit(1)

    if errcode == synctool_overlay.OV_NOT_FOUND:
        stderr('%s is not in the overlay tree' % filename)
        return (False, None)

    verbose('checking against %s' % entry.print_src())

    is_changed = entry.compare_files()
    if not is_changed:
        stdout('%s is up to date' % filename)
        terse(synctool_lib.TERSE_OK, filename)
        unix_out('# %s is up to date\n' % entry.print_dest())

    return (is_changed, entry.src_path)
def erase_saved(self): dest = self.dest_path stat_saved_path = synctool_stat.SyncStat('%s.saved' % dest) if synctool_lib.ERASE_SAVED and stat_saved_path.exists() and not stat_saved_path.isDir(): terse(synctool_lib.TERSE_DELETE, '%s.saved' % dest) unix_out('rm %s.saved' % dest) if synctool_lib.DRY_RUN: stdout(dryrun_msg('erase %s.saved' % dest, 'erase')) else: stdout('erase %s.saved' % dest) verbose(' os.unlink(%s.saved)' % dest) try: os.unlink('%s.saved' % dest) except OSError, reason: stderr('failed to delete %s : %s' % (dest, reason))
def ov_perror(errorcode, src_path):
    '''print error message for source path'''

    # non-negative codes are valid group numbers, and "not my group"
    # is a normal condition; neither warrants an error message
    if errorcode >= 0 or errorcode == OV_NOT_MY_GROUP:
        return

    if errorcode == OV_NO_GROUP_EXT:
        if synctool_param.TERSE:
            terse(synctool_lib.TERSE_ERROR, 'no group on %s' % src_path)
        else:
            stderr('no underscored group extension on %s, skipped' % synctool_lib.prettypath(src_path))

    elif errorcode == OV_UNKNOWN_GROUP:
        if synctool_param.TERSE:
            terse(synctool_lib.TERSE_ERROR, 'invalid group on %s' % src_path)
        else:
            stderr('unknown group on %s, skipped' % synctool_lib.prettypath(src_path))
def symlink_file(self, oldpath): self.mkdir_basepath() # note that old_path is the readlink() of the self.src_path newpath = self.dest_path if self.dest_exists(): unix_out('mv %s %s.saved' % (newpath, newpath)) # # actually, if we want the ownership of the symlink to be correct, # we should do setuid() here # matching ownerships of symbolic links is not yet implemented # # linux makes all symlinks mode 0777, but some other platforms do not umask_mode = synctool_param.SYMLINK_MODE ^ 0777 unix_out('umask %03o' % umask_mode) unix_out('ln -s %s %s' % (oldpath, newpath)) if not synctool_lib.DRY_RUN: if self.dest_exists(): verbose('saving %s as %s.saved' % (newpath, newpath)) try: os.rename(newpath, '%s.saved' % newpath) except OSError, reason: stderr('failed to save %s as %s.saved : %s' % (newpath, newpath, reason)) terse(synctool_lib.TERSE_FAIL, 'save %s.saved' % newpath) old_umask = os.umask(umask_mode) verbose(' os.symlink(%s, %s)' % (oldpath, newpath)) try: os.symlink(oldpath, newpath) except OSError, reason: stderr('failed to create symlink %s -> %s : %s' % (newpath, oldpath, reason)) terse(synctool_lib.TERSE_FAIL, 'link %s' % newpath)
def overlay_pass2(filelist, filedict):
    '''do pass #2 of 2; create dictionary of destination paths from list
    Each element in the dictionary is an instance of OverlayEntry'''

    for entry in filelist:
        existing = filedict.get(entry.dest_path)
        if existing is not None:
            if entry.groupnum < existing.groupnum:
                # this group is more important, so override the old entry
                del filedict[entry.dest_path]
                existing = None

            elif (not (entry.src_isDir() and existing.src_isDir())) \
                and entry.groupnum == existing.groupnum:
                # duplicate paths at the same group level are an error,
                # unless both entries are directories
                if synctool_param.TERSE:
                    synctool_lib.terse(synctool_lib.TERSE_ERROR, 'duplicate source paths in repository for:')
                    synctool_lib.terse(synctool_lib.TERSE_ERROR, entry.src_path)
                    synctool_lib.terse(synctool_lib.TERSE_ERROR, existing.src_path)
                else:
                    stderr('error: duplicate source paths in repository for:\n'
                        'error: %s\n'
                        'error: %s\n' % (synctool_lib.prettypath(entry.src_path),
                        synctool_lib.prettypath(existing.src_path))
                    )
                continue

            else:
                # this group is less important, skip it
                continue

        # add or update filedict
        filedict[entry.dest_path] = entry
def upload(interface, upload_filename, upload_suffix=None):
    '''copy a file from a node into the overlay/ tree

    interface       : address/interface of the node to copy from
    upload_filename : absolute path of the file on that node
    upload_suffix   : optional group suffix for the repository filename

    Exits the program on configuration or argument errors'''

    if not synctool_param.SCP_CMD:
        stderr('%s: error: scp_cmd has not been defined in %s' % (os.path.basename(sys.argv[0]), synctool_param.CONF_FILE))
        sys.exit(-1)

    if upload_filename[0] != '/':
        stderr('error: the filename to upload must be an absolute path')
        sys.exit(-1)

    trimmed_upload_fn = upload_filename[1:]        # remove leading slash

    import synctool_overlay

    # make the known groups lists
    synctool_config.remove_ignored_groups()
    synctool_param.MY_GROUPS = synctool_config.get_my_groups()
    synctool_param.ALL_GROUPS = synctool_config.make_all_groups()

    if upload_suffix and not upload_suffix in synctool_param.ALL_GROUPS:
        stderr("no such group '%s'" % upload_suffix)
        sys.exit(-1)

    # shadow DRY_RUN because that var can not be used correctly here
    if '-f' in PASS_ARGS or '--fix' in PASS_ARGS:
        dry_run = False
    else:
        dry_run = True
        if not synctool_lib.QUIET:
            stdout('DRY RUN, not uploading any files')
            terse(synctool_lib.TERSE_DRYRUN, 'not uploading any files')

    node = NODESET.get_nodename_from_interface(interface)

    # pretend that the current node is now the given node;
    # this is needed for find() to find the most optimal reference for the file
    orig_NODENAME = synctool_param.NODENAME
    synctool_param.NODENAME = node
    synctool_config.insert_group(node, node)

    orig_MY_GROUPS = synctool_param.MY_GROUPS[:]
    synctool_param.MY_GROUPS = synctool_config.get_my_groups()

    # see if file is already in the repository
    (obj, err) = synctool_overlay.find_terse(synctool_overlay.OV_OVERLAY, upload_filename)

    if err == synctool_overlay.OV_FOUND_MULTIPLE:
        # multiple source possible
        # possibilities have already been printed
        sys.exit(1)

    if err == synctool_overlay.OV_NOT_FOUND:
        # no source path found
        if string.find(upload_filename, '...') >= 0:
            stderr("%s is not in the repository, don't know what to map this path to\n"
                "Please give the full path instead of a terse path, or touch the source file\n"
                "in the repository first and try again" % os.path.basename(upload_filename))
            sys.exit(1)

        # it wasn't a terse path, throw a source path together
        # This picks the first overlay dir as default source, which may not be correct
        # but it is a good guess
        repos_filename = os.path.join(synctool_param.OVERLAY_DIRS[0], trimmed_upload_fn)
        if upload_suffix:
            repos_filename = repos_filename + '._' + upload_suffix
        else:
            repos_filename = repos_filename + '._' + node    # use _nodename as default suffix
    else:
        if upload_suffix:
            # remove the current group suffix and add the specified suffix
            # to the filename
            # bugfix: start from obj.src_path so that repos_filename is
            # always defined, even when the source file carries no
            # underscored group extension (previously raised NameError)
            repos_filename = obj.src_path
            arr = string.split(obj.src_path, '.')
            if len(arr) > 1 and arr[-1][0] == '_':
                repos_filename = string.join(arr[:-1], '.')
            repos_filename = repos_filename + '._' + upload_suffix
        else:
            repos_filename = obj.src_path

    # restore the real node identity
    synctool_param.NODENAME = orig_NODENAME
    synctool_param.MY_GROUPS = orig_MY_GROUPS

    verbose('%s:%s uploaded as %s' % (node, upload_filename, repos_filename))
    terse(synctool_lib.TERSE_UPLOAD, repos_filename)
    unix_out('%s %s:%s %s' % (synctool_param.SCP_CMD, interface, upload_filename, repos_filename))

    if dry_run:
        stdout('would be uploaded as %s' % synctool_lib.prettypath(repos_filename))
    else:
        # first check if the directory in the repository exists
        repos_dir = os.path.dirname(repos_filename)
        stat = synctool_stat.SyncStat(repos_dir)
        if not stat.exists():
            verbose('making directory %s' % synctool_lib.prettypath(repos_dir))
            unix_out('mkdir -p %s' % repos_dir)
            synctool_lib.mkdir_p(repos_dir)

        # make scp command array
        scp_cmd_arr = shlex.split(synctool_param.SCP_CMD)
        scp_cmd_arr.append('%s:%s' % (interface, upload_filename))
        scp_cmd_arr.append(repos_filename)

        synctool_lib.run_with_nodename(scp_cmd_arr, NODESET.get_nodename_from_interface(interface))

        if os.path.isfile(repos_filename):
            stdout('uploaded %s' % synctool_lib.prettypath(repos_filename))
def overlay_pass1(overlay_dir, filelist, dest_dir = '/', highest_groupnum = sys.maxint, handle_postscripts = True):
    '''do pass #1 of 2; create list of source and dest files
    Each element in the list is an instance of SyncObject

    overlay_dir       : directory in the repository to scan
    filelist          : output list, appended to in place
    dest_dir          : destination path corresponding to overlay_dir
    highest_groupnum  : best (lowest) group level inherited from the parent dir
    handle_postscripts: when True, register .post scripts in POST_SCRIPTS'''

    global POST_SCRIPTS

    for entry in os.listdir(overlay_dir):
        src_path = os.path.join(overlay_dir, entry)
        src_statbuf = synctool_stat.SyncStat(src_path)

        if src_statbuf.isDir():
            if synctool_param.IGNORE_DOTDIRS and entry[0] == '.':
                continue
            isDir = True
        else:
            if synctool_param.IGNORE_DOTFILES and entry[0] == '.':
                continue
            isDir = False

        # check any ignored files with wildcards
        # before any group extension is examined
        if synctool_param.IGNORE_FILES_WITH_WILDCARDS:
            wildcard_match = False
            for wildcard_entry in synctool_param.IGNORE_FILES_WITH_WILDCARDS:
                if fnmatch.fnmatchcase(entry, wildcard_entry):
                    wildcard_match = True
                    break

            if wildcard_match:
                continue

        # strip the group extension; groupnum is the group's priority,
        # isPost says whether this is a ".post" script
        (name, groupnum, isPost) = split_extension(entry, not isDir)
        if groupnum < 0:
            # not a relevant group, so skip it
            # Note that this also prunes trees if you have group-specific subdirs
            if groupnum != OV_NOT_MY_GROUP:
                # "not my group" is a rather normal error code, but if it is
                # something else, it's a serious error that we should report
                ov_perror(groupnum, os.path.join(overlay_dir, entry))

            continue

        if name in synctool_param.IGNORE_FILES:
            continue

        # inherit lower group level from parent directory
        if groupnum > highest_groupnum:
            groupnum = highest_groupnum

        if isPost:
            if handle_postscripts:
                if not src_statbuf.isExec():
                    stderr('warning: .post script %s is not executable, ignored' % synctool_lib.prettypath(src_path))
                    continue

                # register .post script
                # trigger is the source file that would trigger the .post script to run
                trigger = os.path.join(overlay_dir, name)

                if POST_SCRIPTS.has_key(trigger):
                    # keep only the most important (lowest groupnum) script
                    if groupnum >= POST_SCRIPTS[trigger].groupnum:
                        continue

                POST_SCRIPTS[trigger] = synctool_object.SyncObject(src_path, dest_dir, groupnum, src_statbuf)
            else:
                # unfortunately, the name has been messed up already
                # so therefore just ignore the file and issue a warning
                if synctool_param.TERSE:
                    terse(synctool_lib.TERSE_WARNING, 'ignoring %s' % src_path)
                else:
                    stderr('warning: ignoring .post script %s' % synctool_lib.prettypath(src_path))

            continue

        dest_path = os.path.join(dest_dir, name)

        filelist.append(synctool_object.SyncObject(src_path, dest_path, groupnum, src_statbuf))

        if isDir:
            # recurse into subdir
            overlay_pass1(src_path, filelist, dest_path, groupnum, handle_postscripts)
def main():
    '''run the chosen action on this node
    Sets up node identity and group lists, prints the banner (or the
    shell-script header in UNIX_CMD mode), then dispatches on the action
    returned by get_options()'''

    action = get_options()

    synctool_config.add_myhostname()

    if synctool_param.NODENAME == None:
        stderr('unable to determine my nodename, please check %s' % synctool_param.CONF_FILE)
        sys.exit(1)

    if synctool_param.NODENAME in synctool_param.IGNORE_GROUPS:
        stderr('%s: node %s is disabled in the config file' % (synctool_param.CONF_FILE, synctool_param.NODENAME))
        sys.exit(1)

    synctool_config.remove_ignored_groups()
    synctool_param.MY_GROUPS = synctool_config.get_my_groups()
    synctool_param.ALL_GROUPS = synctool_config.make_all_groups()

    if synctool_lib.UNIX_CMD:
        # emit a header for the generated shell script
        t = time.localtime(time.time())

        unix_out('#')
        unix_out('# script generated by synctool on %04d/%02d/%02d %02d:%02d:%02d' % (t[0], t[1], t[2], t[3], t[4], t[5]))
        unix_out('#')
        unix_out('# NODENAME=%s' % synctool_param.NODENAME)
        unix_out('# HOSTNAME=%s' % synctool_param.HOSTNAME)
        unix_out('# MASTERDIR=%s' % synctool_param.MASTERDIR)
        unix_out('# SYMLINK_MODE=0%o' % synctool_param.SYMLINK_MODE)
        unix_out('#')

        if not synctool_lib.DRY_RUN:
            unix_out('# NOTE: --fix specified, applying updates')
            unix_out('#')

        unix_out('')
    else:
        # normal (non-script) mode: print an informational banner
        if not synctool_lib.QUIET:
            verbose('my nodename: %s' % synctool_param.NODENAME)
            verbose('my hostname: %s' % synctool_param.HOSTNAME)
            verbose('masterdir: %s' % synctool_param.MASTERDIR)
            verbose('symlink_mode: 0%o' % synctool_param.SYMLINK_MODE)

            if synctool_param.LOGFILE != None and not synctool_lib.DRY_RUN:
                verbose('logfile: %s' % synctool_param.LOGFILE)

            verbose('')

            if synctool_lib.DRY_RUN:
                stdout('DRY RUN, not doing any updates')
                terse(synctool_lib.TERSE_DRYRUN, 'not doing any updates')
            else:
                stdout('--fix specified, applying changes')
                terse(synctool_lib.TERSE_FIXING, ' applying changes')

            verbose('')

    synctool_lib.openlog()

    # export identity to .post scripts and other child processes
    os.putenv('SYNCTOOL_NODENAME', synctool_param.NODENAME)
    os.putenv('SYNCTOOL_MASTERDIR', synctool_param.MASTERDIR)

    # dispatch on the requested action
    if action == ACTION_DIFF:
        for file in SINGLE_FILES:
            diff_files(file)

    elif action == ACTION_RUN_TASKS:
        if SINGLE_FILES:
            for single_file in SINGLE_FILES:
                single_task(single_file)
        else:
            run_tasks()

    elif action == ACTION_REFERENCE:
        for file in SINGLE_FILES:
            reference(file)

    elif SINGLE_FILES:
        # default action, restricted to the given files
        for single_file in SINGLE_FILES:
            (changed, src) = single_files(single_file)
            if changed:
                run_post(src, single_file)

        run_post_on_directories()
    else:
        # default action on the full overlay tree
        overlay_files()
        delete_files()
        run_post_on_directories()

    always_run()

    unix_out('# EOB')

    synctool_lib.closelog()
def compare_files(self): '''see what the differences are for this SyncObject, and fix it if not a dry run self.src_path is the file in the synctool/overlay tree self.dest_path is the file in the system need_update is a local boolean saying if a path needs to be updated Return value is False when file is not changed, True when file is updated -- The structure of this long function is as follows; stat(src) this stat is 'sacred' and dest should be set accordingly stat(dest) if src is symlink: check if dest exists check if dest is symlink check if dest is dir treat dest as file fix if needed if src is directory: check if dest exists check if dest is symlink check if dest is dir treat dest as file fix if needed if src is file: check if dest exists check if dest is symlink check if dest is dir treat dest as file check filesize do md5 checksum fix if needed don't know what type src is check ownership check permissions return False''' src_path = self.src_path dest_path = self.dest_path self.src_stat() src_stat = self.src_statbuf if not src_stat: return False self.dest_stat() dest_stat = self.dest_statbuf # if not dest_stat: # pass # destination does not exist need_update = False # # if source is a symbolic link ... 
# if src_stat.isLink(): need_update = False try: src_link = os.readlink(src_path) except OSError, reason: stderr('failed to readlink %s : %s' % (src_path, reason)) terse(synctool_lib.TERSE_FAIL, 'readlink %s' % src_path) return False if not dest_stat.exists(): stdout('symbolic link %s does not exist' % dest_path) terse(synctool_lib.TERSE_LINK, dest_path) unix_out('# create symbolic link %s' % dest_path) need_update = True elif dest_stat.isLink(): try: dest_link = os.readlink(dest_path) except OSError, reason: stderr('failed to readlink %s : %s (but ignoring this error)' % (src_path, reason)) terse(synctool_lib.TERSE_FAIL, 'readlink %s' % src_path) dest_link = None if src_link != dest_link: stdout('%s should point to %s, but points to %s' % (dest_path, src_link, dest_link)) terse(synctool_lib.TERSE_LINK, dest_path) unix_out('# relink symbolic link %s' % dest_path) need_update = True if (dest_stat.mode & 07777) != synctool_param.SYMLINK_MODE: stdout('%s should have mode %04o (symlink), but has %04o' % (dest_path, synctool_param.SYMLINK_MODE, dest_stat.mode & 07777)) terse(synctool_lib.TERSE_MODE, '%04o %s' % (synctool_param.SYMLINK_MODE, dest_path)) unix_out('# fix permissions of symbolic link %s' % dest_path) need_update = True
unix_out('# fix permissions of symbolic link %s' % dest_path) need_update = True elif dest_stat.isDir(): stdout('%s should be a symbolic link' % dest_path) terse(synctool_lib.TERSE_LINK, dest_path) unix_out('# target should be a symbolic link') self.save_dir() need_update = True # # treat as file ... # else: stdout('%s should be a symbolic link' % dest_path) terse(synctool_lib.TERSE_LINK, dest_path) unix_out('# target should be a symbolic link') need_update = True # # (re)create the symbolic link # if need_update: self.symlink_file(src_link) unix_out('') return True # # if the source is a directory ... # elif src_stat.isDir():
def interfaces(self): '''return list of interfaces of relevant nodes''' explicit_includes = self.nodelist[:] # by default, work on all nodes if not self.nodelist and not self.grouplist: self.nodelist = synctool_config.get_nodes_in_groups(["all"]) # check if the nodes exist at all; the user could have given bogus names all_nodes = synctool_config.get_all_nodes() for node in self.nodelist: if not node in all_nodes: stderr("no such node '%s'" % node) return None if self.grouplist: # check if the groups exist at all all_groups = synctool_config.make_all_groups() for group in self.grouplist: if not group in all_groups: stderr("no such group '%s'" % group) return None self.nodelist.extend(synctool_config.get_nodes_in_groups(self.grouplist)) if self.exclude_groups: self.exclude_nodes.extend(synctool_config.get_nodes_in_groups(self.exclude_groups)) for node in self.exclude_nodes: # remove excluded nodes, if not explicitly included if node in self.nodelist and not node in explicit_includes: self.nodelist.remove(node) if len(self.nodelist) <= 0: return [] ifaces = [] ignored_nodes = '' for node in self.nodelist: if node in synctool_param.IGNORE_GROUPS and not node in explicit_includes: verbose('node %s is ignored' % node) if not ignored_nodes: ignored_nodes = node else: ignored_nodes = ignored_nodes + ',' + node continue groups = synctool_config.get_groups(node) do_continue = False for group in groups: if group in synctool_param.IGNORE_GROUPS: verbose('group %s is ignored' % group) if not ignored_nodes: ignored_nodes = node else: ignored_nodes = ignored_nodes + ',' + node do_continue = True break if do_continue: continue iface = synctool_config.get_node_interface(node) self.namemap[iface] = node if not iface in ifaces: # make sure we do not have duplicates ifaces.append(iface) # print message about ignored nodes if ignored_nodes and not synctool_lib.QUIET and not synctool_lib.UNIX_CMD: if synctool_param.TERSE: synctool_lib.terse(synctool_lib.TERSE_WARNING, 'ignored nodes') 
else: ignored_nodes = 'warning: ignored nodes: ' + ignored_nodes if len(ignored_nodes) < 80: print ignored_nodes else: print 'warning: some nodes are ignored' return ifaces