def print_src(self):
    '''pretty print my source path'''

    # directories get a trailing path separator appended
    pretty = prettypath(self.src_path)
    if self.src_stat and self.src_stat.is_dir():
        pretty += os.sep
    return pretty
def _split_extension(filename, src_dir):
    '''split a filename in the overlay tree into its synctool parts

    filename: entry in the overlay tree, without leading path
    src_dir: passed only for the purpose of printing error messages

    Returns tuple: SyncObject, importance
    Returns (None, -1) for entries with an unknown group or a group
    that does not apply to this node
    '''

    (name, ext) = os.path.splitext(filename)
    if not ext:
        # no extension at all
        return SyncObject(filename, name, OV_NO_EXT), _group_all()

    if ext == '.post':
        # only the second extension is needed here
        (_, ext) = os.path.splitext(name)
        if ext == '._template':
            # it's a generic template generator
            return SyncObject(filename, name, OV_TEMPLATE_POST), _group_all()

        # it's a generic .post script
        return SyncObject(filename, name, OV_POST), _group_all()

    if ext[:2] != '._':
        # not a group extension; keep the extension as part of the name
        return SyncObject(filename, filename, OV_NO_EXT), _group_all()

    ext = ext[2:]
    if not ext:
        # entry ends in a bare '._'
        return SyncObject(filename, filename, OV_NO_EXT), _group_all()

    if ext == 'template':
        return SyncObject(filename, name, OV_TEMPLATE), _group_all()

    try:
        # importance == index of the group in MY_GROUPS
        importance = synctool.param.MY_GROUPS.index(ext)
    except ValueError:
        if ext not in synctool.param.ALL_GROUPS:
            src_path = os.path.join(src_dir, filename)
            if synctool.param.TERSE:
                terse(synctool.lib.TERSE_ERROR, 'invalid group on %s' %
                                                src_path)
            else:
                stderr('unknown group on %s, skipped' % prettypath(src_path))
            return None, -1

        # it is not one of my groups
        verbose('skipping %s, it is not one of my groups' %
                prettypath(os.path.join(src_dir, filename)))
        return None, -1

    (name2, ext) = os.path.splitext(name)
    if ext == '.post':
        _, ext = os.path.splitext(name2)
        if ext == '._template':
            # it's a group-specific template generator
            return (SyncObject(filename, name2, OV_TEMPLATE_POST),
                    importance)

        # register group-specific .post script
        return SyncObject(filename, name2, OV_POST), importance

    elif ext == '._template':
        return SyncObject(filename, name2, OV_TEMPLATE), importance

    return SyncObject(filename, name), importance
def _run_rsync_purge(cmd_arr):
    '''run rsync for purging

    cmd_arr holds already prepared rsync command + arguments
    Returns: None
    '''

    unix_out(' '.join(cmd_arr))

    # flush our own output before the child starts writing
    sys.stdout.flush()
    sys.stderr.flush()
    try:
        # run rsync
        proc = subprocess.Popen(cmd_arr, shell=False, bufsize=4096,
                                stdout=subprocess.PIPE)
    except OSError as err:
        error('failed to run command %s: %s' % (cmd_arr[0], err.strerror))
        return

    out, _ = proc.communicate()

    if synctool.lib.VERBOSE:
        # parenthesized print works under both Python 2 and Python 3
        print(out)

    out = out.split('\n')
    for line in out:
        line = line.strip()
        if not line:
            continue

        try:
            # rsync itemized output: "%i %n" => code + space + filename
            code, filename = line.split(' ', 1)
        except ValueError:
            # unexpected output line without a space; show it as-is
            stderr(line)
            continue

        if code[:6] == 'ERROR:' or code[:8] == 'WARNING:':
            # output rsync errors and warnings
            stderr(line)
            continue

        if filename == './':
            # rsync has a habit of displaying ugly "./" path
            # cmd_arr[-1] is the destination path
            path = cmd_arr[-1]
        else:
            # cmd_arr[-1] is the destination path
            path = os.path.join(cmd_arr[-1], filename)

        if code[0] == '*':
            # rsync has a message for us
            # most likely "deleting"
            msg = code[1:]
            msg = msg.strip()
            stdout('%s %s (purge)' % (msg, prettypath(path)))
        else:
            stdout('%s mismatch (purge)' % prettypath(path))
def _run_rsync_purge(cmd_arr):
    # type: (List[str]) -> None
    '''run rsync for purging

    cmd_arr holds already prepared rsync command + arguments
    '''

    unix_out(' '.join(cmd_arr))

    # flush our own output before the child starts writing
    sys.stdout.flush()
    sys.stderr.flush()
    try:
        # run rsync
        proc = subprocess.Popen(cmd_arr, shell=False, bufsize=4096,
                                stdout=subprocess.PIPE)
    except OSError as err:
        error('failed to run command %s: %s' % (cmd_arr[0], err.strerror))
        return

    out, _ = proc.communicate()

    if synctool.lib.VERBOSE:
        # parenthesized print works under both Python 2 and Python 3
        print(out)

    out = out.split('\n')
    for line in out:
        line = line.strip()
        if not line:
            continue

        try:
            # rsync itemized output: "%i %n" => code + space + filename
            code, filename = line.split(' ', 1)
        except ValueError:
            # unexpected output line without a space; show it as-is
            stderr(line)
            continue

        if code[:6] == 'ERROR:' or code[:8] == 'WARNING:':
            # output rsync errors and warnings
            stderr(line)
            continue

        if filename == './':
            # rsync has a habit of displaying ugly "./" path
            # cmd_arr[-1] is the destination path
            path = cmd_arr[-1]
        else:
            # cmd_arr[-1] is the destination path
            path = os.path.join(cmd_arr[-1], filename)

        if code[0] == '*':
            # rsync has a message for us
            # most likely "deleting"
            msg = code[1:]
            msg = msg.strip()
            stdout('%s %s (purge)' % (msg, prettypath(path)))
        else:
            stdout('%s mismatch (purge)' % prettypath(path))
def purge_files():
    # type: () -> None
    '''run the purge function'''

    # collect (src_dir, dest_dir) purge pairs to copy;
    # scan only the group dirs that apply to this node
    pairs = []
    avail_groups = os.listdir(param.PURGE_DIR)
    for group in param.MY_GROUPS:
        if group not in avail_groups:
            continue

        purge_root = os.path.join(param.PURGE_DIR, group)
        if not os.path.isdir(purge_root):
            continue

        for dirpath, subdirs, filenames in os.walk(purge_root):
            # rsync only purge dirs that actually contain files
            # otherwise rsync --delete would wreak havoc
            if not filenames:
                continue

            if dirpath == purge_root:
                # root contains files; guard against user mistakes
                # rsync --delete would destroy the whole filesystem
                warning('cowardly refusing to purge the root directory')
                stderr('please remove any files directly under %s/' %
                       prettypath(purge_root))
                return

            pairs.append((dirpath, dirpath[len(purge_root):]))

            # do not recurse into this dir any deeper
            # (in-place mutation prunes os.walk)
            subdirs[:] = []

    cmd_rsync, opts_string = _make_rsync_purge_cmd()

    # call rsync to copy the purge dirs
    for src, dest in pairs:
        # trailing slash on source path is important for rsync
        src += os.sep
        dest += os.sep

        cmd_arr = cmd_rsync[:]
        cmd_arr.extend([src, dest])

        verbose('running rsync%s%s %s' % (opts_string, prettypath(src), dest))
        _run_rsync_purge(cmd_arr)
def _write_purge_filter(f):
    '''write rsync filter rules for purge/ tree

    f: open, writable file object receiving the filter rules
    Returns False on error
    '''

    f.write('+ /var/purge/\n')
    purge_groups = os.listdir(synctool.param.PURGE_DIR)
    # add only the group dirs that apply
    for g in synctool.param.MY_GROUPS:
        if g in purge_groups:
            purge_root = os.path.join(synctool.param.PURGE_DIR, g)
            if not os.path.isdir(purge_root):
                continue

            for path, _, files in os.walk(purge_root):
                if path == purge_root:
                    # guard against user mistakes;
                    # danger of destroying the entire filesystem
                    # if it would rsync --delete the root
                    if len(files) > 0:
                        warning('cowardly refusing to purge the root '
                                'directory')
                        stderr('please remove any files directly '
                               'under %s/' % prettypath(purge_root))
                        return False
                else:
                    # BUGFIX: rsync filter rules are one per line;
                    # the rule must be newline-terminated or it fuses
                    # with the trailing '- /var/purge/*' rule
                    f.write('+ /var/purge/%s/\n' % g)
                    break

    f.write('- /var/purge/*\n')
    return True
def _write_purge_filter(f):
    '''write rsync filter rules for purge/ tree

    f: open, writable file object receiving the filter rules
    Returns False on error
    '''

    f.write('+ /var/purge/\n')
    purge_groups = os.listdir(synctool.param.PURGE_DIR)
    # add only the group dirs that apply
    for g in synctool.param.MY_GROUPS:
        if g in purge_groups:
            purge_root = os.path.join(synctool.param.PURGE_DIR, g)
            if not os.path.isdir(purge_root):
                continue

            for path, _, files in os.walk(purge_root):
                if path == purge_root:
                    # guard against user mistakes;
                    # danger of destroying the entire filesystem
                    # if it would rsync --delete the root
                    if len(files) > 0:
                        stderr('cowardly refusing to purge the root '
                               'directory')
                        stderr('please remove any files directly '
                               'under %s/' % prettypath(purge_root))
                        return False
                else:
                    # BUGFIX: rsync filter rules are one per line;
                    # the rule must be newline-terminated or it fuses
                    # with the trailing '- /var/purge/*' rule
                    f.write('+ /var/purge/%s/\n' % g)
                    break

    f.write('- /var/purge/*\n')
    return True
def run_command(cmd):
    '''run a shell command'''

    # the command may carry arguments; the executable is the first word
    cmdfile = shlex.split(cmd)[0]

    st = synctool.syncstat.SyncStat(cmdfile)
    if not st.exists():
        stderr('error: command %s not found' % prettypath(cmdfile))
        return

    if not st.is_exec():
        stderr("warning: file '%s' is not executable" % prettypath(cmdfile))
        return

    # hand the full command line over to the shell
    synctool.lib.shell_command(cmd)
def _exec_diff(src, dest):
    '''execute diff_cmd to display diff between dest and src'''

    verbose('%s %s %s' % (synctool.param.DIFF_CMD, dest, prettypath(src)))

    # diff is run with the destination first, the repository copy second
    cmd_arr = shlex.split(synctool.param.DIFF_CMD) + [dest, src]
    synctool.lib.exec_command(cmd_arr)
def _exec_diff(src, dest):
    '''execute diff_cmd to display diff between dest and src'''

    verbose('%s %s %s' % (param.DIFF_CMD, dest, prettypath(src)))

    # diff is run with the destination first, the repository copy second
    diff_cmd = shlex.split(param.DIFF_CMD)
    diff_cmd.extend([dest, src])
    synctool.lib.exec_command(diff_cmd)
def mkdir_basepath(self):
    '''call mkdir -p to create leading path'''

    if synctool.lib.DRY_RUN:
        # nothing is created in dry-run mode
        return

    leading = os.path.dirname(self.name)

    # be a bit quiet about it
    if synctool.lib.UNIX_CMD or synctool.lib.VERBOSE:
        verbose('making directory %s' % prettypath(leading))

    synctool.lib.mkdir_p(leading)
def create(self):
    '''copy file'''

    if not self.exists:
        terse(synctool.lib.TERSE_NEW, self.name)

    verbose(dryrun_msg(' copy %s %s' % (self.src_path, self.name)))
    unix_out('cp %s %s' % (self.src_path, self.name))

    if synctool.lib.DRY_RUN:
        # dry run: report only, do not copy
        return

    try:
        # copy file
        shutil.copy(self.src_path, self.name)
    except (OSError, IOError) as err:
        error('failed to copy %s to %s: %s' %
              (prettypath(self.src_path), self.name, err.strerror))
        terse(TERSE_FAIL, self.name)
def create(self):
    # type: () -> None
    '''copy file'''

    if not self.exists:
        terse(synctool.lib.TERSE_NEW, self.name)

    verbose(dryrun_msg(' copy %s %s' % (self.src_path, self.name)))
    unix_out('cp %s %s' % (self.src_path, self.name))

    if synctool.lib.DRY_RUN:
        # dry run: report only, do not copy
        return

    try:
        # copy file
        shutil.copy(self.src_path, self.name)
    except (OSError, IOError) as err:
        error('failed to copy %s to %s: %s' %
              (prettypath(self.src_path), self.name, err.strerror))
        terse(TERSE_FAIL, self.name)
def _toplevel(overlay):
    '''Returns sorted list of fullpath directories under overlay/'''

    candidates = []
    for entry in os.listdir(overlay):
        fullpath = os.path.join(overlay, entry)
        try:
            # importance == position of the group in MY_GROUPS
            importance = synctool.param.MY_GROUPS.index(entry)
        except ValueError:
            verbose('%s/ is not one of my groups, skipping' %
                    prettypath(fullpath))
            continue

        candidates.append((fullpath, importance))

    candidates.sort(_sort_by_importance)

    # return list of only the directory names
    return [pair[0] for pair in candidates]
def create(self):
    '''copy file; also copy timestamps when sync_times is set'''

    if not self.exists:
        terse(synctool.lib.TERSE_NEW, self.name)

    verbose(dryrun_msg(' copy %s %s' % (self.src_path, self.name)))
    unix_out('cp %s %s' % (self.src_path, self.name))
    if not synctool.lib.DRY_RUN:
        try:
            # copy file
            shutil.copy(self.src_path, self.name)
            if synctool.param.SYNC_TIMES:
                # preserve mode and timestamps on the copy
                shutil.copystat(self.src_path, self.name)
        except (OSError, IOError) as err:
            # BUGFIX: was 'except IOError' only; shutil.copy/copystat
            # can raise OSError as well (e.g. chmod/utime permission
            # errors), which previously went uncaught
            error('failed to copy %s to %s: %s' %
                  (prettypath(self.src_path), self.name, err.strerror))
            terse(synctool.lib.TERSE_FAIL, self.name)
def _walk_subtree(src_dir, dest_dir, duplicates, callback):
    '''walk subtree under overlay/group/

    src_dir: fullpath of the overlay source directory being scanned
    dest_dir: corresponding destination directory
    duplicates is a set that keeps us from selecting any duplicate matches
    callback: presumably func(obj, pre_dict, post_dict) -> (ok, updated);
              verify against callers

    Returns pair of booleans: ok, dir was updated
    '''

    # verbose('_walk_subtree(%s)' % src_dir)

    # first pass: collect all relevant entries with their importance
    arr = []
    for entry in os.listdir(src_dir):
        if entry in synctool.param.IGNORE_FILES:
            verbose('ignoring %s' % prettypath(os.path.join(src_dir, entry)))
            continue

        # check any ignored files with wildcards
        # before any group extension is examined
        wildcard_match = False
        for wildcard_entry in synctool.param.IGNORE_FILES_WITH_WILDCARDS:
            if fnmatch.fnmatchcase(entry, wildcard_entry):
                wildcard_match = True
                verbose('ignoring %s (pattern match)' %
                        prettypath(os.path.join(src_dir, entry)))
                break

        if wildcard_match:
            continue

        # obj is None for unknown groups / groups not applying to this node
        obj, importance = _split_extension(entry, src_dir)
        if not obj:
            continue

        arr.append((obj, importance))

    # sort with .pre and .post scripts first
    # this ensures that post_dict will have the required script when needed
    arr.sort(_sort_by_importance_post_first)

    # maps: destination path -> source path of the .pre/.post script
    pre_dict = {}
    post_dict = {}
    dir_changed = False

    # second pass: process entries in importance order
    for obj, importance in arr:
        obj.make(src_dir, dest_dir)

        if obj.ov_type == OV_PRE:
            # register the .pre script and continue
            # (first registration wins: most important group)
            if obj.dest_path in pre_dict:
                continue

            pre_dict[obj.dest_path] = obj.src_path
            continue

        if obj.ov_type == OV_POST:
            # register the .post script and continue
            if obj.dest_path in post_dict:
                continue

            post_dict[obj.dest_path] = obj.src_path
            continue

        if obj.ov_type == OV_TEMPLATE_POST:
            # register the template generator and continue
            # put the dest for the template in the overlay (source) dir
            obj.dest_path = os.path.join(os.path.dirname(obj.src_path),
                                         os.path.basename(obj.dest_path))
            if obj.dest_path in post_dict:
                continue

            post_dict[obj.dest_path] = obj.src_path
            continue

        if obj.src_stat.is_dir():
            if synctool.param.IGNORE_DOTDIRS:
                name = os.path.basename(obj.src_path)
                if name[0] == '.':
                    verbose('ignoring dotdir %s' % obj.print_src())
                    continue

            updated = False
            if obj.dest_path not in duplicates:
                # this is the most important source for this dir
                duplicates.add(obj.dest_path)

                # run callback on the directory itself
                # this will create or fix directory entry if needed
                # a .pre script may be run
                # a .post script should not be run
                ok, updated = callback(obj, pre_dict, {})
                if not ok:
                    # quick exit
                    return False, dir_changed

            # recurse down into the directory
            # with empty pre_dict and post_dict parameters
            ok, updated2 = _walk_subtree(obj.src_path, obj.dest_path,
                                         duplicates, callback)
            if not ok:
                # quick exit
                return False, dir_changed

            # we still need to run the .post script on the dir (if any)
            if updated or updated2:
                obj.run_script(post_dict)

            # finished checking directory
            continue

        if synctool.param.IGNORE_DOTFILES:
            name = os.path.basename(obj.src_path)
            if name[0] == '.':
                verbose('ignoring dotfile %s' % obj.print_src())
                continue

        if synctool.param.REQUIRE_EXTENSION and obj.ov_type == OV_NO_EXT:
            if synctool.param.TERSE:
                terse(synctool.lib.TERSE_ERROR, ('no group on %s' %
                                                 obj.src_path))
            else:
                warning('no group extension on %s, skipped' % obj.print_src())
            continue

        if obj.dest_path in duplicates:
            # there already was a more important source for this destination
            continue

        duplicates.add(obj.dest_path)

        ok, updated = callback(obj, pre_dict, post_dict)
        if not ok:
            # quick exit
            return False, dir_changed

        if obj.ov_type == OV_IGNORE:
            # OV_IGNORE may be set by templates that didn't finish
            continue

        if obj.ov_type == OV_TEMPLATE:
            # a new file was generated
            # call callback on the generated file
            obj.ov_type = OV_REG
            obj.make(src_dir, dest_dir)

            ok, updated = callback(obj, pre_dict, post_dict)
            if not ok:
                # quick exit
                return False, dir_changed

        dir_changed |= updated

    return True, dir_changed
def generate_template(obj, post_dict):
    # type: (SyncObject, Dict[str, str]) -> bool
    '''run template .post script, generating a new file

    The script will run in the source dir (overlay tree)
    and it will run even in dry-run mode
    post_dict maps destination paths to their .post/generator scripts

    Returns: True or False on error
    '''

    # Note: this func modifies input parameter 'obj'
    # when it succesfully generates output, it will change obj's paths
    # and it will be picked up again in overlay._walk_subtree()

    if synctool.lib.NO_POST:
        verbose('skipping template generation of %s' % obj.src_path)
        obj.ov_type = synctool.overlay.OV_IGNORE
        return True

    if SINGLE_FILES and obj.dest_path not in SINGLE_FILES:
        verbose('skipping template generation of %s' % obj.src_path)
        obj.ov_type = synctool.overlay.OV_IGNORE
        return True

    verbose('generating template %s' % obj.print_src())

    src_dir = os.path.dirname(obj.src_path)
    newname = os.path.join(src_dir, os.path.basename(obj.dest_path))
    template = newname + '._template'
    # add most important extension
    newname += '._' + param.NODENAME

    verbose('generating template as %s' % newname)

    statbuf = synctool.syncstat.SyncStat(newname)
    if statbuf.exists():
        verbose('template destination %s already exists' % newname)

        if param.SYNC_TIMES and statbuf.mtime != obj.src_stat.mtime:
            # force the mtime of the template onto the existing output
            verbose('forcing mtime %s => %s' % (obj.src_path, newname))
            synctool.lib.set_filetimes(newname, statbuf.atime,
                                       obj.src_stat.mtime)

        # modify the object; set new src and dest filenames
        # later, visit() will call obj.make(), which will make full paths
        obj.src_path = newname
        obj.dest_path = os.path.basename(obj.dest_path)
        return True

    # get the .post script for the template file
    if template not in post_dict:
        if param.TERSE:
            terse(synctool.lib.TERSE_ERROR, 'no .post %s' % obj.src_path)
        else:
            error('template generator for %s not found' % obj.src_path)
        return False

    generator = post_dict[template]

    # chdir to source directory
    # Note: the change dir is not really needed
    # but the documentation promises that .post scripts run in
    # the dir where the new file will be put
    verbose(' os.chdir(%s)' % src_dir)
    unix_out('cd %s' % src_dir)
    cwd = os.getcwd()
    try:
        os.chdir(src_dir)
    except OSError as err:
        if param.TERSE:
            terse(synctool.lib.TERSE_ERROR, 'chdir %s' % src_dir)
        else:
            error('failed to change directory to %s: %s' % (src_dir,
                                                            err.strerror))
        return False

    # temporarily restore original umask
    # so the script runs with the umask set by the sysadmin
    os.umask(param.ORIG_UMASK)

    # run the script
    # pass template and newname as "$1" and "$2"
    cmd_arr = [generator, obj.src_path, newname]
    verbose(' os.system(%s, %s, %s)' % (prettypath(cmd_arr[0]),
                                        cmd_arr[1], cmd_arr[2]))
    unix_out('# run command %s' % os.path.basename(cmd_arr[0]))

    have_error = False
    if synctool.lib.exec_command(cmd_arr) == -1:
        have_error = True

    statbuf = synctool.syncstat.SyncStat(newname)
    if not statbuf.exists():
        if not have_error:
            if param.TERSE:
                terse(synctool.lib.TERSE_WARNING, 'no output %s' % newname)
            else:
                warning('expected output %s was not generated' % newname)
            obj.ov_type = synctool.overlay.OV_IGNORE
        else:
            # an error message was already printed when exec() failed earlier
            # so, only when --verbose is used, print additional debug info
            verbose('error: expected output %s was not generated' % newname)
    else:
        verbose('found generated output %s' % newname)

        if param.SYNC_TIMES:
            # force the mtime of the template onto the generated output
            verbose('forcing mtime %s => %s' % (obj.src_path, newname))
            synctool.lib.set_filetimes(newname, statbuf.atime,
                                       obj.src_stat.mtime)

    # restore synctool's strict umask
    # FIX: modern octal literal 0o77 (== 077); valid in Python 2.6+ and 3
    os.umask(0o77)

    # chdir back to original location
    verbose(' os.chdir(%s)' % cwd)
    unix_out('cd %s' % cwd)
    try:
        os.chdir(cwd)
    except OSError as err:
        if param.TERSE:
            # BUGFIX: was 'chdir %s' % src_dir -- reported the wrong
            # directory on the chdir-back error path
            terse(synctool.lib.TERSE_ERROR, 'chdir %s' % cwd)
        else:
            error('failed to change directory to %s: %s' % (cwd,
                                                            err.strerror))
        return False

    if have_error:
        return False

    # modify the object; set new src and dest filenames
    # later, visit() will call obj.make(), which will make full paths
    obj.src_path = newname
    obj.dest_path = os.path.basename(obj.dest_path)
    return True
def rsync_upload(up):
    '''upload a file/dir to $overlay/group/ or $purge/group/

    up: upload descriptor carrying node, address, filename, repos_path,
        purge flag (presumably an UploadFile-like object; verify
        against caller)
    '''

    up.make_repos_path()

    # check whether the remote entry exists
    remote_stats = _remote_stat(up)
    if remote_stats is None:
        # error message was already printed
        return

    # first element in array is our 'target'
    isdir = remote_stats[0].is_dir()
    if isdir and synctool.param.REQUIRE_EXTENSION and not up.purge:
        error('remote is a directory')
        stderr('synctool can not upload directories to $overlay '
               'when require_extension is set')
        return

    if isdir:
        # trailing slash matters to rsync: copy dir contents
        up.filename += os.sep
        up.repos_path += os.sep

    # make command: rsync [-n] [-v] node:/path/ $overlay/group/path/
    cmd_arr = shlex.split(synctool.param.RSYNC_CMD)

    # opts is just for the 'visual aspect'; it is displayed when --verbose
    opts = ' '
    if synctool.lib.DRY_RUN:
        # cmd_arr.append('-n')
        opts += '-n '

    if synctool.lib.VERBOSE:
        cmd_arr.append('-v')
        opts += '-v '
        # -v conflicts with quiet options in the configured rsync command
        if '-q' in cmd_arr:
            cmd_arr.remove('-q')
        if '--quiet' in cmd_arr:
            cmd_arr.remove('--quiet')

    # use ssh connection multiplexing (if possible)
    ssh_cmd_arr = shlex.split(synctool.param.SSH_CMD)
    use_multiplex = synctool.multiplex.use_mux(up.node)
    if use_multiplex:
        synctool.multiplex.ssh_args(ssh_cmd_arr, up.node)

    # '--' terminates option parsing; remote source, local repos dest
    cmd_arr.extend(['-e', ' '.join(ssh_cmd_arr)])
    cmd_arr.extend(['--', up.address + ':' + up.filename, up.repos_path])

    verbose_path = prettypath(up.repos_path)
    if synctool.lib.DRY_RUN:
        stdout('would be uploaded as %s' % verbose_path)
    else:
        # create the local destination directory before rsync runs
        dest_dir = os.path.dirname(up.repos_path)
        _makedir(dest_dir, remote_stats[1:])
        if not synctool.lib.path_exists(dest_dir):
            error('failed to create %s/' % dest_dir)
            return

    # for $overlay, never do rsync --delete / --delete-excluded
    # for $purge, don't use rsync --delete on single files
    # because it would (inadvertently) delete all existing files in the repos
    if not up.purge or not isdir:
        if '--delete' in cmd_arr:
            cmd_arr.remove('--delete')
        if '--delete-excluded' in cmd_arr:
            cmd_arr.remove('--delete-excluded')

    verbose('running rsync%s%s:%s to %s' % (opts, up.node, up.filename,
                                            verbose_path))

    if not synctool.lib.DRY_RUN:
        synctool.lib.run_with_nodename(cmd_arr, up.node)
        if not synctool.lib.path_exists(up.repos_path):
            error('upload failed')
        else:
            stdout('uploaded %s' % verbose_path)
    else:
        # in dry-run mode, show the command anyway
        unix_out('# dry run, rsync not performed')
        unix_out(' '.join(cmd_arr))
def rsync_upload(up):
    '''upload a file/dir to $overlay/group/ or $purge/group/

    up: upload descriptor carrying node, address, filename, repos_path,
        purge flag (presumably an UploadFile-like object; verify
        against caller)
    '''

    up.make_repos_path()

    # check whether the remote entry exists
    ok, isdir = _remote_isdir(up)
    if not ok:
        # error message was already printed
        return

    if isdir and synctool.param.REQUIRE_EXTENSION and not up.purge:
        stderr('error: remote is a directory')
        stderr('synctool can not upload directories to $overlay '
               'when require_extension is set')
        return

    if isdir:
        # trailing slash matters to rsync: copy dir contents
        up.filename += os.sep
        up.repos_path += os.sep

    # make command: rsync [-n] [-v] node:/path/ $overlay/group/path/
    cmd_arr = shlex.split(synctool.param.RSYNC_CMD)

    # opts is just for the 'visual aspect'; it is displayed when --verbose
    opts = ' '
    if synctool.lib.DRY_RUN:
        cmd_arr.append('-n')
        opts += '-n '

    if synctool.lib.VERBOSE:
        cmd_arr.append('-v')
        opts += '-v '
        # -v conflicts with quiet options in the configured rsync command
        if '-q' in cmd_arr:
            cmd_arr.remove('-q')
        if '--quiet' in cmd_arr:
            cmd_arr.remove('--quiet')

    # remote source, local repos destination
    cmd_arr.append(up.address + ':' + up.filename)
    cmd_arr.append(up.repos_path)

    verbose_path = prettypath(up.repos_path)
    if synctool.lib.DRY_RUN:
        stdout('would be uploaded as %s' % verbose_path)
    else:
        # create the local destination directory before rsync runs
        dest_dir = os.path.dirname(up.repos_path)
        unix_out('mkdir -p %s' % dest_dir)
        synctool.lib.mkdir_p(dest_dir)
        if not os.path.exists(dest_dir):
            stderr('error: failed to create %s/' % dest_dir)
            return

    # for $overlay, never do rsync --delete / --delete-excluded
    # for $purge, don't use rsync --delete on single files
    # because it would (inadvertently) delete all existing files in the repos
    if not up.purge or not isdir:
        if '--delete' in cmd_arr:
            cmd_arr.remove('--delete')
        if '--delete-excluded' in cmd_arr:
            cmd_arr.remove('--delete-excluded')

    verbose('running rsync%s%s:%s to %s' % (opts, up.node, up.filename,
                                            verbose_path))
    unix_out(' '.join(cmd_arr))

    if not synctool.lib.DRY_RUN:
        synctool.lib.run_with_nodename(cmd_arr, up.node)

        if not os.path.exists(up.repos_path):
            stderr('error: upload failed')
        else:
            stdout('uploaded %s' % verbose_path)
def _split_extension(filename, src_dir):
    '''split a filename in the overlay tree into its synctool parts

    filename: entry in the overlay tree, without leading path
    src_dir is passed for the purpose of printing error messages

    Returns tuple: SyncObject, importance
    Returns (None, -1) for entries with an unknown group or a group
    that does not apply to this node
    '''

    (name, ext) = os.path.splitext(filename)
    if not ext:
        # no extension at all
        return SyncObject(filename, name, OV_NO_EXT), _group_all()

    if ext == '.pre':
        # it's a generic .pre script
        return SyncObject(filename, name, OV_PRE), _group_all()

    if ext == '.post':
        (name2, ext) = os.path.splitext(name)
        if ext == '._template':
            # it's a generic template generator
            return SyncObject(filename, name, OV_TEMPLATE_POST), _group_all()

        # it's a generic .post script
        return SyncObject(filename, name, OV_POST), _group_all()

    if ext[:2] != '._':
        # not a group extension; keep the extension as part of the name
        return SyncObject(filename, filename, OV_NO_EXT), _group_all()

    ext = ext[2:]
    if not ext:
        # entry ends in a bare '._'
        return SyncObject(filename, filename, OV_NO_EXT), _group_all()

    if ext == 'template':
        return SyncObject(filename, name, OV_TEMPLATE), _group_all()

    try:
        # importance == index of the group in MY_GROUPS
        importance = synctool.param.MY_GROUPS.index(ext)
    except ValueError:
        if ext not in synctool.param.ALL_GROUPS:
            src_path = os.path.join(src_dir, filename)
            if synctool.param.TERSE:
                terse(synctool.lib.TERSE_ERROR, ('invalid group on %s' %
                                                 src_path))
            else:
                warning('unknown group on %s, skipped' % prettypath(src_path))
            return None, -1

        # it is not one of my groups
        verbose('skipping %s, it is not one of my groups' %
                prettypath(os.path.join(src_dir, filename)))
        return None, -1

    # group extension stripped; examine the next (inner) extension
    (name2, ext) = os.path.splitext(name)
    if ext == '.pre':
        # register group-specific .pre script
        return SyncObject(filename, name2, OV_PRE), importance

    elif ext == '.post':
        _, ext = os.path.splitext(name2)
        if ext == '._template':
            # it's a group-specific template generator
            return (SyncObject(filename, name2, OV_TEMPLATE_POST),
                    importance)

        # register group-specific .post script
        return SyncObject(filename, name2, OV_POST), importance

    elif ext == '._template':
        return SyncObject(filename, name2, OV_TEMPLATE), importance

    return SyncObject(filename, name), importance