def github_api(url):
    # type: (str) -> Union[List[Dict[str, Any]], Dict[str, Any]]
    # NOTE(review): the annotation omits the None returned on every error
    # path; the original comment flagged this as a mypy workaround
    '''Access GitHub API via URL

    Returns data (list or dict) depending on GitHub API function
    or None on error
    '''
    verbose('loading URL %s' % url)
    try:
        # can not use 'with' statement with urlopen()..?
        web = urllib2.urlopen(url)
    except urllib2.HTTPError as err:
        error('webserver at %s: %u %s' % (url, err.code, err.msg))
        return None

    except urllib2.URLError as err:
        # URLError carries only a 'reason' (no HTTP status)
        error('failed to access %s: %s' % (url, str(err.reason)))
        return None

    except IOError as err:
        error('failed to access %s: %s' % (url, err.strerror))
        return None

    try:
        # parse JSON data at URL
        data = json.load(web)
    finally:
        # always close the connection, even if JSON parsing raises
        web.close()

    # this may be a list or a dict
    # don't know and don't care at this point
    return data
def worker_pkg(addr):
    '''runs ssh + synctool-pkg to the nodes in parallel

    addr: IP address (or hostname) of the target node
    Reads module globals SSH_CMD_ARR and PASS_ARGS, set up by the caller
    '''
    nodename = NODESET.get_nodename_from_address(addr)

    # use ssh connection multiplexing (if possible)
    use_multiplex = synctool.multiplex.use_mux(nodename)

    # make command array 'ssh node pkg_cmd'
    cmd_arr = SSH_CMD_ARR[:]

    # add extra arguments for ssh multiplexing (if OK to use)
    if use_multiplex:
        synctool.multiplex.ssh_args(cmd_arr, nodename)

    # '--' ends the ssh option list, so addr can not be taken as an option
    cmd_arr.append('--')
    cmd_arr.append(addr)
    cmd_arr.extend(shlex.split(param.PKG_CMD))
    cmd_arr.extend(PASS_ARGS)

    verbose('running synctool-pkg on node %s' % nodename)

    # execute ssh synctool-pkg and show output with the nodename
    if param.NUM_PROC <= 1:
        # run with -N 1 : wait on prompts, flush output
        print nodename + ': ',
        synctool.lib.exec_command(cmd_arr)
    else:
        # run_with_nodename() shows the nodename, but
        # does not expect any prompts while running the cmd
        synctool.lib.run_with_nodename(cmd_arr, nodename)
def worker_synctool(addr):
    '''run rsync of ROOTDIR to the nodes and ssh+synctool, in parallel

    addr: IP address (or hostname) of the target node
    Reads module globals OPT_SKIP_RSYNC and PASS_ARGS
    '''
    nodename = NODESET.get_nodename_from_address(addr)
    if nodename == synctool.param.NODENAME:
        # the master manages itself without going through ssh
        run_local_synctool()
        return

    # use ssh connection multiplexing (if possible)
    use_multiplex = synctool.multiplex.use_mux(nodename, addr)
    ssh_cmd_arr = shlex.split(synctool.param.SSH_CMD)
    if use_multiplex:
        synctool.multiplex.ssh_args(ssh_cmd_arr, nodename)

    # rsync ROOTDIR/dirs/ to the node
    # if "it wants it"
    if not (OPT_SKIP_RSYNC or nodename in synctool.param.NO_RSYNC):
        verbose('running rsync $SYNCTOOL/ to node %s' % nodename)

        # make rsync filter to include the correct dirs
        tmp_filename = rsync_include_filter(nodename)

        cmd_arr = shlex.split(synctool.param.RSYNC_CMD)
        cmd_arr.append('--filter=. %s' % tmp_filename)

        # add "-e ssh_cmd" to rsync command
        cmd_arr.extend(['-e', ' '.join(ssh_cmd_arr)])
        cmd_arr.append('--')
        cmd_arr.append('%s/' % synctool.param.ROOTDIR)
        cmd_arr.append('%s:%s/' % (addr, synctool.param.ROOTDIR))

        # double check the rsync destination
        # our filters are like playing with fire
        if not synctool.param.ROOTDIR or (synctool.param.ROOTDIR == os.sep):
            warning('cowardly refusing to rsync with rootdir == %s' %
                    synctool.param.ROOTDIR)
            sys.exit(-1)

        synctool.lib.run_with_nodename(cmd_arr, nodename)

        # delete temp file
        try:
            os.unlink(tmp_filename)
        except OSError:
            # silently ignore unlink error
            pass

    # run 'ssh node synctool_cmd'
    cmd_arr = ssh_cmd_arr[:]
    cmd_arr.append('--')
    cmd_arr.append(addr)
    cmd_arr.extend(shlex.split(synctool.param.SYNCTOOL_CMD))
    cmd_arr.append('--nodename=%s' % nodename)
    cmd_arr.extend(PASS_ARGS)

    verbose('running synctool on node %s' % nodename)
    synctool.lib.run_with_nodename(cmd_arr, nodename)
def upgrade(self):
    '''announce and log a full package upgrade'''
    message = 'upgrading packages'
    # show what would (or will) happen
    verbose(dryrun_msg(message))
    # log the upgrade action ...
    # don't know which packages are upgraded here, sorry
    log(message)
def _split_extension(filename, src_dir):
    '''split overlay-tree filename into SyncObject plus group importance

    filename in the overlay tree, without leading path
    src_dir is passed for the purpose of printing error messages
    Returns tuple: SyncObject, importance
    Returns (None, -1) for entries that are skipped or in error
    '''
    (name, ext) = os.path.splitext(filename)
    if not ext:
        # no extension at all: applies to all groups
        return SyncObject(filename, name, OV_NO_EXT), _group_all()

    if ext == '.post':
        (name2, ext) = os.path.splitext(name)
        if ext == '._template':
            # it's a generic template generator
            # NOTE(review): returns 'name' (with '._template' still
            # attached) rather than 'name2' -- confirm this is intended
            return SyncObject(filename, name, OV_TEMPLATE_POST), _group_all()

        # it's a generic .post script
        return SyncObject(filename, name, OV_POST), _group_all()

    if ext[:2] != '._':
        # a plain dotted extension; not a group extension
        return SyncObject(filename, filename, OV_NO_EXT), _group_all()

    ext = ext[2:]
    if not ext:
        # filename ends in a bare '._'
        return SyncObject(filename, filename, OV_NO_EXT), _group_all()

    if ext == 'template':
        return SyncObject(filename, name, OV_TEMPLATE), _group_all()

    try:
        # importance == index into MY_GROUPS; lower index == more important
        importance = synctool.param.MY_GROUPS.index(ext)
    except ValueError:
        if not ext in synctool.param.ALL_GROUPS:
            src_path = os.path.join(src_dir, filename)
            if synctool.param.TERSE:
                terse(synctool.lib.TERSE_ERROR, 'invalid group on %s' %
                                                src_path)
            else:
                stderr('unknown group on %s, skipped' % prettypath(src_path))
            return None, -1

        # it is not one of my groups
        verbose('skipping %s, it is not one of my groups' %
                prettypath(os.path.join(src_dir, filename)))
        return None, -1

    (name2, ext) = os.path.splitext(name)
    if ext == '.post':
        _, ext = os.path.splitext(name2)
        if ext == '._template':
            # it's a group-specific template generator
            return (SyncObject(filename, name2, OV_TEMPLATE_POST),
                    importance)

        # register group-specific .post script
        return SyncObject(filename, name2, OV_POST), importance

    elif ext == '._template':
        return SyncObject(filename, name2, OV_TEMPLATE), importance

    # NOTE(review): no ov_type argument here; presumably SyncObject's
    # default ov_type (regular entry) applies -- confirm
    return SyncObject(filename, name), importance
def run_local_synctool():
    '''run synctool on the master node itself'''
    node = synctool.param.NODENAME
    # build 'synctool_cmd' + pass-through arguments
    command = shlex.split(synctool.param.SYNCTOOL_CMD) + PASS_ARGS
    verbose('running synctool on node %s' % node)
    synctool.lib.run_with_nodename(command, node)
def github_api(url):
    '''Access GitHub API via URL

    Returns data (list or dict) depending on GitHub API function
    or None on error
    '''
    verbose('loading URL %s' % url)
    try:
        # can not use 'with' statement with urlopen()..?
        web = urllib2.urlopen(url)
    except urllib2.HTTPError as err:
        error('webserver at %s: %u %s' % (url, err.code, err.msg))
        return None

    except urllib2.URLError as err:
        # BUG FIX: URLError has no 'code'/'msg' attributes (those belong
        # to HTTPError); it only carries 'reason'. The old code would
        # raise AttributeError inside this handler.
        error('failed to access %s: %s' % (url, str(err.reason)))
        return None

    except IOError as err:
        error('failed to access %s: %s' % (url, err.strerror))
        return None

    try:
        # parse JSON data at URL
        data = json.load(web)
    finally:
        # always close the connection, even if JSON parsing raises
        web.close()

    # this may be a list or a dict
    # don't know and don't care at this point
    return data
def start_multiplex(address_list):
    '''run ssh -M to each node in address_list

    address_list: list of node addresses to open master connections to
    Reads/writes module global PERSIST (ssh ControlPersist value)
    Exits the program when not running on the master node
    '''
    global PERSIST

    # allow this only on the master node because of security considerations
    if synctool.param.MASTER != synctool.param.HOSTNAME:
        verbose('master %s != hostname %s' % (synctool.param.MASTER,
                                              synctool.param.HOSTNAME))
        error('not running on the master node')
        sys.exit(-1)

    if PERSIST is None:
        # use default from synctool.conf
        PERSIST = synctool.param.CONTROL_PERSIST
    else:
        # spellcheck the parameter
        m = synctool.configparser.PERSIST_TIME.match(PERSIST)
        if not m:
            error("invalid persist value '%s'" % PERSIST)
            return

    # make list of nodenames
    nodes = [NODESET.get_nodename_from_address(x) for x in address_list]

    # make list of pairs: (addr, nodename)
    pairs = zip(address_list, nodes)
    synctool.multiplex.setup_master(pairs, PERSIST)
def worker_synctool(addr):
    '''run rsync of ROOTDIR to the nodes and ssh+synctool, in parallel

    addr: IP address (or hostname) of the target node
    Reads module globals OPT_SKIP_RSYNC and PASS_ARGS
    '''
    nodename = NODESET.get_nodename_from_address(addr)
    if nodename == param.NODENAME:
        # the master manages itself without going through ssh
        run_local_synctool()
        return

    # use ssh connection multiplexing (if possible)
    use_multiplex = synctool.multiplex.use_mux(nodename)
    ssh_cmd_arr = shlex.split(param.SSH_CMD)
    if use_multiplex:
        synctool.multiplex.ssh_args(ssh_cmd_arr, nodename)

    # rsync ROOTDIR/dirs/ to the node
    # if "it wants it"
    if not (OPT_SKIP_RSYNC or nodename in param.NO_RSYNC):
        verbose('running rsync $SYNCTOOL/ to node %s' % nodename)

        # make rsync filter to include the correct dirs
        tmp_filename = rsync_include_filter(nodename)

        cmd_arr = shlex.split(param.RSYNC_CMD)
        cmd_arr.append('--filter=. %s' % tmp_filename)

        # add "-e ssh_cmd" to rsync command
        cmd_arr.extend(['-e', ' '.join(ssh_cmd_arr)])
        cmd_arr.append('--')
        cmd_arr.append('%s/' % param.ROOTDIR)
        cmd_arr.append('%s:%s/' % (addr, param.ROOTDIR))

        # double check the rsync destination
        # our filters are like playing with fire
        if not param.ROOTDIR or (param.ROOTDIR == os.sep):
            warning('cowardly refusing to rsync with rootdir == %s' %
                    param.ROOTDIR)
            sys.exit(-1)

        synctool.lib.run_with_nodename(cmd_arr, nodename)

        # delete temp file
        try:
            os.unlink(tmp_filename)
        except OSError:
            # silently ignore unlink error
            pass

    # run 'ssh node synctool_cmd'
    cmd_arr = ssh_cmd_arr[:]
    cmd_arr.append('--')
    cmd_arr.append(addr)
    cmd_arr.extend(shlex.split(param.SYNCTOOL_CMD))
    cmd_arr.append('--nodename=%s' % nodename)
    cmd_arr.extend(PASS_ARGS)

    verbose('running synctool on node %s' % nodename)
    synctool.lib.run_with_nodename(cmd_arr, nodename)
def github_api(url):
    """Access GitHub API via URL

    Returns data (list or dict) depending on GitHub API function
    or None on error
    """
    verbose("loading URL %s" % url)
    try:
        # can not use 'with' statement with urlopen()..?
        web = urllib2.urlopen(url)
    except urllib2.HTTPError as err:
        error("webserver at %s: %u %s" % (url, err.code, err.msg))
        return None

    except urllib2.URLError as err:
        # BUG FIX: URLError has no 'code'/'msg' attributes (those belong
        # to HTTPError); it only carries 'reason'. The old code would
        # raise AttributeError inside this handler.
        error("failed to access %s: %s" % (url, str(err.reason)))
        return None

    except IOError as err:
        error("failed to access %s: %s" % (url, err.strerror))
        return None

    try:
        # parse JSON data at URL
        data = json.load(web)
    finally:
        # always close the connection, even if JSON parsing raises
        web.close()

    # this may be a list or a dict
    # don't know and don't care at this point
    return data
def control(nodename, remote_addr, ctl_cmd):
    '''Tell the ssh mux process the ctl_cmd

    Returns True on success, False otherwise
    '''
    if ctl_cmd not in ('check', 'stop', 'exit'):
        raise RuntimeError("unsupported control command '%s'" % ctl_cmd)

    path = _make_control_path(nodename)
    if not path:
        # error message already printed
        return False

    verbose('sending control command %s to %s' % (ctl_cmd, nodename))

    ssh_cmd = shlex.split(synctool.param.SSH_CMD)
    ssh_cmd += ['-N', '-n', '-O', ctl_cmd, '-o', 'ControlPath=' + path]
    # if VERBOSE: don't care about ssh -v options here
    ssh_cmd += ['--', remote_addr]
    return synctool.lib.exec_command(ssh_cmd, silent=True) == 0
def _delete_callback(obj, _pre_dict, post_dict):
    # type: (SyncObject, Dict[str, str], Dict[str, str]) -> Tuple[bool, bool]
    '''delete files

    Returns tuple: (continue treewalk?, was anything deleted?)
    '''
    if obj.ov_type == synctool.overlay.OV_TEMPLATE:
        # templates are generated, never deleted here
        return generate_template(obj, post_dict), False

    # don't delete directories
    if obj.src_stat.is_dir():
#        verbose('refusing to delete directory %s' % (obj.dest_path + os.sep))
        return True, False

    if obj.dest_stat.is_dir():
        warning('destination is a directory: %s, skipped' % obj.print_src())
        return True, False

    verbose('checking %s' % obj.print_src())

    if obj.dest_stat.exists():
        vnode = obj.vnode_dest_obj()
        # harddelete() removes without making a backup copy
        vnode.harddelete()
        obj.run_script(post_dict)
        return True, True

    return True, False
def ping_node(addr):
    # type: (str) -> None
    '''ping a single node and print whether it is up or not responding'''
    node = NODESET.get_nodename_from_address(addr)
    verbose('pinging %s' % node)
    unix_out('%s %s' % (param.PING_CMD, addr))

    packets_received = 0

    # execute ping command and show output with the nodename
    cmd = '%s %s' % (param.PING_CMD, addr)
    cmd_arr = shlex.split(cmd)

    try:
        f = subprocess.Popen(cmd_arr, shell=False, bufsize=4096,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT).stdout
    except OSError as err:
        error('failed to run command %s: %s' % (cmd_arr[0], err.strerror))
        return

    with f:
        for line in f:
            line = line.strip()

            # argh, we have to parse output here
            #
            # on BSD, ping says something like:
            # "2 packets transmitted, 0 packets received, 100.0% packet loss"
            #
            # on Linux, ping says something like:
            # "2 packets transmitted, 0 received, 100.0% packet loss, " \
            # "time 1001ms"

            arr = line.split()
            if len(arr) > 3 and (arr[1] == 'packets' and
                                 arr[2] == 'transmitted,'):
                try:
                    packets_received = int(arr[3])
                except ValueError:
                    pass

                break

            # some ping implementations say "hostname is alive"
            # or "hostname is unreachable"
            elif len(arr) == 3 and arr[1] == 'is':
                if arr[2] == 'alive':
                    packets_received = 100

                elif arr[2] == 'unreachable':
                    packets_received = -1

    if packets_received > 0:
        print '%s: up' % node
    else:
        print '%s: not responding' % node
def use_mux(nodename):
    '''Returns True if it's OK to use a master connection to node
    Otherwise returns False -> don't use multiplexing
    '''
    control_path = _make_control_path(nodename)
    if not control_path:
        # error message already printed
        return False

    # see if the control path already exists
    statbuf = synctool.syncstat.SyncStat(control_path)
    if statbuf.exists():
        if not statbuf.is_sock():
            warning('control path %s: not a socket file' % control_path)
            return False

        # only trust a socket that we own ourselves
        if statbuf.uid != os.getuid():
            warning('control path: %s: incorrect owner uid %u' %
                    (control_path, statbuf.uid))
            return False

        # reject group/other-accessible sockets: anyone with access
        # could piggyback on our ssh master connection
        if statbuf.mode & 077 != 0:
            warning('control path %s: suspicious file mode %04o' %
                    (control_path, statbuf.mode & 0777))
            return False

        verbose('control path %s already exists' % control_path)
        return True
def _delete_callback(obj, post_dict, dir_changed, *args):
    '''delete files

    Returns tuple: (continue treewalk?, was anything deleted/changed?)
    '''
    if obj.ov_type == synctool.overlay.OV_TEMPLATE:
        # templates are generated, never deleted here
        return generate_template(obj, post_dict), False

    # don't delete directories
    if obj.src_stat.is_dir():
#        verbose('refusing to delete directory %s' % (obj.dest_path + os.sep))
        # but do run its .post script if something under it changed
        if dir_changed and obj.dest_path in post_dict:
            _run_post(obj, post_dict[obj.dest_path])

        return True, dir_changed

    if obj.dest_stat.is_dir():
        stderr('destination is a directory: %s, skipped' % obj.print_src())
        return True, False

    verbose('checking %s' % obj.print_src())

    if obj.dest_stat.exists():
        vnode = obj.vnode_dest_obj()
        # harddelete() removes without making a backup copy
        vnode.harddelete()
        if obj.dest_path in post_dict:
            _run_post(obj, post_dict[obj.dest_path])
        return True, True

    return True, False
def worker_ssh(addr): '''worker process: sync script and run ssh+command to the node''' # Note that this func even runs ssh to the local node if # the master is also managed by synctool # This is completely intentional, and it resolves certain # issues with shell quoted commands on the dsh cmd line nodename = NODESET.get_nodename_from_address(addr) # use ssh connection multiplexing (if possible) use_multiplex = synctool.multiplex.use_mux(nodename, addr) if (SYNC_IT and not (OPT_SKIP_RSYNC or nodename in synctool.param.NO_RSYNC)): # first, sync the script to the node using rsync # REMOTE_CMD_ARR[0] is the full path to the cmd in SCRIPT_DIR verbose('running rsync $SYNCTOOL/scripts/%s to node %s' % (os.path.basename(REMOTE_CMD_ARR[0]), nodename)) cmd_arr = shlex.split(synctool.param.RSYNC_CMD) # add "-e ssh_cmd" to rsync command ssh_cmd_arr = shlex.split(synctool.param.SSH_CMD) if use_multiplex: synctool.multiplex.ssh_args(ssh_cmd_arr, nodename) cmd_arr.extend(['-e', ' '.join(ssh_cmd_arr)]) cmd_arr.append('--') cmd_arr.append('%s' % REMOTE_CMD_ARR[0]) cmd_arr.append('%s:%s' % (addr, REMOTE_CMD_ARR[0])) synctool.lib.run_with_nodename(cmd_arr, nodename) cmd_str = ' '.join(REMOTE_CMD_ARR) # create local copy # or else parallelism may screw things up ssh_cmd_arr = SSH_CMD_ARR[:] verbose('running %s to %s %s' % (os.path.basename(SSH_CMD_ARR[0]), nodename, cmd_str)) # add extra arguments for ssh multiplexing (if OK to use) if use_multiplex: synctool.multiplex.ssh_args(ssh_cmd_arr, nodename) ssh_cmd_arr.append('--') ssh_cmd_arr.append(addr) ssh_cmd_arr.extend(REMOTE_CMD_ARR) # execute ssh+remote command and show output with the nodename if synctool.param.NUM_PROC <= 1: # run with -N 1 : wait on prompts, flush output print nodename + ': ', synctool.lib.exec_command(ssh_cmd_arr) else: # run_with_nodename() shows the nodename, but # does not expect any prompts while running the cmd synctool.lib.run_with_nodename(ssh_cmd_arr, nodename)
def remove(self, pkgs):
    '''announce and log removal of the given packages'''
    plural = 's' if len(pkgs) > 1 else ''
    msg = 'removing package%s: %s' % (plural, ' '.join(pkgs))
    verbose(msg)
    log(msg)
def install(self, pkgs):
    '''announce and log installation of the given packages'''
    plural = 's' if len(pkgs) > 1 else ''
    msg = 'installing package%s: %s' % (plural, ' '.join(pkgs))
    verbose(msg)
    log(msg)
def quiet_delete(self):
    '''silently delete existing entry; only called by fix()'''
    # do nothing on dry runs, and keep the file when backup copies are on
    if synctool.lib.DRY_RUN or synctool.param.BACKUP_COPIES:
        return

    verbose(' os.unlink(%s)' % self.name)
    try:
        os.unlink(self.name)
    except OSError:
        # best effort; ignore failure to unlink
        pass
def _remote_stat(up): '''Get stat info of the remote object Returns array of RemoteStat data, or None on error ''' # use ssh connection multiplexing (if possible) cmd_arr = shlex.split(synctool.param.SSH_CMD) use_multiplex = synctool.multiplex.use_mux(up.node) if use_multiplex: synctool.multiplex.ssh_args(cmd_arr, up.node) list_cmd = os.path.join(synctool.param.ROOTDIR, 'sbin', 'synctool_list.py') cmd_arr.extend(['--', up.address, list_cmd, up.filename]) verbose('running synctool_list %s:%s' % (up.node, up.filename)) unix_out(' '.join(cmd_arr)) try: proc = subprocess.Popen(cmd_arr, shell=False, bufsize=4096, stdout=subprocess.PIPE, stderr=subprocess.PIPE) except OSError as err: error('failed to run command %s: %s' % (cmd_arr[0], err.strerror)) return None out, err = proc.communicate() if proc.returncode == 255: error('ssh connection to %s failed' % up.node) return None elif proc.returncode == 127: error('remote list command failed') return None # parse synctool_list output into array of RemoteStat info data = [] for line in out.split('\n'): if not line: continue arr = line.split() if arr[0] == 'error:': # relay error message error(' '.join(arr[1:])) return None try: remote_stat = RemoteStat(arr) except ValueError: error('unexpected output from synctool_list %s:%s' % (up.node, up.filename)) return None verbose('remote: %r' % remote_stat) data.append(remote_stat) return data
def _exec_diff(src, dest):
    '''execute diff_cmd to display diff between dest and src'''
    verbose('%s %s %s' % (param.DIFF_CMD, dest, prettypath(src)))

    # build: diff_cmd dest src
    diff_cmd = shlex.split(param.DIFF_CMD) + [dest, src]
    synctool.lib.exec_command(diff_cmd)
def list(self, pkgs=None):
    '''announce listing of packages (all, or the given subset)'''
    if not pkgs:
        verbose('list all packages')
        return

    plural = 's' if len(pkgs) > 1 else ''
    verbose('list package%s: %s' % (plural, ' '.join(pkgs)))
def _exec_diff(src, dest):
    '''execute diff_cmd to display diff between dest and src'''
    verbose('%s %s %s' % (synctool.param.DIFF_CMD, dest, prettypath(src)))

    # build: diff_cmd dest src
    diff_cmd = shlex.split(synctool.param.DIFF_CMD) + [dest, src]
    synctool.lib.exec_command(diff_cmd)
def worker_synctool(addr):
    '''run rsync of ROOTDIR to the nodes and ssh+synctool, in parallel

    addr: IP address (or hostname) of the target node
    Reads module globals OPT_SKIP_RSYNC and PASS_ARGS
    '''
    nodename = NODESET.get_nodename_from_address(addr)
    if nodename == synctool.param.NODENAME:
        # the master manages itself without going through ssh
        run_local_synctool()
        return

    # rsync ROOTDIR/dirs/ to the node
    # if "it wants it"
    if not (OPT_SKIP_RSYNC or nodename in synctool.param.NO_RSYNC):
        verbose('running rsync $SYNCTOOL/ to node %s' % nodename)
        unix_out('%s %s %s:%s/' % (synctool.param.RSYNC_CMD,
                                   synctool.param.ROOTDIR, addr,
                                   synctool.param.ROOTDIR))

        # make rsync filter to include the correct dirs
        tmp_filename = rsync_include_filter(nodename)

        cmd_arr = shlex.split(synctool.param.RSYNC_CMD)
        cmd_arr.append('--filter=. %s' % tmp_filename)
        cmd_arr.append('--')
        cmd_arr.append('%s/' % synctool.param.ROOTDIR)
        cmd_arr.append('%s:%s/' % (addr, synctool.param.ROOTDIR))

        # double check the rsync destination
        # our filters are like playing with fire
        if not synctool.param.ROOTDIR or (
                synctool.param.ROOTDIR == os.sep):
            stderr('cowardly refusing to rsync with rootdir == %s' %
                   synctool.param.ROOTDIR)
            sys.exit(-1)

        synctool.lib.run_with_nodename(cmd_arr, nodename)

        # delete temp file
        try:
            os.unlink(tmp_filename)
        except OSError:
            # silently ignore unlink error
            pass

    # run 'ssh node synctool_cmd'
    cmd_arr = shlex.split(synctool.param.SSH_CMD)
    cmd_arr.append('--')
    cmd_arr.append(addr)
    cmd_arr.extend(shlex.split(synctool.param.SYNCTOOL_CMD))
    cmd_arr.append('--nodename=%s' % nodename)
    cmd_arr.extend(PASS_ARGS)

    verbose('running synctool on node %s' % nodename)
    unix_out(' '.join(cmd_arr))
    synctool.lib.run_with_nodename(cmd_arr, nodename)
def copy_stat(self):
    '''set access and mod times'''
    # only on real runs, and only when sync_times is configured
    if synctool.lib.DRY_RUN or not synctool.param.SYNC_TIMES:
        return

    try:
        verbose('copystat: %s => %s' % (self.src_path, self.name))
        shutil.copystat(self.src_path, self.name)
    except OSError as err:
        error('failed to set utime on %s : %s' % (self.name, err.strerror))
        terse(synctool.lib.TERSE_FAIL, 'utime %s' % self.name)
def remove(self, pkgs):
    # type: (List[str]) -> None
    '''remove list of packages'''
    plural = 's' if len(pkgs) > 1 else ''
    msg = 'removing package%s: %s' % (plural, ' '.join(pkgs))
    verbose(msg)
    log(msg)
def quiet_delete(self):
    '''silently delete directory; only called by fix()'''
    # do nothing on dry runs, and keep the dir when backup copies are on
    if synctool.lib.DRY_RUN or synctool.param.BACKUP_COPIES:
        return

    verbose(' os.rmdir(%s)' % self.name)
    try:
        os.rmdir(self.name)
    except OSError:
        # probably directory not empty
        # refuse to delete dir, just move it aside
        verbose('refusing to delete directory %s' % self.name)
        self.move_saved()
def _overlay_callback(obj, pre_dict, post_dict):
    '''compare files and run post-script if needed

    Returns pair: True (continue), updated (data or metadata)
    '''
    # templates are generated rather than compared/fixed
    if obj.ov_type == synctool.overlay.OV_TEMPLATE:
        return generate_template(obj, post_dict), False

    verbose('checking %s' % obj.print_src())
    needed = obj.check()
    return True, obj.fix(needed, pre_dict, post_dict)
def list(self, pkgs=None):
    # type: (List[str]) -> None
    '''output list of packages'''
    if not pkgs:
        verbose('list all packages')
        return

    plural = 's' if len(pkgs) > 1 else ''
    verbose('list package%s: %s' % (plural, ' '.join(pkgs)))
def mkdir_basepath(self):
    '''call mkdir -p to create leading path'''
    if synctool.lib.DRY_RUN:
        return

    parent = os.path.dirname(self.name)

    # be a bit quiet about it
    if synctool.lib.VERBOSE or synctool.lib.UNIX_CMD:
        verbose('making directory %s' % prettypath(parent))

    synctool.lib.mkdir_p(parent)
def set_permissions(self): '''set access permission bits equal to source''' verbose(dryrun_msg(' os.chmod(%s, %04o)' % (self.name, self.stat.mode & 07777))) unix_out('chmod 0%o %s' % (self.stat.mode & 07777, self.name)) if not synctool.lib.DRY_RUN: try: os.chmod(self.name, self.stat.mode & 07777) except OSError as err: error('failed to chmod %04o %s : %s' % (self.stat.mode & 07777, self.name, err.strerror)) terse(synctool.lib.TERSE_FAIL, 'mode %s' % self.name)
def create(self): '''make a fifo''' verbose(dryrun_msg(' os.mkfifo(%s)' % self.name)) unix_out('mkfifo %s' % self.name) terse(synctool.lib.TERSE_NEW, self.name) if not synctool.lib.DRY_RUN: try: os.mkfifo(self.name, self.stat.mode & 0777) except OSError as err: error('failed to create fifo %s : %s' % (self.name, err.strerror)) terse(TERSE_FAIL, 'fifo %s' % self.name)
def detect_ssh():
    # type: () -> int
    '''detect ssh version

    Set global SSH_VERSION to 2-digit int number:
    eg. version "5.6p1" -> SSH_VERSION = 56
    Returns: SSH_VERSION
    This routine only works for OpenSSH; otherwise return -1
    '''
    global SSH_VERSION

    # cached from an earlier call?
    if SSH_VERSION is not None:
        return SSH_VERSION

    cmd_arr = shlex.split(synctool.param.SSH_CMD)
    # only use first item: the path to the ssh command
    cmd_arr = cmd_arr[:1]
    cmd_arr.append('-V')
    unix_out(' '.join(cmd_arr))
    try:
        # OpenSSH may print version information on stderr
        proc = subprocess.Popen(cmd_arr, shell=False,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
    except OSError as err:
        error('failed to execute %s: %s' % (cmd_arr[0], err.strerror))
        SSH_VERSION = -1
        return SSH_VERSION

    # stderr was redirected to stdout
    data, _ = proc.communicate()
    if not data:
        SSH_VERSION = -1
        return SSH_VERSION

    data = data.strip()
    verbose('ssh version string: ' + data)

    # data should be a single line matching "OpenSSH_... SSL ... date\n"
    m = MATCH_SSH_VERSION.match(data)
    if not m:
        SSH_VERSION = -1
        return SSH_VERSION

    # combine major and minor into a two-digit number: "5.6" -> 56
    groups = m.groups()
    SSH_VERSION = int(groups[0]) * 10 + int(groups[1])
    verbose('SSH_VERSION: %d' % SSH_VERSION)
    return SSH_VERSION
def create(self): # type: () -> None '''make a fifo''' verbose(dryrun_msg(' os.mkfifo(%s)' % self.name)) unix_out('mkfifo %s' % self.name) terse(synctool.lib.TERSE_NEW, self.name) if not synctool.lib.DRY_RUN: try: os.mkfifo(self.name, self.stat.mode & 0777) except OSError as err: error('failed to create fifo %s : %s' % (self.name, err.strerror)) terse(TERSE_FAIL, 'fifo %s' % self.name)
def run_dsh(address_list, remote_cmd_arr):
    # type: (List[str], List[str]) -> None
    '''run remote command to a set of nodes using ssh (param ssh_cmd)

    address_list: node addresses to run on
    remote_cmd_arr: the command and its arguments
    Sets module globals SSH_CMD_ARR, REMOTE_CMD_ARR, SYNC_IT used by
    the worker
    '''
    global SSH_CMD_ARR, REMOTE_CMD_ARR, SYNC_IT

    # if the command is under scripts/, assume its full path
    # This is nice because scripts/ isn't likely to be in PATH
    # It is moderately evil however, because it's not 100% correct
    # but it's reliable enough to keep in here
    full_path = synctool.lib.search_path(remote_cmd_arr[0])
    if not full_path:
        # command was not found in PATH
        # look under scripts/
        full_path = os.path.join(param.SCRIPT_DIR, remote_cmd_arr[0])
        # sync the script to the node
        SYNC_IT = True
    elif (full_path[:len(param.SCRIPT_DIR)+1] ==
          param.SCRIPT_DIR + os.sep):
        # found in PATH, but it lives under scripts/: sync it too
        SYNC_IT = True

    try:
        if not (os.path.isfile(full_path) and
                os.access(full_path, os.X_OK)):
            # not an executable file
            # must be wrong, do not bother syncing it
            # Note that syncing wrong paths with rsync --delete
            # is dangerous
            verbose('%s: not an executable file' % full_path)
            SYNC_IT = False
        else:
            # found the command under scripts/
            remote_cmd_arr[0] = full_path
    except OSError as err:
        verbose('%s: %s' % (full_path, err.strerror))
        SYNC_IT = False

    SSH_CMD_ARR = shlex.split(param.SSH_CMD)
    if SSH_OPTIONS:
        SSH_CMD_ARR.extend(shlex.split(SSH_OPTIONS))

    # if -N 1, force tty allocation
    if param.NUM_PROC <= 1 and '-t' not in SSH_CMD_ARR:
        SSH_CMD_ARR.append('-t')
        # remove option -T (disable tty allocation)
        if '-T' in SSH_CMD_ARR:
            SSH_CMD_ARR.remove('-T')

    REMOTE_CMD_ARR = remote_cmd_arr

    synctool.parallel.do(worker_ssh, address_list)
def create(self):
    '''create symbolic link'''
    verbose(dryrun_msg(' os.symlink(%s, %s)' % (self.oldpath, self.name)))
    unix_out('ln -s %s %s' % (self.oldpath, self.name))
    terse(synctool.lib.TERSE_LINK, self.name)
    if synctool.lib.DRY_RUN:
        return

    try:
        os.symlink(self.oldpath, self.name)
    except OSError as err:
        error('failed to create symlink %s -> %s : %s' %
              (self.name, self.oldpath, err.strerror))
        terse(synctool.lib.TERSE_FAIL, 'link %s' % self.name)
def create(self):
    '''create symbolic link'''
    verbose(dryrun_msg(' os.symlink(%s, %s)' % (self.oldpath, self.name)))
    unix_out('ln -s %s %s' % (self.oldpath, self.name))
    terse(synctool.lib.TERSE_LINK, self.name)
    if synctool.lib.DRY_RUN:
        return

    try:
        os.symlink(self.oldpath, self.name)
    except OSError as err:
        error('failed to create symlink %s -> %s : %s' %
              (self.name, self.oldpath, err.strerror))
        terse(TERSE_FAIL, 'link %s' % self.name)
def purge_files():
    # type: () -> None
    '''run the purge function

    rsyncs the per-group purge/ trees to their destinations
    '''
    paths = []
    purge_groups = os.listdir(param.PURGE_DIR)

    # find the source purge paths that we need to copy
    # scan only the group dirs that apply
    for g in param.MY_GROUPS:
        if g in purge_groups:
            purge_root = os.path.join(param.PURGE_DIR, g)
            if not os.path.isdir(purge_root):
                continue

            for path, subdirs, files in os.walk(purge_root):
                # rsync only purge dirs that actually contain files
                # otherwise rsync --delete would wreak havoc
                if not files:
                    continue

                if path == purge_root:
                    # root contains files; guard against user mistakes
                    # rsync --delete would destroy the whole filesystem
                    warning('cowardly refusing to purge the root directory')
                    stderr('please remove any files directly under %s/' %
                           prettypath(purge_root))
                    return

                # paths has (src_dir, dest_dir)
                paths.append((path, path[len(purge_root):]))

                # do not recurse into this dir any deeper
                del subdirs[:]

    cmd_rsync, opts_string = _make_rsync_purge_cmd()

    # call rsync to copy the purge dirs
    for src, dest in paths:
        # trailing slash on source path is important for rsync
        src += os.sep
        dest += os.sep
        cmd_arr = cmd_rsync[:]
        cmd_arr.append(src)
        cmd_arr.append(dest)
        verbose('running rsync%s%s %s' % (opts_string, prettypath(src),
                                          dest))
        _run_rsync_purge(cmd_arr)
def set_owner(self):
    # type: () -> None
    '''set ownership equal to source'''
    uid = self.stat.uid
    gid = self.stat.gid
    verbose(dryrun_msg(' os.chown(%s, %d, %d)' % (self.name, uid, gid)))
    unix_out('chown %s.%s %s' % (self.stat.ascii_uid(),
                                 self.stat.ascii_gid(), self.name))
    if synctool.lib.DRY_RUN:
        return

    try:
        os.chown(self.name, uid, gid)
    except OSError as err:
        error('failed to chown %s.%s %s : %s' %
              (self.stat.ascii_uid(), self.stat.ascii_gid(),
               self.name, err.strerror))
        terse(TERSE_FAIL, 'owner %s' % self.name)
def create(self):
    # type: () -> None
    '''copy file'''
    # only announce as 'new' when the destination did not exist yet
    if not self.exists:
        terse(synctool.lib.TERSE_NEW, self.name)

    verbose(dryrun_msg(' copy %s %s' % (self.src_path, self.name)))
    unix_out('cp %s %s' % (self.src_path, self.name))
    if synctool.lib.DRY_RUN:
        return

    try:
        # copy file
        shutil.copy(self.src_path, self.name)
    except (OSError, IOError) as err:
        error('failed to copy %s to %s: %s' %
              (prettypath(self.src_path), self.name, err.strerror))
        terse(TERSE_FAIL, self.name)