Example #1
    def compare(self, _src_path, dest_stat):
        # type: (str, SyncStat) -> bool
        '''see if devs are the same'''

        if not self.exists:
            return False

        # dest_stat is a SyncStat object and it's useless here
        # I need a real, fresh statbuf that includes st_rdev field
        try:
            dest_stat = os.lstat(self.name)
        except OSError as err:
            error('error checking %s : %s' % (self.name, err.strerror))
            return False

        # Note: mypy triggers false errors here
        # Also, no luck with Union[SyncStat, posix.stat_result]
        # In any case, for VNodeChrDev and VNodeBlkDev,
        # the self.src_stat is of type posix.stat_result
        src_major = os.major(self.src_stat.st_rdev)     # type: ignore
        src_minor = os.minor(self.src_stat.st_rdev)     # type: ignore
        dest_major = os.major(dest_stat.st_rdev)        # type: ignore
        dest_minor = os.minor(dest_stat.st_rdev)        # type: ignore
        if src_major != dest_major or src_minor != dest_minor:
            stdout('%s should have major,minor %d,%d but has %d,%d' %
                   (self.name, src_major, src_minor, dest_major, dest_minor))
            unix_out('# updating major,minor %s' % self.name)
            terse(synctool.lib.TERSE_SYNC, self.name)
            return False

        return True
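
A side note on the compare() above: os.major() and os.minor() simply split the st_rdev field of a stat result into the device's major,minor pair. A minimal standalone sketch of that decomposition (the /dev/null path is only an illustration and assumes a Unix system):

import os

def device_numbers(path):
    '''return (major, minor) of a block or character device, or None'''
    try:
        statbuf = os.lstat(path)
    except OSError as err:
        print('cannot stat %s: %s' % (path, err.strerror))
        return None
    return os.major(statbuf.st_rdev), os.minor(statbuf.st_rdev)

# on Linux this typically prints (1, 3) for /dev/null
print(device_numbers('/dev/null'))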
Example #2
def main():
    # type: () -> None
    '''run the program'''

    param.init()

    sys.stdout = synctool.unbuffered.Unbuffered(sys.stdout) # type: ignore
    sys.stderr = synctool.unbuffered.Unbuffered(sys.stderr) # type: ignore

    try:
        get_options()
    except synctool.range.RangeSyntaxError as err:
        error(str(err))
        sys.exit(1)

    if OPT_AGGREGATE:
        if not synctool.aggr.run(MASTER_OPTS):
            sys.exit(-1)

        sys.exit(0)

    config.init_mynodename()

    address_list = NODESET.addresses()
    if not address_list:
        error('no valid nodes specified')
        sys.exit(1)

    ping_nodes(address_list)
Example #3
    def stat(self, path):
        '''get the stat() information for a pathname'''

        if not path:
            self.entry_exists = False
            self.mode = self.uid = self.gid = self.size = None
            return

        try:
            statbuf = os.lstat(path)
        except OSError as err:
            # could be something stupid like "Permission denied" ...
            # although synctool should be run as root

            if err.errno != errno.ENOENT:
                # "No such file or directory" is a valid error
                # when the destination is missing
                error('stat(%s) failed: %s' % (path, err.strerror))

            self.entry_exists = False
            self.mode = self.uid = self.gid = self.size = None

        else:
            self.entry_exists = True

            self.mode = statbuf.st_mode
            self.uid = statbuf.st_uid
            self.gid = statbuf.st_gid
            self.size = statbuf.st_size
Example #4
    def compare(self, _src_path, dest_stat):
        # type: (str, SyncStat) -> bool
        '''see if devs are the same'''

        if not self.exists:
            return False

        # dest_stat is a SyncStat object and it's useless here
        # I need a real, fresh statbuf that includes st_rdev field
        try:
            dest_stat = os.lstat(self.name)
        except OSError as err:
            error('error checking %s : %s' % (self.name, err.strerror))
            return False

        src_major = os.major(self.src_stat.st_rdev)     # type: ignore
        src_minor = os.minor(self.src_stat.st_rdev)     # type: ignore
        dest_major = os.major(dest_stat.st_rdev)        # type: ignore
        dest_minor = os.minor(dest_stat.st_rdev)        # type: ignore
        if src_major != dest_major or src_minor != dest_minor:
            stdout('%s should have major,minor %d,%d but has %d,%d' %
                   (self.name, src_major, src_minor, dest_major, dest_minor))
            unix_out('# updating major,minor %s' % self.name)
            terse(synctool.lib.TERSE_SYNC, self.name)
            return False

        return True
Example #5
def main():
    '''run the program'''

    param.init()

    sys.stdout = synctool.unbuffered.Unbuffered(sys.stdout)
    sys.stderr = synctool.unbuffered.Unbuffered(sys.stderr)

    try:
        files = get_options()
    except synctool.range.RangeSyntaxError as err:
        error(str(err))
        sys.exit(1)

    if OPT_AGGREGATE:
        if not synctool.aggr.run(MASTER_OPTS):
            sys.exit(-1)

        sys.exit(0)

    config.init_mynodename()

    address_list = NODESET.addresses()
    if not address_list:
        error('no valid nodes specified')
        sys.exit(1)

    run_remote_copy(address_list, files)
Example #6
def do(func, work):
    '''run func in parallel'''

    if synctool.param.SLEEP_TIME != 0:
        synctool.param.NUM_PROC = 1

    # 'part' becomes amount of work for each rank to do
    len_work = len(work)
    if len_work <= synctool.param.NUM_PROC:
        num_proc = len_work
        part = 1
    else:
        num_proc = synctool.param.NUM_PROC
        part = len_work / num_proc
        if len_work % num_proc != 0:
            part += 1

    # spawn pool of workers
    for rank in xrange(num_proc):
        try:
            pid = os.fork()
        except OSError as err:
            error('failed to fork(): %s' % err.strerror)
            return

        if pid == 0:
            # child process
            worker(rank, func, work, part)
            sys.exit(0)

        # parent process
        ALL_PIDS.add(pid)

    # wait for all workers to exit
    join()
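
The 'part' calculation in do() is a ceiling division, so every rank gets at most 'part' items. How worker() consumes its share is not shown here; a plausible reading (an assumption, not taken from the synctool source) is that rank r processes the slice work[r * part:(r + 1) * part]:

def partition(work, num_proc):
    '''split work into at most num_proc chunks, mirroring the 'part'
    calculation in do() but without the forking'''
    len_work = len(work)
    if len_work <= num_proc:
        num_proc = len_work
        part = 1
    else:
        part = len_work // num_proc
        if len_work % num_proc != 0:
            part += 1
    return [work[rank * part:(rank + 1) * part] for rank in range(num_proc)]

# 7 work items over 3 ranks -> chunks of 3, 3 and 1 items
print(partition(['node%d' % n for n in range(7)], 3))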
Example #7
def main():
    '''run the program'''

    synctool.param.init()

    sys.stdout = synctool.unbuffered.Unbuffered(sys.stdout)
    sys.stderr = synctool.unbuffered.Unbuffered(sys.stderr)

    try:
        cmd_args = get_options()
    except synctool.range.RangeSyntaxError as err:
        error(str(err))
        sys.exit(1)

    if OPT_AGGREGATE:
        if not synctool.aggr.run(MASTER_OPTS):
            sys.exit(-1)

        sys.exit(0)

    synctool.config.init_mynodename()

    address_list = NODESET.addresses()
    if not address_list:
        error('no valid nodes specified')
        sys.exit(1)

    if OPT_MULTIPLEX:
        start_multiplex(address_list)
    elif CTL_CMD is not None:
        control_multiplex(address_list, CTL_CMD)
    else:
        run_dsh(address_list, cmd_args)
Example #8
def main():
    # type: () -> None
    '''run the program'''

    param.init()

    sys.stdout = synctool.unbuffered.Unbuffered(sys.stdout)  # type: ignore
    sys.stderr = synctool.unbuffered.Unbuffered(sys.stderr)  # type: ignore

    try:
        files = get_options()
    except synctool.range.RangeSyntaxError as err:
        error(str(err))
        sys.exit(1)

    if OPT_AGGREGATE:
        if not synctool.aggr.run(MASTER_OPTS):
            sys.exit(-1)

        sys.exit(0)

    config.init_mynodename()

    address_list = NODESET.addresses()
    if not address_list:
        error('no valid nodes specified')
        sys.exit(1)

    run_remote_copy(address_list, files)
Example #9
def do(func, work):
    """run func in parallel"""

    if synctool.param.SLEEP_TIME != 0:
        synctool.param.NUM_PROC = 1

    # 'part' becomes amount of work for each rank to do
    len_work = len(work)
    if len_work <= synctool.param.NUM_PROC:
        num_proc = len_work
        part = 1
    else:
        num_proc = synctool.param.NUM_PROC
        part = len_work / num_proc
        if len_work % num_proc != 0:
            part += 1

    # spawn pool of workers
    for rank in xrange(num_proc):
        try:
            pid = os.fork()
        except OSError as err:
            error("failed to fork(): %s" % err.strerror)
            return

        if pid == 0:
            # child process
            worker(rank, func, work, part)
            sys.exit(0)

        # parent process
        ALL_PIDS.add(pid)

    # wait for all workers to exit
    join()
Example #10
def start_multiplex(address_list):
    '''run ssh -M to each node in address_list'''

    global PERSIST

    # allow this only on the master node because of security considerations
    if synctool.param.MASTER != synctool.param.HOSTNAME:
        verbose('master %s != hostname %s' %
                (synctool.param.MASTER, synctool.param.HOSTNAME))
        error('not running on the master node')
        sys.exit(-1)

    if PERSIST is None:
        # use default from synctool.conf
        PERSIST = synctool.param.CONTROL_PERSIST
    else:
        # spellcheck the parameter
        m = synctool.configparser.PERSIST_TIME.match(PERSIST)
        if not m:
            error("invalid persist value '%s'" % PERSIST)
            return

    # make list of nodenames
    nodes = [NODESET.get_nodename_from_address(x) for x in address_list]

    # make list of pairs: (addr, nodename)
    pairs = zip(address_list, nodes)
    synctool.multiplex.setup_master(pairs, PERSIST)
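
The PERSIST_TIME pattern itself is defined in synctool.configparser and is not shown here. ssh's ControlPersist setting accepts 'yes', 'no', a number of seconds, or a duration such as '1h30m'; a hypothetical validator along those lines (the real pattern may differ) could be:

import re

# hypothetical stand-in for synctool.configparser.PERSIST_TIME
PERSIST_TIME = re.compile(r'^(yes|no|\d+|(\d+[wdhms])+)$')

for value in ('yes', '3600', '1h30m', 'bogus'):
    print('%-8s %s' % (value, 'ok' if PERSIST_TIME.match(value) else 'invalid'))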
Example #11
    def compare(self, src_path, dest_stat):
        '''see if devs are the same'''

        if not self.exists:
            return False

        # dest_stat is a SyncStat object and it's useless here
        # I need a real, fresh statbuf that includes st_rdev field
        try:
            dest_stat = os.lstat(self.name)
        except OSError as err:
            error('error checking %s : %s' % (self.name, err.strerror))
            return False

        src_major = os.major(self.src_stat.st_rdev)
        src_minor = os.minor(self.src_stat.st_rdev)
        dest_major = os.major(dest_stat.st_rdev)
        dest_minor = os.minor(dest_stat.st_rdev)
        if src_major != dest_major or src_minor != dest_minor:
            stdout('%s should have major,minor %d,%d but has %d,%d' %
                (self.name, src_major, src_minor, dest_major, dest_minor))
            unix_out('# updating major,minor %s' % self.name)
            terse(synctool.lib.TERSE_SYNC, self.name)
            return False

        return True
Example #12
def option_combinations(opt_diff, opt_single, opt_reference, opt_erase_saved,
                        opt_upload, opt_suffix, opt_fix):
    # type: (bool, bool, bool, bool, bool, bool, bool) -> None
    '''some combinations of command-line options don't make sense;
    alert the user and abort
    '''

    if opt_erase_saved and (opt_diff or opt_reference or opt_upload):
        error("option --erase-saved can not be combined with other actions")
        sys.exit(1)

    if opt_upload and (opt_diff or opt_single or opt_reference):
        error("option --upload can not be combined with other actions")
        sys.exit(1)

    if opt_suffix and not opt_upload:
        error("option --suffix can only be used together with --upload")
        sys.exit(1)

    if opt_diff and (opt_single or opt_reference or opt_fix):
        error("option --diff can not be combined with other actions")
        sys.exit(1)

    if opt_reference and (opt_single or opt_fix):
        error("option --reference can not be combined with other actions")
        sys.exit(1)
Example #13
def main():
    '''run the program'''

    param.init()

    sys.stdout = synctool.unbuffered.Unbuffered(sys.stdout)
    sys.stderr = synctool.unbuffered.Unbuffered(sys.stderr)

    try:
        get_options()
    except synctool.range.RangeSyntaxError as err:
        error(str(err))
        sys.exit(1)

    if OPT_AGGREGATE:
        if not synctool.aggr.run(MASTER_OPTS):
            sys.exit(-1)

        sys.exit(0)

    config.init_mynodename()

    address_list = NODESET.addresses()
    if not address_list:
        error('no valid nodes specified')
        sys.exit(1)

    ping_nodes(address_list)
Example #14
def ping_node(addr):
    # type: (str) -> None
    '''ping a single node'''

    node = NODESET.get_nodename_from_address(addr)
    verbose('pinging %s' % node)
    unix_out('%s %s' % (param.PING_CMD, addr))

    packets_received = 0

    # execute ping command and show output with the nodename
    cmd = '%s %s' % (param.PING_CMD, addr)
    cmd_arr = shlex.split(cmd)

    try:
        f = subprocess.Popen(cmd_arr,
                             shell=False,
                             bufsize=4096,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT).stdout
    except OSError as err:
        error('failed to run command %s: %s' % (cmd_arr[0], err.strerror))
        return

    with f:
        for line in f:
            line = line.strip()

            # argh, we have to parse output here
            #
            # on BSD, ping says something like:
            # "2 packets transmitted, 0 packets received, 100.0% packet loss"
            #
            # on Linux, ping says something like:
            # "2 packets transmitted, 0 received, 100.0% packet loss, " \
            # "time 1001ms"

            arr = line.split()
            if len(arr) > 3 and (arr[1] == 'packets'
                                 and arr[2] == 'transmitted,'):
                try:
                    packets_received = int(arr[3])
                except ValueError:
                    pass

                break

            # some ping implementations say "hostname is alive"
            # or "hostname is unreachable"
            elif len(arr) == 3 and arr[1] == 'is':
                if arr[2] == 'alive':
                    packets_received = 100

                elif arr[2] == 'unreachable':
                    packets_received = -1

    if packets_received > 0:
        print '%s: up' % node
    else:
        print '%s: not responding' % node
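
The parsing branch above can be exercised on its own with canned summary lines; a small sketch covering the BSD/Linux format and the 'hostname is alive' style mentioned in the comments:

def parse_ping_line(line):
    '''return packets received according to one ping summary line, or None'''
    arr = line.split()
    if len(arr) > 3 and arr[1] == 'packets' and arr[2] == 'transmitted,':
        try:
            return int(arr[3])
        except ValueError:
            return None
    if len(arr) == 3 and arr[1] == 'is':
        if arr[2] == 'alive':
            return 100
        if arr[2] == 'unreachable':
            return -1
    return None

print(parse_ping_line('2 packets transmitted, 0 received, 100.0% packet loss'))  # 0
print(parse_ping_line('node1 is alive'))                                         # 100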
Example #15
def start_multiplex(address_list):
    '''run ssh -M to each node in address_list'''

    global PERSIST

    # allow this only on the master node because of security considerations
    if synctool.param.MASTER != synctool.param.HOSTNAME:
        verbose('master %s != hostname %s' % (synctool.param.MASTER,
                                              synctool.param.HOSTNAME))
        error('not running on the master node')
        sys.exit(-1)

    if PERSIST is None:
        # use default from synctool.conf
        PERSIST = synctool.param.CONTROL_PERSIST
    else:
        # spellcheck the parameter
        m = synctool.configparser.PERSIST_TIME.match(PERSIST)
        if not m:
            error("invalid persist value '%s'" % PERSIST)
            return

    # make list of nodenames
    nodes = [NODESET.get_nodename_from_address(x) for x in address_list]

    # make list of pairs: (addr, nodename)
    pairs = zip(address_list, nodes)
    synctool.multiplex.setup_master(pairs, PERSIST)
Example #16
def main():
    '''run the program'''

    synctool.param.init()

    sys.stdout = synctool.unbuffered.Unbuffered(sys.stdout)
    sys.stderr = synctool.unbuffered.Unbuffered(sys.stderr)

    try:
        cmd_args = get_options()
    except synctool.range.RangeSyntaxError as err:
        error(str(err))
        sys.exit(1)

    if OPT_AGGREGATE:
        if not synctool.aggr.run(MASTER_OPTS):
            sys.exit(-1)

        sys.exit(0)

    synctool.config.init_mynodename()

    address_list = NODESET.addresses()
    if not address_list:
        error('no valid nodes specified')
        sys.exit(1)

    if OPT_MULTIPLEX:
        start_multiplex(address_list)
    elif CTL_CMD is not None:
        control_multiplex(address_list, CTL_CMD)
    else:
        run_dsh(address_list, cmd_args)
Example #17
    def stat(self, path):
        '''get the stat() information for a pathname'''

        if not path:
            self.entry_exists = False
            self.mode = self.uid = self.gid = self.size = None
            return

        try:
            statbuf = os.lstat(path)
        except OSError as err:
            # could be something stupid like "Permission denied" ...
            # although synctool should be run as root

            if err.errno != errno.ENOENT:
                # "No such file or directory" is a valid error
                # when the destination is missing
                error('stat(%s) failed: %s' % (path, err.strerror))

            self.entry_exists = False
            self.mode = self.uid = self.gid = self.size = None

        else:
            self.entry_exists = True

            self.mode = statbuf.st_mode
            self.uid = statbuf.st_uid
            self.gid = statbuf.st_gid
            self.size = statbuf.st_size
Example #18
def ping_node(addr):
    # type: (str) -> None
    '''ping a single node'''

    node = NODESET.get_nodename_from_address(addr)
    verbose('pinging %s' % node)
    unix_out('%s %s' % (param.PING_CMD, addr))

    packets_received = 0

    # execute ping command and show output with the nodename
    cmd = '%s %s' % (param.PING_CMD, addr)
    cmd_arr = shlex.split(cmd)

    try:
        f = subprocess.Popen(cmd_arr, shell=False, bufsize=4096,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT).stdout
    except OSError as err:
        error('failed to run command %s: %s' % (cmd_arr[0], err.strerror))
        return

    with f:
        for line in f:
            line = line.strip()

            # argh, we have to parse output here
            #
            # on BSD, ping says something like:
            # "2 packets transmitted, 0 packets received, 100.0% packet loss"
            #
            # on Linux, ping says something like:
            # "2 packets transmitted, 0 received, 100.0% packet loss, " \
            # "time 1001ms"

            arr = line.split()
            if len(arr) > 3 and (arr[1] == 'packets' and
                                 arr[2] == 'transmitted,'):
                try:
                    packets_received = int(arr[3])
                except ValueError:
                    pass

                break

            # some ping implementations say "hostname is alive"
            # or "hostname is unreachable"
            elif len(arr) == 3 and arr[1] == 'is':
                if arr[2] == 'alive':
                    packets_received = 100

                elif arr[2] == 'unreachable':
                    packets_received = -1

    if packets_received > 0:
        print '%s: up' % node
    else:
        print '%s: not responding' % node
Example #19
def _remote_stat(up):
    '''Get stat info of the remote object
    Returns array of RemoteStat data, or None on error
    '''

    # use ssh connection multiplexing (if possible)
    cmd_arr = shlex.split(synctool.param.SSH_CMD)
    use_multiplex = synctool.multiplex.use_mux(up.node)
    if use_multiplex:
        synctool.multiplex.ssh_args(cmd_arr, up.node)

    list_cmd = os.path.join(synctool.param.ROOTDIR, 'sbin',
                            'synctool_list.py')
    cmd_arr.extend(['--', up.address, list_cmd, up.filename])

    verbose('running synctool_list %s:%s' % (up.node, up.filename))
    unix_out(' '.join(cmd_arr))
    try:
        proc = subprocess.Popen(cmd_arr, shell=False, bufsize=4096,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
    except OSError as err:
        error('failed to run command %s: %s' % (cmd_arr[0], err.strerror))
        return None

    out, err = proc.communicate()

    if proc.returncode == 255:
        error('ssh connection to %s failed' % up.node)
        return None
    elif proc.returncode == 127:
        error('remote list command failed')
        return None

    # parse synctool_list output into array of RemoteStat info
    data = []
    for line in out.split('\n'):
        if not line:
            continue

        arr = line.split()
        if arr[0] == 'error:':
            # relay error message
            error(' '.join(arr[1:]))
            return None

        try:
            remote_stat = RemoteStat(arr)
        except ValueError:
            error('unexpected output from synctool_list %s:%s' %
                  (up.node, up.filename))
            return None

        verbose('remote: %r' % remote_stat)
        data.append(remote_stat)

    return data
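
The returncode check above leans on the fact that OpenSSH exits with status 255 when the connection itself fails, while any other status comes from the remote command. A minimal standalone illustration (host and command are placeholders):

import subprocess

def run_remote(host, remote_cmd_arr):
    '''run a command on host via ssh; exit code 255 means ssh itself failed'''
    cmd_arr = ['ssh', '-o', 'BatchMode=yes', '--', host] + remote_cmd_arr
    proc = subprocess.Popen(cmd_arr, shell=False,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, _ = proc.communicate()
    if proc.returncode == 255:
        print('ssh connection to %s failed' % host)
        return None
    return out

# hypothetical usage:
# run_remote('node1', ['uname', '-r'])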
Example #20
def make_tempdir():
    '''create temporary directory (for storing rsync filter files)'''

    if not os.path.isdir(synctool.param.TEMP_DIR):
        try:
            os.mkdir(synctool.param.TEMP_DIR, 0750)
        except OSError as err:
            error('failed to create tempdir %s: %s' %
                  (synctool.param.TEMP_DIR, err.strerror))
            sys.exit(-1)
Example #21
def make_tempdir():
    '''create temporary directory (for storing rsync filter files)'''

    if not os.path.isdir(synctool.param.TEMP_DIR):
        try:
            os.mkdir(synctool.param.TEMP_DIR, 0750)
        except OSError as err:
            error('failed to create tempdir %s: %s' %
                  (synctool.param.TEMP_DIR, err.strerror))
            sys.exit(-1)
Example #22
def rsync_include_filter(nodename):
    # type: (str) -> str
    '''create temp file with rsync filter rules
    Include only those dirs that apply for this node
    Returns filename of the filter file
    '''

    try:
        (fd, filename) = tempfile.mkstemp(prefix='synctool-',
                                          dir=param.TEMP_DIR)
    except OSError as err:
        error('failed to create temp file: %s' % err.strerror)
        sys.exit(-1)

    try:
        f = os.fdopen(fd, 'w')
    except OSError as err:
        error('failed to open temp file: %s' % err.strerror)
        sys.exit(-1)

    # include $SYNCTOOL/var/ but exclude
    # the top overlay/ and delete/ dir
    with f:
        f.write('# synctool rsync filter\n')

        # set mygroups for this nodename
        param.NODENAME = nodename
        param.MY_GROUPS = config.get_my_groups()

        # slave nodes get a copy of the entire tree
        # all other nodes use a specific rsync filter
        if nodename not in param.SLAVES:
            if not (_write_overlay_filter(f) and
                    _write_delete_filter(f) and
                    _write_purge_filter(f)):
                # an error occurred;
                # delete temp file and exit
                f.close()
                try:
                    os.unlink(filename)
                except OSError:
                    # silently ignore unlink error
                    pass

                sys.exit(-1)

        # Note: sbin/*.pyc is excluded to keep major differences in
        # Python versions (on master vs. client node) from clashing
        f.write('- /sbin/*.pyc\n'
                '- /lib/synctool/*.pyc\n'
                '- /lib/synctool/pkg/*.pyc\n')

    # Note: remember to delete the temp file later

    return filename
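
The generated file holds plain rsync filter rules (for example the '- /sbin/*.pyc' exclude lines written above). How the file is handed to rsync is not shown here; one way, assuming a 'merge' filter rule on the command line (paths and node are illustrative; the real command line is built from param.RSYNC_CMD elsewhere):

import shlex

filter_file = '/tmp/synctool-abc123'    # as returned by rsync_include_filter()

cmd = "rsync -a --filter='merge %s' /var/lib/synctool/ node1:/var/lib/synctool/" % filter_file
print(shlex.split(cmd))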
Example #23
    def copy_stat(self):
        '''set access and mod times'''

        if not synctool.lib.DRY_RUN and synctool.param.SYNC_TIMES:
            try:
                verbose('copystat: %s => %s' % (self.src_path, self.name))
                shutil.copystat(self.src_path, self.name)
            except OSError as err:
                error('failed to set utime on %s : %s' % (self.name,
                                                          err.strerror))
                terse(synctool.lib.TERSE_FAIL, 'utime %s' % self.name)
Example #24
    def copy_stat(self):
        '''set access and mod times'''

        if not synctool.lib.DRY_RUN and synctool.param.SYNC_TIMES:
            try:
                verbose('copystat: %s => %s' % (self.src_path, self.name))
                shutil.copystat(self.src_path, self.name)
            except OSError as err:
                error('failed to set utime on %s : %s' %
                      (self.name, err.strerror))
                terse(synctool.lib.TERSE_FAIL, 'utime %s' % self.name)
Example #25
def _run_rsync_purge(cmd_arr):
    '''run rsync for purging
    cmd_arr holds already prepared rsync command + arguments
    Returns: None
    '''

    unix_out(' '.join(cmd_arr))

    sys.stdout.flush()
    sys.stderr.flush()
    try:
        # run rsync
        proc = subprocess.Popen(cmd_arr,
                                shell=False,
                                bufsize=4096,
                                stdout=subprocess.PIPE)
    except OSError as err:
        error('failed to run command %s: %s' % (cmd_arr[0], err.strerror))
        return

    out, _ = proc.communicate()

    if synctool.lib.VERBOSE:
        print out

    out = out.split('\n')
    for line in out:
        line = line.strip()
        if not line:
            continue

        code, filename = line.split(' ', 1)

        if code[:6] == 'ERROR:' or code[:8] == 'WARNING:':
            # output rsync errors and warnings
            stderr(line)
            continue

        if filename == './':
            # rsync has a habit of displaying ugly "./" path
            # cmd_arr[-1] is the destination path
            path = cmd_arr[-1]
        else:
            # cmd_arr[-1] is the destination path
            path = os.path.join(cmd_arr[-1], filename)

        if code[0] == '*':
            # rsync has a message for us
            # most likely "deleting"
            msg = code[1:]
            msg = msg.strip()
            stdout('%s %s (purge)' % (msg, prettypath(path)))
        else:
            stdout('%s mismatch (purge)' % prettypath(path))
Example #26
def rsync_include_filter(nodename):
    '''create temp file with rsync filter rules
    Include only those dirs that apply for this node
    Returns filename of the filter file
    '''

    try:
        (fd, filename) = tempfile.mkstemp(prefix='synctool-',
                                          dir=param.TEMP_DIR)
    except OSError as err:
        error('failed to create temp file: %s' % err.strerror)
        sys.exit(-1)

    try:
        f = os.fdopen(fd, 'w')
    except OSError as err:
        error('failed to open temp file: %s' % err.strerror)
        sys.exit(-1)

    # include $SYNCTOOL/var/ but exclude
    # the top overlay/ and delete/ dir
    with f:
        f.write('# synctool rsync filter\n')

        # set mygroups for this nodename
        param.NODENAME = nodename
        param.MY_GROUPS = config.get_my_groups()

        # slave nodes get a copy of the entire tree
        # all other nodes use a specific rsync filter
        if nodename not in param.SLAVES:
            if not (_write_overlay_filter(f) and
                    _write_delete_filter(f) and
                    _write_purge_filter(f)):
                # an error occurred;
                # delete temp file and exit
                f.close()
                try:
                    os.unlink(filename)
                except OSError:
                    # silently ignore unlink error
                    pass

                sys.exit(-1)

        # Note: sbin/*.pyc is excluded to keep major differences in
        # Python versions (on master vs. client node) from clashing
        f.write('- /sbin/*.pyc\n'
                '- /lib/synctool/*.pyc\n'
                '- /lib/synctool/pkg/*.pyc\n')

    # Note: remember to delete the temp file later

    return filename
Example #27
    def set_times(self, atime, mtime):
        '''set access and mod times'''

        # only used for purge --single

        if not synctool.lib.DRY_RUN:
            try:
                os.utime(self.name, (atime, mtime))
            except OSError as err:
                error('failed to set utime on %s : %s' %
                      (self.name, err.strerror))
                terse(synctool.lib.TERSE_FAIL, 'utime %s' % self.name)
Example #28
    def set_times(self, atime, mtime):
        '''set access and mod times'''

        # only used for purge --single

        if not synctool.lib.DRY_RUN:
            try:
                os.utime(self.name, (atime, mtime))
            except OSError as err:
                error('failed to set utime on %s : %s' % (self.name,
                                                          err.strerror))
                terse(synctool.lib.TERSE_FAIL, 'utime %s' % self.name)
Example #29
def _run_rsync_purge(cmd_arr):
    # type: (List[str]) -> None
    '''run rsync for purging
    cmd_arr holds already prepared rsync command + arguments
    '''

    unix_out(' '.join(cmd_arr))

    sys.stdout.flush()
    sys.stderr.flush()
    try:
        # run rsync
        proc = subprocess.Popen(cmd_arr, shell=False, bufsize=4096,
                                stdout=subprocess.PIPE)
    except OSError as err:
        error('failed to run command %s: %s' % (cmd_arr[0], err.strerror))
        return

    out, _ = proc.communicate()

    if synctool.lib.VERBOSE:
        print out

    out = out.split('\n')
    for line in out:
        line = line.strip()
        if not line:
            continue

        code, filename = line.split(' ', 1)

        if code[:6] == 'ERROR:' or code[:8] == 'WARNING:':
            # output rsync errors and warnings
            stderr(line)
            continue

        if filename == './':
            # rsync has a habit of displaying ugly "./" path
            # cmd_arr[-1] is the destination path
            path = cmd_arr[-1]
        else:
            # cmd_arr[-1] is the destination path
            path = os.path.join(cmd_arr[-1], filename)

        if code[0] == '*':
            # rsync has a message for us
            # most likely "deleting"
            msg = code[1:]
            msg = msg.strip()
            stdout('%s %s (purge)' % (msg, prettypath(path)))
        else:
            stdout('%s mismatch (purge)' % prettypath(path))
Example #30
def list_nodes(nodelist):
    # type: (str) -> None
    '''display node definition'''

    nodeset = synctool.nodeset.NodeSet()
    try:
        nodeset.add_node(nodelist)
    except synctool.range.RangeSyntaxError as err:
        error(str(err))
        sys.exit(1)

    if nodeset.addresses() is None:
        # error message already printed
        sys.exit(1)

    groups = []  # type: List[str]
    for node in nodeset.nodelist:
        if OPT_IPADDRESS or OPT_RSYNC:
            out = ''
            if OPT_IPADDRESS:
                out += ' ' + config.get_node_ipaddress(node)

            if OPT_RSYNC:
                if node in param.NO_RSYNC:
                    out += ' no'
                else:
                    out += ' yes'

            print out[1:]
        else:
            for group in config.get_groups(node):
                # extend groups, but do not have duplicates
                if group not in groups:
                    groups.append(group)

    # group order is important, so don't sort
    # note: when listing multiple nodes at once, the groups of later
    # nodes are simply appended, so the combined order may be incorrect
#    groups.sort()

    for group in groups:
        if OPT_FILTER_IGNORED and group in param.IGNORE_GROUPS:
            continue

        if group in param.IGNORE_GROUPS:
            group += ' (ignored)'

        print group
Example #31
def list_nodes(nodelist):
    '''display node definition'''

    nodeset = synctool.nodeset.NodeSet()
    try:
        nodeset.add_node(nodelist)
    except synctool.range.RangeSyntaxError as err:
        error(str(err))
        sys.exit(1)

    if nodeset.addresses() is None:
        # error message already printed
        sys.exit(1)

    groups = []
    for node in nodeset.nodelist:
        if OPT_IPADDRESS or OPT_HOSTNAME or OPT_RSYNC:
            out = ''
            if OPT_IPADDRESS:
                out += ' ' + synctool.config.get_node_ipaddress(node)

            if OPT_HOSTNAME:
                out += ' ' + synctool.config.get_node_hostname(node)

            if OPT_RSYNC:
                if node in synctool.param.NO_RSYNC:
                    out += ' no'
                else:
                    out += ' yes'

            print out[1:]
        else:
            for group in synctool.config.get_groups(node):
                # extend groups, but do not have duplicates
                if group not in groups:
                    groups.append(group)

    # group order is important, so don't sort
    # note: when listing multiple nodes at once, the groups of later
    # nodes are simply appended, so the combined order may be incorrect
#    groups.sort()

    for group in groups:
        if OPT_FILTER_IGNORED and group in synctool.param.IGNORE_GROUPS:
            continue

        if group in synctool.param.IGNORE_GROUPS:
            group += ' (ignored)'

        print group
Example #32
def set_action(a, opt):
    '''set the action to perform'''

    # this is a helper function for the command-line parser

    global ACTION, ACTION_OPTION

    if ACTION > 0:
        error('options %s and %s can not be combined' % (ACTION_OPTION, opt))
        sys.exit(1)

    ACTION = a
    ACTION_OPTION = opt
Example #33
    def create(self):
        '''make a fifo'''

        verbose(dryrun_msg('  os.mkfifo(%s)' % self.name))
        unix_out('mkfifo %s' % self.name)
        terse(synctool.lib.TERSE_NEW, self.name)
        if not synctool.lib.DRY_RUN:
            try:
                os.mkfifo(self.name, self.stat.mode & 0777)
            except OSError as err:
                error('failed to create fifo %s : %s' % (self.name,
                                                         err.strerror))
                terse(TERSE_FAIL, 'fifo %s' % self.name)
Example #34
def set_action(a, opt):
    '''set the action to perform'''

    # this is a helper function for the command-line parser

    global ACTION, ACTION_OPTION

    if ACTION > 0:
        error('options %s and %s can not be combined' % (ACTION_OPTION, opt))
        sys.exit(1)

    ACTION = a
    ACTION_OPTION = opt
Example #35
    def set_permissions(self):
        '''set access permission bits equal to source'''

        verbose(dryrun_msg('  os.chmod(%s, %04o)' %
                           (self.name, self.stat.mode & 07777)))
        unix_out('chmod 0%o %s' % (self.stat.mode & 07777, self.name))
        if not synctool.lib.DRY_RUN:
            try:
                os.chmod(self.name, self.stat.mode & 07777)
            except OSError as err:
                error('failed to chmod %04o %s : %s' %
                      (self.stat.mode & 07777, self.name, err.strerror))
                terse(synctool.lib.TERSE_FAIL, 'mode %s' % self.name)
Example #36
def detect_ssh():
    # type: () -> int
    '''detect ssh version
    Set global SSH_VERSION to 2-digit int number:
    eg. version "5.6p1" -> SSH_VERSION = 56

    Returns: SSH_VERSION
    This routine only works for OpenSSH; otherwise return -1
    '''

    global SSH_VERSION

    if SSH_VERSION is not None:
        return SSH_VERSION

    cmd_arr = shlex.split(synctool.param.SSH_CMD)
    # only use first item: the path to the ssh command
    cmd_arr = cmd_arr[:1]
    cmd_arr.append('-V')

    unix_out(' '.join(cmd_arr))
    try:
        # OpenSSH may print version information on stderr
        proc = subprocess.Popen(cmd_arr,
                                shell=False,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
    except OSError as err:
        error('failed to execute %s: %s' % (cmd_arr[0], err.strerror))
        SSH_VERSION = -1
        return SSH_VERSION

    # stderr was redirected to stdout
    data, _ = proc.communicate()
    if not data:
        SSH_VERSION = -1
        return SSH_VERSION

    data = data.strip()
    verbose('ssh version string: ' + data)

    # data should be a single line matching "OpenSSH_... SSL ... date\n"
    m = MATCH_SSH_VERSION.match(data)
    if not m:
        SSH_VERSION = -1
        return SSH_VERSION

    groups = m.groups()
    SSH_VERSION = int(groups[0]) * 10 + int(groups[1])
    verbose('SSH_VERSION: %d' % SSH_VERSION)
    return SSH_VERSION
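
MATCH_SSH_VERSION is defined elsewhere in the module; a regex in the same spirit, together with the two-digit conversion used above, might look like this (the pattern is an assumption, not the one from the synctool source):

import re

# hypothetical stand-in for MATCH_SSH_VERSION
MATCH_SSH_VERSION = re.compile(r'^OpenSSH_(\d+)\.(\d+)')

def ssh_version(version_string):
    '''"OpenSSH_7.9p1 ..." -> 79, or -1 when it is not OpenSSH'''
    m = MATCH_SSH_VERSION.match(version_string)
    if not m:
        return -1
    major, minor = m.groups()
    return int(major) * 10 + int(minor)

print(ssh_version('OpenSSH_7.9p1 Ubuntu-10, OpenSSL 1.1.1 11 Sep 2018'))  # 79
print(ssh_version('Dropbear v2020.81'))                                   # -1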
Example #37
def run_remote_copy(address_list, files):
    # type: (List[str], List[str]) -> None
    '''copy files[] to nodes[]'''

    global DSH_CP_CMD_ARR, SOURCE_LIST, FILES_STR

    errs = 0
    sourcelist = []  # type: List[str]
    for filename in files:
        if not filename:
            continue

        if not synctool.lib.path_exists(filename):
            error('no such file or directory: %s' % filename)
            errs += 1
            continue

        # for directories, append a trailing '/'
        if os.path.isdir(filename) and filename[-1] != os.sep:
            sourcelist.append(filename + os.sep)
        else:
            sourcelist.append(filename)

    if errs > 0:
        sys.exit(-1)

    SOURCE_LIST = sourcelist
    FILES_STR = ' '.join(sourcelist)  # only used for printing

    DSH_CP_CMD_ARR = shlex.split(param.RSYNC_CMD)

    if not OPT_PURGE:
        if '--delete' in DSH_CP_CMD_ARR:
            DSH_CP_CMD_ARR.remove('--delete')
        if '--delete-excluded' in DSH_CP_CMD_ARR:
            DSH_CP_CMD_ARR.remove('--delete-excluded')

    if synctool.lib.VERBOSE:
        if '-q' in DSH_CP_CMD_ARR:
            DSH_CP_CMD_ARR.remove('-q')
        if '--quiet' in DSH_CP_CMD_ARR:
            DSH_CP_CMD_ARR.remove('--quiet')

    if synctool.lib.QUIET:
        if '-q' not in DSH_CP_CMD_ARR and '--quiet' not in DSH_CP_CMD_ARR:
            DSH_CP_CMD_ARR.append('-q')

    if DSH_CP_OPTIONS:
        DSH_CP_CMD_ARR.extend(shlex.split(DSH_CP_OPTIONS))

    synctool.parallel.do(worker_dsh_cp, address_list)
Example #38
    def _check_valid_groupdir(overlaydir, label):
        '''local helper function for _check_valid_overlaydirs()'''

        errs = 0
        entries = os.listdir(overlaydir)
        for entry in entries:
            fullpath = os.path.join(overlaydir, entry)
            if os.path.isdir(fullpath) and entry not in param.ALL_GROUPS:
                error("$%s/%s/ exists, but there is no such group '%s'" %
                      (label, entry, entry))
                errs += 1
                continue

        return errs == 0
Example #39
    def _compare_checksums(self, src_path):
        # type: (str) -> bool
        '''compare checksum of src_path and dest: self.name
        Return True if the same'''

        try:
            f1 = open(src_path, 'rb')
        except IOError as err:
            error('failed to open %s : %s' % (src_path, err.strerror))
            # return True because we can't fix an error in src_path
            return True

        sum1 = hashlib.md5()
        sum2 = hashlib.md5()

        with f1:
            try:
                f2 = open(self.name, 'rb')
            except IOError as err:
                error('failed to open %s : %s' % (self.name, err.strerror))
                return False

            with f2:
                ended = False
                while not ended and (sum1.digest() == sum2.digest()):
                    try:
                        data1 = f1.read(IO_SIZE)
                    except IOError as err:
                        error('failed to read file %s: %s' % (src_path,
                                                              err.strerror))
                        return False

                    if not data1:
                        ended = True
                    else:
                        sum1.update(data1)

                    try:
                        data2 = f2.read(IO_SIZE)
                    except IOError as err:
                        error('failed to read file %s: %s' % (self.name,
                                                              err.strerror))
                        return False

                    if not data2:
                        ended = True
                    else:
                        sum2.update(data2)

        if sum1.digest() != sum2.digest():
            if synctool.lib.DRY_RUN:
                stdout('%s mismatch (MD5 checksum)' % self.name)
            else:
                stdout('%s updated (MD5 mismatch)' % self.name)

            unix_out('# updating file %s' % self.name)
            terse(synctool.lib.TERSE_SYNC, self.name)
            return False

        return True
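
The same chunked-MD5 idea in isolation: hash each file completely in fixed-size reads and compare the digests. This is simpler than the loop above, which interleaves both files and stops as soon as the running digests diverge (IO_SIZE is defined elsewhere in the source; 16 KiB here is just a chunk size for the sketch):

import hashlib

IO_SIZE = 16 * 1024

def md5_file(path):
    '''return the MD5 hex digest of a file, read in IO_SIZE chunks'''
    md5 = hashlib.md5()
    with open(path, 'rb') as f:
        while True:
            data = f.read(IO_SIZE)
            if not data:
                break
            md5.update(data)
    return md5.hexdigest()

def same_contents(path1, path2):
    '''True if both files hash to the same MD5 digest'''
    return md5_file(path1) == md5_file(path2)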
Example #40
def option_combinations(opt_diff, opt_single, opt_reference, opt_erase_saved,
                        opt_upload, opt_fix, opt_group):
    '''some combinations of command-line options don't make sense;
    alert the user and abort
    '''

    if opt_erase_saved and (opt_diff or opt_reference or opt_upload):
        error("option --erase-saved can not be combined with other actions")
        sys.exit(1)

    if opt_upload and (opt_diff or opt_single or opt_reference):
        error("option --upload can not be combined with other actions")
        sys.exit(1)

    if opt_upload and opt_group:
        error("option --upload and --group can not be combined")
        sys.exit(1)

    if opt_diff and (opt_single or opt_reference or opt_fix):
        error("option --diff can not be combined with other actions")
        sys.exit(1)

    if opt_reference and (opt_single or opt_fix):
        error("option --reference can not be combined with other actions")
        sys.exit(1)
Example #41
    def create(self):
        '''create symbolic link'''

        verbose(dryrun_msg('  os.symlink(%s, %s)' % (self.oldpath, self.name)))
        unix_out('ln -s %s %s' % (self.oldpath, self.name))
        terse(synctool.lib.TERSE_LINK, self.name)

        if not synctool.lib.DRY_RUN:
            try:
                os.symlink(self.oldpath, self.name)
            except OSError as err:
                error('failed to create symlink %s -> %s : %s' %
                      (self.name, self.oldpath, err.strerror))
                terse(synctool.lib.TERSE_FAIL, 'link %s' % self.name)
Example #42
    def create(self):
        # type: () -> None
        '''make a fifo'''

        verbose(dryrun_msg('  os.mkfifo(%s)' % self.name))
        unix_out('mkfifo %s' % self.name)
        terse(synctool.lib.TERSE_NEW, self.name)
        if not synctool.lib.DRY_RUN:
            try:
                os.mkfifo(self.name, self.stat.mode & 0777)
            except OSError as err:
                error('failed to create fifo %s : %s' % (self.name,
                                                         err.strerror))
                terse(TERSE_FAIL, 'fifo %s' % self.name)
Example #43
def option_combinations(opt_diff, opt_single, opt_reference, opt_erase_saved,
                        opt_upload, opt_fix, opt_group):
    '''some combinations of command-line options don't make sense;
    alert the user and abort
    '''

    if opt_erase_saved and (opt_diff or opt_reference or opt_upload):
        error("option --erase-saved can not be combined with other actions")
        sys.exit(1)

    if opt_upload and (opt_diff or opt_single or opt_reference):
        error("option --upload can not be combined with other actions")
        sys.exit(1)

    if opt_upload and opt_group:
        error("option --upload and --group can not be combined")
        sys.exit(1)

    if opt_diff and (opt_single or opt_reference or opt_fix):
        error("option --diff can not be combined with other actions")
        sys.exit(1)

    if opt_reference and (opt_single or opt_fix):
        error("option --reference can not be combined with other actions")
        sys.exit(1)
Example #44
    def create(self):
        '''create symbolic link'''

        verbose(dryrun_msg('  os.symlink(%s, %s)' % (self.oldpath,
                                                     self.name)))
        unix_out('ln -s %s %s' % (self.oldpath, self.name))
        terse(synctool.lib.TERSE_LINK, self.name)
        if not synctool.lib.DRY_RUN:
            try:
                os.symlink(self.oldpath, self.name)
            except OSError as err:
                error('failed to create symlink %s -> %s : %s' %
                      (self.name, self.oldpath, err.strerror))
                terse(TERSE_FAIL, 'link %s' % self.name)
Example #45
def run_remote_copy(address_list, files):
    '''copy files[] to nodes[]'''

    global DSH_CP_CMD_ARR, SOURCE_LIST, FILES_STR

    errs = 0
    sourcelist = []
    for filename in files:
        if not filename:
            continue

        if not synctool.lib.path_exists(filename):
            error('no such file or directory: %s' % filename)
            errs += 1
            continue

        # for directories, append a trailing '/'
        if os.path.isdir(filename) and filename[-1] != os.sep:
            sourcelist.append(filename + os.sep)
        else:
            sourcelist.append(filename)

    if errs > 0:
        sys.exit(-1)

    SOURCE_LIST = sourcelist
    FILES_STR = ' '.join(sourcelist)    # only used for printing

    DSH_CP_CMD_ARR = shlex.split(param.RSYNC_CMD)

    if not OPT_PURGE:
        if '--delete' in DSH_CP_CMD_ARR:
            DSH_CP_CMD_ARR.remove('--delete')
        if '--delete-excluded' in DSH_CP_CMD_ARR:
            DSH_CP_CMD_ARR.remove('--delete-excluded')

    if synctool.lib.VERBOSE:
        if '-q' in DSH_CP_CMD_ARR:
            DSH_CP_CMD_ARR.remove('-q')
        if '--quiet' in DSH_CP_CMD_ARR:
            DSH_CP_CMD_ARR.remove('--quiet')

    if synctool.lib.QUIET:
        if '-q' not in DSH_CP_CMD_ARR and '--quiet' not in DSH_CP_CMD_ARR:
            DSH_CP_CMD_ARR.append('-q')

    if DSH_CP_OPTIONS:
        DSH_CP_CMD_ARR.extend(shlex.split(DSH_CP_OPTIONS))

    synctool.parallel.do(worker_dsh_cp, address_list)
Example #46
def detect_ssh():
    # type: () -> int
    '''detect ssh version
    Set global SSH_VERSION to 2-digit int number:
    eg. version "5.6p1" -> SSH_VERSION = 56

    Returns: SSH_VERSION
    This routine only works for OpenSSH; otherwise return -1
    '''

    global SSH_VERSION

    if SSH_VERSION is not None:
        return SSH_VERSION

    cmd_arr = shlex.split(synctool.param.SSH_CMD)
    # only use first item: the path to the ssh command
    cmd_arr = cmd_arr[:1]
    cmd_arr.append('-V')

    unix_out(' '.join(cmd_arr))
    try:
        # OpenSSH may print version information on stderr
        proc = subprocess.Popen(cmd_arr, shell=False, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
    except OSError as err:
        error('failed to execute %s: %s' % (cmd_arr[0], err.strerror))
        SSH_VERSION = -1
        return SSH_VERSION

    # stderr was redirected to stdout
    data, _ = proc.communicate()
    if not data:
        SSH_VERSION = -1
        return SSH_VERSION

    data = data.strip()
    verbose('ssh version string: ' + data)

    # data should be a single line matching "OpenSSH_... SSL ... date\n"
    m = MATCH_SSH_VERSION.match(data)
    if not m:
        SSH_VERSION = -1
        return SSH_VERSION

    groups = m.groups()
    SSH_VERSION = int(groups[0]) * 10 + int(groups[1])
    verbose('SSH_VERSION: %d' % SSH_VERSION)
    return SSH_VERSION
Example #47
    def set_owner(self):
        '''set ownership equal to source'''

        verbose(dryrun_msg('  os.chown(%s, %d, %d)' %
                           (self.name, self.stat.uid, self.stat.gid)))
        unix_out('chown %s.%s %s' % (self.stat.ascii_uid(),
                                     self.stat.ascii_gid(), self.name))
        if not synctool.lib.DRY_RUN:
            try:
                os.chown(self.name, self.stat.uid, self.stat.gid)
            except OSError as err:
                error('failed to chown %s.%s %s : %s' %
                      (self.stat.ascii_uid(), self.stat.ascii_gid(),
                       self.name, err.strerror))
                terse(synctool.lib.TERSE_FAIL, 'owner %s' % self.name)
Example #48
    def copy_stat(self):
        '''set access and mod times'''

        # FIXME change this to os.utime()
        # FIXME but see above ... method VNode.set_times()
        # FIXME agree to copy mtime, but atime should be kept intact?

        if not synctool.lib.DRY_RUN and synctool.param.SYNC_TIMES:
            try:
                verbose('copystat: %s => %s' % (self.src_path, self.name))
                shutil.copystat(self.src_path, self.name)
            except OSError as err:
                error('failed to set utime on %s : %s' % (self.name,
                                                          err.strerror))
                terse(synctool.lib.TERSE_FAIL, 'utime %s' % self.name)
Example #49
    def set_owner(self):
        # type: () -> None
        '''set ownership equal to source'''

        verbose(dryrun_msg('  os.chown(%s, %d, %d)' %
                           (self.name, self.stat.uid, self.stat.gid)))
        unix_out('chown %s.%s %s' % (self.stat.ascii_uid(),
                                     self.stat.ascii_gid(), self.name))
        if not synctool.lib.DRY_RUN:
            try:
                os.chown(self.name, self.stat.uid, self.stat.gid)
            except OSError as err:
                error('failed to chown %s.%s %s : %s' %
                      (self.stat.ascii_uid(), self.stat.ascii_gid(),
                       self.name, err.strerror))
                terse(TERSE_FAIL, 'owner %s' % self.name)
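
set_owner() passes numeric ids straight to os.chown(); the ascii_uid()/ascii_gid() helpers presumably only translate them back to names for display. Going the other way, resolving names before a chown, is plain stdlib (the path, user and group below are placeholders):

import os
import pwd
import grp

def chown_by_name(path, user, group):
    '''look up user/group names and chown path accordingly'''
    uid = pwd.getpwnam(user).pw_uid
    gid = grp.getgrnam(group).gr_gid
    os.chown(path, uid, gid)

# hypothetical usage (requires sufficient privileges):
# chown_by_name('/tmp/somefile', 'root', 'wheel')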
Example #50
    def create(self):
        '''copy file'''

        if not self.exists:
            terse(synctool.lib.TERSE_NEW, self.name)

        verbose(dryrun_msg('  copy %s %s' % (self.src_path, self.name)))
        unix_out('cp %s %s' % (self.src_path, self.name))
        if not synctool.lib.DRY_RUN:
            try:
                # copy file
                shutil.copy(self.src_path, self.name)
            except (OSError, IOError) as err:
                error('failed to copy %s to %s: %s' %
                      (prettypath(self.src_path), self.name, err.strerror))
                terse(TERSE_FAIL, self.name)
Example #51
def control_multiplex(address_list, ctl_cmd):
    '''run ssh -O ctl_cmd to each node in address_list'''

    global SSH_CMD_ARR

    synctool.multiplex.detect_ssh()
    if synctool.multiplex.SSH_VERSION < 39:
        error('unsupported version of ssh')
        sys.exit(-1)

    SSH_CMD_ARR = shlex.split(synctool.param.SSH_CMD)

    if SSH_OPTIONS:
        SSH_CMD_ARR.extend(shlex.split(SSH_OPTIONS))

    synctool.parallel.do(_ssh_control, address_list)
Example #52
    def set_times(self, atime, mtime):
        '''set access and mod times'''

        # only used for purge --single

        # FIXME change to: def set_times(self): set equal to source
        # FIXME agree to copy mtime, but atime should be kept intact?
        # FIXME change set_times() to set_mtime() ?

        if not synctool.lib.DRY_RUN:
            try:
                os.utime(self.name, (atime, mtime))
            except OSError as err:
                error('failed to set utime on %s : %s' % (self.name,
                                                          err.strerror))
                terse(synctool.lib.TERSE_FAIL, 'utime %s' % self.name)
Example #53
    def load(self):
        # type: () -> bool
        '''load release info from github
        Returns True on success
        '''

        tags = github_api(ReleaseInfo.TAGS_URL)
        if tags is None:
            # error message already printed
            return False

        try:
            self.version = tags[0]['name']
            self.url = tags[0]['tarball_url']
        except (IndexError, KeyError, TypeError):
            error('JSON data format error')
            return False

        # go find the date of the commit for this tag
        try:
            url = tags[0]['commit']['url']
        except (IndexError, KeyError, TypeError):
            error('JSON data format error')
            return False

        # get commit metadata via GitHub API
        commit = github_api(url)
        if commit is None:
            # error already printed
            return False

        try:
            date_str = commit['commit']['committer']['date']
        except (KeyError, TypeError):
            error('JSON data format error')
            return False

        # try to parse the date string
        # unfortunately, the %Z format specifier is very badly
        # supported by Python (i.e. it doesn't work right)
        # so I strip off the timezone as a workaround
        idx = date_str.find('Z')
        if idx > -1:
            date_str = date_str[:idx]

        try:
            self.datetime = datetime.datetime.strptime(date_str,
                                                       '%Y-%m-%dT%H:%M:%S')
        except ValueError:
            error("datetime format error: '%s'" % date_str)
            return False

        verbose('info.version = %s' % self.version)
        verbose('info.datetime = %s' % str(self.datetime))
        verbose('info.url = %s' % self.url)
        return True
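
The timezone workaround above in isolation: GitHub returns ISO 8601 timestamps in UTC with a trailing 'Z', which strptime's %Z specifier handles poorly, so the 'Z' is cut off before parsing. A small sketch (the timestamp is an illustrative value in GitHub's format):

import datetime

def parse_github_date(date_str):
    '''parse a timestamp like "2015-05-02T14:50:11Z" (UTC assumed)'''
    idx = date_str.find('Z')
    if idx > -1:
        date_str = date_str[:idx]
    return datetime.datetime.strptime(date_str, '%Y-%m-%dT%H:%M:%S')

print(parse_github_date('2015-05-02T14:50:11Z'))    # 2015-05-02 14:50:11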
Example #54
    def create(self):
        # type: () -> None
        '''copy file'''

        if not self.exists:
            terse(synctool.lib.TERSE_NEW, self.name)

        verbose(dryrun_msg('  copy %s %s' % (self.src_path, self.name)))
        unix_out('cp %s %s' % (self.src_path, self.name))
        if not synctool.lib.DRY_RUN:
            try:
                # copy file
                shutil.copy(self.src_path, self.name)
            except (OSError, IOError) as err:
                error('failed to copy %s to %s: %s' %
                      (prettypath(self.src_path), self.name, err.strerror))
                terse(TERSE_FAIL, self.name)
Example #55
def expand(nodelist):
    '''display expanded argument'''

    nodeset = synctool.nodeset.NodeSet()
    try:
        nodeset.add_node(nodelist)
    except synctool.range.RangeSyntaxError as err:
        error(str(err))
        sys.exit(1)

    # don't care if the nodes do not exist

    arr = list(nodeset.nodelist)
    arr.sort()

    for elem in arr:
        print elem,
    print