Example #1
def run(cmd_arr):
    # type: (List[str]) -> bool
    '''pipe the output through the aggregator
    Returns False on error, else True
    '''

    # simply re-run this command, but with a pipe

    if '-a' in cmd_arr:
        cmd_arr.remove('-a')

    if '--aggregate' in cmd_arr:
        cmd_arr.remove('--aggregate')

    try:
        f = subprocess.Popen(cmd_arr,
                             shell=False,
                             bufsize=4096,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT).stdout
    except OSError as err:
        stderr('failed to run command %s: %s' % (cmd_arr[0], err.strerror))
        return False

    with f:
        aggregate(f)

    return True
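
A minimal, self-contained sketch of the same pattern used by run() above (re-running a command with its stdout piped back into the calling process); the echo command and the line-counting consume() helper are illustrative stand-ins, not part of synctool:

import subprocess

def consume(f):
    # stand-in for aggregate(): just count the lines coming off the pipe
    count = 0
    for _ in f:
        count += 1
    print('read %d lines' % count)

def run_piped(cmd_arr):
    # same Popen usage as run() above: no shell, stderr merged into stdout
    try:
        f = subprocess.Popen(cmd_arr, shell=False, bufsize=4096,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT).stdout
    except OSError as err:
        print('failed to run command %s: %s' % (cmd_arr[0], err.strerror))
        return False

    with f:
        consume(f)
    return True

run_piped(['echo', 'hello world'])
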
Example #2
def _write_purge_filter(f):
    '''write rsync filter rules for purge/ tree
    Returns False on error'''

    f.write('+ /var/purge/\n')

    purge_groups = os.listdir(synctool.param.PURGE_DIR)

    # add only the group dirs that apply
    for g in synctool.param.MY_GROUPS:
        if g in purge_groups:
            purge_root = os.path.join(synctool.param.PURGE_DIR, g)
            if not os.path.isdir(purge_root):
                continue

            for path, _, files in os.walk(purge_root):
                if path == purge_root:
                    # guard against user mistakes;
                    # danger of destroying the entire filesystem
                    # if it would rsync --delete the root
                    if len(files) > 0:
                        stderr('cowardly refusing to purge the root '
                               'directory')
                        stderr('please remove any files directly '
                               'under %s/' % prettypath(purge_root))
                        return False
                else:
                    f.write('+ /var/purge/%s/\n' % g)
                    break

    f.write('- /var/purge/*\n')
    return True
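
For illustration, assuming MY_GROUPS contains a group 'web' whose purge/web/ tree holds files below its top level (the group name is made up), the filter file written above would contain roughly:

+ /var/purge/
+ /var/purge/web/
- /var/purge/*

That is, rsync is told to include the purge/ directory and the applicable group directory, and to exclude every other entry under purge/.
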
Example #3
def single_files():
    '''check/update a list of single files'''

    changed_dict = {}
    synctool.overlay.visit(synctool.param.OVERLAY_DIR,
                           _single_overlay_callback, changed_dict)

    # For files that were not found, look in the purge/ tree
    # Any overlay-ed files have already been removed from SINGLE_FILES
    # So purge/ won't overrule overlay/
    visit_purge_single(_single_purge_callback)

    # run any .post scripts on updated directories
    for path in changed_dict:
        obj, post_script = changed_dict[path]
        _run_post(obj, post_script)

    if len(SINGLE_FILES) > 0:
        # there are still single files left
        # maybe they are in the delete tree?
        changed_dict = {}
        synctool.overlay.visit(synctool.param.DELETE_DIR,
                               _single_delete_callback, changed_dict)

        # run any .post scripts on updated directories
        # (it's really correct to do this twice; once overlay/, once delete/)
        for path in changed_dict:
            obj, post_script = changed_dict[path]
            _run_post(obj, post_script)

    for filename in SINGLE_FILES:
        stderr('%s is not in the overlay tree' % filename)
Example #4
def _split_extension(filename, src_dir):
    '''filename in the overlay tree, without leading path
    src_dir is passed for the purpose of printing error messages
    Returns tuple: SyncObject, importance'''

    (name, ext) = os.path.splitext(filename)
    if not ext:
        return SyncObject(filename, name, OV_NO_EXT), _group_all()

    if ext == '.post':
        (name2, ext) = os.path.splitext(name)
        if ext == '._template':
            # it's a generic template generator
            return SyncObject(filename, name, OV_TEMPLATE_POST), _group_all()

        # it's a generic .post script
        return SyncObject(filename, name, OV_POST), _group_all()

    if ext[:2] != '._':
        return SyncObject(filename, filename, OV_NO_EXT), _group_all()

    ext = ext[2:]
    if not ext:
        return SyncObject(filename, filename, OV_NO_EXT), _group_all()

    if ext == 'template':
        return SyncObject(filename, name, OV_TEMPLATE), _group_all()

    try:
        importance = synctool.param.MY_GROUPS.index(ext)
    except ValueError:
        if not ext in synctool.param.ALL_GROUPS:
            src_path = os.path.join(src_dir, filename)
            if synctool.param.TERSE:
                terse(synctool.lib.TERSE_ERROR, 'invalid group on %s' %
                                                src_path)
            else:
                stderr('unknown group on %s, skipped' % prettypath(src_path))
            return None, -1

        # it is not one of my groups
        verbose('skipping %s, it is not one of my groups' %
                prettypath(os.path.join(src_dir, filename)))
        return None, -1

    (name2, ext) = os.path.splitext(name)

    if ext == '.post':
        _, ext = os.path.splitext(name2)
        if ext == '._template':
            # it's a group-specific template generator
            return (SyncObject(filename, name2, OV_TEMPLATE_POST), importance)

        # register group-specific .post script
        return SyncObject(filename, name2, OV_POST), importance

    elif ext == '._template':
        return SyncObject(filename, name2, OV_TEMPLATE), importance

    return SyncObject(filename, name), importance
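
A rough standalone sketch of the overlay naming convention parsed above; the group list and filenames are made up, the real function returns SyncObject instances rather than strings, and nested cases such as group-specific .post scripts are left out:

import os

MY_GROUPS = ['web', 'all']    # hypothetical group list for this node

def describe(filename):
    name, ext = os.path.splitext(filename)
    if ext == '.post':
        return '%s: generic .post script' % name
    if not ext.startswith('._'):
        return '%s: no group extension' % filename
    group = ext[2:]
    if group == 'template':
        return '%s: template generator' % name
    if group in MY_GROUPS:
        return '%s: group %s, importance %d' % (name, group,
                                                MY_GROUPS.index(group))
    return '%s: not one of my groups, skipped' % name

for fn in ('hosts', 'sshd_config._web', 'sshd_config._db',
           'motd._template', 'postfix.sh.post'):
    print('%-20s -> %s' % (fn, describe(fn)))
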
Example #5
def single_erase_saved():
    '''erase single backup files'''

    changed_dict = {}
    synctool.overlay.visit(synctool.param.OVERLAY_DIR,
                           _single_erase_saved_callback, changed_dict)

    # run any .post scripts on updated directories
    for path in changed_dict:
        obj, post_script = changed_dict[path]
        _run_post(obj, post_script)

    if len(SINGLE_FILES) > 0:
        # there are still single files left
        # maybe they are in the delete tree?
        changed_dict = {}
        synctool.overlay.visit(synctool.param.DELETE_DIR,
                               _single_erase_saved_callback, changed_dict)

        # run any .post scripts on updated directories
        # (it's really correct to do this twice; once overlay/, once delete/)
        for path in changed_dict:
            obj, post_script = changed_dict[path]
            _run_post(obj, post_script)

    for filename in SINGLE_FILES:
        stderr('%s is not in the overlay tree' % filename)
Example #6
def run_command_in_dir(dest_dir, cmd):
    '''change directory to dest_dir, and run the shell command'''

    verbose('  os.chdir(%s)' % dest_dir)
    unix_out('cd %s' % dest_dir)

    cwd = os.getcwd()

    # if dry run, the target directory may not exist yet
    # (mkdir has not been called for real, for a dry run)
    if synctool.lib.DRY_RUN:
        run_command(cmd)

        verbose('  os.chdir(%s)' % cwd)
        unix_out('cd %s' % cwd)
        unix_out('')
        return

    try:
        os.chdir(dest_dir)
    except OSError as err:
        stderr('error changing directory to %s: %s' % (dest_dir,
                                                       err.strerror))
    else:
        run_command(cmd)

        verbose('  os.chdir(%s)' % cwd)
        unix_out('cd %s' % cwd)
        unix_out('')

        try:
            os.chdir(cwd)
        except OSError as err:
            stderr('error changing directory to %s: %s' % (cwd, err.strerror))
Example #7
def _delete_callback(obj, post_dict, dir_changed, *args):
    '''delete files'''

    if obj.ov_type == synctool.overlay.OV_TEMPLATE:
        return generate_template(obj, post_dict), False

    # don't delete directories
    if obj.src_stat.is_dir():
#       verbose('refusing to delete directory %s' % (obj.dest_path + os.sep))
        if dir_changed and obj.dest_path in post_dict:
            _run_post(obj, post_dict[obj.dest_path])

        return True, dir_changed

    if obj.dest_stat.is_dir():
        stderr('destination is a directory: %s, skipped' % obj.print_src())
        return True, False

    verbose('checking %s' % obj.print_src())

    if obj.dest_stat.exists():
        vnode = obj.vnode_dest_obj()
        vnode.harddelete()

        if obj.dest_path in post_dict:
            _run_post(obj, post_dict[obj.dest_path])
        return True, True

    return True, False
Example #8
    def compare(self, src_path, dest_stat):
        '''see if devs are the same'''

        if not self.exists:
            return False

        # dest_stat is a SyncStat object and it's useless here
        # I need a real, fresh statbuf that includes st_rdev field
        try:
            dest_stat = os.lstat(self.name)
        except OSError as err:
            stderr('error checking %s : %s' % (self.name, err.strerror))
            return False

        src_major = os.major(self.src_stat.st_rdev)
        src_minor = os.minor(self.src_stat.st_rdev)
        dest_major = os.major(dest_stat.st_rdev)
        dest_minor = os.minor(dest_stat.st_rdev)
        if src_major != dest_major or src_minor != dest_minor:
            stdout('%s should have major,minor %d,%d but has %d,%d' %
                (self.name, src_major, src_minor, dest_major, dest_minor))
            unix_out('# updating major,minor %s' % self.name)
            terse(synctool.lib.TERSE_SYNC, self.name)
            return False

        return True
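
The comparison above relies on os.major() and os.minor() applied to the st_rdev field of a fresh lstat(); a minimal standalone illustration (the /dev/null path is only an example and assumes a Unix system):

import os

st = os.lstat('/dev/null')
print('major,minor = %d,%d' % (os.major(st.st_rdev), os.minor(st.st_rdev)))
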
Example #9
def make_default_nodeset():
    '''take the (temporary) DEFAULT_NODESET and expand it to
    the definitive DEFAULT_NODESET
    Return value: none, exit the program on error
    '''

    # Note: this function is called by config.read_config()

    temp_set = param.DEFAULT_NODESET
    param.DEFAULT_NODESET = set()
    nodeset = NodeSet()
    errors = 0
    for elem in temp_set:
        if elem in param.NODES:
            nodeset.add_node(elem)
        elif elem in param.ALL_GROUPS:
            nodeset.add_group(elem)
        else:
            stderr("config error: unknown node or group '%s' "
                   "in default_nodeset" % elem)
            errors += 1

    if not errors:
        if not nodeset.addresses(silent=True):
            # Note: silent=True suppresses warnings about ignored nodes
            # error message already printed
            errors += 1
        else:
            param.DEFAULT_NODESET = nodeset.nodelist

    if errors > 0:
        sys.exit(-1)
Example #10
def run(cmd_arr):
    # type: (List[str]) -> bool
    '''pipe the output through the aggregator
    Returns False on error, else True
    '''

    # simply re-run this command, but with a pipe

    if '-a' in cmd_arr:
        cmd_arr.remove('-a')

    if '--aggregate' in cmd_arr:
        cmd_arr.remove('--aggregate')

    try:
        f = subprocess.Popen(cmd_arr, shell=False, bufsize=4096,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT).stdout
    except OSError as err:
        stderr('failed to run command %s: %s' % (cmd_arr[0], err.strerror))
        return False

    with f:
        aggregate(f)

    return True
Example #11
def _write_purge_filter(f):
    '''write rsync filter rules for purge/ tree
    Returns False on error
    '''

    f.write('+ /var/purge/\n')

    purge_groups = os.listdir(synctool.param.PURGE_DIR)

    # add only the group dirs that apply
    for g in synctool.param.MY_GROUPS:
        if g in purge_groups:
            purge_root = os.path.join(synctool.param.PURGE_DIR, g)
            if not os.path.isdir(purge_root):
                continue

            for path, _, files in os.walk(purge_root):
                if path == purge_root:
                    # guard against user mistakes;
                    # danger of destroying the entire filesystem
                    # if it would rsync --delete the root
                    if len(files) > 0:
                        warning('cowardly refusing to purge the root '
                                'directory')
                        stderr('please remove any files directly '
                               'under %s/' % prettypath(purge_root))
                        return False
                else:
                    f.write('+ /var/purge/%s/\n' % g)
                    break

    f.write('- /var/purge/*\n')
    return True
Example #12
    def stat(self, path):
        '''get the stat() information for a pathname'''

        if not path:
            self.entry_exists = False
            self.mode = self.uid = self.gid = self.size = None
            return

        try:
            statbuf = os.lstat(path)
        except OSError as err:
            # could be something stupid like "Permission denied" ...
            # although synctool should be run as root

            if err.errno != errno.ENOENT:
                # "No such file or directory" is a valid error
                # when the destination is missing
                stderr('error: stat(%s) failed: %s' % (path, err.strerror))

            self.entry_exists = False
            self.mode = self.uid = self.gid = self.size = None

        else:
            self.entry_exists = True

            self.mode = statbuf.st_mode
            self.uid = statbuf.st_uid
            self.gid = statbuf.st_gid
            self.size = statbuf.st_size
Example #13
def option_combinations(opt_diff, opt_single, opt_reference, opt_erase_saved,
    opt_upload, opt_suffix, opt_fix):

    '''some combinations of command-line options don't make sense;
    alert the user and abort'''

    if opt_erase_saved and (opt_diff or opt_reference or opt_upload):
        stderr("option --erase-saved can not be combined with other actions")
        sys.exit(1)

    if opt_upload and (opt_diff or opt_single or opt_reference):
        stderr("option --upload can not be combined with other actions")
        sys.exit(1)

    if opt_suffix and not opt_upload:
        stderr("option --suffix can only be used together with --upload")
        sys.exit(1)

    if opt_diff and (opt_single or opt_reference or opt_fix):
        stderr("option --diff can not be combined with other actions")
        sys.exit(1)

    if opt_reference and (opt_single or opt_fix):
        stderr("option --reference can not be combined with other actions")
        sys.exit(1)
Example #15
def ping_node(addr):
    '''ping a single node'''

    node = NODESET.get_nodename_from_address(addr)
    verbose('pinging %s' % node)
    unix_out('%s %s' % (synctool.param.PING_CMD, addr))

    packets_received = 0

    # execute ping command and show output with the nodename
    cmd = '%s %s' % (synctool.param.PING_CMD, addr)
    cmd_arr = shlex.split(cmd)

    try:
        f = subprocess.Popen(cmd_arr, shell=False, bufsize=4096,
                stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout
    except OSError as err:
        stderr('failed to run command %s: %s' % (cmd_arr[0], err.strerror))
        return False

    with f:
        for line in f:
            line = line.strip()

            # argh, we have to parse output here
            #
            # on BSD, ping says something like:
            # "2 packets transmitted, 0 packets received, 100.0% packet loss"
            #
            # on Linux, ping says something like:
            # "2 packets transmitted, 0 received, 100.0% packet loss, " \
            # "time 1001ms"

            arr = line.split()
            if len(arr) > 3 and (arr[1] == 'packets' and
                                 arr[2] == 'transmitted,'):
                try:
                    packets_received = int(arr[3])
                except ValueError:
                    pass

                break

            # some ping implementations say "hostname is alive"
            # or "hostname is unreachable"
            elif len(arr) == 3 and arr[1] == 'is':
                if arr[2] == 'alive':
                    packets_received = 100

                elif arr[2] == 'unreachable':
                    packets_received = -1

    if packets_received > 0:
        print '%-*s  up' % (MAX_DISPLAY_LEN, node)
    else:
        print '%-*s  not responding' % (MAX_DISPLAY_LEN, node)
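
A quick standalone check of the same summary-line parsing; the sample lines are illustrative, not captured ping output:

def packets_from_summary(line):
    # mirrors the parsing in ping_node() above
    arr = line.strip().split()
    if len(arr) > 3 and arr[1] == 'packets' and arr[2] == 'transmitted,':
        try:
            return int(arr[3])
        except ValueError:
            return 0
    if len(arr) == 3 and arr[1] == 'is':
        if arr[2] == 'alive':
            return 100
        if arr[2] == 'unreachable':
            return -1
    return 0

print(packets_from_summary('2 packets transmitted, 2 received, 0% packet loss, time 1001ms'))
print(packets_from_summary('2 packets transmitted, 0 packets received, 100.0% packet loss'))
print(packets_from_summary('node1 is alive'))
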
Example #16
def reference_files():
    '''show which source file in the repository synctool uses'''

    synctool.overlay.visit(synctool.param.OVERLAY_DIR, _reference_callback)

    # look in the purge/ tree, too
    visit_purge_single(_reference_callback)

    for filename in SINGLE_FILES:
        stderr('%s is not in the overlay tree' % filename)
Example #17
def diff_files():
    '''display a diff of the single files'''

    synctool.overlay.visit(synctool.param.OVERLAY_DIR, _diff_callback)

    # look in the purge/ tree, too
    visit_purge_single(_diff_callback)

    for filename in SINGLE_FILES:
        stderr('%s is not in the overlay tree' % filename)
Example #20
def config_master(arr, configfile, lineno):
    '''parse keyword: master'''

    if len(arr) != 2:
        stderr("%s:%d: 'master' requires one argument: the hostname" %
               (configfile, lineno))
        return 1

    synctool.param.MASTER = arr[1]
    return 0
Example #21
def config_master(arr, configfile, lineno):
    '''parse keyword: master'''

    if len(arr) != 2:
        stderr("%s:%d: 'master' requires one argument: the hostname" %
               (configfile, lineno))
        return 1

    param.MASTER = arr[1]
    return 0
Example #22
def make_tempdir():
    '''create temporary directory (for storing rsync filter files)'''

    if not os.path.isdir(synctool.param.TEMP_DIR):
        try:
            os.mkdir(synctool.param.TEMP_DIR, 0750)
        except OSError as err:
            stderr('failed to create tempdir %s: %s' %
                   (synctool.param.TEMP_DIR, err.strerror))
            sys.exit(-1)
Example #23
def worker_synctool(addr):
    '''run rsync of ROOTDIR to the nodes and ssh+synctool, in parallel'''

    nodename = NODESET.get_nodename_from_address(addr)

    if nodename == synctool.param.NODENAME:
        run_local_synctool()
        return

    # rsync ROOTDIR/dirs/ to the node
    # if "it wants it"
    if not (OPT_SKIP_RSYNC or nodename in synctool.param.NO_RSYNC):
        verbose('running rsync $SYNCTOOL/ to node %s' % nodename)
        unix_out('%s %s %s:%s/' % (synctool.param.RSYNC_CMD,
                                   synctool.param.ROOTDIR, addr,
                                   synctool.param.ROOTDIR))

        # make rsync filter to include the correct dirs
        tmp_filename = rsync_include_filter(nodename)

        cmd_arr = shlex.split(synctool.param.RSYNC_CMD)
        cmd_arr.append('--filter=. %s' % tmp_filename)
        cmd_arr.append('--')
        cmd_arr.append('%s/' % synctool.param.ROOTDIR)
        cmd_arr.append('%s:%s/' % (addr, synctool.param.ROOTDIR))

        # double check the rsync destination
        # our filters are like playing with fire
        if not synctool.param.ROOTDIR or (
            synctool.param.ROOTDIR == os.sep):
            stderr('cowardly refusing to rsync with rootdir == %s' %
                   synctool.param.ROOTDIR)
            sys.exit(-1)

        synctool.lib.run_with_nodename(cmd_arr, nodename)

        # delete temp file
        try:
            os.unlink(tmp_filename)
        except OSError:
            # silently ignore unlink error
            pass

    # run 'ssh node synctool_cmd'
    cmd_arr = shlex.split(synctool.param.SSH_CMD)
    cmd_arr.append('--')
    cmd_arr.append(addr)
    cmd_arr.extend(shlex.split(synctool.param.SYNCTOOL_CMD))
    cmd_arr.append('--nodename=%s' % nodename)
    cmd_arr.extend(PASS_ARGS)

    verbose('running synctool on node %s' % nodename)
    unix_out(' '.join(cmd_arr))

    synctool.lib.run_with_nodename(cmd_arr, nodename)
Example #24
def config_master(arr, configfile, lineno):
    # type: (List[str], str, int) -> int
    '''parse keyword: master'''

    if len(arr) != 2:
        stderr("%s:%d: 'master' requires one argument: the hostname" %
               (configfile, lineno))
        return 1

    param.MASTER = arr[1]
    return 0
Example #25
def config_num_proc(arr, configfile, lineno):
    '''parse keyword: num_proc'''

    err, param.NUM_PROC = _config_integer('num_proc', arr[1], configfile,
                                          lineno)

    if not err and param.NUM_PROC < 1:
        stderr("%s:%d: invalid argument for num_proc" % (configfile, lineno))
        return 1

    return err
Example #26
def _run_rsync_purge(cmd_arr):
    '''run rsync for purging
    cmd_arr holds already prepared rsync command + arguments
    Returns: None
    '''

    unix_out(' '.join(cmd_arr))

    sys.stdout.flush()
    sys.stderr.flush()
    try:
        # run rsync
        proc = subprocess.Popen(cmd_arr,
                                shell=False,
                                bufsize=4096,
                                stdout=subprocess.PIPE)
    except OSError as err:
        error('failed to run command %s: %s' % (cmd_arr[0], err.strerror))
        return

    out, _ = proc.communicate()

    if synctool.lib.VERBOSE:
        print out

    out = out.split('\n')
    for line in out:
        line = line.strip()
        if not line:
            continue

        code, filename = line.split(' ', 1)

        if code[:6] == 'ERROR:' or code[:8] == 'WARNING:':
            # output rsync errors and warnings
            stderr(line)
            continue

        if filename == './':
            # rsync has a habit of displaying ugly "./" path
            # cmd_arr[-1] is the destination path
            path = cmd_arr[-1]
        else:
            # cmd_arr[-1] is the destination path
            path = os.path.join(cmd_arr[-1], filename)

        if code[0] == '*':
            # rsync has a message for us
            # most likely "deleting"
            msg = code[1:]
            msg = msg.strip()
            stdout('%s %s (purge)' % (msg, prettypath(path)))
        else:
            stdout('%s mismatch (purge)' % prettypath(path))
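
The per-line handling above can be exercised in isolation; this simplified sketch uses made-up stand-ins for rsync output lines and a hard-coded destination path:

def classify(line, dest='/etc'):
    # simplified version of the loop body in _run_rsync_purge()
    code, filename = line.strip().split(None, 1)
    if code[:6] == 'ERROR:' or code[:8] == 'WARNING:':
        return line.strip()
    path = dest if filename == './' else dest + '/' + filename
    if code[0] == '*':
        return '%s %s (purge)' % (code[1:].strip(), path)
    return '%s mismatch (purge)' % path

print(classify('*deleting old.conf'))
print(classify('>f.st...... motd'))
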
Example #27
def config_num_proc(arr, configfile, lineno):
    '''parse keyword: num_proc'''

    (err, synctool.param.NUM_PROC) = _config_integer('num_proc', arr[1],
                                                     configfile, lineno)

    if not err and synctool.param.NUM_PROC < 1:
        stderr("%s:%d: invalid argument for num_proc" % (configfile, lineno))
        return 1

    return err
Example #28
def config_num_proc(arr, configfile, lineno):
    # type: (List[str], str, int) -> int
    '''parse keyword: num_proc'''

    err, param.NUM_PROC = _config_integer('num_proc', arr[1], configfile,
                                          lineno)

    if not err and param.NUM_PROC < 1:
        stderr("%s:%d: invalid argument for num_proc" % (configfile, lineno))
        return 1

    return err
Example #29
def check_definition(keyword, configfile, lineno):
    '''check whether a param was not defined earlier
    Returns False on error, True if OK
    '''

    if keyword in SYMBOLS:
        stderr("%s:%d: redefinition of '%s'" % (configfile, lineno, keyword))
        stderr("%s: previous definition was here" % SYMBOLS[keyword].origin())
        return False

    SYMBOLS[keyword] = Symbol(keyword, configfile, lineno)
    return True
Example #30
def single_erase_saved():
    '''erase single backup files'''

    synctool.overlay.visit(param.OVERLAY_DIR, _single_erase_saved_callback)

    if len(SINGLE_FILES) > 0:
        # there are still single files left
        # maybe they are in the delete tree?
        synctool.overlay.visit(param.DELETE_DIR, _single_erase_saved_callback)

    for filename in SINGLE_FILES:
        stderr('%s is not in the overlay tree' % filename)
Example #32
    def set_times(self, atime, mtime):
        '''set access and mod times'''

        # only used for purge --single

        if not synctool.lib.DRY_RUN:
            try:
                os.utime(self.name, (atime, mtime))
            except OSError as err:
                stderr('failed to set utime on %s : %s' % (self.name,
                                                           err.strerror))
                terse(synctool.lib.TERSE_FAIL, 'utime %s' % self.name)
Example #33
def rsync_include_filter(nodename):
    '''create temp file with rsync filter rules
    Include only those dirs that apply for this node
    Returns filename of the filter file'''

    try:
        (fd, filename) = tempfile.mkstemp(prefix='synctool-',
                                          dir=synctool.param.TEMP_DIR)
    except OSError as err:
        stderr('failed to create temp file: %s' % err.strerror)
        sys.exit(-1)

    try:
        f = os.fdopen(fd, 'w')
    except OSError as err:
        stderr('failed to open temp file: %s' % err.strerror)
        sys.exit(-1)

    # include $SYNCTOOL/var/ but exclude
    # the top overlay/ and delete/ dir
    with f:
        f.write('# synctool rsync filter\n')

        # set mygroups for this nodename
        synctool.param.NODENAME = nodename
        synctool.param.MY_GROUPS = synctool.config.get_my_groups()

        # slave nodes get a copy of the entire tree
        # all other nodes use a specific rsync filter
        if not nodename in synctool.param.SLAVES:
            if not (_write_overlay_filter(f) and
                    _write_delete_filter(f) and
                    _write_purge_filter(f)):
                # an error occurred;
                # delete temp file and exit
                f.close()
                try:
                    os.unlink(filename)
                except OSError:
                    # silently ignore unlink error
                    pass

                sys.exit(-1)

        # Note: sbin/*.pyc is excluded to keep major differences in
        # Python versions (on master vs. client node) from clashing
        f.write('- /sbin/*.pyc\n'
                '- /lib/synctool/*.pyc\n'
                '- /lib/synctool/pkg/*.pyc\n')

    # Note: remind to delete the temp file later

    return filename
Example #34
def _remote_isdir(up):
    '''See if the remote rsync source is a directory or a file
    Parameter 'up' is an instance of UploadFile
    Returns: tuple of booleans: (exists, isdir)'''

    cmd_arr = shlex.split(synctool.param.RSYNC_CMD)[:1]
    cmd_arr.append('--list-only')
    cmd_arr.append(up.address + ':' + up.filename)

    verbose('running rsync --list-only %s:%s' % (up.node, up.filename))
    unix_out(' '.join(cmd_arr))

    try:
        proc = subprocess.Popen(cmd_arr, shell=False, bufsize=4096,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
    except OSError as err:
        stderr('failed to run command %s: %s' % (cmd_arr[0], err.strerror))
        return False, False

    out, err = proc.communicate()

    if proc.returncode != 0:
        if proc.returncode == 255:
            stderr('failed to connect to %s' % up.node)
        elif proc.returncode == 23:
            stderr('error: no such file or directory')
        else:
            stderr('failed rsync %s:%s' % (up.node, up.filename))

        return False, False

    # output should be an 'ls -l' like line, with first a mode string
    for line in out.split('\n'):
        arr = line.split()
        mode = arr[0]
        if len(mode) == 10:     # crude test
            if mode[0] == 'd':
                # it's a directory
                verbose('remote rsync source is a directory')
                return True, True

            if mode[0] in '-lpcbs':
                # accept it as a file entry
                verbose('remote rsync source is a file entry')
                return True, False

        # some other line on stdout; just ignore it

    # got no good output
    stderr('failed to parse rsync --list-only output')
    return False, False
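
A standalone illustration of the crude mode-string test above; the two sample lines imitate rsync --list-only output and were not captured from a real run:

SAMPLE = (
    'drwxr-xr-x          4,096 2015/01/01 12:00:00 etc',
    '-rw-r--r--            123 2015/01/01 12:00:00 etc/motd',
)

for line in SAMPLE:
    mode = line.split()[0]
    if len(mode) == 10:     # same crude length test as above
        kind = 'directory' if mode[0] == 'd' else 'file entry'
        print('%s -> %s' % (mode, kind))
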
Example #35
def run_remote_copy(address_list, files):
    '''copy files[] to nodes[]'''

    global DSH_CP_CMD_ARR, FILES_STR

    errs = 0
    sourcelist = []
    for filename in files:
        if not filename:
            continue

        if not os.path.exists(filename):
            stderr('error: no such file or directory: %s' % filename)
            errs += 1
            continue

        # for directories, append a '/' slash
        if os.path.isdir(filename) and not filename[-1] == os.sep:
            sourcelist.append(filename + os.sep)
        else:
            sourcelist.append(filename)

    if errs > 0:
        sys.exit(-1)

    DSH_CP_CMD_ARR = shlex.split(synctool.param.RSYNC_CMD)

    if not OPT_PURGE:
        if '--delete' in DSH_CP_CMD_ARR:
            DSH_CP_CMD_ARR.remove('--delete')
        if '--delete-excluded' in DSH_CP_CMD_ARR:
            DSH_CP_CMD_ARR.remove('--delete-excluded')

    if synctool.lib.VERBOSE:
        if '-q' in DSH_CP_CMD_ARR:
            DSH_CP_CMD_ARR.remove('-q')
        if '--quiet' in DSH_CP_CMD_ARR:
            DSH_CP_CMD_ARR.remove('--quiet')

    if synctool.lib.QUIET:
        if not '-q' in DSH_CP_CMD_ARR and not '--quiet' in DSH_CP_CMD_ARR:
            DSH_CP_CMD_ARR.append('-q')

    if DSH_CP_OPTIONS:
        DSH_CP_CMD_ARR.extend(shlex.split(DSH_CP_OPTIONS))

    DSH_CP_CMD_ARR.append('--')
    DSH_CP_CMD_ARR.extend(sourcelist)

    FILES_STR = ' '.join(sourcelist)    # only used for printing

    synctool.lib.multiprocess(worker_dsh_cp, address_list)
Example #36
def _run_rsync_purge(cmd_arr):
    # type: (List[str]) -> None
    '''run rsync for purging
    cmd_arr holds already prepared rsync command + arguments
    '''

    unix_out(' '.join(cmd_arr))

    sys.stdout.flush()
    sys.stderr.flush()
    try:
        # run rsync
        proc = subprocess.Popen(cmd_arr, shell=False, bufsize=4096,
                                stdout=subprocess.PIPE)
    except OSError as err:
        error('failed to run command %s: %s' % (cmd_arr[0], err.strerror))
        return

    out, _ = proc.communicate()

    if synctool.lib.VERBOSE:
        print out

    out = out.split('\n')
    for line in out:
        line = line.strip()
        if not line:
            continue

        code, filename = line.split(' ', 1)

        if code[:6] == 'ERROR:' or code[:8] == 'WARNING:':
            # output rsync errors and warnings
            stderr(line)
            continue

        if filename == './':
            # rsync has a habit of displaying ugly "./" path
            # cmd_arr[-1] is the destination path
            path = cmd_arr[-1]
        else:
            # cmd_arr[-1] is the destination path
            path = os.path.join(cmd_arr[-1], filename)

        if code[0] == '*':
            # rsync has a message for us
            # most likely "deleting"
            msg = code[1:]
            msg = msg.strip()
            stdout('%s %s (purge)' % (msg, prettypath(path)))
        else:
            stdout('%s mismatch (purge)' % prettypath(path))
Example #37
def _config_integer(label, value, configfile, lineno, radix=10):
    '''get numeric integer value'''

    if not check_definition(label, configfile, lineno):
        return 1, 0

    try:
        n = int(value, radix)
    except ValueError:
        stderr('%s:%d: invalid argument for %s' % (configfile, lineno, label))
        return 1, 0

    return 0, n
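
The radix parameter lets a caller parse non-decimal values (octal file modes, for instance); the conversion itself is plain int():

print(int('0700', 8))     # 448, i.e. mode rwx------
print(int('64', 10))      # 64
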
Example #38
def _config_integer(param, value, configfile, lineno, radix=10):
    '''get numeric integer value'''

    if not check_definition(param, configfile, lineno):
        return (1, 0)

    try:
        n = int(value, radix)
    except ValueError:
        stderr('%s:%d: invalid argument for %s' % (configfile, lineno, param))
        return (1, 0)

    return (0, n)
Example #39
def _config_color_variant(param, value, configfile, lineno):
    '''set a color by name'''

    if not check_definition(param, configfile, lineno):
        return 1

    value = value.lower()
    if value in synctool.lib.COLORMAP.keys():
        synctool.param.TERSE_COLORS[param[6:]] = value
        return 0

    stderr('%s:%d: invalid argument for %s' % (configfile, lineno, param))
    return 1
Example #40
    def set_permissions(self):
        '''set access permission bits equal to source'''

        verbose(dryrun_msg('  os.chmod(%s, %04o)' %
                           (self.name, self.stat.mode & 07777)))
        unix_out('chmod 0%o %s' % (self.stat.mode & 07777, self.name))
        if not synctool.lib.DRY_RUN:
            try:
                os.chmod(self.name, self.stat.mode & 07777)
            except OSError as err:
                stderr('failed to chmod %04o %s : %s' %
                       (self.stat.mode & 07777, self.name, err.strerror))
                terse(synctool.lib.TERSE_FAIL, 'mode %s' % self.name)
Example #42
def _config_color_variant(label, value, configfile, lineno):
    '''set a color by name'''

    if not check_definition(label, configfile, lineno):
        return 1

    value = value.lower()
    if value in synctool.lib.COLORMAP.keys():
        param.TERSE_COLORS[label[6:]] = value
        return 0

    stderr('%s:%d: invalid argument for %s' % (configfile, lineno, label))
    return 1
Example #44
def check_node_definition(node, configfile, lineno):
    '''check whether a node was not defined earlier
    Returns False on error, True if OK
    '''

    key = 'node %s' % node

    if key in SYMBOLS:
        stderr("%s:%d: redefinition of node '%s'" % (configfile, lineno, node))
        stderr("%s: previous definition was here" % SYMBOLS[key].origin())
        return False

    SYMBOLS[key] = Symbol(node, configfile, lineno)
    return True
Example #45
def _config_color_variant(label, value, configfile, lineno):
    # type: (str, str, str, int) -> int
    '''set a color by name'''

    if not check_definition(label, configfile, lineno):
        return 1

    value = value.lower()
    if value in synctool.lib.COLORMAP.keys():
        param.TERSE_COLORS[label[6:]] = value
        return 0

    stderr('%s:%d: invalid argument for %s' % (configfile, lineno, label))
    return 1
Example #46
def single_erase_saved():
    '''erase single backup files'''

    synctool.overlay.visit(param.OVERLAY_DIR,
                           _single_erase_saved_callback)

    if len(SINGLE_FILES) > 0:
        # there are still single files left
        # maybe they are in the delete tree?
        synctool.overlay.visit(param.DELETE_DIR,
                               _single_erase_saved_callback)

    for filename in SINGLE_FILES:
        stderr('%s is not in the overlay tree' % filename)
Example #47
def option_combinations(opt_diff, opt_single, opt_reference, opt_erase_saved,
    opt_upload, opt_fix, opt_group):

    '''some combinations of command-line options don't make sense;
    alert the user and abort'''

    if opt_erase_saved and (opt_diff or opt_reference or opt_upload):
        stderr("option --erase-saved can not be combined with other actions")
        sys.exit(1)

    if opt_upload and (opt_diff or opt_single or opt_reference):
        stderr("option --upload can not be combined with other actions")
        sys.exit(1)

    if opt_upload and opt_group:
        print 'option --upload and --group can not be combined'
        sys.exit(1)

    if opt_diff and (opt_single or opt_reference or opt_fix):
        stderr("option --diff can not be combined with other actions")
        sys.exit(1)

    if opt_reference and (opt_single or opt_fix):
        stderr("option --reference can not be combined with other actions")
        sys.exit(1)
Example #48
def purge_files():
    # type: () -> None
    '''run the purge function'''

    paths = []
    purge_groups = os.listdir(param.PURGE_DIR)

    # find the source purge paths that we need to copy
    # scan only the group dirs that apply
    for g in param.MY_GROUPS:
        if g in purge_groups:
            purge_root = os.path.join(param.PURGE_DIR, g)
            if not os.path.isdir(purge_root):
                continue

            for path, subdirs, files in os.walk(purge_root):
                # rsync only purge dirs that actually contain files
                # otherwise rsync --delete would wreak havoc
                if not files:
                    continue

                if path == purge_root:
                    # root contains files; guard against user mistakes
                    # rsync --delete would destroy the whole filesystem
                    warning('cowardly refusing to purge the root directory')
                    stderr('please remove any files directly under %s/' %
                           prettypath(purge_root))
                    return

                # paths has (src_dir, dest_dir)
                paths.append((path, path[len(purge_root):]))

                # do not recurse into this dir any deeper
                del subdirs[:]

    cmd_rsync, opts_string = _make_rsync_purge_cmd()

    # call rsync to copy the purge dirs
    for src, dest in paths:
        # trailing slash on source path is important for rsync
        src += os.sep
        dest += os.sep

        cmd_arr = cmd_rsync[:]
        cmd_arr.append(src)
        cmd_arr.append(dest)

        verbose('running rsync%s%s %s' % (opts_string, prettypath(src), dest))
        _run_rsync_purge(cmd_arr)
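
The (src_dir, dest_dir) pairs above are built by cutting off the purge_root prefix; a quick illustration with made-up paths:

purge_root = '/opt/synctool/var/purge/web'
path = '/opt/synctool/var/purge/web/etc/cron.d'
print((path, path[len(purge_root):]))
# prints: ('/opt/synctool/var/purge/web/etc/cron.d', '/etc/cron.d')
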
Example #49
def _config_boolean(param, value, configfile, lineno):
    '''a boolean parameter can be "true|false|yes|no|on|off|1|0"'''

    if not check_definition(param, configfile, lineno):
        return (1, False)

    value = value.lower()
    if value in synctool.param.BOOLEAN_VALUE_TRUE:
        return (0, True)

    elif value in synctool.param.BOOLEAN_VALUE_FALSE:
        return (0, False)

    stderr('%s:%d: invalid argument for %s' % (configfile, lineno, param))
    return (1, False)
Example #50
def check_group_definition(group, configfile, lineno):
    '''check whether a group was not defined earlier
    Returns False on error, True if OK
    '''

    key = 'group %s' % group

    if key in SYMBOLS:
        stderr("%s:%d: redefinition of group '%s'" %
               (configfile, lineno, group))
        stderr("%s: previous definition was here" % SYMBOLS[key].origin())
        return False

    SYMBOLS[key] = Symbol(group, configfile, lineno)
    return True
Example #51
def _config_boolean(label, value, configfile, lineno):
    '''a boolean parameter can be "true|false|yes|no|on|off|1|0"'''

    if not check_definition(label, configfile, lineno):
        return 1, False

    value = value.lower()
    if value in param.BOOLEAN_VALUE_TRUE:
        return 0, True

    elif value in param.BOOLEAN_VALUE_FALSE:
        return 0, False

    stderr('%s:%d: invalid argument for %s' % (configfile, lineno, label))
    return 1, False
Example #52
def config_tempdir(arr, configfile, lineno):
    '''parse keyword: tempdir'''

    if not check_definition(arr[0], configfile, lineno):
        return 1

    d = ' '.join(arr[1:])
    d = synctool.lib.prepare_path(d)

    if not os.path.isabs(d):
        stderr("%s:%d: tempdir must be an absolute path" %
               (configfile, lineno))
        return 1

    synctool.param.TEMP_DIR = d
    return 0
Example #53
def config_ssh_control_persist(arr, configfile, lineno):
    '''parse keyword: ssh_control_persist'''

    if len(arr) != 2:
        stderr("%s:%d: 'ssh_control_persist' requires a single argument" %
               (configfile, lineno))
        return 1

    persist = arr[1].lower()

    m = PERSIST_TIME.match(persist)
    if not m:
        stderr("%s:%d: invalid value '%s'" % (configfile, lineno, persist))
        return 1

    synctool.param.CONTROL_PERSIST = persist
    return 0
Example #54
def config_tempdir(arr, configfile, lineno):
    # type: (List[str], str, int) -> int
    '''parse keyword: tempdir'''

    if not check_definition(arr[0], configfile, lineno):
        return 1

    d = ' '.join(arr[1:])
    d = synctool.lib.prepare_path(d)

    if not os.path.isabs(d):
        stderr("%s:%d: tempdir must be an absolute path" % (configfile,
                                                            lineno))
        return 1

    param.TEMP_DIR = d
    return 0
Example #55
def single_files():
    '''check/update a list of single files'''

    synctool.overlay.visit(param.OVERLAY_DIR, _single_overlay_callback)

    # For files that were not found, look in the purge/ tree
    # Any overlay-ed files have already been removed from SINGLE_FILES
    # So purge/ won't overrule overlay/
    visit_purge_single(_single_purge_callback)

    if len(SINGLE_FILES) > 0:
        # there are still single files left
        # maybe they are in the delete tree?
        synctool.overlay.visit(param.DELETE_DIR, _single_delete_callback)

    for filename in SINGLE_FILES:
        stderr('%s is not in the overlay tree' % filename)
Example #56
def _config_command(param, arr, short_cmd, configfile, lineno):
    '''helper for configuring rsync_cmd, ssh_cmd, synctool_cmd, etc.'''

    if not check_definition(param, configfile, lineno):
        return (1, None)

    if len(arr) < 2:
        stderr("%s:%d: '%s' requires an argument: "
               "the '%s' command, and any appropriate switches" %
               (configfile, lineno, param, short_cmd))
        return (1, None)

    # This function does not check the existence of the command
    # That is deferred until later; the client only runs diff_cmd,
    # while the master runs a bunch of commands

    return (0, synctool.lib.prepare_path(' '.join(arr[1:])))
Example #57
def config_package_manager(arr, configfile, lineno):
    '''parse keyword: package_manager'''

    if len(arr) < 2:
        stderr("%s:%d: 'package_manager' requires an argument" %
               (configfile, lineno))
        return 1

    if not check_definition(arr[0], configfile, lineno):
        return 1

    if not arr[1] in synctool.param.KNOWN_PACKAGE_MANAGERS:
        stderr("%s:%d: unknown or unsupported package manager '%s'" %
               (configfile, lineno, arr[1]))
        return 1

    synctool.param.PACKAGE_MANAGER = arr[1]
    return 0
Example #58
def config_ignore_group(arr, configfile, lineno):
    # type: (List[str], str, int) -> int
    '''parse keyword: ignore_group'''

    if len(arr) < 2:
        stderr("%s:%d: '%s' requires 1 argument: the groupname to ignore" %
               (configfile, lineno, arr[0]))
        return 1

    errors = 0

    for group in arr[1:]:
        # range expression syntax: 'group generator'
        if '[' in group:
            try:
                for expanded_group in synctool.range.expand(group):
                    if '[' in expanded_group:
                        raise RuntimeError("bug: expanded range contains "
                                           "'[' character")

                    expanded_arr = ['ignore_group', expanded_group]
                    # recurse
                    if config_ignore_group(expanded_arr, configfile,
                                           lineno) != 0:
                        return 1
            except synctool.range.RangeSyntaxError as err:
                stderr("%s:%d: %s" % (configfile, lineno, err))
                return 1

            return 0

        if not spellcheck(group):
            stderr("%s:%d: invalid group name '%s'" % (configfile, lineno,
                                                       group))
            errors += 1
            continue

        if group == 'none':
            continue

        if group in ('all', 'template'):
            stderr("%s:%d: illegal to ignore '%s'" % (configfile, lineno,
                                                      group))
            errors += 1
            continue

        param.IGNORE_GROUPS.add(group)

        # add any (yet) unknown group names to the group_defs dict
        if group not in param.GROUP_DEFS:
            param.GROUP_DEFS[group] = None

    return errors
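
The range-expression branch above defers to synctool.range.expand(); as a rough idea of what a numeric range expands to, here is a simplified stand-in (the real module supports richer syntax and raises RangeSyntaxError on bad input):

import re

def expand_simple(expr):
    # only handles a single 'prefix[low-high]suffix' pattern
    m = re.match(r'^(.*)\[(\d+)-(\d+)\](.*)$', expr)
    if not m:
        return [expr]
    prefix, low, high, suffix = (m.group(1), int(m.group(2)),
                                 int(m.group(3)), m.group(4))
    return ['%s%d%s' % (prefix, n, suffix) for n in range(low, high + 1)]

print(expand_simple('rack[1-3]'))     # ['rack1', 'rack2', 'rack3']
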
Example #59
def config_package_manager(arr, configfile, lineno):
    # type: (List[str], str, int) -> int
    '''parse keyword: package_manager'''

    if len(arr) < 2:
        stderr("%s:%d: 'package_manager' requires an argument" %
               (configfile, lineno))
        return 1

    if not check_definition(arr[0], configfile, lineno):
        return 1

    if arr[1] not in param.KNOWN_PACKAGE_MANAGERS:
        stderr("%s:%d: unknown or unsupported package manager '%s'" %
               (configfile, lineno, arr[1]))
        return 1

    param.PACKAGE_MANAGER = arr[1]
    return 0
Example #60
def config_ignore(arr, configfile, lineno):
    '''parse keyword: ignore'''

    if len(arr) < 2:
        stderr("%s:%d: 'ignore' requires at least 1 argument: "
               "the file or directory to ignore" % (configfile, lineno))
        return 1

    for fn in arr[1:]:
        # if fn has wildcards, put it in array IGNORE_FILES_WITH_WILDCARDS
        if (fn.find('*') >= 0 or fn.find('?') >= 0
                or (fn.find('[') >= 0 and fn.find(']') >= 0)):
            if not fn in synctool.param.IGNORE_FILES_WITH_WILDCARDS:
                synctool.param.IGNORE_FILES_WITH_WILDCARDS.append(fn)
        else:
            # no wildcards, do a regular ignore
            synctool.param.IGNORE_FILES.add(fn)

    return 0