Example #1
    def compare(self, _src_path, dest_stat):
        # type: (str, SyncStat) -> bool
        '''see if devs are the same'''

        if not self.exists:
            return False

        # dest_stat is a SyncStat object and it's useless here
        # I need a real, fresh statbuf that includes st_rdev field
        try:
            dest_stat = os.lstat(self.name)
        except OSError as err:
            error('error checking %s : %s' % (self.name, err.strerror))
            return False

        # Note: mypy triggers false errors here
        # Also, no luck with Union[SyncStat, posix.stat_result]
        # In any case, for VNodeChrDev and VNodeBlkDev,
        # the self.src_stat is of type posix.stat_result
        src_major = os.major(self.src_stat.st_rdev)     # type: ignore
        src_minor = os.minor(self.src_stat.st_rdev)     # type: ignore
        dest_major = os.major(dest_stat.st_rdev)        # type: ignore
        dest_minor = os.minor(dest_stat.st_rdev)        # type: ignore
        if src_major != dest_major or src_minor != dest_minor:
            stdout('%s should have major,minor %d,%d but has %d,%d' %
                   (self.name, src_major, src_minor, dest_major, dest_minor))
            unix_out('# updating major,minor %s' % self.name)
            terse(synctool.lib.TERSE_SYNC, self.name)
            return False

        return True
Example #2
    def check_purge_timestamp(self):
        # type: () -> bool
        '''check timestamp between src and dest
        Returns True if same, False if not
        '''

        # This is only used for purge/
        # check() has already determined that the files are the same
        # Now only check the timestamp ...

        if synctool.param.SYNC_TIMES:
            # this was already handled by check() and fix()
            return True

        # set times, but not for symlinks, directories
        if (not self.src_stat.is_link() and not self.src_stat.is_dir() and
                self.src_stat.mtime != self.dest_stat.mtime):
            stdout('%s mismatch (only timestamp)' % self.dest_path)
            terse(synctool.lib.TERSE_WARNING,
                  '%s (only timestamp)' % self.dest_path)

            vnode = self.vnode_obj()
            # leave the atime intact
            vnode.stat.atime = self.dest_stat.atime
            vnode.set_times()
            return False

        return True
Example #3
    def compare(self, _src_path, dest_stat):
        # type: (str, SyncStat) -> bool
        '''see if devs are the same'''

        if not self.exists:
            return False

        # dest_stat is a SyncStat object and it's useless here
        # I need a real, fresh statbuf that includes st_rdev field
        try:
            dest_stat = os.lstat(self.name)
        except OSError as err:
            error('error checking %s : %s' % (self.name, err.strerror))
            return False

        src_major = os.major(self.src_stat.st_rdev)     # type: ignore
        src_minor = os.minor(self.src_stat.st_rdev)     # type: ignore
        dest_major = os.major(dest_stat.st_rdev)        # type: ignore
        dest_minor = os.minor(dest_stat.st_rdev)        # type: ignore
        if src_major != dest_major or src_minor != dest_minor:
            stdout('%s should have major,minor %d,%d but has %d,%d' %
                   (self.name, src_major, src_minor, dest_major, dest_minor))
            unix_out('# updating major,minor %s' % self.name)
            terse(synctool.lib.TERSE_SYNC, self.name)
            return False

        return True
Example #4
def _single_overlay_callback(obj, post_dict, updated, *args):
    '''do overlay function for single files'''

    if obj.ov_type == synctool.overlay.OV_TEMPLATE:
        return generate_template(obj, post_dict), False

    go_on = True

    if _match_single(obj.dest_path):
        _, updated = _overlay_callback(obj, post_dict, False, *args)
        if not updated:
            stdout('%s is up to date' % obj.dest_path)
            terse(synctool.lib.TERSE_OK, obj.dest_path)
            unix_out('# %s is up to date\n' % obj.dest_path)
        else:
            # register .post on the parent dir, if it has a .post script
            obj.dest_path = os.path.dirname(obj.dest_path)
            obj.dest_stat = synctool.syncstat.SyncStat(obj.dest_path)

            if obj.dest_path in post_dict:
                changed_dict = args[0]
                changed_dict[obj.dest_path] = (obj, post_dict[obj.dest_path])

        if not SINGLE_FILES:
            return False, updated

    return go_on, updated
Example #5
def worker_dsh_cp(addr):
    '''do remote copy to node'''

    nodename = NODESET.get_nodename_from_address(addr)
    if nodename == synctool.param.NODENAME:
        # do not copy to local node; files are already here
        return

    # the fileset already has been added to DSH_CP_CMD_ARR

    # create local copy of DSH_CP_CMD_ARR
    # or parallelism may screw things up
    dsh_cp_cmd_arr = DSH_CP_CMD_ARR[:]
    dsh_cp_cmd_arr.append('%s:%s' % (addr, DESTDIR))

    msg = 'copy %s to %s' % (FILES_STR, DESTDIR)
    if synctool.lib.DRY_RUN:
        msg += ' (dry run)'
    if synctool.lib.OPT_NODENAME:
        msg = ('%s: ' % nodename) + msg
    stdout(msg)

    unix_out(' '.join(dsh_cp_cmd_arr))

    if not synctool.lib.DRY_RUN:
        synctool.lib.run_with_nodename(dsh_cp_cmd_arr, nodename)
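
The slice DSH_CP_CMD_ARR[:] above gives each worker a private copy of the shared command list before the per-node destination is appended; appending to the module-level list directly would let parallel workers leak arguments into each other's copy commands. A minimal standalone sketch of the same pattern (BASE_CMD and build_cmd are illustrative names, not part of synctool):

# illustrative sketch, not synctool source
# shared base command; workers must never append to this list directly
BASE_CMD = ['rsync', '-a']

def build_cmd(addr, destdir):
    '''return a private command list for one worker'''
    cmd = BASE_CMD[:]                       # shallow copy is enough here
    cmd.append('%s:%s' % (addr, destdir))   # per-node destination
    return cmd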
Example #6
    def compare(self, src_path, dest_stat):
        '''see if devs are the same'''

        if not self.exists:
            return False

        # dest_stat is a SyncStat object and it's useless here
        # I need a real, fresh statbuf that includes st_rdev field
        try:
            dest_stat = os.lstat(self.name)
        except OSError as err:
            error('error checking %s : %s' % (self.name, err.strerror))
            return False

        src_major = os.major(self.src_stat.st_rdev)
        src_minor = os.minor(self.src_stat.st_rdev)
        dest_major = os.major(dest_stat.st_rdev)
        dest_minor = os.minor(dest_stat.st_rdev)
        if src_major != dest_major or src_minor != dest_minor:
            stdout('%s should have major,minor %d,%d but has %d,%d' %
                (self.name, src_major, src_minor, dest_major, dest_minor))
            unix_out('# updating major,minor %s' % self.name)
            terse(synctool.lib.TERSE_SYNC, self.name)
            return False

        return True
Example #7
    def check_purge_timestamp(self):
        '''check timestamp between src and dest
        Returns True if same, False if not
        '''

        # This is only used for purge/
        # check() has already determined that the files are the same
        # Now only check the timestamp ...

        if synctool.param.SYNC_TIMES:
            # this was already handled by check() and fix()
            return True

        # set times, but not for symlinks, directories
        if (not self.src_stat.is_link() and not self.src_stat.is_dir() and
                self.src_stat.mtime != self.dest_stat.mtime):
            stdout('%s mismatch (only timestamp)' % self.dest_path)
            terse(synctool.lib.TERSE_WARNING,
                  '%s (only timestamp)' % self.dest_path)

            vnode = self.vnode_obj()
            # leave the atime intact
            vnode.stat.atime = self.dest_stat.atime
            vnode.set_times()
            return False

        return True
Example #8
def _single_purge_callback(obj, pre_dict, post_dict):
    # type: (SyncObject, Dict[str, str], Dict[str, str]) -> Tuple[bool, bool]
    '''do purge function for single files'''

    # The same as _single_overlay_callback(), except that
    # purge entries may differ in timestamp. synctool has to report
    # this because pure rsync will as well (which is bloody annoying)
    #
    # For normal synctool overlay/, it's regarded as not important
    # and synctool will not complain about it
    #
    # This actually leaves a final wart; synctool --single may create
    # purge entries that rsync will complain about and sync again
    # Anyway, I don't think it's a big deal, and that's what you get
    # when you mix up synctool and rsync

    go_on = True
    updated = False

    if _match_single(obj.dest_path):
        _, updated = _overlay_callback(obj, pre_dict, post_dict)
        if not updated:
            if obj.check_purge_timestamp():
                stdout('%s is up to date' % obj.dest_path)
                terse(synctool.lib.TERSE_OK, obj.dest_path)
                unix_out('# %s is up to date\n' % obj.dest_path)
            # else: pass

        if not SINGLE_FILES:
            return False, updated

    return go_on, updated
Example #9
    def _compare_checksums(self, src_path):
        # type: (str) -> bool
        '''compare checksum of src_path and dest: self.name
        Return True if the same'''

        try:
            f1 = open(src_path, 'rb')
        except IOError as err:
            error('failed to open %s : %s' % (src_path, err.strerror))
            # return True because we can't fix an error in src_path
            return True

        sum1 = hashlib.md5()
        sum2 = hashlib.md5()

        with f1:
            try:
                f2 = open(self.name, 'rb')
            except IOError as err:
                error('failed to open %s : %s' % (self.name, err.strerror))
                return False

            with f2:
                ended = False
                while not ended and (sum1.digest() == sum2.digest()):
                    try:
                        data1 = f1.read(IO_SIZE)
                    except IOError as err:
                        error('failed to read file %s: %s' % (src_path,
                                                              err.strerror))
                        return False

                    if not data1:
                        ended = True
                    else:
                        sum1.update(data1)

                    try:
                        data2 = f2.read(IO_SIZE)
                    except IOError as err:
                        error('failed to read file %s: %s' % (self.name,
                                                              err.strerror))
                        return False

                    if not data2:
                        ended = True
                    else:
                        sum2.update(data2)

        if sum1.digest() != sum2.digest():
            if synctool.lib.DRY_RUN:
                stdout('%s mismatch (MD5 checksum)' % self.name)
            else:
                stdout('%s updated (MD5 mismatch)' % self.name)

            unix_out('# updating file %s' % self.name)
            terse(synctool.lib.TERSE_SYNC, self.name)
            return False

        return True
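
The method above streams both files in IO_SIZE blocks and stops as soon as the running MD5 digests diverge, so large files that differ early are not read to the end. A condensed, self-contained sketch of that chunked comparison (the function name and block size are illustrative):

import hashlib

def same_md5(path1, path2, blocksize=16 * 1024):
    '''return True if both files carry the same MD5 checksum
    (illustrative sketch, not synctool source)'''
    sum1 = hashlib.md5()
    sum2 = hashlib.md5()
    with open(path1, 'rb') as f1, open(path2, 'rb') as f2:
        while True:
            data1 = f1.read(blocksize)
            data2 = f2.read(blocksize)
            sum1.update(data1)
            sum2.update(data2)
            if sum1.digest() != sum2.digest():
                return False            # first differing block: stop reading
            if not data1 and not data2:
                return True             # both files ended with equal digests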
Example #10
def _run_rsync_purge(cmd_arr):
    '''run rsync for purging
    cmd_arr holds already prepared rsync command + arguments
    Returns: None
    '''

    unix_out(' '.join(cmd_arr))

    sys.stdout.flush()
    sys.stderr.flush()
    try:
        # run rsync
        proc = subprocess.Popen(cmd_arr,
                                shell=False,
                                bufsize=4096,
                                stdout=subprocess.PIPE)
    except OSError as err:
        error('failed to run command %s: %s' % (cmd_arr[0], err.strerror))
        return

    out, _ = proc.communicate()

    if synctool.lib.VERBOSE:
        print out

    out = out.split('\n')
    for line in out:
        line = line.strip()
        if not line:
            continue

        code, filename = line.split(' ', 1)

        if code[:6] == 'ERROR:' or code[:8] == 'WARNING:':
            # output rsync errors and warnings
            stderr(line)
            continue

        if filename == './':
            # rsync has a habit of displaying ugly "./" path
            # cmd_arr[-1] is the destination path
            path = cmd_arr[-1]
        else:
            # cmd_arr[-1] is the destination path
            path = os.path.join(cmd_arr[-1], filename)

        if code[0] == '*':
            # rsync has a message for us
            # most likely "deleting"
            msg = code[1:]
            msg = msg.strip()
            stdout('%s %s (purge)' % (msg, prettypath(path)))
        else:
            stdout('%s mismatch (purge)' % prettypath(path))
Example #11
def _run_rsync_purge(cmd_arr):
    # type: (List[str]) -> None
    '''run rsync for purging
    cmd_arr holds already prepared rsync command + arguments
    '''

    unix_out(' '.join(cmd_arr))

    sys.stdout.flush()
    sys.stderr.flush()
    try:
        # run rsync
        proc = subprocess.Popen(cmd_arr, shell=False, bufsize=4096,
                                stdout=subprocess.PIPE)
    except OSError as err:
        error('failed to run command %s: %s' % (cmd_arr[0], err.strerror))
        return

    out, _ = proc.communicate()

    if synctool.lib.VERBOSE:
        print out

    out = out.split('\n')
    for line in out:
        line = line.strip()
        if not line:
            continue

        code, filename = line.split(' ', 1)

        if code[:6] == 'ERROR:' or code[:8] == 'WARNING:':
            # output rsync errors and warnings
            stderr(line)
            continue

        if filename == './':
            # rsync has a habit of displaying ugly "./" path
            # cmd_arr[-1] is the destination path
            path = cmd_arr[-1]
        else:
            # cmd_arr[-1] is the destination path
            path = os.path.join(cmd_arr[-1], filename)

        if code[0] == '*':
            # rsync has a message for us
            # most likely "deleting"
            msg = code[1:]
            msg = msg.strip()
            stdout('%s %s (purge)' % (msg, prettypath(path)))
        else:
            stdout('%s mismatch (purge)' % prettypath(path))
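
The loop above classifies each line of rsync's output by its first token: 'ERROR:' and 'WARNING:' lines are passed straight through, a leading '*' marks an informational message such as 'deleting', and everything else is reported as a purge mismatch. The same per-line decision, condensed into a standalone helper (classify_rsync_line is an illustrative name, not synctool code):

import os

def classify_rsync_line(line, destpath):
    '''return a (kind, text) pair for one non-empty "code filename" line
    (illustrative sketch, not synctool source)'''
    code, filename = line.split(' ', 1)
    if code.startswith('ERROR:') or code.startswith('WARNING:'):
        return 'error', line
    # rsync reports the destination directory itself as './'
    if filename == './':
        path = destpath
    else:
        path = os.path.join(destpath, filename)
    if code[0] == '*':
        # e.g. '*deleting some/file'
        return 'message', '%s %s' % (code[1:].strip(), path)
    return 'mismatch', path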
Example #12
def upload(up):
    # type: (UploadFile) -> None
    '''copy a file from a node into the overlay/ tree'''

    # Note: this global is only needed because of callback fn ...
    global GLOBAL_UPLOAD_FILE

    if up.filename[0] != os.sep:
        error('the filename to upload must be an absolute path')
        sys.exit(-1)

    if up.suffix and up.suffix not in synctool.param.ALL_GROUPS:
        error("no such group '%s'" % up.suffix)
        sys.exit(-1)

    if up.overlay and up.overlay not in synctool.param.ALL_GROUPS:
        error("no such group '%s'" % up.overlay)
        sys.exit(-1)

    if up.purge and up.purge not in synctool.param.ALL_GROUPS:
        error("no such group '%s'" % up.purge)
        sys.exit(-1)

    if synctool.lib.DRY_RUN and not synctool.lib.QUIET:
        stdout('DRY RUN, not uploading any files')
        terse(synctool.lib.TERSE_DRYRUN, 'not uploading any files')

    if up.purge != None:
        rsync_upload(up)
        return

    # pretend that the current node is now the given node;
    # this is needed for find() to find the best reference for the file
    orig_nodename = synctool.param.NODENAME
    synctool.param.NODENAME = up.node
    synctool.config.insert_group(up.node, up.node)

    orig_my_groups = synctool.param.MY_GROUPS[:]
    synctool.param.MY_GROUPS = synctool.config.get_my_groups()

    # see if file is already in the repository
    # Note: ugly global is needed because of callback function
    GLOBAL_UPLOAD_FILE = up
    synctool.overlay.visit(synctool.param.OVERLAY_DIR, _upload_callback)
    up = GLOBAL_UPLOAD_FILE

    synctool.param.NODENAME = orig_nodename
    synctool.param.MY_GROUPS = orig_my_groups

    rsync_upload(up)
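
upload() temporarily pretends to be the target node by swapping synctool.param.NODENAME and MY_GROUPS, then restores both once the overlay tree has been visited. A small sketch of that save-and-restore pattern wrapped in try/finally, so the originals come back even if the wrapped call raises (the helper and its arguments are illustrative, not synctool's actual API):

# illustrative sketch, not synctool source
def with_node_identity(param, node, get_groups, action):
    '''run action() while pretending to be the given node'''
    orig_nodename = param.NODENAME
    orig_groups = param.MY_GROUPS[:]
    param.NODENAME = node
    param.MY_GROUPS = get_groups()
    try:
        return action()
    finally:
        # always restore the original identity
        param.NODENAME = orig_nodename
        param.MY_GROUPS = orig_groups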
Example #13
    def compare(self, src_path, dest_stat):
        '''see if files are the same
        Return True if the same'''

        if self.stat.size != dest_stat.size:
            if synctool.lib.DRY_RUN:
                stdout('%s mismatch (file size)' % self.name)
            else:
                stdout('%s updated (file size mismatch)' % self.name)
            terse(synctool.lib.TERSE_SYNC, self.name)
            unix_out('# updating file %s' % self.name)
            return False

        return self._compare_checksums(src_path)
Example #14
def upload(up):
    '''copy a file from a node into the overlay/ tree'''

    # Note: this global is only needed because of callback fn ...
    global GLOBAL_UPLOAD_FILE

    if up.filename[0] != os.sep:
        error('the filename to upload must be an absolute path')
        sys.exit(-1)

    if up.suffix and up.suffix not in synctool.param.ALL_GROUPS:
        error("no such group '%s'" % up.suffix)
        sys.exit(-1)

    if up.overlay and up.overlay not in synctool.param.ALL_GROUPS:
        error("no such group '%s'" % up.overlay)
        sys.exit(-1)

    if up.purge and up.purge not in synctool.param.ALL_GROUPS:
        error("no such group '%s'" % up.purge)
        sys.exit(-1)

    if synctool.lib.DRY_RUN and not synctool.lib.QUIET:
        stdout('DRY RUN, not uploading any files')
        terse(synctool.lib.TERSE_DRYRUN, 'not uploading any files')

    if up.purge != None:
        rsync_upload(up)
        return

    # pretend that the current node is now the given node;
    # this is needed for find() to find the best reference for the file
    orig_nodename = synctool.param.NODENAME
    synctool.param.NODENAME = up.node
    synctool.config.insert_group(up.node, up.node)

    orig_my_groups = synctool.param.MY_GROUPS[:]
    synctool.param.MY_GROUPS = synctool.config.get_my_groups()

    # see if file is already in the repository
    # Note: ugly global is needed because of callback function
    GLOBAL_UPLOAD_FILE = up
    synctool.overlay.visit(synctool.param.OVERLAY_DIR, _upload_callback)
    up = GLOBAL_UPLOAD_FILE

    synctool.param.NODENAME = orig_nodename
    synctool.param.MY_GROUPS = orig_my_groups

    rsync_upload(up)
Example #15
    def check(self):
        '''check differences between src and dest,
        Return a FIX_xxx code
        '''

        # src_path is under $overlay/
        # dest_path is in the filesystem

        vnode = None

        if not self.dest_stat.exists():
            stdout('%s does not exist' % self.dest_path)
            return SyncObject.FIX_CREATE

        src_type = self.src_stat.filetype()
        dest_type = self.dest_stat.filetype()
        if src_type != dest_type:
            # entry is of a different file type
            vnode = self.vnode_obj()
            stdout('%s should be a %s' % (self.dest_path, vnode.typename()))
            terse(synctool.lib.TERSE_WARNING, 'wrong type %s' %
                                               self.dest_path)
            return SyncObject.FIX_TYPE

        vnode = self.vnode_obj()
        if not vnode.compare(self.src_path, self.dest_stat):
            # content is different; change the entire object
            log('updating %s' % self.dest_path)
            return SyncObject.FIX_UPDATE

        # check ownership and permissions
        # rectify if needed
        fix_action = 0
        if ((self.src_stat.uid != self.dest_stat.uid) or
            (self.src_stat.gid != self.dest_stat.gid)):
            stdout('%s should have owner %s.%s (%d.%d), '
                   'but has %s.%s (%d.%d)' % (self.dest_path,
                   self.src_stat.ascii_uid(),
                   self.src_stat.ascii_gid(),
                   self.src_stat.uid, self.src_stat.gid,
                   self.dest_stat.ascii_uid(),
                   self.dest_stat.ascii_gid(),
                   self.dest_stat.uid, self.dest_stat.gid))
            terse(synctool.lib.TERSE_OWNER, '%s.%s %s' %
                                            (self.src_stat.ascii_uid(),
                                             self.src_stat.ascii_gid(),
                                             self.dest_path))
            fix_action = SyncObject.FIX_OWNER

        if self.src_stat.mode != self.dest_stat.mode:
            stdout('%s should have mode %04o, but has %04o' %
                   (self.dest_path, self.src_stat.mode & 07777,
                    self.dest_stat.mode & 07777))
            terse(synctool.lib.TERSE_MODE, '%04o %s' %
                                           (self.src_stat.mode & 07777,
                                            self.dest_path))
            fix_action |= SyncObject.FIX_MODE

        return fix_action
Example #16
def check():
    '''check for newer version on the website
    It does this by downloading the LATEST.txt versioning file
    Returns True if newer available, else False'''

    latest_version = get_latest_version()

    if latest_version == synctool.param.VERSION:
        stdout('You are running the latest version of synctool')
        return False
    else:
        stdout('A newer version of synctool is available: '
            'version %s' % latest_version)

    return True
Example #17
    def compare(self, src_path, dest_stat):
        '''see if files are the same
        Return True if the same
        '''

        if self.stat.size != dest_stat.size:
            if synctool.lib.DRY_RUN:
                stdout('%s mismatch (file size)' % self.name)
            else:
                stdout('%s updated (file size mismatch)' % self.name)
            terse(synctool.lib.TERSE_SYNC, self.name)
            unix_out('# updating file %s' % self.name)
            return False

        return self._compare_checksums(src_path)
Example #18
def check():
    '''check for newer version on the website
    It does this by downloading the LATEST.txt versioning file
    Returns True if newer available, else False
    '''

    latest_version = get_latest_version()

    if latest_version == synctool.param.VERSION:
        stdout('You are running the latest version of synctool')
        return False
    else:
        stdout('A newer version of synctool is available: '
               'version %s' % latest_version)

    return True
Example #19
    def check_purge_timestamp(self):
        '''check timestamp between src and dest
        Returns True if same, False if not
        '''

        # This is only used for purge/
        # check() has already determined that the files are the same
        # Now only check the timestamp ...
        # FIXME have SyncStat time fields
        # Note that SyncStat objects do not know the timestamps;
        # they are not cached only to save memory
        # So now we have to os.stat() again to get the times; it is
        # not a big problem because this func is used for purge_single only

        # src_path is under $purge/
        # dest_path is in the filesystem

        try:
            src_stat = os.lstat(self.src_path)
        except OSError as err:
            error('stat(%s) failed: %s' % (self.src_path, err.strerror))
            return False

        try:
            dest_stat = os.lstat(self.dest_path)
        except OSError as err:
            error('stat(%s) failed: %s' % (self.dest_path, err.strerror))
            return False

        # FIXME set_times() should not be called for symlinks
        if src_stat.st_mtime > dest_stat.st_mtime:
            stdout('%s mismatch (only timestamp)' % self.dest_path)
            terse(synctool.lib.TERSE_WARNING,
                  '%s (only timestamp)' % self.dest_path)

            verbose(dryrun_msg('  os.utime(%s, %s)'
                               '' % (self.dest_path,
                                     time.ctime(src_stat.st_mtime))))
            unix_out('touch -r %s %s' % (self.src_path, self.dest_path))

            vnode = self.vnode_obj()
            vnode.set_times(src_stat.st_atime, src_stat.st_mtime)
            return False

        return True
Example #20
    def check_purge_timestamp(self):
        '''check timestamp between src and dest
        Returns True if same, False if not
        '''

        # This is only used for purge/
        # check() has already determined that the files are the same
        # Now only check the timestamp ...
        # Note that SyncStat objects do not know the timestamps;
        # they are not cached only to save memory
        # So now we have to os.stat() again to get the times; it is
        # not a big problem because this func is used for purge_single only

        # src_path is under $purge/
        # dest_path is in the filesystem

        try:
            src_stat = os.lstat(self.src_path)
        except OSError as err:
            error('stat(%s) failed: %s' % (self.src_path, err.strerror))
            return False

        try:
            dest_stat = os.lstat(self.dest_path)
        except OSError as err:
            error('stat(%s) failed: %s' % (self.dest_path, err.strerror))
            return False

        if src_stat.st_mtime > dest_stat.st_mtime:
            stdout('%s mismatch (only timestamp)' % self.dest_path)
            terse(synctool.lib.TERSE_WARNING,
                  '%s (only timestamp)' % self.dest_path)

            verbose(
                dryrun_msg('  os.utime(%s, %s)'
                           '' %
                           (self.dest_path, time.ctime(src_stat.st_mtime))))
            unix_out('touch -r %s %s' % (self.src_path, self.dest_path))

            vnode = self.vnode_obj()
            vnode.set_times(src_stat.st_atime, src_stat.st_mtime)
            return False

        return True
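
When the timestamps differ, the code above copies the source times onto the destination (the generated shell equivalent is 'touch -r'). A bare-bones sketch of that operation with os.utime, assuming a regular file rather than a symlink (copy_times is an illustrative name, not synctool code):

import os

def copy_times(src_path, dest_path):
    '''give dest_path the same atime and mtime as src_path
    (illustrative sketch, not synctool source)'''
    st = os.lstat(src_path)
    # os.utime() follows symlinks, so only use this on regular files
    os.utime(dest_path, (st.st_atime, st.st_mtime))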
Example #21
    def compare(self, src_path, dest_stat):
        '''compare symbolic links'''

        if not self.exists:
            return False

        try:
            link_to = os.readlink(self.name)
        except OSError as err:
            error('failed to read symlink %s : %s' % (self.name, err.strerror))
            return False

        if self.oldpath != link_to:
            stdout('%s should point to %s, but points to %s' %
                   (self.name, self.oldpath, link_to))
            terse(synctool.lib.TERSE_LINK, self.name)
            return False

        return True
Example #22
    def compare(self, src_path, dest_stat):
        '''compare symbolic links'''

        if not self.exists:
            return False

        try:
            link_to = os.readlink(self.name)
        except OSError as err:
            error('failed to read symlink %s : %s' % (self.name,
                                                      err.strerror))
            return False

        if self.oldpath != link_to:
            stdout('%s should point to %s, but points to %s' %
                   (self.name, self.oldpath, link_to))
            terse(synctool.lib.TERSE_LINK, self.name)
            return False

        return True
Example #23
    def harddelete(self):
        '''delete directory'''

        if synctool.lib.DRY_RUN:
            not_str = 'not '
        else:
            not_str = ''

        stdout('%sremoving %s' % (not_str, self.name + os.sep))
        unix_out('rmdir %s' % self.name)
        terse(synctool.lib.TERSE_DELETE, self.name + os.sep)
        if not synctool.lib.DRY_RUN:
            verbose('  os.rmdir(%s)' % self.name)
            try:
                os.rmdir(self.name)
            except OSError:
                # probably directory not empty
                # refuse to delete dir, just move it aside
                verbose('refusing to delete directory %s' % self.name)
                self.move_saved()
Example #24
    def harddelete(self):
        # type: () -> None
        '''delete directory'''

        if synctool.lib.DRY_RUN:
            not_str = 'not '
        else:
            not_str = ''

        stdout('%sremoving %s' % (not_str, self.name + os.sep))
        unix_out('rmdir %s' % self.name)
        terse(synctool.lib.TERSE_DELETE, self.name + os.sep)
        if not synctool.lib.DRY_RUN:
            verbose('  os.rmdir(%s)' % self.name)
            try:
                os.rmdir(self.name)
            except OSError:
                # probably directory not empty
                # refuse to delete dir, just move it aside
                verbose('refusing to delete directory %s' % self.name)
                self.move_saved()
Example #25
    def harddelete(self):
        '''delete existing entry'''

        if synctool.lib.DRY_RUN:
            not_str = 'not '
        else:
            not_str = ''

        stdout('%sdeleting %s' % (not_str, self.name))
        unix_out('rm %s' % self.name)
        terse(synctool.lib.TERSE_DELETE, self.name)

        if not synctool.lib.DRY_RUN:
            verbose('  os.unlink(%s)' % self.name)
            try:
                os.unlink(self.name)
            except OSError as err:
                error('failed to delete %s : %s' % (self.name, err.strerror))
                terse(synctool.lib.TERSE_FAIL, 'delete %s' % self.name)
            else:
                log('deleted %s' % self.name)
Example #26
def _single_overlay_callback(obj, pre_dict, post_dict):
    '''do overlay function for single files'''

    if not SINGLE_FILES:
        # proceed quickly
        return True, False

    if obj.ov_type == synctool.overlay.OV_TEMPLATE:
        return generate_template(obj, post_dict), False

    go_on = True
    updated = False

    if _match_single(obj.dest_path):
        _, updated = _overlay_callback(obj, pre_dict, post_dict)
        if not updated:
            stdout('%s is up to date' % obj.dest_path)
            terse(synctool.lib.TERSE_OK, obj.dest_path)
            unix_out('# %s is up to date\n' % obj.dest_path)

    return go_on, updated
Example #27
def worker_dsh_cp(addr):
    # type: (str) -> None
    '''do remote copy to node'''

    nodename = NODESET.get_nodename_from_address(addr)
    if nodename == param.NODENAME:
        # do not copy to local node; files are already here
        return

    # the fileset already has been added to DSH_CP_CMD_ARR

    # use ssh connection multiplexing (if possible)
    use_multiplex = synctool.multiplex.use_mux(nodename)

    # create local copy of DSH_CP_CMD_ARR
    # or parallelism may screw things up
    dsh_cp_cmd_arr = DSH_CP_CMD_ARR[:]

    # add ssh cmd
    ssh_cmd_arr = shlex.split(param.SSH_CMD)
    if use_multiplex:
        synctool.multiplex.ssh_args(ssh_cmd_arr, nodename)

    dsh_cp_cmd_arr.extend(['-e', ' '.join(ssh_cmd_arr)])
    dsh_cp_cmd_arr.append('--')
    dsh_cp_cmd_arr.extend(SOURCE_LIST)
    dsh_cp_cmd_arr.append('%s:%s' % (addr, DESTDIR))

    msg = 'copy %s to %s' % (FILES_STR, DESTDIR)
    if synctool.lib.DRY_RUN:
        msg += ' (dry run)'
    if synctool.lib.OPT_NODENAME:
        msg = ('%s: ' % nodename) + msg
    stdout(msg)

    if not synctool.lib.DRY_RUN:
        synctool.lib.run_with_nodename(dsh_cp_cmd_arr, nodename)
    else:
        unix_out(' '.join(dsh_cp_cmd_arr) + '    # dry run')
Example #28
def _single_purge_callback(obj, post_dict, updated, *args):
    '''do purge function for single files'''

    # The same as _single_overlay_callback(), except that
    # purge entries may differ in timestamp. synctool has to report
    # this because pure rsync will as well (which is bloody annoying)
    #
    # For normal synctool overlay/, it's regarded as not important
    # and synctool will not complain about it
    #
    # This actually leaves a final wart; synctool --single may create
    # purge entries that rsync will complain about and sync again
    # Anyway, I don't think it's a big deal, and that's what you get
    # when you mix up synctool and rsync

    go_on = True

    if _match_single(obj.dest_path):
        _, updated = _overlay_callback(obj, post_dict, False, *args)
        if not updated:
            if obj.check_purge_timestamp():
                stdout('%s is up to date' % obj.dest_path)
                terse(synctool.lib.TERSE_OK, obj.dest_path)
                unix_out('# %s is up to date\n' % obj.dest_path)
            # else: pass
        else:
            # register .post on the parent dir, if it has a .post script
            obj.dest_path = os.path.dirname(obj.dest_path)
            obj.dest_stat = synctool.syncstat.SyncStat(obj.dest_path)

            if obj.dest_path in post_dict:
                changed_dict = args[0]
                changed_dict[obj.dest_path] = (obj, post_dict[obj.dest_path])

        if not SINGLE_FILES:
            return False, updated

    return go_on, updated
Example #29
def worker_dsh_cp(addr):
    '''do remote copy to node'''

    nodename = NODESET.get_nodename_from_address(addr)
    if nodename == param.NODENAME:
        # do not copy to local node; files are already here
        return

    # the fileset already has been added to DSH_CP_CMD_ARR

    # use ssh connection multiplexing (if possible)
    use_multiplex = synctool.multiplex.use_mux(nodename)

    # create local copy of DSH_CP_CMD_ARR
    # or parallelism may screw things up
    dsh_cp_cmd_arr = DSH_CP_CMD_ARR[:]

    # add ssh cmd
    ssh_cmd_arr = shlex.split(param.SSH_CMD)
    if use_multiplex:
        synctool.multiplex.ssh_args(ssh_cmd_arr, nodename)

    dsh_cp_cmd_arr.extend(['-e', ' '.join(ssh_cmd_arr)])
    dsh_cp_cmd_arr.append('--')
    dsh_cp_cmd_arr.extend(SOURCE_LIST)
    dsh_cp_cmd_arr.append('%s:%s' % (addr, DESTDIR))

    msg = 'copy %s to %s' % (FILES_STR, DESTDIR)
    if synctool.lib.DRY_RUN:
        msg += ' (dry run)'
    if synctool.lib.OPT_NODENAME:
        msg = ('%s: ' % nodename) + msg
    stdout(msg)

    if not synctool.lib.DRY_RUN:
        synctool.lib.run_with_nodename(dsh_cp_cmd_arr, nodename)
    else:
        unix_out(' '.join(dsh_cp_cmd_arr) + '    # dry run')
Example #30
    def check(self):
        '''check differences between src and dest,
        Return a FIX_xxx code
        '''

        # src_path is under $overlay/
        # dest_path is in the filesystem

        vnode = None

        if not self.dest_stat.exists():
            stdout('%s does not exist' % self.dest_path)
            return SyncObject.FIX_CREATE

        src_type = self.src_stat.filetype()
        dest_type = self.dest_stat.filetype()
        if src_type != dest_type:
            # entry is of a different file type
            vnode = self.vnode_obj()
            stdout('%s should be a %s' % (self.dest_path, vnode.typename()))
            terse(synctool.lib.TERSE_WARNING, 'wrong type %s' % self.dest_path)
            return SyncObject.FIX_TYPE

        vnode = self.vnode_obj()
        if not vnode.compare(self.src_path, self.dest_stat):
            # content is different; change the entire object
            log('updating %s' % self.dest_path)
            return SyncObject.FIX_UPDATE

        # check ownership and permissions
        # rectify if needed
        fix_action = 0
        if ((self.src_stat.uid != self.dest_stat.uid)
                or (self.src_stat.gid != self.dest_stat.gid)):
            stdout('%s should have owner %s.%s (%d.%d), '
                   'but has %s.%s (%d.%d)' %
                   (self.dest_path, self.src_stat.ascii_uid(),
                    self.src_stat.ascii_gid(), self.src_stat.uid,
                    self.src_stat.gid, self.dest_stat.ascii_uid(),
                    self.dest_stat.ascii_gid(), self.dest_stat.uid,
                    self.dest_stat.gid))
            terse(
                synctool.lib.TERSE_OWNER,
                '%s.%s %s' % (self.src_stat.ascii_uid(),
                              self.src_stat.ascii_gid(), self.dest_path))
            fix_action = SyncObject.FIX_OWNER

        if self.src_stat.mode != self.dest_stat.mode:
            stdout('%s should have mode %04o, but has %04o' %
                   (self.dest_path, self.src_stat.mode & 07777,
                    self.dest_stat.mode & 07777))
            terse(synctool.lib.TERSE_MODE,
                  '%04o %s' % (self.src_stat.mode & 07777, self.dest_path))
            fix_action |= SyncObject.FIX_MODE

        return fix_action
Example #31
def check():
    """check for newer version
    It does this by looking at releases at GitHub
    Returns True if a newer version is available
    """

    info = ReleaseInfo()
    if not info.load():
        # error message already printed
        return False

    my_time = datetime.datetime.strptime(synctool.param.RELEASE_DATETIME, "%Y-%m-%dT%H:%M:%S")
    if info.datetime <= my_time:
        stdout("You are running the latest release of synctool")
        return False

    stdout("A newer version is available: %s" % info.version)
    stdout("released %s" % info.datetime)
    return True
Example #32
def check():
    '''check for newer version
    It does this by looking at releases at GitHub
    Returns True if a newer version is available
    '''

    info = ReleaseInfo()
    if not info.load():
        # error message already printed
        return False

    my_time = datetime.datetime.strptime(synctool.param.RELEASE_DATETIME,
                                         '%Y-%m-%dT%H:%M:%S')
    if info.datetime <= my_time:
        stdout('You are running the latest release of synctool')
        return False

    stdout('A newer version is available: %s' % info.version)
    stdout('released %s' % info.datetime)
    return True
Example #33
def main():
    # type: () -> None
    '''run the program'''

    param.init()

    action = get_options()

    config.init_mynodename()

    if not param.NODENAME:
        error('unable to determine my nodename (hostname: %s)' %
              param.HOSTNAME)
        stderr('please check %s' % param.CONF_FILE)
        sys.exit(-1)

    if param.NODENAME not in param.NODES:
        error("unknown node '%s'" % param.NODENAME)
        stderr('please check %s' % param.CONF_FILE)
        sys.exit(-1)

    if param.NODENAME in param.IGNORE_GROUPS:
        # this is only a warning ...
        # you can still run synctool-pkg on the client by hand
        warning('node %s is disabled in %s' %
                (param.NODENAME, param.CONF_FILE))

    if synctool.lib.UNIX_CMD:
        t = time.localtime(time.time())

        unix_out('#')
        unix_out('# script generated by synctool on '
                 '%04d/%02d/%02d %02d:%02d:%02d' %
                 (t[0], t[1], t[2], t[3], t[4], t[5]))
        unix_out('#')
        unix_out('# my hostname: %s' % param.HOSTNAME)
        unix_out('# SYNCTOOL_NODE=%s' % param.NODENAME)
        unix_out('# SYNCTOOL_ROOT=%s' % param.ROOTDIR)
        unix_out('#')

        if not synctool.lib.DRY_RUN:
            unix_out('# NOTE: --fix specified, applying updates')
            unix_out('#')

        unix_out('')
    else:
        if not synctool.lib.MASTERLOG:
            # only print this when running stand-alone
            if not synctool.lib.QUIET:
                if synctool.lib.DRY_RUN:
                    stdout('DRY RUN, not doing any updates')
                    terse(synctool.lib.TERSE_DRYRUN, 'not doing any updates')
                else:
                    stdout('--fix specified, applying changes')
                    terse(synctool.lib.TERSE_FIXING, ' applying changes')

            else:
                if synctool.lib.DRY_RUN:
                    verbose('DRY RUN, not doing any updates')
                else:
                    verbose('--fix specified, applying changes')

        verbose('my nodename: %s' % param.NODENAME)
        verbose('my hostname: %s' % param.HOSTNAME)
        verbose('rootdir: %s' % param.ROOTDIR)

    os.environ['SYNCTOOL_NODE'] = param.NODENAME
    os.environ['SYNCTOOL_ROOT'] = param.ROOTDIR

    unix_out('umask 077')
    unix_out('')
    os.umask(077)

    if action == ACTION_DIFF:
        diff_files()

    elif action == ACTION_REFERENCE:
        reference_files()

    elif action == ACTION_ERASE_SAVED:
        if SINGLE_FILES:
            single_erase_saved()
        else:
            erase_saved()

    elif SINGLE_FILES:
        single_files()

    else:
        purge_files()
        overlay_files()
        delete_files()

    unix_out('# EOB')
Example #34
    def check(self):
        '''check differences between src and dest,
        and fix it when not a dry run
        Return pair: updated, metadata_updated'''

        # src_path is under $overlay/
        # dest_path is in the filesystem

        vnode = None

        if not self.dest_stat.exists():
            stdout('%s does not exist' % self.dest_path)
            log('creating %s' % self.dest_path)
            vnode = self.vnode_obj()
            vnode.fix()
            return True, False

        src_type = self.src_stat.filetype()
        dest_type = self.dest_stat.filetype()
        if src_type != dest_type:
            # entry is of a different file type
            vnode = self.vnode_obj()
            stdout('%s should be a %s' % (self.dest_path, vnode.typename()))
            terse(synctool.lib.TERSE_WARNING, 'wrong type %s' %
                                               self.dest_path)
            log('fix type %s' % self.dest_path)
            vnode.fix()
            return True, False

        vnode = self.vnode_obj()
        if not vnode.compare(self.src_path, self.dest_stat):
            # content is different; change the entire object
            log('updating %s' % self.dest_path)
            vnode.fix()
            return True, False

        # check ownership and permissions
        # rectify if needed
        meta_updated = False
        if ((self.src_stat.uid != self.dest_stat.uid) or
            (self.src_stat.gid != self.dest_stat.gid)):
            stdout('%s should have owner %s.%s (%d.%d), '
                   'but has %s.%s (%d.%d)' % (self.dest_path,
                   self.src_stat.ascii_uid(),
                   self.src_stat.ascii_gid(),
                   self.src_stat.uid, self.src_stat.gid,
                   self.dest_stat.ascii_uid(),
                   self.dest_stat.ascii_gid(),
                   self.dest_stat.uid, self.dest_stat.gid))
            terse(synctool.lib.TERSE_OWNER, '%s.%s %s' %
                                            (self.src_stat.ascii_uid(),
                                             self.src_stat.ascii_gid(),
                                             self.dest_path))
            log('set owner %s.%s (%d.%d) %s' %
                (self.src_stat.ascii_uid(), self.src_stat.ascii_gid(),
                 self.src_stat.uid, self.src_stat.gid,
                 self.dest_path))
            vnode.set_owner()
            meta_updated = True

        if self.src_stat.mode != self.dest_stat.mode:
            stdout('%s should have mode %04o, but has %04o' %
                   (self.dest_path, self.src_stat.mode & 07777,
                    self.dest_stat.mode & 07777))
            terse(synctool.lib.TERSE_MODE, '%04o %s' %
                                           (self.src_stat.mode & 07777,
                                            self.dest_path))
            log('set mode %04o %s' % (self.src_stat.mode & 07777,
                                      self.dest_path))
            vnode.set_permissions()
            meta_updated = True

        # set owner/permissions do not trigger .post scripts
        return False, meta_updated
Example #35
def rsync_upload(up):
    '''upload a file/dir to $overlay/group/ or $purge/group/'''

    up.make_repos_path()

    # check whether the remote entry exists
    remote_stats = _remote_stat(up)
    if remote_stats is None:
        # error message was already printed
        return

    # first element in array is our 'target'
    isdir = remote_stats[0].is_dir()
    if isdir and synctool.param.REQUIRE_EXTENSION and not up.purge:
        error('remote is a directory')
        stderr('synctool can not upload directories to $overlay '
               'when require_extension is set')
        return

    if isdir:
        up.filename += os.sep
        up.repos_path += os.sep

    # make command: rsync [-n] [-v] node:/path/ $overlay/group/path/
    cmd_arr = shlex.split(synctool.param.RSYNC_CMD)

    # opts is just for the 'visual aspect'; it is displayed when --verbose
    opts = ' '
    if synctool.lib.DRY_RUN:
#        cmd_arr.append('-n')
        opts += '-n '

    if synctool.lib.VERBOSE:
        cmd_arr.append('-v')
        opts += '-v '
        if '-q' in cmd_arr:
            cmd_arr.remove('-q')
        if '--quiet' in cmd_arr:
            cmd_arr.remove('--quiet')

    # use ssh connection multiplexing (if possible)
    ssh_cmd_arr = shlex.split(synctool.param.SSH_CMD)
    use_multiplex = synctool.multiplex.use_mux(up.node)
    if use_multiplex:
        synctool.multiplex.ssh_args(ssh_cmd_arr, up.node)
    cmd_arr.extend(['-e', ' '.join(ssh_cmd_arr)])
    cmd_arr.extend(['--', up.address + ':' + up.filename, up.repos_path])

    verbose_path = prettypath(up.repos_path)
    if synctool.lib.DRY_RUN:
        stdout('would be uploaded as %s' % verbose_path)
    else:
        dest_dir = os.path.dirname(up.repos_path)
        _makedir(dest_dir, remote_stats[1:])
        if not synctool.lib.path_exists(dest_dir):
            error('failed to create %s/' % dest_dir)
            return

    # for $overlay, never do rsync --delete / --delete-excluded
    # for $purge, don't use rsync --delete on single files
    # because it would (inadvertently) delete all existing files in the repos
    if not up.purge or not isdir:
        if '--delete' in cmd_arr:
            cmd_arr.remove('--delete')
        if '--delete-excluded' in cmd_arr:
            cmd_arr.remove('--delete-excluded')

    verbose('running rsync%s%s:%s to %s' % (opts, up.node, up.filename,
                                            verbose_path))
    if not synctool.lib.DRY_RUN:
        synctool.lib.run_with_nodename(cmd_arr, up.node)

        if not synctool.lib.path_exists(up.repos_path):
            error('upload failed')
        else:
            stdout('uploaded %s' % verbose_path)
    else:
        # in dry-run mode, show the command anyway
        unix_out('# dry run, rsync not performed')
        unix_out(' '.join(cmd_arr))
Example #36
def main():
    '''run the program'''

    synctool.param.init()

    sys.stdout = synctool.unbuffered.Unbuffered(sys.stdout)
    sys.stderr = synctool.unbuffered.Unbuffered(sys.stderr)

    try:
        get_options()
    except synctool.range.RangeSyntaxError as err:
        print 'error:', err
        sys.exit(1)

    if OPT_CHECK_UPDATE:
        if not synctool.update.check():
            # no newer version available
            sys.exit(0)

        sys.exit(1)

    if OPT_DOWNLOAD:
        if not synctool.update.download():
            # download error
            sys.exit(-1)

        sys.exit(0)

    if OPT_AGGREGATE:
        if not synctool.aggr.run(MASTER_OPTS):
            sys.exit(-1)

        sys.exit(0)

    synctool.config.init_mynodename()

    if synctool.param.MASTER != synctool.param.HOSTNAME:
        verbose('master %s != hostname %s' % (synctool.param.MASTER,
                                              synctool.param.HOSTNAME))
        stderr('error: not running on the master node')
        sys.exit(-1)

    if not _check_valid_overlaydirs():
        # error message already printed
        sys.exit(-1)

    synctool.lib.openlog()

    address_list = NODESET.addresses()
    if not address_list:
        print 'no valid nodes specified'
        sys.exit(1)

    if UPLOAD_FILE.filename:
        # upload a file
        if len(address_list) != 1:
            print 'The option --upload can only be run on just one node'
            print ('Please use --node=nodename to specify the node '
                   'to upload from')
            sys.exit(1)

        UPLOAD_FILE.address = address_list[0]
        synctool.upload.upload(UPLOAD_FILE)

    else:
        # do regular synctool run
        # first print message about DRY RUN
        if not synctool.lib.QUIET:
            if synctool.lib.DRY_RUN:
                stdout('DRY RUN, not doing any updates')
                terse(synctool.lib.TERSE_DRYRUN, 'not doing any updates')
            else:
                stdout('--fix specified, applying changes')
                terse(synctool.lib.TERSE_FIXING, ' applying changes')

        else:
            if synctool.lib.DRY_RUN:
                verbose('DRY RUN, not doing any updates')
            else:
                verbose('--fix specified, applying changes')

        make_tempdir()
        run_remote_synctool(address_list)

    synctool.lib.closelog()
Example #37
    def check(self):
        '''check differences between src and dest,
        Return a FIX_xxx code
        '''

        # src_path is under $overlay/
        # dest_path is in the filesystem

        vnode = None

        if not self.dest_stat.exists():
            stdout('%s does not exist' % self.dest_path)
            return SyncObject.FIX_CREATE

        src_type = self.src_stat.filetype()
        dest_type = self.dest_stat.filetype()
        if src_type != dest_type:
            # entry is of a different file type
            vnode = self.vnode_obj()
            stdout('%s should be a %s' % (self.dest_path, vnode.typename()))
            terse(synctool.lib.TERSE_WARNING, 'wrong type %s' %
                                               self.dest_path)
            return SyncObject.FIX_TYPE

        vnode = self.vnode_obj()
        if not vnode.compare(self.src_path, self.dest_stat):
            # content is different; change the entire object
            log('updating %s' % self.dest_path)
            return SyncObject.FIX_UPDATE

        # check ownership and permissions and time
        # rectify if needed
        fix_action = 0
        if ((self.src_stat.uid != self.dest_stat.uid) or
            (self.src_stat.gid != self.dest_stat.gid)):
            stdout('%s should have owner %s.%s (%d.%d), '
                   'but has %s.%s (%d.%d)' % (self.dest_path,
                   self.src_stat.ascii_uid(),
                   self.src_stat.ascii_gid(),
                   self.src_stat.uid, self.src_stat.gid,
                   self.dest_stat.ascii_uid(),
                   self.dest_stat.ascii_gid(),
                   self.dest_stat.uid, self.dest_stat.gid))
            terse(synctool.lib.TERSE_OWNER, '%s.%s %s' %
                                            (self.src_stat.ascii_uid(),
                                             self.src_stat.ascii_gid(),
                                             self.dest_path))
            fix_action = SyncObject.FIX_OWNER

        if self.src_stat.mode != self.dest_stat.mode:
            stdout('%s should have mode %04o, but has %04o' %
                   (self.dest_path, self.src_stat.mode & 07777,
                    self.dest_stat.mode & 07777))
            terse(synctool.lib.TERSE_MODE, '%04o %s' %
                                           (self.src_stat.mode & 07777,
                                            self.dest_path))
            fix_action |= SyncObject.FIX_MODE

        # FIXME check times for other objects too, but
        # FIXME not for symlinks
        # FIXME not for directories (change when you add files ...)
        if synctool.param.SYNC_TIMES and self.src_stat.is_file():
            # FIXME do not call stat() again / SyncStat should have times
            self.src_stattime = os.lstat(self.src_path)
            self.dest_stattime = os.lstat(self.dest_path)
            if (int(self.src_stattime.st_mtime) !=
                int(self.dest_stattime.st_mtime)):
                stdout('%s has wrong timestamp' % self.dest_path)
                terse(synctool.lib.TERSE_MODE, ('%s has wrong timestamp' %
                                                self.dest_path))
                fix_action |= SyncObject.FIX_TIME

        return fix_action
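
Note that the timestamp check above compares int(st_mtime) on both sides; truncating to whole seconds avoids spurious mismatches when the source and destination filesystems store timestamps at different resolutions. A tiny sketch of that comparison on its own (same_mtime is an illustrative name, not synctool code):

import os

def same_mtime(path1, path2):
    '''compare modification times at whole-second resolution
    (illustrative sketch, not synctool source)'''
    return int(os.lstat(path1).st_mtime) == int(os.lstat(path2).st_mtime)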
Example #38
def main():
    '''run the program'''

    synctool.param.init()

    sys.stdout = synctool.unbuffered.Unbuffered(sys.stdout)
    sys.stderr = synctool.unbuffered.Unbuffered(sys.stderr)

    try:
        get_options()
    except synctool.range.RangeSyntaxError as err:
        error(str(err))
        sys.exit(1)

    if OPT_CHECK_UPDATE:
        if not synctool.update.check():
            # no newer version available
            sys.exit(0)

        sys.exit(1)

    if OPT_DOWNLOAD:
        if not synctool.update.download():
            # download error
            sys.exit(-1)

        sys.exit(0)

    if OPT_AGGREGATE:
        if not synctool.aggr.run(MASTER_OPTS):
            sys.exit(-1)

        sys.exit(0)

    synctool.config.init_mynodename()

    if synctool.param.MASTER != synctool.param.HOSTNAME:
        verbose('master %s != hostname %s' %
                (synctool.param.MASTER, synctool.param.HOSTNAME))
        error('not running on the master node')
        sys.exit(-1)

    if not _check_valid_overlaydirs():
        # error message already printed
        sys.exit(-1)

    synctool.lib.openlog()

    address_list = NODESET.addresses()
    if not address_list:
        print 'no valid nodes specified'
        sys.exit(1)

    if UPLOAD_FILE.filename:
        # upload a file
        if len(address_list) != 1:
            error('option --upload can only be run on just one node')
            stderr('Please use --node=nodename to specify the node '
                   'to upload from')
            sys.exit(1)

        UPLOAD_FILE.address = address_list[0]
        synctool.upload.upload(UPLOAD_FILE)

    else:
        # do regular synctool run
        # first print message about DRY RUN
        if not synctool.lib.QUIET:
            if synctool.lib.DRY_RUN:
                stdout('DRY RUN, not doing any updates')
                terse(synctool.lib.TERSE_DRYRUN, 'not doing any updates')
            else:
                stdout('--fix specified, applying changes')
                terse(synctool.lib.TERSE_FIXING, ' applying changes')
        else:
            if synctool.lib.DRY_RUN:
                verbose('DRY RUN, not doing any updates')
            else:
                verbose('--fix specified, applying changes')

        make_tempdir()
        run_remote_synctool(address_list)

    synctool.lib.closelog()
Example #39
def rsync_upload(up):
    '''upload a file/dir to $overlay/group/ or $purge/group/'''

    up.make_repos_path()

    # check whether the remote entry exists
    ok, isdir = _remote_isdir(up)
    if not ok:
        # error message was already printed
        return

    if isdir and synctool.param.REQUIRE_EXTENSION and not up.purge:
        stderr('error: remote is a directory')
        stderr('synctool can not upload directories to $overlay '
               'when require_extension is set')
        return

    if isdir:
        up.filename += os.sep
        up.repos_path += os.sep

    # make command: rsync [-n] [-v] node:/path/ $overlay/group/path/
    cmd_arr = shlex.split(synctool.param.RSYNC_CMD)

    # opts is just for the 'visual aspect'; it is displayed when --verbose
    opts = ' '
    if synctool.lib.DRY_RUN:
        cmd_arr.append('-n')
        opts += '-n '

    if synctool.lib.VERBOSE:
        cmd_arr.append('-v')
        opts += '-v '
        if '-q' in cmd_arr:
            cmd_arr.remove('-q')
        if '--quiet' in cmd_arr:
            cmd_arr.remove('--quiet')

    cmd_arr.append(up.address + ':' + up.filename)
    cmd_arr.append(up.repos_path)

    verbose_path = prettypath(up.repos_path)
    if synctool.lib.DRY_RUN:
        stdout('would be uploaded as %s' % verbose_path)
    else:
        dest_dir = os.path.dirname(up.repos_path)
        unix_out('mkdir -p %s' % dest_dir)
        synctool.lib.mkdir_p(dest_dir)
        if not os.path.exists(dest_dir):
            stderr('error: failed to create %s/' % dest_dir)
            return

    # for $overlay, never do rsync --delete / --delete-excluded
    # for $purge, don't use rsync --delete on single files
    # because it would (inadvertently) delete all existing files in the repos
    if not up.purge or not isdir:
        if '--delete' in cmd_arr:
            cmd_arr.remove('--delete')
        if '--delete-excluded' in cmd_arr:
            cmd_arr.remove('--delete-excluded')

    verbose('running rsync%s%s:%s to %s' % (opts, up.node, up.filename,
                                            verbose_path))
    unix_out(' '.join(cmd_arr))

    if not synctool.lib.DRY_RUN:
        synctool.lib.run_with_nodename(cmd_arr, up.node)
        if not os.path.exists(up.repos_path):
            stderr('error: upload failed')
        else:
            stdout('uploaded %s' % verbose_path)