Esempio n. 1
0
def _expand_remote_dest(local_path, remote_path):
    """Determine the effective remote destination for an upload.

    Defaults ``remote_path`` to ``local_path``, resolves a remote symlink,
    and expands a remote directory into ``dir/basename(local_path)``.

    :param local_path: Local source path (may be ``None``).
    :param remote_path: Desired remote path (may be ``None``).
    :return: Tuple ``(st, remote_path)``: the remote lstat result (falsy if
             the target does not exist) and the resolved remote path.
    :raises RuntimeError: if both arguments are ``None``.
    :raises RemoteFailureError: if the destination is a directory and no
                                ``local_path`` is given, or if it exists but
                                is not a regular file.
    """
    if remote_path is None:
        if local_path is None:
            raise RuntimeError('one of local_path, remote_path is required')
        remote_path = local_path

    st = remote.lstat(remote_path)
    if st:
        # file exists, check if it is a link
        if S_ISLNK(st.st_mode):
            # normalize (dangling links will raise an exception)
            remote_path = remote.normalize(remote_path)

            # update stat
            st = remote.lstat(remote_path)

        # dir-expansion, since st is guaranteed not be a link
        if st and S_ISDIR(st.st_mode):
            if local_path is None:
                raise RemoteFailureError('Is a directory: {}'.format(
                    remote_path))

            # if it's a directory, correct path
            remote_path = remote.path.join(remote_path,
                                           remote.path.basename(local_path))

            st = remote.lstat(remote_path)
            log.debug('Expanded remote_path to {!r}'.format(remote_path))

    # ensure st is either non-existant, or a regular file
    if st and not S_ISREG(st.st_mode):
        raise RemoteFailureError('Not a regular file: {!r}'.format(
            remote_path))
    return st, remote_path
Esempio n. 2
0
def _expand_remote_dest(local_path, remote_path):
    """Resolve the effective remote destination for an upload.

    Defaults ``remote_path`` to ``local_path``, follows a remote symlink,
    and expands a remote directory to ``dir/basename(local_path)``.

    :param local_path: Local source path (may be ``None``).
    :param remote_path: Desired remote path (may be ``None``).
    :return: ``(st, remote_path)`` where ``st`` is the remote lstat result
             (falsy if the target does not exist).
    :raises RuntimeError: if both paths are ``None``.
    :raises RemoteFailureError: if the target is a directory and no
                                ``local_path`` is given, or if it exists but
                                is not a regular file.
    """
    if remote_path is None:
        if local_path is None:
            raise RuntimeError('one of local_path, remote_path is required')
        remote_path = local_path

    st = remote.lstat(remote_path)

    # follow a symlink first; normalize() raises on dangling links
    if st and S_ISLNK(st.st_mode):
        remote_path = remote.normalize(remote_path)
        st = remote.lstat(remote_path)

    # st can no longer be a link here, so directory expansion is safe
    if st and S_ISDIR(st.st_mode):
        if local_path is None:
            raise RemoteFailureError(
                'Is a directory: {}'.format(remote_path))

        # place the file inside the directory, named after the local file
        remote_path = remote.path.join(remote_path,
                                       remote.path.basename(local_path))
        st = remote.lstat(remote_path)
        log.debug('Expanded remote_path to {!r}'.format(remote_path))

    # final check: destination must be absent or a regular file
    if st and not S_ISREG(st.st_mode):
        raise RemoteFailureError(
            'Not a regular file: {!r}'.format(remote_path))
    return st, remote_path
Esempio n. 3
0
File: ssl.py Progetto: mbr/remand
def ensure_certificate(hostname):
    """Ensure *some* TLS certificate, key and chain exist for ``hostname``.

    If any of the three remote files is missing, a self-signed certificate
    is generated and uploaded so that webservers (e.g. nginx) can start.

    :param hostname: Hostname the certificate files are named after.
    :return: ``Changed`` if a self-signed certificate was installed,
             ``Unchanged`` otherwise.
    """
    cert_dir = config['sslcert_cert_dir']
    cert_rpath = remote.path.join(cert_dir, hostname + '.crt')
    chain_rpath = remote.path.join(cert_dir, hostname + '.chain.crt')
    key_rpath = remote.path.join(config['sslcert_key_dir'], hostname + '.pem')

    # all three files already present? nothing to do then
    have_all = (remote.lstat(cert_rpath) and remote.lstat(key_rpath)
                and remote.lstat(chain_rpath))
    if have_all:
        return Unchanged(
            'Certificate for hostname {} already preset'.format(hostname))

    log.debug('Remote certificate {}, key {}, chain {} not found'.format(
        cert_rpath, key_rpath, chain_rpath))

    key, cert = generate_self_signed_cert(hostname)

    # FIXME: maybe use install cert here.
    fs.upload_string(key, key_rpath)
    fs.upload_string(cert, cert_rpath)
    fs.upload_string(cert, chain_rpath)

    return Changed(
        msg='No certificate {} / key {} / chain {} found. A self-signed '
        'certficate from a reputable snake-oil vendor was installed.'.format(
            cert_rpath, key_rpath, chain_rpath))
Esempio n. 4
0
def dpkg_install(paths, check=True):
    """Install local ``.deb`` packages on the remote host via dpkg.

    :param paths: Either a mapping of ``(name, version) -> local path``, or
                  an iterable of local ``.deb`` paths whose filenames follow
                  the debian ``name_version_arch.deb`` convention.
    :param check: If true, skip packages whose name/version is already
                  installed (per ``info_installed_packages``).
    :return: ``Changed`` if anything was installed, ``Unchanged`` otherwise.
    :raises ValueError: if a filename cannot be split into name and version.
    """
    # BUG FIX: ``pkgs`` was previously left unassigned when ``paths`` was
    # already a mapping, raising NameError below. Default it to ``paths``.
    pkgs = paths
    if not hasattr(paths, 'keys'):
        pkgs = {}

        # determine package names from filenames. ideally, we would open the
        # package here and check
        for p in paths:
            fn = os.path.basename(p)
            try:
                name, version, tail = fn.split('_', 3)
                pkgs[(name, version)] = p
            except ValueError:
                raise ValueError(
                    'Could not determine package version from '
                    'package filename {}. Please rename the .deb '
                    'to standard debian convention '
                    '(name_version_arch.deb) or supply a specific '
                    'version by passing a dictionary parameter.'.format(fn))

    # log names
    log.debug('Package names: ' + ', '.join('{} -> {}'.format(k, v)
                                            for k, v in pkgs.items()))

    if check:
        missing = []
        installed = info_installed_packages()

        # keep only packages not installed in a matching version
        for name, version in pkgs:
            if name not in installed or not installed[name].eq_version(version):
                missing.append((name, version))
    else:
        missing = pkgs.keys()

    log.debug('Installing packages: {}'.format(missing))

    if not missing:
        return Unchanged('Packages {!r} already installed'.format(pkgs.keys()))

    # FIXME: see above
    info_installed_packages.invalidate_cache()

    with fs.remote_tmpdir() as rtmp:
        # upload packages to be installed
        pkg_files = []
        for idx, key in enumerate(missing):
            tmpdest = remote.path.join(rtmp, str(idx) + '.deb')
            fs.upload_file(pkgs[key], tmpdest)
            pkg_files.append(tmpdest)

        # install in a single dpkg install line
        # FIXME: add debconf default and such (same as apt)
        args = [config['cmd_dpkg'], '-i']
        args.extend(pkg_files)
        proc.run(
            args, extra_env={
                'DEBIAN_FRONTEND': 'noninteractive',
            })

    return Changed(msg='Installed packages {!r}'.format(missing))
Esempio n. 5
0
File: verify.py Progetto: mbr/remand
    def verify_buffer(self, st, buf, remote_path):
        """Compare an in-memory buffer's hash against the remote file's hash.

        :param st: Remote stat result (unused by this verifier).
        :param buf: Local buffer to hash.
        :param remote_path: Remote file whose hash is fetched.
        :return: True if both hexdigests match.
        """
        local_digest = self.hashfunc(buf).hexdigest()
        remote_hash = self._get_remote_hash(remote_path)

        log.debug('Local hash: {} Remote hash: {}'.format(
            local_digest, remote_hash))

        return remote_hash == local_digest
Esempio n. 6
0
File: verify.py Progetto: mbr/remand
    def verify_buffer(self, st, buf, remote_path):
        """Verify a remote file against a local buffer by hash comparison.

        :param st: Remote stat result (unused by this verifier).
        :param buf: Local buffer to hash with ``self.hashfunc``.
        :param remote_path: Remote file whose hash is fetched.
        :return: True if local and remote hexdigests match.
        """
        m = self.hashfunc(buf)
        remote_hash = self._get_remote_hash(remote_path)

        log.debug('Local hash: {} Remote hash: {}'.format(
            m.hexdigest(), remote_hash))

        return remote_hash == m.hexdigest()
Esempio n. 7
0
File: apt.py Progetto: mbr/remand
def dpkg_install(paths, check=True):
    """Install local ``.deb`` packages on the remote host via dpkg.

    :param paths: Either a mapping of ``(name, version) -> local path``, or
                  an iterable of local ``.deb`` paths whose filenames follow
                  the debian ``name_version_arch.deb`` convention.
    :param check: If true, skip packages whose name/version is already
                  installed (per ``info_installed_packages``).
    :return: ``Changed`` if anything was installed, ``Unchanged`` otherwise.
    :raises ValueError: if a filename cannot be split into name and version.
    """
    pkgs = paths
    if not hasattr(paths, 'keys'):
        pkgs = {}

        # determine package names from filenames. ideally, we would open the
        # package here and check
        for p in paths:
            fn = os.path.basename(p)
            try:
                name, version, tail = fn.split('_', 3)
                pkgs[(name, version)] = p
            except ValueError:
                raise ValueError(
                    'Could not determine package version from '
                    'package filename {}. Please rename the .deb '
                    'to standard debian convention '
                    '(name_version_arch.deb) or supply a specific '
                    'version by passing a dictionary parameter.'.format(fn))

    # log names
    log.debug('Package names: ' + ', '.join('{} -> {}'.format(k, v)
                                            for k, v in pkgs.items()))

    if check:
        missing = []
        installed = info_installed_packages()

        # keep only packages not installed in a matching version
        for name, version in pkgs:
            if name not in installed or not installed[name].eq_version(
                    version):
                missing.append((name, version))
    else:
        missing = pkgs.keys()

    log.debug('Installing packages: {}'.format(missing))

    if not missing:
        return Unchanged('Packages {!r} already installed'.format(pkgs.keys()))

    # FIXME: see above
    info_installed_packages.invalidate_cache()

    with fs.remote_tmpdir() as rtmp:
        # upload packages to be installed
        pkg_files = []
        for idx, key in enumerate(missing):
            tmpdest = remote.path.join(rtmp, str(idx) + '.deb')
            fs.upload_file(pkgs[key], tmpdest)
            pkg_files.append(tmpdest)

        # install in a single dpkg install line
        # FIXME: add debconf default and such (same as apt)
        args = [config['cmd_dpkg'], '-i']
        args.extend(pkg_files)
        proc.run(args, extra_env={'DEBIAN_FRONTEND': 'noninteractive', })

    return Changed(msg='Installed packages {!r}'.format(missing))
Esempio n. 8
0
File: verify.py Progetto: mbr/remand
    def verify_file(self, st, local_path, remote_path):
        """Check whether the local file's content hash matches the remote's.

        :param st: Remote stat result (unused by this verifier).
        :param local_path: Local file to hash.
        :param remote_path: Remote file whose hash is fetched.
        :return: True if both hexdigests match.
        """
        # hash the local file's contents
        with open(local_path, 'rb') as lfile:
            local_digest = util.hash_file(lfile, self.hashfunc).hexdigest()

        remote_hash = self._get_remote_hash(remote_path)
        log.debug('Local hash: {} Remote hash: {}'.format(
            local_digest, remote_hash))

        return remote_hash == local_digest
Esempio n. 9
0
 def _(*args):
     """Memoizing wrapper: cache ``f(*args)`` keyed by ``(name,) + args``."""
     sig = (name, ) + args
     # consult the cache only when info caching is enabled via config
     if config.get_bool('info_cache') and sig in info.cache:
         v = info.cache[sig]
         log.debug('Memoize cache hit {}'.format(sig))
     else:
         # compute and store the result (stored even when caching is off)
         v = f(*args)
         log.debug('Memoize cache miss {}'.format(sig))
         info.cache[sig] = v
     return v
Esempio n. 10
0
 def _(*args):
     """Memoizing wrapper: cache ``f(*args)`` keyed by ``(name,) + args``."""
     sig = (name, ) + args
     # consult the cache only when info caching is enabled via config
     if config.get_bool('info_cache') and sig in info.cache:
         v = info.cache[sig]
         log.debug('Memoize cache hit {}'.format(sig))
     else:
         # compute and store the result (stored even when caching is off)
         v = f(*args)
         log.debug('Memoize cache miss {}'.format(sig))
         info.cache[sig] = v
     return v
Esempio n. 11
0
File: verify.py Progetto: mbr/remand
    def verify_file(self, st, local_path, remote_path):
        """Verify a remote file by comparing content hashes.

        :param st: Remote stat result (unused by this verifier).
        :param local_path: Local file whose hash is computed.
        :param remote_path: Remote file whose hash is fetched.
        :return: True if local and remote hexdigests match.
        """
        # hash local file
        with open(local_path, 'rb') as lfile:
            m = util.hash_file(lfile, self.hashfunc)

        remote_hash = self._get_remote_hash(remote_path)
        log.debug('Local hash: {} Remote hash: {}'.format(
            m.hexdigest(), remote_hash))

        return remote_hash == m.hexdigest()
Esempio n. 12
0
File: verify.py Progetto: mbr/remand
    def verify_file(self, st, local_path, remote_path):
        """Verify a remote file cheaply via (scaled mtime, size) comparison.

        :param st: Remote stat result.
        :param local_path: Local file to stat.
        :param remote_path: Remote path (unused; ``st`` carries the data).
        :return: True if scaled mtimes and sizes both match.
        """
        lst = os.stat(local_path)

        mul = int(config['fs_mtime_multiplier'])

        # int() avoids mismatches caused by differing mtime resolutions
        local_key = (int(lst.st_mtime * mul), lst.st_size)
        remote_key = (int(st.st_mtime * mul), st.st_size)
        log.debug('stat (mtime/size): local {}/{}, remote {}/{}'
                  .format(*(local_key + remote_key)))
        return local_key == remote_key
Esempio n. 13
0
File: util.py Progetto: mbr/remand
    def _by_short_name(cls, short_name):
        """Resolve a registered subclass by its short name.

        :param short_name: Key under which the subclass was registered.
        :return: The subclass registered under ``short_name``.
        :raises ConfigurationError: if no subclass is registered.
        """
        subclass = cls.registry.get(short_name, None)
        if subclass is None:
            raise ConfigurationError(
                'Unknown {}: {!r}. Check your configuration setting.'
                .format(cls.__name__, short_name))

        # reuse the value fetched above instead of a redundant second lookup
        log.debug('{} {!r} -> {}'.format(
            cls.__name__, short_name, subclass.__name__))
        return subclass
Esempio n. 14
0
File: util.py Progetto: mbr/remand
    def _by_short_name(cls, short_name):
        """Look up a registered subclass by its short name.

        :param short_name: Key under which the subclass was registered.
        :return: The subclass registered under ``short_name``.
        :raises ConfigurationError: if ``short_name`` is not registered.
        """
        v = cls.registry.get(short_name, None)
        if v is None:
            raise ConfigurationError(
                'Unknown {}: {!r}. Check your configuration setting.'
                .format(cls.__name__, short_name))

        # NOTE(review): this second lookup is redundant — ``v`` above is the
        # same object and could be returned directly
        subclass = cls.registry[short_name]
        log.debug('{} {!r} -> {}'.format(cls.__name__, short_name,
                                         subclass.__name__))
        return subclass
Esempio n. 15
0
File: verify.py Progetto: mbr/remand
    def verify_file(self, st, local_path, remote_path):
        """Verify a remote file cheaply via (scaled mtime, size) comparison.

        :param st: Remote stat result.
        :param local_path: Local file to stat.
        :param remote_path: Remote path (unused; ``st`` carries the data).
        :return: True if scaled mtimes and sizes both match.
        """
        lst = os.stat(local_path)

        mul = int(config['fs_mtime_multiplier'])

        # we cast to int, to avoid into issues with different mtime resolutions
        l = (int(lst.st_mtime * mul), lst.st_size)
        r = (int(st.st_mtime * mul), st.st_size)
        log.debug('stat (mtime/size): local {}/{}, remote {}/{}'
                  .format(*(l + r)))
        return l == r
Esempio n. 16
0
File: apt.py Progetto: mbr/remand
    def set(self, timestamp=None):
        """Touch the remote timestamp file and update the cached value.

        :param timestamp: Explicit timestamp to record; ``None`` means
                          "now", whose exact value we cannot know, so the
                          cache is left untouched in that case.
        """
        # the containing directory must exist before touching the file
        fs.create_dir(remote.path.dirname(self.rpath), 0o755)

        # write (or update) the remote timestamp file
        fs.touch(self.rpath, timestamp)

        # only cache an explicit value; "now" cannot be cached accurately
        if timestamp is not None:
            self._current = timestamp
            self.synced = True
        log.debug('Timestamp {} set to {}'.format(self.rpath, self._current))
Esempio n. 17
0
    def set(self, timestamp=None):
        """Touch the remote timestamp file and update the cached value.

        :param timestamp: Explicit timestamp to record; ``None`` means
                          "now", in which case the cached value is left
                          untouched.
        """
        # ensure directory for timestamp exists
        fs.create_dir(remote.path.dirname(self.rpath), 0o755)

        # update timestamp
        fs.touch(self.rpath, timestamp)

        # update cached values
        if timestamp is not None:
            self._current = timestamp
            self.synced = True
        log.debug('Timestamp {} set to {}'.format(self.rpath, self._current))
Esempio n. 18
0
File: proc.py Progetto: mbr/remand
    def sudo_popen(args, cwd=None, extra_env={}):
        """Run a command through sudo, refreshing the sudo timestamp first.

        If a password is available (closure variable ``password``), the sudo
        timestamp is refreshed whenever it is older than
        ``timestamp_timeout``; afterwards sudo is expected to run without
        prompting until the timestamp expires.

        :param args: Argument list executed after ``sudo ... --``.
        :param cwd: Working directory, passed through to the wrapped popen.
        :param extra_env: Extra environment variables for the process.
                          NOTE(review): mutable default — harmless only as
                          long as it is never mutated.
        :raises RemoteFailureError: on unexpected sudo output, or when the
                                    timestamp refresh fails (most commonly
                                    an incorrect password).
        """
        # -E preserve environment variables passed
        # -H set the $HOME environment variable (usually default)
        # -S (unused): read password from stdin

        if password:
            cur = time()
            if cur - prev_timestamp[0] > timestamp_timeout:
                # customize our prompt, to prevent accidentally entering the
                # password. note that this is visible on the sudo invocation
                # and therefore not a security measure
                prompt_cookie = hexlify(urandom(40))
                log.debug(
                    'Prompt cookie for sudo refresh: {}'.format(prompt_cookie))

                # we need to refresh the sudo timestamp
                refresh_args = [
                    config['cmd_sudo'],
                    '-k',  # --reset-timestamp
                    '-v',  # --validate
                    '-S',  # --stdin
                    '--prompt={}'.format(prompt_cookie)
                ]

                proc = orig_popen(refresh_args)
                log.debug('Checking prompt cookie...')
                # the prompt must echo our cookie, otherwise something other
                # than sudo is talking to us
                if not proc.stderr.read(len(prompt_cookie)) == prompt_cookie:
                    raise RemoteFailureError('Unexpected output from sudo, '
                                             'bailing out.')

                # shove in the password
                stdout, stderr = proc.communicate(password + '\n')

                if proc.returncode != 0:
                    raise RemoteFailureError(
                        'Could not refresh sudo timestamp (exit status: {}).'
                        'The most common occurence for this is an incorrect '
                        'password.'.format(proc.returncode))

                # from this point on, sudo should work without a password
                # until the timestamp expires
                prev_timestamp[0] = cur

                # FIXME: handle SFTP

        # copy the base sudo invocation (closure) and append the command
        pargs = sudo_args[:]

        pargs.append('--')
        pargs.extend(args)
        return orig_popen(pargs, cwd, extra_env)
Esempio n. 19
0
File: proc.py Progetto: mbr/remand
    def sudo_popen(args, cwd=None, extra_env={}):
        """Run a command through sudo, refreshing the sudo timestamp first.

        If a password is available (closure variable ``password``), the sudo
        timestamp is refreshed whenever it is older than
        ``timestamp_timeout``; afterwards sudo is expected to run without
        prompting until the timestamp expires.

        :param args: Argument list executed after ``sudo ... --``.
        :param cwd: Working directory, passed through to the wrapped popen.
        :param extra_env: Extra environment variables for the process.
                          NOTE(review): mutable default — harmless only as
                          long as it is never mutated.
        :raises RemoteFailureError: on unexpected sudo output, or when the
                                    timestamp refresh fails (most commonly
                                    an incorrect password).
        """
        # -E preserve environment variables passed
        # -H set the $HOME environment variable (usually default)
        # -S (unused): read password from stdin

        if password:
            cur = time()
            if cur - prev_timestamp[0] > timestamp_timeout:
                # customize our prompt, to prevent accidentally entering the
                # password. note that this is visible on the sudo invocation
                # and therefore not a security measure
                prompt_cookie = hexlify(urandom(40))
                log.debug('Prompt cookie for sudo refresh: {}'.format(
                    prompt_cookie))

                # we need to refresh the sudo timestamp
                refresh_args = [
                    config['cmd_sudo'],
                    '-k',  # --reset-timestamp
                    '-v',  # --validate
                    '-S',  # --stdin
                    '--prompt={}'.format(prompt_cookie)
                ]

                proc = orig_popen(refresh_args)
                log.debug('Checking prompt cookie...')
                # the prompt must echo our cookie, otherwise something other
                # than sudo is talking to us
                if not proc.stderr.read(len(prompt_cookie)) == prompt_cookie:
                    raise RemoteFailureError('Unexpected output from sudo, '
                                             'bailing out.')

                # shove in the password
                stdout, stderr = proc.communicate(password + '\n')

                if proc.returncode != 0:
                    raise RemoteFailureError(
                        'Could not refresh sudo timestamp (exit status: {}).'
                        'The most common occurence for this is an incorrect '
                        'password.'.format(proc.returncode))

                # from this point on, sudo should work without a password
                # until the timestamp expires
                prev_timestamp[0] = cur

                # FIXME: handle SFTP

        # copy the base sudo invocation (closure) and append the command
        pargs = sudo_args[:]

        pargs.append('--')
        pargs.extend(args)
        return orig_popen(pargs, cwd, extra_env)
Esempio n. 20
0
File: apt.py Progetto: mbr/remand
def query_cache(pkgs):
    """Fetch ``apt-cache show`` records for the given package names.

    :param pkgs: Iterable of package names to query.
    :return: ``Unchanged`` wrapping an ``OrderedDict`` mapping package name
             to its parsed Deb822 record.
    :raises RemoteFailureError: if a record cannot be parsed.
    """
    stdout, _, _ = proc.run([config['cmd_apt_cache'], 'show'] + list(pkgs))

    # avoid rebinding the parameter: collect results in a fresh dict
    records = OrderedDict()
    for dump in stdout.split('\n\n'):
        # records are separated by blank paragraphs; skip those
        if not dump or dump.isspace():
            continue
        try:
            pkg_info = Deb822(dump)
        except ValueError:
            log.debug(dump)
            raise RemoteFailureError('Error parsing Deb822 info.')

        records[pkg_info['Package']] = pkg_info

    return Unchanged(records)
Esempio n. 21
0
def query_cache(pkgs):
    """Fetch ``apt-cache show`` records for the given package names.

    :param pkgs: Iterable of package names to query.
    :return: ``Unchanged`` wrapping an ``OrderedDict`` mapping package name
             to its parsed Deb822 record.
    :raises RemoteFailureError: if a record cannot be parsed.
    """
    stdout, _, _ = proc.run([config['cmd_apt_cache'], 'show'] + list(pkgs))
    # NOTE(review): the parameter is rebound here; the input iterable is no
    # longer needed past this point
    pkgs = OrderedDict()
    for dump in stdout.split('\n\n'):
        # skip empty lines
        if not dump or dump.isspace():
            continue
        try:
            pkg_info = Deb822(dump)
        except ValueError:
            log.debug(dump)
            raise RemoteFailureError('Error parsing Deb822 info.')

        pkgs[pkg_info['Package']] = pkg_info

    return Unchanged(pkgs)
Esempio n. 22
0
File: proc.py Progetto: mbr/remand
def run(cmd,
        input=None,
        extra_env={},
        status_ok=(0, ),
        status_meaning={},
        cwd=None):
    """Run a command on the remote, raising on unexpected exit status.

    :param cmd: Command (converted to an argument list by ``_cmd_to_args``).
    :param input: Data fed to the process's stdin.
    :param extra_env: Extra environment variables for the process.
    :param status_ok: Acceptable exit statuses, or ``'any'`` to accept all.
    :param status_meaning: Optional mapping of exit status to explanation.
    :param cwd: Remote working directory.
    :return: Tuple ``(stdout, stderr, returncode)``.
    :raises RemoteProcessFailedError: on an unacceptable exit status.
    """
    args = _cmd_to_args(cmd)

    proc = remote.popen(args, extra_env=extra_env, cwd=cwd)
    stdout, stderr = proc.communicate(input)

    accept_any = status_ok == 'any'
    if not accept_any and proc.returncode not in status_ok:
        # dump captured output to aid debugging before raising
        log.debug('stdout: {}'.format(stdout))
        log.debug('stderr: {}'.format(stderr))

        raise RemoteProcessFailedError(
            args,
            proc.returncode,
            status_meaning.get(proc.returncode),
            stdout,
            stderr,
        )

    return stdout, stderr, proc.returncode
Esempio n. 23
0
def remote_tmpdir(delete=True, randbytes=16, mode=0o700):
    """Yield a temporary directory on the remote host, cleaning up after.

    Prefers the remote ``mktemp -d`` command when configured; otherwise
    emulates it with a randomly named directory under
    ``fs_fallback_tmpdir``.

    :param delete: Remove the directory again on exit.
    :param randbytes: Number of random bytes used for the fallback name.
    :param mode: Mode for the fallback directory.
    """
    # FIXME: audit this for security issues

    if config['cmd_mktemp']:
        # delegate creation to the remote mktemp binary
        out, _, _ = proc.run([config['cmd_mktemp'], '-d'])
        tmpdir = out.rstrip('\n')
    else:
        # no mktemp available: construct a random name ourselves
        rand_name = 'remand-' + hexlify(os.urandom(randbytes))
        tmpdir = remote.path.join(config['fs_fallback_tmpdir'], rand_name)
        remote.mkdir(tmpdir, mode=mode)

    log.debug('Created temporary directory {}'.format(tmpdir))

    try:
        yield tmpdir
    finally:
        if delete:
            log.debug('Removing temporary directory {}'.format(tmpdir))
            remove_dir(tmpdir)
Esempio n. 24
0
def remote_tmpdir(delete=True, randbytes=16, mode=0o700):
    """Yield a temporary directory on the remote host, cleaning up after.

    Prefers the remote ``mktemp -d`` command when configured; otherwise
    emulates it with a randomly named directory under
    ``fs_fallback_tmpdir``.

    :param delete: Remove the directory again on exit.
    :param randbytes: Number of random bytes used for the fallback name.
    :param mode: Mode for the fallback directory.
    """
    # FIXME: audit this for security issues

    if config['cmd_mktemp']:
        # create directory using mktemp command
        tmpdir, _, _ = proc.run([config['cmd_mktemp'], '-d'])
        tmpdir = tmpdir.rstrip('\n')
    else:
        # emulate mktemp
        tmpdir = remote.path.join(config['fs_fallback_tmpdir'],
                                  'remand-' + hexlify(os.urandom(randbytes)))

        remote.mkdir(tmpdir, mode=mode)

    log.debug('Created temporary directory {}'.format(tmpdir))

    try:
        yield tmpdir
    finally:
        if delete:
            log.debug('Removing temporary directory {}'.format(tmpdir))
            remove_dir(tmpdir)
Esempio n. 25
0
File: apt.py Progetto: mbr/remand
    def sync(self):
        """Load the cached timestamp from the remote host, once.

        Sets ``self._current`` from the remote file's mtime (0 if the file
        or its directory does not exist) and marks ``self.synced``.
        """
        log.debug('Syncing timestamp {}'.format(self.rpath))
        if self.synced:
            log.debug('Timestamp already synced')
            return

        # having to create the directory implies the timestamp cannot exist
        if fs.create_dir(remote.path.dirname(self.rpath), 0o755).changed:
            self.synced = True
            self._current = 0
            log.debug('Timestamp did not exist')
            return

        # directory was already there; check the file itself
        st = remote.stat(self.rpath)
        if st:
            self._current = st.st_mtime
            log.debug('Timestamp synced to {}'.format(self._current))
        else:
            # directory exists, but the timestamp file does not
            self._current = 0
            log.debug('Timestamp did not exist')
        self.synced = True
Esempio n. 26
0
File: proc.py Progetto: mbr/remand
def run(cmd,
        input=None,
        extra_env={},
        status_ok=(0, ),
        status_meaning={},
        cwd=None):
    """Run a command on the remote, raising on unexpected exit status.

    :param cmd: Command (converted to an argument list by ``_cmd_to_args``).
    :param input: Data fed to the process's stdin.
    :param extra_env: Extra environment variables for the process.
    :param status_ok: Acceptable exit statuses, or ``'any'`` to accept all.
    :param status_meaning: Optional mapping of exit status to explanation.
    :param cwd: Remote working directory.
    :return: Tuple ``(stdout, stderr, returncode)``.
    :raises RemoteProcessFailedError: on an unacceptable exit status.
    """
    args = _cmd_to_args(cmd)

    proc = remote.popen(args, extra_env=extra_env, cwd=cwd)
    stdout, stderr = proc.communicate(input)

    if status_ok != 'any' and proc.returncode not in status_ok:
        # dump captured output to aid debugging before raising
        log.debug('stdout: {}'.format(stdout))
        log.debug('stderr: {}'.format(stderr))

        raise RemoteProcessFailedError(
            args,
            proc.returncode,
            status_meaning.get(proc.returncode),
            stdout,
            stderr,
        )

    return stdout, stderr, proc.returncode
Esempio n. 27
0
    def sync(self):
        """Load the cached timestamp from the remote host, once.

        Sets ``self._current`` from the remote file's mtime (0 if the file
        or its directory does not exist) and marks ``self.synced``.
        """
        log.debug('Syncing timestamp {}'.format(self.rpath))
        if self.synced:
            log.debug('Timestamp already synced')
            return

        # ensure directory for timestamp exists
        if fs.create_dir(remote.path.dirname(self.rpath), 0o755).changed:
            # had to create directory, new timestamp
            self.synced = True
            self._current = 0
            log.debug('Timestamp did not exist')
            return

        # directory already exists
        st = remote.stat(self.rpath)
        if not st:
            # file does not exist
            self._current = 0
            log.debug('Timestamp did not exist')
        else:
            self._current = st.st_mtime
            log.debug('Timestamp synced to {}'.format(self._current))
        self.synced = True
Esempio n. 28
0
File: ssh.py Progetto: mbr/remand
def get_authorized_keys_file(user):
    """Return the path of ``user``'s authorized_keys file on the remote.

    The path template comes from the ``ssh_authorized_keys_file`` config
    value, formatted with the user's name and home directory.
    """
    entry = info['posix.users'][user]
    ak_file = config['ssh_authorized_keys_file'].format(
        name=entry.name, home=entry.home)
    log.debug('Authorized key file for {}: {}'.format(entry.name, ak_file))
    return ak_file
Esempio n. 29
0
File: apt.py Progetto: mbr/remand
    def get_age(self):
        """Return the age in seconds of the (synced) timestamp value."""
        # make sure _current reflects the remote state first
        self.sync()

        elapsed = time.time() - self._current
        log.debug('Timestamp age ({}): {}'.format(self.rpath, elapsed))
        return elapsed
Esempio n. 30
0
def get_authorized_keys_file(user):
    """Return the path of ``user``'s authorized_keys file on the remote.

    The path template comes from the ``ssh_authorized_keys_file`` config
    value, formatted with the user's name and home directory.
    """
    u = info['posix.users'][user]
    ak_file = config['ssh_authorized_keys_file'].format(name=u.name,
                                                        home=u.home)
    log.debug('Authorized key file for {}: {}'.format(u.name, ak_file))
    return ak_file
Esempio n. 31
0
def run():
    """Smoke-test basic remote filesystem operations on a vagrant test box.

    WARNING: destructive — creates and removes files in /home/vagrant.
    Never run this against a real machine.
    """
    log.warning('Running testing module. Do not run this on a real machine!')

    log.debug('Testing popen')
    proc = remote.popen(['uname'])
    stdout, stderr = proc.communicate()
    assert 'Linux' == stdout.strip()

    log.debug('Testing getcwd()')
    assert '/home/vagrant' == remote.getcwd()

    log.debug('Testing chdir()')
    remote.chdir('/')
    assert '/' == remote.getcwd()
    remote.chdir('/home/vagrant')

    # create a sample file
    TESTFN = 'testfile'
    TESTDN = 'TESTDIR'
    log.debug('Testing file')
    with remote.file(TESTFN, mode='w') as out:
        out.write('test')

    log.debug('Testing chmod')
    # FIX: 0o... octal literals (Python 3 compatible and consistent with the
    # 0o700/0o755 style used elsewhere); 0732 was legacy Python 2 syntax
    remote.chmod(TESTFN, 0o732)

    log.debug('Testing mkdir')
    # FIXME: umask?
    # FIXME: on exists/conflict?
    remote.mkdir(TESTDN, 0o700)

    log.debug('Testing listdir')
    assert TESTFN in remote.listdir('.')
    assert TESTDN in remote.listdir('.')

    log.debug('Testing rmdir')
    remote.rmdir(TESTDN)

    # FIXME: can't test chown without root access

    log.debug('Testing normalize')
    assert '/home' == remote.normalize('./..')

    log.debug('Testing symlink')
    remote.symlink('to', 'from')

    log.debug('Testing lstat')
    remote.lstat('from')

    log.debug('Testing readlink')
    assert remote.readlink('/home/vagrant/from') == 'to'

    log.debug('Testing rename')
    remote.rename('from', 'from2')
    assert remote.readlink('/home/vagrant/from2') == 'to'

    log.debug('Testing unlink')
    remote.unlink('/home/vagrant/from2')

    log.debug('Testing stat')
    s = remote.stat(TESTFN)
    assert s.st_uid == 1000
    assert s.st_gid == 1000
    remote.unlink(TESTFN)
Esempio n. 32
0
def upload_file(local_path,
                remote_path=None,
                follow_symlink=True,
                create_parent=False):
    """Upload a local file to the remote if it is missing or differs.

    To avoid having to transfer the file one or more times if unchanged,
    different methods for verification are available. These can be configured
    using the ``fs_remote_file_verify`` configuration variable.

    :param local_path: Local file to upload. If it is a symbolic link, it will
                       be resolved first (unless ``follow_symlink`` is false,
                       in which case the link itself is replicated).
    :param remote_path: Remote name for the file. If ``None``, same as
                        ``local_path``. If it points to a directory, the file
                        will be uploaded to the directory. Symbolic links not
                        pointing to a directory are an error.
    :param follow_symlink: Stat the local link target instead of the link.
    :param create_parent: Create the remote parent directory before upload.
    :return: ``Changed`` if an upload or link creation happened,
             ``Unchanged`` otherwise.
    """
    st, remote_path = _expand_remote_dest(local_path, remote_path)
    lst = os.stat(local_path) if follow_symlink else os.lstat(local_path)

    verifier = Verifier._by_short_name(config['fs_remote_file_verify'])()
    uploader = Uploader._by_short_name(config['fs_remote_file_upload'])()

    # NOTE(review): os.stat/os.lstat raise OSError for missing files, so this
    # None-check looks like dead code — confirm the intended behavior
    if lst is None:
        raise ConfigurationError(
            'Local file {!r} does not exist'.format(local_path))

    if S_ISLNK(lst.st_mode):
        # local file is a link
        rst = remote.lstat(remote_path)

        if rst:
            if not S_ISLNK(rst.st_mode):
                # remote file is not a link, unlink it
                remote.unlink(remote_path)
            elif remote.readlink(remote_path) != os.readlink(local_path):
                # non matching links
                remote.unlink(remote_path)
            else:
                # links pointing to the same target
                return Unchanged(
                    msg='Symbolink link up-to-date: {}'.format(remote_path))

        remote.symlink(os.readlink(local_path), remote_path)
        return Changed(msg='Created remote link: {}'.format(remote_path))

    if not st or not verifier.verify_file(st, local_path, remote_path):
        if create_parent:
            create_dir(remote.path.dirname(remote_path))

        uploader.upload_file(local_path, remote_path)

        if config.get_bool('fs_update_mtime'):
            times = (lst.st_mtime, lst.st_mtime)
            remote.utime(remote_path, times)
            log.debug('Updated atime/mtime: {}'.format(times))
        return Changed(msg='Upload {} -> {}'.format(local_path, remote_path))

    return Unchanged(msg='File up-to-date: {}'.format(remote_path))
Esempio n. 33
0
def upload_file(local_path,
                remote_path=None,
                follow_symlink=True,
                create_parent=False):
    """Upload a local file to the remote if it is missing or differs.

    To avoid having to transfer the file one or more times if unchanged,
    different methods for verification are available. These can be configured
    using the ``fs_remote_file_verify`` configuration variable.

    :param local_path: Local file to upload. If it is a symbolic link, it will
                       be resolved first (unless ``follow_symlink`` is false,
                       in which case the link itself is replicated).
    :param remote_path: Remote name for the file. If ``None``, same as
                        ``local_path``. If it points to a directory, the file
                        will be uploaded to the directory. Symbolic links not
                        pointing to a directory are an error.
    :param follow_symlink: Stat the local link target instead of the link.
    :param create_parent: Create the remote parent directory before upload.
    :return: ``Changed`` if an upload or link creation happened,
             ``Unchanged`` otherwise.
    """
    st, remote_path = _expand_remote_dest(local_path, remote_path)
    lst = os.stat(local_path) if follow_symlink else os.lstat(local_path)

    verifier = Verifier._by_short_name(config['fs_remote_file_verify'])()
    uploader = Uploader._by_short_name(config['fs_remote_file_upload'])()

    # NOTE(review): os.stat/os.lstat raise OSError for missing files, so this
    # None-check looks like dead code — confirm the intended behavior
    if lst is None:
        raise ConfigurationError('Local file {!r} does not exist'.format(
            local_path))

    if S_ISLNK(lst.st_mode):
        # local file is a link
        rst = remote.lstat(remote_path)

        if rst:
            if not S_ISLNK(rst.st_mode):
                # remote file is not a link, unlink it
                remote.unlink(remote_path)
            elif remote.readlink(remote_path) != os.readlink(local_path):
                # non matching links
                remote.unlink(remote_path)
            else:
                # links pointing to the same target
                return Unchanged(
                    msg='Symbolink link up-to-date: {}'.format(remote_path))

        remote.symlink(os.readlink(local_path), remote_path)
        return Changed(msg='Created remote link: {}'.format(remote_path))

    if not st or not verifier.verify_file(st, local_path, remote_path):
        if create_parent:
            create_dir(remote.path.dirname(remote_path))

        uploader.upload_file(local_path, remote_path)

        if config.get_bool('fs_update_mtime'):
            times = (lst.st_mtime, lst.st_mtime)
            remote.utime(remote_path, times)
            log.debug('Updated atime/mtime: {}'.format(times))
        return Changed(msg='Upload {} -> {}'.format(local_path, remote_path))

    return Unchanged(msg='File up-to-date: {}'.format(remote_path))
Esempio n. 34
0
 def __init__(self, name):
     """Record ``name`` and its current content hash for later comparison."""
     self.name = name
     # remember the hash at edit start so modified() can detect changes
     self.initial_hash = self._get_hash()
     log.debug('{} before editing: {}'.format(
         self.name, self.initial_hash.hexdigest()))
Esempio n. 35
0
 def modified(self):
     """Return True if the content hash changed since construction."""
     current = self._get_hash()
     log.debug('{} hash currently: {}'.format(self.name, current.hexdigest()))
     return self.initial_hash.digest() != current.digest()
Esempio n. 36
0
    def get_age(self):
        """Return the age in seconds of the remote timestamp value.

        Syncs first so ``self._current`` reflects the remote state.
        """
        self.sync()

        age = time.time() - self._current
        log.debug('Timestamp age ({}): {}'.format(self.rpath, age))
        return age