Example #1
File: client.py Project: xx4h/bup
    def sync_indexes(self):
        self.check_busy()
        conn = self.conn
        mkdirp(self.cachedir)
        # All cached idxs are extra until proven otherwise
        extra = set()
        for f in os.listdir(self.cachedir):
            debug1('%s\n' % f)
            if f.endswith('.idx'):
                extra.add(f)
        needed = set()
        conn.write('list-indexes\n')
        for line in linereader(conn):
            if not line:
                break
            assert(line.find('/') < 0)
            parts = line.split(' ')
            idx = parts[0]
            if len(parts) == 2 and parts[1] == 'load' and idx not in extra:
                # If the server requests that we load an idx and we don't
                # already have a copy of it, it is needed
                needed.add(idx)
            # Any idx that the server has heard of is proven not extra
            extra.discard(idx)

        self.check_ok()
        debug1('client: removing extra indexes: %s\n' % extra)
        for idx in extra:
            os.unlink(os.path.join(self.cachedir, idx))
        debug1('client: server requested load of: %s\n' % needed)
        for idx in needed:
            self.sync_index(idx)
        git.auto_midx(self.cachedir)
Example #2
    def sync_indexes(self):
        conn = self.conn
        mkdirp(self.cachedir)
        # All cached idxs are extra until proven otherwise
        extra = set()
        for f in os.listdir(self.cachedir):
            debug1(path_msg(f) + '\n')
            if f.endswith(b'.idx'):
                extra.add(f)
        needed = set()
        for idx, load in self._list_indexes():
            if load:
                # If the server requests that we load an idx and we don't
                # already have a copy of it, it is needed
                needed.add(idx)
            # Any idx that the server has heard of is proven not extra
            extra.discard(idx)

        debug1('client: removing extra indexes: %s\n' % extra)
        for idx in extra:
            os.unlink(os.path.join(self.cachedir, idx))
        debug1('client: server requested load of: %s\n' % needed)
        for idx in needed:
            self.sync_index(idx)
        git.auto_midx(self.cachedir)
Example #3
    def sync_indexes(self):
        self._require_command(b'list-indexes')
        self.check_busy()
        conn = self.conn
        mkdirp(self.cachedir)
        # All cached idxs are extra until proven otherwise
        extra = set()
        for f in os.listdir(self.cachedir):
            debug1(path_msg(f) + '\n')
            if f.endswith(b'.idx'):
                extra.add(f)
        needed = set()
        conn.write(b'list-indexes\n')
        for line in linereader(conn):
            if not line:
                break
            assert(line.find(b'/') < 0)
            parts = line.split(b' ')
            idx = parts[0]
            if len(parts) == 2 and parts[1] == b'load' and idx not in extra:
                # If the server requests that we load an idx and we don't
                # already have a copy of it, it is needed
                needed.add(idx)
            # Any idx that the server has heard of is proven not extra
            extra.discard(idx)

        self.check_ok()
        debug1('client: removing extra indexes: %s\n' % extra)
        for idx in extra:
            os.unlink(os.path.join(self.cachedir, idx))
        debug1('client: server requested load of: %s\n' % needed)
        for idx in needed:
            self.sync_index(idx)
        git.auto_midx(self.cachedir)
Example #4
    def __init__(self, cfg_file, create=False):
        super(EncryptedRepo, self).__init__(cfg_file, create)
        # init everything for __del__ in case we get an exception here
        self.storage = None
        self.data_writer = None
        self.meta_writer = None
        self.cfg_file = cfg_file
        self.ec_cache = {}

        if libnacl is None:
            raise Exception("Encrypted repositories require libnacl")

        if self.max_pack_size is None:
            self.max_pack_size = 1000 * 1000 * 1000
        self.cachedir = self.config(b'bup.cachedir', opttype='path')
        if self.cachedir is None:
            raise Exception("encrypted repositories need a 'cachedir'")
        if create:
            mkdirp(self.cachedir)
        if not os.path.isdir(self.cachedir):
            raise Exception(
                "cachedir doesn't exist or isn't a directory - may have to init the repo?"
            )
        self.storage = get_storage(self, create=create)

        self.readkey = None
        self.repokey = None
        self.writekey = None
        self.refsname = self.config(b'bup.refsname')
        if self.refsname is None:
            self.refsname = b'refs'
        readkey = self.config(b'bup.readkey')
        if readkey is not None:
            self.readkey = libnacl.public.SecretKey(unhexlify(readkey))
        repokey = self.config(b'bup.repokey')
        if repokey is not None:
            self.repokey = unhexlify(repokey)
        writekey = self.config(b'bup.writekey')
        if writekey is not None:
            self.writekey = unhexlify(writekey)
            if self.readkey is not None:
                assert self.writekey == self.readkey.pk
        else:
            assert self.readkey is not None, "at least one of 'readkey' or 'writekey' is required"
            self.writekey = self.readkey.pk

        self.compression = self.compression_level
        if self.compression is None:
            self.compression = -1
        self.separatemeta = self.config(b'bup.separatemeta', opttype='bool')
        self.data_written_objs = set()
        if self.separatemeta:
            self.meta_written_objs = set()
        else:
            self.meta_written_objs = self.data_written_objs

        self._synchronize_idxes()
        self.idxlist = git.PackIdxList(self.cachedir)
Example #5
    def __init__(self, repo, create=False):
        self.openset = set()
        self.path = repo.config(b'bup.path', opttype='path')
        if create:
            mkdirp(self.path)
        if not os.path.isdir(self.path):
            raise Exception(
                "FileStorage: %s doesn't exist or isn't a directory, need to init?"
                % path_msg(self.path))
Example #6
    def sync_index(self, name):
        mkdirp(self.cachedir)
        fn = os.path.join(self.cachedir, name)
        if os.path.exists(fn):
            msg = ("won't request existing .idx, try `bup bloom --check %s`"
                   % path_msg(fn))
            raise ClientError(msg)
        with atomically_replaced_file(fn, 'wb') as f:
            self.send_index(name, f, lambda size: None)
Example #7
def _set_up_path(meta, create_symlinks=True):
    # Allow directories to exist as a special case -- might have
    # been created by an earlier longer path.
    if meta.isdir():
        mkdirp(meta.path)
    else:
        parent = os.path.dirname(meta.path)
        if parent:
            mkdirp(parent)
        meta.create_path(meta.path, create_symlinks=create_symlinks)
Example #8
def _set_up_path(meta, create_symlinks=True):
    # Allow directories to exist as a special case -- might have
    # been created by an earlier longer path.
    if meta.isdir():
        mkdirp(meta.path)
    else:
        parent = os.path.dirname(meta.path)
        if parent:
            mkdirp(parent)
        meta.create_path(meta.path, create_symlinks=create_symlinks)
Example #9
def test_list_refs():
    with no_lingering_errors():
        with test_tempdir(b'bup-tgit-') as tmpdir:
            environ[b'BUP_DIR'] = bupdir = tmpdir + b'/bup'
            src = tmpdir + b'/src'
            mkdirp(src)
            with open(src + b'/1', 'wb+') as f:
                f.write(b'something\n')
            with open(src + b'/2', 'wb+') as f:
                f.write(b'something else\n')
            git.init_repo(bupdir)
            emptyset = frozenset()
            WVPASSEQ(frozenset(git.list_refs()), emptyset)
            WVPASSEQ(frozenset(git.list_refs(limit_to_tags=True)), emptyset)
            WVPASSEQ(frozenset(git.list_refs(limit_to_heads=True)), emptyset)
            exc(bup_exe, b'index', src)
            exc(bup_exe, b'save', b'-n', b'src', b'--strip', src)
            src_hash = exo(b'git', b'--git-dir', bupdir, b'rev-parse',
                           b'src').strip().split(b'\n')
            assert (len(src_hash) == 1)
            src_hash = unhexlify(src_hash[0])
            tree_hash = unhexlify(
                exo(b'git', b'--git-dir', bupdir, b'rev-parse',
                    b'src:').strip().split(b'\n')[0])
            blob_hash = unhexlify(
                exo(b'git', b'--git-dir', bupdir, b'rev-parse',
                    b'src:1').strip().split(b'\n')[0])
            WVPASSEQ(frozenset(git.list_refs()),
                     frozenset([(b'refs/heads/src', src_hash)]))
            WVPASSEQ(frozenset(git.list_refs(limit_to_tags=True)), emptyset)
            WVPASSEQ(frozenset(git.list_refs(limit_to_heads=True)),
                     frozenset([(b'refs/heads/src', src_hash)]))
            exc(b'git', b'--git-dir', bupdir, b'tag', b'commit-tag', b'src')
            WVPASSEQ(
                frozenset(git.list_refs()),
                frozenset([(b'refs/heads/src', src_hash),
                           (b'refs/tags/commit-tag', src_hash)]))
            WVPASSEQ(frozenset(git.list_refs(limit_to_tags=True)),
                     frozenset([(b'refs/tags/commit-tag', src_hash)]))
            WVPASSEQ(frozenset(git.list_refs(limit_to_heads=True)),
                     frozenset([(b'refs/heads/src', src_hash)]))
            exc(b'git', b'--git-dir', bupdir, b'tag', b'tree-tag', b'src:')
            exc(b'git', b'--git-dir', bupdir, b'tag', b'blob-tag', b'src:1')
            os.unlink(bupdir + b'/refs/heads/src')
            expected_tags = frozenset([(b'refs/tags/commit-tag', src_hash),
                                       (b'refs/tags/tree-tag', tree_hash),
                                       (b'refs/tags/blob-tag', blob_hash)])
            WVPASSEQ(frozenset(git.list_refs()), expected_tags)
            WVPASSEQ(frozenset(git.list_refs(limit_to_heads=True)),
                     frozenset([]))
            WVPASSEQ(frozenset(git.list_refs(limit_to_tags=True)),
                     expected_tags)
Example #10
def create_path(n, fullname, meta):
    if meta:
        meta.create_path(fullname)
    else:
        # These fallbacks are important -- meta could be null if, for
        # example, save created a "fake" item, i.e. a new strip/graft
        # path element, etc.  You can find cases like that by
        # searching for "Metadata()".
        unlink(fullname)
        if stat.S_ISDIR(n.mode):
            mkdirp(fullname)
        elif stat.S_ISLNK(n.mode):
            os.symlink(n.readlink(), fullname)
Example #11
def test_list_refs():
    initial_failures = wvfailure_count()
    tmpdir = tempfile.mkdtemp(dir=bup_tmp, prefix='bup-tgit-')
    os.environ['BUP_MAIN_EXE'] = bup_exe
    os.environ['BUP_DIR'] = bupdir = tmpdir + "/bup"
    src = tmpdir + '/src'
    mkdirp(src)
    with open(src + '/1', 'w+') as f:
        print >>f, 'something'
    with open(src + '/2', 'w+') as f:
        print >>f, 'something else'
    git.init_repo(bupdir)
    emptyset = frozenset()
    WVPASSEQ(frozenset(git.list_refs()), emptyset)
    WVPASSEQ(frozenset(git.list_refs(limit_to_tags=True)), emptyset)
    WVPASSEQ(frozenset(git.list_refs(limit_to_heads=True)), emptyset)
    exc(bup_exe, 'index', src)
    exc(bup_exe, 'save', '-n', 'src', '--strip', src)
    src_hash = exo('git', '--git-dir', bupdir, 'rev-parse',
                   'src').strip().split('\n')
    assert (len(src_hash) == 1)
    src_hash = src_hash[0].decode('hex')
    tree_hash = exo('git', '--git-dir', bupdir, 'rev-parse',
                    'src:').strip().split('\n')[0].decode('hex')
    blob_hash = exo('git', '--git-dir', bupdir, 'rev-parse',
                    'src:1').strip().split('\n')[0].decode('hex')
    WVPASSEQ(frozenset(git.list_refs()),
             frozenset([('refs/heads/src', src_hash)]))
    WVPASSEQ(frozenset(git.list_refs(limit_to_tags=True)), emptyset)
    WVPASSEQ(frozenset(git.list_refs(limit_to_heads=True)),
             frozenset([('refs/heads/src', src_hash)]))
    exc('git', '--git-dir', bupdir, 'tag', 'commit-tag', 'src')
    WVPASSEQ(
        frozenset(git.list_refs()),
        frozenset([('refs/heads/src', src_hash),
                   ('refs/tags/commit-tag', src_hash)]))
    WVPASSEQ(frozenset(git.list_refs(limit_to_tags=True)),
             frozenset([('refs/tags/commit-tag', src_hash)]))
    WVPASSEQ(frozenset(git.list_refs(limit_to_heads=True)),
             frozenset([('refs/heads/src', src_hash)]))
    exc('git', '--git-dir', bupdir, 'tag', 'tree-tag', 'src:')
    exc('git', '--git-dir', bupdir, 'tag', 'blob-tag', 'src:1')
    os.unlink(bupdir + '/refs/heads/src')
    expected_tags = frozenset([('refs/tags/commit-tag', src_hash),
                               ('refs/tags/tree-tag', tree_hash),
                               ('refs/tags/blob-tag', blob_hash)])
    WVPASSEQ(frozenset(git.list_refs()), expected_tags)
    WVPASSEQ(frozenset(git.list_refs(limit_to_heads=True)), frozenset([]))
    WVPASSEQ(frozenset(git.list_refs(limit_to_tags=True)), expected_tags)
    if wvfailure_count() == initial_failures:
        subprocess.call(['rm', '-rf', tmpdir])
Example #12
def test_list_refs():
    with no_lingering_errors():
        with test_tempdir('bup-tgit-') as tmpdir:
            os.environ['BUP_MAIN_EXE'] = bup_exe
            os.environ['BUP_DIR'] = bupdir = tmpdir + "/bup"
            src = tmpdir + '/src'
            mkdirp(src)
            with open(src + '/1', 'w+') as f:
                print >>f, 'something'
            with open(src + '/2', 'w+') as f:
                print >>f, 'something else'
            git.init_repo(bupdir)
            emptyset = frozenset()
            WVPASSEQ(frozenset(git.list_refs()), emptyset)
            WVPASSEQ(frozenset(git.list_refs(limit_to_tags=True)), emptyset)
            WVPASSEQ(frozenset(git.list_refs(limit_to_heads=True)), emptyset)
            exc(bup_exe, 'index', src)
            exc(bup_exe, 'save', '-n', 'src', '--strip', src)
            src_hash = exo('git', '--git-dir', bupdir, 'rev-parse',
                           'src').strip().split('\n')
            assert (len(src_hash) == 1)
            src_hash = src_hash[0].decode('hex')
            tree_hash = exo('git', '--git-dir', bupdir, 'rev-parse',
                            'src:').strip().split('\n')[0].decode('hex')
            blob_hash = exo('git', '--git-dir', bupdir, 'rev-parse',
                            'src:1').strip().split('\n')[0].decode('hex')
            WVPASSEQ(frozenset(git.list_refs()),
                     frozenset([('refs/heads/src', src_hash)]))
            WVPASSEQ(frozenset(git.list_refs(limit_to_tags=True)), emptyset)
            WVPASSEQ(frozenset(git.list_refs(limit_to_heads=True)),
                     frozenset([('refs/heads/src', src_hash)]))
            exc('git', '--git-dir', bupdir, 'tag', 'commit-tag', 'src')
            WVPASSEQ(
                frozenset(git.list_refs()),
                frozenset([('refs/heads/src', src_hash),
                           ('refs/tags/commit-tag', src_hash)]))
            WVPASSEQ(frozenset(git.list_refs(limit_to_tags=True)),
                     frozenset([('refs/tags/commit-tag', src_hash)]))
            WVPASSEQ(frozenset(git.list_refs(limit_to_heads=True)),
                     frozenset([('refs/heads/src', src_hash)]))
            exc('git', '--git-dir', bupdir, 'tag', 'tree-tag', 'src:')
            exc('git', '--git-dir', bupdir, 'tag', 'blob-tag', 'src:1')
            os.unlink(bupdir + '/refs/heads/src')
            expected_tags = frozenset([('refs/tags/commit-tag', src_hash),
                                       ('refs/tags/tree-tag', tree_hash),
                                       ('refs/tags/blob-tag', blob_hash)])
            WVPASSEQ(frozenset(git.list_refs()), expected_tags)
            WVPASSEQ(frozenset(git.list_refs(limit_to_heads=True)),
                     frozenset([]))
            WVPASSEQ(frozenset(git.list_refs(limit_to_tags=True)),
                     expected_tags)
Example #13
File: tgit.py Project: senseb/bup
def test_list_refs():
    initial_failures = wvfailure_count()
    tmpdir = tempfile.mkdtemp(dir=bup_tmp, prefix='bup-tgit-')
    os.environ['BUP_MAIN_EXE'] = bup_exe
    os.environ['BUP_DIR'] = bupdir = tmpdir + "/bup"
    src = tmpdir + '/src'
    mkdirp(src)
    with open(src + '/1', 'w+') as f:
        print >>f, 'something'
    with open(src + '/2', 'w+') as f:
        print >>f, 'something else'
    git.init_repo(bupdir)
    emptyset = frozenset()
    WVPASSEQ(frozenset(git.list_refs()), emptyset)
    WVPASSEQ(frozenset(git.list_refs(limit_to_tags=True)), emptyset)
    WVPASSEQ(frozenset(git.list_refs(limit_to_heads=True)), emptyset)
    exc(bup_exe, 'index', src)
    exc(bup_exe, 'save', '-n', 'src', '--strip', src)
    src_hash = exo('git', '--git-dir', bupdir,
                   'rev-parse', 'src').strip().split('\n')
    assert(len(src_hash) == 1)
    src_hash = src_hash[0].decode('hex')
    tree_hash = exo('git', '--git-dir', bupdir,
                   'rev-parse', 'src:').strip().split('\n')[0].decode('hex')
    blob_hash = exo('git', '--git-dir', bupdir,
                   'rev-parse', 'src:1').strip().split('\n')[0].decode('hex')
    WVPASSEQ(frozenset(git.list_refs()),
             frozenset([('refs/heads/src', src_hash)]))
    WVPASSEQ(frozenset(git.list_refs(limit_to_tags=True)), emptyset)
    WVPASSEQ(frozenset(git.list_refs(limit_to_heads=True)),
             frozenset([('refs/heads/src', src_hash)]))
    exc('git', '--git-dir', bupdir, 'tag', 'commit-tag', 'src')
    WVPASSEQ(frozenset(git.list_refs()),
             frozenset([('refs/heads/src', src_hash),
                        ('refs/tags/commit-tag', src_hash)]))
    WVPASSEQ(frozenset(git.list_refs(limit_to_tags=True)),
             frozenset([('refs/tags/commit-tag', src_hash)]))
    WVPASSEQ(frozenset(git.list_refs(limit_to_heads=True)),
             frozenset([('refs/heads/src', src_hash)]))
    exc('git', '--git-dir', bupdir, 'tag', 'tree-tag', 'src:')
    exc('git', '--git-dir', bupdir, 'tag', 'blob-tag', 'src:1')
    os.unlink(bupdir + '/refs/heads/src')
    expected_tags = frozenset([('refs/tags/commit-tag', src_hash),
                               ('refs/tags/tree-tag', tree_hash),
                               ('refs/tags/blob-tag', blob_hash)])
    WVPASSEQ(frozenset(git.list_refs()), expected_tags)
    WVPASSEQ(frozenset(git.list_refs(limit_to_heads=True)), frozenset([]))
    WVPASSEQ(frozenset(git.list_refs(limit_to_tags=True)), expected_tags)
    if wvfailure_count() == initial_failures:
        subprocess.call(['rm', '-rf', tmpdir])
Example #14
def test_list_refs():
    with no_lingering_errors():
        with test_tempdir('bup-tgit-') as tmpdir:
            os.environ['BUP_MAIN_EXE'] = bup_exe
            os.environ['BUP_DIR'] = bupdir = tmpdir + "/bup"
            src = tmpdir + '/src'
            mkdirp(src)
            with open(src + '/1', 'w+') as f:
                print >>f, 'something'
            with open(src + '/2', 'w+') as f:
                print >>f, 'something else'
            git.init_repo(bupdir)
            emptyset = frozenset()
            WVPASSEQ(frozenset(git.list_refs()), emptyset)
            WVPASSEQ(frozenset(git.list_refs(limit_to_tags=True)), emptyset)
            WVPASSEQ(frozenset(git.list_refs(limit_to_heads=True)), emptyset)
            exc(bup_exe, 'index', src)
            exc(bup_exe, 'save', '-n', 'src', '--strip', src)
            src_hash = exo('git', '--git-dir', bupdir,
                           'rev-parse', 'src').strip().split('\n')
            assert(len(src_hash) == 1)
            src_hash = src_hash[0].decode('hex')
            tree_hash = exo('git', '--git-dir', bupdir,
                           'rev-parse', 'src:').strip().split('\n')[0].decode('hex')
            blob_hash = exo('git', '--git-dir', bupdir,
                           'rev-parse', 'src:1').strip().split('\n')[0].decode('hex')
            WVPASSEQ(frozenset(git.list_refs()),
                     frozenset([('refs/heads/src', src_hash)]))
            WVPASSEQ(frozenset(git.list_refs(limit_to_tags=True)), emptyset)
            WVPASSEQ(frozenset(git.list_refs(limit_to_heads=True)),
                     frozenset([('refs/heads/src', src_hash)]))
            exc('git', '--git-dir', bupdir, 'tag', 'commit-tag', 'src')
            WVPASSEQ(frozenset(git.list_refs()),
                     frozenset([('refs/heads/src', src_hash),
                                ('refs/tags/commit-tag', src_hash)]))
            WVPASSEQ(frozenset(git.list_refs(limit_to_tags=True)),
                     frozenset([('refs/tags/commit-tag', src_hash)]))
            WVPASSEQ(frozenset(git.list_refs(limit_to_heads=True)),
                     frozenset([('refs/heads/src', src_hash)]))
            exc('git', '--git-dir', bupdir, 'tag', 'tree-tag', 'src:')
            exc('git', '--git-dir', bupdir, 'tag', 'blob-tag', 'src:1')
            os.unlink(bupdir + '/refs/heads/src')
            expected_tags = frozenset([('refs/tags/commit-tag', src_hash),
                                       ('refs/tags/tree-tag', tree_hash),
                                       ('refs/tags/blob-tag', blob_hash)])
            WVPASSEQ(frozenset(git.list_refs()), expected_tags)
            WVPASSEQ(frozenset(git.list_refs(limit_to_heads=True)), frozenset([]))
            WVPASSEQ(frozenset(git.list_refs(limit_to_tags=True)), expected_tags)
Example #15
    def sync_index(self, name):
        #debug1('requesting %r\n' % name)
        self.check_busy()
        mkdirp(self.cachedir)
        fn = os.path.join(self.cachedir, name)
        if os.path.exists(fn):
            msg = "won't request existing .idx, try `bup bloom --check %s`" % fn
            raise ClientError(msg)
        self.conn.write('send-index %s\n' % name)
        n = struct.unpack('!I', self.conn.read(4))[0]
        assert(n)
        with atomically_replaced_file(fn, 'w') as f:
            count = 0
            for b in chunkyreader(self.conn, n):
                f.write(b)
                count += len(b)
                qprogress('Receiving index from server: %d/%d\r' % (count, n))
            self.check_ok()
Example #16
def test_cat_pipe(tmpdir):
    environ[b'BUP_DIR'] = bupdir = tmpdir + b'/bup'
    src = tmpdir + b'/src'
    mkdirp(src)
    with open(src + b'/1', 'wb+') as f:
        f.write(b'something\n')
    with open(src + b'/2', 'wb+') as f:
        f.write(b'something else\n')
    git.init_repo(bupdir)
    exc(bup_exe, b'index', src)
    oidx = exo(bup_exe, b'save', b'-cn', b'src', b'--strip', src).strip()
    typ = exo(b'git', b'--git-dir', bupdir, b'cat-file', b'-t', b'src').strip()
    size = int(exo(b'git', b'--git-dir', bupdir, b'cat-file', b'-s', b'src'))
    it = git.cp().get(b'src')
    get_info = next(it)
    for buf in next(it):
        pass
    WVPASSEQ((oidx, typ, size), get_info)
Example #17
    def sync_index(self, name):
        #debug1('requesting %r\n' % name)
        self.check_busy()
        mkdirp(self.cachedir)
        fn = os.path.join(self.cachedir, name)
        if os.path.exists(fn):
            msg = "won't request existing .idx, try `bup bloom --check %s`" % fn
            raise ClientError(msg)
        self.conn.write('send-index %s\n' % name)
        n = struct.unpack('!I', self.conn.read(4))[0]
        assert (n)
        with atomically_replaced_file(fn, 'w') as f:
            count = 0
            progress('Receiving index from server: %d/%d\r' % (count, n))
            for b in chunkyreader(self.conn, n):
                f.write(b)
                count += len(b)
                qprogress('Receiving index from server: %d/%d\r' % (count, n))
            progress('Receiving index from server: %d/%d, done.\n' %
                     (count, n))
            self.check_ok()
Example #18
def test_cat_pipe():
    with no_lingering_errors():
        with test_tempdir('bup-tgit-') as tmpdir:
            os.environ['BUP_DIR'] = bupdir = tmpdir + "/bup"
            src = tmpdir + '/src'
            mkdirp(src)
            with open(src + '/1', 'w+') as f:
                print('something', file=f)
            with open(src + '/2', 'w+') as f:
                print('something else', file=f)
            git.init_repo(bupdir)
            exc(bup_exe, 'index', src)
            oidx = exo(bup_exe, 'save', '-cn', 'src', '--strip', src).strip()
            typ = exo('git', '--git-dir', bupdir, 'cat-file', '-t',
                      'src').strip()
            size = int(exo('git', '--git-dir', bupdir, 'cat-file', '-s',
                           'src'))
            it = git.cp().get('src')
            get_info = it.next()
            for buf in it.next():
                pass
            WVPASSEQ((oidx, typ, size), get_info)
Example #19
def test_cat_pipe():
    with no_lingering_errors():
        with test_tempdir('bup-tgit-') as tmpdir:
            os.environ['BUP_MAIN_EXE'] = bup_exe
            os.environ['BUP_DIR'] = bupdir = tmpdir + "/bup"
            src = tmpdir + '/src'
            mkdirp(src)
            with open(src + '/1', 'w+') as f:
                print >>f, 'something'
            with open(src + '/2', 'w+') as f:
                print >>f, 'something else'
            git.init_repo(bupdir)
            exc(bup_exe, 'index', src)
            exc(bup_exe, 'save', '-n', 'src', '--strip', src)
            git_type = exo('git', '--git-dir', bupdir, 'cat-file', '-t',
                           'src').strip()
            git_size = int(
                exo('git', '--git-dir', bupdir, 'cat-file', '-s', 'src'))
            it = git.cp().get('src', size=True)
            get_type, get_size = it.next()
            for buf in it.next():
                pass
            WVPASSEQ(get_type, git_type)
            WVPASSEQ(get_size, git_size)
Example #20
import sys, os, stat, time, random, subprocess, glob, tempfile
from bup import client, git
from bup.helpers import mkdirp
from wvtest import *

bup_tmp = os.path.realpath('../../../t/tmp')
mkdirp(bup_tmp)


def randbytes(sz):
    s = ''
    for i in xrange(sz):
        s += chr(random.randrange(0, 256))
    return s


s1 = randbytes(10000)
s2 = randbytes(10000)
s3 = randbytes(10000)

IDX_PAT = '/*.idx'


@wvtest
def test_server_split_with_indexes():
    initial_failures = wvfailure_count()
    tmpdir = tempfile.mkdtemp(dir=bup_tmp, prefix='bup-tclient-')
    os.environ['BUP_MAIN_EXE'] = '../../../bup'
    os.environ['BUP_DIR'] = bupdir = tmpdir
    git.init_repo(bupdir)
    lw = git.PackWriter()
Example #21
File: tclient.py Project: 3v/bup
import sys, os, stat, time, random, subprocess, glob, tempfile
from bup import client, git
from bup.helpers import mkdirp
from wvtest import *

bup_tmp = os.path.realpath('../../../t/tmp')
mkdirp(bup_tmp)

def randbytes(sz):
    s = ''
    for i in xrange(sz):
        s += chr(random.randrange(0,256))
    return s

s1 = randbytes(10000)
s2 = randbytes(10000)
s3 = randbytes(10000)

IDX_PAT = '/*.idx'
    
@wvtest
def test_server_split_with_indexes():
    initial_failures = wvfailure_count()
    tmpdir = tempfile.mkdtemp(dir=bup_tmp, prefix='bup-tclient-')
    os.environ['BUP_MAIN_EXE'] = '../../../bup'
    os.environ['BUP_DIR'] = bupdir = tmpdir
    git.init_repo(bupdir)
    lw = git.PackWriter()
    c = client.Client(bupdir, create=True)
    rw = c.new_packwriter()
Example #22
            msg = 'saved_errors ' + repr(helpers.saved_errors)
            print('! %-70s %s' %
                  ('%s:%-4d %s' %
                   (basename(src_file), src_line, msg), 'FAILED'))
            sys.stdout.flush()

    fail_if_errors()
    helpers.clear_errors()
    yield
    fail_if_errors()
    helpers.clear_errors()


# Assumes (of course) this file is at the top-level of the source tree
_bup_tmp = realpath(dirname(__file__) + '/t/tmp')
helpers.mkdirp(_bup_tmp)


@contextmanager
def test_tempdir(prefix):
    initial_failures = wvfailure_count()
    tmpdir = tempfile.mkdtemp(dir=_bup_tmp, prefix=prefix)
    yield tmpdir
    if wvfailure_count() == initial_failures:
        subprocess.call(['chmod', '-R', 'u+rwX', tmpdir])
        subprocess.call(['rm', '-rf', tmpdir])


def logcmd(cmd):
    if isinstance(cmd, basestring):
        print(cmd, file=sys.stderr)
Example #23
def main():
    o = options.Options(optspec)
    opt, flags, extra = o.parse(sys.argv[1:])
    verbosity = opt.verbose if not opt.quiet else -1

    git.check_repo_or_die()

    if not extra:
        o.fatal('must specify at least one filename to restore')

    exclude_rxs = parse_rx_excludes(flags, o.fatal)

    owner_map = {}
    for map_type in ('user', 'group', 'uid', 'gid'):
        owner_map[map_type] = parse_owner_mappings(map_type, flags, o.fatal)

    if opt.outdir:
        mkdirp(opt.outdir)
        os.chdir(opt.outdir)

    repo = RemoteRepo(opt.remote) if opt.remote else LocalRepo()
    top = os.getcwd()
    hardlinks = {}
    for path in extra:
        if not valid_restore_path(path):
            add_error("path %r doesn't include a branch and revision" % path)
            continue
        try:
            resolved = vfs2.lresolve(repo, path, want_meta=True)
        except vfs2.IOError as e:
            add_error(e)
            continue
        path_parent, path_name = os.path.split(path)
        leaf_name, leaf_item = resolved[-1]
        if not leaf_item:
            add_error('error: cannot access %r in %r' %
                      ('/'.join(name for name, item in resolved), path))
            continue
        if not path_name or path_name == '.':
            # Source is /foo/what/ever/ or /foo/what/ever/. -- extract
            # what/ever/* to the current directory, and if name == '.'
            # (i.e. /foo/what/ever/.), then also restore what/ever's
            # metadata to the current directory.
            treeish = vfs2.item_mode(leaf_item)
            if not treeish:
                add_error('%r cannot be restored as a directory' % path)
            else:
                items = vfs2.contents(repo, leaf_item, want_meta=True)
                dot, leaf_item = next(items, None)
                assert (dot == '.')
                for sub_name, sub_item in items:
                    restore(repo, '', sub_name, sub_item, top, opt.sparse,
                            opt.numeric_ids, owner_map, exclude_rxs, verbosity,
                            hardlinks)
                if path_name == '.':
                    leaf_item = vfs2.augment_item_meta(repo,
                                                       leaf_item,
                                                       include_size=True)
                    apply_metadata(leaf_item.meta, '.', opt.numeric_ids,
                                   owner_map)
        else:
            restore(repo, '', leaf_name, leaf_item, top, opt.sparse,
                    opt.numeric_ids, owner_map, exclude_rxs, verbosity,
                    hardlinks)

    if verbosity >= 0:
        progress('Restoring: %d, done.\n' % total_restored)
    die_if_errors()
Example #24
(opt, flags, extra) = o.parse(sys.argv[1:])

git.check_repo_or_die()
top = vfs.RefList(None)

if not extra:
    o.fatal('must specify at least one filename to restore')
    
exclude_rxs = parse_rx_excludes(flags, o.fatal)

owner_map = {}
for map_type in ('user', 'group', 'uid', 'gid'):
    owner_map[map_type] = parse_owner_mappings(map_type, flags, o.fatal)

if opt.outdir:
    mkdirp(opt.outdir)
    os.chdir(opt.outdir)

ret = 0
for d in extra:
    if not valid_restore_path(d):
        add_error("ERROR: path %r doesn't include a branch and revision" % d)
        continue
    path,name = os.path.split(d)
    try:
        n = top.lresolve(d)
    except vfs.NodeError as e:
        add_error(e)
        continue
    isdir = stat.S_ISDIR(n.mode)
    if not name or name == '.':
Example #25
def main():
    o = options.Options(optspec)
    opt, flags, extra = o.parse(sys.argv[1:])
    verbosity = opt.verbose if not opt.quiet else -1
    
    git.check_repo_or_die()

    if not extra:
        o.fatal('must specify at least one filename to restore')

    exclude_rxs = parse_rx_excludes(flags, o.fatal)

    owner_map = {}
    for map_type in ('user', 'group', 'uid', 'gid'):
        owner_map[map_type] = parse_owner_mappings(map_type, flags, o.fatal)

    if opt.outdir:
        mkdirp(opt.outdir)
        os.chdir(opt.outdir)

    repo = RemoteRepo(opt.remote) if opt.remote else LocalRepo()
    top = os.getcwd()
    hardlinks = {}
    for path in extra:
        if not valid_restore_path(path):
            add_error("path %r doesn't include a branch and revision" % path)
            continue
        try:
            resolved = vfs.resolve(repo, path, want_meta=True, follow=False)
        except vfs.IOError as e:
            add_error(e)
            continue
        if len(resolved) == 3 and resolved[2][0] == 'latest':
            # Follow latest symlink to the actual save
            try:
                resolved = vfs.resolve(repo, 'latest', parent=resolved[:-1],
                                       want_meta=True)
            except vfs.IOError as e:
                add_error(e)
                continue
            # Rename it back to 'latest'
            resolved = tuple(elt if i != 2 else ('latest',) + elt[1:]
                             for i, elt in enumerate(resolved))
        path_parent, path_name = os.path.split(path)
        leaf_name, leaf_item = resolved[-1]
        if not leaf_item:
            add_error('error: cannot access %r in %r'
                      % ('/'.join(name for name, item in resolved),
                         path))
            continue
        if not path_name or path_name == '.':
            # Source is /foo/what/ever/ or /foo/what/ever/. -- extract
            # what/ever/* to the current directory, and if name == '.'
            # (i.e. /foo/what/ever/.), then also restore what/ever's
            # metadata to the current directory.
            treeish = vfs.item_mode(leaf_item)
            if not treeish:
                add_error('%r cannot be restored as a directory' % path)
            else:
                items = vfs.contents(repo, leaf_item, want_meta=True)
                dot, leaf_item = next(items, None)
                assert(dot == '.')
                for sub_name, sub_item in items:
                    restore(repo, '', sub_name, sub_item, top,
                            opt.sparse, opt.numeric_ids, owner_map,
                            exclude_rxs, verbosity, hardlinks)
                if path_name == '.':
                    leaf_item = vfs.augment_item_meta(repo, leaf_item,
                                                      include_size=True)
                    apply_metadata(leaf_item.meta, '.',
                                   opt.numeric_ids, owner_map)
        else:
            restore(repo, '', leaf_name, leaf_item, top,
                    opt.sparse, opt.numeric_ids, owner_map,
                    exclude_rxs, verbosity, hardlinks)

    if verbosity >= 0:
        progress('Restoring: %d, done.\n' % total_restored)
    die_if_errors()
Example #26
def main(argv):
    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])
    verbosity = (opt.verbose or 0) if not opt.quiet else -1
    if opt.remote:
        opt.remote = argv_bytes(opt.remote)
    if opt.outdir:
        opt.outdir = argv_bytes(opt.outdir)

    git.check_repo_or_die()

    if not extra:
        o.fatal('must specify at least one filename to restore')

    exclude_rxs = parse_rx_excludes(flags, o.fatal)

    owner_map = {}
    for map_type in ('user', 'group', 'uid', 'gid'):
        owner_map[map_type] = parse_owner_mappings(map_type, flags, o.fatal)

    if opt.outdir:
        mkdirp(opt.outdir)
        os.chdir(opt.outdir)

    repo = RemoteRepo(opt.remote) if opt.remote else LocalRepo()
    top = fsencode(os.getcwd())
    hardlinks = {}
    for path in [argv_bytes(x) for x in extra]:
        if not valid_restore_path(path):
            add_error("path %r doesn't include a branch and revision" % path)
            continue
        try:
            resolved = vfs.resolve(repo, path, want_meta=True, follow=False)
        except vfs.IOError as e:
            add_error(e)
            continue
        if len(resolved) == 3 and resolved[2][0] == b'latest':
            # Follow latest symlink to the actual save
            try:
                resolved = vfs.resolve(repo,
                                       b'latest',
                                       parent=resolved[:-1],
                                       want_meta=True)
            except vfs.IOError as e:
                add_error(e)
                continue
            # Rename it back to 'latest'
            resolved = tuple(elt if i != 2 else (b'latest', ) + elt[1:]
                             for i, elt in enumerate(resolved))
        path_parent, path_name = os.path.split(path)
        leaf_name, leaf_item = resolved[-1]
        if not leaf_item:
            add_error('error: cannot access %r in %r' %
                      (b'/'.join(name for name, item in resolved), path))
            continue
        if not path_name or path_name == b'.':
            # Source is /foo/what/ever/ or /foo/what/ever/. -- extract
            # what/ever/* to the current directory, and if name == '.'
            # (i.e. /foo/what/ever/.), then also restore what/ever's
            # metadata to the current directory.
            treeish = vfs.item_mode(leaf_item)
            if not treeish:
                add_error('%r cannot be restored as a directory' % path)
            else:
                items = vfs.contents(repo, leaf_item, want_meta=True)
                dot, leaf_item = next(items, None)
                assert dot == b'.'
                for sub_name, sub_item in items:
                    restore(repo, b'', sub_name, sub_item, top, opt.sparse,
                            opt.numeric_ids, owner_map, exclude_rxs, verbosity,
                            hardlinks)
                if path_name == b'.':
                    leaf_item = vfs.augment_item_meta(repo,
                                                      leaf_item,
                                                      include_size=True)
                    apply_metadata(leaf_item.meta, b'.', opt.numeric_ids,
                                   owner_map)
        else:
            restore(repo, b'', leaf_name, leaf_item, top, opt.sparse,
                    opt.numeric_ids, owner_map, exclude_rxs, verbosity,
                    hardlinks)

    if verbosity >= 0:
        progress('Restoring: %d, done.\n' % total_restored)
    die_if_errors()
Example #27
def test_path_info():
    initial_failures = wvfailure_count()
    tmpdir = tempfile.mkdtemp(dir=bup_tmp, prefix='bup-tclient-')
    os.environ['BUP_MAIN_EXE'] = '../../../bup'
    os.environ['BUP_DIR'] = bupdir = tmpdir
    src = tmpdir + '/src'
    mkdirp(src)
    with open(src + '/1', 'w+') as f:
        print >>f, 'something'
    with open(src + '/2', 'w+') as f:
        print >>f, 'something else'
    os.mkdir(src + '/dir')
    git.init_repo(bupdir)
    c = client.Client(bupdir, create=True)

    info = c.path_info(['/'])
    WVPASS(info)
    WVPASS(len(info) == 1)
    WVPASS(info[0])
    name, id, type = info[0]
    WVPASSEQ(type, 'root')

    info = c.path_info(['/not-there/'])
    WVPASS(info)
    WVPASS(len(info) == 1)
    WVPASS(info[0] is None)

    data = exo(bup_exe, 'random', '128k')
    with open(src + '/chunky', 'wb+') as f:
        f.write(data)
    ex(bup_exe, 'index', '-vv', src)
    ex(bup_exe, 'save', '-n', 'src', '--strip', src)
    ex(bup_exe, 'tag', 'src-latest-tag', 'src')
    src_hash = exo('git', '--git-dir', bupdir,
                   'rev-parse', 'src').strip().split('\n')
    assert(len(src_hash) == 1)
    src_hash = src_hash[0].decode('hex')
    tree_hash = exo('git', '--git-dir', bupdir,
                   'rev-parse', 'src:dir').strip().split('\n')[0].decode('hex')
    file_hash = exo('git', '--git-dir', bupdir,
                   'rev-parse', 'src:1').strip().split('\n')[0].decode('hex')
    chunky_hash = exo('git', '--git-dir', bupdir,
                      'rev-parse', 'src:chunky.bup').strip().split('\n')[0].decode('hex')
    info = c.path_info(['/src'])
    WVPASS(info)
    WVPASS(len(info) == 1)
    WVPASS(info[0])
    WVPASSEQ(info[0], ['/src', src_hash, 'branch'])

    info = c.path_info(['/src/latest'])
    WVPASS(info)
    WVPASS(len(info) == 1)
    WVPASS(info[0])
    WVPASSEQ(info[0], ['/src/latest', src_hash, 'save'])

    info = c.path_info(['/src/latest/dir'])
    WVPASS(info)
    WVPASS(len(info) == 1)
    WVPASS(info[0])
    WVPASSEQ(info[0], ['/src/latest/dir', tree_hash, 'dir'])

    info = c.path_info(['/src/latest/1'])
    WVPASS(info)
    WVPASS(len(info) == 1)
    WVPASS(info[0])
    WVPASSEQ(info[0], ['/src/latest/1', file_hash, 'file'])

    info = c.path_info(['/src/latest/chunky'])
    WVPASS(info)
    WVPASS(len(info) == 1)
    WVPASS(info[0])
    WVPASSEQ(info[0], ['/src/latest/chunky', chunky_hash, 'chunked-file'])

    info = c.path_info(['/.tag/src-latest-tag'])
    WVPASS(info)
    WVPASS(len(info) == 1)
    WVPASS(info[0])
    WVPASSEQ(info[0], ['/.tag/src-latest-tag', src_hash, 'commit'])

    info = c.path_info(['.tag////src-latest-tag'])
    WVPASS(info)
    WVPASS(len(info) == 1)
    WVPASS(info[0])
    WVPASSEQ(info[0], ['/.tag/src-latest-tag', src_hash, 'commit'])

    if wvfailure_count() == initial_failures:
        subprocess.call(['rm', '-rf', tmpdir])
Example #28
    def fail_if_errors():
        if helpers.saved_errors:
            bt = extract_stack()
            src_file, src_line, src_func, src_txt = bt[-4]
            msg = 'saved_errors ' + repr(helpers.saved_errors)
            print '! %-70s %s' % ('%s:%-4d %s' % (basename(src_file),
                                                  src_line,
                                                  msg),
                                  'FAILED')
            sys.stdout.flush()
    fail_if_errors()
    helpers.clear_errors()
    yield
    fail_if_errors()
    helpers.clear_errors()


# Assumes (of course) this file is at the top-level of the source tree
_bup_tmp = realpath(dirname(__file__) + '/t/tmp')
helpers.mkdirp(_bup_tmp)


@contextmanager
def test_tempdir(prefix):
    initial_failures = wvfailure_count()
    tmpdir = tempfile.mkdtemp(dir=_bup_tmp, prefix=prefix)
    yield tmpdir
    if wvfailure_count() == initial_failures:
        subprocess.call(['chmod', '-R', 'u+rwX', tmpdir])
        subprocess.call(['rm', '-rf', tmpdir])
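
Every example above calls mkdirp() from bup.helpers to create a cache, source, or output directory without failing when the directory already exists. For reference, below is a minimal sketch of such a helper; it approximates what bup.helpers.mkdirp does (create the directory and any missing parents, and ignore an "already exists" error), and is an illustrative assumption rather than code copied from bup.

# Illustrative sketch only -- approximates bup.helpers.mkdirp, not copied from bup.
import errno, os

def mkdirp(d, mode=None):
    """Recursively create directory 'd' (like `mkdir -p`).

    Unlike os.makedirs(), do not raise an error if 'd' already exists.
    """
    try:
        if mode is not None:
            os.makedirs(d, mode)
        else:
            os.makedirs(d)
    except OSError as e:
        # Only swallow "directory already exists"; re-raise anything else.
        if e.errno != errno.EEXIST:
            raise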