def test_duplicate_save_dates():
    """Check that saves made with identical -d timestamps get distinct,
    suffix-disambiguated vfs names (-00 .. -10) plus 'latest'."""
    import time  # for tzset; see below
    with no_lingering_errors():
        with test_tempdir('bup-tvfs-') as tmpdir:
            bup_dir = tmpdir + '/bup'
            environ['GIT_DIR'] = bup_dir
            environ['BUP_DIR'] = bup_dir
            environ['TZ'] = 'UTC'
            # Bug fix: without tzset() the in-process localtime used when
            # formatting save names may ignore the TZ we just set, making
            # the expected '1970-01-02-...' strings below flaky.
            time.tzset()
            git.repodir = bup_dir
            data_path = tmpdir + '/src'
            os.mkdir(data_path)
            # Bug fix: open in binary mode -- the payload is bytes, and a
            # text-mode ('w+') handle raises TypeError on bytes in py3.
            with open(data_path + '/file', 'wb+') as tmpfile:
                tmpfile.write(b'canary\n')
            ex((bup_path, 'init'))
            ex((bup_path, 'index', '-v', data_path))
            for i in range(11):
                ex((bup_path, 'save', '-d', '100000', '-n', 'test',
                    data_path))
            repo = LocalRepo()
            res = vfs.resolve(repo, '/test')
            wvpasseq(2, len(res))
            name, revlist = res[-1]
            wvpasseq('test', name)
            wvpasseq(('.',
                      '1970-01-02-034640-00',
                      '1970-01-02-034640-01',
                      '1970-01-02-034640-02',
                      '1970-01-02-034640-03',
                      '1970-01-02-034640-04',
                      '1970-01-02-034640-05',
                      '1970-01-02-034640-06',
                      '1970-01-02-034640-07',
                      '1970-01-02-034640-08',
                      '1970-01-02-034640-09',
                      '1970-01-02-034640-10',
                      'latest'),
                     tuple(sorted(x[0] for x in vfs.contents(repo, revlist))))
def test_duplicate_save_dates():
    """Check that saves made with identical -d timestamps get distinct,
    suffix-disambiguated vfs names (-00 .. -10) plus 'latest'."""
    import time  # for tzset; see below
    with no_lingering_errors():
        with test_tempdir('bup-tvfs-') as tmpdir:
            bup_dir = tmpdir + '/bup'
            environ['GIT_DIR'] = bup_dir
            environ['BUP_DIR'] = bup_dir
            environ['TZ'] = 'UTC'
            # Bug fix: without tzset() the in-process localtime used when
            # formatting save names may ignore the TZ we just set, making
            # the expected '1970-01-02-...' strings below flaky.
            time.tzset()
            git.repodir = bup_dir
            data_path = tmpdir + '/src'
            os.mkdir(data_path)
            # Bug fix: open in binary mode -- the payload is bytes, and a
            # text-mode ('w+') handle raises TypeError on bytes in py3.
            with open(data_path + '/file', 'wb+') as tmpfile:
                tmpfile.write(b'canary\n')
            ex((bup_path, 'init'))
            ex((bup_path, 'index', '-v', data_path))
            for i in range(11):
                ex((bup_path, 'save', '-d', '100000', '-n', 'test',
                    data_path))
            repo = LocalRepo()
            res = vfs.resolve(repo, '/test')
            wvpasseq(2, len(res))
            name, revlist = res[-1]
            wvpasseq('test', name)
            wvpasseq(('.',
                      '1970-01-02-034640-00',
                      '1970-01-02-034640-01',
                      '1970-01-02-034640-02',
                      '1970-01-02-034640-03',
                      '1970-01-02-034640-04',
                      '1970-01-02-034640-05',
                      '1970-01-02-034640-06',
                      '1970-01-02-034640-07',
                      '1970-01-02-034640-08',
                      '1970-01-02-034640-09',
                      '1970-01-02-034640-10',
                      'latest'),
                     tuple(sorted(x[0] for x in vfs.contents(repo, revlist))))
def test_metadata_method(tmpdir):
    """Verify the .meta carried by vfs items for a saved directory, a
    regular file, and a symlink (mtimes, symlink target, size)."""
    bup_dir = tmpdir + b'/bup'
    data_path = tmpdir + b'/foo'
    os.mkdir(data_path)
    ex(b'touch', data_path + b'/file')
    ex(b'ln', b'-s', b'file', data_path + b'/symlink')
    test_time1 = 13 * 1000000000  # ns
    test_time2 = 42 * 1000000000  # ns
    utime(data_path + b'/file', (0, test_time1))
    # lutime so the symlink itself (not its target) gets mtime 0
    lutime(data_path + b'/symlink', (0, 0))
    utime(data_path, (0, test_time2))
    ex(bup_path, b'-d', bup_dir, b'init')
    ex(bup_path, b'-d', bup_dir, b'index', b'-v', data_path)
    ex(bup_path, b'-d', bup_dir, b'save', b'-tvvn', b'test', data_path)
    repo = LocalRepo(bup_dir)
    resolved = vfs.resolve(repo,
                           b'/test/latest' + resolve_parent(data_path),
                           follow=False)
    leaf_name, leaf_item = resolved[-1]
    m = leaf_item.meta
    WVPASS(m.mtime == test_time2)
    WVPASS(leaf_name == b'foo')
    contents = tuple(vfs.contents(repo, leaf_item))
    WVPASS(len(contents) == 3)
    WVPASSEQ(frozenset(name for name, item in contents),
             frozenset((b'.', b'file', b'symlink')))
    for name, item in contents:
        if name == b'file':
            m = item.meta
            WVPASS(m.mtime == test_time1)
        elif name == b'symlink':
            m = item.meta
            WVPASSEQ(m.symlink_target, b'file')
            WVPASSEQ(m.size, 4)  # len(b'file')
            WVPASSEQ(m.mtime, 0)
def test_contents_with_mismatched_bupm_git_ordering():
    """'foo' (a dir) and 'foo.' sort differently in git trees than in the
    .bupm metadata stream; make sure vfs.contents() still pairs each name
    with the right metadata."""
    with no_lingering_errors():
        with test_tempdir('bup-tvfs-') as tmpdir:
            bup_dir = tmpdir + '/bup'
            environ['GIT_DIR'] = bup_dir
            environ['BUP_DIR'] = bup_dir
            git.repodir = bup_dir
            data_path = tmpdir + '/src'
            os.mkdir(data_path)
            os.mkdir(data_path + '/foo')
            with open(data_path + '/foo.', 'w+') as tmpfile:
                tmpfile.write(b'canary\n')
            ex((bup_path, 'init'))
            ex((bup_path, 'index', '-v', data_path))
            ex((bup_path, 'save', '-tvvn', 'test', '--strip', data_path))
            repo = LocalRepo()
            tip_sref = exo(('git', 'show-ref', 'refs/heads/test'))[0]
            tip_oidx = tip_sref.strip().split()[0]
            tip_tree_oidx = exo(
                ('git', 'log', '--pretty=%T', '-n1', tip_oidx))[0].strip()
            # NOTE(review): str.decode('hex') is Python-2-only.
            tip_tree_oid = tip_tree_oidx.decode('hex')
            tip_tree = tree_dict(repo, tip_tree_oid)
            name, item = vfs.resolve(repo, '/test/latest')[2]
            wvpasseq('latest', name)
            expected = frozenset(
                (x.name, vfs.Item(oid=x.oid, meta=x.meta))
                for x in (tip_tree[name] for name in ('.', 'foo', 'foo.')))
            contents = tuple(vfs.contents(repo, item))
            wvpasseq(expected, frozenset(contents))
            # Spot check, in case tree_dict shares too much code with the vfs
            # NOTE(review): 'foo' appears to carry a bare int mode as .meta
            # (S_ISDIR(item.meta)), unlike the file entry -- confirm.
            name, item = next(((n, i) for n, i in contents if n == 'foo'))
            wvpass(S_ISDIR(item.meta))
            name, item = next(((n, i) for n, i in contents if n == 'foo.'))
            wvpass(S_ISREG(item.meta.mode))
def test_duplicate_save_dates(tmpdir):
    """Eleven saves with the same -d timestamp must get distinct,
    suffix-disambiguated names (-00 .. -10) in the vfs, plus 'latest'."""
    bup_dir = tmpdir + b'/bup'
    environ[b'GIT_DIR'] = bup_dir
    environ[b'BUP_DIR'] = bup_dir
    environ[b'TZ'] = b'UTC'
    tzset()  # make in-process localtime honor the TZ we just set
    git.repodir = bup_dir
    data_path = tmpdir + b'/src'
    os.mkdir(data_path)
    with open(data_path + b'/file', 'wb+') as tmpfile:
        tmpfile.write(b'canary\n')
    ex((b'env', ))  # log the environment for debugging
    ex((bup_path, b'init'))
    ex((bup_path, b'index', b'-v', data_path))
    for i in range(11):
        ex((bup_path, b'save', b'-d', b'100000', b'-n', b'test', data_path))
    with LocalRepo() as repo:
        res = vfs.resolve(repo, b'/test')
        wvpasseq(2, len(res))
        name, revlist = res[-1]
        wvpasseq(b'test', name)
        wvpasseq((b'.',
                  b'1970-01-02-034640-00',
                  b'1970-01-02-034640-01',
                  b'1970-01-02-034640-02',
                  b'1970-01-02-034640-03',
                  b'1970-01-02-034640-04',
                  b'1970-01-02-034640-05',
                  b'1970-01-02-034640-06',
                  b'1970-01-02-034640-07',
                  b'1970-01-02-034640-08',
                  b'1970-01-02-034640-09',
                  b'1970-01-02-034640-10',
                  b'latest'),
                 tuple(sorted(x[0] for x in vfs.contents(repo, revlist))))
def _dir_contents(repo, resolution, show_hidden=False):
    """Yield the display information for the contents of dir_item.

    Yields (display_name, link, display_size) tuples for each entry of the
    directory at resolution[-1]; dotfiles are skipped unless show_hidden.
    """
    url_query = b'?hidden=1' if show_hidden else b''

    def display_info(name, item, resolved_item, display_name=None,
                     omitsize=False):
        global opt  # reads opt.human_readable set by the CLI parser
        # link should be based on fully resolved type to avoid extra
        # HTTP redirect.
        # NOTE(review): name is bytes here; confirm url_escape accepts it.
        link = tornado.escape.url_escape(name, plus=False)
        if stat.S_ISDIR(vfs.item_mode(resolved_item)):
            link += '/'
        link = link.encode('ascii')
        if not omitsize:
            size = vfs.item_size(repo, item)
            if opt.human_readable:
                display_size = format_filesize(size)
            else:
                display_size = size
        else:
            display_size = None
        if not display_name:
            mode = vfs.item_mode(item)
            if stat.S_ISDIR(mode):
                display_name = name + b'/'
                display_size = None  # sizes aren't shown for dirs/symlinks
            elif stat.S_ISLNK(mode):
                display_name = name + b'@'
                display_size = None
            else:
                display_name = name
        return display_name, link + url_query, display_size

    dir_item = resolution[-1][1]
    for name, item in vfs.contents(repo, dir_item):
        if not show_hidden:
            if (name not in (b'.', b'..')) and name.startswith(b'.'):
                continue
        if name == b'.':
            # Replace '.' with a sizeless '..' entry pointing at the parent
            # (or at ourselves when already at the top of the resolution).
            parent_item = resolution[-2][1] if len(
                resolution) > 1 else dir_item
            yield display_info(b'..', parent_item, parent_item, b'..',
                               omitsize=True)
            continue
        res_item = vfs.ensure_item_has_metadata(repo, item,
                                                include_size=True)
        yield display_info(name, item, res_item)
def readdir(self, path, offset):
    """Yield a fuse.Direntry for each entry of the directory at path.

    Yields -errno.ENOENT (and stops) when path does not resolve.
    """
    assert not offset  # We don't return offsets, so offset should be unused
    res = vfs.resolve(self.repo, path, follow=False)
    dir_name, dir_item = res[-1]
    if not dir_item:
        yield -errno.ENOENT
        # Bug fix: without this return we'd fall through and hand a None
        # dir_item to vfs.contents() below.
        return
    yield fuse.Direntry('..')
    # FIXME: make sure want_meta=False is being completely respected
    # Bug fix: use self.repo (as the resolve above does), not a bare
    # module-level 'repo'.
    for ent_name, ent_item in vfs.contents(self.repo, dir_item,
                                           want_meta=False):
        # '/' can't appear in a FUSE entry name
        yield fuse.Direntry(ent_name.replace('/', '-'))
def readdir(self, path, offset):
    """Yield a fuse.Direntry for each entry of the directory at path.

    Yields -errno.ENOENT (and stops) when path does not resolve.
    """
    assert not offset  # We don't return offsets, so offset should be unused
    res = vfs.lresolve(self.repo, path)
    dir_name, dir_item = res[-1]
    if not dir_item:
        yield -errno.ENOENT
        # Bug fix: without this return we'd fall through and hand a None
        # dir_item to vfs.contents() below.
        return
    yield fuse.Direntry('..')
    # FIXME: make sure want_meta=False is being completely respected
    # Bug fix: use self.repo (as the lresolve above does), not a bare
    # module-level 'repo'.
    for ent_name, ent_item in vfs.contents(self.repo, dir_item,
                                           want_meta=False):
        # '/' can't appear in a FUSE entry name
        yield fuse.Direntry(ent_name.replace('/', '-'))
def _contains_hidden_files(repo, dir_item):
    """Return True if dir_item has an entry, other than '.' and '..',
    whose name begins with '.'; otherwise return False."""
    return any(entry_name.startswith('.')
               for entry_name, _ in vfs.contents(repo, dir_item,
                                                 want_meta=False)
               if entry_name not in ('.', '..'))
def _contains_hidden_files(repo, dir_item):
    """Return True if dir_item has an entry, other than b'.' and b'..',
    whose name begins with b'.'; otherwise return False."""
    return any(entry_name.startswith(b'.')
               for entry_name, _ in vfs.contents(repo, dir_item,
                                                 want_meta=False)
               if entry_name not in (b'.', b'..'))
def _completer_get_subs(repo, line):
    """Return (qtype, lastword, subs) completion data for the partial
    shell line: the quoting type, the word being completed, and the
    candidate resolutions extending it."""
    qtype, lastword = shquote.unfinished_word(line)
    dir, name = os.path.split(lastword)
    dir_path = vfs.resolve(repo, dir or b'/')
    _, dir_item = dir_path[-1]
    if not dir_item:
        return qtype, lastword, ()
    matches = (entry for entry in vfs.contents(repo, dir_item)
               if entry[0] != b'.' and entry[0].startswith(name))
    subs = tuple(dir_path + (entry,) for entry in matches)
    return qtype, lastword, subs
def _completer_get_subs(repo, line):
    """Return (dir, name, qtype, lastword, subs) completion data for the
    partial shell line: the split word being completed, its quoting type,
    and the candidate resolutions extending it."""
    qtype, lastword = shquote.unfinished_word(line)
    dir, name = os.path.split(lastword)
    dir_path = vfs.resolve(repo, dir or '/')
    _, dir_item = dir_path[-1]
    if not dir_item:
        return dir, name, qtype, lastword, ()
    matches = (entry for entry in vfs.contents(repo, dir_item)
               if entry[0] != '.' and entry[0].startswith(name))
    subs = tuple(dir_path + (entry,) for entry in matches)
    return dir, name, qtype, lastword, subs
def readdir(self, path, offset):
    """Yield a fuse.Direntry for each entry of the directory at path.

    Yields -errno.ENOENT (and stops) when path does not resolve.
    """
    path = argv_bytes(path)
    assert not offset  # We don't return offsets, so offset should be unused
    res = vfs.resolve(self.repo, path, follow=False)
    dir_name, dir_item = res[-1]
    if not dir_item:
        yield -errno.ENOENT
        # Bug fix: without this return we'd fall through and hand a None
        # dir_item to vfs.contents() below.
        return
    yield fuse.Direntry('..')
    # FIXME: make sure want_meta=False is being completely respected
    # Bug fix: use self.repo (as the resolve above does), not a bare
    # module-level 'repo'.
    for ent_name, ent_item in vfs.contents(self.repo, dir_item,
                                           want_meta=False):
        # '/' can't appear in a FUSE entry name
        fusename = fsdecode(ent_name.replace(b'/', b'-'))
        yield fuse.Direntry(fusename)
def _dir_contents(repo, resolution, show_hidden=False):
    """Yield the display information for the contents of dir_item.

    Yields (display_name, link, display_size) tuples for each entry of the
    directory at resolution[-1]; dotfiles are skipped unless show_hidden.
    """
    url_query = '?hidden=1' if show_hidden else ''

    def display_info(name, item, resolved_item, display_name=None):
        # link should be based on fully resolved type to avoid extra
        # HTTP redirect.
        if stat.S_ISDIR(vfs.item_mode(resolved_item)):
            link = urllib.quote(name) + '/'
        else:
            link = urllib.quote(name)
        size = vfs.item_size(repo, item)
        if opt.human_readable:
            display_size = format_filesize(size)
        else:
            display_size = size
        if not display_name:
            # Decorate dirs with '/' and symlinks with '@', ls -F style.
            mode = vfs.item_mode(item)
            if stat.S_ISDIR(mode):
                display_name = name + '/'
            elif stat.S_ISLNK(mode):
                display_name = name + '@'
            else:
                display_name = name
        return display_name, link + url_query, display_size

    dir_item = resolution[-1][1]
    for name, item in vfs.contents(repo, dir_item):
        if not show_hidden:
            if (name not in ('.', '..')) and name.startswith('.'):
                continue
        if name == '.':
            # Emit '.' and then a synthesized '..' pointing at the parent
            # (or at ourselves when already at the top of the resolution).
            yield display_info(name, item, item, '.')
            parent_item = resolution[-2][1] if len(
                resolution) > 1 else dir_item
            yield display_info('..', parent_item, parent_item, '..')
            continue
        res = vfs.try_resolve(repo, name, parent=resolution, want_meta=False)
        res_name, res_item = res[-1]
        yield display_info(name, item, res_item)
def _dir_contents(repo, resolution, show_hidden=False):
    """Yield the display information for the contents of dir_item.

    Yields (display_name, link, display_size) tuples for each entry of the
    directory at resolution[-1]; dotfiles are skipped unless show_hidden.
    """
    url_query = '?hidden=1' if show_hidden else ''

    def display_info(name, item, resolved_item, display_name=None):
        # link should be based on fully resolved type to avoid extra
        # HTTP redirect.
        if stat.S_ISDIR(vfs.item_mode(resolved_item)):
            link = urllib.quote(name) + '/'
        else:
            link = urllib.quote(name)
        size = vfs.item_size(repo, item)
        if opt.human_readable:
            display_size = format_filesize(size)
        else:
            display_size = size
        if not display_name:
            # Decorate dirs with '/' and symlinks with '@', ls -F style.
            mode = vfs.item_mode(item)
            if stat.S_ISDIR(mode):
                display_name = name + '/'
            elif stat.S_ISLNK(mode):
                display_name = name + '@'
            else:
                display_name = name
        return display_name, link + url_query, display_size

    dir_item = resolution[-1][1]
    for name, item in vfs.contents(repo, dir_item):
        if not show_hidden:
            if (name not in ('.', '..')) and name.startswith('.'):
                continue
        if name == '.':
            # Emit '.' and then a synthesized '..' pointing at the parent
            # (or at ourselves when already at the top of the resolution).
            yield display_info(name, item, item, '.')
            parent_item = resolution[-2][1] if len(resolution) > 1 else dir_item
            yield display_info('..', parent_item, parent_item, '..')
            continue
        res = vfs.try_resolve(repo, name, parent=resolution, want_meta=False)
        res_name, res_item = res[-1]
        yield display_info(name, item, res_item)
def test_contents_with_mismatched_bupm_git_ordering():
    """'foo' (a dir) and 'foo.' sort differently in git trees than in the
    .bupm metadata stream; make sure vfs.contents() still pairs each name
    with the right metadata."""
    with no_lingering_errors():
        with test_tempdir(b'bup-tvfs-') as tmpdir:
            bup_dir = tmpdir + b'/bup'
            environ[b'GIT_DIR'] = bup_dir
            environ[b'BUP_DIR'] = bup_dir
            git.repodir = bup_dir
            data_path = tmpdir + b'/src'
            os.mkdir(data_path)
            os.mkdir(data_path + b'/foo')
            with open(data_path + b'/foo.', 'wb+') as tmpfile:
                tmpfile.write(b'canary\n')
            ex((bup_path, b'init'))
            ex((bup_path, b'index', b'-v', data_path))
            save_utc = 100000
            save_name = strftime('%Y-%m-%d-%H%M%S',
                                 localtime(save_utc)).encode('ascii')
            ex((bup_path, b'save', b'-tvvn', b'test', b'-d',
                b'%d' % save_utc, b'--strip', data_path))
            repo = LocalRepo()
            tip_sref = exo((b'git', b'show-ref', b'refs/heads/test')).out
            tip_oidx = tip_sref.strip().split()[0]
            tip_tree_oidx = exo((b'git', b'log', b'--pretty=%T', b'-n1',
                                 tip_oidx)).out.strip()
            tip_tree_oid = unhexlify(tip_tree_oidx)
            tip_tree = tree_dict(repo, tip_tree_oid)
            name, item = vfs.resolve(repo, b'/test/latest')[2]
            wvpasseq(save_name, name)
            expected = frozenset(
                (x.name, vfs.Item(oid=x.oid, meta=x.meta))
                for x in (tip_tree[name]
                          for name in (b'.', b'foo', b'foo.')))
            contents = tuple(vfs.contents(repo, item))
            wvpasseq(expected, frozenset(contents))
            # Spot check, in case tree_dict shares too much code with the vfs
            # NOTE(review): 'foo' appears to carry a bare int mode as .meta
            # (S_ISDIR(item.meta)), unlike the file entry -- confirm.
            name, item = next(((n, i) for n, i in contents if n == b'foo'))
            wvpass(S_ISDIR(item.meta))
            name, item = next(((n, i) for n, i in contents if n == b'foo.'))
            wvpass(S_ISREG(item.meta.mode))
def test_metadata_method():
    """Verify the .meta carried by vfs items for a saved directory, a
    regular file, and a symlink (mtimes, symlink target, size)."""
    with no_lingering_errors():
        with test_tempdir('bup-tmetadata-') as tmpdir:
            bup_dir = tmpdir + '/bup'
            data_path = tmpdir + '/foo'
            os.mkdir(data_path)
            ex('touch', data_path + '/file')
            ex('ln', '-s', 'file', data_path + '/symlink')
            test_time1 = 13 * 1000000000  # ns
            test_time2 = 42 * 1000000000  # ns
            utime(data_path + '/file', (0, test_time1))
            # lutime so the symlink itself (not its target) gets mtime 0
            lutime(data_path + '/symlink', (0, 0))
            utime(data_path, (0, test_time2))
            ex(bup_path, '-d', bup_dir, 'init')
            ex(bup_path, '-d', bup_dir, 'index', '-v', data_path)
            ex(bup_path, '-d', bup_dir, 'save', '-tvvn', 'test', data_path)
            git.check_repo_or_die(bup_dir)
            repo = LocalRepo()
            resolved = vfs.resolve(repo,
                                   '/test/latest' + resolve_parent(data_path),
                                   follow=False)
            leaf_name, leaf_item = resolved[-1]
            m = leaf_item.meta
            WVPASS(m.mtime == test_time2)
            WVPASS(leaf_name == 'foo')
            contents = tuple(vfs.contents(repo, leaf_item))
            WVPASS(len(contents) == 3)
            WVPASSEQ(frozenset(name for name, item in contents),
                     frozenset(('.', 'file', 'symlink')))
            for name, item in contents:
                if name == 'file':
                    m = item.meta
                    WVPASS(m.mtime == test_time1)
                elif name == 'symlink':
                    m = item.meta
                    WVPASSEQ(m.symlink_target, 'file')
                    WVPASSEQ(m.size, 4)  # len('file')
                    WVPASSEQ(m.mtime, 0)
def test_metadata_method():
    """Verify the .meta carried by vfs items for a saved directory, a
    regular file, and a symlink (mtimes, symlink target, size)."""
    with no_lingering_errors():
        with test_tempdir('bup-tmetadata-') as tmpdir:
            bup_dir = tmpdir + '/bup'
            data_path = tmpdir + '/foo'
            os.mkdir(data_path)
            ex('touch', data_path + '/file')
            ex('ln', '-s', 'file', data_path + '/symlink')
            test_time1 = 13 * 1000000000  # ns
            test_time2 = 42 * 1000000000  # ns
            utime(data_path + '/file', (0, test_time1))
            # lutime so the symlink itself (not its target) gets mtime 0
            lutime(data_path + '/symlink', (0, 0))
            utime(data_path, (0, test_time2))
            ex(bup_path, '-d', bup_dir, 'init')
            ex(bup_path, '-d', bup_dir, 'index', '-v', data_path)
            ex(bup_path, '-d', bup_dir, 'save', '-tvvn', 'test', data_path)
            git.check_repo_or_die(bup_dir)
            repo = LocalRepo()
            resolved = vfs.resolve(repo,
                                   '/test/latest' + resolve_parent(data_path),
                                   follow=False)
            leaf_name, leaf_item = resolved[-1]
            m = leaf_item.meta
            WVPASS(m.mtime == test_time2)
            WVPASS(leaf_name == 'foo')
            contents = tuple(vfs.contents(repo, leaf_item))
            WVPASS(len(contents) == 3)
            WVPASSEQ(frozenset(name for name, item in contents),
                     frozenset(('.', 'file', 'symlink')))
            for name, item in contents:
                if name == 'file':
                    m = item.meta
                    WVPASS(m.mtime == test_time1)
                elif name == 'symlink':
                    m = item.meta
                    WVPASSEQ(m.symlink_target, 'file')
                    WVPASSEQ(m.size, 4)  # len('file')
                    WVPASSEQ(m.mtime, 0)
def test_contents_with_mismatched_bupm_git_ordering():
    """'foo' (a dir) and 'foo.' sort differently in git trees than in the
    .bupm metadata stream; make sure vfs.contents() still pairs each name
    with the right metadata."""
    with no_lingering_errors():
        with test_tempdir('bup-tvfs-') as tmpdir:
            bup_dir = tmpdir + '/bup'
            environ['GIT_DIR'] = bup_dir
            environ['BUP_DIR'] = bup_dir
            git.repodir = bup_dir
            data_path = tmpdir + '/src'
            os.mkdir(data_path)
            os.mkdir(data_path + '/foo')
            with open(data_path + '/foo.', 'w+') as tmpfile:
                tmpfile.write(b'canary\n')
            ex((bup_path, 'init'))
            ex((bup_path, 'index', '-v', data_path))
            save_utc = 100000
            save_name = strftime('%Y-%m-%d-%H%M%S', localtime(save_utc))
            ex((bup_path, 'save', '-tvvn', 'test', '-d', str(save_utc),
                '--strip', data_path))
            repo = LocalRepo()
            tip_sref = exo(('git', 'show-ref', 'refs/heads/test')).out
            tip_oidx = tip_sref.strip().split()[0]
            tip_tree_oidx = exo(('git', 'log', '--pretty=%T', '-n1',
                                 tip_oidx)).out.strip()
            # NOTE(review): str.decode('hex') is Python-2-only.
            tip_tree_oid = tip_tree_oidx.decode('hex')
            tip_tree = tree_dict(repo, tip_tree_oid)
            name, item = vfs.resolve(repo, '/test/latest')[2]
            wvpasseq(save_name, name)
            expected = frozenset((x.name, vfs.Item(oid=x.oid, meta=x.meta))
                                 for x in (tip_tree[name]
                                           for name in ('.', 'foo', 'foo.')))
            contents = tuple(vfs.contents(repo, item))
            wvpasseq(expected, frozenset(contents))
            # Spot check, in case tree_dict shares too much code with the vfs
            # NOTE(review): 'foo' appears to carry a bare int mode as .meta
            # (S_ISDIR(item.meta)), unlike the file entry -- confirm.
            name, item = next(((n, i) for n, i in contents if n == 'foo'))
            wvpass(S_ISDIR(item.meta))
            name, item = next(((n, i) for n, i in contents if n == 'foo.'))
            wvpass(S_ISREG(item.meta.mode))
def main(argv):
    """Entry point for 'bup restore': extract saved trees/files to disk.

    Parses CLI options, resolves each requested vfs path, and hands each
    leaf to restore(); errors accumulate via add_error() and are reported
    at the end by die_if_errors().
    """
    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])
    verbosity = (opt.verbose or 0) if not opt.quiet else -1
    if opt.remote:
        opt.remote = argv_bytes(opt.remote)
    if opt.outdir:
        opt.outdir = argv_bytes(opt.outdir)
    git.check_repo_or_die()
    if not extra:
        o.fatal('must specify at least one filename to restore')
    exclude_rxs = parse_rx_excludes(flags, o.fatal)
    owner_map = {}
    for map_type in ('user', 'group', 'uid', 'gid'):
        owner_map[map_type] = parse_owner_mappings(map_type, flags, o.fatal)
    if opt.outdir:
        mkdirp(opt.outdir)
        os.chdir(opt.outdir)
    repo = RemoteRepo(opt.remote) if opt.remote else LocalRepo()
    top = fsencode(os.getcwd())
    hardlinks = {}  # shared across all restores so links can span paths
    for path in [argv_bytes(x) for x in extra]:
        if not valid_restore_path(path):
            add_error("path %r doesn't include a branch and revision" % path)
            continue
        try:
            resolved = vfs.resolve(repo, path, want_meta=True, follow=False)
        except vfs.IOError as e:
            add_error(e)
            continue
        if len(resolved) == 3 and resolved[2][0] == b'latest':
            # Follow latest symlink to the actual save
            try:
                resolved = vfs.resolve(repo, b'latest', parent=resolved[:-1],
                                       want_meta=True)
            except vfs.IOError as e:
                add_error(e)
                continue
            # Rename it back to 'latest'
            resolved = tuple(elt if i != 2 else (b'latest', ) + elt[1:]
                             for i, elt in enumerate(resolved))
        path_parent, path_name = os.path.split(path)
        leaf_name, leaf_item = resolved[-1]
        if not leaf_item:
            add_error('error: cannot access %r in %r'
                      % (b'/'.join(name for name, item in resolved), path))
            continue
        if not path_name or path_name == b'.':
            # Source is /foo/what/ever/ or /foo/what/ever/. -- extract
            # what/ever/* to the current directory, and if name == '.'
            # (i.e. /foo/what/ever/.), then also restore what/ever's
            # metadata to the current directory.
            treeish = vfs.item_mode(leaf_item)
            if not treeish:
                add_error('%r cannot be restored as a directory' % path)
            else:
                items = vfs.contents(repo, leaf_item, want_meta=True)
                # contents() yields '.' (with full metadata) first.
                # NOTE(review): next(..., None) would make the unpack fail
                # oddly on an empty iterator -- confirm that can't happen.
                dot, leaf_item = next(items, None)
                assert dot == b'.'
                for sub_name, sub_item in items:
                    restore(repo, b'', sub_name, sub_item, top, opt.sparse,
                            opt.numeric_ids, owner_map, exclude_rxs,
                            verbosity, hardlinks)
                if path_name == b'.':
                    leaf_item = vfs.augment_item_meta(repo, leaf_item,
                                                      include_size=True)
                    apply_metadata(leaf_item.meta, b'.', opt.numeric_ids,
                                   owner_map)
        else:
            restore(repo, b'', leaf_name, leaf_item, top, opt.sparse,
                    opt.numeric_ids, owner_map, exclude_rxs, verbosity,
                    hardlinks)
    if verbosity >= 0:
        progress('Restoring: %d, done.\n' % total_restored)
    die_if_errors()
def restore(repo, parent_path, name, item, top, sparse, numeric_ids,
            owner_map, exclude_rxs, verbosity, hardlinks):
    """Restore item as name in the current directory, recursing into
    directories; increments the global total_restored counter.

    Uses os.chdir() to descend, so the original cwd is always restored
    via the finally clause.
    """
    global total_restored
    mode = vfs.item_mode(item)
    treeish = S_ISDIR(mode)
    fullname = parent_path + b'/' + name
    # Match behavior of index --exclude-rx with respect to paths.
    if should_rx_exclude_path(fullname + (b'/' if treeish else b''),
                              exclude_rxs):
        return
    if not treeish:
        # Do this now so we'll have meta.symlink_target for verbose output
        item = vfs.augment_item_meta(repo, item, include_size=True)
        meta = item.meta
        assert (meta.mode == mode)
    if stat.S_ISDIR(mode):
        if verbosity >= 1:
            out.write(b'%s/\n' % fullname)
    elif stat.S_ISLNK(mode):
        assert (meta.symlink_target)
        if verbosity >= 2:
            out.write(b'%s@ -> %s\n' % (fullname, meta.symlink_target))
    else:
        if verbosity >= 2:
            out.write(fullname + b'\n')
    orig_cwd = os.getcwd()
    try:
        if treeish:
            # Assumes contents() returns '.' with the full metadata first
            sub_items = vfs.contents(repo, item, want_meta=True)
            dot, item = next(sub_items, None)
            assert (dot == b'.')
            item = vfs.augment_item_meta(repo, item, include_size=True)
            meta = item.meta
            meta.create_path(name)
            os.chdir(name)
            total_restored += 1
            if verbosity >= 0:
                qprogress('Restoring: %d\r' % total_restored)
            for sub_name, sub_item in sub_items:
                restore(repo, fullname, sub_name, sub_item, top, sparse,
                        numeric_ids, owner_map, exclude_rxs, verbosity,
                        hardlinks)
            os.chdir(b'..')
            # Apply the dir's own metadata last so child writes can't
            # clobber its timestamps/permissions.
            apply_metadata(meta, name, numeric_ids, owner_map)
        else:
            created_hardlink = False
            if meta.hardlink_target:
                created_hardlink = hardlink_if_possible(fullname, item, top,
                                                        hardlinks)
            if not created_hardlink:
                meta.create_path(name)
                if stat.S_ISREG(meta.mode):
                    if sparse:
                        write_file_content_sparsely(repo, name, item)
                    else:
                        write_file_content(repo, name, item)
            total_restored += 1
            if verbosity >= 0:
                qprogress('Restoring: %d\r' % total_restored)
            if not created_hardlink:
                apply_metadata(meta, name, numeric_ids, owner_map)
    finally:
        os.chdir(orig_cwd)
def test_resolve(repo, tmpdir):
    """Exercise repo.resolve() across saves, tags, dirs, files, and
    good/bad symlinks, with and without follow."""
    data_path = tmpdir + '/src'
    resolve = repo.resolve
    save_time = 100000
    save_time_str = strftime('%Y-%m-%d-%H%M%S', localtime(save_time))
    os.mkdir(data_path)
    os.mkdir(data_path + '/dir')
    with open(data_path + '/file', 'w+') as tmpfile:
        print('canary', file=tmpfile)
    symlink('file', data_path + '/file-symlink')
    symlink('dir', data_path + '/dir-symlink')
    symlink('not-there', data_path + '/bad-symlink')
    ex((bup_path, 'index', '-v', data_path))
    ex((bup_path, 'save', '-d', str(save_time), '-tvvn', 'test',
        '--strip', data_path))
    ex((bup_path, 'tag', 'test-tag', 'test'))
    tip_hash = exo(('git', 'show-ref', 'refs/heads/test'))[0]
    tip_oidx = tip_hash.strip().split()[0]
    tip_oid = tip_oidx.decode('hex')
    tip_tree_oidx = exo(('git', 'log', '--pretty=%T', '-n1',
                         tip_oidx))[0].strip()
    tip_tree_oid = tip_tree_oidx.decode('hex')
    tip_tree = tree_dict(repo, tip_tree_oid)
    test_revlist_w_meta = vfs.RevList(meta=tip_tree['.'].meta, oid=tip_oid)
    expected_latest_item = vfs.Commit(meta=S_IFDIR | 0o755,
                                      oid=tip_tree_oid,
                                      coid=tip_oid)
    expected_latest_item_w_meta = vfs.Commit(meta=tip_tree['.'].meta,
                                             oid=tip_tree_oid,
                                             coid=tip_oid)
    expected_latest_link = vfs.FakeLink(meta=vfs.default_symlink_mode,
                                        target=save_time_str)
    expected_test_tag_item = expected_latest_item

    wvstart('resolve: /')
    vfs.clear_cache()
    res = resolve('/')
    wvpasseq(1, len(res))
    wvpasseq((('', vfs._root),), res)
    ignore, root_item = res[0]
    root_content = frozenset(vfs.contents(repo, root_item))
    wvpasseq(frozenset([('.', root_item),
                        ('.tag', vfs._tags),
                        ('test', test_revlist_w_meta)]),
             root_content)
    # All of these must normalize back to the root.
    for path in ('//', '/.', '/./', '/..', '/../',
                 '/test/latest/dir/../../..',
                 '/test/latest/dir/../../../',
                 '/test/latest/dir/../../../.',
                 '/test/latest/dir/../../..//',
                 '/test//latest/dir/../../..',
                 '/test/./latest/dir/../../..',
                 '/test/././latest/dir/../../..',
                 '/test/.//./latest/dir/../../..',
                 # Bug fix: these last two entries were implicitly
                 # concatenated into one string (missing comma), so the
                 # final case was never actually tested.
                 '/test//.//.//latest/dir/../../..',
                 '/test//./latest/dir/../../..'):
        wvstart('resolve: ' + path)
        vfs.clear_cache()
        res = resolve(path)
        wvpasseq((('', vfs._root),), res)

    wvstart('resolve: /.tag')
    vfs.clear_cache()
    res = resolve('/.tag')
    wvpasseq(2, len(res))
    wvpasseq((('', vfs._root), ('.tag', vfs._tags)), res)
    ignore, tag_item = res[1]
    tag_content = frozenset(vfs.contents(repo, tag_item))
    wvpasseq(frozenset([('.', tag_item),
                        ('test-tag', expected_test_tag_item)]),
             tag_content)

    wvstart('resolve: /test')
    vfs.clear_cache()
    res = resolve('/test')
    wvpasseq(2, len(res))
    wvpasseq((('', vfs._root), ('test', test_revlist_w_meta)), res)
    ignore, test_item = res[1]
    test_content = frozenset(vfs.contents(repo, test_item))
    # latest has metadata here due to caching
    wvpasseq(frozenset([('.', test_revlist_w_meta),
                        (save_time_str, expected_latest_item_w_meta),
                        ('latest', expected_latest_link)]),
             test_content)

    wvstart('resolve: /test/latest')
    vfs.clear_cache()
    res = resolve('/test/latest')
    wvpasseq(3, len(res))
    expected_latest_item_w_meta = vfs.Commit(meta=tip_tree['.'].meta,
                                             oid=tip_tree_oid,
                                             coid=tip_oid)
    expected = (('', vfs._root),
                ('test', test_revlist_w_meta),
                (save_time_str, expected_latest_item_w_meta))
    wvpasseq(expected, res)
    ignore, latest_item = res[2]
    latest_content = frozenset(vfs.contents(repo, latest_item))
    expected = frozenset((x.name, vfs.Item(oid=x.oid, meta=x.meta))
                         for x in (tip_tree[name]
                                   for name in ('.', 'bad-symlink', 'dir',
                                                'dir-symlink', 'file',
                                                'file-symlink')))
    wvpasseq(expected, latest_content)

    wvstart('resolve: /test/latest/file')
    vfs.clear_cache()
    res = resolve('/test/latest/file')
    wvpasseq(4, len(res))
    expected_file_item_w_meta = vfs.Item(meta=tip_tree['file'].meta,
                                         oid=tip_tree['file'].oid)
    expected = (('', vfs._root),
                ('test', test_revlist_w_meta),
                (save_time_str, expected_latest_item_w_meta),
                ('file', expected_file_item_w_meta))
    wvpasseq(expected, res)

    wvstart('resolve: /test/latest/bad-symlink')
    vfs.clear_cache()
    res = resolve('/test/latest/bad-symlink')
    wvpasseq(4, len(res))
    # Following a dangling symlink terminates in (target-name, None).
    expected = (('', vfs._root),
                ('test', test_revlist_w_meta),
                (save_time_str, expected_latest_item_w_meta),
                ('not-there', None))
    wvpasseq(expected, res)

    wvstart('resolve nofollow: /test/latest/bad-symlink')
    vfs.clear_cache()
    res = resolve('/test/latest/bad-symlink', follow=False)
    wvpasseq(4, len(res))
    bad_symlink_value = tip_tree['bad-symlink']
    expected_bad_symlink_item_w_meta = vfs.Item(meta=bad_symlink_value.meta,
                                                oid=bad_symlink_value.oid)
    expected = (('', vfs._root),
                ('test', test_revlist_w_meta),
                (save_time_str, expected_latest_item_w_meta),
                ('bad-symlink', expected_bad_symlink_item_w_meta))
    wvpasseq(expected, res)

    wvstart('resolve: /test/latest/file-symlink')
    vfs.clear_cache()
    res = resolve('/test/latest/file-symlink')
    wvpasseq(4, len(res))
    expected = (('', vfs._root),
                ('test', test_revlist_w_meta),
                (save_time_str, expected_latest_item_w_meta),
                ('file', expected_file_item_w_meta))
    wvpasseq(expected, res)

    wvstart('resolve nofollow: /test/latest/file-symlink')
    vfs.clear_cache()
    res = resolve('/test/latest/file-symlink', follow=False)
    wvpasseq(4, len(res))
    file_symlink_value = tip_tree['file-symlink']
    expected_file_symlink_item_w_meta = \
        vfs.Item(meta=file_symlink_value.meta, oid=file_symlink_value.oid)
    expected = (('', vfs._root),
                ('test', test_revlist_w_meta),
                (save_time_str, expected_latest_item_w_meta),
                ('file-symlink', expected_file_symlink_item_w_meta))
    wvpasseq(expected, res)

    wvstart('resolve: /test/latest/missing')
    vfs.clear_cache()
    res = resolve('/test/latest/missing')
    wvpasseq(4, len(res))
    name, item = res[-1]
    wvpasseq('missing', name)
    wvpass(item is None)

    # Treating a regular file as a directory must raise ENOTDIR.
    for path in ('/test/latest/file/',
                 '/test/latest/file/.',
                 '/test/latest/file/..',
                 '/test/latest/file/../',
                 '/test/latest/file/../.',
                 '/test/latest/file/../..',
                 '/test/latest/file/foo'):
        wvstart('resolve: ' + path)
        vfs.clear_cache()
        try:
            resolve(path)
        except vfs.IOError as res_ex:
            wvpasseq(ENOTDIR, res_ex.errno)
            wvpasseq(['', 'test', save_time_str, 'file'],
                     [name for name, item in res_ex.terminus])

    for path in ('/test/latest/file-symlink/',
                 '/test/latest/file-symlink/.',
                 '/test/latest/file-symlink/..',
                 '/test/latest/file-symlink/../',
                 '/test/latest/file-symlink/../.',
                 '/test/latest/file-symlink/../..'):
        wvstart('resolve nofollow: ' + path)
        vfs.clear_cache()
        try:
            resolve(path, follow=False)
        except vfs.IOError as res_ex:
            wvpasseq(ENOTDIR, res_ex.errno)
            wvpasseq(['', 'test', save_time_str, 'file'],
                     [name for name, item in res_ex.terminus])

    wvstart('resolve: non-directory parent')
    vfs.clear_cache()
    file_res = resolve('/test/latest/file')
    try:
        resolve('foo', parent=file_res)
    except vfs.IOError as res_ex:
        wvpasseq(ENOTDIR, res_ex.errno)
        wvpasseq(None, res_ex.terminus)

    wvstart('resolve nofollow: /test/latest/dir-symlink')
    vfs.clear_cache()
    res = resolve('/test/latest/dir-symlink', follow=False)
    wvpasseq(4, len(res))
    dir_symlink_value = tip_tree['dir-symlink']
    expected_dir_symlink_item_w_meta = vfs.Item(meta=dir_symlink_value.meta,
                                                oid=dir_symlink_value.oid)
    expected = (('', vfs._root),
                ('test', test_revlist_w_meta),
                (save_time_str, expected_latest_item_w_meta),
                ('dir-symlink', expected_dir_symlink_item_w_meta))
    wvpasseq(expected, res)
    dir_value = tip_tree['dir']
    expected_dir_item = vfs.Item(oid=dir_value.oid,
                                 meta=tree_dict(repo,
                                                dir_value.oid)['.'].meta)
    expected = (('', vfs._root),
                ('test', test_revlist_w_meta),
                (save_time_str, expected_latest_item_w_meta),
                ('dir', expected_dir_item))

    def lresolve(*args, **keys):
        return resolve(*args, **dict(keys, follow=False))

    # A trailing '/' or '/.' forces symlink traversal even with nofollow.
    for resname, resolver in (('resolve', resolve),
                              ('resolve nofollow', lresolve)):
        for path in ('/test/latest/dir-symlink/',
                     '/test/latest/dir-symlink/.'):
            wvstart(resname + ': ' + path)
            vfs.clear_cache()
            res = resolver(path)
            wvpasseq(4, len(res))
            wvpasseq(expected, res)
    wvstart('resolve: /test/latest/dir-symlink')
    vfs.clear_cache()
    res = resolve(path)
    wvpasseq(4, len(res))
    wvpasseq(expected, res)
raise Exception('%s does not exist' % path_msg(b'/'.join(name for name, item in res))) with vfs.fopen(repo, leaf_item) as srcfile: with open(lname, 'wb') as destfile: log('Saving %s\n' % path_msg(lname)) write_to_file(srcfile, destfile) elif cmd == b'mget': for parm in words[1:]: dir, base = os.path.split(parm) res = vfs.resolve(repo, dir, parent=pwd) _, dir_item = res[-1] if not dir_item: raise Exception('%s does not exist' % path_msg(dir)) for name, item in vfs.contents(repo, dir_item): if name == b'.': continue if fnmatch.fnmatch(name, base): if stat.S_ISLNK(vfs.item_mode(item)): deref = vfs.resolve(repo, name, parent=res) deref_name, deref_item = deref[-1] if not deref_item: raise Exception('%s does not exist' % path_msg( '/'.join(name for name, item in deref))) item = deref_item with vfs.fopen(repo, item) as srcfile: with open(name, 'wb') as destfile: log('Saving %s\n' % path_msg(name)) write_to_file(srcfile, destfile) elif cmd == b'help' or cmd == b'?':
def restore(repo, parent_path, name, item, top, sparse, numeric_ids,
            owner_map, exclude_rxs, verbosity, hardlinks):
    """Restore item as name in the current directory, recursing into
    directories; increments the global total_restored counter.

    Uses os.chdir() to descend, so the original cwd is always restored
    via the finally clause.
    """
    global total_restored
    mode = vfs.item_mode(item)
    treeish = S_ISDIR(mode)
    fullname = parent_path + '/' + name
    # Match behavior of index --exclude-rx with respect to paths.
    if should_rx_exclude_path(fullname + ('/' if treeish else ''),
                              exclude_rxs):
        return
    if not treeish:
        # Do this now so we'll have meta.symlink_target for verbose output
        item = vfs.augment_item_meta(repo, item, include_size=True)
        meta = item.meta
        assert(meta.mode == mode)
    if stat.S_ISDIR(mode):
        if verbosity >= 1:
            print('%s/' % fullname)
    elif stat.S_ISLNK(mode):
        assert(meta.symlink_target)
        if verbosity >= 2:
            print('%s@ -> %s' % (fullname, meta.symlink_target))
    else:
        if verbosity >= 2:
            print(fullname)
    orig_cwd = os.getcwd()
    try:
        if treeish:
            # Assumes contents() returns '.' with the full metadata first
            sub_items = vfs.contents(repo, item, want_meta=True)
            dot, item = next(sub_items, None)
            assert(dot == '.')
            item = vfs.augment_item_meta(repo, item, include_size=True)
            meta = item.meta
            meta.create_path(name)
            os.chdir(name)
            total_restored += 1
            if verbosity >= 0:
                qprogress('Restoring: %d\r' % total_restored)
            for sub_name, sub_item in sub_items:
                restore(repo, fullname, sub_name, sub_item, top, sparse,
                        numeric_ids, owner_map, exclude_rxs, verbosity,
                        hardlinks)
            os.chdir('..')
            # Apply the dir's own metadata last so child writes can't
            # clobber its timestamps/permissions.
            apply_metadata(meta, name, numeric_ids, owner_map)
        else:
            created_hardlink = False
            if meta.hardlink_target:
                created_hardlink = hardlink_if_possible(fullname, item, top,
                                                        hardlinks)
            if not created_hardlink:
                meta.create_path(name)
                if stat.S_ISREG(meta.mode):
                    if sparse:
                        write_file_content_sparsely(repo, name, item)
                    else:
                        write_file_content(repo, name, item)
            total_restored += 1
            if verbosity >= 0:
                qprogress('Restoring: %d\r' % total_restored)
            if not created_hardlink:
                apply_metadata(meta, name, numeric_ids, owner_map)
    finally:
        os.chdir(orig_cwd)
def main():
    """Entry point for the restore command: parse options, resolve each
    requested VFS path, and restore it into the output directory.

    Exits via die_if_errors() if any per-path errors were accumulated.
    """
    o = options.Options(optspec)
    opt, flags, extra = o.parse(sys.argv[1:])
    verbosity = opt.verbose if not opt.quiet else -1
    git.check_repo_or_die()
    if not extra:
        o.fatal('must specify at least one filename to restore')
    exclude_rxs = parse_rx_excludes(flags, o.fatal)
    owner_map = {}
    for map_type in ('user', 'group', 'uid', 'gid'):
        owner_map[map_type] = parse_owner_mappings(map_type, flags, o.fatal)
    if opt.outdir:
        mkdirp(opt.outdir)
        os.chdir(opt.outdir)
    repo = RemoteRepo(opt.remote) if opt.remote else LocalRepo()
    top = os.getcwd()
    hardlinks = {}  # shared across all paths so cross-path hardlinks work
    for path in extra:
        if not valid_restore_path(path):
            add_error("path %r doesn't include a branch and revision" % path)
            continue
        try:
            resolved = vfs.resolve(repo, path, want_meta=True, follow=False)
        except vfs.IOError as e:
            add_error(e)
            continue
        if len(resolved) == 3 and resolved[2][0] == 'latest':
            # Follow latest symlink to the actual save
            try:
                resolved = vfs.resolve(repo, 'latest', parent=resolved[:-1],
                                       want_meta=True)
            except vfs.IOError as e:
                add_error(e)
                continue
            # Rename it back to 'latest'
            resolved = tuple(elt if i != 2 else ('latest',) + elt[1:]
                             for i, elt in enumerate(resolved))
        path_parent, path_name = os.path.split(path)
        leaf_name, leaf_item = resolved[-1]
        if not leaf_item:
            add_error('error: cannot access %r in %r'
                      % ('/'.join(name for name, item in resolved), path))
            continue
        if not path_name or path_name == '.':
            # Source is /foo/what/ever/ or /foo/what/ever/. -- extract
            # what/ever/* to the current directory, and if name == '.'
            # (i.e. /foo/what/ever/.), then also restore what/ever's
            # metadata to the current directory.
            # NOTE(review): treeish holds the raw mode (truthy for any
            # valid item), not an S_ISDIR() test -- confirm intent.
            treeish = vfs.item_mode(leaf_item)
            if not treeish:
                add_error('%r cannot be restored as a directory' % path)
            else:
                items = vfs.contents(repo, leaf_item, want_meta=True)
                # contents() yields '.' (the dir itself) first.
                dot, leaf_item = next(items, None)
                assert(dot == '.')
                for sub_name, sub_item in items:
                    restore(repo, '', sub_name, sub_item, top, opt.sparse,
                            opt.numeric_ids, owner_map, exclude_rxs,
                            verbosity, hardlinks)
                if path_name == '.':
                    leaf_item = vfs.augment_item_meta(repo, leaf_item,
                                                      include_size=True)
                    apply_metadata(leaf_item.meta, '.', opt.numeric_ids,
                                   owner_map)
        else:
            restore(repo, '', leaf_name, leaf_item, top, opt.sparse,
                    opt.numeric_ids, owner_map, exclude_rxs, verbosity,
                    hardlinks)
    if verbosity >= 0:
        progress('Restoring: %d, done.\n' % total_restored)
    die_if_errors()
def show_paths(repo, opt, paths, out, pwd, should_columnate, prefix=b''):
    """Write an ls-style listing of each path (bytes) in paths to out.

    pwd is the directory paths are resolved against; prefix is prepended
    to entry names when recursing (-R).  Returns 0 on success, 1 if any
    path could not be accessed.
    """
    def item_line(item, name):
        # Format a single entry according to the active display options.
        return item_info(item, prefix + name,
                         show_hash=opt.hash,
                         commit_hash=opt.commit_hash,
                         long_fmt=opt.long_listing,
                         classification=opt.classification,
                         numeric_ids=opt.numeric_ids,
                         human_readable=opt.human_readable)
    ret = 0
    # Metadata is only needed when -l or classification output is on.
    want_meta = bool(opt.long_listing or opt.classification)
    pending = []  # lines buffered for columnate() when columnating
    last_n = len(paths) - 1
    for n, printpath in enumerate(paths):
        path = posixpath.join(pwd, printpath)
        try:
            if last_n > 0:
                # Multiple arguments: label each listing like ls does.
                out.write(b'%s:\n' % printpath)
            if opt.directory:
                # -d: list the entry itself; don't follow a final symlink.
                resolved = vfs.resolve(repo, path, follow=False)
            else:
                resolved = vfs.try_resolve(repo, path, want_meta=want_meta)
            leaf_name, leaf_item = resolved[-1]
            if not leaf_item:
                log('error: cannot access %r in %r\n'
                    % ('/'.join(path_msg(name) for name, item in resolved),
                       path_msg(path)))
                ret = 1
                continue
            if not opt.directory and S_ISDIR(vfs.item_mode(leaf_item)):
                items = vfs.contents(repo, leaf_item, want_meta=want_meta)
                if opt.show_hidden == 'all':
                    # Match non-bup "ls -a ... /".
                    parent = resolved[-2] if len(resolved) > 1 else resolved[0]
                    items = chain(items, ((b'..', parent[1]),))
                for sub_name, sub_item in sorted(items, key=lambda x: x[0]):
                    if opt.show_hidden != 'all' and sub_name == b'.':
                        continue
                    if sub_name.startswith(b'.') and \
                       opt.show_hidden not in ('almost', 'all'):
                        continue
                    # always skip . and .. in the subfolders - already
                    # printed it anyway
                    if prefix and sub_name in (b'.', b'..'):
                        continue
                    if opt.l:
                        sub_item = vfs.ensure_item_has_metadata(
                            repo, sub_item, include_size=True)
                    elif want_meta:
                        sub_item = vfs.augment_item_meta(repo, sub_item,
                                                         include_size=True)
                    line = item_line(sub_item, sub_name)
                    if should_columnate:
                        pending.append(line)
                    else:
                        out.write(line)
                        out.write(b'\n')
                    # recurse into subdirectories (apart from . and ..,
                    # of course)
                    if opt.recursive and \
                       S_ISDIR(vfs.item_mode(sub_item)) and \
                       sub_name not in (b'.', b'..'):
                        show_paths(repo, opt, [path + b'/' + sub_name], out,
                                   pwd, should_columnate,
                                   prefix=prefix + sub_name + b'/')
            else:
                if opt.long_listing:
                    leaf_item = vfs.augment_item_meta(repo, leaf_item,
                                                      include_size=True)
                line = item_line(leaf_item, os.path.normpath(path))
                if should_columnate:
                    pending.append(line)
                else:
                    out.write(line)
                    out.write(b'\n')
        except vfs.IOError as ex:
            log('bup: %s\n' % ex)
            ret = 1
        # Flush the columnated buffer after each argument's listing.
        if pending:
            out.write(columnate(pending, b''))
            pending = []
        if n < last_n:
            out.write(b'\n')
    return ret
def test_resolve(repo, tmpdir):
    """Exercise repo.resolve() (follow and nofollow) against a small save
    containing a file, a dir, and good/bad symlinks.

    Fix: a missing comma in the root-equivalent path list caused two
    adjacent bytes literals to be implicitly concatenated, so
    b'/test//./latest/dir/../../..' was never tested on its own.
    """
    data_path = tmpdir + b'/src'
    resolve = repo.resolve
    save_time = 100000
    save_time_str = strftime('%Y-%m-%d-%H%M%S',
                             localtime(save_time)).encode('ascii')
    # Build and save a small source tree.
    os.mkdir(data_path)
    os.mkdir(data_path + b'/dir')
    with open(data_path + b'/file', 'wb+') as tmpfile:
        tmpfile.write(b'canary\n')
    symlink(b'file', data_path + b'/file-symlink')
    symlink(b'dir', data_path + b'/dir-symlink')
    symlink(b'not-there', data_path + b'/bad-symlink')
    ex((bup_path, b'index', b'-v', data_path))
    ex((bup_path, b'save', b'-d', b'%d' % save_time, b'-tvvn', b'test',
        b'--strip', data_path))
    ex((bup_path, b'tag', b'test-tag', b'test'))
    # Gather the tip commit/tree oids via git for expected values.
    tip_hash = exo((b'git', b'show-ref', b'refs/heads/test'))[0]
    tip_oidx = tip_hash.strip().split()[0]
    tip_oid = unhexlify(tip_oidx)
    tip_tree_oidx = exo((b'git', b'log', b'--pretty=%T', b'-n1',
                         tip_oidx))[0].strip()
    tip_tree_oid = unhexlify(tip_tree_oidx)
    tip_tree = tree_dict(repo, tip_tree_oid)
    test_revlist_w_meta = vfs.RevList(meta=tip_tree[b'.'].meta,
                                      oid=tip_oid)
    expected_latest_item = vfs.Commit(meta=S_IFDIR | 0o755,
                                      oid=tip_tree_oid,
                                      coid=tip_oid)
    expected_latest_item_w_meta = vfs.Commit(meta=tip_tree[b'.'].meta,
                                             oid=tip_tree_oid,
                                             coid=tip_oid)
    expected_latest_link = vfs.FakeLink(meta=vfs.default_symlink_mode,
                                        target=save_time_str)
    expected_test_tag_item = expected_latest_item

    wvstart('resolve: /')
    vfs.clear_cache()
    res = resolve(b'/')
    wvpasseq(1, len(res))
    wvpasseq(((b'', vfs._root),), res)
    ignore, root_item = res[0]
    root_content = frozenset(vfs.contents(repo, root_item))
    wvpasseq(frozenset([(b'.', root_item),
                        (b'.tag', vfs._tags),
                        (b'test', test_revlist_w_meta)]),
             root_content)
    # All of these must normalize to the root.
    for path in (b'//', b'/.', b'/./', b'/..', b'/../',
                 b'/test/latest/dir/../../..',
                 b'/test/latest/dir/../../../',
                 b'/test/latest/dir/../../../.',
                 b'/test/latest/dir/../../..//',
                 b'/test//latest/dir/../../..',
                 b'/test/./latest/dir/../../..',
                 b'/test/././latest/dir/../../..',
                 b'/test/.//./latest/dir/../../..',
                 b'/test//.//.//latest/dir/../../..',  # comma restored here
                 b'/test//./latest/dir/../../..'):
        wvstart('resolve: ' + path_msg(path))
        vfs.clear_cache()
        res = resolve(path)
        wvpasseq(((b'', vfs._root),), res)

    wvstart('resolve: /.tag')
    vfs.clear_cache()
    res = resolve(b'/.tag')
    wvpasseq(2, len(res))
    wvpasseq(((b'', vfs._root), (b'.tag', vfs._tags)), res)
    ignore, tag_item = res[1]
    tag_content = frozenset(vfs.contents(repo, tag_item))
    wvpasseq(frozenset([(b'.', tag_item),
                        (b'test-tag', expected_test_tag_item)]),
             tag_content)

    wvstart('resolve: /test')
    vfs.clear_cache()
    res = resolve(b'/test')
    wvpasseq(2, len(res))
    wvpasseq(((b'', vfs._root), (b'test', test_revlist_w_meta)), res)
    ignore, test_item = res[1]
    test_content = frozenset(vfs.contents(repo, test_item))
    # latest has metadata here due to caching
    wvpasseq(frozenset([(b'.', test_revlist_w_meta),
                        (save_time_str, expected_latest_item_w_meta),
                        (b'latest', expected_latest_link)]),
             test_content)

    wvstart('resolve: /test/latest')
    vfs.clear_cache()
    res = resolve(b'/test/latest')
    wvpasseq(3, len(res))
    expected_latest_item_w_meta = vfs.Commit(meta=tip_tree[b'.'].meta,
                                             oid=tip_tree_oid,
                                             coid=tip_oid)
    expected = ((b'', vfs._root),
                (b'test', test_revlist_w_meta),
                (save_time_str, expected_latest_item_w_meta))
    wvpasseq(expected, res)
    ignore, latest_item = res[2]
    latest_content = frozenset(vfs.contents(repo, latest_item))
    expected = frozenset((x.name, vfs.Item(oid=x.oid, meta=x.meta))
                         for x in (tip_tree[name]
                                   for name in (b'.', b'bad-symlink', b'dir',
                                                b'dir-symlink', b'file',
                                                b'file-symlink')))
    wvpasseq(expected, latest_content)

    wvstart('resolve: /test/latest/file')
    vfs.clear_cache()
    res = resolve(b'/test/latest/file')
    wvpasseq(4, len(res))
    expected_file_item_w_meta = vfs.Item(meta=tip_tree[b'file'].meta,
                                         oid=tip_tree[b'file'].oid)
    expected = ((b'', vfs._root),
                (b'test', test_revlist_w_meta),
                (save_time_str, expected_latest_item_w_meta),
                (b'file', expected_file_item_w_meta))
    wvpasseq(expected, res)

    wvstart('resolve: /test/latest/bad-symlink')
    vfs.clear_cache()
    res = resolve(b'/test/latest/bad-symlink')
    wvpasseq(4, len(res))
    # Following a dangling symlink terminates at (target, None).
    expected = ((b'', vfs._root),
                (b'test', test_revlist_w_meta),
                (save_time_str, expected_latest_item_w_meta),
                (b'not-there', None))
    wvpasseq(expected, res)

    wvstart('resolve nofollow: /test/latest/bad-symlink')
    vfs.clear_cache()
    res = resolve(b'/test/latest/bad-symlink', follow=False)
    wvpasseq(4, len(res))
    bad_symlink_value = tip_tree[b'bad-symlink']
    expected_bad_symlink_item_w_meta = vfs.Item(meta=bad_symlink_value.meta,
                                                oid=bad_symlink_value.oid)
    expected = ((b'', vfs._root),
                (b'test', test_revlist_w_meta),
                (save_time_str, expected_latest_item_w_meta),
                (b'bad-symlink', expected_bad_symlink_item_w_meta))
    wvpasseq(expected, res)

    wvstart('resolve: /test/latest/file-symlink')
    vfs.clear_cache()
    res = resolve(b'/test/latest/file-symlink')
    wvpasseq(4, len(res))
    expected = ((b'', vfs._root),
                (b'test', test_revlist_w_meta),
                (save_time_str, expected_latest_item_w_meta),
                (b'file', expected_file_item_w_meta))
    wvpasseq(expected, res)

    wvstart('resolve nofollow: /test/latest/file-symlink')
    vfs.clear_cache()
    res = resolve(b'/test/latest/file-symlink', follow=False)
    wvpasseq(4, len(res))
    file_symlink_value = tip_tree[b'file-symlink']
    expected_file_symlink_item_w_meta = \
        vfs.Item(meta=file_symlink_value.meta, oid=file_symlink_value.oid)
    expected = ((b'', vfs._root),
                (b'test', test_revlist_w_meta),
                (save_time_str, expected_latest_item_w_meta),
                (b'file-symlink', expected_file_symlink_item_w_meta))
    wvpasseq(expected, res)

    wvstart('resolve: /test/latest/missing')
    vfs.clear_cache()
    res = resolve(b'/test/latest/missing')
    wvpasseq(4, len(res))
    name, item = res[-1]
    wvpasseq(b'missing', name)
    wvpass(item is None)

    # Treating a plain file as a directory must raise ENOTDIR.
    for path in (b'/test/latest/file/',
                 b'/test/latest/file/.',
                 b'/test/latest/file/..',
                 b'/test/latest/file/../',
                 b'/test/latest/file/../.',
                 b'/test/latest/file/../..',
                 b'/test/latest/file/foo'):
        wvstart('resolve: ' + path_msg(path))
        vfs.clear_cache()
        try:
            resolve(path)
        except vfs.IOError as res_ex:
            wvpasseq(ENOTDIR, res_ex.errno)
            wvpasseq([b'', b'test', save_time_str, b'file'],
                     [name for name, item in res_ex.terminus])

    for path in (b'/test/latest/file-symlink/',
                 b'/test/latest/file-symlink/.',
                 b'/test/latest/file-symlink/..',
                 b'/test/latest/file-symlink/../',
                 b'/test/latest/file-symlink/../.',
                 b'/test/latest/file-symlink/../..'):
        wvstart('resolve nofollow: ' + path_msg(path))
        vfs.clear_cache()
        try:
            resolve(path, follow=False)
        except vfs.IOError as res_ex:
            wvpasseq(ENOTDIR, res_ex.errno)
            wvpasseq([b'', b'test', save_time_str, b'file'],
                     [name for name, item in res_ex.terminus])

    wvstart('resolve: non-directory parent')
    vfs.clear_cache()
    file_res = resolve(b'/test/latest/file')
    try:
        resolve(b'foo', parent=file_res)
    except vfs.IOError as res_ex:
        wvpasseq(ENOTDIR, res_ex.errno)
        wvpasseq(None, res_ex.terminus)

    wvstart('resolve nofollow: /test/latest/dir-symlink')
    vfs.clear_cache()
    res = resolve(b'/test/latest/dir-symlink', follow=False)
    wvpasseq(4, len(res))
    dir_symlink_value = tip_tree[b'dir-symlink']
    expected_dir_symlink_item_w_meta = vfs.Item(meta=dir_symlink_value.meta,
                                                oid=dir_symlink_value.oid)
    expected = ((b'', vfs._root),
                (b'test', test_revlist_w_meta),
                (save_time_str, expected_latest_item_w_meta),
                (b'dir-symlink', expected_dir_symlink_item_w_meta))
    wvpasseq(expected, res)

    # A trailing slash or /. forces dereference even with follow=False.
    dir_value = tip_tree[b'dir']
    expected_dir_item = vfs.Item(oid=dir_value.oid,
                                 meta=tree_dict(repo,
                                                dir_value.oid)[b'.'].meta)
    expected = ((b'', vfs._root),
                (b'test', test_revlist_w_meta),
                (save_time_str, expected_latest_item_w_meta),
                (b'dir', expected_dir_item))
    def lresolve(*args, **keys):
        return resolve(*args, **dict(keys, follow=False))
    for resname, resolver in (('resolve', resolve),
                              ('resolve nofollow', lresolve)):
        for path in (b'/test/latest/dir-symlink/',
                     b'/test/latest/dir-symlink/.'):
            wvstart(resname + ': ' + path_msg(path))
            vfs.clear_cache()
            res = resolver(path)
            wvpasseq(4, len(res))
            wvpasseq(expected, res)
    # Reuses the last path from the loop above.
    wvstart('resolve: /test/latest/dir-symlink')
    vfs.clear_cache()
    res = resolve(path)
    wvpasseq(4, len(res))
    wvpasseq(expected, res)
def present_interface(stdin, out, extra, repo):
    """Run the interactive command loop (ls/cd/pwd/cat/get/mget/help/quit),
    reading commands from extra (if given) or interactively from stdin.

    Fixes:
    - `cmd` is bytes, so the unknown-command path previously called
      bytes.encode (AttributeError) and concatenated str + bytes; use
      cmd.decode(errors='backslashreplace') instead.
    - In mget, a failed symlink dereference reported the directory path
      (res) instead of the dereferenced path (deref).
    """
    pwd = vfs.resolve(repo, b'/')
    if extra:
        lines = (argv_bytes(arg) for arg in extra)
    else:
        if hasattr(_helpers, 'readline'):
            _helpers.set_completer_word_break_characters(b' \t\n\r/')
            _helpers.set_attempted_completion_function(attempt_completion)
            _helpers.set_completion_entry_function(enter_completion)
            if sys.platform.startswith('darwin'):
                # MacOS uses a slightly incompatible clone of libreadline
                _helpers.parse_and_bind(b'bind ^I rl_complete')
            _helpers.parse_and_bind(b'tab: complete')
        lines = inputiter(stdin, pwd, out)
    for line in lines:
        if not line.strip():
            continue
        words = [word for (wordstart, word) in shquote.quotesplit(line)]
        cmd = words[0].lower()
        #log('execute: %r %r\n' % (cmd, parm))
        try:
            if cmd == b'ls':
                do_ls(repo, pwd, words[1:], out)
                out.flush()
            elif cmd == b'cd':
                np = pwd
                for parm in words[1:]:
                    res = vfs.resolve(repo, parm, parent=np)
                    _, leaf_item = res[-1]
                    if not leaf_item:
                        raise CommandError('path does not exist: '
                                           + rpath_msg(res))
                    if not stat.S_ISDIR(vfs.item_mode(leaf_item)):
                        raise CommandError('path is not a directory: '
                                           + path_msg(parm))
                    np = res
                pwd = np
            elif cmd == b'pwd':
                if len(pwd) == 1:
                    out.write(b'/')
                out.write(b'/'.join(name for name, item in pwd) + b'\n')
                out.flush()
            elif cmd == b'cat':
                for parm in words[1:]:
                    res = vfs.resolve(repo, parm, parent=pwd)
                    _, leaf_item = res[-1]
                    if not leaf_item:
                        raise CommandError('path does not exist: '
                                           + rpath_msg(res))
                    with vfs.fopen(repo, leaf_item) as srcfile:
                        write_to_file(srcfile, out)
                out.flush()
            elif cmd == b'get':
                if len(words) not in [2, 3]:
                    raise CommandError('Usage: get <filename> [localname]')
                rname = words[1]
                (dir, base) = os.path.split(rname)
                lname = len(words) > 2 and words[2] or base
                res = vfs.resolve(repo, rname, parent=pwd)
                _, leaf_item = res[-1]
                if not leaf_item:
                    raise CommandError('path does not exist: '
                                       + rpath_msg(res))
                with vfs.fopen(repo, leaf_item) as srcfile:
                    with open(lname, 'wb') as destfile:
                        log('Saving %s\n' % path_msg(lname))
                        write_to_file(srcfile, destfile)
            elif cmd == b'mget':
                for parm in words[1:]:
                    dir, base = os.path.split(parm)
                    res = vfs.resolve(repo, dir, parent=pwd)
                    _, dir_item = res[-1]
                    if not dir_item:
                        raise CommandError('path does not exist: '
                                           + path_msg(dir))
                    for name, item in vfs.contents(repo, dir_item):
                        if name == b'.':
                            continue
                        if fnmatch.fnmatch(name, base):
                            if stat.S_ISLNK(vfs.item_mode(item)):
                                deref = vfs.resolve(repo, name, parent=res)
                                deref_name, deref_item = deref[-1]
                                if not deref_item:
                                    # Bug fix: report the dereferenced
                                    # path, not the directory's.
                                    raise CommandError(
                                        'path does not exist: '
                                        + rpath_msg(deref))
                                item = deref_item
                            with vfs.fopen(repo, item) as srcfile:
                                with open(name, 'wb') as destfile:
                                    log('Saving %s\n' % path_msg(name))
                                    write_to_file(srcfile, destfile)
            elif cmd in (b'help', b'?'):
                out.write(b'Commands: ls cd pwd cat get mget help quit\n')
                out.flush()
            elif cmd in (b'quit', b'exit', b'bye'):
                break
            else:
                # Bug fix: cmd is bytes; decode (not encode) for the
                # str error message.
                raise CommandError('no such command: '
                                   + cmd.decode(errors='backslashreplace'))
        except CommandError as ex:
            out.write(b'error: %s\n'
                      % str(ex).encode(errors='backslashreplace'))
            out.flush()
def within_repo(repo, opt):
    """List each repository path in opt.paths to stdout, honoring the
    ls-style display options in opt.  Returns 0 on success, 1 if any
    path could not be accessed.

    NOTE(review): an identical within_repo definition appears again
    later in this file and will shadow this one at import time.
    """
    if opt.commit_hash:
        opt.hash = True
    def item_line(item, name):
        # Format one entry according to the active display options.
        return item_info(item, name,
                         show_hash=opt.hash,
                         commit_hash=opt.commit_hash,
                         long_fmt=opt.long_listing,
                         classification=opt.classification,
                         numeric_ids=opt.numeric_ids,
                         human_readable=opt.human_readable)
    ret = 0
    pending = []  # buffered lines for columnate() when on a tty
    for path in opt.paths:
        try:
            if opt.directory:
                # -d: list the entry itself; don't follow a final symlink.
                resolved = vfs.resolve(repo, path, follow=False)
            else:
                resolved = vfs.try_resolve(repo, path)
            leaf_name, leaf_item = resolved[-1]
            if not leaf_item:
                log('error: cannot access %r in %r\n'
                    % ('/'.join(name for name, item in resolved), path))
                ret = 1
                continue
            if not opt.directory and S_ISDIR(vfs.item_mode(leaf_item)):
                items = vfs.contents(repo, leaf_item)
                if opt.show_hidden == 'all':
                    # Match non-bup "ls -a ... /".
                    parent = resolved[-2] if len(resolved) > 1 else resolved[0]
                    items = chain(items, (('..', parent[1]),))
                # NOTE(review): names are compared against str '.' here,
                # while other code in this file uses b'.' -- confirm this
                # legacy version is intentionally str-based.
                for sub_name, sub_item in sorted(items, key=lambda x: x[0]):
                    if opt.show_hidden != 'all' and sub_name == '.':
                        continue
                    if sub_name.startswith('.') and \
                       opt.show_hidden not in ('almost', 'all'):
                        continue
                    if opt.l:
                        sub_item = vfs.ensure_item_has_metadata(
                            repo, sub_item, include_size=True)
                    else:
                        sub_item = vfs.augment_item_meta(repo, sub_item,
                                                         include_size=True)
                    line = item_line(sub_item, sub_name)
                    if not opt.long_listing and istty1:
                        pending.append(line)
                    else:
                        print(line)
            else:
                leaf_item = vfs.augment_item_meta(repo, leaf_item,
                                                  include_size=True)
                line = item_line(leaf_item, os.path.normpath(path))
                if not opt.long_listing and istty1:
                    pending.append(line)
                else:
                    print(line)
        except vfs.IOError as ex:
            log('bup: %s\n' % ex)
            ret = 1
    if pending:
        sys.stdout.write(columnate(pending, ''))
    return ret
def within_repo(repo, opt):
    """List each repository path in opt.paths to stdout, honoring the
    ls-style display options in opt.  Returns 0 on success, 1 if any
    path could not be accessed.

    NOTE(review): this redefines (and shadows) an essentially identical
    within_repo that appears earlier in this file -- one of the two is
    presumably redundant; confirm and remove the duplicate.
    """
    if opt.commit_hash:
        opt.hash = True
    def item_line(item, name):
        # Format one entry according to the active display options.
        return item_info(item, name,
                         show_hash=opt.hash,
                         commit_hash=opt.commit_hash,
                         long_fmt=opt.long_listing,
                         classification=opt.classification,
                         numeric_ids=opt.numeric_ids,
                         human_readable=opt.human_readable)
    ret = 0
    pending = []  # buffered lines for columnate() when on a tty
    for path in opt.paths:
        try:
            if opt.directory:
                # -d: list the entry itself; don't follow a final symlink.
                resolved = vfs.resolve(repo, path, follow=False)
            else:
                resolved = vfs.try_resolve(repo, path)
            leaf_name, leaf_item = resolved[-1]
            if not leaf_item:
                log('error: cannot access %r in %r\n'
                    % ('/'.join(name for name, item in resolved), path))
                ret = 1
                continue
            if not opt.directory and S_ISDIR(vfs.item_mode(leaf_item)):
                items = vfs.contents(repo, leaf_item)
                if opt.show_hidden == 'all':
                    # Match non-bup "ls -a ... /".
                    parent = resolved[-2] if len(resolved) > 1 else resolved[0]
                    items = chain(items, (('..', parent[1]), ))
                for sub_name, sub_item in sorted(items, key=lambda x: x[0]):
                    if opt.show_hidden != 'all' and sub_name == '.':
                        continue
                    if sub_name.startswith('.') and \
                       opt.show_hidden not in ('almost', 'all'):
                        continue
                    if opt.l:
                        sub_item = vfs.ensure_item_has_metadata(
                            repo, sub_item, include_size=True)
                    else:
                        sub_item = vfs.augment_item_meta(repo, sub_item,
                                                         include_size=True)
                    line = item_line(sub_item, sub_name)
                    if not opt.long_listing and istty1:
                        pending.append(line)
                    else:
                        print(line)
            else:
                leaf_item = vfs.augment_item_meta(repo, leaf_item,
                                                  include_size=True)
                line = item_line(leaf_item, os.path.normpath(path))
                if not opt.long_listing and istty1:
                    pending.append(line)
                else:
                    print(line)
        except vfs.IOError as ex:
            log('bup: %s\n' % ex)
            ret = 1
    if pending:
        sys.stdout.write(columnate(pending, ''))
    return ret
if not leaf_item: raise Exception('%r does not exist' % '/'.join(name for name, item in res)) with vfs.fopen(repo, leaf_item) as srcfile: with open(lname, 'wb') as destfile: log('Saving %r\n' % lname) write_to_file(srcfile, destfile) elif cmd == 'mget': for parm in words[1:]: (dir,base) = os.path.split(parm) res = vfs.resolve(repo, dir, parent=pwd) _, dir_item = res[-1] if not dir_item: raise Exception('%r does not exist' % dir) for name, item in vfs.contents(repo, dir_item): if name == '.': continue if fnmatch.fnmatch(name, base): if stat.S_ISLNK(vfs.item_mode(item)): deref = vfs.resolve(repo, name, parent=res) deref_name, deref_item = deref[-1] if not deref_item: raise Exception('%r does not exist' % '/'.join(name for name, item in deref)) item = deref_item with vfs.fopen(repo, item) as srcfile: with open(name, 'wb') as destfile: log('Saving %r\n' % name) write_to_file(srcfile, destfile)
def main(argv):
    """Entry point for the interactive repository browser: parse options,
    open the repo, and run the ls/cd/pwd/cat/get/mget command loop.

    Exits with nonzero status if any command failed.

    Fix: in mget, when dereferencing a symlink fails, the error message
    previously joined `res` (the containing directory's resolution)
    instead of `deref` (the path that actually failed).
    """
    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])
    global repo
    repo = from_opts(opt, reverse=False)
    sys.stdout.flush()
    out = byte_stream(sys.stdout)
    stdin = byte_stream(sys.stdin)
    pwd = vfs.resolve(repo, b'/')
    rv = 0

    def inputiter(f):
        # Yield command lines: readline-backed prompts on a tty, plain
        # iteration otherwise.
        if os.isatty(f.fileno()):
            while 1:
                prompt = b'bup %s> ' % (b'/'.join(name for name, item in pwd)
                                        or b'/',)
                if hasattr(_helpers, 'readline'):
                    try:
                        yield _helpers.readline(prompt)
                    except EOFError:
                        print()  # Clear the line for the terminal's next prompt
                        break
                else:
                    out.write(prompt)
                    out.flush()
                    read_line = f.readline()
                    if not read_line:
                        print('')
                        break
                    yield read_line
        else:
            for line in f:
                yield line

    if extra:
        lines = (argv_bytes(arg) for arg in extra)
    else:
        if hasattr(_helpers, 'readline'):
            _helpers.set_completer_word_break_characters(b' \t\n\r/')
            _helpers.set_attempted_completion_function(attempt_completion)
            _helpers.set_completion_entry_function(enter_completion)
            if sys.platform.startswith('darwin'):
                # MacOS uses a slightly incompatible clone of libreadline
                _helpers.parse_and_bind(b'bind ^I rl_complete')
            _helpers.parse_and_bind(b'tab: complete')
        lines = inputiter(stdin)
    for line in lines:
        if not line.strip():
            continue
        words = [word for (wordstart, word) in shquote.quotesplit(line)]
        cmd = words[0].lower()
        #log('execute: %r %r\n' % (cmd, parm))
        try:
            if cmd == b'ls':
                do_ls(repo, pwd, words[1:], out)
                out.flush()
            elif cmd == b'cd':
                np = pwd
                for parm in words[1:]:
                    res = vfs.resolve(repo, parm, parent=np)
                    _, leaf_item = res[-1]
                    if not leaf_item:
                        raise CommandError(b'"%s" does not exist'
                                           % b'/'.join(name for name, item
                                                       in res))
                    if not stat.S_ISDIR(vfs.item_mode(leaf_item)):
                        raise CommandError(b'"%s" is not a directory' % parm)
                    np = res
                pwd = np
            elif cmd == b'pwd':
                if len(pwd) == 1:
                    out.write(b'/')
                out.write(b'/'.join(name for name, item in pwd) + b'\n')
                out.flush()
            elif cmd == b'cat':
                for parm in words[1:]:
                    res = vfs.resolve(repo, parm, parent=pwd)
                    _, leaf_item = res[-1]
                    if not leaf_item:
                        raise CommandError(b'"%s" does not exist'
                                           % b'/'.join(name for name, item
                                                       in res))
                    with vfs.fopen(repo, leaf_item) as srcfile:
                        write_to_file(srcfile, out)
                out.flush()
            elif cmd == b'get':
                if len(words) not in [2, 3]:
                    rv = 1
                    raise CommandError(b'Usage: get <filename> [localname]')
                rname = words[1]
                (dir, base) = os.path.split(rname)
                # Keep the legacy and/or idiom: an empty explicit
                # localname deliberately falls back to base.
                lname = len(words) > 2 and words[2] or base
                res = vfs.resolve(repo, rname, parent=pwd)
                _, leaf_item = res[-1]
                if not leaf_item:
                    raise CommandError(b'"%s" does not exist'
                                       % b'/'.join(name for name, item
                                                   in res))
                with vfs.fopen(repo, leaf_item) as srcfile:
                    with open(lname, 'wb') as destfile:
                        log('Saving %s\n' % path_msg(lname))
                        write_to_file(srcfile, destfile)
            elif cmd == b'mget':
                for parm in words[1:]:
                    dir, base = os.path.split(parm)
                    res = vfs.resolve(repo, dir, parent=pwd)
                    _, dir_item = res[-1]
                    if not dir_item:
                        raise CommandError(b'"%s" does not exist' % dir)
                    for name, item in vfs.contents(repo, dir_item):
                        if name == b'.':
                            continue
                        if fnmatch.fnmatch(name, base):
                            if stat.S_ISLNK(vfs.item_mode(item)):
                                deref = vfs.resolve(repo, name, parent=res)
                                deref_name, deref_item = deref[-1]
                                if not deref_item:
                                    # Bug fix: join deref, not res, so
                                    # the error names the failing path.
                                    raise CommandError(
                                        b'"%s" does not exist'
                                        % b'/'.join(name for name, item
                                                    in deref))
                                item = deref_item
                            with vfs.fopen(repo, item) as srcfile:
                                with open(name, 'wb') as destfile:
                                    log('Saving %s\n' % path_msg(name))
                                    write_to_file(srcfile, destfile)
            elif cmd == b'help' or cmd == b'?':
                out.write(b'Commands: ls cd pwd cat get mget help quit\n')
                out.flush()
            elif cmd in (b'quit', b'exit', b'bye'):
                break
            else:
                rv = 1
                raise CommandError(b'no such command "%s"' % cmd)
        except CommandError as e:
            rv = 1
            out.write(b'error: %s\n' % e.args[0])
            out.flush()
        except Exception as e:
            rv = 1
            out.write(b'error: %s\n' % str(e).encode())
            out.flush()
    sys.exit(rv)
def _dir_contents(repo, resolution, args):
    """Yield the display information for the contents of dir_item.

    resolution is a vfs resolution (sequence of (name, item) pairs) whose
    last element is the directory to list.  Each yielded tuple is
    (display_name, link, display_size, meta, oidx).

    Change: removed a redundant inner conditional -- the parent lookup
    was guarded by `len(resolution) > 1`, so the `else dir_item` branch
    of the old ternary was unreachable.
    """
    def display_info(name, item, resolved_item, display_name=None,
                     omitsize=False):
        # link should be based on fully resolved type to avoid extra
        # HTTP redirect.
        link = tornado.escape.url_escape(name, plus=False)
        if stat.S_ISDIR(vfs.item_mode(resolved_item)):
            link += '/'
        link = link.encode('ascii')
        if not omitsize:
            size = vfs.item_size(repo, item)
            if args.hsizes:
                display_size = format_filesize(size)
            else:
                display_size = size
        else:
            display_size = None
        if not display_name:
            # Decorate the name like ls -F and suppress sizes for
            # non-regular entries.
            mode = vfs.item_mode(item)
            if stat.S_ISDIR(mode):
                display_name = name + b'/'
                display_size = None
            elif stat.S_ISLNK(mode):
                display_name = name + b'@'
                display_size = None
            else:
                display_name = name
        meta = resolved_item.meta
        if not isinstance(meta, Metadata):
            meta = None
        try:
            oidx = hexlify(resolved_item.oid)
        except AttributeError:
            # Items without an oid (e.g. synthetic entries) get none.
            oidx = ''
        # NOTE(review): `link + args` appends the args object to a bytes
        # link -- presumably args stringifies to a query suffix; confirm.
        return display_name, link + args, display_size, meta, oidx

    dir_item = resolution[-1][1]
    for name, item in vfs.contents(repo, dir_item):
        if not args.hidden:
            if (name not in (b'.', b'..')) and name.startswith(b'.'):
                continue
        if name == b'.':
            # Present '.' as a '..' entry pointing at the parent, but
            # only when this directory actually has one.
            if len(resolution) > 1:
                parent_item = resolution[-2][1]
                yield display_info(b'..', parent_item, parent_item, b'..',
                                   omitsize=True)
            continue
        res = vfs.try_resolve(repo, name, parent=resolution,
                              want_meta=args.meta)
        res_name, res_item = res[-1]
        yield display_info(name, item, res_item)
def test_resolve():
    """Legacy (Python-2 era) resolve() test against a small save.

    NOTE(review): this duplicates the newer bytes-based test_resolve in
    this file and relies on py2-only idioms (str.decode('hex'),
    writing the canary via print() to a text-mode file); it will fail
    under Python 3 as written.
    """
    with no_lingering_errors():
        with test_tempdir('bup-tvfs-') as tmpdir:
            resolve = vfs.resolve
            lresolve = vfs.lresolve
            bup_dir = tmpdir + '/bup'
            environ['GIT_DIR'] = bup_dir
            environ['BUP_DIR'] = bup_dir
            git.repodir = bup_dir
            data_path = tmpdir + '/src'
            save_time = 100000
            save_time_str = strftime('%Y-%m-%d-%H%M%S',
                                     localtime(save_time))
            # Build and save a small source tree.
            os.mkdir(data_path)
            os.mkdir(data_path + '/dir')
            with open(data_path + '/file', 'w+') as tmpfile:
                print('canary', file=tmpfile)
            symlink('file', data_path + '/file-symlink')
            symlink('dir', data_path + '/dir-symlink')
            symlink('not-there', data_path + '/bad-symlink')
            ex((bup_path, 'init'))
            ex((bup_path, 'index', '-v', data_path))
            ex((bup_path, 'save', '-d', str(save_time), '-tvvn', 'test',
                '--strip', data_path))
            ex((bup_path, 'tag', 'test-tag', 'test'))
            repo = LocalRepo()
            # Gather the tip commit/tree oids via git for expected values.
            tip_hash = exo(('git', 'show-ref', 'refs/heads/test'))[0]
            tip_oidx = tip_hash.strip().split()[0]
            tip_oid = tip_oidx.decode('hex')  # NOTE(review): py2-only
            tip_tree_oidx = exo(('git', 'log', '--pretty=%T', '-n1',
                                 tip_oidx))[0].strip()
            tip_tree_oid = tip_tree_oidx.decode('hex')
            tip_tree = tree_dict(repo, tip_tree_oid)
            test_revlist_w_meta = vfs.RevList(meta=tip_tree['.'].meta,
                                              oid=tip_oid)
            expected_latest_item = vfs.Commit(meta=S_IFDIR | 0o755,
                                              oid=tip_tree_oid,
                                              coid=tip_oid)
            expected_latest_item_w_meta = vfs.Commit(meta=tip_tree['.'].meta,
                                                     oid=tip_tree_oid,
                                                     coid=tip_oid)
            expected_test_tag_item = expected_latest_item
            wvstart('resolve: /')
            vfs.clear_cache()
            res = resolve(repo, '/')
            wvpasseq(1, len(res))
            wvpasseq((('', vfs._root), ), res)
            ignore, root_item = res[0]
            root_content = frozenset(vfs.contents(repo, root_item))
            wvpasseq(
                frozenset([('.', root_item), ('.tag', vfs._tags),
                           ('test', test_revlist_w_meta)]), root_content)
            # All of these must normalize to the root.
            for path in ('//', '/.', '/./', '/..', '/../',
                         '/test/latest/dir/../../..',
                         '/test/latest/dir/../../../',
                         '/test/latest/dir/../../../.',
                         '/test/latest/dir/../../..//',
                         '/test//latest/dir/../../..',
                         '/test/./latest/dir/../../..',
                         '/test/././latest/dir/../../..',
                         '/test/.//./latest/dir/../../..',
                         # NOTE(review): missing comma -- the next two
                         # literals are implicitly concatenated into ONE
                         # path, so the second is never tested alone.
                         '/test//.//.//latest/dir/../../..'
                         '/test//./latest/dir/../../..'):
                wvstart('resolve: ' + path)
                vfs.clear_cache()
                res = resolve(repo, path)
                wvpasseq((('', vfs._root), ), res)
            wvstart('resolve: /.tag')
            vfs.clear_cache()
            res = resolve(repo, '/.tag')
            wvpasseq(2, len(res))
            wvpasseq((('', vfs._root), ('.tag', vfs._tags)), res)
            ignore, tag_item = res[1]
            tag_content = frozenset(vfs.contents(repo, tag_item))
            wvpasseq(
                frozenset([('.', tag_item),
                           ('test-tag', expected_test_tag_item)]),
                tag_content)
            wvstart('resolve: /test')
            vfs.clear_cache()
            res = resolve(repo, '/test')
            wvpasseq(2, len(res))
            wvpasseq((('', vfs._root), ('test', test_revlist_w_meta)), res)
            ignore, test_item = res[1]
            test_content = frozenset(vfs.contents(repo, test_item))
            # latest has metadata here due to caching
            wvpasseq(
                frozenset([('.', test_revlist_w_meta),
                           (save_time_str, expected_latest_item_w_meta),
                           ('latest', expected_latest_item_w_meta)]),
                test_content)
            wvstart('resolve: /test/latest')
            vfs.clear_cache()
            res = resolve(repo, '/test/latest')
            wvpasseq(3, len(res))
            expected_latest_item_w_meta = vfs.Commit(meta=tip_tree['.'].meta,
                                                     oid=tip_tree_oid,
                                                     coid=tip_oid)
            expected = (('', vfs._root),
                        ('test', test_revlist_w_meta),
                        ('latest', expected_latest_item_w_meta))
            wvpasseq(expected, res)
            ignore, latest_item = res[2]
            latest_content = frozenset(vfs.contents(repo, latest_item))
            expected = frozenset(
                (x.name, vfs.Item(oid=x.oid, meta=x.meta))
                for x in (tip_tree[name]
                          for name in ('.', 'bad-symlink', 'dir',
                                       'dir-symlink', 'file',
                                       'file-symlink')))
            wvpasseq(expected, latest_content)
            wvstart('resolve: /test/latest/file')
            vfs.clear_cache()
            res = resolve(repo, '/test/latest/file')
            wvpasseq(4, len(res))
            expected_file_item_w_meta = vfs.Item(meta=tip_tree['file'].meta,
                                                 oid=tip_tree['file'].oid)
            expected = (('', vfs._root),
                        ('test', test_revlist_w_meta),
                        ('latest', expected_latest_item_w_meta),
                        ('file', expected_file_item_w_meta))
            wvpasseq(expected, res)
            wvstart('resolve: /test/latest/bad-symlink')
            vfs.clear_cache()
            res = resolve(repo, '/test/latest/bad-symlink')
            wvpasseq(4, len(res))
            # Following a dangling symlink terminates at (target, None).
            expected = (('', vfs._root),
                        ('test', test_revlist_w_meta),
                        ('latest', expected_latest_item_w_meta),
                        ('not-there', None))
            wvpasseq(expected, res)
            wvstart('lresolve: /test/latest/bad-symlink')
            vfs.clear_cache()
            res = lresolve(repo, '/test/latest/bad-symlink')
            wvpasseq(4, len(res))
            bad_symlink_value = tip_tree['bad-symlink']
            expected_bad_symlink_item_w_meta = vfs.Item(
                meta=bad_symlink_value.meta, oid=bad_symlink_value.oid)
            expected = (('', vfs._root),
                        ('test', test_revlist_w_meta),
                        ('latest', expected_latest_item_w_meta),
                        ('bad-symlink', expected_bad_symlink_item_w_meta))
            wvpasseq(expected, res)
            wvstart('resolve: /test/latest/file-symlink')
            vfs.clear_cache()
            res = resolve(repo, '/test/latest/file-symlink')
            wvpasseq(4, len(res))
            expected = (('', vfs._root),
                        ('test', test_revlist_w_meta),
                        ('latest', expected_latest_item_w_meta),
                        ('file', expected_file_item_w_meta))
            wvpasseq(expected, res)
            wvstart('lresolve: /test/latest/file-symlink')
            vfs.clear_cache()
            res = lresolve(repo, '/test/latest/file-symlink')
            wvpasseq(4, len(res))
            file_symlink_value = tip_tree['file-symlink']
            expected_file_symlink_item_w_meta = vfs.Item(
                meta=file_symlink_value.meta, oid=file_symlink_value.oid)
            expected = (('', vfs._root),
                        ('test', test_revlist_w_meta),
                        ('latest', expected_latest_item_w_meta),
                        ('file-symlink', expected_file_symlink_item_w_meta))
            wvpasseq(expected, res)
            wvstart('resolve: /test/latest/missing')
            vfs.clear_cache()
            res = resolve(repo, '/test/latest/missing')
            wvpasseq(4, len(res))
            name, item = res[-1]
            wvpasseq('missing', name)
            wvpass(item is None)
            # Treating a plain file as a directory must raise ENOTDIR.
            for path in ('/test/latest/file/',
                         '/test/latest/file/.',
                         '/test/latest/file/..',
                         '/test/latest/file/../',
                         '/test/latest/file/../.',
                         '/test/latest/file/../..',
                         '/test/latest/file/foo'):
                wvstart('resolve: ' + path)
                vfs.clear_cache()
                try:
                    resolve(repo, path)
                except vfs.IOError as res_ex:
                    wvpasseq(ENOTDIR, res_ex.errno)
                    wvpasseq(['', 'test', 'latest', 'file'],
                             [name for name, item in res_ex.terminus])
            for path in ('/test/latest/file-symlink/',
                         '/test/latest/file-symlink/.',
                         '/test/latest/file-symlink/..',
                         '/test/latest/file-symlink/../',
                         '/test/latest/file-symlink/../.',
                         '/test/latest/file-symlink/../..'):
                wvstart('lresolve: ' + path)
                vfs.clear_cache()
                try:
                    lresolve(repo, path)
                except vfs.IOError as res_ex:
                    wvpasseq(ENOTDIR, res_ex.errno)
                    wvpasseq(['', 'test', 'latest', 'file'],
                             [name for name, item in res_ex.terminus])
            wvstart('resolve: non-directory parent')
            vfs.clear_cache()
            file_res = resolve(repo, '/test/latest/file')
            try:
                resolve(repo, 'foo', parent=file_res)
            except vfs.IOError as res_ex:
                wvpasseq(ENOTDIR, res_ex.errno)
                wvpasseq(None, res_ex.terminus)
            wvstart('lresolve: /test/latest/dir-symlink')
            vfs.clear_cache()
            res = lresolve(repo, '/test/latest/dir-symlink')
            wvpasseq(4, len(res))
            dir_symlink_value = tip_tree['dir-symlink']
            expected_dir_symlink_item_w_meta = vfs.Item(
                meta=dir_symlink_value.meta, oid=dir_symlink_value.oid)
            expected = (('', vfs._root),
                        ('test', test_revlist_w_meta),
                        ('latest', expected_latest_item_w_meta),
                        ('dir-symlink', expected_dir_symlink_item_w_meta))
            wvpasseq(expected, res)
            # A trailing slash or /. forces dereference even for lresolve.
            dir_value = tip_tree['dir']
            expected_dir_item = vfs.Item(oid=dir_value.oid,
                                         meta=tree_dict(
                                             repo, dir_value.oid)['.'].meta)
            expected = (('', vfs._root),
                        ('test', test_revlist_w_meta),
                        ('latest', expected_latest_item_w_meta),
                        ('dir', expected_dir_item))
            for resname, resolver in (('resolve', resolve),
                                      ('lresolve', lresolve)):
                for path in ('/test/latest/dir-symlink/',
                             '/test/latest/dir-symlink/.'):
                    wvstart(resname + ': ' + path)
                    vfs.clear_cache()
                    res = resolver(repo, path)
                    wvpasseq(4, len(res))
                    wvpasseq(expected, res)
            # Reuses the last path from the loop above.
            wvstart('resolve: /test/latest/dir-symlink')
            vfs.clear_cache()
            res = resolve(repo, path)
            wvpasseq(4, len(res))
            wvpasseq(expected, res)