def main(argv):
    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])
    if opt.remote:
        opt.remote = argv_bytes(opt.remote)

    git.check_repo_or_die()

    stdin = byte_stream(sys.stdin)

    if not extra:
        extra = linereader(stdin)

    ret = 0
    repo = RemoteRepo(opt.remote) if opt.remote else LocalRepo()

    if opt.o:
        outfile = open(opt.o, 'wb')
    else:
        sys.stdout.flush()
        outfile = byte_stream(sys.stdout)

    for ref in [argv_bytes(x) for x in extra]:
        try:
            for blob in repo.join(ref):
                outfile.write(blob)
        except KeyError as e:
            outfile.flush()
            log('error: %s\n' % e)
            ret = 1

    sys.exit(ret)

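# A small sketch of using repo.join() directly, the call the join command
# above is built on: it yields the blobs for a ref in order, so concatenating
# them reproduces the joined data.  cat_ref is a hypothetical helper name,
# not part of bup.
def cat_ref(repo, ref):
    """Return the joined contents of ref as a single bytes object."""
    return b''.join(repo.join(ref))
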
def via_cmdline(args, onabort=None):
    """Output a listing of a file or directory in the bup repository.

    When a long listing is not requested and stdout is attached to a
    tty, the output is formatted in columns.  When not attached to tty
    (for example when the output is piped to another command), one
    file is listed per line.

    """
    opt = opts_from_cmdline(args, onabort=onabort)
    return within_repo(RemoteRepo(opt.remote) if opt.remote else LocalRepo(),
                       opt)

def via_cmdline(args, out=None, onabort=None):
    """Write a listing of a file or directory in the bup repository to out.

    When a long listing is not requested and stdout is attached to a
    tty, the output is formatted in columns.  When not attached to tty
    (for example when the output is piped to another command), one
    file is listed per line.

    """
    assert out
    opt = opts_from_cmdline(args, onabort=onabort)
    with RemoteRepo(argv_bytes(opt.remote)) if opt.remote \
         else LocalRepo() as repo:
        return within_repo(repo, opt, out)

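# A minimal caller sketch for the newer via_cmdline signature above, which
# requires an explicit binary output stream instead of writing to sys.stdout
# itself.  The module paths (bup.ls, bup.io) are assumptions for illustration.
import sys

from bup import ls
from bup.io import byte_stream


def run_ls(args):
    # Flush any buffered text output before handing over the raw byte stream.
    sys.stdout.flush()
    return ls.via_cmdline(args, out=byte_stream(sys.stdout))
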
def main():
    handle_ctrl_c()
    is_reverse = environ.get(b'BUP_SERVER_REVERSE')
    opt = parse_args(compat.argv)
    git.check_repo_or_die()
    if opt.source:
        opt.source = argv_bytes(opt.source)
    if opt.bwlimit:
        client.bwlimit = parse_num(opt.bwlimit)
    if is_reverse and opt.remote:
        misuse("don't use -r in reverse mode; it's automatic")
    if opt.remote:
        opt.remote = argv_bytes(opt.remote)
    if opt.remote or is_reverse:
        dest_repo = RemoteRepo(opt.remote)
    else:
        dest_repo = LocalRepo()

    with dest_repo as dest_repo:
        with LocalRepo(repo_dir=opt.source) as src_repo:
            with dest_repo.new_packwriter(compression_level=opt.compress) as writer:
                # Resolve and validate all sources and destinations,
                # implicit or explicit, and do it up-front, so we can
                # fail before we start writing (for any obviously
                # broken cases).
                target_items = resolve_targets(opt.target_specs,
                                               src_repo, dest_repo)

                updated_refs = {}  # ref_name -> (original_ref, tip_commit(bin))
                no_ref_info = (None, None)

                handlers = {'ff': handle_ff,
                            'append': handle_append,
                            'force-pick': handle_pick,
                            'pick': handle_pick,
                            'new-tag': handle_new_tag,
                            'replace': handle_replace,
                            'unnamed': handle_unnamed}

                for item in target_items:
                    debug1('get-spec: %r\n' % (item.spec,))
                    debug1('get-src: %s\n' % loc_desc(item.src))
                    debug1('get-dest: %s\n' % loc_desc(item.dest))
                    dest_path = item.dest and item.dest.path
                    if dest_path:
                        if dest_path.startswith(b'/.tag/'):
                            dest_ref = b'refs/tags/%s' % dest_path[6:]
                        else:
                            dest_ref = b'refs/heads/%s' % dest_path[1:]
                    else:
                        dest_ref = None

                    dest_hash = item.dest and item.dest.hash
                    orig_ref, cur_ref = updated_refs.get(dest_ref, no_ref_info)
                    orig_ref = orig_ref or dest_hash
                    cur_ref = cur_ref or dest_hash

                    handler = handlers[item.spec.method]
                    item_result = handler(item, src_repo, writer, opt)
                    if len(item_result) > 1:
                        new_id, tree = item_result
                    else:
                        new_id = item_result[0]

                    if not dest_ref:
                        log_item(item.spec.src, item.src.type, opt)
                    else:
                        updated_refs[dest_ref] = (orig_ref, new_id)
                        if dest_ref.startswith(b'refs/tags/'):
                            log_item(item.spec.src, item.src.type, opt,
                                     tag=new_id)
                        else:
                            log_item(item.spec.src, item.src.type, opt,
                                     tree=tree, commit=new_id)

            # Only update the refs at the very end, once the writer is
            # closed, so that if something goes wrong above, the old refs
            # will be undisturbed.
            for ref_name, info in items(updated_refs):
                orig_ref, new_ref = info
                try:
                    dest_repo.update_ref(ref_name, new_ref, orig_ref)
                    if opt.verbose:
                        new_hex = hexlify(new_ref)
                        if orig_ref:
                            orig_hex = hexlify(orig_ref)
                            log('updated %r (%s -> %s)\n'
                                % (ref_name, orig_hex, new_hex))
                        else:
                            log('updated %r (%s)\n' % (ref_name, new_hex))
                except (git.GitError, client.ClientError) as ex:
                    add_error('unable to update ref %r: %s' % (ref_name, ex))

    if saved_errors:
        log('WARNING: %d errors encountered while saving.\n' % len(saved_errors))
        sys.exit(1)

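# The ref naming used in main() above, pulled out as a tiny standalone helper
# for clarity (an illustrative sketch, not part of bup): a VFS destination
# path under /.tag/ becomes a git tag ref, anything else becomes a branch head.
def dest_ref_for_path(dest_path):
    """Map a bup VFS destination path (bytes) to a git ref name, or None."""
    if not dest_path:
        return None
    if dest_path.startswith(b'/.tag/'):
        return b'refs/tags/%s' % dest_path[6:]
    return b'refs/heads/%s' % dest_path[1:]

assert dest_ref_for_path(b'/.tag/v1') == b'refs/tags/v1'
assert dest_ref_for_path(b'/main') == b'refs/heads/main'
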
def main(argv):
    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])
    verbosity = (opt.verbose or 0) if not opt.quiet else -1
    if opt.remote:
        opt.remote = argv_bytes(opt.remote)
    if opt.outdir:
        opt.outdir = argv_bytes(opt.outdir)

    git.check_repo_or_die()

    if not extra:
        o.fatal('must specify at least one filename to restore')

    exclude_rxs = parse_rx_excludes(flags, o.fatal)

    owner_map = {}
    for map_type in ('user', 'group', 'uid', 'gid'):
        owner_map[map_type] = parse_owner_mappings(map_type, flags, o.fatal)

    if opt.outdir:
        mkdirp(opt.outdir)
        os.chdir(opt.outdir)

    repo = RemoteRepo(opt.remote) if opt.remote else LocalRepo()
    top = fsencode(os.getcwd())
    hardlinks = {}
    for path in [argv_bytes(x) for x in extra]:
        if not valid_restore_path(path):
            add_error("path %r doesn't include a branch and revision" % path)
            continue
        try:
            resolved = vfs.resolve(repo, path, want_meta=True, follow=False)
        except vfs.IOError as e:
            add_error(e)
            continue
        if len(resolved) == 3 and resolved[2][0] == b'latest':
            # Follow latest symlink to the actual save
            try:
                resolved = vfs.resolve(repo, b'latest', parent=resolved[:-1],
                                       want_meta=True)
            except vfs.IOError as e:
                add_error(e)
                continue
            # Rename it back to 'latest'
            resolved = tuple(elt if i != 2 else (b'latest',) + elt[1:]
                             for i, elt in enumerate(resolved))
        path_parent, path_name = os.path.split(path)
        leaf_name, leaf_item = resolved[-1]
        if not leaf_item:
            add_error('error: cannot access %r in %r'
                      % (b'/'.join(name for name, item in resolved), path))
            continue
        if not path_name or path_name == b'.':
            # Source is /foo/what/ever/ or /foo/what/ever/. -- extract
            # what/ever/* to the current directory, and if name == '.'
            # (i.e. /foo/what/ever/.), then also restore what/ever's
            # metadata to the current directory.
            treeish = vfs.item_mode(leaf_item)
            if not treeish:
                add_error('%r cannot be restored as a directory' % path)
            else:
                items = vfs.contents(repo, leaf_item, want_meta=True)
                dot, leaf_item = next(items, None)
                assert dot == b'.'
                for sub_name, sub_item in items:
                    restore(repo, b'', sub_name, sub_item, top,
                            opt.sparse, opt.numeric_ids, owner_map,
                            exclude_rxs, verbosity, hardlinks)
                if path_name == b'.':
                    leaf_item = vfs.augment_item_meta(repo, leaf_item,
                                                      include_size=True)
                    apply_metadata(leaf_item.meta, b'.',
                                   opt.numeric_ids, owner_map)
        else:
            restore(repo, b'', leaf_name, leaf_item, top,
                    opt.sparse, opt.numeric_ids, owner_map,
                    exclude_rxs, verbosity, hardlinks)

    if verbosity >= 0:
        progress('Restoring: %d, done.\n' % total_restored)
    die_if_errors()

def main():
    o = options.Options(optspec)
    opt, flags, extra = o.parse(sys.argv[1:])
    verbosity = opt.verbose if not opt.quiet else -1

    git.check_repo_or_die()

    if not extra:
        o.fatal('must specify at least one filename to restore')

    exclude_rxs = parse_rx_excludes(flags, o.fatal)

    owner_map = {}
    for map_type in ('user', 'group', 'uid', 'gid'):
        owner_map[map_type] = parse_owner_mappings(map_type, flags, o.fatal)

    if opt.outdir:
        mkdirp(opt.outdir)
        os.chdir(opt.outdir)

    repo = RemoteRepo(opt.remote) if opt.remote else LocalRepo()
    top = os.getcwd()
    hardlinks = {}
    for path in extra:
        if not valid_restore_path(path):
            add_error("path %r doesn't include a branch and revision" % path)
            continue
        try:
            resolved = vfs2.lresolve(repo, path, want_meta=True)
        except vfs2.IOError as e:
            add_error(e)
            continue
        path_parent, path_name = os.path.split(path)
        leaf_name, leaf_item = resolved[-1]
        if not leaf_item:
            add_error('error: cannot access %r in %r'
                      % ('/'.join(name for name, item in resolved), path))
            continue
        if not path_name or path_name == '.':
            # Source is /foo/what/ever/ or /foo/what/ever/. -- extract
            # what/ever/* to the current directory, and if name == '.'
            # (i.e. /foo/what/ever/.), then also restore what/ever's
            # metadata to the current directory.
            treeish = vfs2.item_mode(leaf_item)
            if not treeish:
                add_error('%r cannot be restored as a directory' % path)
            else:
                items = vfs2.contents(repo, leaf_item, want_meta=True)
                dot, leaf_item = next(items, None)
                assert dot == '.'
                for sub_name, sub_item in items:
                    restore(repo, '', sub_name, sub_item, top,
                            opt.sparse, opt.numeric_ids, owner_map,
                            exclude_rxs, verbosity, hardlinks)
                if path_name == '.':
                    leaf_item = vfs2.augment_item_meta(repo, leaf_item,
                                                       include_size=True)
                    apply_metadata(leaf_item.meta, '.',
                                   opt.numeric_ids, owner_map)
        else:
            restore(repo, '', leaf_name, leaf_item, top,
                    opt.sparse, opt.numeric_ids, owner_map,
                    exclude_rxs, verbosity, hardlinks)

    if verbosity >= 0:
        progress('Restoring: %d, done.\n' % total_restored)
    die_if_errors()

def test_remote_resolve_loop():
    prep_and_test_repo(b'remote-vfs-resolve-loop',
                       lambda x: RemoteRepo(x), test_resolve_loop)

def test_remote_resolve():
    prep_and_test_repo(b'remote-vfs-resolve',
                       lambda x: RemoteRepo(x), test_resolve)

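# Presumably the same prep/factory pattern covers the local case as well; this
# counterpart is an assumption, shown only to illustrate how the repo factory
# argument swaps RemoteRepo for LocalRepo.
def test_local_resolve():
    prep_and_test_repo(b'local-vfs-resolve',
                       lambda x: LocalRepo(repo_dir=x), test_resolve)
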
optspec = """
bup join [-r host:path] [refs or hashes...]
--
r,remote=  remote repository path
o=         output filename
"""
o = options.Options(optspec)
(opt, flags, extra) = o.parse(sys.argv[1:])
if opt.remote:
    opt.remote = argv_bytes(opt.remote)

git.check_repo_or_die()

stdin = byte_stream(sys.stdin)

if not extra:
    extra = linereader(stdin)

ret = 0

repo = RemoteRepo(opt.remote) if opt.remote else LocalRepo()

if opt.o:
    outfile = open(opt.o, 'wb')
else:
    sys.stdout.flush()
    outfile = byte_stream(sys.stdout)

for ref in [argv_bytes(x) for x in extra]:
    try:
        for blob in repo.join(ref):
            outfile.write(blob)
    except KeyError as e:
        outfile.flush()
        log('error: %s\n' % e)
        ret = 1

optspec = """ bup join [-r host:path] [refs or hashes...] -- r,remote= remote repository path o= output filename """ o = options.Options(optspec) (opt, flags, extra) = o.parse(sys.argv[1:]) git.check_repo_or_die() if not extra: extra = linereader(sys.stdin) ret = 0 repo = RemoteRepo(opt.remote) if opt.remote else LocalRepo() if opt.o: outfile = open(opt.o, 'wb') else: outfile = sys.stdout for ref in extra: try: for blob in repo.join(ref): outfile.write(blob) except KeyError as e: outfile.flush() log('error: %s\n' % e) ret = 1
def test_remote_resolve_loop(tmpdir):
    prep_and_test_repo(tmpdir, lambda x: RemoteRepo(x), _test_resolve_loop)