def main(argv):
    """Entry point for the bloom command: check, ruin, or (re)generate
    the bup.bloom filter for a pack directory."""
    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])

    if extra:
        o.fatal('no positional parameters expected')

    # -k is only meaningful when (re)generating; --check ignores it.
    if not opt.check and opt.k and opt.k not in (4, 5):
        o.fatal('only k values of 4 and 5 are supported')

    if opt.check:
        opt.check = argv_bytes(opt.check)

    output = argv_bytes(opt.output) if opt.output else None
    # Scan the explicit -d directory if given, otherwise the local
    # repository's pack directory.
    if opt.dir:
        path = argv_bytes(opt.dir)
    else:
        path = LocalRepo().packdir()

    debug1('bloom: scanning %s\n' % path_msg(path))

    outfilename = output or os.path.join(path, b'bup.bloom')
    if opt.check:
        check_bloom(path, outfilename, opt.check)
    elif opt.ruin:
        ruin_bloom(outfilename)
    else:
        do_bloom(path, outfilename, opt.k, opt.force)

    if saved_errors:
        log('WARNING: %d errors encountered during bloom.\n'
            % len(saved_errors))
        sys.exit(1)
    elif opt.check:
        log('All tests passed.\n')
def main(argv):
    """Entry point for the join command: write the concatenated blob
    content of each requested ref to stdout (or the -o file)."""
    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])

    if opt.remote:
        opt.remote = argv_bytes(opt.remote)

    git.check_repo_or_die()

    stdin = byte_stream(sys.stdin)
    # Without positional refs, read ref names from stdin, one per line.
    if not extra:
        extra = linereader(stdin)

    ret = 0
    repo = RemoteRepo(opt.remote) if opt.remote else LocalRepo()

    if opt.o:
        outfile = open(opt.o, 'wb')
    else:
        # Flush buffered text output before switching to the raw byte
        # stream wrapper around stdout.
        sys.stdout.flush()
        outfile = byte_stream(sys.stdout)

    for ref in [argv_bytes(x) for x in extra]:
        try:
            for blob in repo.join(ref):
                outfile.write(blob)
        except KeyError as e:
            # Unknown ref: report it but keep processing the rest.
            outfile.flush()
            log('error: %s\n' % e)
            ret = 1

    sys.exit(ret)
def main(argv):
    """Entry point for import-duplicity: restore each duplicity
    snapshot from the source URL into a temp dir and save it into the
    bup repository under the given save name, preserving the original
    snapshot timestamps."""
    global dry_run

    log('\nbup: import-duplicity is EXPERIMENTAL (proceed with caution)\n\n')
    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])
    dry_run = opt.dry_run

    if len(extra) < 1 or not extra[0]:
        o.fatal('duplicity source URL required')
    if len(extra) < 2 or not extra[1]:
        o.fatal('bup destination save name required')
    if len(extra) > 2:
        o.fatal('too many arguments')

    source_url, save_name = extra
    source_url = argv_bytes(source_url)
    save_name = argv_bytes(save_name)
    bup_path = bup.path.exe()

    git.check_repo_or_die()

    tmpdir = tempfile.mkdtemp(prefix=b'bup-import-dup-')
    try:
        dup = [b'duplicity', b'--archive-dir', tmpdir + b'/dup-cache']
        restoredir = tmpdir + b'/restore'
        tmpidx = tmpdir + b'/index'

        collection_status = \
            exo(dup + [b'collection-status', b'--log-fd=3', source_url],
                close_fds=False, preexec_fn=redirect_dup_output)  # i.e. 3>&1 1>&2
        # Duplicity output lines of interest look like this (one leading space):
        # full 20150222T073111Z 1 noenc
        # inc 20150222T073233Z 1 noenc
        dup_timestamps = []
        for line in collection_status.splitlines():
            if line.startswith(b' inc '):
                assert (len(line) >= len(b' inc 20150222T073233Z'))
                dup_timestamps.append(line[5:21])
            elif line.startswith(b' full '):
                assert (len(line) >= len(b' full 20150222T073233Z'))
                dup_timestamps.append(line[6:22])
        # Restore each snapshot in turn and save it with --date set to
        # the snapshot's own timestamp.
        for i, dup_ts in enumerate(dup_timestamps):
            tm = strptime(dup_ts.decode('ascii'), '%Y%m%dT%H%M%SZ')
            exc([b'rm', b'-rf', restoredir])
            exc(dup + [b'restore', b'-t', dup_ts, source_url, restoredir])
            exc([bup_path, b'index', b'-uxf', tmpidx, restoredir])
            exc([bup_path, b'save', b'--strip', b'--date', b'%d' % timegm(tm),
                 b'-f', tmpidx, b'-n', save_name, restoredir])
        sys.stderr.flush()
    finally:
        # Always remove the temp dir, even on failure.
        exc([b'rm', b'-rf', tmpdir])

    if saved_errors:
        log('warning: %d errors encountered\n' % len(saved_errors))
        sys.exit(1)
def main(argv):
    """Entry point for the midx command: check existing .midx files or
    generate new ones from .idx files."""
    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])
    opt.dir = argv_bytes(opt.dir) if opt.dir else None
    opt.output = argv_bytes(opt.output) if opt.output else None

    if extra and (opt.auto or opt.force):
        o.fatal("you can't use -f/-a and also provide filenames")
    if opt.check and (not extra and not opt.auto):
        o.fatal("if using --check, you must provide filenames or -a")

    git.check_repo_or_die()

    if opt.max_files < 0:
        opt.max_files = max_files()
    assert (opt.max_files >= 5)

    extra = [argv_bytes(x) for x in extra]

    if opt.check:
        # check existing midx files
        if extra:
            midxes = extra
        else:
            path = opt.dir or git.repo(b'objects/pack')
            # Use path_msg for display, matching the non-check branch
            # below; formatting the raw bytes into a str would print a
            # b'...' repr on Python 3.
            debug1('midx: scanning %s\n' % path_msg(path))
            midxes = glob.glob(os.path.join(path, b'*.midx'))
        for name in midxes:
            check_midx(name)
        if not saved_errors:
            log('All tests passed.\n')
    else:
        if extra:
            # Build one midx from the explicitly listed files.
            sys.stdout.flush()
            do_midx(git.repo(b'objects/pack'), opt.output, extra, b'',
                    byte_stream(sys.stdout), auto=opt.auto, force=opt.force,
                    print_names=opt.print)
        elif opt.auto or opt.force:
            # Scan the pack directory and (re)build as needed.
            sys.stdout.flush()
            path = opt.dir or git.repo(b'objects/pack')
            debug1('midx: scanning %s\n' % path_msg(path))
            do_midx_dir(path, opt.output, byte_stream(sys.stdout),
                        auto=opt.auto, force=opt.force,
                        max_files=opt.max_files)
        else:
            o.fatal("you must use -f or -a or provide input filenames")

    if saved_errors:
        log('WARNING: %d errors encountered.\n' % len(saved_errors))
        sys.exit(1)
def main(argv):
    """Entry point for the idx-search command: report which .idx files
    contain an oid matching --find (full or prefix match)."""
    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])
    handle_ctrl_c()
    opt.find = argv_bytes(opt.find) if opt.find else b''

    if not extra:
        o.fatal('you must provide at least one filename')

    if len(opt.find) > 40:
        o.fatal('--find parameter must be <= 40 chars long')
    else:
        # Pad odd-length prefixes so unhexlify can parse them.
        if len(opt.find) % 2:
            s = opt.find + b'0'
        else:
            s = opt.find
        try:
            bin = unhexlify(s)
        except (TypeError, ValueError):
            # Python 3's unhexlify raises binascii.Error (a ValueError
            # subclass) on invalid input; Python 2 raised TypeError.
            o.fatal('--find parameter is not a valid hex string')

    sys.stdout.flush()
    out = byte_stream(sys.stdout)
    find = opt.find.lower()
    count = 0
    idxfiles = [argv_bytes(x) for x in extra]
    for name in idxfiles:
        try:
            ix = git.open_idx(name)
        except git.GitError as e:
            add_error('%r: %s' % (name, e))
            continue
        if len(opt.find) == 40:
            # Full oid: use the index's fast existence check.
            if ix.exists(bin):
                out.write(b'%s %s\n' % (name, find))
        else:
            # slow, exhaustive search
            for _i in ix:
                i = hexlify(_i)
                if i.startswith(find):
                    out.write(b'%s %s\n' % (name, i))
                qprogress('Searching: %d\r' % count)
                count += 1

    if saved_errors:
        log('WARNING: %d errors encountered while searching.\n'
            % len(saved_errors))
        sys.exit(1)
def main(argv):
    """Entry point for the tag command: list, delete, or create tags."""
    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])

    git.check_repo_or_die()

    tags = [t for sublist in git.tags().values() for t in sublist]

    if opt.delete:
        # git.delete_ref() doesn't complain if a ref doesn't exist.  We
        # could implement this verification but we'd need to read in the
        # contents of the tag file and pass the hash, and we already know
        # about the tag's existence via "tags".
        tag_name = argv_bytes(opt.delete)
        if not opt.force and tag_name not in tags:
            log("error: tag '%s' doesn't exist\n" % path_msg(tag_name))
            sys.exit(1)
        tag_file = b'refs/tags/%s' % tag_name
        git.delete_ref(tag_file)
        sys.exit(0)

    if not extra:
        # No arguments: list all tags.  Flush and wrap stdout once,
        # outside the loop (re-doing it per tag was pointless work).
        sys.stdout.flush()
        out = byte_stream(sys.stdout)
        for t in tags:
            out.write(t)
            out.write(b'\n')
        sys.exit(0)
    elif len(extra) != 2:
        o.fatal('expected commit ref and hash')

    tag_name, commit = map(argv_bytes, extra[:2])
    if not tag_name:
        o.fatal("tag name must not be empty.")
    debug1("args: tag name = %s; commit = %s\n"
           % (path_msg(tag_name), commit.decode('ascii')))

    if tag_name in tags and not opt.force:
        log("bup: error: tag '%s' already exists\n" % path_msg(tag_name))
        sys.exit(1)

    if tag_name.startswith(b'.'):
        o.fatal("'%s' is not a valid tag name." % path_msg(tag_name))

    try:
        hash = git.rev_parse(commit)
    except git.GitError as e:
        log("bup: error: %s" % e)
        sys.exit(2)

    if not hash:
        log("bup: error: commit %s not found.\n" % commit.decode('ascii'))
        sys.exit(2)

    # Only tag objects that actually exist in the repository.
    with git.PackIdxList(git.repo(b'objects/pack')) as pL:
        if not pL.exists(hash):
            log("bup: error: commit %s not found.\n" % commit.decode('ascii'))
            sys.exit(2)

    git.update_ref(b'refs/tags/' + tag_name, hash, None, force=True)
def main(argv):
    """Entry point for the damage command: deliberately overwrite
    random chunks of each named file (for testing recovery)."""
    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])

    if not extra:
        o.fatal('filenames expected')

    # "is not None" (not "!= None") so that a seed of 0 still seeds.
    if opt.seed is not None:
        random.seed(opt.seed)

    for name in extra:
        name = argv_bytes(name)
        log('Damaging "%s"...\n' % path_msg(name))
        with open(name, 'r+b') as f:
            st = os.fstat(f.fileno())
            size = st.st_size
            if opt.percent or opt.size:
                # Cap the damage size by both --percent and --size.
                ms1 = int(float(opt.percent or 0) / 100.0 * size) or size
                ms2 = opt.size or size
                maxsize = min(ms1, ms2)
            else:
                maxsize = 1
            chunks = opt.num or 10
            chunksize = size // chunks
            for r in range(chunks):
                sz = random.randrange(1, maxsize + 1)
                if sz > size:
                    sz = size
                if opt.equal:
                    # Evenly spaced damage offsets.
                    ofs = r * chunksize
                else:
                    ofs = random.randrange(0, size - sz + 1)
                log(' %6d bytes at %d\n' % (sz, ofs))
                f.seek(ofs)
                f.write(randblock(sz))
def opts_from_cmdline(args, onabort=None, pwd=b'/'):
    """Parse ls command line arguments and return a dictionary of ls
    options, augmented with "classification", "long_listing",
    "paths", and "show_hidden".
    """
    if onabort:
        parser = Options(optspec, onabort=onabort)
    else:
        parser = Options(optspec)
    opt, flags, extra = parser.parse_bytes(args)

    # Default to the current directory when no paths were given.
    opt.paths = [argv_bytes(x) for x in extra] or (pwd, )
    opt.long_listing = opt.l
    opt.classification = None
    opt.show_hidden = None
    for option, parameter in flags:
        if option in ('-F', '--classify'):
            opt.classification = 'all'
        elif option == '--file-type':
            opt.classification = 'type'
        elif option in ('-a', '--all'):
            opt.show_hidden = 'all'
        elif option in ('-A', '--almost-all'):
            opt.show_hidden = 'almost'
    return opt
def getattr(self, path):
    """FUSE callback: return a fuse.Stat for path, or -ENOENT if the
    path cannot be resolved in the repository."""
    path = argv_bytes(path)
    if self.verbose > 0:
        log('--getattr(%r)\n' % path)
    # Skip fetching real metadata when the mount fakes it.
    res = vfs.resolve(self.repo, path, want_meta=(not self.fake_metadata),
                      follow=False)
    name, item = res[-1]
    if not item:
        return -errno.ENOENT
    if self.fake_metadata:
        item = vfs.augment_item_meta(self.repo, item, include_size=True)
    else:
        item = vfs.ensure_item_has_metadata(self.repo, item,
                                            include_size=True)
    meta = item.meta
    # FIXME: do we want/need to do anything more with nlink?
    st = fuse.Stat(st_mode=meta.mode, st_nlink=1, st_size=meta.size)
    st.st_mode = meta.mode
    st.st_uid = meta.uid or 0
    st.st_gid = meta.gid or 0
    # Clamp timestamps at 0; stored fstimes may be negative.
    st.st_atime = max(0, xstat.fstime_floor_secs(meta.atime))
    st.st_mtime = max(0, xstat.fstime_floor_secs(meta.mtime))
    st.st_ctime = max(0, xstat.fstime_floor_secs(meta.ctime))
    return st
def parse_rx_excludes(options, fatal):
    """Traverse the options and extract all rx excludes, or call
    Option.fatal().

    Returns a list of compiled regex patterns from --exclude-rx and
    --exclude-rx-from arguments.
    """
    excluded_patterns = []
    for flag in options:
        (option, parameter) = flag
        if option == '--exclude-rx':
            try:
                excluded_patterns.append(re.compile(argv_bytes(parameter)))
            except re.error as ex:
                fatal('invalid --exclude-rx pattern (%r): %s' % (parameter, ex))
        elif option == '--exclude-rx-from':
            try:
                # NOTE(review): unlike parse_excludes, parameter isn't
                # passed through argv_bytes here — confirm intended.
                f = open(resolve_parent(parameter), 'rb')
            except IOError as e:
                raise fatal("couldn't read %r" % parameter)
            # Close the file deterministically (it was previously leaked).
            with f:
                for pattern in f.readlines():
                    spattern = pattern.rstrip(b'\n')
                    if not spattern:
                        continue
                    try:
                        excluded_patterns.append(re.compile(spattern))
                    except re.error as ex:
                        fatal('invalid --exclude-rx pattern (%r): %s'
                              % (spattern, ex))
    return excluded_patterns
def readlink(self, path):
    """FUSE callback: return the decoded target of the symlink at
    path, or -ENOENT if it cannot be resolved."""
    path = argv_bytes(path)
    if self.verbose > 0:
        log('--readlink(%r)\n' % path)
    name, item = vfs.resolve(self.repo, path, follow=False)[-1]
    if not item:
        return -errno.ENOENT
    return fsdecode(vfs.readlink(self.repo, item))
def opts_from_cmdline(argv):
    """Parse the split command line, normalize byte arguments, apply
    defaults, and reject invalid option combinations via o.fatal().
    Returns the resulting options object."""
    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])
    opt.sources = extra

    if opt.name:
        opt.name = argv_bytes(opt.name)
    if opt.remote:
        opt.remote = argv_bytes(opt.remote)
    if opt.verbose is None:
        opt.verbose = 0

    # At least one output mode is required.
    if not (opt.blobs or opt.tree or opt.commit or opt.name
            or opt.noop or opt.copy):
        o.fatal("use one or more of -b, -t, -c, -n, --noop, --copy")
    if opt.copy and (opt.blobs or opt.tree):
        o.fatal('--copy is incompatible with -b, -t')
    if (opt.noop or opt.copy) and (opt.commit or opt.name):
        o.fatal('--noop and --copy are incompatible with -c, -n')
    if opt.blobs and (opt.tree or opt.commit or opt.name):
        o.fatal('-b is incompatible with -t, -c, -n')
    if extra and opt.git_ids:
        o.fatal("don't provide filenames when using --git-ids")

    if opt.verbose >= 2:
        # Pass extra verbosity down to git, and enable benchmarking.
        git.verbose = opt.verbose - 1
        opt.bench = 1

    # Normalize human-readable sizes/limits into numbers.
    if opt.max_pack_size:
        opt.max_pack_size = parse_num(opt.max_pack_size)
    if opt.max_pack_objects:
        opt.max_pack_objects = parse_num(opt.max_pack_objects)
    if opt.fanout:
        opt.fanout = parse_num(opt.fanout)
    if opt.bwlimit:
        opt.bwlimit = parse_num(opt.bwlimit)
    if opt.date:
        opt.date = parse_date_or_fatal(opt.date, o.fatal)
    else:
        opt.date = time.time()

    opt.is_reverse = environ.get(b'BUP_SERVER_REVERSE')
    if opt.is_reverse and opt.remote:
        o.fatal("don't use -r in reverse mode; it's automatic")

    if opt.name and not valid_save_name(opt.name):
        o.fatal("'%r' is not a valid branch name." % opt.name)

    return opt
def parse_excludes(options, fatal):
    """Traverse the options and extract all excludes, or call
    Option.fatal().

    Returns a sorted, de-duplicated list of resolved paths from
    --exclude and --exclude-from arguments.
    """
    excluded_paths = []
    for flag in options:
        (option, parameter) = flag
        if option == '--exclude':
            excluded_paths.append(resolve_parent(argv_bytes(parameter)))
        elif option == '--exclude-from':
            try:
                f = open(resolve_parent(argv_bytes(parameter)), 'rb')
            except IOError as e:
                raise fatal("couldn't read %r" % parameter)
            # Close the file deterministically (it was previously leaked).
            with f:
                for exclude_path in f.readlines():
                    # FIXME: perhaps this should be rstrip('\n')
                    exclude_path = resolve_parent(exclude_path.strip())
                    if exclude_path:
                        excluded_paths.append(exclude_path)
    return sorted(frozenset(excluded_paths))
def read(self, path, size, offset):
    """FUSE callback: return up to size bytes of path's content
    starting at offset, or -ENOENT if the path cannot be resolved."""
    path = argv_bytes(path)
    if self.verbose > 0:
        log('--read(%r)\n' % path)
    name, item = vfs.resolve(self.repo, path, follow=False)[-1]
    if not item:
        return -errno.ENOENT
    with vfs.fopen(self.repo, item) as f:
        f.seek(offset)
        return f.read(size)
def open(self, path, flags):
    """FUSE callback: validate that path exists and is being opened
    read-only; returns -ENOENT/-EACCES on failure, otherwise falls
    through (implicitly returning None)."""
    path = argv_bytes(path)
    if self.verbose > 0:
        log('--open(%r)\n' % path)
    res = vfs.resolve(self.repo, path, follow=False)
    name, item = res[-1]
    if not item:
        return -errno.ENOENT
    # The repository is read-only: reject any write access mode.
    accmode = os.O_RDONLY | os.O_WRONLY | os.O_RDWR
    if (flags & accmode) != os.O_RDONLY:
        return -errno.EACCES
    # NOTE(review): no explicit success return here; read() re-resolves
    # the path itself, so returning None appears intentional — confirm.
def commit_tree(tree, parent, date, argv, writer):
    """Create a commit for tree (with the given parent and date) whose
    message records the generating command line; return whatever
    writer.new_commit returns."""
    if compat.py_maj > 2:
        # Strip b prefix from python 3 bytes reprs to preserve previous format
        msgcmd = b'[%s]' % b', '.join([repr(argv_bytes(x))[1:].encode('ascii')
                                       for x in argv])
    else:
        msgcmd = repr(argv)
    msg = b'bup save\n\nGenerated by command:\n%s\n' % msgcmd
    # Same identity is used for both author and committer.
    userline = (b'%s <%s@%s>' % (userfullname(), username(), hostname()))
    return writer.new_commit(tree, parent, userline, date, None,
                             userline, date, None, msg)
def main(argv):
    """Entry point for the cat-file command: write a repository item's
    content (or its metadata with --meta/--bupm) to stdout."""
    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])
    git.check_repo_or_die()

    if not extra:
        o.fatal('must specify a target')
    if len(extra) > 1:
        o.fatal('only one target file allowed')
    if opt.bupm and opt.meta:
        o.fatal('--meta and --bupm are incompatible')

    target = argv_bytes(extra[0])
    # The target must at least name a branch and a revision.
    if not re.match(br'/*[^/]+/[^/]+', target):
        o.fatal("path %r doesn't include a branch and revision" % target)

    with LocalRepo() as repo:
        resolved = vfs.resolve(repo, target, follow=False)
        leaf_name, leaf_item = resolved[-1]
        if not leaf_item:
            log('error: cannot access %r in %r\n'
                % (b'/'.join(name for name, item in resolved), target))
            sys.exit(1)

        mode = vfs.item_mode(leaf_item)

        sys.stdout.flush()
        out = byte_stream(sys.stdout)

        if opt.bupm:
            # Dump the directory's raw .bupm metadata stream, if any.
            if not stat.S_ISDIR(mode):
                o.fatal('%r is not a directory' % target)
            _, bupm_oid = vfs.tree_data_and_bupm(repo, leaf_item.oid)
            if bupm_oid:
                with vfs.tree_data_reader(repo, bupm_oid) as meta_stream:
                    out.write(meta_stream.read())
        elif opt.meta:
            # Dump the item's encoded metadata record.
            augmented = vfs.augment_item_meta(repo, leaf_item,
                                              include_size=True)
            out.write(augmented.meta.encode())
        else:
            # Default: stream the plain file's content.
            if stat.S_ISREG(mode):
                with vfs.fopen(repo, leaf_item) as f:
                    for b in chunkyreader(f):
                        out.write(b)
            else:
                o.fatal('%r is not a plain file' % target)

    if saved_errors:
        log('warning: %d errors encountered\n' % len(saved_errors))
        sys.exit(1)
def from_opts(opt, reverse=True):
    """ Return a repo - understands:
     * the following optional options:
       - max-pack-size
       - max-pack-objects
       - compress
       - remote
     * the BUP_SERVER_REVERSE environment variable
    """
    if reverse:
        is_reverse = environ.get(b'BUP_SERVER_REVERSE')
        if is_reverse and opt.remote:
            # Newline added so the message doesn't run into later output
            # (log() does not append one).
            log("error: don't use -r in reverse mode; it's automatic\n")
            sys.exit(97)
    else:
        is_reverse = False

    # The opt object may simply not define some of these attributes,
    # depending on which command built it, so probe defensively.
    try:
        compress = opt.compress
    except (KeyError, AttributeError):
        compress = None

    try:
        max_pack_size = parse_num(opt.max_pack_size) \
            if opt.max_pack_size else None
    except (KeyError, AttributeError):
        max_pack_size = None

    try:
        max_pack_objects = parse_num(opt.max_pack_objects) \
            if opt.max_pack_objects else None
    except (KeyError, AttributeError):
        max_pack_objects = None

    try:
        if opt.remote:
            return make_repo(argv_bytes(opt.remote),
                             compression_level=compress,
                             max_pack_size=max_pack_size,
                             max_pack_objects=max_pack_objects)
        if is_reverse:
            return make_repo(b'reverse://%s' % is_reverse,
                             compression_level=compress,
                             max_pack_size=max_pack_size,
                             max_pack_objects=max_pack_objects)
        return LocalRepo(compression_level=compress,
                         max_pack_size=max_pack_size,
                         max_pack_objects=max_pack_objects)
    except client.ClientError as e:
        # Newline added for the same reason as above.
        log('error: %s\n' % e)
        sys.exit(1)
def readdir(self, path, offset):
    """FUSE callback: yield directory entries for path, or -ENOENT."""
    path = argv_bytes(path)
    assert not offset  # We don't return offsets, so offset should be unused
    res = vfs.resolve(self.repo, path, follow=False)
    dir_name, dir_item = res[-1]
    if not dir_item:
        yield -errno.ENOENT
        # Previously fell through here and passed None to
        # vfs.contents; stop after reporting the error instead.
        return
    yield fuse.Direntry('..')
    # FIXME: make sure want_meta=False is being completely respected
    # Use self.repo (was a stray reference to a global "repo"),
    # matching the other callbacks on this class.
    for ent_name, ent_item in vfs.contents(self.repo, dir_item,
                                           want_meta=False):
        # '/' can't appear in a FUSE entry name.
        fusename = fsdecode(ent_name.replace(b'/', b'-'))
        yield fuse.Direntry(fusename)
def via_cmdline(args, out=None, onabort=None):
    """Write a listing of a file or directory in the bup repository to out.

    When a long listing is not requested and stdout is attached to a
    tty, the output is formatted in columns. When not attached to tty
    (for example when the output is piped to another command), one
    file is listed per line.

    """
    assert out
    opt = opts_from_cmdline(args, onabort=onabort)
    if opt.remote:
        repo = RemoteRepo(argv_bytes(opt.remote))
    else:
        repo = LocalRepo()
    return within_repo(repo, opt, out)
def main(argv):
    """Entry point for the (experimental) rm command."""
    o = Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])

    # This command rewrites refs; require explicit acknowledgement.
    if not opt.unsafe:
        o.fatal(
            'refusing to run dangerous, experimental command without --unsafe')
    if len(extra) < 1:
        o.fatal('no paths specified')

    repo = from_opts(opt)
    paths = [argv_bytes(x) for x in extra]
    bup_rm(repo, paths, verbosity=opt.verbose)
    die_if_errors()
def main(argv):
    """Entry point for the init command (repo-module variant): create
    a remote repository with -r, otherwise a local one."""
    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])
    if extra:
        o.fatal("no arguments expected")

    if opt.remote:
        repo.make_repo(argv_bytes(opt.remote), create=True)
    else:
        try:
            repo.LocalRepo.create()
        except git.GitError as e:
            # NOTE(review): message lacks a trailing \n unlike most log
            # calls in this codebase — confirm whether intended.
            log("bup: error: could not init repository: %s" % e)
            sys.exit(1)
def open(self, path, flags):
    """FUSE callback: validate that path exists and is being opened
    read-only; return -ENOENT/-EACCES on failure, else None."""
    path = argv_bytes(path)
    if self.verbose > 0:
        log('--open(%r)\n' % path)
    res = vfs.resolve(self.repo, path, follow=False)
    name, item = res[-1]
    if not item:
        return -errno.ENOENT
    # The repository is read-only: reject any write access mode.
    accmode = os.O_RDONLY | os.O_WRONLY | os.O_RDWR
    if (flags & accmode) != os.O_RDONLY:
        return -errno.EACCES
    # Return None since read doesn't need the file atm...
    # If we *do* return the file, it'll show up as the last argument
    #return vfs.fopen(repo, item)
    return None
def main(argv):
    """Entry point for the config command: print the repository's
    value for the one requested config name."""
    o = options.Options(optspec)
    (opt, flags, extra) = o.parse(argv[1:])

    if len(extra) != 1:
        o.fatal("must give exactly one name")

    name = argv_bytes(extra[0])

    r = repo.from_opts(opt)
    try:
        # 'str' means no type coercion when querying the config.
        if opt.type == 'str':
            opt.type = None
        print("%s = %r" % (name.decode('utf-8'),
                           r.config(name, opttype=opt.type)))
    finally:
        # Release the repo even if the config lookup or print raises
        # (previously the close was skipped on error).
        r.close()
def main(argv):
    """Entry point for the init command (git/client variant): always
    initialize the local repository, then also create the remote one
    when -r is given."""
    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])
    if extra:
        o.fatal("no arguments expected")

    try:
        git.init_repo()  # local repo
    except git.GitError as e:
        # NOTE(review): message lacks a trailing \n unlike most log
        # calls in this codebase — confirm whether intended.
        log("bup: error: could not init repository: %s" % e)
        sys.exit(1)

    if opt.remote:
        # Connecting with create=True asks the server to init its repo.
        git.check_repo_or_die()
        cli = client.Client(argv_bytes(opt.remote), create=True)
        cli.close()
def main(argv):
    """Entry point for the rm command (LocalRepo variant): remove the
    named paths from the repository."""
    o = Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])

    # This command rewrites refs; require explicit acknowledgement.
    if not opt.unsafe:
        o.fatal(
            'refusing to run dangerous, experimental command without --unsafe')
    if len(extra) < 1:
        o.fatal('no paths specified')

    check_repo_or_die()
    with LocalRepo() as repo:
        bup_rm(repo, [argv_bytes(x) for x in extra],
               compression=opt.compress, verbosity=opt.verbose)
    die_if_errors()
def parse_owner_mappings(type, options, fatal):
    """Traverse the options and parse all --map-TYPEs, or call
    Option.fatal()."""
    opt_name = '--map-' + type
    numeric = type in ('uid', 'gid')
    # uid/gid mappings are signed integers; name mappings are free-form.
    if numeric:
        value_rx = re.compile(br'^(-?[0-9]+)=(-?[0-9]+)$')
    else:
        value_rx = re.compile(br'^([^=]+)=([^=]*)$')
    owner_map = {}
    for option, parameter in options:
        if option != opt_name:
            continue
        parameter = argv_bytes(parameter)
        match = value_rx.match(parameter)
        if not match:
            raise fatal("couldn't parse %r as %s mapping" % (parameter, type))
        old_id, new_id = match.groups()
        if numeric:
            old_id, new_id = int(old_id), int(new_id)
        owner_map[old_id] = new_id
    return owner_map
def main(argv):
    """Entry point for the help command: show general usage, or the
    man page for the one named command."""
    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])

    if len(extra) == 0:
        # the wrapper program provides the default usage string
        os.execvp(path.exe(), [path.exe()])
    elif len(extra) == 1:
        docname = (extra[0] == 'bup' and b'bup'
                   or (b'bup-%s' % argv_bytes(extra[0])))
        # Prefer the in-tree man page (during development) over an
        # installed one.
        manpath = os.path.join(path.exedir(),
                               b'../../Documentation/' + docname + b'.[1-9]')
        g = glob.glob(manpath)
        try:
            if g:
                os.execvp('man', ['man', '-l', g[0]])
            else:
                os.execvp('man', ['man', docname])
        except OSError as e:
            sys.stderr.write('Unable to run man command: %s\n' % e)
            sys.exit(1)
    else:
        o.fatal("exactly one command name expected")
def main(argv):
    """Entry point for the drecurse command: recursively list the
    given directory, honoring the exclude options."""
    o = options.Options(optspec)
    opt, flags, extra = o.parse_bytes(argv[1:])

    if len(extra) != 1:
        o.fatal("exactly one filename expected")

    drecurse_top = argv_bytes(extra[0])
    excluded_paths = parse_excludes(flags, o.fatal)
    # For a relative top, make the excludes relative too.
    if not drecurse_top.startswith(b'/'):
        excluded_paths = [relpath(x) for x in excluded_paths]
    exclude_rxs = parse_rx_excludes(flags, o.fatal)
    it = drecurse.recursive_dirlist([drecurse_top], opt.xdev,
                                    excluded_paths=excluded_paths,
                                    exclude_rxs=exclude_rxs)
    if opt.profile:
        import cProfile

        def do_it():
            # Drain the iterator so the traversal itself is profiled.
            for i in it:
                pass
        # NOTE(review): cProfile.run evaluates its string in __main__'s
        # namespace, where the local do_it may not be visible — confirm.
        cProfile.run('do_it()')
    else:
        if opt.quiet:
            for i in it:
                pass
        else:
            sys.stdout.flush()
            out = byte_stream(sys.stdout)
            for (name, st) in it:
                out.write(name + b'\n')

    if saved_errors:
        log('WARNING: %d errors encountered.\n' % len(saved_errors))
        sys.exit(1)
def main():
    """Entry point for the get command: transfer the requested items
    from the source repository into the destination repository, then
    update the affected refs."""
    handle_ctrl_c()
    is_reverse = environ.get(b'BUP_SERVER_REVERSE')
    opt = parse_args(compat.argv)
    git.check_repo_or_die()
    if opt.source:
        opt.source = argv_bytes(opt.source)
    if opt.bwlimit:
        client.bwlimit = parse_num(opt.bwlimit)
    if is_reverse and opt.remote:
        misuse("don't use -r in reverse mode; it's automatic")
    if opt.remote:
        opt.remote = argv_bytes(opt.remote)
    if opt.remote or is_reverse:
        dest_repo = RemoteRepo(opt.remote)
    else:
        dest_repo = LocalRepo()

    with dest_repo as dest_repo:
        with LocalRepo(repo_dir=opt.source) as src_repo:
            with dest_repo.new_packwriter(compression_level=opt.compress) \
                    as writer:
                # Resolve and validate all sources and destinations,
                # implicit or explicit, and do it up-front, so we can
                # fail before we start writing (for any obviously
                # broken cases).
                target_items = resolve_targets(opt.target_specs,
                                               src_repo, dest_repo)

                updated_refs = {}  # ref_name -> (original_ref, tip_commit(bin))
                no_ref_info = (None, None)

                handlers = {'ff': handle_ff,
                            'append': handle_append,
                            'force-pick': handle_pick,
                            'pick': handle_pick,
                            'new-tag': handle_new_tag,
                            'replace': handle_replace,
                            'unnamed': handle_unnamed}

                for item in target_items:
                    debug1('get-spec: %r\n' % (item.spec,))
                    debug1('get-src: %s\n' % loc_desc(item.src))
                    debug1('get-dest: %s\n' % loc_desc(item.dest))
                    # Map a destination path to the ref it will update:
                    # /.tag/NAME -> refs/tags/NAME, /BRANCH -> refs/heads/BRANCH.
                    dest_path = item.dest and item.dest.path
                    if dest_path:
                        if dest_path.startswith(b'/.tag/'):
                            dest_ref = b'refs/tags/%s' % dest_path[6:]
                        else:
                            dest_ref = b'refs/heads/%s' % dest_path[1:]
                    else:
                        dest_ref = None

                    dest_hash = item.dest and item.dest.hash
                    orig_ref, cur_ref = updated_refs.get(dest_ref, no_ref_info)
                    orig_ref = orig_ref or dest_hash
                    cur_ref = cur_ref or dest_hash

                    # Dispatch on the spec's transfer method.
                    handler = handlers[item.spec.method]
                    item_result = handler(item, src_repo, writer, opt)
                    if len(item_result) > 1:
                        new_id, tree = item_result
                    else:
                        new_id = item_result[0]

                    if not dest_ref:
                        log_item(item.spec.src, item.src.type, opt)
                    else:
                        updated_refs[dest_ref] = (orig_ref, new_id)
                        if dest_ref.startswith(b'refs/tags/'):
                            log_item(item.spec.src, item.src.type, opt,
                                     tag=new_id)
                        else:
                            log_item(item.spec.src, item.src.type, opt,
                                     tree=tree, commit=new_id)

            # Only update the refs at the very end, once the writer is
            # closed, so that if something goes wrong above, the old refs
            # will be undisturbed.
            for ref_name, info in items(updated_refs):
                orig_ref, new_ref = info
                try:
                    dest_repo.update_ref(ref_name, new_ref, orig_ref)
                    if opt.verbose:
                        new_hex = hexlify(new_ref)
                        if orig_ref:
                            orig_hex = hexlify(orig_ref)
                            log('updated %r (%s -> %s)\n'
                                % (ref_name, orig_hex, new_hex))
                        else:
                            log('updated %r (%s)\n' % (ref_name, new_hex))
                except (git.GitError, client.ClientError) as ex:
                    add_error('unable to update ref %r: %s' % (ref_name, ex))

    if saved_errors:
        log('WARNING: %d errors encountered while saving.\n'
            % len(saved_errors))
        sys.exit(1)