def _mksubs(self):
    self._subs = {}
    tags = git.tags()
    revs = list(git.rev_list(self.hash.encode('hex')))
    latest = revs[0]
    for (date, commit) in revs:
        l = time.localtime(date)
        ls = time.strftime('%Y-%m-%d-%H%M%S', l)
        commithex = commit.encode('hex')
        target = '../.commit/%s/%s' % (commithex[:2], commithex[2:])
        n1 = FakeSymlink(self, ls, target)
        n1.ctime = n1.mtime = date
        self._subs[ls] = n1
        for tag in tags.get(commit, []):
            t1 = FakeSymlink(self, tag, target)
            t1.ctime = t1.mtime = date
            self._subs[tag] = t1
    (date, commit) = latest
    commithex = commit.encode('hex')
    target = '../.commit/%s/%s' % (commithex[:2], commithex[2:])
    n1 = FakeSymlink(self, 'latest', target)
    n1.ctime = n1.mtime = date
    self._subs['latest'] = n1
def _mksubs(self):
    self._subs = {}
    tags = git.tags(repo_dir=self._repo_dir)
    revs = list(git.rev_list(self.hash.encode("hex"), repo_dir=self._repo_dir))
    latest = revs[0]
    for (date, commit) in revs:
        l = time.localtime(date)
        ls = time.strftime("%Y-%m-%d-%H%M%S", l)
        commithex = commit.encode("hex")
        target = "../.commit/%s/%s" % (commithex[:2], commithex[2:])
        n1 = FakeSymlink(self, ls, target, self._repo_dir)
        n1.ctime = n1.mtime = date
        self._subs[ls] = n1
        for tag in tags.get(commit, []):
            t1 = FakeSymlink(self, tag, target, self._repo_dir)
            t1.ctime = t1.mtime = date
            self._subs[tag] = t1
    (date, commit) = latest
    commithex = commit.encode("hex")
    target = "../.commit/%s/%s" % (commithex[:2], commithex[2:])
    n1 = FakeSymlink(self, "latest", target, self._repo_dir)
    n1.ctime = n1.mtime = date
    self._subs["latest"] = n1
def _mksubs(self):
    self._subs = {}
    heads = git.list_refs(repo_dir=self._repo_dir, limit_to_heads=True)
    tags = git.list_refs(repo_dir=self._repo_dir, limit_to_tags=True)
    tags, tags_dup = tee(tags)
    tags_info = git.object_info((x[1].encode('hex') for x in tags_dup),
                                repo_dir=self._repo_dir)
    commit_tags = (ref for ref, info in izip(tags, tags_info)
                   if info[1] == 'commit')
    for ref in sorted(chain(heads, commit_tags)):
        #debug2('ref name: %s\n' % ref[0])
        revs = git.rev_list(ref[1].encode('hex'), repo_dir=self._repo_dir)
        for (date, commit) in revs:
            #debug2('commit: %s date: %s\n' % (commit.encode('hex'), date))
            commithex = commit.encode('hex')
            containername = commithex[:2]
            dirname = commithex[2:]
            n1 = self._subs.get(containername)
            if not n1:
                n1 = CommitList(self, containername, self._repo_dir)
                self._subs[containername] = n1
            if n1.commits.get(dirname):
                # Stop work for this ref, the rest should already be present
                break
            n1.commits[dirname] = (commit, date)
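# Standalone sketch (not bup code) of the itertools pattern used above:
# tee() splits the tags iterator into two independent copies, so one copy can
# feed git.object_info() while izip() pairs the other copy with the resulting
# info records; only refs whose object type is 'commit' survive the filter.
# The data below is made up for illustration.
from itertools import chain, izip, tee   # Python 2; on Python 3 use zip()

tag_refs = iter([('refs/tags/ok', 'aa' * 20), ('refs/tags/blob-tag', 'bb' * 20)])
tag_refs, tag_refs_dup = tee(tag_refs)
# Stand-in for the git.object_info() stream: one (hash, type) pair per input.
infos = [(sha, 'commit' if name == 'refs/tags/ok' else 'blob')
         for name, sha in tag_refs_dup]
commit_tags = (ref for ref, info in izip(tag_refs, infos) if info[1] == 'commit')
print(list(chain(commit_tags)))   # only ('refs/tags/ok', ...) remains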
def handle_ff(item, repo, cp, writer, opt, fatal):
    assert(item.spec.method == 'ff')
    assert(item.src.type in ('branch', 'save', 'commit'))
    hex_src = item.src.hash.encode('hex')
    commits = [c for d, c in git.rev_list(hex_src, repo_dir=repo)]
    if not item.dest.hash or item.dest.hash in commits:
        # Can fast forward.
        get_random_item(item.spec.src, hex_src, cp, writer, opt)
        commit_items = get_commit_items(hex_src, cp)
        return item.src.hash, commit_items.tree.decode('hex')
    spec_args = '%s %s' % (item.spec.argopt, item.spec.argval)
    fatal('destination is not an ancestor of source for %r' % spec_args)
def filter_branch(tip_commit_hex, exclude, writer):
    # May return None if everything is excluded.
    commits = [unhexlify(x) for x in git.rev_list(tip_commit_hex)]
    commits.reverse()
    last_c, tree = None, None
    # Rather than assert that we always find an exclusion here, we'll
    # just let the StopIteration signal the error.
    first_exclusion = next(i for i, c in enumerate(commits) if exclude(c))
    if first_exclusion != 0:
        last_c = commits[first_exclusion - 1]
        tree = unhexlify(get_commit_items(hexlify(last_c), git.cp()).tree)
    commits = commits[first_exclusion:]
    for c in commits:
        if exclude(c):
            continue
        last_c, tree = append_commit(hexlify(c), last_c, git.cp(), writer)
    return last_c
def filter_branch(tip_commit_hex, exclude, writer):
    # May return None if everything is excluded.
    commits = [c for _, c in git.rev_list(tip_commit_hex)]
    commits.reverse()
    last_c, tree = None, None
    # Rather than assert that we always find an exclusion here, we'll
    # just let the StopIteration signal the error.
    first_exclusion = next(i for i, c in enumerate(commits) if exclude(c))
    if first_exclusion != 0:
        last_c = commits[first_exclusion - 1]
        tree = get_commit_items(last_c.encode('hex'),
                                git.cp()).tree.decode('hex')
    commits = commits[first_exclusion:]
    for c in commits:
        if exclude(c):
            continue
        last_c, tree = append_commit(c.encode('hex'), last_c, git.cp(), writer)
    return last_c
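# Note on the two filter_branch variants above: str.encode('hex') and
# str.decode('hex') exist only on Python 2, while binascii.hexlify() and
# binascii.unhexlify() perform the same conversion and also work on Python 3
# (operating on bytes), e.g.:
from binascii import hexlify, unhexlify

oid = b'\xde\xad\xbe\xef'
assert hexlify(oid) == b'deadbeef'
assert unhexlify(b'deadbeef') == oid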
def _mksubs(self):
    self._subs = {}
    revs = list(git.rev_list(self.hash.encode('hex')))
    for (date, commit) in revs:
        l = time.localtime(date)
        ls = time.strftime('%Y-%m-%d-%H%M%S', l)
        commithex = '.' + commit.encode('hex')
        n1 = Dir(self, commithex, 040000, commit)
        n2 = FakeSymlink(self, ls, commithex)
        n1.ctime = n1.mtime = n2.ctime = n2.mtime = date
        self._subs[commithex] = n1
        self._subs[ls] = n2
    latest = max(revs)
    if latest:
        (date, commit) = latest
        commithex = '.' + commit.encode('hex')
        n2 = FakeSymlink(self, 'latest', commithex)
        n2.ctime = n2.mtime = date
        self._subs['latest'] = n2
def handle_append(item, repo, cp, writer, opt, fatal):
    assert(item.spec.method == 'append')
    assert(item.src.type in ('branch', 'save', 'commit', 'tree'))
    assert(item.dest.type == 'branch' or not item.dest.type)
    hex_src = item.src.hash.encode('hex')
    if item.src.type == 'tree':
        get_random_item(item.spec.src, hex_src, cp, writer, opt)
        parent = item.dest.hash
        msg = 'bup save\n\nGenerated by command:\n%r\n' % sys.argv
        userline = '%s <%s@%s>' % (userfullname(), username(), hostname())
        now = time.time()
        commit = writer.new_commit(item.src.hash, parent,
                                   userline, now, None,
                                   userline, now, None, msg)
        return commit, item.src.hash
    commits = [c for d, c in git.rev_list(hex_src, repo_dir=repo)]
    commits.reverse()
    return append_commits(commits, item.spec.src, item.dest.hash,
                          cp, writer, opt)
def _mksubs(self):
    self._subs = {}
    refs = git.list_refs()
    for ref in refs:
        #debug2('ref name: %s\n' % ref[0])
        revs = git.rev_list(ref[1].encode('hex'))
        for (date, commit) in revs:
            #debug2('commit: %s date: %s\n' % (commit.encode('hex'), date))
            commithex = commit.encode('hex')
            containername = commithex[:2]
            dirname = commithex[2:]
            n1 = self._subs.get(containername)
            if not n1:
                n1 = CommitList(self, containername)
                self._subs[containername] = n1
            if n1.commits.get(dirname):
                # Stop work for this ref, the rest should already be present
                break
            n1.commits[dirname] = (commit, date)
def _mksubs(self):
    self._subs = {}
    revs = list(git.rev_list(self.hash.encode('hex'),
                             repo_dir=self._repo_dir))
    latest = revs[0]
    for (date, commit) in revs:
        l = time.localtime(date)
        ls = time.strftime('%Y-%m-%d-%H%M%S', l)
        commithex = commit.encode('hex')
        target = '../.commit/%s/%s' % (commithex[:2], commithex[2:])
        n1 = FakeSymlink(self, ls, target, self._repo_dir)
        n1.ctime = n1.mtime = date
        self._subs[ls] = n1
    (date, commit) = latest
    commithex = commit.encode('hex')
    target = '../.commit/%s/%s' % (commithex[:2], commithex[2:])
    n1 = FakeSymlink(self, 'latest', target, self._repo_dir)
    n1.ctime = n1.mtime = date
    self._subs['latest'] = n1
def _mksubs(self):
    self._subs = {}
    revs = list(git.rev_list(self.hash.encode('hex'),
                             format='%at',
                             parse=lambda f: int(f.readline().strip()),
                             repo_dir=self._repo_dir))
    latest = revs[0]
    for commithex, date in revs:
        l = time.localtime(date)
        ls = time.strftime('%Y-%m-%d-%H%M%S', l)
        target = '../.commit/%s/%s' % (commithex[:2], commithex[2:])
        n1 = FakeSymlink(self, ls, target, self._repo_dir)
        n1.ctime = n1.mtime = date
        self._subs[ls] = n1
    commithex, date = latest
    target = '../.commit/%s/%s' % (commithex[:2], commithex[2:])
    n1 = FakeSymlink(self, 'latest', target, self._repo_dir)
    n1.ctime = n1.mtime = date
    self._subs['latest'] = n1
def _mksubs(self):
    self._subs = {}
    refs = git.list_refs(repo_dir=self._repo_dir)
    for ref in refs:
        #debug2('ref name: %s\n' % ref[0])
        revs = git.rev_list(ref[1].encode('hex'),
                            format='%at',
                            parse=lambda f: int(f.readline().strip()),
                            repo_dir=self._repo_dir)
        for commithex, date in revs:
            #debug2('commit: %s date: %s\n' % (commit.encode('hex'), date))
            commit = commithex.decode('hex')
            containername = commithex[:2]
            dirname = commithex[2:]
            n1 = self._subs.get(containername)
            if not n1:
                n1 = CommitList(self, containername, self._repo_dir)
                self._subs[containername] = n1
            if n1.commits.get(dirname):
                # Stop work for this ref, the rest should already be present
                break
            n1.commits[dirname] = (commit, date)
                    %(kind, abs(period_utc), epoch_ymd))
            elif period_utc > 0:
                log('keeping %s since %d seconds after %s\n'
                    %(kind, period_utc, epoch_ymd))
            else:
                log('keeping %s since %s\n' % (kind, epoch_ymd))

git.check_repo_or_die()

# This could be more efficient, but for now just build the whole list
# in memory and let bup_rm() do some redundant work.

removals = []
for branch, branch_id in branches(roots):
    die_if_errors()
    saves = git.rev_list(branch_id.encode('hex'))
    for keep_save, (utc, id) in classify_saves(saves, period_start):
        assert(keep_save in (False, True))
        # FIXME: base removals on hashes
        if opt.pretend:
            print('+' if keep_save else '-', save_name(branch, utc))
        elif not keep_save:
            removals.append(save_name(branch, utc))

if not opt.pretend:
    die_if_errors()
    bup_rm(removals, compression=opt.compress, verbosity=opt.verbose)
    if opt.gc:
        die_if_errors()
        bup_gc(threshold=opt.gc_threshold, compression=opt.compress,
                log('keeping %s since %s\n' % (kind, epoch_ymd))

git.check_repo_or_die()

# This could be more efficient, but for now just build the whole list
# in memory and let bup_rm() do some redundant work.

def parse_info(f):
    author_secs = f.readline().strip()
    return int(author_secs)

removals = []
for branch, branch_id in branches(roots):
    die_if_errors()
    saves = ((utc, oidx.decode('hex')) for (oidx, utc)
             in git.rev_list(branch_id, format='%at', parse=parse_info))
    for keep_save, (utc, id) in classify_saves(saves, period_start):
        assert(keep_save in (False, True))
        # FIXME: base removals on hashes
        if opt.pretend:
            print('+' if keep_save else '-', save_name(branch, utc))
        elif not keep_save:
            removals.append(save_name(branch, utc))

if not opt.pretend:
    die_if_errors()
    repo = LocalRepo()
    bup_rm(repo, removals, compression=opt.compress, verbosity=opt.verbose)
    if opt.gc:
        die_if_errors()
        bup_gc(threshold=opt.gc_threshold,
                log('keeping %s since %s\n' % (kind, epoch_ymd))

git.check_repo_or_die()

# This could be more efficient, but for now just build the whole list
# in memory and let bup_rm() do some redundant work.

def parse_info(f):
    author_secs = f.readline().strip()
    return int(author_secs)

removals = []
for branch, branch_id in branches(roots):
    die_if_errors()
    saves = ((utc, oidx.decode('hex')) for (oidx, utc)
             in git.rev_list(branch_id, format='%at', parse=parse_info))
    for keep_save, (utc, id) in classify_saves(saves, period_start):
        assert(keep_save in (False, True))
        # FIXME: base removals on hashes
        if opt.pretend:
            print('+' if keep_save else '-', save_name(branch, utc))
        elif not keep_save:
            removals.append(save_name(branch, utc))

if not opt.pretend:
    die_if_errors()
    bup_rm(removals, compression=opt.compress, verbosity=opt.verbose)
    if opt.gc:
        die_if_errors()
        bup_gc(threshold=opt.gc_threshold, compression=opt.compress,
# in memory and let bup_rm() do some redundant work.

def parse_info(f):
    author_secs = f.readline().strip()
    return int(author_secs)

sys.stdout.flush()
out = byte_stream(sys.stdout)

removals = []
for branch, branch_id in branches(roots):
    die_if_errors()
    saves = ((utc, unhexlify(oidx)) for (oidx, utc)
             in git.rev_list(branch_id, format=b'%at', parse=parse_info))
    for keep_save, (utc, id) in classify_saves(saves, period_start):
        assert(keep_save in (False, True))
        # FIXME: base removals on hashes
        if opt.pretend:
            out.write((b'+ ' if keep_save else b'- ')
                      + save_name(branch, utc) + b'\n')
        elif not keep_save:
            removals.append(save_name(branch, utc))

if not opt.pretend:
    die_if_errors()
    repo = LocalRepo()
    bup_rm(repo, removals, compression=opt.compress, verbosity=opt.verbose)
    if opt.gc:
        die_if_errors()
def main(argv):
    o = options.Options(optspec)
    opt, flags, roots = o.parse_bytes(argv[1:])
    roots = [argv_bytes(x) for x in roots]

    if not opt.unsafe:
        o.fatal('refusing to run dangerous, experimental command without --unsafe')

    now = int(time()) if opt.wrt is None else opt.wrt
    if not isinstance(now, int_types):
        o.fatal('--wrt value ' + str(now) + ' is not an integer')

    period_start = {}
    for period, extent in (('all', opt.keep_all_for),
                           ('dailies', opt.keep_dailies_for),
                           ('monthlies', opt.keep_monthlies_for),
                           ('yearlies', opt.keep_yearlies_for)):
        if extent:
            secs = period_as_secs(extent.encode('ascii'))
            if not secs:
                o.fatal('%r is not a valid period' % extent)
            period_start[period] = now - secs

    if not period_start:
        o.fatal('at least one keep argument is required')

    period_start = defaultdict(lambda: float('inf'), period_start)

    if opt.verbose:
        epoch_ymd = strftime('%Y-%m-%d-%H%M%S', localtime(0))
        for kind in ['all', 'dailies', 'monthlies', 'yearlies']:
            period_utc = period_start[kind]
            if period_utc != float('inf'):
                if not (period_utc > float('-inf')):
                    log('keeping all ' + kind)
                else:
                    try:
                        when = strftime('%Y-%m-%d-%H%M%S',
                                        localtime(period_utc))
                        log('keeping ' + kind + ' since ' + when + '\n')
                    except ValueError as ex:
                        if period_utc < 0:
                            log('keeping %s since %d seconds before %s\n'
                                % (kind, abs(period_utc), epoch_ymd))
                        elif period_utc > 0:
                            log('keeping %s since %d seconds after %s\n'
                                % (kind, period_utc, epoch_ymd))
                        else:
                            log('keeping %s since %s\n' % (kind, epoch_ymd))

    git.check_repo_or_die()

    # This could be more efficient, but for now just build the whole list
    # in memory and let bup_rm() do some redundant work.

    def parse_info(f):
        author_secs = f.readline().strip()
        return int(author_secs)

    sys.stdout.flush()
    out = byte_stream(sys.stdout)

    removals = []
    for branch, branch_id in branches(roots):
        die_if_errors()
        saves = ((utc, unhexlify(oidx)) for (oidx, utc)
                 in git.rev_list(branch_id, format=b'%at', parse=parse_info))
        for keep_save, (utc, id) in classify_saves(saves, period_start):
            assert(keep_save in (False, True))
            # FIXME: base removals on hashes
            if opt.pretend:
                out.write((b'+ ' if keep_save else b'- ')
                          + save_name(branch, utc) + b'\n')
            elif not keep_save:
                removals.append(save_name(branch, utc))

    if not opt.pretend:
        die_if_errors()
        repo = LocalRepo()
        bup_rm(repo, removals, compression=opt.compress,
               verbosity=opt.verbose)
        if opt.gc:
            die_if_errors()
            bup_gc(threshold=opt.gc_threshold,
                   compression=opt.compress,
                   verbosity=opt.verbose)

    die_if_errors()
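# Standalone sketch (not bup code) of the defaultdict used in main() above:
# period kinds for which no keep_*_for extent was given are absent from
# period_start, so lookups fall back to +inf, presumably leaving no window
# of saves to keep for that kind.
from collections import defaultdict

period_start = defaultdict(lambda: float('inf'), {'dailies': 1600000000})
print(period_start['dailies'])    # 1600000000 (configured cutoff)
print(period_start['yearlies'])   # inf (kind was never configured)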