def write(self, annotatedresult, lines=None, existinglines=None):
    """Buffer one batch of annotate results as JSON objects and write it.

    annotatedresult: list of per-line result tuples; each output field is
    extracted by the accessor functions stored in self.funcmap entries
    (f, sep, name, enc).
    lines: optional list of line contents, emitted under the b'line' key.
    existinglines is accepted for interface compatibility but unused here.
    """
    # a previous write() call may already have emitted objects; emit the
    # separating comma first (driven by self.needcomma, set below)
    if annotatedresult:
        self._writecomma()
    # one (key, [values-per-line]) pair per output field
    pieces = [
        (name, pycompat.maplist(f, annotatedresult))
        for f, sep, name, enc in self.funcmap
    ]
    if lines is not None:
        pieces.append((b'line', lines))
    # sort by key name so the JSON output is deterministic
    pieces.sort()

    # every key but the last inside an object is followed by a comma
    seps = [b','] * len(pieces[:-1]) + [b'']

    result = b''
    lasti = len(annotatedresult) - 1
    for i in pycompat.xrange(len(annotatedresult)):
        result += b'\n {\n'
        for j, p in enumerate(pieces):
            k, vs = p
            result += b' "%s": %s%s\n' % (
                k,
                templatefilters.json(vs[i], paranoid=False),
                seps[j],
            )
        # no trailing comma after the very last object of this batch
        result += b' }%s' % (b'' if i == lasti else b',')
    if lasti >= 0:
        # at least one object was written; the next batch needs a comma
        self.needcomma = True

    self.ui.write(result)
def __init__(self, fctxs, path, ui=None, opts=None):
    """([fctx], path, ui or None, opts or None) -> None

    fctxs should be linear, and sorted by topo order - oldest first.
    fctxs[0] will be considered as "immutable" and will not be changed.
    """
    self.fctxs = fctxs
    self.path = path
    self.ui = ui or nullui()
    self.opts = opts or {}

    # following fields are built from fctxs. they exist for perf reason
    self.contents = [f.data() for f in fctxs]
    self.contentlines = pycompat.maplist(mdiff.splitnewlines, self.contents)
    self.linelog = self._buildlinelog()
    if self.ui.debugflag:
        # expensive sanity check: the linelog must reproduce every
        # original content; only run when debugging is on
        assert self._checkoutlinelog() == self.contents

    # following fields will be filled later
    self.chunkstats = [0, 0]  # [adopted, total : int]
    self.targetlines = []  # [str]
    self.fixups = []  # [(linelog rev, a1, a2, b1, b2)]
    self.finalcontents = []  # [str]
    self.ctxaffected = set()
def commonancestorsheads(self, a, b):
    """Return the nodes of the heads of the common ancestors of a and b."""
    validatenode(a)
    validatenode(b)

    # the ancestor computation works on revision numbers, so translate
    # both nodes first, then map the resulting revisions back to nodes
    reva = self.rev(a)
    revb = self.rev(b)
    headrevs = ancestor.commonancestorsheads(self.parentrevs, reva, revb)
    return pycompat.maplist(self.node, headrevs)
def testfilefixup(oldcontents, workingcopy, expectedcontents, fixups=None):
    """([str], str, [str], [(rev, a1, a2, b1, b2)]?) -> None

    workingcopy is a string where every character denotes a single line.
    oldcontents and expectedcontents are lists of strings; every character
    of every string denotes a single line.

    if fixups is not None, it is the expected fixups list and will be
    checked against the computed state.
    """
    # expand the single-char-per-line shorthand into real line contents
    oldcontents = insertreturns(oldcontents)
    workingcopy = insertreturns(workingcopy)
    expectedcontents = insertreturns(expectedcontents)

    state = absorb.filefixupstate(
        pycompat.maplist(simplefctx, oldcontents), 'path'
    )
    state.diffwith(simplefctx(workingcopy))
    if fixups is not None:
        assertlistequal(state.fixups, fixups)
    state.apply()
    assertlistequal(state.finalcontents, expectedcontents, removereturns)
def write(self, annotatedresult, lines=None, existinglines=None):
    """(annotateresult, [str], set([rev, linenum])) -> None. write output.
    annotateresult can be [(node, linenum, path)], or [(node, linenum)]
    """
    pieces = []  # [[str]]: rendered values, one inner list per field
    maxwidths = []  # [int]: column width per field, for padding

    # calculate padding
    for f, sep, name, enc in self.funcmap:
        l = [enc(f(x)) for x in annotatedresult]
        pieces.append(l)
        if name in [b'node', b'date']:
            # node and date has fixed size; one sample is enough to
            # compute the column width
            l = l[:1]
        widths = pycompat.maplist(encoding.colwidth, set(l))
        maxwidth = max(widths) if widths else 0
        maxwidths.append(maxwidth)

    # buffered output
    result = b''
    for i in pycompat.xrange(len(annotatedresult)):
        for j, p in enumerate(pieces):
            sep = self.funcmap[j][1]
            # right-align the value within its column
            padding = b' ' * (maxwidths[j] - len(p[i]))
            result += sep + padding + p[i]
        if lines:
            if existinglines is None:
                result += b': ' + lines[i]
            else:
                # extra formatting showing whether a line exists
                key = (annotatedresult[i][0], annotatedresult[i][1])
                if key in existinglines:
                    result += b': ' + lines[i]
                else:
                    # mark lines absent from existinglines as deleted
                    result += b': ' + self.ui.label(
                        b'-' + lines[i], b'diff.deleted'
                    )
        if result[-1:] != b'\n':
            result += b'\n'

    self.ui.write(result)
def revstr(r):
    """Render a CVS revision tuple of ints as bytes, e.g. (1, 2) -> b'1.2'.

    This is needed because logentry.revision is a tuple of "int"
    (e.g. (1, 2) for "1.2").
    """
    # the separator must be bytes (b'.'), not str: the components are
    # rendered as bytes, and joining bytes with a str separator raises
    # TypeError on Python 3
    return b'.'.join(b'%d' % part for part in r)
def commonancestorsheads(self, node1, node2):
    """Return the nodes that are heads of the common ancestors of the
    revisions identified by node1 and node2."""
    # resolve both nodes to revision numbers before running the
    # ancestor computation, then map the result back to nodes
    rev1, rev2 = self.rev(node1), self.rev(node2)
    headrevs = ancestor.commonancestorsheads(self.parentrevs, rev1, rev2)
    return pycompat.maplist(self.node, headrevs)
def _dotransplant(ui, repo, *revs, **opts):
    """Core of the transplant command: pick changesets from another
    branch or repository and apply them on top of the working parent.

    revs: revision identifiers to transplant; opts: command options
    (continue/stop/source/branch/all/merge/prune/filter/log, ...).
    """

    def incwalk(repo, csets, match=util.always):
        # yield the members of csets accepted by the match function
        for node in csets:
            if match(node):
                yield node

    def transplantwalk(repo, dest, heads, match=util.always):
        """Yield all nodes that are ancestors of a head but not ancestors
        of dest.
        If no heads are specified, the heads of repo will be used."""
        if not heads:
            heads = repo.heads()
        ancestors = []
        ctx = repo[dest]
        for head in heads:
            ancestors.append(ctx.ancestor(repo[head]).node())
        for node in repo.changelog.nodesbetween(ancestors, heads)[0]:
            if match(node):
                yield node

    def checkopts(opts, revs):
        # validate option combinations before doing any work
        if opts.get(b'continue'):
            cmdutil.check_incompatible_arguments(
                opts, b'continue', [b'branch', b'all', b'merge']
            )
            return
        if opts.get(b'stop'):
            cmdutil.check_incompatible_arguments(
                opts, b'stop', [b'branch', b'all', b'merge']
            )
            return
        if not (
            opts.get(b'source')
            or revs
            or opts.get(b'merge')
            or opts.get(b'branch')
        ):
            raise error.Abort(
                _(
                    b'no source URL, branch revision, or revision '
                    b'list provided'
                )
            )
        if opts.get(b'all'):
            if not opts.get(b'branch'):
                raise error.Abort(_(b'--all requires a branch revision'))
            if revs:
                raise error.Abort(
                    _(b'--all is incompatible with a revision list')
                )

    opts = pycompat.byteskwargs(opts)
    checkopts(opts, revs)

    if not opts.get(b'log'):
        # deprecated config: transplant.log
        opts[b'log'] = ui.config(b'transplant', b'log')
    if not opts.get(b'filter'):
        # deprecated config: transplant.filter
        opts[b'filter'] = ui.config(b'transplant', b'filter')
    tp = transplanter(ui, repo, opts)

    p1 = repo.dirstate.p1()
    if len(repo) > 0 and p1 == nullid:
        raise error.Abort(_(b'no revision checked out'))
    if opts.get(b'continue'):
        if not tp.canresume():
            raise error.StateError(_(b'no transplant to continue'))
    elif opts.get(b'stop'):
        if not tp.canresume():
            raise error.StateError(_(b'no interrupted transplant found'))
        return tp.stop(ui, repo)
    else:
        # starting a fresh transplant: refuse if another operation is in
        # progress or the working directory is dirty
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)

    sourcerepo = opts.get(b'source')
    if sourcerepo:
        # remote source: fetch the candidate changesets into a bundle repo
        peer = hg.peer(repo, opts, ui.expandpath(sourcerepo))
        heads = pycompat.maplist(peer.lookup, opts.get(b'branch', ()))
        target = set(heads)
        for r in revs:
            try:
                target.add(peer.lookup(r))
            except error.RepoError:
                # revision may not exist remotely; ignore and continue
                pass
        source, csets, cleanupfn = bundlerepo.getremotechanges(
            ui, repo, peer, onlyheads=sorted(target), force=True
        )
    else:
        # local source: transplant within the current repository
        source = repo
        heads = pycompat.maplist(source.lookup, opts.get(b'branch', ()))
        cleanupfn = None

    try:
        if opts.get(b'continue'):
            tp.resume(repo, source, opts)
            return

        tf = tp.transplantfilter(repo, source, p1)
        if opts.get(b'prune'):
            prune = {
                source[r].node()
                for r in scmutil.revrange(source, opts.get(b'prune'))
            }
            matchfn = lambda x: tf(x) and x not in prune
        else:
            matchfn = tf
        merges = pycompat.maplist(source.lookup, opts.get(b'merge', ()))
        revmap = {}
        if revs:
            # explicit revision list takes precedence
            for r in scmutil.revrange(source, revs):
                revmap[int(r)] = source[r].node()
        elif opts.get(b'all') or not merges:
            if source != repo:
                alltransplants = incwalk(source, csets, match=matchfn)
            else:
                alltransplants = transplantwalk(
                    source, p1, heads, match=matchfn
                )
            if opts.get(b'all'):
                revs = alltransplants
            else:
                # interactively let the user pick revisions
                revs, newmerges = browserevs(ui, source, alltransplants, opts)
                merges.extend(newmerges)
        for r in revs:
            revmap[source.changelog.rev(r)] = r
        for r in merges:
            revmap[source.changelog.rev(r)] = r

        tp.apply(repo, source, revmap, merges, opts)
    finally:
        # tear down the temporary bundle repository, if one was created
        if cleanupfn:
            cleanupfn()
def removereturns(x):
    """The inverse of insertreturns: strip every newline byte.

    Accepts either a bytes object or an (arbitrarily nested) list of
    bytes, mirroring the structure handled by insertreturns.
    """
    if isinstance(x, bytes):
        return x.replace(b'\n', b'')
    # a plain list comprehension is the idiomatic (py3) equivalent of
    # the pycompat.maplist compatibility shim used elsewhere
    return [removereturns(item) for item in x]
def insertreturns(x):
    """Insert a newline byte after each single character.

    Accepts either a bytes object or an (arbitrarily nested) list of
    bytes; lists are processed recursively, element by element.
    """
    if isinstance(x, bytes):
        # slice one byte at a time: x[i:i+1] yields bytes on Python 3
        # (plain iteration over bytes would yield ints)
        return b''.join(x[i : i + 1] + b'\n' for i in range(len(x)))
    # idiomatic replacement for the pycompat.maplist compatibility shim
    return [insertreturns(item) for item in x]