def __init__(self, ui, repo, opts):
    self.ui = ui
    self.opts = opts

    if ui.quiet:
        datefunc = dateutil.shortdate
    else:
        datefunc = dateutil.datestr
    datefunc = util.cachefunc(datefunc)
    getctx = util.cachefunc(lambda x: repo[x[0]])
    hexfunc = self._hexfunc

    # special handling working copy "changeset" and "rev" functions
    if self.opts.get(b'rev') == b'wdir()':
        orig = hexfunc
        hexfunc = lambda x: None if x is None else orig(x)
        wnode = hexfunc(repo[b'.'].node()) + b'+'
        wrev = b'%d' % repo[b'.'].rev()
        wrevpad = b''
        if not opts.get(b'changeset'):  # only show + if changeset is hidden
            wrev += b'+'
            wrevpad = b' '
        revenc = lambda x: wrev if x is None else (b'%d' % x) + wrevpad

        def csetenc(x):
            if x is None:
                return wnode
            return pycompat.bytestr(x) + b' '

    else:
        revenc = csetenc = pycompat.bytestr

    # opt name, separator, raw value (for json/plain), encoder (for plain)
    opmap = [
        (b'user', b' ', lambda x: getctx(x).user(), ui.shortuser),
        (b'number', b' ', lambda x: getctx(x).rev(), revenc),
        (b'changeset', b' ', lambda x: hexfunc(x[0]), csetenc),
        (b'date', b' ', lambda x: getctx(x).date(), datefunc),
        (b'file', b' ', lambda x: x[2], pycompat.bytestr),
        (b'line_number', b':', lambda x: x[1] + 1, pycompat.bytestr),
    ]
    fieldnamemap = {b'number': b'rev', b'changeset': b'node'}
    funcmap = [
        (get, sep, fieldnamemap.get(op, op), enc)
        for op, sep, get, enc in opmap
        if opts.get(op)
    ]
    # no separator for first column
    funcmap[0] = list(funcmap[0])
    funcmap[0][1] = b''
    self.funcmap = funcmap
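# The constructor above only builds self.funcmap: one (getter, separator,
# field name, encoder) tuple per requested output column.  Below is a rough,
# self-contained sketch of how such a table could drive per-line formatting.
# The sample funcmap, the `item` triple and the consuming loop are invented
# for illustration; they are not the class's real display logic.
funcmap_sketch = [
    (lambda x: x[0], b'', b'rev', lambda r: b'%d' % r),        # rev column
    (lambda x: x[2], b' ', b'path', lambda p: p),              # file column
    (lambda x: x[1] + 1, b':', b'line', lambda n: b'%d' % n),  # line number
]
item = (42, 7, b'foo.c')   # (rev, zero-based line index, path) -- made up
pieces = [sep + enc(get(item)) for get, sep, name, enc in funcmap_sketch]
print(b''.join(pieces))    # b'42 foo.c:8'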
def _get_history_1_3(self, repo, pats, opts, limit):
    if self.repos.version_info > (1, 3, 999):
        changefn = lambda r: repo[r]
    else:
        changefn = lambda r: repo[r].changeset()
    get = cachefunc(changefn)
    if self.isfile:
        fncache = {}
    chgiter, matchfn = cmdutil.walkchangerevs(self.repos.ui, repo, pats,
                                              get, opts)
    # keep one lookahead entry so that we can detect renames
    path = self.path
    entry = None
    count = 1
    for st, rev, fns in chgiter:
        if st == 'add' and self.isfile:
            fncache[rev] = fns[0]
        elif st == 'iter':
            if self.isfile and entry:
                path = fncache[rev]
                if path != entry[0]:
                    entry = entry[0:2] + (Changeset.COPY,)
            if entry:
                count += 1
                yield entry
            n = repo.changelog.node(rev)
            entry = (path, self.repos.hg_display(n), Changeset.EDIT)
    if entry:
        if count < limit:
            entry = entry[0:2] + (Changeset.ADD,)
        yield entry
def repo_bugids(ui, repo):
    def addbugids(bugids, ctx):
        lns = ctx.description().splitlines()
        for ln in lns:
            m = bug_check.match(ln)
            if m:
                b = int(m.group(1))
                if not b in bugids:
                    bugids[b] = ctx.rev()

    # Should cache this, eventually
    bugids = {}                         # bugid -> rev
    opts = {'rev': ['0:tip']}
    ui.debug("Gathering bugids ...\n")
    try:
        nop = lambda c, fns: None
        iter = cmdutil.walkchangerevs(repo, _matchall(repo), opts, nop)
        for ctx in iter:
            addbugids(bugids, ctx)
    except (AttributeError, TypeError):
        # AttributeError: matchall does not exist in hg < 1.1
        # TypeError: walkchangerevs args differ in hg <= 1.3.1
        get = util.cachefunc(lambda r: repo.changectx(r).changeset())
        changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, [], get, opts)
        for st, rev, fns in changeiter:
            if st == 'add':
                node = repo.changelog.node(rev)
                addbugids(bugids, context.changectx(repo, node))
    return bugids
def tipmost_shared(head, outnodes):
    '''Return the tipmost node on the same branch as head that is not
    in outnodes.

    We walk from head to the bottom of the workspace (revision 0)
    collecting nodes not in outnodes during the add phase and return
    the first node we see in the iter phase that was previously
    collected.

    See the docstring of mercurial.cmdutil.walkchangerevs() for the
    phased approach to the iterator returned.  The important part to
    note is that the 'add' phase gathers nodes, which the 'iter'
    phase then iterates through.'''

    get = util.cachefunc(lambda r: self.repo.changectx(r).changeset())
    changeiter = cmdutil.walkchangerevs(self.repo.ui, self.repo, [], get,
                                        {'rev': ['%s:0' % head],
                                         'follow': True})[0]
    seen = []
    for st, rev, fns in changeiter:
        n = self.repo.changelog.node(rev)
        if st == 'add':
            if n not in outnodes:
                seen.append(n)
        elif st == 'iter':
            if n in seen:
                return rev
    return None
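# Several snippets here consume the pre-1.4 cmdutil.walkchangerevs()
# protocol, which first yields ('add', rev, fns) tuples while gathering a
# window of revisions and then replays them as ('iter', rev, None) tuples in
# display order.  The toy generator below is a hedged, self-contained sketch
# of that two-phase shape only; fake_walk and its sample data are invented
# and this is not Mercurial's implementation.
def fake_walk(revs):
    window = list(revs)
    for rev in window:                  # gathering ('add') phase
        yield 'add', rev, ['file-%d' % rev]
    for rev in reversed(window):        # display ('iter') phase
        yield 'iter', rev, None

seen = []
for st, rev, fns in fake_walk(range(4)):
    if st == 'add':
        seen.append(rev)                # collect during the 'add' phase
    elif st == 'iter' and rev in seen:
        print('display rev %d' % rev)   # act during the 'iter' phase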
def bases(self):
    '''Find the bases that in combination define the "old" side of a
    recommitted set of changes, based on AL'''

    get = util.cachefunc(lambda r: self.ws.repo.changectx(r).changeset())
    # We don't rebuild the AL, so the AL local tip is the old tip
    revrange = "%s:%s" % (self.base.rev(), self.localtip.rev())
    changeiter = cmdutil.walkchangerevs(self.ws.repo.ui, self.ws.repo, [],
                                        get, {'rev': [revrange]})[0]
    hold = []
    ret = []
    alrevs = [x.rev() for x in self.revs]
    for st, rev, fns in changeiter:
        n = self.ws.repo.changelog.node(rev)
        if st == 'add':
            if rev in alrevs:
                hold.append(n)
        elif st == 'iter':
            if n not in hold:
                continue
            p = self.ws.repo.changelog.parents(n)
            if p[1] != node.nullid:
                continue
            if self.ws.repo.changectx(p[0]).rev() not in alrevs:
                ret.append(n)
    return ret
def _walkctxs(self, base, head, follow=False, pick=None):
    '''Generate changectxs between BASE and HEAD.

    Walk changesets between BASE and HEAD (in the order implied by
    their relation), following a given branch if FOLLOW is a true
    value, yielding changectxs where PICK (if specified) returns a
    true value.

    PICK is a function of one argument, a changectx.'''

    opts = {'rev': ['%s:%s' % (base.rev(), head.rev())],
            'follow': follow}

    changectx = self.repo.changectx
    getcset = util.cachefunc(lambda r: changectx(r).changeset())

    #
    # See the docstring of mercurial.cmdutil.walkchangerevs() for
    # the phased approach to the iterator returned.  The important
    # part to note is that the 'add' phase gathers nodes, which
    # the 'iter' phase then iterates through.
    #
    changeiter = cmdutil.walkchangerevs(self.ui, self.repo, [], getcset,
                                        opts)[0]

    matched = {}
    for st, rev, fns in changeiter:
        if st == 'add':
            ctx = changectx(rev)
            if not pick or pick(ctx):
                matched[rev] = ctx
        elif st == 'iter':
            if rev in matched:
                yield matched[rev]
def countrate(ui, repo, amap, *pats, **opts):
    """Calculate stats"""
    if opts.get('dateformat'):
        def getkey(ctx):
            t, tz = ctx.date()
            date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
            return date.strftime(opts['dateformat'])
    else:
        tmpl = opts.get('template', '{author|email}')
        tmpl = maketemplater(ui, repo, tmpl)
        def getkey(ctx):
            ui.pushbuffer()
            tmpl.show(ctx)
            return ui.popbuffer()

    count = pct = 0
    rate = {}
    df = False
    if opts.get('date'):
        df = util.matchdate(opts['date'])

    get = util.cachefunc(lambda r: repo[r].changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
    for st, rev, fns in changeiter:
        if not st == 'add':
            continue
        if df and not df(get(rev)[2][0]):  # doesn't match date format
            continue

        ctx = repo[rev]
        key = getkey(ctx)
        key = amap.get(key, key)  # alias remap
        if opts.get('changesets'):
            rate[key] = rate.get(key, 0) + 1
        else:
            parents = ctx.parents()
            if len(parents) > 1:
                ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
                continue

            ctx1 = parents[0]
            lines = changedlines(ui, repo, ctx1, ctx, fns)
            rate[key] = rate.get(key, 0) + lines

        if opts.get('progress'):
            count += 1
            newpct = int(100.0 * count / max(len(repo), 1))
            if pct < newpct:
                pct = newpct
                ui.write("\r" + _("generating stats: %d%%") % pct)
                sys.stdout.flush()

    if opts.get('progress'):
        ui.write("\r")
        sys.stdout.flush()

    return rate
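# countrate() returns a plain dict mapping a key (author or formatted date)
# to a changeset or changed-line count.  A minimal, hypothetical consumer of
# that result; the sample dict below stands in for a real return value.
rate = {'alice@example.com': 120, 'bob@example.com': 45}   # stand-in result
for key, value in sorted(rate.items(), key=lambda kv: kv[1], reverse=True):
    print('%-30s %6d' % (key, value))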
def __init__(self, ui, repo, opts):
    self.ui = ui
    self.opts = opts

    if ui.quiet:
        datefunc = util.shortdate
    else:
        datefunc = util.datestr
    datefunc = util.cachefunc(datefunc)
    getctx = util.cachefunc(lambda x: repo[x[0]])
    hexfunc = self._hexfunc

    # special handling working copy "changeset" and "rev" functions
    if self.opts.get('rev') == 'wdir()':
        orig = hexfunc
        hexfunc = lambda x: None if x is None else orig(x)
        wnode = hexfunc(repo[None].p1().node()) + '+'
        wrev = str(repo[None].p1().rev())
        wrevpad = ''
        if not opts.get('changeset'):  # only show + if changeset is hidden
            wrev += '+'
            wrevpad = ' '
        revenc = lambda x: wrev if x is None else str(x) + wrevpad
        csetenc = lambda x: wnode if x is None else str(x) + ' '
    else:
        revenc = csetenc = str

    # opt name, separator, raw value (for json/plain), encoder (for plain)
    opmap = [('user', ' ', lambda x: getctx(x).user(), ui.shortuser),
             ('number', ' ', lambda x: getctx(x).rev(), revenc),
             ('changeset', ' ', lambda x: hexfunc(x[0]), csetenc),
             ('date', ' ', lambda x: getctx(x).date(), datefunc),
             ('file', ' ', lambda x: x[2], str),
             ('line_number', ':', lambda x: x[1] + 1, str)]
    fieldnamemap = {'number': 'rev', 'changeset': 'node'}
    funcmap = [(get, sep, fieldnamemap.get(op, op), enc)
               for op, sep, get, enc in opmap
               if opts.get(op)]
    # no separator for first column
    funcmap[0] = list(funcmap[0])
    funcmap[0][1] = ''
    self.funcmap = funcmap
def jcheck(ui, repo, **opts):
    """check changesets against JDK standards"""
    ui.debug("jcheck repo=%s opts=%s\n" % (repo.path, opts))

    repocompat(repo)
    if not repo.local():
        raise error_Abort("repository '%s' is not local" % repo.path)

    if not os.path.exists(os.path.join(repo.root, ".jcheck")):
        ui.status("jcheck not enabled (no .jcheck in repository root)\n")
        return Pass

    if len(opts["rev"]) == 0:
        opts["rev"] = ["tip"]

    strict = opts.has_key("strict") and opts["strict"]
    lax = opts.has_key("lax") and opts["lax"]
    if strict:
        lax = False

    ch = checker(ui, repo, strict, lax)
    ch.check_repo()

    try:
        nop = lambda c, fns: None
        iter = cmdutil.walkchangerevs(repo, _matchall(repo), opts, nop)
        for ctx in iter:
            ch.check(ctx.rev(), ctx.node())
    except (AttributeError, TypeError):
        # AttributeError: matchall does not exist in hg < 1.1
        # TypeError: walkchangerevs args differ in hg <= 1.3.1
        get = util.cachefunc(lambda r: repo.changectx(r).changeset())
        changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, [], get, opts)
        if ui.debugflag:
            displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
        for st, rev, fns in changeiter:
            if st == 'add':
                node = repo.changelog.node(rev)
                if ui.debugflag:
                    displayer.show(rev, node, copies=False)
                ch.check(rev, node)
            elif st == 'iter':
                if ui.debugflag:
                    displayer.flush(rev)

    if ch.rv == Fail:
        ui.status("\n")
    return ch.rv
def filtered_log_generator(repo, pats, opts):
    """Fill view model iteratively

    repo - Mercurial repository object
    pats - list of file names or patterns
    opts - command line options for log command
    """
    # Log searches: pattern, keyword, date, etc
    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    stack = []
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(repo.ui, repo, pats,
                                                 get, opts)
    for st, rev, fns in changeiter:
        if st == "iter":
            if stack:
                yield stack.pop()
            continue
        if st != "add":
            continue

        parents = __get_parents(repo, rev)
        if opts["no_merges"] and len(parents) == 2:
            continue
        if opts["only_merges"] and len(parents) != 2:
            continue

        if df:
            changes = get(rev)
            if not df(changes[2][0]):
                continue

        # TODO: add copies/renames later
        if opts["keyword"]:
            changes = get(rev)
            miss = 0
            for k in [kw.lower() for kw in opts["keyword"]]:
                if not (k in changes[1].lower() or
                        k in changes[4].lower() or
                        k in " ".join(changes[3]).lower()):
                    miss = 1
                    break
            if miss:
                continue

        stack.append((rev, (0, 0), [], parents))
class WorkSpace(object):

    def __init__(self, repository):
        self.repo = repository
        self.ui = self.repo.ui
        self.name = self.repo.root

        self.activecache = {}

    def parent(self, spec=None):
        '''Return the canonical workspace parent, either SPEC (which
        will be expanded) if provided or the default parent
        otherwise.'''

        if spec:
            return self.ui.expandpath(spec)

        p = self.ui.expandpath('default')
        if p == 'default':
            return None
        else:
            return p

    def _localtip(self, outgoing, wctx):
        '''Return the most representative changeset to act as the
        localtip.

        If the working directory is modified (has file changes, is a
        merge, or has switched branches), this will be a workingctx.

        If the working directory is unmodified, this will be the most
        recent (highest revision number) local (outgoing) head on the
        current branch, if no heads are determined to be outgoing, it
        will be the most recent head on the current branch.
        '''

        #
        # A modified working copy is seen as a proto-branch, and thus
        # our only option as the local tip.
        #
        if (wctx.files() or len(wctx.parents()) > 1 or
            wctx.branch() != wctx.parents()[0].branch()):
            return wctx

        heads = self.repo.heads(start=wctx.parents()[0].node())
        headctxs = [self.repo.changectx(n) for n in heads]
        localctxs = [c for c in headctxs if c.node() in outgoing]

        ltip = sorted(localctxs or headctxs, key=lambda x: x.rev())[-1]

        if len(heads) > 1:
            self.ui.warn('The current branch has more than one head, '
                         'using %s\n' % ltip.rev())

        return ltip

    def _parenttip(self, heads, outgoing):
        '''Return the highest-numbered, non-outgoing changeset that is
        an ancestor of a changeset in heads.

        This is intended to find the most recent changeset on a given
        branch that is shared between a parent and child workspace,
        such that it can act as a stand-in for the parent workspace.
        '''

        def tipmost_shared(head, outnodes):
            '''Return the tipmost node on the same branch as head that
            is not in outnodes.

            We walk from head to the bottom of the workspace (revision
            0) collecting nodes not in outnodes during the add phase
            and return the first node we see in the iter phase that
            was previously collected.

            If no node is found (all revisions >= 0 are outgoing), the
            only possible parenttip is the null node (node.nullid)
            which is returned explicitly.

            See the docstring of mercurial.cmdutil.walkchangerevs()
            for the phased approach to the iterator returned.  The
            important part to note is that the 'add' phase gathers
            nodes, which the 'iter' phase then iterates through.'''

            opts = {'rev': ['%s:0' % head.rev()],
                    'follow': True}
            get = util.cachefunc(
                lambda r: self.repo.changectx(r).changeset())
            changeiter = cmdutil.walkchangerevs(self.repo.ui, self.repo, [],
                                                get, opts)[0]

            seen = []
            for st, rev, fns in changeiter:
                n = self.repo.changelog.node(rev)
                if st == 'add':
                    if n not in outnodes:
                        seen.append(n)
                elif st == 'iter':
                    if n in seen:
                        return rev

            return self.repo.changelog.rev(node.nullid)

        nodes = set(outgoing)
        ptips = map(lambda x: tipmost_shared(x, nodes), heads)
        return self.repo.changectx(sorted(ptips)[-1])

    def status(self, base='.', head=None):
        '''Translate from the hg 6-tuple status format to a hash keyed
        on change-type'''
        states = ['modified', 'added', 'removed', 'deleted', 'unknown',
                  'ignored']
        chngs = self.repo.status(base, head)
        return dict(zip(states, chngs))

    def findoutgoing(self, parent):
        '''Return the base set of outgoing nodes.

        A caching wrapper around mercurial.localrepo.findoutgoing().
        Complains (to the user), if the parent workspace is
        non-existent or inaccessible'''

        self.ui.pushbuffer()
        try:
            try:
                ui = self.ui
                if hasattr(cmdutil, 'remoteui'):
                    ui = cmdutil.remoteui(ui, {})
                pws = hg.repository(ui, parent)
                return self.repo.findoutgoing(pws)
            except HgRepoError:
                self.ui.warn("Warning: Parent workspace '%s' is not "
                             "accessible\n"
                             "active list will be incomplete\n\n" % parent)
                return []
        finally:
            self.ui.popbuffer()
    findoutgoing = util.cachefunc(findoutgoing)

    def modified(self):
        '''Return a list of files modified in the workspace'''
        wctx = self.workingctx()
        return sorted(wctx.files() + wctx.deleted()) or None

    def merged(self):
        '''Return boolean indicating whether the workspace has an
        uncommitted merge'''
        wctx = self.workingctx()
        return len(wctx.parents()) > 1

    def branched(self):
        '''Return boolean indicating whether the workspace has an
        uncommitted named branch'''
        wctx = self.workingctx()
        return wctx.branch() != wctx.parents()[0].branch()

    def active(self, parent=None):
        '''Return an ActiveList describing changes between workspace
        and parent workspace (including uncommitted changes).  If
        workspace has no parent ActiveList will still describe any
        uncommitted changes'''

        parent = self.parent(parent)
        if parent in self.activecache:
            return self.activecache[parent]

        if parent:
            outgoing = self.findoutgoing(parent)
            outnodes = self.repo.changelog.nodesbetween(outgoing)[0]
        else:
            outgoing = []       # No parent, no outgoing nodes
            outnodes = []

        localtip = self._localtip(outnodes, self.workingctx())

        if localtip.rev() is None:
            heads = localtip.parents()
        else:
            heads = [localtip]

        ctxs = [self.repo.changectx(n) for n in
                self.repo.changelog.nodesbetween(outgoing,
                                                 [h.node() for h in heads])[0]]

        if localtip.rev() is None:
            ctxs.append(localtip)

        act = ActiveList(self, self._parenttip(heads, outnodes), ctxs)

        self.activecache[parent] = act
        return act

    def pdiff(self, pats, opts, parent=None):
        'Return diffs relative to PARENT, as best as we can make out'

        parent = self.parent(parent)
        act = self.active(parent)

        #
        # act.localtip maybe nil, in the case of uncommitted local
        # changes.
        #
        if not act.revs:
            return

        matchfunc = cmdutil.match(self.repo, pats, opts)
        opts = patch.diffopts(self.ui, opts)

        return self.diff(act.parenttip.node(), act.localtip.node(),
                         match=matchfunc, opts=opts)

    def squishdeltas(self, active, message, user=None):
        '''Create a single conglomerate changeset based on a given
        active list.  Removes the original changesets comprising the
        given active list, and any tags pointing to them.

        Operation:

          - Commit an activectx object representing the specified
            active list,

          - Remove any local tags pointing to changesets in the
            specified active list.

          - Remove the changesets comprising the specified active
            list.

          - Remove any metadata that may refer to changesets that were
            removed.

        Calling code is expected to hold both the working copy lock
        and repository lock of the destination workspace
        '''

        def strip_local_tags(active):
            '''Remove any local tags referring to the specified
            nodes.'''

            if os.path.exists(self.repo.join('localtags')):
                fh = None
                try:
                    fh = self.repo.opener('localtags')
                    tags = active.prune_tags(fh)
                    fh.close()

                    fh = self.repo.opener('localtags', 'w', atomictemp=True)
                    fh.writelines(tags)
                    fh.rename()
                finally:
                    if fh and not fh.closed:
                        fh.close()

        if active.files():
            for entry in active:
                #
                # Work around Mercurial issue #1666, if the source
                # file of a rename exists in the working copy
                # Mercurial will complain, and remove the file.
                #
                # We preemptively remove the file to avoid the
                # complaint (the user was asked about this in
                # cdm_recommit)
                #
                if entry.is_renamed():
                    path = self.repo.wjoin(entry.parentname)
                    if os.path.exists(path):
                        os.unlink(path)

            self.repo.commitctx(active.context(message, user))
            wsstate = "recommitted"
            destination = self.repo.changelog.tip()
        else:
            #
            # If all we're doing is stripping the old nodes, we want
            # to update the working copy such that we're not at a
            # revision that's about to go away.
            #
            wsstate = "tip"
            destination = active.parenttip.node()

        self.clean(destination)

        #
        # Tags were elided by the activectx object.  Local tags,
        # however, must be removed manually.
        #
        try:
            strip_local_tags(active)
        except EnvironmentError, e:
            raise util.Abort('Could not recommit tags: %s\n' % e)

        # Silence all the strip and update fun
        self.ui.pushbuffer()

        #
        # Remove the active lists component changesets by stripping
        # the base of any active branch (of which there may be
        # several)
        #
        try:
            try:
                for base in active.bases():
                    #
                    # Any cached information about the repository is
                    # likely to be invalid during the strip.  The
                    # caching of branch tags is especially
                    # problematic.
                    #
                    self.repo.invalidate()
                    repair.strip(self.ui, self.repo, base.node(),
                                 backup=False)
            except:
                #
                # If this fails, it may leave us in a surprising place
                # in the history.
                #
                # We want to warn the user that something went wrong,
                # and what will happen next, re-raise the exception,
                # and bring the working copy back into a consistent
                # state (which the finally block will do)
                #
                self.ui.warn("stripping failed, your workspace will have "
                             "superfluous heads.\n"
                             "your workspace has been updated to the "
                             "%s changeset.\n" % wsstate)
                raise               # Re-raise the exception
        finally:
            self.clean()
            self.repo.dirstate.write()      # Flush the dirstate
            self.repo.invalidate()          # Invalidate caches

        #
        # We need to remove Hg's undo information (used for rollback),
        # since it refers to data that will probably not exist after
        # the strip.
        #
        if os.path.exists(self.repo.sjoin('undo')):
            try:
                os.unlink(self.repo.sjoin('undo'))
            except EnvironmentError, e:
                raise util.Abort('failed to remove undo data: %s\n' % e)

        self.ui.popbuffer()
def blame_trail(origfn, ui, repo, *pats, **opts):
    trail_line = opts['trail']
    if trail_line == -1:
        return origfn(ui, repo, *pats, **opts)

    if not ui.interactive():
        raise util.Abort(_("blame trail must be used in interactive mode.\n"
                           "If you're using the pager extension, use --pager "
                           "off or create a non paged alias for blametrail."))

    # have to use line_number for trail
    opts['line_number'] = True

    if opts.get('follow'):
        # --follow is deprecated and now just an alias for -f/--file
        # to mimic the behavior of Mercurial before version 1.5
        opts['file'] = True

    datefunc = ui.quiet and util.shortdate or util.datestr
    getdate = util.cachefunc(lambda x: datefunc(x[0].date()))

    if not pats:
        raise util.Abort(_('at least one filename or pattern is required'))

    hexfn = ui.debugflag and hex or short

    opmap = [('user', ' ', lambda x: ui.shortuser(x[0].user())),
             ('number', ' ', lambda x: str(x[0].rev())),
             ('changeset', ' ', lambda x: hexfn(x[0].node())),
             ('date', ' ', getdate),
             ('file', ' ', lambda x: x[0].path()),
             ('line_number', ':', lambda x: str(x[1])),
             ]

    if (not opts.get('user') and not opts.get('changeset')
            and not opts.get('date') and not opts.get('file')):
        opts['number'] = True

    linenumber = opts.get('line_number') is not None
    if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
        raise util.Abort(_('at least one of -n/-c is required for -l'))

    funcmap = [(func, sep) for op, sep, func in opmap if opts.get(op)]
    funcmap[0] = (funcmap[0][0], '')  # no separator in front of first column

    def bad(x, y):
        raise util.Abort("%s: %s" % (x, y))

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, pats, opts)
    m.bad = bad
    follow = not opts.get('no_follow')
    diffopts = patch.diffopts(ui, opts, section='annotate')
    files = list(ctx.walk(m))
    # todo what fails this assertion? original code assumed more than one file.
    assert len(files) == 1
    fctx = ctx[files[0]]
    if not opts.get('text') and util.binary(fctx.data()):
        ui.write(_("%s: binary file\n")
                 % ((pats and m.rel(files[0])) or files[0]))
        return

    lines = fctx.annotate(follow=follow, linenumber=linenumber,
                          diffopts=diffopts)
    metadata, line_contents = lines[trail_line - 1]
    original_rev = metadata[0].rev()
    original_line = metadata[1]

    context = opts['context']
    if context != -1:
        line_s = trail_line - 1 - context
        line_e = trail_line - 1 + context + 1
        display_lines = lines[line_s:line_e]
        print "lines %i±%i:" % (trail_line, context)
        for ((l, lineno), line) in display_lines:
            print "%s: %s: %s" % (l.rev(), lineno, line),
        print

    rev = original_rev
    line = original_line

    # print the summary of the diff
    mercurial.commands.log(ui, repo, *pats, rev=[rev], follow=True, date=None)

    # now look at just the hunk with this line
    show_hunk(ui, repo, *pats, patch=True, rev=[rev], follow=True, date=None,
              line=line)

    ctx = scmutil.revsingle(repo, rev)
    parents = ctx.parents()
    assert len(parents) == 1
    parent = parents[0].rev()
    ui.write("parent is %s\n" % parent)

    line = ui.prompt("Line number for next iteration", None)
    if line:
        opts['trail'] = int(line)
        opts['rev'] = str(parent)
        # recurse until we overflow the stack or run out of history :)
        # santa(parent, line, context, filename)
        blame_trail(origfn, ui, repo, *pats, **opts)
def graphviz(ui, repo, *limits, **opts):
    '''generate DOT language source to visualize changeset tree.

    "limit-spec" should be formatted in one of the styles shown below:
        1. Start,End
        2. Start,
        3. ,End

    With the "revision" option, "Start" and "End" should be values
    acceptable to the "-r" option of other Mercurial commands.

    With the "datetime" option, they should be in the dateTime format of
    XML Schema, which is known as "YYYY-MM-DDThh:mm:ss".

    With the "interval" option, they should be specified as an interval
    from invocation time (= "now") in seconds.

    Both "Start" and "End" are treated as "inclusive".
    '''
    ################
    def between_datetime(node, lower_dt, upper_dt):
        date = datetimestr(repo.changelog.read(node)[2])
        return ((lower_dt <= date) and (date <= upper_dt))

    def accept_by_dt(lower_dt, upper_dt):
        if (not dtregexp.match(lower_dt)):
            raise util.Abort(_('illegal datetime format: %s') % (lower_dt))
        if (not dtregexp.match(upper_dt)):
            raise util.Abort(_('illegal datetime format: %s') % (upper_dt))
        ui.note(('# visualize revision between "%s" - "%s"\n')
                % (lower_dt, upper_dt))
        return lambda rev, node: between_datetime(node, lower_dt, upper_dt)

    def accept_by_interval(now, lower_interval, upper_interval):
        lower_dt = datetimestr((now - lower_interval, None))
        upper_dt = datetimestr((now - upper_interval, None))
        ui.note(('# visualize revision between "%s" - "%s"\n')
                % (lower_dt, upper_dt))
        return lambda rev, node: between_datetime(node, lower_dt, upper_dt)

    def accept_by_rev(lower_rev, upper_rev):
        ui.note(('# visualize revision between "%d" - "%d"\n')
                % (lower_rev, upper_rev))
        return lambda rev, node: ((lower_rev <= rev) and (rev <= upper_rev))

    def getrev(revname):
        return repo.changelog.rev(repo.lookup(revname))

    ui.note('# %s\n' % opts)

    ################
    # check limit type:
    types = 0
    types += (opts['revision'] and 1 or 0)
    types += (opts['datetime'] and 1 or 0)
    types += (opts['interval'] and 1 or 0)
    if 1 < types:
        raise util.Abort(_('-r, -d and -i are mutually exclusive'))

    ################
    # check urlbase options:
    if opts['urlbase']:
        ui.note(('# "-u %s" is specified\n') % (opts['urlbase']))
    urlbase = opts['urlbase'] or ''

    ################
    # check aliases option:
    aliases = {}  # (key, val) => (username, alias)
    getalias = lambda name: aliases.get(name, name)
    if opts['aliases']:
        ui.note(('# "-a %s" is specified\n') % opts['aliases'])
        if os.path.isfile(opts['aliases']):
            f = open(opts['aliases'], "r")
            for line in f.readlines():
                line = line.strip()
                if '#' != line[0]:
                    alias, actual = line.split('=')
                    aliases[actual] = alias
                    ui.note(('# use "%s" as alias for "%s"\n')
                            % (alias, actual))
            f.close()
        else:
            ui.warn(('%s: file not found, so skip reading aliases\n')
                    % (opts['aliases']))

    ################
    # check format:
    ui.note(('# "%s" is recognized as label format\n')
            % (opts['format_label']))
    ui.note(('# "%s" is recognized as tooltip format\n')
            % (opts['format_tooltip']))

    ################
    # check attribute rendering hook:
    node_hook = lambda repo, rev, node, changes, alias: None
    if opts['node_attr_hook']:
        hookname = opts['node_attr_hook']
        ui.note(('# try to use "%s" as node attribute hook\n')
                % (escapeNL(hookname)))
        node_hook = get_hook(hookname)

    edge_hook = lambda repo, parent, child, first_parent, alias: None
    # (parentrev, parentnode, parentchanges) = parent
    # (childrev, childnode, childchanges) = child
    if opts['edge_attr_hook']:
        hookname = opts['edge_attr_hook']
        ui.note(('# try to use "%s" as edge attribute hook\n')
                % (escapeNL(hookname)))
        edge_hook = get_hook(hookname)

    cluster_hook = lambda repo, branch, date: None
    if opts['cluster_attr_hook']:
        hookname = opts['cluster_attr_hook']
        ui.note(('# try to use "%s" as cluster attribute hook\n')
                % (escapeNL(hookname)))
        cluster_hook = get_hook(hookname)

    ################
    # check limit specifications:
    accepts = []  # list of "lambda rev, node:"

    def acceptable(rev, node):
        for accept in accepts:
            if accept(rev, node):
                return True
        return False

    if (0 == len(limits)):
        # all revisions will be visualized
        accepts.append(lambda rev, node: True)

    now = long(time.time())
    for limit in limits:
        fields = limit.split(',')
        if ((2 != len(fields)) or
            ((fields[0] == "") and (fields[1] == ""))):
            raise util.Abort(_('invalid limit specification: "%s"') % (limit))
        if opts['datetime']:
            accepts.append(
                accept_by_dt(getval(fields[0], "0001-01-01T00:00:00"),
                             getval(fields[1], "9999-12-31T23:59:59")))
        elif opts['interval']:
            accepts.append(
                accept_by_interval(now,
                                   int(getval(fields[0], now)),
                                   int(getval(fields[1], '0'))))
        else:
            # default type is revision
            accepts.append(
                accept_by_rev(getrev(getval(fields[0], "1")),
                              getrev(getval(fields[1], "-1"))))

    ################
    # gather target revisions:
    revmap = {}  # (key, val) => (rev, (node, changes))
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats=None,
                                                 change=get,
                                                 opts={'rev': []})
    for st, rev, b in changeiter:
        if (st == 'iter'):
            node = repo.lookup(rev)
            if acceptable(rev, node):
                revmap[rev] = (node, repo.changelog.read(node))

    ################
    # render source code for DOT language:
    ui.write(("digraph {\n"))

    branches = {}
    branchindex = 0
    group_by_branch = opts['group_by_branch']
    group_by_date = opts['group_by_date']
    for revision, (node, changes) in revmap.items():
        branch = (group_by_branch and changes[5].get("branch") or 'default')
        if not branches.has_key(branch):
            branches[branch] = ({}, branchindex)
            branchindex += 1
        branchcluster, index = branches[branch]
        date = (group_by_date and datestr(changes[2]) or '')
        datecluster = branchcluster.get(date)
        if not datecluster:
            datecluster = []
            branchcluster[date] = datecluster
        datecluster.append(revision)

    for branch, (branchcluster, index) in branches.items():
        if branch != 'default':
            ui.write(('subgraph cluster_%d {\n') % (index))
        for date, revs in branchcluster.items():
            if '' != date:
                ui.write(('subgraph cluster_%s_%s \n{') % (index, date))
                hooked = cluster_hook(repo, branch, date) or ''
                ui.write(('%s\n') % (hooked))
            for revision in revs:
                node, changes = revmap[revision]
                keywordmap = {'u': getalias(changes[1]),
                              'U': changes[1],
                              'r': str(revision),
                              's': short(node),
                              'h': hex(node),
                              'd': datetimestr(changes[2]),
                              't': get_1stline(changes[4])}
                attr = []
                attr.append(('URL = "%s/rev/%s"') % (urlbase, short(node)))
                attr.append(('label = "%s"')
                            % (escapeDQ(opts['format_label'] % keywordmap)))
                attr.append(('tooltip = "%s"')
                            % (escapeDQ(opts['format_tooltip'] % keywordmap)))
                hooked = node_hook(repo, revision, node, changes, getalias)
                if hooked:
                    attr.append(hooked)
                ui.write(('%d [ %s ]\n')
                         % (revision, string.join(attr, ', ')))
            if '' != date:
                ui.write(('}\n'))
        if branch != 'default':
            hooked = cluster_hook(repo, branch, '') or ''
            ui.write(('%s\n') % (hooked))
            ui.write(('}\n'))

    if opts['show_tags']:
        tagindex = 0
        for tag, node in repo.tagslist():
            revision = repo.changelog.rev(node)
            if revmap.has_key(revision):
                ui.write(('tag_%d [ label="%s" shape=plaintext ]\n'
                          % (tagindex, escapeDQ(tag))))
                ui.write(('tag_%d -> %s [ arrowhead=none, arrowtail=dot ]'
                          % (tagindex, revision)))
                tagindex += 1

    for revision, (node, changes) in revmap.items():
        child = (revision, node, changes)
        first_parent = True
        for parentnode in repo.changelog.parents(node):
            parentrev = repo.changelog.rev(parentnode)
            if revmap.has_key(parentrev):
                pnode, pchanges = revmap[parentrev]
                parent = (parentrev, pnode, pchanges)
                hooked = edge_hook(repo, parent, child, first_parent, getalias)
                ui.write(('%d -> %d [ %s ] \n')
                         % (parentrev, revision, (hooked or ' ')))
            first_parent = False

    ui.write(("}\n"))
class WorkSpace(object):

    def __init__(self, repository):
        self.repo = repository
        self.ui = self.repo.ui
        self.name = self.repo.root

        self.activecache = {}

    def parent(self, spec=None):
        '''Return the canonical workspace parent, either SPEC (which
        will be expanded) if provided or the default parent
        otherwise.'''

        if spec:
            return self.ui.expandpath(spec)

        p = self.ui.expandpath('default')
        if p == 'default':
            return None
        else:
            return p

    def _localtip(self, outgoing, wctx):
        '''Return the most representative changeset to act as the
        localtip.

        If the working directory is modified (has file changes, is a
        merge, or has switched branches), this will be a workingctx.

        If the working directory is unmodified, this will be the most
        recent (highest revision number) local (outgoing) head on the
        current branch, if no heads are determined to be outgoing, it
        will be the most recent head on the current branch.
        '''

        if (wctx.files() or len(wctx.parents()) > 1 or
            wctx.branch() != wctx.parents()[0].branch()):
            return wctx

        heads = self.repo.heads(start=wctx.parents()[0].node())
        headctxs = [self.repo.changectx(n) for n in heads]
        localctxs = [c for c in headctxs if c.node() in outgoing]

        ltip = sorted(localctxs or headctxs, key=lambda x: x.rev())[-1]

        if len(heads) > 1:
            self.ui.warn('The current branch has more than one head, '
                         'using %s\n' % ltip.rev())

        return ltip

    def parenttip(self, heads, outgoing):
        '''Return the highest-numbered, non-outgoing changeset that is
        an ancestor of a changeset in heads.

        This returns the most recent changeset on a given branch that
        is shared between a parent and child workspace, in effect the
        common ancestor of the chosen local tip and the parent
        workspace.
        '''

        def tipmost_shared(head, outnodes):
            '''Return the changeset on the same branch as head that is
            not in outnodes and is closest to the tip.

            Walk outgoing changesets from head to the bottom of the
            workspace (revision 0) and return the first changeset we
            see that is not in outnodes.

            If none is found (all revisions >= 0 are outgoing), the
            only possible parenttip is the null node (node.nullid)
            which is returned explicitly.
            '''
            for ctx in self._walkctxs(head, self.repo.changectx(0),
                                      follow=True,
                                      pick=lambda c: c.node() not in outnodes):
                return ctx

            return self.repo.changectx(node.nullid)

        nodes = set(outgoing)
        ptips = map(lambda x: tipmost_shared(x, nodes), heads)
        return sorted(ptips, key=lambda x: x.rev(), reverse=True)[0]

    def status(self, base='.', head=None, files=None):
        '''Translate from the hg 6-tuple status format to a hash keyed
        on change-type'''

        states = ['modified', 'added', 'removed', 'deleted', 'unknown',
                  'ignored']

        match = self.matcher(files=files)
        chngs = self.repo.status(base, head, match=match)

        ret = {}
        for paths, change in zip(chngs, states):
            ret.update((f, change) for f in paths)
        return ret

    def findoutgoing(self, parent):
        '''Return the base set of outgoing nodes.

        A caching wrapper around mercurial.localrepo.findoutgoing().
        Complains (to the user), if the parent workspace is
        non-existent or inaccessible'''

        self.ui.pushbuffer()
        try:
            try:
                ui = self.ui
                if hasattr(cmdutil, 'remoteui'):
                    ui = cmdutil.remoteui(ui, {})
                pws = hg.repository(ui, parent)
                if Version.at_least("1.6"):
                    return discovery.findoutgoing(self.repo, pws)
                else:
                    return self.repo.findoutgoing(pws)
            except error.RepoError:
                self.ui.warn("Warning: Parent workspace '%s' is not "
                             "accessible\n"
                             "active list will be incomplete\n\n" % parent)
                return []
        finally:
            self.ui.popbuffer()
    findoutgoing = util.cachefunc(findoutgoing)

    def modified(self):
        '''Return a list of files modified in the workspace'''
        wctx = self.workingctx()
        return sorted(wctx.files() + wctx.deleted()) or None

    def merged(self):
        '''Return boolean indicating whether the workspace has an
        uncommitted merge'''
        wctx = self.workingctx()
        return len(wctx.parents()) > 1

    def branched(self):
        '''Return boolean indicating whether the workspace has an
        uncommitted named branch'''
        wctx = self.workingctx()
        return wctx.branch() != wctx.parents()[0].branch()

    def active(self, parent=None, thorough=False):
        '''Return an ActiveList describing changes between workspace
        and parent workspace (including uncommitted changes).  If the
        workspace has no parent, ActiveList will still describe any
        uncommitted changes.

        If thorough is True use neither the WorkList nor any cached
        results (though the result of this call will be cached for
        future, non-thorough, calls).'''

        parent = self.parent(parent)

        #
        # Use the cached copy if we can (we have one, and weren't
        # asked to be thorough)
        #
        if not thorough and parent in self.activecache:
            return self.activecache[parent]

        #
        # outbases: The set of outgoing nodes with no outgoing ancestors
        # outnodes: The full set of outgoing nodes
        #
        if parent:
            outbases = self.findoutgoing(parent)
            outnodes = self.repo.changelog.nodesbetween(outbases)[0]
        else:               # No parent, no outgoing nodes
            outbases = []
            outnodes = []

        wctx = self.workingctx(worklist=not thorough)

        localtip = self._localtip(outnodes, wctx)

        if localtip.rev() is None:
            heads = localtip.parents()
        else:
            heads = [localtip]

        parenttip = self.parenttip(heads, outnodes)

        #
        # If we couldn't find a parenttip, the two repositories must
        # be unrelated (Hg catches most of this, but this case is
        # valid for it but invalid for us)
        #
        if parenttip == None:
            raise util.Abort('repository is unrelated')

        headnodes = [h.node() for h in heads]
        ctxs = [self.repo.changectx(n) for n in
                self.repo.changelog.nodesbetween(outbases, headnodes)[0]]

        if localtip.rev() is None:
            ctxs.append(localtip)

        act = ActiveList(self, parenttip, ctxs)
        self.activecache[parent] = act

        return act

    def squishdeltas(self, active, message, user=None):
        '''Create a single conglomerate changeset based on a given
        active list.  Removes the original changesets comprising the
        given active list, and any tags pointing to them.

        Operation:

          - Commit an activectx object representing the specified
            active list,

          - Remove any local tags pointing to changesets in the
            specified active list.

          - Remove the changesets comprising the specified active
            list.

          - Remove any metadata that may refer to changesets that were
            removed.

        Calling code is expected to hold both the working copy lock
        and repository lock of the destination workspace
        '''

        def strip_local_tags(active):
            '''Remove any local tags referring to the specified
            nodes.'''

            if os.path.exists(self.repo.join('localtags')):
                fh = None
                try:
                    fh = self.repo.opener('localtags')
                    tags = active.prune_tags(fh)
                    fh.close()

                    fh = self.repo.opener('localtags', 'w', atomictemp=True)
                    fh.writelines(tags)
                    fh.rename()
                finally:
                    if fh and not fh.closed:
                        fh.close()

        if active.files():
            for entry in active:
                #
                # Work around Mercurial issue #1666, if the source
                # file of a rename exists in the working copy
                # Mercurial will complain, and remove the file.
                #
                # We preemptively remove the file to avoid the
                # complaint (the user was asked about this in
                # cdm_recommit)
                #
                if entry.is_renamed():
                    path = self.repo.wjoin(entry.parentname)
                    if os.path.exists(path):
                        os.unlink(path)

            self.repo.commitctx(active.context(message, user))
            wsstate = "recommitted"
            destination = self.repo.changelog.tip()
        else:
            #
            # If all we're doing is stripping the old nodes, we want
            # to update the working copy such that we're not at a
            # revision that's about to go away.
            #
            wsstate = "tip"
            destination = active.parenttip.node()

        self.clean(destination)

        #
        # Tags were elided by the activectx object.  Local tags,
        # however, must be removed manually.
        #
        try:
            strip_local_tags(active)
        except EnvironmentError, e:
            raise util.Abort('Could not recommit tags: %s\n' % e)

        # Silence all the strip and update fun
        self.ui.pushbuffer()

        #
        # Remove the previous child-local changes by stripping the
        # nodes that form the base of the ActiveList (removing their
        # children in the process).
        #
        try:
            try:
                for base in active.bases():
                    #
                    # Any cached information about the repository is
                    # likely to be invalid during the strip.  The
                    # caching of branch tags is especially
                    # problematic.
                    #
                    self.repo.invalidate()
                    repair.strip(self.ui, self.repo, base.node(),
                                 backup=False)
            except:
                #
                # If this fails, it may leave us in a surprising place
                # in the history.
                #
                # We want to warn the user that something went wrong,
                # and what will happen next, re-raise the exception,
                # and bring the working copy back into a consistent
                # state (which the finally block will do)
                #
                self.ui.warn("stripping failed, your workspace will have "
                             "superfluous heads.\n"
                             "your workspace has been updated to the "
                             "%s changeset.\n" % wsstate)
                raise               # Re-raise the exception
        finally:
            self.clean()
            self.repo.dirstate.write()      # Flush the dirstate
            self.repo.invalidate()          # Invalidate caches

        #
        # We need to remove Hg's undo information (used for rollback),
        # since it refers to data that will probably not exist after
        # the strip.
        #
        if os.path.exists(self.repo.sjoin('undo')):
            try:
                os.unlink(self.repo.sjoin('undo'))
            except EnvironmentError, e:
                raise util.Abort('failed to remove undo data: %s\n' % e)

        self.ui.popbuffer()
def buildfilelist(ws, parent, files):
    '''Build a list of files in which we're interested.

    If no files are specified take files from the active list relative
    to 'parent'.  Return a list of 2-tuples the first element being a
    path relative to the current directory and the second an entry
    from the active list, or None if an explicit file list was
    given.'''

    if files:
        return [(path, None) for path in sorted(files)]
    else:
        active = ws.active(parent=parent)
        return [(ws.filepath(e.name), e) for e in sorted(active)]
buildfilelist = util.cachefunc(buildfilelist)


def not_check(repo, cmd):
    '''return a function which returns boolean indicating whether a file
    should be skipped for CMD.'''

    #
    # The ignore routines need a canonical path to the file (relative to the
    # repo root), whereas the check commands get paths relative to the cwd.
    #
    # Wrap our argument such that the path is canonified before it is checked.
    #
    def canonified_check(ignfunc):
        def f(path):
            cpath = util.canonpath(repo.root, repo.getcwd(), path)
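# Nearly every snippet above wraps a revision or file-list lookup in
# util.cachefunc() so that repeated queries for the same argument hit a dict
# instead of the changelog.  The sketch below only illustrates that kind of
# memoizer, assuming caching keyed on positional arguments; it is not
# Mercurial's util.cachefunc source.
def cachefunc_sketch(func):
    cache = {}
    def wrapper(*args):
        if args not in cache:
            cache[args] = func(*args)   # compute once per argument tuple
        return cache[args]
    return wrapper

square = cachefunc_sketch(lambda r: r * r)   # toy "expensive" lookup
assert square(4) == 16 and square(4) == 16   # second call served from cache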