def rewritemessage(ui, repo, revs=None, **opts):
    """Rewrite the descriptions of the given revisions.

    Unless --unmodified is given, a monotonically increasing counter is
    appended to each rewritten changeset's description so every commit
    message changes.
    """
    nodes = [repo[rev].node() for rev in repo.revs(revs)]
    # Mutable cell so the closure below can bump the counter.
    counter = [0]

    def createfn(repo, ctx, revmap, filectxfn):
        parents = newparents(repo, ctx, revmap)
        desc = ctx.description()
        if not opts['unmodified']:
            desc = '%s\n%d' % (desc, counter[0])

        memctx = context.memctx(repo, parents, desc, ctx.files(),
                                filectxfn, user=ctx.user(),
                                date=ctx.date(), extra=ctx.extra())
        st = ctx.p1().status(ctx)
        # Expose the rewritten changeset's file status on the memctx.
        memctx.modified = lambda: st[0]
        memctx.added = lambda: st[1]
        memctx.removed = lambda: st[2]
        counter[0] += 1
        return memctx

    replacechangesets(repo, nodes, createfn)
def rewritemessage(ui, repo, revs=None):
    """Rewrite the descriptions of the given revisions, appending a
    per-changeset sequence number so every message changes."""
    nodes = [repo[rev].node() for rev in repo.revs(revs)]
    # Mutable cell: lets the nested function increment across calls.
    seq = [0]

    def createfn(repo, ctx, revmap, filectxfn):
        parents = newparents(repo, ctx, revmap)
        newdesc = "%s\n%d" % (ctx.description(), seq[0])
        memctx = context.memctx(repo, parents, newdesc, ctx.files(),
                                filectxfn, user=ctx.user(),
                                date=ctx.date(), extra=ctx.extra())
        st = ctx.p1().status(ctx)
        # Surface the status of the source changeset on the new memctx.
        memctx.modified = lambda: st[0]
        memctx.added = lambda: st[1]
        memctx.removed = lambda: st[2]
        seq[0] += 1
        return memctx

    replacechangesets(repo, nodes, createfn)
def rewritemessage(ui, repo, revs=None, **opts):
    """Rewrite the descriptions of the given revisions.

    Appends an increasing counter (as bytes) to each description unless
    --unmodified is given. Handles the hg 5.3 change where status is an
    object rather than a tuple.
    """
    nodes = [repo[rev].node() for rev in repo.revs(revs)]
    # Mutable cell so the inner closure can advance the counter.
    seq = [0]

    def createfn(repo, ctx, revmap, filectxfn):
        parents = newparents(repo, ctx, revmap)
        desc = ctx.description()
        if not opts['unmodified']:
            desc = desc + b'\n%d' % seq[0]

        memctx = context.memctx(repo, parents, desc, ctx.files(),
                                filectxfn, user=ctx.user(),
                                date=ctx.date(), extra=ctx.extra())
        st = ctx.p1().status(ctx)
        # TRACKING hg53 - status is an object instead of a tuple
        if util.versiontuple(n=2) >= (5, 3):
            memctx.modified = lambda: st.modified
            memctx.added = lambda: st.added
            memctx.removed = lambda: st.removed
        else:
            memctx.modified = lambda: st[0]
            memctx.added = lambda: st[1]
            memctx.removed = lambda: st[2]
        seq[0] += 1
        return memctx

    replacechangesets(repo, nodes, createfn)
def rewrite_commit_descriptions(ui, repo, node, descriptions=None):
    """Rewrite the descriptions of `node` and its draft ancestors.

    `descriptions` is a path to a JSON file mapping changeset identifiers
    to new description text. Changesets whose descriptions are unchanged
    are skipped. Returns 1 when nothing is left to rewrite.

    Fixes vs. original:
    - `_()` was called on the already-formatted message
      (`_('... %s ...' % sha1)`), which defeats gettext catalog lookup;
      the template is now translated first and formatted afterwards.
    - `filter()` returns a lazy iterator on Python 3, so the original
      `if not nodes:` emptiness check could never fire; a list
      comprehension is used instead.
    """
    if not node:
        node = 'tip'

    # Collect the target node plus its contiguous run of draft ancestors,
    # oldest first.
    ctx = repo[node]
    nodes = [ctx.node()]
    for ancestor in ctx.ancestors():
        ctx = repo[ancestor]
        if ctx.phase() != phases.draft:
            break
        nodes.append(ctx.node())
    nodes.reverse()

    # Load the identifier -> new-description map, converting to local
    # encoding as Mercurial expects.
    description_map = {}
    with open(descriptions, 'rb') as f:
        raw_descriptions = json.load(f)
    for k in raw_descriptions:
        description_map[k] = encoding.tolocal(
            raw_descriptions[k].encode('utf-8'))

    def prune_unchanged(node):
        # True when this changeset's description actually changes.
        sha1 = repo[node].hex()[:12]
        description = repo[node].description()
        revised_description = description_map.get(sha1, description)
        if description == revised_description:
            # Translate the template, then interpolate.
            ui.write(_('not rewriting %s - description unchanged\n') % sha1)
            return False
        return True

    # Materialize as a list so the emptiness check below is meaningful.
    nodes = [n for n in nodes if prune_unchanged(n)]
    if not nodes:
        ui.write(_('no commits found to be rewritten\n'))
        return 1

    def createfn(repo, ctx, revmap, filectxfn):
        parents = rewrite.newparents(repo, ctx, revmap)
        sha1 = ctx.hex()[:12]
        description = description_map.get(sha1, ctx.description())
        memctx = context.memctx(repo, parents, description, ctx.files(),
                                filectxfn, user=ctx.user(),
                                date=ctx.date(), extra=ctx.extra())
        st = ctx.p1().status(ctx)
        memctx.modified = lambda: st[0]
        memctx.added = lambda: st[1]
        memctx.removed = lambda: st[2]
        return memctx

    rewrite.replacechangesets(repo, nodes, createfn)
def rewritechangefile(ui, repo, revs=None):
    """Rewrite the given revisions, dropping the last entry from each
    changeset's file list."""
    nodes = [repo[rev].node() for rev in repo.revs(revs)]

    def createfn(repo, ctx, revmap, filectxfn):
        parents = newparents(repo, ctx, revmap)
        # Deliberately discard the final file touched by the changeset.
        remaining = ctx.files()
        remaining.pop()
        return context.memctx(repo, parents, ctx.description(),
                              remaining, filectxfn, user=ctx.user(),
                              date=ctx.date(), extra=ctx.extra())

    replacechangesets(repo, nodes, createfn)
def rewritechangefile(ui, repo, revs=None):
    """Rewrite each of the given revisions with its last-listed file
    removed from the changeset's file list."""
    nodes = [repo[rev].node() for rev in repo.revs(revs)]

    def createfn(repo, ctx, revmap, filectxfn):
        parents = newparents(repo, ctx, revmap)
        filelist = ctx.files()
        # Drop the trailing file on purpose.
        del filelist[-1]
        memctx = context.memctx(repo, parents, ctx.description(),
                                filelist, filectxfn,
                                user=ctx.user(), date=ctx.date(),
                                extra=ctx.extra())
        return memctx

    replacechangesets(repo, nodes, createfn)
def wrappedpushdiscovery(orig, pushop):
    """Wraps exchange._pushdiscovery to add extra review metadata.

    We discover what nodes to review before discovery. This ensures that
    errors are discovered and reported quickly, without waiting for
    server communication.
    """
    pushop.reviewnodes = None

    # Only engage when the remote advertises review support; otherwise
    # defer entirely to the wrapped discovery.
    caps = getreviewcaps(pushop.remote)
    if "pushreview" not in caps:
        return orig(pushop)

    ui = pushop.ui
    repo = pushop.repo

    # Per-repo opt-out of review pushes.
    if repo.noreviewboardpush:
        return orig(pushop)

    # If no arguments are specified to push, Mercurial will try to push all
    # non-remote changesets by default. This can result in unexpected behavior,
    # especially for people doing multi-headed development.
    #
    # Since we reject pushes with multiple heads anyway, default to pushing
    # the working copy.
    if not pushop.revs:
        pushop.revs = [repo["."].node()]

    tipnode = None
    basenode = None

    # Our prepushoutgoing hook validates that all pushed changesets are
    # part of the same DAG head. If revisions were specified by the user,
    # the last is the tip commit to review and the first (if more than 1)
    # is the base commit to review.
    #
    # Note: the revisions are in the order they were specified by the user.
    # This may not be DAG order. So we have to explicitly order them here.
    revs = sorted(repo[r].rev() for r in pushop.revs)
    tipnode = repo[revs[-1]].node()
    if len(revs) > 1:
        basenode = repo[revs[0]].node()

    if repo.pushsingle:
        basenode = tipnode

    # Given a base and tip node, find all changesets to review.
    #
    # A solution that works most of the time is to find all non-public
    # ancestors of that node. This is our default.
    #
    # If basenode is specified, we stop the traversal when we encounter it.
    #
    # Note that we will still refuse to review a public changeset even with
    # basenode. This decision is somewhat arbitrary and can be revisited later
    # if there is an actual need to review public changesets.
    nodes = [tipnode]
    # Special case where basenode is the tip node.
    if basenode and tipnode == basenode:
        pass
    else:
        # Walk ancestors, prepending so `nodes` ends up oldest-first.
        for node in repo[tipnode].ancestors():
            ctx = repo[node]

            if ctx.phase() == phases.public:
                break
            if basenode and ctx.node() == basenode:
                nodes.insert(0, ctx.node())
                break

            nodes.insert(0, ctx.node())

    # Filter out public nodes.
    publicnodes = []
    for node in nodes:
        ctx = repo[node]
        if ctx.phase() == phases.public:
            publicnodes.append(node)
            ui.status(_("(ignoring public changeset %s in review request)\n")
                      % ctx.hex()[0:12])
    nodes = [n for n in nodes if n not in publicnodes]

    if not nodes:
        raise util.Abort(
            _("no non-public changesets left to review"),
            hint=_("add or change the -r argument to include draft changesets"),
        )

    # We stop completely empty changesets prior to review.
    for node in nodes:
        ctx = repo[node]
        if not ctx.files():
            raise util.Abort(
                _("cannot review empty changeset %s") % ctx.hex()[:12],
                hint=_("add files to or remove changeset")
            )

    # Ensure all reviewed changesets have commit IDs.
    replacenodes = []
    for node in nodes:
        ctx = repo[node]
        if "commitid" not in ctx.extra():
            replacenodes.append(node)

    def addcommitid(repo, ctx, revmap, copyfilectxfn):
        # Rebuild the changeset with a freshly generated "commitid" in
        # its extras; everything else is carried over unchanged.
        parents = newparents(repo, ctx, revmap)
        # Need to make a copy otherwise modification is made on original,
        # which is just plain wrong.
        extra = dict(ctx.extra())
        assert "commitid" not in extra
        extra["commitid"] = genid(repo)
        memctx = context.memctx(
            repo, parents, ctx.description(), ctx.files(), copyfilectxfn,
            user=ctx.user(), date=ctx.date(), extra=extra
        )

        return memctx

    if replacenodes:
        ui.status(_("(adding commit id to %d changesets)\n")
                  % (len(replacenodes)))
        nodemap = replacechangesets(repo, replacenodes, addcommitid,
                                    backuptopic="addcommitid")

        # Since we're in the middle of an operation, update references
        # to rewritten nodes.
        nodes = [nodemap.get(node, node) for node in nodes]
        pushop.revs = [nodemap.get(node, node) for node in pushop.revs]

    pushop.reviewnodes = nodes

    # Since we may rewrite changesets to contain review metadata after
    # push, abort immediately if the working directory state is not
    # compatible with rewriting. This prevents us from successfully
    # pushing and failing to update commit metadata after the push. i.e.
    # it prevents potential loss of metadata.
    #
    # There may be some scenarios where we don't rewrite after push.
    # But coding that here would be complicated. And future server changes
    # may change things like review request mapping, which may invalidate
    # client assumptions. So always assume a rewrite is needed.
    impactedrevs = list(repo.revs("%ln::", nodes))
    if repo["."].rev() in impactedrevs:
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)

    return orig(pushop)
def wrappedpushdiscovery(orig, pushop):
    """Wraps exchange._pushdiscovery to add extra review metadata.

    We discover what nodes to review before discovery. This ensures that
    errors are discovered and reported quickly, without waiting for
    server communication.
    """
    pushop.reviewnodes = None

    # Bail out to the wrapped implementation unless the remote supports
    # review pushes.
    caps = getreviewcaps(pushop.remote)
    if 'pushreview' not in caps:
        return orig(pushop)

    ui = pushop.ui
    repo = pushop.repo

    # Per-repo opt-out of review pushes.
    if repo.noreviewboardpush:
        return orig(pushop)

    # If no arguments are specified to push, Mercurial will try to push all
    # non-remote changesets by default. This can result in unexpected behavior,
    # especially for people doing multi-headed development.
    #
    # Since we reject pushes with multiple heads anyway, default to pushing
    # the working copy.
    if not pushop.revs:
        pushop.revs = [repo['.'].node()]

    tipnode = None
    basenode = None

    # Our prepushoutgoing hook validates that all pushed changesets are
    # part of the same DAG head. If revisions were specified by the user,
    # the last is the tip commit to review and the first (if more than 1)
    # is the base commit to review.
    #
    # Note: the revisions are in the order they were specified by the user.
    # This may not be DAG order. So we have to explicitly order them here.
    revs = sorted(repo[r].rev() for r in pushop.revs)
    tipnode = repo[revs[-1]].node()
    if len(revs) > 1:
        basenode = repo[revs[0]].node()

    if repo.pushsingle:
        basenode = tipnode

    # Given a base and tip node, find all changesets to review.
    #
    # A solution that works most of the time is to find all non-public
    # ancestors of that node. This is our default.
    #
    # If basenode is specified, we stop the traversal when we encounter it.
    #
    # Note that we will still refuse to review a public changeset even with
    # basenode. This decision is somewhat arbitrary and can be revisited later
    # if there is an actual need to review public changesets.
    nodes = [tipnode]
    # Special case where basenode is the tip node.
    if basenode and tipnode == basenode:
        pass
    else:
        # Walk ancestors, prepending so `nodes` ends up oldest-first.
        for node in repo[tipnode].ancestors():
            ctx = repo[node]

            if ctx.phase() == phases.public:
                break
            if basenode and ctx.node() == basenode:
                nodes.insert(0, ctx.node())
                break

            nodes.insert(0, ctx.node())

    # Filter out public nodes.
    publicnodes = []
    for node in nodes:
        ctx = repo[node]
        if ctx.phase() == phases.public:
            publicnodes.append(node)
            ui.status(
                _('(ignoring public changeset %s in review request)\n') %
                ctx.hex()[0:12])
    nodes = [n for n in nodes if n not in publicnodes]

    if not nodes:
        raise util.Abort(
            _('no non-public changesets left to review'),
            hint=_(
                'add or change the -r argument to include draft changesets'))

    # We stop completely empty changesets prior to review.
    for node in nodes:
        ctx = repo[node]
        if not ctx.files():
            raise util.Abort(_('cannot review empty changeset %s') %
                             ctx.hex()[:12],
                             hint=_('add files to or remove changeset'))

    # NOTE(review): presumably raises/aborts on style violations — confirm
    # against run_android_checkstyle's definition.
    run_android_checkstyle(repo, nodes)

    # Ensure all reviewed changesets have commit IDs.
    replacenodes = []
    for node in nodes:
        ctx = repo[node]
        # A changeset needs rewriting when no commit id can be parsed out
        # of its description.
        if not parse_commit_id(encoding.fromlocal(ctx.description())):
            replacenodes.append(node)

    def makememctx(repo, ctx, revmap, copyfilectxfn):
        # Rebuild the changeset with a commit id added to its message;
        # all other metadata is carried over.
        parents = newparents(repo, ctx, revmap)
        # Need to make a copy otherwise modification is made on original,
        # which is just plain wrong.
        msg = encoding.fromlocal(ctx.description())
        new_msg, changed = addcommitid(msg, repo=repo)

        memctx = context.memctx(repo, parents,
                                encoding.tolocal(new_msg), ctx.files(),
                                copyfilectxfn, user=ctx.user(),
                                date=ctx.date(),
                                extra=dict(ctx.extra()))

        return memctx

    if replacenodes:
        ui.status(
            _('(adding commit id to %d changesets)\n') % (len(replacenodes)))
        nodemap = replacechangesets(repo, replacenodes, makememctx,
                                    backuptopic='addcommitid')

        # Since we're in the middle of an operation, update references
        # to rewritten nodes.
        nodes = [nodemap.get(node, node) for node in nodes]
        pushop.revs = [nodemap.get(node, node) for node in pushop.revs]

    pushop.reviewnodes = nodes

    # Since we may rewrite changesets to contain review metadata after
    # push, abort immediately if the working directory state is not
    # compatible with rewriting. This prevents us from successfully
    # pushing and failing to update commit metadata after the push. i.e.
    # it prevents potential loss of metadata.
    #
    # There may be some scenarios where we don't rewrite after push.
    # But coding that here would be complicated. And future server changes
    # may change things like review request mapping, which may invalidate
    # client assumptions. So always assume a rewrite is needed.
    impactedrevs = list(repo.revs('%ln::', nodes))
    if repo['.'].rev() in impactedrevs:
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)

    return orig(pushop)
def rewrite_commit_descriptions(ui, repo, base_node, descriptions=None):
    """Rewrite the descriptions of `base_node` and its draft ancestors.

    `descriptions` is a path to a JSON file mapping SHA-1s to new
    description text; only changesets present in the map are rewritten.
    Emits an `old -> new` SHA-1 line per rewritten changeset.

    Fix vs. original: the ancestor-walk loop tested `ctx.phase()` where
    `ctx` was still bound to the *base* changeset, so the draft-phase
    stop condition could never trigger and the loop walked the entire
    ancestry. It now checks each ancestor's own phase (matching the
    pattern used by the sibling implementation of this command).
    """
    def sha1_short(node):
        return repo[node].hex()[:12]

    def sha1_full(node):
        return repo[node].hex()

    # Rewriting fails if the evolve extension is enabled.
    try:
        extensions.find('evolve')
        raise error.Abort('Cannot continue as the "evolve" extension is '
                          'enabled.')
    except KeyError:
        # evolve is not loaded; safe to proceed.
        pass

    # Read commit descriptions map.
    # MozReview passes in short SHA1 (12 chars), so we have to use [:12] here
    # and in `add_node`.
    description_map = {}
    with open(descriptions, 'rb') as f:
        raw_descriptions = json.load(f)
    for sha1 in raw_descriptions:
        description_map[sha1[:12]] = encoding.tolocal(
            raw_descriptions[sha1].encode('utf-8'))

    # Collect nodes listed by description_map.
    nodes = []

    def add_node(ctx):
        node = ctx.node()
        if sha1_short(node) in description_map:
            nodes.append(node)

    ctx = repo[base_node]
    add_node(ctx)
    for ancestor in ctx.ancestors():
        actx = repo[ancestor]
        # Stop at the first non-draft ancestor (public history is never
        # rewritten).
        if actx.phase() != phases.draft:
            break
        add_node(actx)
    nodes.reverse()

    if not nodes:
        raise error.Abort('No commits found to be rewritten.')

    # We need to store the original sha1 values because we won't be able to
    # look them up once they are rewritten.
    original_sha1s = {}
    for node in nodes:
        original_sha1s[node] = sha1_full(node)

    # Update changed nodes.
    def create_func(repo, ctx, revmap, filectxfn):
        parents = rewrite.newparents(repo, ctx, revmap)
        sha1 = ctx.hex()[:12]
        description = description_map[sha1]
        memctx = context.memctx(repo, parents, description, ctx.files(),
                                filectxfn, user=ctx.user(),
                                date=ctx.date(), extra=ctx.extra())
        status = ctx.p1().status(ctx)
        memctx.modified = lambda: status[0]
        memctx.added = lambda: status[1]
        memctx.removed = lambda: status[2]
        return memctx

    node_map = rewrite.replacechangesets(repo, nodes, create_func)

    # Output result.
    for node in nodes:
        ui.write('rev: %s -> %s\n'
                 % (original_sha1s[node], sha1_full(node_map[node])))