def chash(manifest, files, desc, p1, p2, user, date, extra):
    """Compute changeset hash from the changeset pieces."""
    user = user.strip()
    if "\n" in user:
        raise error.RevlogError(_("username %s contains a newline")
                                % repr(user))

    # strip trailing whitespace and leading and trailing empty lines
    desc = '\n'.join([l.rstrip() for l in desc.splitlines()]).strip('\n')

    user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)

    if date:
        parseddate = "%d %d" % util.parsedate(date)
    else:
        parseddate = "%d %d" % util.makedate()
    extra = extra.copy()
    if 'signature' in extra:
        del extra['signature']
    if extra.get("branch") in ("default", ""):
        del extra["branch"]
    if extra:
        extra = changelog.encodeextra(extra)
        parseddate = "%s %s" % (parseddate, extra)
    l = [hex(manifest), user, parseddate] + sorted(files) + ["", desc]
    text = "\n".join(l)
    return revlog.hash(text, p1, p2)
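
# For reference, a sketch of the canonical changeset text that chash() above
# hashes; the node, user, and file values here are made up for illustration:
#
#   0123456789abcdef0123456789abcdef01234567    <- hex(manifest)
#   Jane Doe <jane@example.org>                 <- user
#   1356994800 0 branch:stable                  <- date, tz offset, encoded extra
#   a/file.txt                                  <- sorted changed files
#   b/other.txt
#                                               <- empty separator line
#   commit message goes here                    <- desc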
def _update_issue(ui, repo, node, **kwargs):
    """Update a Roundup issue for corresponding changesets.

    Return True if updating the Roundup issue fails, else False.
    """
    repourl = ui.config('hgroundup', 'repourl')
    if not repourl:
        repourl = posixpath.join(ui.config('web', 'baseurl'), 'rev/')
    fromaddr = ui.config('hgroundup', 'fromaddr')
    toaddr = ui.config('hgroundup', 'toaddr')
    for var in ('repourl', 'fromaddr', 'toaddr'):
        if not locals()[var]:
            raise RuntimeError(
                'roundup hook not configured properly,\nplease '
                'set the "%s" property in the [hgroundup] section' % var)
    start = repo[node].rev()
    issues = {}
    for rev in xrange(start, len(repo)):
        ctx = repo[rev]
        description = fromlocal(ctx.description().strip())
        matches = ISSUE_PATTERN.finditer(description)
        ids = set()
        for match in matches:
            data = match.groupdict()
            ui.debug('match in commit msg: %s\n' % data)
            # check for duplicated issue numbers in the same commit msg
            if data['issue_id'] in ids:
                continue
            ids.add(data['issue_id'])
            comment = Template(COMMENT_TEMPLATE).substitute({
                'author': fromlocal(person(ctx.user())),
                'branch': ctx.branch(),
                'changeset_id': str(ctx),
                'changeset_url': posixpath.join(repourl, str(ctx)),
                'commit_msg': description.splitlines()[0],
            })
            add_comment(issues, data, comment)
    if issues:
        smtp_host = ui.config('smtp', 'host', default='localhost')
        smtp_port = int(ui.config('smtp', 'port', 25))
        s = smtplib.SMTP(smtp_host, smtp_port)
        username = ui.config('smtp', 'username', '')
        if username:
            password = ui.config('smtp', 'password', '')
            s.login(username, password)
        try:
            send_comments(s, fromaddr, toaddr, issues)
            ui.status("sent email to roundup at " + toaddr + '\n')
        except Exception, err:
            # make sure an issue updating roundup does not prevent an
            # otherwise successful push.
            ui.warn("sending email to roundup at %s failed: %s\n" %
                    (toaddr, err))
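
# For reference, a sketch of the hgrc configuration the hook above reads.
# The addresses and URL are examples only; [smtp] falls back to
# localhost:25 when unset, and repourl falls back to [web] baseurl + 'rev/':
#
#   [hgroundup]
#   repourl = https://hg.example.org/repo/rev/
#   fromaddr = hg@example.org
#   toaddr = roundup@bugs.example.org
#
#   [smtp]
#   host = localhost
#   port = 25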
def write(repo):
    '''Write bookmarks

    Write the given bookmark => hash dictionary to the .hg/bookmarks file
    in a format equal to those of localtags.

    We also store a backup of the previous state in undo.bookmarks that
    can be copied back on rollback.
    '''
    refs = repo._bookmarks

    if repo._bookmarkcurrent not in refs:
        setcurrent(repo, None)
    for mark in refs.keys():
        if not valid(mark):
            raise util.Abort(_("bookmark '%s' contains illegal "
                               "character") % mark)

    wlock = repo.wlock()
    try:
        file = repo.opener('bookmarks', 'w', atomictemp=True)
        for refspec, node in refs.iteritems():
            file.write("%s %s\n" % (hex(node), encoding.fromlocal(refspec)))
        file.close()

        # touch 00changelog.i so hgweb reloads bookmarks (no lock needed)
        try:
            os.utime(repo.sjoin('00changelog.i'), None)
        except OSError:
            pass
    finally:
        wlock.release()
def setcurrent(repo, mark):
    '''Set the name of the bookmark that we are currently on

    Set the name of the bookmark that we are on (hg update <bookmark>).
    The name is recorded in .hg/bookmarks.current
    '''
    current = repo._bookmarkcurrent
    if current == mark:
        return

    if mark not in repo._bookmarks:
        mark = ''
    if not valid(mark):
        raise util.Abort(_("bookmark '%s' contains illegal "
                           "character") % mark)

    wlock = repo.wlock()
    try:
        file = repo.opener('bookmarks.current', 'w', atomictemp=True)
        file.write(encoding.fromlocal(mark))
        file.close()
    finally:
        wlock.release()
    repo._bookmarkcurrent = mark
def sendchanges(ui, master, changes):
    # send change information to one master
    from buildbot.clients import sendchange

    s = sendchange.Sender(master)
    d = defer.Deferred()
    reactor.callLater(0, d.callback, None)

    def send(res, c):
        return s.send(**c)

    for change in changes:
        for k, v in change.items():
            # Yikes!
            if isinstance(v, localstr):
                change[k] = fromlocal(v).decode('utf8', 'replace')
            elif isinstance(v, str):
                change[k] = v.decode('utf8', 'replace')
        d.addCallback(send, change)

    def printSuccess(res):
        print "change(s) sent successfully"

    def printFailure(why):
        print "change(s) NOT sent, something went wrong:"
        print why

    d.addCallbacks(printSuccess, printFailure)
    d.addBoth(lambda _: reactor.stop())
def write(self):
    '''Write bookmarks

    Write the given bookmark => hash dictionary to the .hg/bookmarks file
    in a format equal to those of localtags.

    We also store a backup of the previous state in undo.bookmarks that
    can be copied back on rollback.
    '''
    repo = self._repo
    if repo._bookmarkcurrent not in self:
        setcurrent(repo, None)

    wlock = repo.wlock()
    try:
        file = repo.vfs('bookmarks', 'w', atomictemp=True)
        for name, node in self.iteritems():
            file.write("%s %s\n" % (hex(node), encoding.fromlocal(name)))
        file.close()

        # touch 00changelog.i so hgweb reloads bookmarks (no lock needed)
        try:
            repo.svfs.utime('00changelog.i', None)
        except OSError:
            pass
    finally:
        wlock.release()
def write(self):
    '''Write bookmarks

    Write the given bookmark => hash dictionary to the .hg/bookmarks file
    in a format equal to those of localtags.

    We also store a backup of the previous state in undo.bookmarks that
    can be copied back on rollback.
    '''
    repo = self._repo
    if repo._bookmarkcurrent not in self:
        setcurrent(repo, None)

    wlock = repo.wlock()
    try:
        file = repo.vfs('bookmarks', 'w', atomictemp=True)
        for name, node in self.iteritems():
            file.write("%s %s\n" % (hex(node), encoding.fromlocal(name)))
        file.close()

        # touch 00changelog.i so hgweb reloads bookmarks (no lock needed)
        try:
            os.utime(repo.sjoin('00changelog.i'), None)
        except OSError:
            pass
    finally:
        wlock.release()
def expandpath(path, default=None):
    ep = oldexpandpath(path, default)
    if ep != path:
        return ep
    bent = store.encodefilename(encoding.fromlocal(path))
    if os.path.isdir(os.path.join('.hg', 'branches', bent)):
        return 'lbranch://%s' % path
    return ep
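
# A usage sketch for expandpath() above, assuming a local branch named
# 'work' was created earlier (so '.hg/branches/' contains its encoded
# directory) and 'work' is not already resolved as a [paths] alias:
#
#   >>> expandpath('work')
#   'lbranch://work'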
def loadlocalbranch(self, branch):
    spath = self.localbranchpath(encoding.fromlocal(branch))
    if spath != self.spath:
        if not os.path.isdir(spath):
            raise util.Abort(_('local branch %s not found') % branch)
        self.store = store.store(self.getrequirements(), spath,
                                 util.opener)
        self.spath = self.store.path
        self.sopener = self.store.opener
        self.sopener.options = {}
def template_backsoutnodes(repo, ctx, **args):
    description = encoding.fromlocal(ctx.description())
    backouts = parse_backouts(description)
    # return just the nodes, not the bug numbers
    if backouts and backouts[0]:
        # TRACKING hg47
        if templateutil:
            return templateutil.hybridlist(backouts[0], 'backouts')

        return backouts[0]
def _set_bookmark(repo, mark):
    """Set the name of the remote branch that the repo is tracking."""
    # Based on bookmarks.setcurrent
    wlock = repo.wlock()
    try:
        file = repo.opener('bookrepos.bookmark', 'w', atomictemp=True)
        file.write(encoding.fromlocal(mark))
        file.close()
    finally:
        wlock.release()
def localbranch(self, name):
    # switch to local branch, creating if necessary
    def checkdir(d):
        if not os.path.isdir(d):
            if os.path.exists(d):
                raise util.Abort(_('%s is not a directory') % d)
            return False
        return True

    if self.dirstate.parents()[1] != nullid:
        raise util.Abort(_('merge in progress'))

    obranch = self.getlocalbranch()
    lname = encoding.fromlocal(name)
    if obranch == name:
        return

    omf = self.changectx('').manifest()
    del self.changelog
    del self.manifest

    if not name:
        lbpath = self.join('localbranch')
        if os.path.exists(lbpath):
            os.unlink(lbpath)
    else:
        bdir = self.join('branches')
        if not checkdir(bdir):
            os.mkdir(bdir)
        dest = os.path.join(bdir, store.encodefilename(lname))
        if not checkdir(dest):
            # check for non-store layout
            if self.spath == self.path:
                os.mkdir(dest)
                datadir = os.path.join(dest, 'data')
                util.copyfiles(self.join('data'), datadir)
                for f in ('00changelog.i', '00changelog.d',
                          '00manifest.i', '00manifest.d'):
                    src = self.join(f)
                    if os.path.exists(src):
                        util.copyfiles(src, os.path.join(dest, f))
            else:
                os.mkdir(dest)
                spath = os.path.join(dest, 'store')
                util.copyfiles(self.spath, spath)
        self.opener('localbranch', 'w').write(lname + '\n')

    self.loadlocalbranch(name)
    ctx = self.changectx('tip')
    wlock = self.wlock()
    try:
        self.refreshdirstate(ctx, omf)
    finally:
        wlock.release()
def listkeyspatterns(self, namespace, patterns):
    if not self.capable('pushkey'):
        yield {}, None
    f = wireprotov1peer.future()
    self.ui.debug('preparing listkeys for "%s"\n' % namespace)
    yield {
        'namespace': encoding.fromlocal(namespace),
        'patterns': wireprototypes.encodelist(patterns)
    }, f
    d = f.value
    self.ui.debug('received listkey for "%s": %i bytes\n'
                  % (namespace, len(d)))
    yield pushkey.decodekeys(d)
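
# Note: listkeyspatterns() above follows the batchable peer-method protocol
# used by Mercurial's wire protocol: the generator first yields the encoded
# wire arguments together with a future, and once the batched response has
# arrived it yields the decoded result (here via pushkey.decodekeys).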
def makememctx(repo, ctx, revmap, copyfilectxfn):
    parents = newparents(repo, ctx, revmap)
    # Need to make a copy otherwise modification is made on original,
    # which is just plain wrong.
    msg = encoding.fromlocal(ctx.description())
    new_msg, changed = addcommitid(msg, repo=repo)

    memctx = context.memctx(repo, parents,
                            encoding.tolocal(new_msg), ctx.files(),
                            copyfilectxfn, user=ctx.user(),
                            date=ctx.date(), extra=dict(ctx.extra()))

    return memctx
def alignString(self, inStr, window):
    """
    Add whitespace to the end of a string in order to make it fill
    the screen in the x direction.  The current cursor position is
    taken into account when making this calculation.  The string can span
    multiple lines.
    """
    y, xStart = window.getyx()
    width = self.xScreenSize
    # turn tabs into spaces
    inStr = inStr.expandtabs(4)
    strLen = len(unicode(encoding.fromlocal(inStr), code))
    numSpaces = (width - ((strLen + xStart) % width) - 1)
    return inStr + " " * numSpaces + "\n"
def write_tag(self, ref):
    node = self.parsed_refs[ref]
    tag = git_to_hg_spaces(ref[len('refs/tags/'):])
    branch = self.repo[node].branch()
    # Calling self.repo.tag() doesn't append the tag to the correct
    # commit. So I copied some of localrepo._tag into here.
    # But that method, like much of mercurial's code, is ugly.
    # So I then rewrote it.
    tags_revision = revsingle(self.repo,
                              hghex(branch_tip(self.repo, branch)))
    if '.hgtags' in tags_revision:
        old_tags = tags_revision['.hgtags'].data()
    else:
        old_tags = ''
    newtags = [old_tags]
    if old_tags and old_tags[-1] != '\n':
        newtags.append('\n')

    encoded_tag = encoding.fromlocal(tag)
    tag_line = '%s %s' % (hghex(node), encoded_tag)
    if tag_line in old_tags:
        return  # Don't commit a tag that was previously committed
    newtags.append(tag_line)

    def get_filectx(repo, memctx, file):
        return memfilectx(file, ''.join(newtags))

    if tag in self.parsed_tags:
        author, message = self.parsed_tags[tag]
        user, date, tz = author
        date_tz = (date, tz)
    else:
        message = "Added tag %s for changeset %s" % (tag, hgshort(node))
        user = None
        date_tz = None
    ctx = memctx(self.repo,
                 (branch_tip(self.repo, branch), self.NULL_PARENT),
                 message, ['.hgtags'], get_filectx, user, date_tz,
                 {'branch': branch})

    tmp = encoding.encoding
    encoding.encoding = 'utf-8'
    node = self.repo.commitctx(ctx)
    encoding.encoding = tmp
def write_tag(self, name, node):
    branch = self.repo[node].branch()
    # Calling self.repo.tag() doesn't append the tag to the correct
    # commit. So I copied some of localrepo._tag into here.
    # But that method, like much of mercurial's code, is ugly.
    # So I then rewrote it.
    tags_revision = revsingle(self.repo,
                              hghex(branch_tip(self.repo, branch)))
    if '.hgtags' in tags_revision:
        old_tags = tags_revision['.hgtags'].data()
    else:
        old_tags = ''
    newtags = [old_tags]
    if old_tags and old_tags[-1] != '\n':
        newtags.append('\n')

    encoded_tag = encoding.fromlocal(name)
    tag_line = '%s %s' % (hghex(node), encoded_tag)
    if tag_line in old_tags:
        return  # Don't commit a tag that was previously committed
    newtags.append(tag_line)

    def get_filectx(repo, memctx, file):
        return memfilectx(file, ''.join(newtags))

    if name in self.parsed_tags:
        author, message = self.parsed_tags[name]
        user, date, tz = author
        date_tz = (date, tz)
    else:
        message = "Added tag %s for changeset %s" % (name, hgshort(node))
        user = self.hgrc.get("ui", "username", None)
        date_tz = None  # XXX insert current date here
    ctx = memctx(self.repo,
                 (branch_tip(self.repo, branch), self.NULL_PARENT),
                 message, ['.hgtags'], get_filectx, user, date_tz,
                 {'branch': branch})

    tmp = encoding.encoding
    encoding.encoding = 'utf-8'
    node = self.repo.commitctx(ctx)
    encoding.encoding = tmp
def alignString(self, inStr, window):
    """
    Add whitespace to the end of a string in order to make it fill
    the screen in the x direction.  The current cursor position is
    taken into account when making this calculation.  The string can span
    multiple lines.
    """
    y, xStart = window.getyx()
    width = self.xScreenSize
    # turn tabs into spaces
    inStr = inStr.expandtabs(4)
    try:
        strLen = len(unicode(encoding.fromlocal(inStr), code))
    except:
        # if text is not utf8, then assume an 8-bit single-byte encoding.
        strLen = len(inStr)
    numSpaces = (width - ((strLen + xStart) % width) - 1)
    return inStr + " " * numSpaces + "\n"
def get_backoutbynode(ext_name, repo, ctx):
    """Look for changesets that back out this one."""
    # We limit the distance we search for backouts because an exhaustive
    # search could be very intensive. e.g. you load up the root commit
    # on a repository with 200,000 changesets and that commit is never
    # backed out. This finds most backouts because backouts typically happen
    # shortly after a bad commit is introduced.
    thisshort = short(ctx.node())
    count = 0
    searchlimit = repo.ui.configint(ext_name, b'backoutsearchlimit', 100)
    for bctx in repo.set(b'%ld::', [ctx.rev()]):
        count += 1
        if count >= searchlimit:
            break

        backouts = commitparser.parse_backouts(
            encoding.fromlocal(bctx.description()))
        if backouts and thisshort in backouts[0]:
            return bctx.hex()
    return None
def activate(repo, mark):
    """
    Set the given bookmark to be 'active', meaning that this bookmark will
    follow new commits that are made.
    The name is recorded in .hg/bookmarks.current
    """
    if mark not in repo._bookmarks:
        raise AssertionError('bookmark %s does not exist!' % mark)

    active = repo._activebookmark
    if active == mark:
        return

    wlock = repo.wlock()
    try:
        file = repo.vfs('bookmarks.current', 'w', atomictemp=True)
        file.write(encoding.fromlocal(mark))
        file.close()
    finally:
        wlock.release()
    repo._activebookmark = mark
def setcurrent(repo, mark):
    '''Set the name of the bookmark that we are currently on

    Set the name of the bookmark that we are on (hg update <bookmark>).
    The name is recorded in .hg/bookmarks.current
    '''
    if mark not in repo._bookmarks:
        raise AssertionError('bookmark %s does not exist!' % mark)

    current = repo._bookmarkcurrent
    if current == mark:
        return

    wlock = repo.wlock()
    try:
        file = repo.vfs('bookmarks.current', 'w', atomictemp=True)
        file.write(encoding.fromlocal(mark))
        file.close()
    finally:
        wlock.release()
    repo._bookmarkcurrent = mark
def setcurrent(repo, mark):
    '''Set the name of the bookmark that we are currently on

    Set the name of the bookmark that we are on (hg update <bookmark>).
    The name is recorded in .hg/bookmarks.current
    '''
    current = repo._bookmarkcurrent
    if current == mark:
        return

    if mark not in repo._bookmarks:
        mark = ''

    wlock = repo.wlock()
    try:
        file = repo.opener('bookmarks.current', 'w', atomictemp=True)
        file.write(encoding.fromlocal(mark))
        file.close()
    finally:
        wlock.release()
    repo._bookmarkcurrent = mark
def wrappedpushbookmark(orig, pushop):
    result = orig(pushop)

    # pushop.ret was renamed to pushop.cgresult in Mercurial 3.2. We can drop
    # this branch once we drop <3.2 support.
    if hasattr(pushop, 'cgresult'):
        origresult = pushop.cgresult
    else:
        origresult = pushop.ret

    # Don't do anything if error from push.
    if not origresult:
        return result

    remoteurl = pushop.remote.url()
    tree = repository.resolve_uri_to_tree(remoteurl)
    # We don't support release trees (yet) because they have special flags
    # that need to get updated.
    if tree and tree in repository.RELEASE_TREES:
        return result

    ui = pushop.ui
    if tree and tree in ui.configlist('bzpost', 'excludetrees', default=[]):
        return result

    if tree:
        baseuri = repository.resolve_trees_to_uris([tree])[0][1].encode('utf-8')
        assert baseuri
    else:
        # This isn't a known Firefox tree. Fall back to resolving URLs by
        # hostname.

        # Only attend Mozilla's server.
        if not updateunknown(remoteurl, repository.BASE_WRITE_URI, ui):
            return result

        baseuri = remoteurl.replace(repository.BASE_WRITE_URI,
                                    repository.BASE_READ_URI).rstrip('/')

    bugsmap = {}
    lastbug = None
    lastnode = None

    for node in pushop.outgoing.missing:
        ctx = pushop.repo[node]

        # Don't do merge commits.
        if len(ctx.parents()) > 1:
            continue

        # Our bug parser is buggy for Gaia bump commit messages.
        if '<*****@*****.**>' in ctx.user():
            continue

        # Pushing to Try (and possibly other repos) could push unrelated
        # changesets that have been pushed to an official tree but aren't yet
        # on this specific remote. We use the phase information as a proxy
        # for "already pushed" and prune public changesets from consideration.
        if tree == 'try' and ctx.phase() == phases.public:
            continue

        bugs = parse_bugs(ctx.description())

        if not bugs:
            continue

        bugsmap.setdefault(bugs[0], []).append(ctx.hex())
        lastbug = bugs[0]
        lastnode = ctx.hex()

    if not bugsmap:
        return result

    bzauth = getbugzillaauth(ui)
    if not bzauth:
        return result

    bzurl = ui.config('bugzilla', 'url', 'https://bugzilla.mozilla.org/rest')

    bugsy = Bugsy(username=bzauth.username, password=bzauth.password,
                  userid=bzauth.userid, cookie=bzauth.cookie,
                  api_key=bzauth.apikey, bugzilla_url=bzurl)

    def public_url_for_bug(bug):
        '''Turn 123 into "https://bugzilla.mozilla.org/show_bug.cgi?id=123".'''
        public_baseurl = bzurl.replace('rest', '').rstrip('/')
        return '%s/show_bug.cgi?id=%s' % (public_baseurl, bug)

    # If this is a try push, we paste the Treeherder link for the tip commit,
    # because the per-commit URLs don't have much value.
    # TODO roll this into normal pushing so we get a Treeherder link in bugs
    # as well.
    if tree == 'try' and lastbug:
        treeherderurl = repository.treeherder_url(tree, lastnode)

        bug = bugsy.get(lastbug)
        comments = bug.get_comments()
        for comment in comments:
            if treeherderurl in comment.text:
                return result

        ui.write(_('recording Treeherder push at %s\n') %
                 public_url_for_bug(lastbug))
        bug.add_comment(treeherderurl)

        return result

    for bugnumber, nodes in bugsmap.items():
        bug = bugsy.get(bugnumber)

        comments = bug.get_comments()
        missing_nodes = []

        # When testing whether this changeset URL is referenced in a
        # comment, we only need to test for the node fragment. The
        # important side-effect is that each unique node for a changeset
        # is recorded in the bug.
        for node in nodes:
            if not any(node in comment.text for comment in comments):
                missing_nodes.append(node)

        if not missing_nodes:
            ui.write(_('bug %s already knows about pushed changesets\n') %
                     bugnumber)
            continue

        lines = []

        for node in missing_nodes:
            ctx = pushop.repo[node]
            lines.append('%s/rev/%s' % (baseuri, ctx.hex()))

            # description is using local encodings. Depending on the
            # configured encoding, replacement characters could be involved.
            # We use encoding.fromlocal() to get the raw bytes, which should
            # be valid UTF-8.
            lines.append(encoding.fromlocal(ctx.description()).splitlines()[0])
            lines.append('')

        comment = '\n'.join(lines)

        ui.write(_('recording push at %s\n') % public_url_for_bug(bugnumber))
        bug.add_comment(comment)

    return result
def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
    if hooktype != 'changegroup':
        ui.status('hgbuildbot: hooktype %s not supported.\n' % hooktype)
        return

    # Read config parameters
    masters = ui.configlist('hgbuildbot', 'master')
    if not masters:
        ui.write('* You must add a [hgbuildbot] section to .hg/hgrc in '
                 'order to use the Buildbot hook\n')
        return

    # - virtualenv
    venv = ui.config('hgbuildbot', 'venv', None)
    if venv is not None:
        if not os.path.isdir(venv):
            ui.write('* Virtualenv "%s" does not exist.\n' % venv)
        sys.path.insert(0, venv)

    # - auth
    username = ui.config('hgbuildbot', 'user', 'change')
    password = ui.config('hgbuildbot', 'passwd', 'changepw')

    # - branch
    branchtype = ui.config('hgbuildbot', 'branchtype', 'inrepo')
    branch = ui.config('hgbuildbot', 'branch', None)

    # - repo URL
    baseurl = ui.config('hgbuildbot', 'baseurl',
                        ui.config('web', 'baseurl', ''))
    stripcount = int(ui.config('hgbuildbot', 'strip',
                               ui.config('notify', 'strip', 0)))

    # - category, project and codebase
    category = ui.config('hgbuildbot', 'category', None)
    project = ui.config('hgbuildbot', 'project', '')
    codebase = ui.config('hgbuildbot', 'codebase', '')

    # Only import this after the (optional) venv has been added to sys.path:
    from buildbot.clients import sendchange
    from twisted.internet import defer, reactor

    # Process changesets
    if branch is None and branchtype == 'dirname':
        branch = os.path.basename(repo.root)
    # If branchtype == 'inrepo', update "branch" for each commit later.

    repository = strip(repo.root, stripcount)
    repository = baseurl + repository

    start = repo[node].rev()
    end = len(repo)

    for master in masters:
        s = sendchange.Sender(master, auth=(username, password))
        d = defer.Deferred()
        reactor.callLater(0, d.callback, None)

        for rev in range(start, end):
            # send changeset
            node = repo.changelog.node(rev)
            log = repo.changelog.read(node)
            manifest, user, (time, timezone), files, desc, extra = log
            parents = [p for p in repo.changelog.parents(node)
                       if p != nullid]

            if branchtype == 'inrepo':
                branch = extra['branch']
            if branch:
                branch = fromlocal(branch)

            is_merge = len(parents) > 1
            # merges don't always contain files, but at least one file is
            # required by buildbot
            if is_merge and not files:
                files = ["merge"]
            properties = {'is_merge': is_merge}

            change = {
                # 'master': master,
                'branch': branch,
                'revision': hex(node),
                'comments': fromlocal(desc),
                'files': files,
                'username': fromlocal(user),
                'category': category,
                'time': time,
                'properties': properties,
                'repository': repository,
                'project': project,
                'codebase': codebase,
            }
            d.addCallback(send_cs, s, change)

        def _printSuccess(res):
            ui.status(s.getSuccessString(res) + '\n')

        def _printFailure(why):
            ui.warn(s.getFailureString(why) + '\n')

        d.addCallbacks(_printSuccess, _printFailure)
        d.addBoth(lambda _: reactor.stop())
        reactor.run()
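
# For reference, a sketch of the hgrc configuration the hook above reads.
# The hook path, master address, and URL are examples only; user/passwd
# default to change/changepw when unset:
#
#   [hooks]
#   changegroup.buildbot = python:/path/to/hgbuildbot.py:hook
#
#   [hgbuildbot]
#   master = buildmaster.example.org:9989
#   branchtype = inrepo
#   baseurl = https://hg.example.org/
#   strip = 0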
def addmetadata(repo, ctx, d, onlycheap=False):
    """Add changeset metadata for hgweb templates."""
    description = encoding.fromlocal(ctx.description())

    def bugsgen(_context):
        '''Generator for bugs list'''
        for bug in commitparser.parse_bugs(description):
            bug = pycompat.bytestr(bug)
            yield {
                b'no': bug,
                b'url': b'https://bugzilla.mozilla.org/show_bug.cgi?id=%s' % bug,
            }

    def reviewersgen(_context):
        '''Generator for reviewers list'''
        for reviewer in commitparser.parse_reviewers(description):
            yield {
                b'name': reviewer,
                b'revset': b'reviewer(%s)' % reviewer,
            }

    def backoutsgen(_context):
        '''Generator for backouts list'''
        backouts = commitparser.parse_backouts(description)
        if backouts:
            for node in backouts[0]:
                try:
                    bctx = scmutil.revsymbol(repo, node)
                    yield {b'node': bctx.hex()}
                except error.RepoLookupError:
                    pass

    d[b'reviewers'] = templateutil.mappinggenerator(reviewersgen)
    d[b'bugs'] = templateutil.mappinggenerator(bugsgen)
    d[b'backsoutnodes'] = templateutil.mappinggenerator(backoutsgen)

    # Repositories can define which TreeHerder repository they are associated
    # with.
    treeherder = repo.ui.config(b'mozilla', b'treeherder_repo')
    if treeherder:
        d[b'treeherderrepourl'] = b'https://treeherder.mozilla.org/#/jobs?repo=%s' % treeherder
        d[b'treeherderrepo'] = treeherder

    push = repo.pushlog.pushfromchangeset(ctx)
    # Don't print Perfherder link on non-publishing repos (like Try)
    # because the previous push likely has nothing to do with this
    # push.
    # Changesets on autoland are in the 'draft' phase until they get merged
    # to mozilla-central.
    if push and push.nodes and (repo.ui.configbool(b'phases', b'publish', True)
                                or treeherder == b'autoland'):
        lastpushhead = repo[push.nodes[0]].hex()
        d[b'perfherderurl'] = (
            b'https://treeherder.mozilla.org/perf.html#/compare?'
            b'originalProject=%s&'
            b'originalRevision=%s&'
            b'newProject=%s&'
            b'newRevision=%s') % (treeherder, push.nodes[-1],
                                  treeherder, lastpushhead)

    # If this changeset was converted from another one and we know which repo
    # it came from, add that metadata.
    convertrevision = ctx.extra().get(b'convert_revision')
    if convertrevision:
        sourcerepo = repo.ui.config(b'hgmo', b'convertsource')
        if sourcerepo:
            d[b'convertsourcepath'] = sourcerepo
            d[b'convertsourcenode'] = convertrevision

    # Did the push to this repo include extra data about the automated landing
    # system used?
    # We omit the key if it has no value so that the 'json' filter function in
    # the map file will return null for the key's value. Otherwise the filter
    # will return a JSON empty string, even for False-y values like None.
    landingsystem = ctx.extra().get(b'moz-landing-system')
    if landingsystem:
        d[b'landingsystem'] = landingsystem

    if onlycheap:
        return

    # Obtain the Gecko/app version/milestone.
    #
    # We could probably only do this if the repo is a known app repo (by
    # looking at the initial changeset). But, path based lookup is relatively
    # fast, so just do it. However, we need this in the "onlycheap"
    # section because resolving manifests is relatively slow and resolving
    # several on changelist pages may add seconds to page load times.
    try:
        fctx = repo.filectx(b'config/milestone.txt', changeid=ctx.node())
        lines = fctx.data().splitlines()
        lines = [l for l in lines if not l.startswith(b'#') and l.strip()]
        if lines:
            d[b'milestone'] = lines[0].strip()
    except error.LookupError:
        pass

    backout_node = get_backoutbynode(b'hgmo', repo, ctx)
    if backout_node is not None:
        d[b'backedoutbynode'] = backout_node
def _write(self, fp):
    for name, node in self.iteritems():
        fp.write("%s %s\n" % (hex(node), encoding.fromlocal(name)))
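
# For illustration, the file written by _write() above holds one
# "<40-hex-digit node> <bookmark name>" pair per line (node values made up):
#
#   0123456789abcdef0123456789abcdef01234567 my-bookmark
#   89abcdef0123456789abcdef0123456789abcdef feature/login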
def _processpushreview(repo, req, ldap_username):
    """Handle a request to turn changesets into review requests.

    ``ldap_username`` is the LDAP username to associate with the MozReview
    account whose credentials are passed as part of the request. We implicitly
    trust the passed LDAP username has been authenticated to belong to the
    MozReview account.
    """
    bzusername = req.get('bzusername')
    bzapikey = req.get('bzapikey')

    if not bzusername or not bzapikey:
        return errorresponse('Bugzilla API keys not configured; see '
            'https://mozilla-version-control-tools.readthedocs.io/en/latest/mozreview/install.html#bugzilla-credentials '
            'for instructions on how to configure your client')

    identifier = req['identifier']
    nodes = []
    precursors = {}
    for cset in req['changesets']:
        node = cset['node']
        nodes.append(node)
        if 'precursors' in cset:
            precursors[node] = cset['precursors']

    diffopts = mdiff.diffopts(context=8, showfunc=True, git=True)

    commits = {
        'individual': [],
        'squashed': {},
        'obsolescence': req.get('obsolescence', False),
    }

    # We do multiple passes over the changesets requested for review because
    # some operations could be slow or may involve queries to external
    # resources. We want to run the fast checks first so we don't waste
    # resources before finding the error. The drawback here is the client
    # will not see the full set of errors. We may revisit this decision
    # later.
    for node in nodes:
        ctx = repo[node]
        # Reviewing merge commits doesn't make much sense and only makes
        # situations more complicated. So disallow the practice.
        if len(ctx.parents()) > 1:
            msg = 'cannot review merge commits (%s)' % short(ctx.node())
            return errorresponse(msg)

    # Invalid or confidential bugs will raise errors in the Review Board
    # interface later. Fail fast to minimize wasted time and resources.
    try:
        reviewid = ReviewID(identifier)
    except error.Abort as e:
        return errorresponse(str(e))

    # We use xmlrpc here because the Bugsy REST client doesn't currently
    # handle errors in responses.
    # We don't use available Bugzilla credentials because that's the
    # easiest way to test for confidential bugs. If/when we support posting
    # reviews to confidential bugs, we'll need to change this.
    xmlrpc_url = repo.ui.config('bugzilla', 'url').rstrip('/') + '/xmlrpc.cgi'
    proxy = xmlrpclib.ServerProxy(xmlrpc_url)
    try:
        proxy.Bug.get({'ids': [reviewid.bug]})
    except xmlrpclib.Fault as f:
        if f.faultCode == 101:
            return errorresponse('bug %s does not exist; '
                'please change the review id (%s)' % (reviewid.bug,
                    reviewid.full))
        elif f.faultCode == 102:
            return errorresponse('bug %s could not be accessed '
                '(we do not currently allow posting of reviews to '
                'confidential bugs)' % reviewid.bug)

        return errorresponse('server error verifying bug %s exists; '
            'please retry or report a bug' % reviewid.bug)

    # Find the first public node in the ancestry of this series. This is
    # used by MozReview to query the upstream repo for additional context.
    first_public_ancestor = None
    for node in repo[nodes[0]].ancestors():
        ctx = repo[node]
        if ctx.phase() == phases.public:
            first_public_ancestor = ctx.hex()
            break
    commits['squashed']['first_public_ancestor'] = first_public_ancestor

    # Note patch.diff() appears to accept anything that can be fed into
    # repo[]. However, it blindly does a hex() on the argument as opposed
    # to the changectx, so we need to pass in the binary node.
    base_ctx = repo[nodes[0]].p1()
    base_parent_node = base_ctx.node()

    for i, node in enumerate(nodes):
        ctx = repo[node]
        p1 = ctx.p1().node()
        diff = ''.join(patch.diff(repo, node1=p1, node2=ctx.node(),
                                  opts=diffopts)) + '\n'

        if i:
            base_commit_id = nodes[i - 1]
        else:
            base_commit_id = base_ctx.hex()

        summary = encoding.fromlocal(ctx.description().splitlines()[0])

        if req.get('deduce-reviewers', True):
            reviewers = list(commitparser.parse_rquestion_reviewers(summary))
            requal_reviewers = list(commitparser.parse_requal_reviewers(summary))
        else:
            reviewers = []
            requal_reviewers = []

        commits['individual'].append({
            'id': node,
            'author': encoding.fromlocal(ctx.user()),
            'precursors': precursors.get(node, []),
            'message': encoding.fromlocal(ctx.description()),
            # Diffs are arbitrary byte sequences. json.dump() will try to
            # interpret str as UTF-8, which could fail. Instead of trying
            # to coerce the str to a unicode or use ensure_ascii=False (which
            # is a giant pain), just base64 encode the diff in the JSON.
            'diff_b64': diff.encode('base64'),
            'bug': str(reviewid.bug),
            'base_commit_id': base_commit_id,
            'first_public_ancestor': first_public_ancestor,
            'reviewers': reviewers,
            'requal_reviewers': requal_reviewers
        })

    squashed_diff = b''.join(patch.diff(repo, node1=base_parent_node,
                                        node2=repo[nodes[-1]].node(),
                                        opts=diffopts)) + '\n'

    commits['squashed']['diff_b64'] = squashed_diff.encode('base64')
    commits['squashed']['base_commit_id'] = base_ctx.hex()

    rburl = repo.ui.config('reviewboard', 'url', None).rstrip('/')
    repoid = repo.ui.configint('reviewboard', 'repoid', None)
    privileged_rb_username = repo.ui.config('reviewboard', 'username', None)
    privileged_rb_password = repo.ui.config('reviewboard', 'password', None)

    if ldap_username:
        associate_ldap_username(rburl, ldap_username, privileged_rb_username,
                                privileged_rb_password, username=bzusername,
                                apikey=bzapikey)

    res = {
        'rburl': rburl,
        'reviewid': identifier,
        'reviewrequests': {},
        'display': [],
    }

    try:
        parentrid, commitmap, reviews, warnings = \
            post_reviews(rburl, repoid, identifier, commits,
                         privileged_rb_username, privileged_rb_password,
                         username=bzusername, apikey=bzapikey)
        res['display'].extend(warnings)

        res['parentrrid'] = parentrid
        res['reviewrequests'][parentrid] = {
            'status': reviews[parentrid]['status'],
            'public': reviews[parentrid]['public'],
        }

        for node, rid in commitmap.items():
            rd = reviews[rid]
            res['reviewrequests'][rid] = {
                'node': node,
                'status': rd['status'],
                'public': rd['public'],
            }

            if rd['reviewers']:
                res['reviewrequests'][rid]['reviewers'] = list(rd['reviewers'])
    except AuthorizationError as e:
        return errorresponse(str(e))
    except BadRequestError as e:
        return errorresponse(str(e))

    return res
def wrappedpushdiscovery(orig, pushop):
    """Wraps exchange._pushdiscovery to add extra review metadata.

    We discover what nodes to review before discovery. This ensures that
    errors are discovered and reported quickly, without waiting for
    server communication.
    """
    pushop.reviewnodes = None

    caps = getreviewcaps(pushop.remote)
    if 'pushreview' not in caps:
        return orig(pushop)

    ui = pushop.ui
    repo = pushop.repo

    if repo.noreviewboardpush:
        return orig(pushop)

    # If no arguments are specified to push, Mercurial will try to push all
    # non-remote changesets by default. This can result in unexpected
    # behavior, especially for people doing multi-headed development.
    #
    # Since we reject pushes with multiple heads anyway, default to pushing
    # the working copy.
    if not pushop.revs:
        pushop.revs = [repo['.'].node()]

    tipnode = None
    basenode = None

    # Our prepushoutgoing hook validates that all pushed changesets are
    # part of the same DAG head. If revisions were specified by the user,
    # the last is the tip commit to review and the first (if more than 1)
    # is the base commit to review.
    #
    # Note: the revisions are in the order they were specified by the user.
    # This may not be DAG order. So we have to explicitly order them here.
    revs = sorted(repo[r].rev() for r in pushop.revs)
    tipnode = repo[revs[-1]].node()
    if len(revs) > 1:
        basenode = repo[revs[0]].node()

    if repo.pushsingle:
        basenode = tipnode

    # Given a base and tip node, find all changesets to review.
    #
    # A solution that works most of the time is to find all non-public
    # ancestors of that node. This is our default.
    #
    # If basenode is specified, we stop the traversal when we encounter it.
    #
    # Note that we will still refuse to review a public changeset even with
    # basenode. This decision is somewhat arbitrary and can be revisited later
    # if there is an actual need to review public changesets.
    nodes = [tipnode]
    # Special case where basenode is the tip node.
    if basenode and tipnode == basenode:
        pass
    else:
        for node in repo[tipnode].ancestors():
            ctx = repo[node]

            if ctx.phase() == phases.public:
                break
            if basenode and ctx.node() == basenode:
                nodes.insert(0, ctx.node())
                break

            nodes.insert(0, ctx.node())

    # Filter out public nodes.
    publicnodes = []
    for node in nodes:
        ctx = repo[node]
        if ctx.phase() == phases.public:
            publicnodes.append(node)
            ui.status(_('(ignoring public changeset %s in review request)\n')
                      % ctx.hex()[0:12])

    nodes = [n for n in nodes if n not in publicnodes]
    if not nodes:
        raise util.Abort(
            _('no non-public changesets left to review'),
            hint=_('add or change the -r argument to include draft '
                   'changesets'))

    # We stop completely empty changesets prior to review.
    for node in nodes:
        ctx = repo[node]
        if not ctx.files():
            raise util.Abort(
                _('cannot review empty changeset %s') % ctx.hex()[:12],
                hint=_('add files to or remove changeset'))

    # Ensure all reviewed changesets have commit IDs.
    replacenodes = []
    for node in nodes:
        ctx = repo[node]
        if not parse_commit_id(encoding.fromlocal(ctx.description())):
            replacenodes.append(node)

    def makememctx(repo, ctx, revmap, copyfilectxfn):
        parents = newparents(repo, ctx, revmap)
        # Need to make a copy otherwise modification is made on original,
        # which is just plain wrong.
        msg = encoding.fromlocal(ctx.description())
        new_msg, changed = addcommitid(msg, repo=repo)

        memctx = context.memctx(repo, parents,
                                encoding.tolocal(new_msg), ctx.files(),
                                copyfilectxfn, user=ctx.user(),
                                date=ctx.date(), extra=dict(ctx.extra()))

        return memctx

    if replacenodes:
        ui.status(_('(adding commit id to %d changesets)\n') %
                  (len(replacenodes)))
        nodemap = replacechangesets(repo, replacenodes, makememctx,
                                    backuptopic='addcommitid')

        # Since we're in the middle of an operation, update references
        # to rewritten nodes.
        nodes = [nodemap.get(node, node) for node in nodes]
        pushop.revs = [nodemap.get(node, node) for node in pushop.revs]

    pushop.reviewnodes = nodes

    # Since we may rewrite changesets to contain review metadata after
    # push, abort immediately if the working directory state is not
    # compatible with rewriting. This prevents us from successfully
    # pushing and failing to update commit metadata after the push. i.e.
    # it prevents potential loss of metadata.
    #
    # There may be some scenarios where we don't rewrite after push.
    # But coding that here would be complicated. And future server changes
    # may change things like review request mapping, which may invalidate
    # client assumptions. So always assume a rewrite is needed.
    impactedrevs = list(repo.revs('%ln::', nodes))
    if repo['.'].rev() in impactedrevs:
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)

    return orig(pushop)
def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
    # read config parameters
    baseurl = ui.config('hgbuildbot', 'baseurl',
                        ui.config('web', 'baseurl', ''))
    masters = ui.configlist('hgbuildbot', 'master')
    if masters:
        branchtype = ui.config('hgbuildbot', 'branchtype', 'inrepo')
        branch = ui.config('hgbuildbot', 'branch')
        fork = ui.configbool('hgbuildbot', 'fork', False)
        # notify also has this setting
        stripcount = int(ui.config('notify', 'strip') or
                         ui.config('hgbuildbot', 'strip', 3))
        category = ui.config('hgbuildbot', 'category', None)
        project = ui.config('hgbuildbot', 'project', '')
        auth = ui.config('hgbuildbot', 'auth', None)
    else:
        ui.write("* You must add a [hgbuildbot] section to .hg/hgrc in "
                 "order to use buildbot hook\n")
        return

    if hooktype != "changegroup":
        ui.status("hgbuildbot: hooktype %s not supported.\n" % hooktype)
        return

    if fork:
        child_pid = os.fork()
        if child_pid == 0:
            # child
            pass
        else:
            # parent
            ui.status("Notifying buildbot...\n")
            return

    # only import inside the fork if forked
    from buildbot.clients import sendchange
    from twisted.internet import defer, reactor

    if branch is None:
        if branchtype == 'dirname':
            branch = os.path.basename(repo.root)

    if not auth:
        auth = 'change:changepw'
    auth = auth.split(':', 1)

    # process changesets
    def _send(res, s, c):
        if not fork:
            ui.status("rev %s sent\n" % c['revision'])
        return s.send(c['branch'], c['revision'], c['comments'],
                      c['files'], c['username'], category=category,
                      repository=repository, project=project, vc='hg',
                      properties=c['properties'])

    try:    # first try Mercurial 1.1+ api
        start = repo[node].rev()
        end = len(repo)
    except TypeError:   # else fall back to old api
        start = repo.changelog.rev(bin(node))
        end = repo.changelog.count()

    repository = strip(repo.root, stripcount)
    repository = baseurl + repository

    for master in masters:
        s = sendchange.Sender(master, auth=auth)
        d = defer.Deferred()
        reactor.callLater(0, d.callback, None)

        for rev in xrange(start, end):
            # send changeset
            node = repo.changelog.node(rev)
            manifest, user, (time, timezone), files, desc, extra = \
                repo.changelog.read(node)
            parents = filter(lambda p: not p == nullid,
                             repo.changelog.parents(node))
            if branchtype == 'inrepo':
                branch = extra['branch']
            is_merge = len(parents) > 1
            # merges don't always contain files, but at least one file is
            # required by buildbot
            if is_merge and not files:
                files = ["merge"]
            properties = {'is_merge': is_merge}
            if branch:
                branch = fromlocal(branch)
            change = {
                'master': master,
                'username': fromlocal(user),
                'revision': hex(node),
                'comments': fromlocal(desc),
                'files': files,
                'branch': branch,
                'properties': properties
            }
            d.addCallback(_send, s, change)

        def _printSuccess(res):
            ui.status(s.getSuccessString(res) + '\n')

        def _printFailure(why):
            ui.warn(s.getFailureString(why) + '\n')

        d.addCallbacks(_printSuccess, _printFailure)
        d.addBoth(lambda _: reactor.stop())
        reactor.run()

    if fork:
        os._exit(os.EX_OK)
    else:
        return
def test_invalid_message(self):
    repo = self._load_fixture_and_fetch('invalid_utf8.tar.gz')
    # changelog returns descriptions in local encoding
    desc = encoding.fromlocal(repo[0].description())
    self.assertEqual(desc.decode('utf8'), u'bl\xe5b\xe6rgr\xf8d')
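
# Note: u'bl\xe5b\xe6rgr\xf8d' above is u'blåbærgrød', a common non-ASCII
# test string. The test checks that a commit message that was not valid
# UTF-8 at commit time (per the fixture name) round-trips through
# encoding.fromlocal() to bytes that decode cleanly as UTF-8.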
def hg2u(s):
    """Returns a unicode object representing the mercurial string."""
    return encoding.fromlocal(s).decode("utf-8")
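
# A minimal usage sketch for hg2u(), assuming encoding.encoding is 'latin-1'
# on this host: fromlocal() re-encodes the local bytes as UTF-8, which
# .decode("utf-8") then turns into a unicode object:
#
#   >>> hg2u('caf\xe9')   # latin-1 bytes for u'café'
#   u'caf\xe9'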
def _incoming(ui, repo, **kwargs):
    # Ensure that no fancying of output is enabled (e.g. coloring)
    os.environ['TERM'] = 'dumb'
    ui.setconfig('ui', 'interactive', 'False')
    ui.setconfig('ui', 'formatted', 'False')
    try:
        colormod = sys.modules['hgext.color']
    except KeyError:
        pass
    else:
        colormod._styles.clear()

    blacklisted = ui.config('mail', 'diff-blacklist', '').split()

    displayer = cmdutil.changeset_printer(ui, repo, False, False, True)
    ctx = repo[kwargs['node']]
    displayer.show(ctx)
    log = displayer.hunk[ctx.rev()]
    user = os.environ.get('HGPUSHER', 'local')
    path = '/'.join(repo.root.split('/')[4:])

    body = []
    #body += ['%s pushed %s to %s:' % (user, str(ctx), path), '']
    body += [CSET_URL % (path, ctx)]
    body += [line for line in log.splitlines()[:-2]
             if line != 'tag: tip']
    body += ['summary:\n ' + fromlocal(ctx.description())]

    # ctx.files() gives us misleading info on merges, we use a diffstat
    # instead
    body += ['', 'files:']
    diffopts = patch.diffopts(repo.ui, {'git': True, 'showfunc': True})
    parents = ctx.parents()
    node1 = parents and parents[0].node() or nullid
    node2 = ctx.node()
    diffchunks = list(patch.diff(repo, node1, node2, opts=diffopts))
    diffstat = patch.diffstat(iterlines(diffchunks), width=60, git=True)
    for line in iterlines([''.join(diffstat)]):
        body.append(' ' + line)
    body += ['', '']

    diffchunks = strip_bin_diffs(diffchunks)
    diffchunks = strip_blacklisted_files(diffchunks, blacklisted)
    body.append(''.join(chunk for chunk in diffchunks))

    body.append('-- ')
    body.append('Repository URL: %s%s' % (BASE, path))

    to = ui.config('mail', 'notify', None)
    if to is None:
        print 'no email address configured'
        return False

    from_ = ui.config('mail', 'sender', None)
    if from_ is None:
        from_ = to
    sender = '%s <%s>' % (user, from_)

    prefixes = [path]

    if len(parents) == 2:
        b1, b2, b = parents[0].branch(), parents[1].branch(), ctx.branch()
        if b in (b1, b2):
            bp = b2 if b == b1 else b1
            # normal case
            prefixes.append('(merge %s -> %s)' % (bp, b))
        else:
            # XXX really??
            prefixes.append('(merge %s + %s -> %s)' % (b1, b2, b))
    else:
        branch = ctx.branch()
        if branch != 'default':
            prefixes.append('(%s)' % branch)

    desc = ctx.description().splitlines()[0]
    if len(desc) > 80:
        desc = desc[:80]
        if ' ' in desc:
            desc = desc.rsplit(' ', 1)[0]

    if prefixes:
        prefixes = ' '.join(prefixes) + ': '
    else:
        prefixes = ''

    subj = prefixes + desc

    host = ui.config('smtp', 'host', '')
    port = int(ui.config('smtp', 'port', 0))
    smtp = smtplib.SMTP(host, port)
    username = ui.config('smtp', 'username', '')
    if username:
        smtp.login(username, ui.config('smtp', 'password', ''))

    send(smtp, subj, sender, to, '\n'.join(body) + '\n')
    smtp.close()

    ui.status('notified %s of incoming changeset %s\n' % (to, ctx))
    return False
def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
    if hooktype != 'changegroup':
        ui.status('hgbuildbot: hooktype %s not supported.\n' % hooktype)
        return

    # Read config parameters
    masters = ui.configlist('hgbuildbot', 'master')
    if not masters:
        ui.write('* You must add a [hgbuildbot] section to .hg/hgrc in '
                 'order to use the Buildbot hook\n')
        return

    # - virtualenv
    venv = ui.config('hgbuildbot', 'venv', None)
    if venv is not None:
        if not os.path.isdir(venv):
            ui.write('* Virtualenv "%s" does not exist.\n' % venv)
        else:
            activate_this = os.path.join(venv, "bin/activate_this.py")
            exec(open(activate_this).read(), dict(__file__=activate_this))

    # - auth
    username = ui.config('hgbuildbot', 'user')
    password = ui.config('hgbuildbot', 'passwd')
    if username is not None and password is not None:
        auth = requests.auth.HTTPBasicAuth(username, password)
    else:
        auth = None

    # - branch
    branchtype = ui.config('hgbuildbot', 'branchtype', 'inrepo')
    branch = ui.config('hgbuildbot', 'branch', None)

    # - repo URL
    baseurl = ui.config('hgbuildbot', 'baseurl',
                        ui.config('web', 'baseurl', ''))
    stripcount = int(ui.config('hgbuildbot', 'strip',
                               ui.config('notify', 'strip', 0)))

    # - category, project and codebase
    category = ui.config('hgbuildbot', 'category', None)
    project = ui.config('hgbuildbot', 'project', '')
    codebase = ui.config('hgbuildbot', 'codebase', '')

    # Process changesets
    if branch is None and branchtype == 'dirname':
        branch = os.path.basename(repo.root)
    # If branchtype == 'inrepo', update "branch" for each commit later.

    repository = strip(repo.root, stripcount)
    repository = baseurl + repository

    start = repo[node].rev()
    end = len(repo)

    for rev in range(start, end):
        # send changeset
        node = repo.changelog.node(rev)
        log = repo.changelog.read(node)
        manifest, user, (time, timezone), files, desc, extra = log
        parents = [p for p in repo.changelog.parents(node) if p != nullid]

        if branchtype == 'inrepo':
            branch = extra['branch']
        if branch:
            branch = fromlocal(branch)

        is_merge = len(parents) > 1
        # merges don't always contain files, but at least one file is
        # required by buildbot
        if is_merge and not files:
            files = ["merge"]
        properties = {'is_merge': is_merge}

        change = {
            # 'master': master,
            'branch': branch,
            'revision': hex(node),
            'comments': fromlocal(desc),
            'files': json.dumps(files),
            'author': fromlocal(user),
            'category': category,
            'when': time,
            'properties': json.dumps(properties),
            'repository': repository,
            'project': project,
            'codebase': codebase,
        }

        for master in masters:
            response = requests.post(
                master,
                auth=auth,
                params=change,
                headers={"Content-Type": "application/x-www-form-urlencoded"},
            )
            if not response.ok:
                ui.warn("couldn't notify buildbot about {}: {} {}".format(
                    hex(node)[:12], response.status_code, response.reason
                ))
            else:
                ui.status("notified buildbot about {}".format(hex(node)[:12]))
def write(self):
    f = self._repo.vfs('remoterefs', 'w', atomictemp=True)
    for ref in sorted(self):
        f.write('%s %s\n' % (hex(self[ref]), encoding.fromlocal(ref)))
    f.close()
def reviewboard(repo, proto, args=None):
    proto.redirect()

    o = parsepayload(proto, args)
    if isinstance(o, ServerError):
        return formatresponse(str(o))

    bzusername = o['bzusername']
    bzapikey = o['bzapikey']

    identifier, nodes, precursors = parseidentifier(o)
    if not identifier:
        return ['error %s' % _('no review identifier in request')]

    diffopts = mdiff.diffopts(context=8, showfunc=True, git=True)

    commits = {
        'individual': [],
        'squashed': {}
    }

    # We do multiple passes over the changesets requested for review because
    # some operations could be slow or may involve queries to external
    # resources. We want to run the fast checks first so we don't waste
    # resources before finding the error. The drawback here is the client
    # will not see the full set of errors. We may revisit this decision
    # later.
    for node in nodes:
        ctx = repo[node]
        # Reviewing merge commits doesn't make much sense and only makes
        # situations more complicated. So disallow the practice.
        if len(ctx.parents()) > 1:
            msg = 'cannot review merge commits (%s)' % short(ctx.node())
            return formatresponse('error %s' % msg)

    # Invalid or confidential bugs will raise errors in the Review Board
    # interface later. Fail fast to minimize wasted time and resources.
    try:
        reviewid = ReviewID(identifier)
    except util.Abort as e:
        return formatresponse('error %s' % e)

    # We use xmlrpc here because the Bugsy REST client doesn't currently
    # handle errors in responses.
    # We don't use available Bugzilla credentials because that's the
    # easiest way to test for confidential bugs. If/when we support posting
    # reviews to confidential bugs, we'll need to change this.
    xmlrpc_url = repo.ui.config('bugzilla', 'url').rstrip('/') + '/xmlrpc.cgi'
    proxy = xmlrpclib.ServerProxy(xmlrpc_url)
    try:
        proxy.Bug.get({'ids': [reviewid.bug]})
    except xmlrpclib.Fault as f:
        if f.faultCode == 101:
            return formatresponse('error bug %s does not exist; '
                'please change the review id (%s)' % (reviewid.bug,
                    reviewid.full))
        elif f.faultCode == 102:
            return formatresponse('error bug %s could not be accessed '
                '(we do not currently allow posting of reviews to '
                'confidential bugs)' % reviewid.bug)

        return formatresponse('error server error verifying bug %s exists; '
            'please retry or report a bug' % reviewid.bug)

    # Find the first public node in the ancestry of this series. This is
    # used by MozReview to query the upstream repo for additional context.
    first_public_ancestor = None
    for node in repo[nodes[0]].ancestors():
        ctx = repo[node]
        if ctx.phase() == phases.public:
            first_public_ancestor = ctx.hex()
            break
    commits['squashed']['first_public_ancestor'] = first_public_ancestor

    # Note patch.diff() appears to accept anything that can be fed into
    # repo[]. However, it blindly does a hex() on the argument as opposed
    # to the changectx, so we need to pass in the binary node.
    base_ctx = repo[nodes[0]].p1()
    base_parent_node = base_ctx.node()

    for i, node in enumerate(nodes):
        ctx = repo[node]
        p1 = ctx.p1().node()

        diff = None
        parent_diff = None

        diff = ''.join(patch.diff(repo, node1=p1, node2=ctx.node(),
                                  opts=diffopts)) + '\n'

        if i:
            base_commit_id = nodes[i-1]
        else:
            base_commit_id = base_ctx.hex()

        summary = encoding.fromlocal(ctx.description().splitlines()[0])
        commits['individual'].append({
            'id': node,
            'precursors': precursors.get(node, []),
            'message': encoding.fromlocal(ctx.description()),
            'diff': diff,
            'bug': str(reviewid.bug),
            'base_commit_id': base_commit_id,
            'first_public_ancestor': first_public_ancestor,
            'reviewers': list(commitparser.parse_rquestion_reviewers(summary)),
            'requal_reviewers': list(commitparser.parse_requal_reviewers(summary))
        })

    commits['squashed']['diff'] = ''.join(patch.diff(repo,
        node1=base_parent_node, node2=repo[nodes[-1]].node(),
        opts=diffopts)) + '\n'
    commits['squashed']['base_commit_id'] = base_ctx.hex()

    rburl = repo.ui.config('reviewboard', 'url', None).rstrip('/')
    repoid = repo.ui.configint('reviewboard', 'repoid', None)
    privleged_rb_username = repo.ui.config('reviewboard', 'username', None)
    privleged_rb_password = repo.ui.config('reviewboard', 'password', None)

    # We support pushing via HTTP and SSH. REMOTE_USER will be set via HTTP.
    # USER via SSH. But USER is a common variable and could also sneak into
    # the HTTP environment.
    #
    # REMOTE_USER values come from Bugzilla. USER values come from LDAP.
    # There is a potential privilege escalation vulnerability if someone
    # obtains a Bugzilla account overlapping with a LDAP user having
    # special privileges. So, we explicitly don't perform an LDAP lookup
    # if REMOTE_USER is present because we could be crossing the user
    # stores.
    ldap_username = os.environ.get('USER')
    remote_user = repo.ui.environ.get('REMOTE_USER',
                                      os.environ.get('REMOTE_USER'))

    if ldap_username and not remote_user:
        associate_ldap_username(rburl, ldap_username, privleged_rb_username,
                                privleged_rb_password, username=bzusername,
                                apikey=bzapikey)

    lines = [
        'rburl %s' % rburl,
        'reviewid %s' % identifier,
    ]

    try:
        parentrid, commitmap, reviews = post_reviews(rburl, repoid,
                                                     identifier, commits,
                                                     lines,
                                                     username=bzusername,
                                                     apikey=bzapikey)
        lines.extend([
            'parentreview %s' % parentrid,
            'reviewdata %s status %s' % (
                parentrid,
                urllib.quote(reviews[parentrid]['status'].encode('utf-8'))),
            'reviewdata %s public %s' % (
                parentrid,
                reviews[parentrid]['public']),
        ])

        for node, rid in commitmap.items():
            rd = reviews[rid]
            lines.append('csetreview %s %s' % (node, rid))
            lines.append('reviewdata %s status %s' % (rid,
                urllib.quote(rd['status'].encode('utf-8'))))
            lines.append('reviewdata %s public %s' % (rid, rd['public']))

            if rd['reviewers']:
                parts = [urllib.quote(r.encode('utf-8'))
                         for r in rd['reviewers']]
                lines.append('reviewdata %s reviewers %s' %
                             (rid, ','.join(parts)))
    except AuthorizationError as e:
        lines.append('error %s' % str(e))
    except BadRequestError as e:
        lines.append('error %s' % str(e))

    res = formatresponse(*lines)
    return res
def addmetadata(repo, ctx, d, onlycheap=False):
    """Add changeset metadata for hgweb templates."""
    description = encoding.fromlocal(ctx.description())

    d['bugs'] = []
    for bug in commitparser.parse_bugs(description):
        d['bugs'].append({
            'no': str(bug),
            'url': 'https://bugzilla.mozilla.org/show_bug.cgi?id=%s' % bug,
        })

    d['reviewers'] = []
    for reviewer in commitparser.parse_reviewers(description):
        d['reviewers'].append({
            'name': reviewer,
            'revset': 'reviewer(%s)' % reviewer,
        })

    d['backsoutnodes'] = []
    backouts = commitparser.parse_backouts(description)
    if backouts:
        for node in backouts[0]:
            try:
                bctx = repo[node]
                d['backsoutnodes'].append({'node': bctx.hex()})
            except error.RepoLookupError:
                pass

    # Repositories can define which TreeHerder repository they are associated
    # with.
    treeherder = repo.ui.config('mozilla', 'treeherder_repo')
    if treeherder:
        d['treeherderrepourl'] = 'https://treeherder.mozilla.org/#/jobs?repo=%s' % treeherder
        d['treeherderrepo'] = treeherder

    push = repo.pushlog.pushfromchangeset(ctx)
    # Don't print Perfherder link on non-publishing repos (like Try)
    # because the previous push likely has nothing to do with this
    # push.
    if push and push.nodes and repo.ui.configbool('phases', 'publish', True):
        lastpushhead = repo[push.nodes[0]].hex()
        d['perfherderurl'] = (
            'https://treeherder.mozilla.org/perf.html#/compare?'
            'originalProject=%s&'
            'originalRevision=%s&'
            'newProject=%s&'
            'newRevision=%s') % (treeherder, push.nodes[-1],
                                 treeherder, lastpushhead)

    # If this changeset was converted from another one and we know which repo
    # it came from, add that metadata.
    convertrevision = ctx.extra().get('convert_revision')
    if convertrevision:
        sourcerepo = repo.ui.config('hgmo', 'convertsource')
        if sourcerepo:
            d['convertsourcepath'] = sourcerepo
            d['convertsourcenode'] = convertrevision

    if onlycheap:
        return

    # Obtain the Gecko/app version/milestone.
    #
    # We could probably only do this if the repo is a known app repo (by
    # looking at the initial changeset). But, path based lookup is relatively
    # fast, so just do it. However, we need this in the "onlycheap"
    # section because resolving manifests is relatively slow and resolving
    # several on changelist pages may add seconds to page load times.
    try:
        fctx = repo.filectx('config/milestone.txt', changeid=ctx.node())
        lines = fctx.data().splitlines()
        lines = [l for l in lines if not l.startswith('#') and l.strip()]
        if lines:
            d['milestone'] = lines[0].strip()
    except error.LookupError:
        pass

    # Look for changesets that back out this one.
    #
    # We limit the distance we search for backouts because an exhaustive
    # search could be very intensive. e.g. you load up the root commit
    # on a repository with 200,000 changesets and that commit is never
    # backed out. This finds most backouts because backouts typically happen
    # shortly after a bad commit is introduced.
    thisshort = short(ctx.node())
    count = 0
    searchlimit = repo.ui.configint('hgmo', 'backoutsearchlimit', 100)
    for bctx in repo.set('%ld::', [ctx.rev()]):
        count += 1
        if count >= searchlimit:
            break

        backouts = commitparser.parse_backouts(
            encoding.fromlocal(bctx.description()))
        if backouts and thisshort in backouts[0]:
            d['backedoutbynode'] = bctx.hex()
            break
def _processpushreview(repo, req, ldap_username):
    """Handle a request to turn changesets into review requests.

    ``ldap_username`` is the LDAP username to associate with the MozReview
    account whose credentials are passed as part of the request. We implicitly
    trust the passed LDAP username has been authenticated to belong to the
    MozReview account.
    """
    bzusername = req.get('bzusername')
    bzapikey = req.get('bzapikey')

    if not bzusername or not bzapikey:
        return errorresponse(
            'Bugzilla API keys not configured; see '
            'https://mozilla-version-control-tools.readthedocs.io/en/latest/mozreview/install.html#obtaining-accounts-credentials-and-privileges '
            'for instructions on how to configure your client')

    identifier = req['identifier']
    nodes = []
    precursors = {}
    for cset in req['changesets']:
        node = cset['node']
        nodes.append(node)
        if 'precursors' in cset:
            precursors[node] = cset['precursors']

    diffopts = mdiff.diffopts(context=8, showfunc=True, git=True)

    commits = {
        'individual': [],
        'squashed': {},
        'obsolescence': req.get('obsolescence', False),
    }

    # We do multiple passes over the changesets requested for review because
    # some operations could be slow or may involve queries to external
    # resources. We want to run the fast checks first so we don't waste
    # resources before finding the error. The drawback here is the client
    # will not see the full set of errors. We may revisit this decision
    # later.
    for node in nodes:
        ctx = repo[node]
        # Reviewing merge commits doesn't make much sense and only makes
        # situations more complicated. So disallow the practice.
        if len(ctx.parents()) > 1:
            msg = 'cannot review merge commits (%s)' % short(ctx.node())
            return errorresponse(msg)

    # Invalid or confidential bugs will raise errors in the Review Board
    # interface later. Fail fast to minimize wasted time and resources.
    try:
        reviewid = ReviewID(identifier)
    except error.Abort as e:
        return errorresponse(str(e))

    # We use xmlrpc here because the Bugsy REST client doesn't currently
    # handle errors in responses.
    # We don't use available Bugzilla credentials because that's the
    # easiest way to test for confidential bugs. If/when we support posting
    # reviews to confidential bugs, we'll need to change this.
    xmlrpc_url = repo.ui.config('bugzilla', 'url').rstrip('/') + '/xmlrpc.cgi'
    proxy = xmlrpclib.ServerProxy(xmlrpc_url)
    try:
        proxy.Bug.get({'ids': [reviewid.bug]})
    except xmlrpclib.Fault as f:
        if f.faultCode == 101:
            return errorresponse('bug %s does not exist; '
                'please change the review id (%s)' % (reviewid.bug,
                    reviewid.full))
        elif f.faultCode == 102:
            return errorresponse(
                'bug %s could not be accessed '
                '(we do not currently allow posting of reviews to '
                'confidential bugs)' % reviewid.bug)

        return errorresponse('server error verifying bug %s exists; '
            'please retry or report a bug' % reviewid.bug)

    # Find the first public node in the ancestry of this series. This is
    # used by MozReview to query the upstream repo for additional context.
    first_public_ancestor = None
    for node in repo[nodes[0]].ancestors():
        ctx = repo[node]
        if ctx.phase() == phases.public:
            first_public_ancestor = ctx.hex()
            break
    commits['squashed']['first_public_ancestor'] = first_public_ancestor

    # Note patch.diff() appears to accept anything that can be fed into
    # repo[]. However, it blindly does a hex() on the argument as opposed
    # to the changectx, so we need to pass in the binary node.
    base_ctx = repo[nodes[0]].p1()
    base_parent_node = base_ctx.node()

    for i, node in enumerate(nodes):
        ctx = repo[node]
        p1 = ctx.p1().node()
        diff = ''.join(
            patch.diff(repo, node1=p1, node2=ctx.node(), opts=diffopts)) + '\n'

        if i:
            base_commit_id = nodes[i - 1]
        else:
            base_commit_id = base_ctx.hex()

        summary = encoding.fromlocal(ctx.description().splitlines()[0])
        if req.get('deduce-reviewers', True):
            reviewers = list(commitparser.parse_rquestion_reviewers(summary))
            requal_reviewers = list(
                commitparser.parse_requal_reviewers(summary))
        else:
            reviewers = []
            requal_reviewers = []

        commits['individual'].append({
            'id': node,
            'author': encoding.fromlocal(ctx.user()),
            'precursors': precursors.get(node, []),
            'message': encoding.fromlocal(ctx.description()),
            # Diffs are arbitrary byte sequences. json.dump() will try to
            # interpret str as UTF-8, which could fail. Instead of trying
            # to coerce the str to a unicode or use ensure_ascii=False (which
            # is a giant pain), just base64 encode the diff in the JSON.
            'diff_b64': diff.encode('base64'),
            'bug': str(reviewid.bug),
            'base_commit_id': base_commit_id,
            'first_public_ancestor': first_public_ancestor,
            'reviewers': reviewers,
            'requal_reviewers': requal_reviewers,
        })

    squashed_diff = b''.join(
        patch.diff(repo, node1=base_parent_node,
                   node2=repo[nodes[-1]].node(),
                   opts=diffopts)) + '\n'

    commits['squashed']['diff_b64'] = squashed_diff.encode('base64')
    commits['squashed']['base_commit_id'] = base_ctx.hex()

    rburl = repo.ui.config('reviewboard', 'url', None).rstrip('/')
    repoid = repo.ui.configint('reviewboard', 'repoid', None)
    privileged_rb_username = repo.ui.config('reviewboard', 'username', None)
    privileged_rb_password = repo.ui.config('reviewboard', 'password', None)

    if ldap_username:
        associate_ldap_username(rburl, ldap_username, privileged_rb_username,
                                privileged_rb_password, username=bzusername,
                                apikey=bzapikey)

    res = {
        'rburl': rburl,
        'reviewid': identifier,
        'reviewrequests': {},
        'display': [],
    }

    try:
        parentrid, commitmap, reviews, warnings = \
            post_reviews(rburl, repoid, identifier, commits,
                         privileged_rb_username, privileged_rb_password,
                         username=bzusername, apikey=bzapikey)
        res['display'].extend(warnings)
        res['parentrrid'] = parentrid
        res['reviewrequests'][parentrid] = {
            'status': reviews[parentrid]['status'],
            'public': reviews[parentrid]['public'],
        }

        for node, rid in commitmap.items():
            rd = reviews[rid]
            res['reviewrequests'][rid] = {
                'node': node,
                'status': rd['status'],
                'public': rd['public'],
            }

            if rd['reviewers']:
                res['reviewrequests'][rid]['reviewers'] = list(rd['reviewers'])
    except AuthorizationError as e:
        return errorresponse(str(e))
    except BadRequestError as e:
        return errorresponse(str(e))

    return res
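# A hedged sketch (illustration only) of the shape of the ``req`` dict that
# _processpushreview() expects. The field names come from the lookups in the
# function above; the concrete values are made up.
example_req = {
    'bzusername': 'user@example.com',
    'bzapikey': 'abc123',
    'identifier': 'bz://123456/alice',
    'obsolescence': True,
    'deduce-reviewers': True,
    'changesets': [
        {'node': '<40-hex changeset id>',
         'precursors': ['<40-hex precursor id>']},
    ],
}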
def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
    if hooktype != "changegroup":
        ui.status("hgbuildbot: hooktype %s not supported.\n" % hooktype)
        return

    # Read config parameters
    masters = ui.configlist("hgbuildbot", "master")
    if not masters:
        ui.write("* You must add a [hgbuildbot] section to .hg/hgrc in "
                 "order to use the Buildbot hook\n")
        return

    # - virtualenv
    venv = ui.config("hgbuildbot", "venv", None)
    if venv is not None:
        if not os.path.isdir(venv):
            ui.write('* Virtualenv "%s" does not exist.\n' % venv)
        sys.path.insert(0, venv)

    # - auth
    username = ui.config("hgbuildbot", "user", "change")
    password = ui.config("hgbuildbot", "passwd", "changepw")

    # - branch
    branchtype = ui.config("hgbuildbot", "branchtype", "inrepo")
    branch = ui.config("hgbuildbot", "branch", None)

    # - repo URL
    baseurl = ui.config("hgbuildbot", "baseurl",
                        ui.config("web", "baseurl", ""))
    stripcount = int(ui.config("hgbuildbot", "strip",
                               ui.config("notify", "strip", 0)))

    # - category, project and codebase
    category = ui.config("hgbuildbot", "category", None)
    project = ui.config("hgbuildbot", "project", "")
    codebase = ui.config("hgbuildbot", "codebase", "")

    # Only import this after the (optional) venv has been added to sys.path:
    from buildbot.clients import sendchange
    from twisted.internet import defer, reactor

    # Process changesets
    if branch is None and branchtype == "dirname":
        branch = os.path.basename(repo.root)
    # If branchtype == 'inrepo', update "branch" for each commit later.

    repository = strip(repo.root, stripcount)
    repository = baseurl + repository

    start = repo[node].rev()
    end = len(repo)

    for master in masters:
        s = sendchange.Sender(master, auth=(username, password))
        d = defer.Deferred()
        reactor.callLater(0, d.callback, None)

        for rev in range(start, end):
            # send changeset
            node = repo.changelog.node(rev)
            log = repo.changelog.read(node)
            manifest, user, (time, timezone), files, desc, extra = log
            parents = [p for p in repo.changelog.parents(node) if p != nullid]

            if branchtype == "inrepo":
                branch = extra["branch"]
            if branch:
                branch = fromlocal(branch)

            is_merge = len(parents) > 1
            # merges don't always contain files, but at least one file is
            # required by buildbot
            if is_merge and not files:
                files = ["merge"]
            properties = {"is_merge": is_merge}

            change = {
                # 'master': master,
                "branch": branch,
                "revision": hex(node),
                "comments": fromlocal(desc),
                "files": files,
                "username": fromlocal(user),
                "category": category,
                "time": time,
                "properties": properties,
                "repository": repository,
                "project": project,
                "codebase": codebase,
            }
            d.addCallback(send_cs, s, change)

        def _printSuccess(res):
            ui.status(s.getSuccessString(res) + "\n")

        def _printFailure(why):
            ui.warn(s.getFailureString(why) + "\n")

        d.addCallbacks(_printSuccess, _printFailure)
        d.addBoth(lambda _: reactor.stop())
        reactor.run()
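# A hedged example of the .hg/hgrc configuration the hook above reads. The
# section and option names are taken from the ui.config*() calls; the values
# and the hook registration path ("python:hgbuildbot.hook") are placeholders.
#
#   [hooks]
#   changegroup.buildbot = python:hgbuildbot.hook
#
#   [hgbuildbot]
#   master = buildmaster.example.org:9989
#   user = change
#   passwd = changepw
#   branchtype = inrepo
#   strip = 0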
def message(self):
    return encoding.fromlocal(self.rev.description())
def wrappedpushbookmark(orig, pushop):
    result = orig(pushop)

    # pushop.ret was renamed to pushop.cgresult in Mercurial 3.2. We can drop
    # this branch once we drop <3.2 support.
    if hasattr(pushop, 'cgresult'):
        origresult = pushop.cgresult
    else:
        origresult = pushop.ret

    # Don't do anything if error from push.
    if not origresult:
        return result

    remoteurl = pushop.remote.url()
    tree = repository.resolve_uri_to_tree(remoteurl)

    # We don't support release trees (yet) because they have special flags
    # that need to get updated.
    if tree and tree in repository.RELEASE_TREES:
        return result

    ui = pushop.ui
    if tree and tree in ui.configlist('bzpost', 'excludetrees', default=[]):
        return result

    if tree:
        baseuri = repository.resolve_trees_to_uris([tree])[0][1].encode('utf-8')
        assert baseuri
    else:
        # This isn't a known Firefox tree. Fall back to resolving URLs by
        # hostname.

        # Only attempt Mozilla's server.
        if not updateunknown(remoteurl, repository.BASE_WRITE_URI, ui):
            return result

        baseuri = remoteurl.replace(repository.BASE_WRITE_URI,
                                    repository.BASE_READ_URI).rstrip('/')

    bugsmap = {}
    lastbug = None
    lastnode = None

    for node in pushop.outgoing.missing:
        ctx = pushop.repo[node]

        # Don't do merge commits.
        if len(ctx.parents()) > 1:
            continue

        # Our bug parser is buggy for Gaia bump commit messages.
        if '<*****@*****.**>' in ctx.user():
            continue

        # Pushing to Try (and possibly other repos) could push unrelated
        # changesets that have been pushed to an official tree but aren't yet
        # on this specific remote. We use the phase information as a proxy
        # for "already pushed" and prune public changesets from consideration.
        if tree == 'try' and ctx.phase() == phases.public:
            continue

        bugs = parse_bugs(ctx.description())
        if not bugs:
            continue

        bugsmap.setdefault(bugs[0], []).append(ctx.hex()[0:12])
        lastbug = bugs[0]
        lastnode = ctx.hex()[0:12]

    if not bugsmap:
        return result

    bzauth = getbugzillaauth(ui)
    if not bzauth:
        return result

    bzurl = ui.config('bugzilla', 'url', 'https://bugzilla.mozilla.org/rest')

    bugsy = Bugsy(username=bzauth.username, password=bzauth.password,
                  userid=bzauth.userid, cookie=bzauth.cookie,
                  api_key=bzauth.apikey, bugzilla_url=bzurl)

    def public_url_for_bug(bug):
        '''Turn 123 into "https://bugzilla.mozilla.org/show_bug.cgi?id=123".'''
        public_baseurl = bzurl.replace('rest', '').rstrip('/')
        return '%s/show_bug.cgi?id=%s' % (public_baseurl, bug)

    # If this is a try push, we paste the Treeherder link for the tip commit,
    # because the per-commit URLs don't have much value.
    # TODO roll this into normal pushing so we get a Treeherder link in bugs
    # as well.
    if tree == 'try' and lastbug:
        treeherderurl = repository.treeherder_url(tree, lastnode)

        bug = bugsy.get(lastbug)
        comments = bug.get_comments()
        for comment in comments:
            if treeherderurl in comment.text:
                return result

        ui.write(_('recording Treeherder push at %s\n') %
                 public_url_for_bug(lastbug))
        bug.add_comment(treeherderurl)

        return result

    for bugnumber, nodes in bugsmap.items():
        bug = bugsy.get(bugnumber)

        comments = bug.get_comments()
        missing_nodes = []

        # When testing whether this changeset URL is referenced in a
        # comment, we only need to test for the node fragment. The
        # important side-effect is that each unique node for a changeset
        # is recorded in the bug.
        for node in nodes:
            if not any(node in comment.text for comment in comments):
                missing_nodes.append(node)

        if not missing_nodes:
            ui.write(_('bug %s already knows about pushed changesets\n') %
                     bugnumber)
            continue

        lines = []

        for node in missing_nodes:
            ctx = pushop.repo[node]
            lines.append('%s/rev/%s' % (baseuri, ctx.hex()))

            # description is using local encodings. Depending on the
            # configured encoding, replacement characters could be involved.
            # We use encoding.fromlocal() to get the raw bytes, which should
            # be valid UTF-8.
            lines.append(encoding.fromlocal(ctx.description()).splitlines()[0])
            lines.append('')

        comment = '\n'.join(lines)

        ui.write(_('recording push at %s\n') % public_url_for_bug(bugnumber))
        bug.add_comment(comment)

    return result
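# A small illustration (assumed values) of the comment body the loop above
# builds for a bug with one newly pushed changeset: a changeset URL followed
# by the first line of its description, then a blank separator line.
example_lines = [
    'https://hg.mozilla.org/try/rev/0123456789abcdef0123456789abcdef01234567',
    'Bug 123456 - Fix the frobnicator; r=alice',
    '',
]
example_comment = '\n'.join(example_lines)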
def author(self):
    return encoding.fromlocal(self.rev.user())
def testasciifastpath(self):
    s = b'\0' * 100
    self.assertTrue(s is encoding.tolocal(s))
    self.assertTrue(s is encoding.fromlocal(s))
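# A minimal sketch of the ASCII fast path the test above exercises:
# encoding.tolocal()/fromlocal() return the *same* byte string object when the
# input is pure ASCII, skipping the decode/encode round trip. This standalone
# approximation is an assumption for illustration, not Mercurial's actual code.
def _fastpath(s):
    try:
        s.decode('ascii')
        return s  # pure ASCII: hand back the original object unchanged
    except UnicodeDecodeError:
        raise NotImplementedError('non-ASCII handling elided in this sketch')

sample = b'\0' * 100  # NUL bytes are ASCII, so the fast path applies
assert _fastpath(sample) is sample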
def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
    if hooktype != 'changegroup':
        ui.status('hgbuildbot: hooktype %s not supported.\n' % hooktype)
        return

    # Read config parameters
    masters = ui.configlist('hgbuildbot', 'master')
    if not masters:
        ui.write('* You must add a [hgbuildbot] section to .hg/hgrc in '
                 'order to use the Buildbot hook\n')
        return

    # - virtualenv
    venv = ui.config('hgbuildbot', 'venv', None)
    if venv is not None:
        if not os.path.isdir(venv):
            ui.write('* Virtualenv "%s" does not exist.\n' % venv)
        sys.path.insert(0, venv)

    # - auth
    username = ui.config('hgbuildbot', 'user', 'change')
    password = ui.config('hgbuildbot', 'passwd', 'changepw')

    # - branch
    branchtype = ui.config('hgbuildbot', 'branchtype', 'inrepo')
    branch = ui.config('hgbuildbot', 'branch', None)

    # - repo URL
    baseurl = ui.config('hgbuildbot', 'baseurl',
                        ui.config('web', 'baseurl', ''))
    stripcount = int(ui.config('hgbuildbot', 'strip',
                               ui.config('notify', 'strip', 0)))

    # - category, project and codebase
    category = ui.config('hgbuildbot', 'category', None)
    project = ui.config('hgbuildbot', 'project', '')
    codebase = ui.config('hgbuildbot', 'codebase', '')

    # Only import this after the (optional) venv has been added to sys.path:
    from buildbot.clients import sendchange
    from twisted.internet import defer, reactor

    # Process changesets
    if branch is None and branchtype == 'dirname':
        branch = os.path.basename(repo.root)
    # If branchtype == 'inrepo', update "branch" for each commit later.

    repository = strip(repo.root, stripcount)
    repository = baseurl + repository

    start = repo[node].rev()
    end = len(repo)

    for master in masters:
        s = sendchange.Sender(master, auth=(username, password))
        d = defer.Deferred()
        reactor.callLater(0, d.callback, None)

        for rev in range(start, end):
            # send changeset
            node = repo.changelog.node(rev)
            log = repo.changelog.read(node)
            manifest, user, (time, timezone), files, desc, extra = log
            parents = [p for p in repo.changelog.parents(node) if p != nullid]

            if branchtype == 'inrepo':
                branch = extra['branch']
            if branch:
                branch = fromlocal(branch)

            is_merge = len(parents) > 1
            # merges don't always contain files, but at least one file is
            # required by buildbot
            if is_merge and not files:
                files = ["merge"]
            properties = {'is_merge': is_merge}

            change = {
                # 'master': master,
                'branch': branch,
                'revision': hex(node),
                'comments': fromlocal(desc),
                'files': files,
                'username': fromlocal(user),
                'category': category,
                'time': time,
                'properties': properties,
                'repository': repository,
                'project': project,
                'codebase': codebase,
            }
            d.addCallback(send_cs, s, change)

        def _printSuccess(res):
            ui.status(s.getSuccessString(res) + '\n')

        def _printFailure(why):
            ui.warn(s.getFailureString(why) + '\n')

        d.addCallbacks(_printSuccess, _printFailure)
        d.addBoth(lambda _: reactor.stop())
        reactor.run()
def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
    if hooktype != 'changegroup':
        ui.status('hgbuildbot: hooktype %s not supported.\n' % hooktype)
        return

    # Read config parameters
    masters = ui.configlist('hgbuildbot', 'master')
    if not masters:
        ui.write('* You must add a [hgbuildbot] section to .hg/hgrc in '
                 'order to use the Buildbot hook\n')
        return

    # - virtualenv
    venv = ui.config('hgbuildbot', 'venv', None)
    if venv is not None:
        if not os.path.isdir(venv):
            ui.write('* Virtualenv "%s" does not exist.\n' % venv)
        else:
            activate_this = os.path.join(venv, "bin/activate_this.py")
            execfile(activate_this, dict(__file__=activate_this))

    # - auth
    username = ui.config('hgbuildbot', 'user')
    password = ui.config('hgbuildbot', 'passwd')
    if username is not None and password is not None:
        auth = requests.auth.HTTPBasicAuth(username, password)
    else:
        auth = None

    # - branch
    branchtype = ui.config('hgbuildbot', 'branchtype', 'inrepo')
    branch = ui.config('hgbuildbot', 'branch', None)

    # - repo URL
    baseurl = ui.config('hgbuildbot', 'baseurl',
                        ui.config('web', 'baseurl', ''))
    stripcount = int(
        ui.config('hgbuildbot', 'strip', ui.config('notify', 'strip', 0)))

    # - category, project and codebase
    category = ui.config('hgbuildbot', 'category', None)
    project = ui.config('hgbuildbot', 'project', '')
    codebase = ui.config('hgbuildbot', 'codebase', '')

    # Process changesets
    if branch is None and branchtype == 'dirname':
        branch = os.path.basename(repo.root)
    # If branchtype == 'inrepo', update "branch" for each commit later.

    repository = strip(repo.root, stripcount)
    repository = baseurl + repository

    start = repo[node].rev()
    end = len(repo)

    for rev in range(start, end):
        # send changeset
        node = repo.changelog.node(rev)
        log = repo.changelog.read(node)
        manifest, user, (time, timezone), files, desc, extra = log
        parents = [p for p in repo.changelog.parents(node) if p != nullid]

        if branchtype == 'inrepo':
            branch = extra['branch']
        if branch:
            branch = fromlocal(branch)

        is_merge = len(parents) > 1
        # merges don't always contain files, but at least one file is
        # required by buildbot
        if is_merge and not files:
            files = ["merge"]
        properties = {'is_merge': is_merge}

        change = {
            # 'master': master,
            'branch': branch,
            'revision': hex(node),
            'comments': fromlocal(desc),
            'files': json.dumps(files),
            'author': fromlocal(user),
            'category': category,
            'when': time,
            'properties': json.dumps(properties),
            'repository': repository,
            'project': project,
            'codebase': codebase,
        }
        for master in masters:
            response = requests.post(
                master,
                auth=auth,
                params=change,
                headers={"Content-Type": "application/x-www-form-urlencoded"},
            )
            if not response.ok:
                ui.warn("couldn't notify buildbot about {}: {} {}\n".format(
                    hex(node)[:12], response.status_code, response.reason))
            else:
                ui.status("notified buildbot about {}\n".format(
                    hex(node)[:12]))
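# A hedged example (all values are placeholders) of the form-encoded
# parameters the requests-based hook above POSTs to each buildmaster's change
# hook URL. Note that ``files`` and ``properties`` are JSON-encoded strings,
# unlike in the twisted-based variants, which pass native Python objects.
example_change = {
    'branch': 'default',
    'revision': '0123456789abcdef0123456789abcdef01234567',
    'comments': 'Bug 123456 - Fix the frobnicator',
    'files': '["path/to/file.py"]',        # JSON-encoded list
    'author': 'Alice <alice@example.com>',
    'category': 'release',
    'when': 1400000000,
    'properties': '{"is_merge": false}',   # JSON-encoded dict
    'repository': 'https://hg.example.org/repo',
    'project': '',
    'codebase': '',
}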
def wrappedpushdiscovery(orig, pushop):
    """Wraps exchange._pushdiscovery to add extra review metadata.

    We discover what nodes to review before discovery. This ensures that
    errors are discovered and reported quickly, without waiting for server
    communication.
    """
    pushop.reviewnodes = None

    caps = getreviewcaps(pushop.remote)
    if 'pushreview' not in caps:
        return orig(pushop)

    ui = pushop.ui
    repo = pushop.repo

    if repo.noreviewboardpush:
        return orig(pushop)

    # If no arguments are specified to push, Mercurial will try to push all
    # non-remote changesets by default. This can result in unexpected
    # behavior, especially for people doing multi-headed development.
    #
    # Since we reject pushes with multiple heads anyway, default to pushing
    # the working copy.
    if not pushop.revs:
        pushop.revs = [repo['.'].node()]

    tipnode = None
    basenode = None

    # Our prepushoutgoing hook validates that all pushed changesets are
    # part of the same DAG head. If revisions were specified by the user,
    # the last is the tip commit to review and the first (if more than 1)
    # is the base commit to review.
    #
    # Note: the revisions are in the order they were specified by the user.
    # This may not be DAG order. So we have to explicitly order them here.
    revs = sorted(repo[r].rev() for r in pushop.revs)
    tipnode = repo[revs[-1]].node()
    if len(revs) > 1:
        basenode = repo[revs[0]].node()

    if repo.pushsingle:
        basenode = tipnode

    # Given a base and tip node, find all changesets to review.
    #
    # A solution that works most of the time is to find all non-public
    # ancestors of that node. This is our default.
    #
    # If basenode is specified, we stop the traversal when we encounter it.
    #
    # Note that we will still refuse to review a public changeset even with
    # basenode. This decision is somewhat arbitrary and can be revisited
    # later if there is an actual need to review public changesets.
    nodes = [tipnode]
    # Special case where basenode is the tip node.
    if basenode and tipnode == basenode:
        pass
    else:
        for node in repo[tipnode].ancestors():
            ctx = repo[node]

            if ctx.phase() == phases.public:
                break
            if basenode and ctx.node() == basenode:
                nodes.insert(0, ctx.node())
                break

            nodes.insert(0, ctx.node())

    # Filter out public nodes.
    publicnodes = []
    for node in nodes:
        ctx = repo[node]
        if ctx.phase() == phases.public:
            publicnodes.append(node)
            ui.status(_('(ignoring public changeset %s in review request)\n') %
                      ctx.hex()[0:12])

    nodes = [n for n in nodes if n not in publicnodes]
    if not nodes:
        raise util.Abort(
            _('no non-public changesets left to review'),
            hint=_('add or change the -r argument to include draft '
                   'changesets'))

    # We stop completely empty changesets prior to review.
    for node in nodes:
        ctx = repo[node]
        if not ctx.files():
            raise util.Abort(
                _('cannot review empty changeset %s') % ctx.hex()[:12],
                hint=_('add files to or remove changeset'))

    run_android_checkstyle(repo, nodes)

    # Ensure all reviewed changesets have commit IDs.
    replacenodes = []
    for node in nodes:
        ctx = repo[node]
        if not parse_commit_id(encoding.fromlocal(ctx.description())):
            replacenodes.append(node)

    def makememctx(repo, ctx, revmap, copyfilectxfn):
        parents = newparents(repo, ctx, revmap)
        # Need to make a copy otherwise modification is made on original,
        # which is just plain wrong.
        msg = encoding.fromlocal(ctx.description())
        new_msg, changed = addcommitid(msg, repo=repo)

        memctx = context.memctx(repo, parents,
                                encoding.tolocal(new_msg), ctx.files(),
                                copyfilectxfn, user=ctx.user(),
                                date=ctx.date(), extra=dict(ctx.extra()))

        return memctx

    if replacenodes:
        ui.status(_('(adding commit id to %d changesets)\n') %
                  (len(replacenodes)))
        nodemap = replacechangesets(repo, replacenodes, makememctx,
                                    backuptopic='addcommitid')

        # Since we're in the middle of an operation, update references
        # to rewritten nodes.
        nodes = [nodemap.get(node, node) for node in nodes]
        pushop.revs = [nodemap.get(node, node) for node in pushop.revs]

    pushop.reviewnodes = nodes

    # Since we may rewrite changesets to contain review metadata after
    # push, abort immediately if the working directory state is not
    # compatible with rewriting. This prevents us from successfully
    # pushing and failing to update commit metadata after the push. i.e.
    # it prevents potential loss of metadata.
    #
    # There may be some scenarios where we don't rewrite after push.
    # But coding that here would be complicated. And future server changes
    # may change things like review request mapping, which may invalidate
    # client assumptions. So always assume a rewrite is needed.
    impactedrevs = list(repo.revs('%ln::', nodes))
    if repo['.'].rev() in impactedrevs:
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)

    return orig(pushop)
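# A tiny illustration (made-up node ids) of the nodemap remapping step above:
# nodes rewritten by replacechangesets() are swapped for their replacements,
# while untouched nodes pass through via dict.get(node, node).
old_a, old_b, new_a = 'aaa', 'bbb', 'AAA'
nodemap = {old_a: new_a}          # only old_a was rewritten
nodes = [old_a, old_b]
nodes = [nodemap.get(n, n) for n in nodes]
assert nodes == [new_a, old_b]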
def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
    # read config parameters
    baseurl = ui.config("hgbuildbot", "baseurl", "")
    master = ui.config("hgbuildbot", "master")
    if master:
        branchtype = ui.config("hgbuildbot", "branchtype")
        branch = ui.config("hgbuildbot", "branch")
        fork = ui.configbool("hgbuildbot", "fork", False)
        # notify also has this setting
        stripcount = int(ui.config("notify", "strip") or
                         ui.config("hgbuildbot", "strip", 3))
        category = ui.config("hgbuildbot", "category", None)
        project = ui.config("hgbuildbot", "project", "")
        auth = ui.config("hgbuildbot", "auth", None)
    else:
        ui.write("* You must add a [hgbuildbot] section to .hg/hgrc in "
                 "order to use buildbot hook\n")
        return

    if hooktype != "changegroup":
        ui.status("hgbuildbot: hooktype %s not supported.\n" % hooktype)
        return

    if fork:
        child_pid = os.fork()
        if child_pid == 0:
            # child
            pass
        else:
            # parent
            ui.status("Notifying buildbot...\n")
            return

    # only import inside the fork if forked
    from buildbot.clients import sendchange
    from twisted.internet import defer, reactor

    if branch is None:
        if branchtype is not None:
            if branchtype == "dirname":
                branch = os.path.basename(repo.root)
            if branchtype == "inrepo":
                branch = workingctx(repo).branch()

    if not auth:
        auth = "change:changepw"
    auth = auth.split(":", 1)

    s = sendchange.Sender(master, auth=auth)
    d = defer.Deferred()
    reactor.callLater(0, d.callback, None)

    # process changesets
    def _send(res, c):
        if not fork:
            ui.status("rev %s sent\n" % c["revision"])
        return s.send(c["branch"], c["revision"], c["comments"], c["files"],
                      c["username"], category=category,
                      repository=repository, project=project, vc="hg",
                      properties=c["properties"])

    try:  # first try Mercurial 1.1+ api
        start = repo[node].rev()
        end = len(repo)
    except TypeError:  # else fall back to old api
        start = repo.changelog.rev(bin(node))
        end = repo.changelog.count()

    repository = strip(repo.root, stripcount)
    repository = baseurl + repository

    for rev in xrange(start, end):
        # send changeset
        node = repo.changelog.node(rev)
        manifest, user, (time, timezone), files, desc, extra = \
            repo.changelog.read(node)
        parents = [p for p in repo.changelog.parents(node) if p != nullid]

        if branchtype == "inrepo":
            branch = extra["branch"]
        is_merge = len(parents) > 1
        # merges don't always contain files, but at least one file is
        # required by buildbot
        if is_merge and not files:
            files = ["merge"]
        properties = {"is_merge": is_merge}

        if branch:
            branch = fromlocal(branch)
        change = {
            "master": master,
            "username": fromlocal(user),
            "revision": hex(node),
            "comments": fromlocal(desc),
            "files": files,
            "branch": branch,
            "properties": properties,
        }
        d.addCallback(_send, change)

    def _printSuccess(res):
        ui.status(s.getSuccessString(res) + "\n")

    def _printFailure(why):
        ui.warn(s.getFailureString(why) + "\n")

    d.addCallbacks(_printSuccess, _printFailure)
    d.addBoth(lambda _: reactor.stop())
    reactor.run()

    if fork:
        os._exit(os.EX_OK)
    else:
        return