def __call__(self, req, db, user):
    from operation.typechecker import TypeCheckerContext

    if user.isAnonymous() and not self.__accept_anonymous_user:
        return OperationFailureMustLogin()

    if req.method == "POST":
        data = req.read()
    else:
        data = req.getParameter("data")

    if not data:
        raise OperationError("no input")

    try:
        value = json_decode(data)
    except ValueError as error:
        raise OperationError("invalid input: %s" % str(error))

    try:
        self.__checker(value, TypeCheckerContext(req, db, user))
        return self.process(db, user, **value)
    except OperationError as error:
        return error
    except OperationFailure as failure:
        return failure
    except dbutils.NoSuchUser as error:
        return OperationFailure(
            code="nosuchuser",
            title="Who is '%s'?" % error.name,
            message="There is no user in Critic's database named that.")
    except dbutils.NoSuchReview as error:
        return OperationFailure(
            code="nosuchreview",
            title="Invalid review ID",
            message="The review ID r/%d is not valid." % error.id)
    except dbutils.TransactionRollbackError:
        return OperationFailure(
            code="transactionrollback",
            title="Transaction rolled back",
            message="Your database transaction rolled back, probably due to a deadlock. Please try again.")
    except:
        # Decode value again since the type checkers might have modified it.
        value = json_decode(data)

        error_message = ("User: %s\nReferrer: %s\nData: %s\n\n%s"
                         % (user.name, req.getReferrer(),
                            json_encode(self.sanitize(value), indent=2),
                            traceback.format_exc()))

        db.rollback()

        import mailutils
        import configuration

        if not user.hasRole(db, "developer"):
            mailutils.sendExceptionMessage(
                db, "wsgi[%s]" % req.path, error_message)

        if configuration.debug.IS_DEVELOPMENT or user.hasRole(db, "developer"):
            return OperationError(error_message)
        else:
            return OperationError("An unexpected error occurred. "
                                  "A message has been sent to the system administrator(s) "
                                  "with details about the problem.")
def handle_input(self, data):
    try:
        result = json_decode(data)
    except ValueError:
        result = { "status": "error",
                   "error": "invalid response:\n" + indent(data) }

    if result["status"] == "ok":
        for item in result["info"]:
            self.server.info(item)
        if result["output"]:
            self.__client.write(result["output"].strip() + "\n")
        if result["accept"]:
            self.__client.write("ok\n")
    elif result["status"] == "reject":
        self.server.warning(result["message"])
        self.__client.write(result["message"].strip() + "\n")
    else:
        self.server.error(result["error"])
        self.__client.write("""\
An exception was raised while processing the request. A message has
been sent to the system administrator(s).
""")

    self.__client.close()
def handle_input(self, data):
    try:
        result = json_decode(data)
    except ValueError:
        result = { "status": "error",
                   "error": ("invalid response:\n"
                             + background.utils.indent(data)) }

    if result["status"] == "ok":
        for item in result["info"]:
            self.server.info(item)
        if result["output"]:
            self.__client.write(result["output"].strip() + "\n")
        if result["accept"]:
            self.__client.write("ok\n")
    elif result["status"] == "reject":
        self.server.warning(result["message"])
        self.__client.write(result["message"].strip() + "\n")
    else:
        self.server.error(result["error"])
        self.__client.write("""\
An exception was raised while processing the request. A message has
been sent to the system administrator(s).
""")
        if configuration.debug.IS_DEVELOPMENT:
            self.__client.write("\n" + result["error"].strip() + "\n")

    self.__client.close()
def process(self, db, user, service_name):
    if not user.hasRole(db, "administrator"):
        raise OperationFailure(
            code="notallowed",
            title="Not allowed!",
            message="Only a system administrator can restart services.")

    if service_name == "wsgi":
        for pid in os.listdir(configuration.paths.WSGI_PIDFILE_DIR):
            try:
                os.kill(int(pid), signal.SIGINT)
            except:
                pass
        return OperationResult()
    else:
        connection = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        connection.connect(
            configuration.services.SERVICEMANAGER["address"])
        connection.send(
            textutils.json_encode({ "command": "restart",
                                    "service": service_name }))
        connection.shutdown(socket.SHUT_WR)

        data = ""

        while True:
            received = connection.recv(4096)
            if not received:
                break
            data += received

        result = textutils.json_decode(data)

        if result["status"] == "ok":
            return OperationResult()
        else:
            raise OperationError(result["error"])
def process(self, db, user, service_name):
    if not user.hasRole(db, "administrator"):
        raise OperationFailure(
            code="notallowed",
            title="Not allowed!",
            message="Only a system administrator can restart services."
        )

    if service_name == "wsgi":
        for pid in os.listdir(configuration.paths.WSGI_PIDFILE_DIR):
            try:
                os.kill(int(pid), signal.SIGINT)
            except:
                pass
        return OperationResult()
    else:
        connection = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        connection.connect(configuration.services.SERVICEMANAGER["address"])
        connection.send(textutils.json_encode({"command": "restart", "service": service_name}))
        connection.shutdown(socket.SHUT_WR)

        data = ""
        while True:
            received = connection.recv(4096)
            if not received:
                break
            data += received

        result = textutils.json_decode(data)

        if result["status"] == "ok":
            return OperationResult()
        else:
            raise OperationError(result["error"])
def handle_input(self, _file, value):
    try:
        result = json_decode(value)
    except ValueError:
        self.server.error("invalid response:\n" + indent(value))
        result = self.request.copy()
        result["error"] = value

    for client in self.clients:
        client.add_result(result)

    self.server.request_finished(self, self.request, result)
def handle_input(self, value):
    try:
        result = json_decode(value)
    except ValueError:
        self.server.error("invalid response:\n" + indent(value))
        result = self.request.copy()
        result["error"] = value

    for client in self.clients:
        client.add_result(result)

    self.server.request_finished(self, self.request, result)
def perform_job():
    import syntaxhighlight.generate

    request = json_decode(sys.stdin.read())
    request["highlighted"] = syntaxhighlight.generate.generateHighlight(
        repository_path=request["repository_path"],
        sha1=request["sha1"],
        language=request["language"])

    sys.stdout.write(json_encode(request))
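# Usage sketch (assumption, not from the original source): perform_job() is
# meant to run in a child process that receives one JSON request on stdin and
# writes the same object back on stdout, augmented with the result of
# generateHighlight(). The field names mirror the call above; the module name
# "syntaxhighlight_slave.py" and the sample values are hypothetical.
#
# import json, subprocess
# child = subprocess.Popen(["python", "syntaxhighlight_slave.py"],
#                          stdin=subprocess.PIPE, stdout=subprocess.PIPE)
# reply, _ = child.communicate(json.dumps({
#     "repository_path": "/var/git/example.git",
#     "sha1": "0123abcd...",
#     "language": "c++"}))
# highlighted = json.loads(reply)["highlighted"]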
def handle_input(self, value):
    decoded = json_decode(value)

    if isinstance(decoded, list):
        self.__requests = decoded
        self.__pending_requests = map(freeze, decoded)
        self.__results = []
        self.server.add_requests(self)
    else:
        assert isinstance(decoded, dict)
        self.server.execute_command(self, decoded)
def handle_input(self, _file, data):
    data = textutils.json_decode(data)

    process = self.server.get_process(data["flavor"])

    extension = ExtensionRunner.Extension(
        self.server, self, process, data["timeout"])
    extension.write(data["stdin"])
    extension.close()

    self.server.add_peer(extension)
def handle_input(self, _file, value):
    decoded = json_decode(value)

    assert isinstance(decoded, dict)

    if "requests" in decoded:
        self.__requests = decoded["requests"]
        self.__pending_requests = map(freeze, self.__requests)
        self.__async = decoded.get("async", False)
        self.__results = []
        self.server.add_requests(self)
    else:
        self.server.execute_command(self, decoded)

    if self.__async:
        self.close()
def handle_input(self, _file, data):
    try:
        data = json_decode(data)
    except ValueError:
        self.server.error("invalid response from wait-for-update child: %r" % data)
        self.client.close()
        # The reply is unusable; don't try to interpret it below.
        return

    if data["status"] == "output":
        self.client.write(data["output"])
        self.server.debug(" hook output written to client")
    elif data["status"] == "no-output":
        self.server.debug(" update produced no hook output")
    else:
        self.server.debug(" timeout")

    self.client.close()
def handle_input(self, data):
    try:
        data = json_decode(data)
    except ValueError:
        self.server.error("invalid response from wait-for-update child: %r" % data)
        self.client.close()
        # The reply is unusable; don't try to interpret it below.
        return

    if data["status"] == "output":
        self.client.write(data["output"])
        self.server.debug(" hook output written to client")
    elif data["status"] == "no-output":
        self.server.debug(" update produced no hook output")
    else:
        self.server.debug(" timeout")

    self.client.close()
def requestChangesets(requests):
    try:
        connection = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        connection.connect(configuration.services.CHANGESET["address"])
        connection.send(json_encode(requests))
        connection.shutdown(socket.SHUT_WR)

        data = ""
        while True:
            received = connection.recv(4096)
            if not received:
                break
            data += received

        connection.close()
    except socket.error as error:
        raise ChangesetBackgroundServiceError(error[1])

    try:
        results = json_decode(data)
    except ValueError:
        raise ChangesetBackgroundServiceError(
            "returned an invalid response: %r" % data)

    if type(results) != list:
        # If not a list, the result is probably an error message.
        raise ChangesetBackgroundServiceError(str(results))

    if len(results) != len(requests):
        raise ChangesetBackgroundServiceError("didn't process all requests")

    errors = []

    for result in results:
        if "error" in result:
            errors.append(result["error"])

    if errors:
        raise ChangesetBackgroundServiceError(
            "one or more requests failed:\n%s"
            % "\n".join(map(indent, errors)))
def __call__(self, req, db, user):
    if user.isAnonymous() and not self.__accept_anonymous_user:
        return OperationFailureMustLogin()

    if req.method == "POST":
        data = req.read()
    else:
        data = req.getParameter("data")

    if not data:
        raise OperationError("no input")

    try:
        value = json_decode(data)
    except ValueError as error:
        raise OperationError("invalid input: %s" % str(error))

    self.__checker(value)

    try:
        return self.process(db, user, **value)
    except OperationError:
        raise
    except OperationFailure as failure:
        return failure
    except dbutils.NoSuchUser as error:
        return OperationFailure(
            code="nosuchuser",
            title="Who is '%s'?" % error.name,
            message="There is no user in Critic's database named that.")
def requestHighlights(repository, sha1s):
    requests = [{ "repository_path": repository.path,
                  "sha1": sha1,
                  "path": path,
                  "language": language }
                for sha1, (path, language) in sha1s.items()
                if not syntaxhighlight.isHighlighted(sha1, language)]

    if not requests:
        return

    try:
        connection = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        connection.connect(configuration.services.HIGHLIGHT["address"])
        connection.send(json_encode(requests))
        connection.shutdown(socket.SHUT_WR)

        data = ""
        while True:
            received = connection.recv(4096)
            if not received:
                break
            data += received

        connection.close()
    except socket.error as error:
        raise HighlightBackgroundServiceError(error[1])

    try:
        results = json_decode(data)
    except ValueError:
        raise HighlightBackgroundServiceError(
            "returned an invalid response (%r)" % data)

    if type(results) != list:
        # If not a list, the result is probably an error message.
        raise HighlightBackgroundServiceError(str(results))

    if len(results) != len(requests):
        raise HighlightBackgroundServiceError("didn't process all requests")
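# Usage sketch (assumption, not from the original source): callers pass a
# mapping from blob SHA-1 to a (path, language) tuple; requestHighlights()
# itself skips blobs that syntaxhighlight.isHighlighted() reports as already
# cached and forwards the rest to the highlight background service. The
# values below are illustrative only.
#
# requestHighlights(repository, {
#     "0123abcd...": ("src/main.cpp", "c++"),
#     "4567ef01...": ("src/util.py", "python"),
# })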
def perform_job():
    soft_limit, hard_limit = getrlimit(RLIMIT_RSS)
    rss_limit = configuration.services.CHANGESET["rss_limit"]

    if soft_limit < rss_limit:
        setrlimit(RLIMIT_RSS, (rss_limit, hard_limit))

    from changeset.create import createChangeset

    request = json_decode(sys.stdin.read())

    try:
        db = dbutils.Database()
        createChangeset(db, request)
        db.close()
        sys.stdout.write(json_encode(request))
    except:
        print "Request:"
        print json_encode(request, indent=2)
        print
        print_exc(file=sys.stdout)
def slave(): import StringIO import traceback import dbutils import gitutils import index def reject(message): sys_stdout.write(json_encode({ "status": "reject", "message": message })) sys.exit(0) def error(message): sys_stdout.write(json_encode({ "status": "error", "error": message })) sys.exit(0) db = dbutils.Database.forUser() try: data = sys.stdin.read() request = json_decode(data) create_branches = [] delete_branches = [] update_branches = [] create_tags = [] delete_tags = [] update_tags = [] user = getUser(db, request["user_name"]) authentication_labels = auth.DATABASE.getAuthenticationLabels(user) db.setUser(user, authentication_labels) try: repository = gitutils.Repository.fromName( db, request["repository_name"], for_modify=True) except auth.AccessDenied as error: reject(error.message) if request["flags"] and user.isSystem(): flags = dict(flag.split("=", 1) for flag in request["flags"].split(",")) else: flags = {} sys.stdout = StringIO.StringIO() commits_to_process = set() for ref in request["refs"]: name = ref["name"] old_sha1 = ref["old_sha1"] new_sha1 = ref["new_sha1"] if "//" in name: reject("invalid ref name: '%s'" % name) if not name.startswith("refs/"): reject("unexpected ref name: '%s'" % name) if new_sha1 != '0000000000000000000000000000000000000000': commits_to_process.add(new_sha1) name = name[len("refs/"):] if name.startswith("heads/"): name = name[len("heads/"):] if new_sha1 == '0000000000000000000000000000000000000000': delete_branches.append((name, old_sha1)) elif old_sha1 == '0000000000000000000000000000000000000000': create_branches.append((name, new_sha1)) else: update_branches.append((name, old_sha1, new_sha1)) elif name.startswith("tags/"): name = name[len("tags/"):] if old_sha1 == '0000000000000000000000000000000000000000': create_tags.append((name, new_sha1)) elif new_sha1 == '0000000000000000000000000000000000000000': delete_tags.append(name) else: update_tags.append((name, old_sha1, new_sha1)) elif name.startswith("temporary/") or name.startswith("keepalive/"): # len("temporary/") == len("keepalive/") name = name[len("temporary/"):] if name != new_sha1: reject("invalid update of '%s'; value is not %s" % (ref["name"], name)) else: reject("unexpected ref name: '%s'" % ref["name"]) multiple = (len(delete_branches) + len(update_branches) + len(create_branches) + len(delete_tags) + len(update_tags) + len(create_tags)) > 1 info = [] for sha1 in commits_to_process: index.processCommits(db, repository, sha1) for name, old in delete_branches: index.deleteBranch(db, user, repository, name, old) info.append("branch deleted: %s" % name) for name, old, new in update_branches: index.updateBranch(db, user, repository, name, old, new, multiple, flags) info.append("branch updated: %s (%s..%s)" % (name, old[:8], new[:8])) index.createBranches(db, user, repository, create_branches, flags) for name, new in create_branches: info.append("branch created: %s (%s)" % (name, new[:8])) for name in delete_tags: index.deleteTag(db, user, repository, name) info.append("tag deleted: %s" % name) for name, old, new in update_tags: index.updateTag(db, user, repository, name, old, new) info.append("tag updated: %s (%s..%s)" % (name, old[:8], new[:8])) for name, new in create_tags: index.createTag(db, user, repository, name, new) info.append("tag created: %s (%s)" % (name, new[:8])) sys_stdout.write(json_encode({ "status": "ok", "accept": True, "output": sys.stdout.getvalue(), "info": info })) db.commit() except index.IndexException as exception: sys_stdout.write(json_encode({ "status": "ok", 
"accept": False, "output": exception.message, "info": info })) except SystemExit: raise except: exception = traceback.format_exc() message = """\ %s Request: %s %s""" % (exception.splitlines()[-1], json_encode(request, indent=2), traceback.format_exc()) sys_stdout.write(json_encode({ "status": "error", "error": message })) finally: db.close()
def handle_input(self, data): try: data = json_decode(data) except ValueError: return message = "connection from %s:%d:" % self.__peeraddress message += "\n repository: %s" % data["repository"] if data.has_key("timeout"): message += "\n timeout: %d" % data["timeout"] if data["branches"]: message += "\n branches: %s" % ", ".join(data["branches"]) if data["tags"]: message += "\n tags: %s" % ", ".join(data["tags"]) self.server.info(message) db = dbutils.Database() try: cursor = db.cursor() notify_tracker = False wait_for_reply = False for branch in data["branches"]: cursor.execute("""SELECT id, local_name FROM trackedbranches WHERE remote=%s AND remote_name=%s AND NOT disabled AND next IS NOT NULL""", (data["repository"], branch)) row = cursor.fetchone() if row: branch_id, local_name = row cursor.execute("""UPDATE trackedbranches SET next=NULL WHERE id=%s""", (branch_id,)) notify_tracker = True self.server.debug("tracked branch: %s" % local_name) if len(data["branches"]) == 1 and local_name.startswith("r/"): wait_for_reply = (True, branch_id) self.server.debug(" will wait for reply") if data["tags"]: cursor.execute("""SELECT id FROM trackedbranches WHERE remote=%s AND remote_name=%s AND NOT disabled AND next IS NOT NULL""", (data["repository"], "*")) row = cursor.fetchone() if row: branch_id = row[0] cursor.execute("""UPDATE trackedbranches SET next=NULL WHERE id=%s""", (branch_id,)) notify_tracker = True db.commit() if notify_tracker: if wait_for_reply: branch_id = wait_for_reply[1] cursor.execute("SELECT COUNT(*) FROM trackedbranchlog WHERE branch=%s", (branch_id,)) log_offset = cursor.fetchone()[0] self.server.add_peer(BranchTrackerHook.WaitForUpdate(self, branch_id, data.get("timeout", 30), log_offset)) try: branchtracker_pid = int(open(configuration.services.BRANCHTRACKER["pidfile_path"]).read().strip()) os.kill(branchtracker_pid, signal.SIGHUP) except: self.server.exception() return if wait_for_reply: return self.close() finally: try: db.close() except: pass
def handle_input(self, data): try: data = json_decode(data) except ValueError: return message = "connection from %s:%d:" % self.__peeraddress message += "\n repository: %s" % data["repository"] if data.has_key("timeout"): message += "\n timeout: %d" % data["timeout"] if data["branches"]: message += "\n branches: %s" % ", ".join( data["branches"]) if data["tags"]: message += "\n tags: %s" % ", ".join(data["tags"]) self.server.info(message) db = dbutils.Database() try: cursor = db.cursor() notify_tracker = False wait_for_reply = False # Make sure the 'knownremotes' table has this remote listed # as "pushing" since it obviously is. cursor.execute( """SELECT pushing FROM knownremotes WHERE url=%s""", (data["repository"], )) row = cursor.fetchone() if not row: cursor.execute( """INSERT INTO knownremotes (url, pushing) VALUES (%s, TRUE)""", (data["repository"], )) elif not row[0]: cursor.execute( """UPDATE knownremotes SET pushing=TRUE WHERE url=%s""", (data["repository"], )) # If we just recorded this remote as "pushing," adjust the # configured updating frequency of any existing tracked # branches from it. if not row or not row[0]: cursor.execute( """UPDATE trackedbranches SET delay='1 week' WHERE remote=%s""", (data["repository"], )) for branch in data["branches"]: cursor.execute( """SELECT id, local_name FROM trackedbranches WHERE remote=%s AND remote_name=%s AND NOT disabled AND next IS NOT NULL""", (data["repository"], branch)) row = cursor.fetchone() if row: branch_id, local_name = row cursor.execute( """UPDATE trackedbranches SET next=NULL WHERE id=%s""", (branch_id, )) notify_tracker = True self.server.debug("tracked branch: %s" % local_name) if len(data["branches"] ) == 1 and local_name.startswith("r/"): wait_for_reply = (True, branch_id) self.server.debug(" will wait for reply") if data["tags"]: cursor.execute( """SELECT id FROM trackedbranches WHERE remote=%s AND remote_name=%s AND NOT disabled AND next IS NOT NULL""", (data["repository"], "*")) row = cursor.fetchone() if row: branch_id = row[0] cursor.execute( """UPDATE trackedbranches SET next=NULL WHERE id=%s""", (branch_id, )) notify_tracker = True db.commit() if notify_tracker: if wait_for_reply: branch_id = wait_for_reply[1] cursor.execute( "SELECT COUNT(*) FROM trackedbranchlog WHERE branch=%s", (branch_id, )) log_offset = cursor.fetchone()[0] self.server.add_peer( BranchTrackerHook.WaitForUpdate( self, branch_id, data.get("timeout", 30), log_offset)) try: branchtracker_pid = int( open(configuration.services.BRANCHTRACKER[ "pidfile_path"]).read().strip()) os.kill(branchtracker_pid, signal.SIGHUP) except: self.server.exception() return if wait_for_reply: return self.close() finally: try: db.close() except: pass
def handle_input(self, data): try: data = json_decode(data) except ValueError: return message = "connection from %s:%d:" % self.__peeraddress message += "\n repository: %s" % data["repository"] if data.has_key("timeout"): message += "\n timeout: %d" % data["timeout"] if data["branches"]: message += "\n branches: %s" % ", ".join(data["branches"]) if data["tags"]: message += "\n tags: %s" % ", ".join(data["tags"]) self.server.info(message) db = dbutils.Database() try: cursor = db.cursor() notify_tracker = False wait_for_reply = False # Make sure the 'knownremotes' table has this remote listed # as "pushing" since it obviously is. cursor.execute("""SELECT pushing FROM knownremotes WHERE url=%s""", (data["repository"],)) row = cursor.fetchone() if not row: cursor.execute("""INSERT INTO knownremotes (url, pushing) VALUES (%s, TRUE)""", (data["repository"],)) elif not row[0]: cursor.execute("""UPDATE knownremotes SET pushing=TRUE WHERE url=%s""", (data["repository"],)) # If we just recorded this remote as "pushing," adjust the # configured updating frequency of any existing tracked # branches from it. if not row or not row[0]: cursor.execute("""UPDATE trackedbranches SET delay='1 week' WHERE remote=%s""", (data["repository"],)) for branch in data["branches"]: cursor.execute("""SELECT id, local_name FROM trackedbranches WHERE remote=%s AND remote_name=%s AND NOT disabled AND next IS NOT NULL""", (data["repository"], branch)) row = cursor.fetchone() if row: branch_id, local_name = row cursor.execute("""UPDATE trackedbranches SET next=NULL WHERE id=%s""", (branch_id,)) notify_tracker = True self.server.debug("tracked branch: %s" % local_name) if len(data["branches"]) == 1 and local_name.startswith("r/"): wait_for_reply = (True, branch_id) self.server.debug(" will wait for reply") if data["tags"]: cursor.execute("""SELECT id FROM trackedbranches WHERE remote=%s AND remote_name=%s AND NOT disabled AND next IS NOT NULL""", (data["repository"], "*")) row = cursor.fetchone() if row: branch_id = row[0] cursor.execute("""UPDATE trackedbranches SET next=NULL WHERE id=%s""", (branch_id,)) notify_tracker = True db.commit() if notify_tracker: if wait_for_reply: branch_id = wait_for_reply[1] cursor.execute("SELECT COUNT(*) FROM trackedbranchlog WHERE branch=%s", (branch_id,)) log_offset = cursor.fetchone()[0] self.server.add_peer(BranchTrackerHook.WaitForUpdate(self, branch_id, data.get("timeout", 30), log_offset)) try: branchtracker_pid = int(open(configuration.services.BRANCHTRACKER["pidfile_path"]).read().strip()) os.kill(branchtracker_pid, signal.SIGHUP) except: self.server.exception() return if wait_for_reply: return self.close() finally: try: db.close() except: pass
def renderCreateReview(req, db, user): if user.isAnonymous(): raise page.utils.NeedLogin(req) repository = req.getParameter("repository", filter=gitutils.Repository.FromParameter(db), default=None) applyparentfilters = req.getParameter("applyparentfilters", "yes" if user.getPreference(db, 'review.applyUpstreamFilters') else "no") == "yes" cursor = db.cursor() if req.method == "POST": data = json_decode(req.read()) summary = data.get("summary") description = data.get("description") review_branch_name = data.get("review_branch_name") commit_ids = data.get("commit_ids") commit_sha1s = data.get("commit_sha1s") else: summary = req.getParameter("summary", None) description = req.getParameter("description", None) review_branch_name = req.getParameter("reviewbranchname", None) commit_ids = None commit_sha1s = None commits_arg = req.getParameter("commits", None) remote = req.getParameter("remote", None) upstream = req.getParameter("upstream", "master") branch_name = req.getParameter("branch", None) if commits_arg: try: commit_ids = map(int, commits_arg.split(",")) except: commit_sha1s = [repository.revparse(ref) for ref in commits_arg.split(",")] elif branch_name: cursor.execute("""SELECT commit FROM reachable JOIN branches ON (branch=id) WHERE repository=%s AND name=%s""", (repository.id, branch_name)) commit_ids = [commit_id for (commit_id,) in cursor] if len(commit_ids) > configuration.limits.MAXIMUM_REVIEW_COMMITS: raise page.utils.DisplayMessage( "Too many commits!", (("<p>The branch <code>%s</code> contains %d commits. Reviews can" "be created from branches that contain at most %d commits.</p>" "<p>This limit can be adjusted by modifying the system setting" "<code>configuration.limits.MAXIMUM_REVIEW_COMMITS</code>.</p>") % (htmlutils.htmlify(branch_name), len(commit_ids), configuration.limits.MAXIMUM_REVIEW_COMMITS)), html=True) else: return renderSelectSource(req, db, user) req.content_type = "text/html; charset=utf-8" if commit_ids: commits = [gitutils.Commit.fromId(db, repository, commit_id) for commit_id in commit_ids] elif commit_sha1s: commits = [gitutils.Commit.fromSHA1(db, repository, commit_sha1) for commit_sha1 in commit_sha1s] else: commits = [] if not commit_ids: commit_ids = [commit.getId(db) for commit in commits] if not commit_sha1s: commit_sha1s = [commit.sha1 for commit in commits] if summary is None: if len(commits) == 1: summary = commits[0].summary() else: summary = "" if review_branch_name: invalid_branch_name = "false" default_branch_name = review_branch_name else: invalid_branch_name = htmlutils.jsify(user.name + "/") default_branch_name = user.name + "/" match = re.search("(?:^|[Ff]ix(?:e[ds])?(?: +for)?(?: +bug)? 
+)([A-Z][A-Z0-9]+-[0-9]+)", summary) if match: invalid_branch_name = "false" default_branch_name = htmlutils.htmlify(match.group(1)) changesets = [] changeset_utils.createChangesets(db, repository, commits) for commit in commits: changesets.extend(changeset_utils.createChangeset(db, None, repository, commit, do_highlight=False)) changeset_ids = [changeset.id for changeset in changesets] all_reviewers, all_watchers = reviewing.utils.getReviewersAndWatchers( db, repository, changesets=changesets, applyparentfilters=applyparentfilters) document = htmlutils.Document(req) html = document.html() head = html.head() document.addInternalScript(user.getJS(db)) if branch_name: document.addInternalScript("var fromBranch = %s;" % htmlutils.jsify(branch_name)) if remote: document.addInternalScript("var trackedbranch = { remote: %s, name: %s };" % (htmlutils.jsify(remote), htmlutils.jsify(branch_name))) head.title().text("Create Review") body = html.body(onload="document.getElementById('branch_name').focus()") page.utils.generateHeader(body, db, user, lambda target: target.button(onclick="submitReview();").text("Submit Review")) document.addExternalStylesheet("resource/createreview.css") document.addExternalScript("resource/createreview.js") document.addExternalScript("resource/reviewfilters.js") document.addExternalScript("resource/autocomplete.js") document.addInternalScript(""" var invalid_branch_name = %s; var review = { commit_ids: %r, commit_sha1s: %r, changeset_ids: %r };""" % (invalid_branch_name, commit_ids, commit_sha1s, changeset_ids)) document.addInternalScript(repository.getJS()) main = body.div("main") table = main.table("basic paleyellow", align="center") table.tr().td("h1", colspan=3).h1().text("Create Review") row = table.tr("line") row.td("heading").text("Branch Name:") row.td("value").text("r/").input("value", id="branch_name", value=default_branch_name) row.td("status") row = table.tr() if not remote: row.td("help", colspan=3).div().text("""\ This is the main identifier of the review. It will be created in the review repository to contain the commits below. Reviewers can fetch it from there, and additional commits can be added to the review later by pushing them to this branch in the review repository.""") else: row.td("help", colspan=3).div().text("""\ This is the main identifier of the review. It will be created in the review repository to contain the commits below, and reviewers can fetch it from there.""") if remote: row = table.tr("line") row.td("heading").text("Tracked Branch:") value = row.td("value") value.code("branch inset").text(branch_name, linkify=linkify.Context(remote=remote)) value.text(" in ") value.code("remote inset").text(remote, linkify=linkify.Context()) row.td("status") row = table.tr() row.td("help", colspan=3).div().text("""\ Rather than pushing directly to the review branch in Critic's repository to add commits to the review, you will be pushing to this branch (in a separate repository,) from which Critic will fetch commits and add them to the review automatically.""") row = table.tr("line") row.td("heading").text("Summary:") row.td("value").input("value", id="summary", value=summary) row.td("status") row = table.tr() row.td("help", colspan=3).div().text("""\ The summary should be a short summary of the changes in the review. It will appear in the subject of all emails sent about the review. 
""") row = table.tr("line description") row.td("heading").text("Description:") textarea = row.td("value").textarea(id="description", rows=12) textarea.preformatted() if description: textarea.text(description) row.td("status") row = table.tr() row.td("help", colspan=3).div().text("""\ The description should describe the changes to be reviewed. It is usually fine to leave the description empty, since the commit messages are also available in the review. """) generateReviewersAndWatchersTable(db, repository, main, all_reviewers, all_watchers, applyparentfilters=applyparentfilters) row = table.tr("line recipients") row.td("heading").text("Recipient List:") cell = row.td("value", colspan=2).preformatted() cell.span("mode").text("Everyone") cell.span("users") cell.text(".") buttons = cell.div("buttons") buttons.button(onclick="editRecipientList();").text("Edit Recipient List") row = table.tr() row.td("help", colspan=3).div().text("""\ The basic recipient list for e-mails sent about the review. """) log.html.render(db, main, "Commits", commits=commits) return document
def renderConfig(req, db, user): highlight = req.getParameter("highlight", None) repository = req.getParameter("repository", None, gitutils.Repository.FromParameter(db)) filter_id = req.getParameter("filter", None, int) defaults = req.getParameter("defaults", "no") == "yes" if filter_id is not None: # There can't be system-wide defaults for one of a single user's # filters. defaults = False cursor = db.cursor() if filter_id is not None: cursor.execute( """SELECT filters.path, repositories.name FROM filters JOIN repositories ON (repositories.id=filters.repository) WHERE filters.id=%s""", (filter_id, )) row = cursor.fetchone() if not row: raise page.utils.InvalidParameterValue(name="filter", value=str(filter_id), expected="valid filter id") title = "Filter preferences: %s in %s" % row elif repository is not None: title = "Repository preferences: %s" % repository.name else: title = "User preferences" document = htmlutils.Document(req) document.setTitle(title) html = document.html() head = html.head() body = html.body() if user.isAnonymous(): disabled = "disabled" else: disabled = None def generate_right(target): if defaults: url = "/config" if repository is not None: url += "?repository=%d" % repository.id target.a("button", href=url).text("Edit Own") elif user.hasRole(db, "administrator"): url = "/config?defaults=yes" if repository is not None: url += "&repository=%d" % repository.id what = "Repository Defaults" else: what = "System Defaults" target.a("button", href=url).text("Edit " + what) injected = page.utils.generateHeader(body, db, user, current_page="config", generate_right=generate_right) document.addExternalStylesheet("resource/config.css") document.addExternalScript("resource/config.js") document.addInternalScript(user.getJS()) document.addInternalScript( "var repository_id = %s, filter_id = %s, defaults = %s;" % (htmlutils.jsify(repository.id if repository else None), htmlutils.jsify(filter_id), htmlutils.jsify(defaults))) target = body.div("main") table = target.table('preferences paleyellow', align='center', cellspacing=0) h1 = table.tr().td('h1', colspan=3).h1() h1.text(title) if filter_id is None: page.utils.generateRepositorySelect( db, user, h1.span("right"), allow_selecting_none=True, selected=repository.name if repository else False) if filter_id is not None: conditional = "per_filter" elif repository is not None: conditional = "per_repository" elif defaults: conditional = "per_system" else: conditional = "per_user" cursor = db.cursor() cursor.execute( """SELECT item, type, description, per_repository, per_filter FROM preferences WHERE %(conditional)s""" % {"conditional": conditional}) preferences = dict(( item, [preference_type, description, None, None, per_repository, per_filter]) for item, preference_type, description, per_repository, per_filter in cursor) def set_values(rows, is_overrides): index = 3 if is_overrides else 2 for item, integer, string in rows: if preferences[item][0] == "boolean": preferences[item][index] = bool(integer) elif preferences[item][0] == "integer": preferences[item][index] = integer else: preferences[item][index] = string cursor.execute( """SELECT item, integer, string FROM userpreferences WHERE item=ANY (%s) AND uid IS NULL AND repository IS NULL""", (preferences.keys(), )) set_values(cursor, is_overrides=False) if repository is not None: cursor.execute( """SELECT item, integer, string FROM userpreferences WHERE item=ANY (%s) AND uid IS NULL AND repository=%s""", (preferences.keys(), repository.id)) # These are overrides if we're editing the 
defaults for a specific # repository. set_values(cursor, is_overrides=defaults) if not defaults: cursor.execute( """SELECT item, integer, string FROM userpreferences WHERE item=ANY (%s) AND uid=%s AND repository IS NULL AND filter IS NULL""", (preferences.keys(), user.id)) if filter_id is not None or repository is not None: # We're looking at per-filter or per-repository settings, so the # user's global settings are defaults, not the overrides. If a # per-filter or per-repository override is deleted, the user's # global setting kicks in instead. set_values(cursor, is_overrides=False) if filter_id is not None: cursor.execute( """SELECT item, integer, string FROM userpreferences WHERE item=ANY (%s) AND uid=%s AND filter=%s""", (preferences.keys(), user.id, filter_id)) else: cursor.execute( """SELECT item, integer, string FROM userpreferences WHERE item=ANY (%s) AND uid=%s AND repository=%s""", (preferences.keys(), user.id, repository.id)) # Set the overrides. This is either the user's global settings, if # we're not looking at per-filter or per-repository settings, or the # user's per-filter or per-repository settings if we are. set_values(cursor, is_overrides=True) elif repository is None: # When editing global defaults, use the values from preferences.json # used when initially installing Critic as the default values. defaults_path = os.path.join(configuration.paths.INSTALL_DIR, "data/preferences.json") with open(defaults_path) as defaults_file: factory_defaults = textutils.json_decode(defaults_file.read()) for item, data in preferences.items(): data[3] = data[2] if item in factory_defaults: data[2] = factory_defaults[item]["default"] if data[2] == data[3]: data[3] = None if req.getParameter("recalculate", "no") == "yes": for item, data in preferences.items(): if data[2] == data[3]: user.setPreference(db, item, None, repository=repository, filter_id=filter_id) data[3] = None db.commit() debug_enabled = user.getPreference(db, "debug.enabled") for item, (preference_type, description, default_value, current_value, per_repository, per_filter) in sorted(preferences.items()): if item.startswith( "debug.") and item != "debug.enabled" and not debug_enabled: continue line_class_name = "line" help_class_name = "help" if highlight is not None and not fnmatch.fnmatch(item, highlight): continue if current_value is None: current_value = default_value else: line_class_name += " customized" row = table.tr(line_class_name) heading = row.td("heading") heading.text("%s:" % item) value = row.td("value", colspan=2) value.preformatted() options = None optgroup = None def addOption(value, name, selected=lambda value: value == current_value, **attributes): (optgroup or options).option( value=value, selected="selected" if selected(value) else None, **attributes).text(name) if preference_type == "boolean": value.input("setting", type="checkbox", name=item, checked="checked" if current_value else None, disabled=disabled, critic_current=htmlutils.jsify(current_value), critic_default=htmlutils.jsify(default_value)) elif preference_type == "integer": value.input("setting", type="number", min=0, max=2**31 - 1, name=item, value=current_value, disabled=disabled, critic_current=htmlutils.jsify(current_value), critic_default=htmlutils.jsify(default_value)) elif item == "defaultRepository": page.utils.generateRepositorySelect( db, user, value, allow_selecting_none=True, placeholder_text="No default repository", selected=current_value, name=item, disabled=disabled, critic_current=htmlutils.jsify(current_value), 
critic_default=htmlutils.jsify(default_value)) elif item == "defaultPage": options = value.select( "setting", name=item, disabled=disabled, critic_current=htmlutils.jsify(current_value), critic_default=htmlutils.jsify(default_value)) addOption("home", "Home") addOption("dashboard", "Dashboard") addOption("branches", "Branches") addOption("config", "Config") addOption("tutorial", "Tutorial") elif item == "email.urlType": cursor2 = db.cursor() cursor2.execute( """SELECT key, description, authenticated_scheme, hostname FROM systemidentities ORDER BY description ASC""") identities = cursor2.fetchall() selected = set(current_value.split(",")) options = value.select( "setting", name=item, size=len(identities), multiple="multiple", disabled=disabled, critic_current=htmlutils.jsify(current_value), critic_default=htmlutils.jsify(default_value)) for key, label, authenticated_scheme, hostname in identities: prefix = "%s://%s/" % (authenticated_scheme, hostname) addOption( key, label, selected=lambda value: value in selected, class_="url-type flex", data_text=label, data_html=( "<span class=label>%s</span>" "<span class=prefix>%s</span>" % (htmlutils.htmlify(label), htmlutils.htmlify(prefix)))) elif item == "email.updatedReview.quotedComments": options = value.select( "setting", name=item, disabled=disabled, critic_current=htmlutils.jsify(current_value), critic_default=htmlutils.jsify(default_value)) addOption("all", "All") addOption("first", "First") addOption("last", "Last") addOption("firstlast", "First & Last") elif item == "timezone": options = value.select( "setting", name=item, disabled=disabled, critic_current=htmlutils.jsify(current_value), critic_default=htmlutils.jsify(default_value)) for group, zones in dbutils.timezones.sortedTimezones(db): optgroup = options.optgroup(label=group) for name, abbrev, utc_offset in zones: seconds = utc_offset.total_seconds() offset = "%s%02d:%02d" % ("-" if seconds < 0 else "+", abs(seconds) / 3600, (abs(seconds) % 3600) / 60) addOption("%s/%s" % (group, name), "%s (%s / UTC%s)" % (name, abbrev, offset)) elif item == "repository.urlType": options = value.select( "setting", name=item, disabled=disabled, critic_current=htmlutils.jsify(current_value), critic_default=htmlutils.jsify(default_value)) long_path = os.path.join(configuration.paths.GIT_DIR, "<path>.git") if "git" in configuration.base.REPOSITORY_URL_TYPES: addOption("git", "git://%s/<path>.git" % configuration.base.HOSTNAME) if "http" in configuration.base.REPOSITORY_URL_TYPES: scheme = configuration.base.ACCESS_SCHEME if scheme == "both": if user.isAnonymous(): scheme = "http" else: scheme = "https" addOption( "http", "%s://%s/<path>.git" % (scheme, configuration.base.HOSTNAME)) if "ssh" in configuration.base.REPOSITORY_URL_TYPES: addOption( "ssh", "ssh://%s%s" % (configuration.base.HOSTNAME, long_path)) if "host" in configuration.base.REPOSITORY_URL_TYPES: addOption("host", "%s:%s" % (configuration.base.HOSTNAME, long_path)) else: if item.startswith("email.subjectLine."): placeholder = "Email type disabled" else: placeholder = None value.input("setting", type="text", size=80, name=item, placeholder=placeholder, value=current_value, disabled=disabled, critic_current=htmlutils.jsify(current_value), critic_default=htmlutils.jsify(default_value)) also_configurable_per = [] if per_repository and repository is None: also_configurable_per.append("repository") if per_filter and filter_id is None: also_configurable_per.append("filter") if also_configurable_per: value.span("also-configurable-per").text( "Also 
configurable per: %s" % ", ".join(also_configurable_per)) reset = value.span("reset") reset.a(href="javascript:saveSettings(%s);" % htmlutils.jsify(item)).text("[reset to default]") cell = table.tr(help_class_name).td("help", colspan=3) magic_description_links = { "format string for subject line": "/tutorial?item=reconfigure#subject_line_formats", "phony recipients": "/tutorial?item=reconfigure#review_association_recipients" } for link_text, link_href in magic_description_links.items(): prefix, link_text, suffix = description.partition(link_text) if link_text: cell.text(prefix) cell.a(href=link_href).text(link_text) cell.text(suffix) break else: cell.text(description) if injected and injected.has_key("preferences") \ and not defaults \ and repository is None \ and filter_id is None: for extension_name, author, preferences in injected["preferences"]: if highlight is not None: prefix = "%s/%s" % (author.name, extension_name) preferences = [ preference for preference in preferences if fnmatch.fnmatch("%s/%s" % (prefix, preference["name"]), highlight) ] if not preferences: continue h2 = table.tr("extension").td("extension", colspan=3).h2() h2.span("name").text(extension_name) h2.text(" by ") h2.span("author").text(author.fullname) for preference in preferences: preference_url = preference["url"] preference_name = preference["name"] preference_type = preference["type"] preference_value = preference["value"] preference_default = preference["default"] preference_description = preference["description"] line_class_name = "line" help_class_name = "help" if preference_value != preference_default: line_class_name += " customized" row = table.tr(line_class_name) heading = row.td("heading") heading.text("%s:" % preference_name) value = row.td("value", colspan=2) value.preformatted() if preference_type == "boolean": value.input( "setting", type="checkbox", name=preference_name, disabled=disabled, checked="checked" if preference_value else None, critic_url=preference_url, critic_default=htmlutils.jsify(bool(preference_value)), critic_extension=extension_name) elif preference_type == "integer": value.input( "setting", type="number", min=0, name=preference_name, value=preference_value, disabled=disabled, critic_url=preference_url, critic_default=htmlutils.jsify(preference_default), critic_extension=extension_name) elif preference_type == "string": value.input( "setting", type="text", name=preference_name, value=preference_value, disabled=disabled, critic_url=preference_url, critic_default=htmlutils.jsify(preference_default), critic_extension=extension_name) else: select = value.select( "setting", name=preference_name, disabled=disabled, critic_url=preference_url, critic_value=preference_value, critic_default=htmlutils.jsify(preference_default), critic_extension=extension_name) for choice in preference_type: select.option(value=choice["value"], selected="selected" if preference_value == choice["value"] else None).text( choice["title"]) cell = table.tr(help_class_name).td("help", colspan=3) cell.text(preference_description) critic_installed_sha1 = dbutils.getInstalledSHA1(db) div = body.div("installed_sha1") div.text("Critic version: ") div.a(href="https://critic-review.org/critic/%s" % critic_installed_sha1).text(critic_installed_sha1) return document
def processLine(line): try: command, value = line.split(" ", 1) except ValueError: raise Error("Invalid line in output: %r" % line) if command not in ("link", "script", "stylesheet", "preference"): raise Error("Invalid command: %r" % command) try: value = json_decode(value.strip()) except ValueError: raise Error("Invalid JSON: %r" % value.strip()) def is_string(value): return isinstance(value, basestring) if command in ("script", "stylesheet") and not is_string(value): raise Error("Invalid value for %r: %r (expected string)" % (command, value)) elif command == "link": if not isinstance(value, list) or len(value) != 2: raise Error("Invalid value for %r: %r (expected array of length two)" % (command, value)) elif not is_string(value[0]): raise Error("Invalid value for %r: %r (expected string at array[0])" % (command, value)) elif not (is_string(value[1]) or value[1] is None): raise Error("Invalid value for %r: %r (expected string or null at array[1])" % (command, value)) elif command == "preference": if path != "config": raise Error("Invalid command: %r only valid on /config page" % command) elif not isinstance(value, dict): raise Error("Invalid value for %r: %r (expected object)" % (command, value)) for name in ("url", "name", "type", "value", "default", "description"): if name not in value: raise Error("Invalid value for %r: %r (missing property: %r)" % (command, value, name)) preference_url = value["url"] preference_name = value["name"] preference_type = value["type"] preference_value = value["value"] preference_default = value["default"] preference_description = value["description"] if not is_string(preference_url): raise Error("Invalid value for %r: %r (expected string as %r)" % (command, value, "url")) elif not is_string(preference_name): raise Error("Invalid value for %r: %r (expected string as %r)" % (command, value, "name")) elif not is_string(preference_description): raise Error("Invalid value for %r: %r (expected string as %r)" % (command, value, "description")) if is_string(preference_type): if preference_type not in ("boolean", "integer", "string"): raise Error("Invalid value for %r: %r (unsupported preference type)" % (command, value)) if preference_type == "boolean": type_check = lambda value: isinstance(value, bool) elif preference_type == "integer": type_check = lambda value: isinstance(value, int) else: type_check = is_string if not type_check(preference_value): raise Error("Invalid value for %r: %r (type mismatch between %r and %r)" % (command, value, "value", "type")) if not type_check(preference_default): raise Error("Invalid value for %r: %r (type mismatch between %r and %r)" % (command, value, "default", "type")) else: if not isinstance(preference_type, list): raise Error("Invalid value for %r: %r (invalid %r, expected string or array)" % (command, value, "type")) for index, choice in enumerate(preference_type): if not isinstance(choice, dict) \ or not isinstance(choice.get("value"), basestring) \ or not isinstance(choice.get("title"), basestring): raise Error("Invalid value for %r: %r (invalid preference choice: %r)" % (command, value, choice)) choices = set([choice["value"] for choice in preference_type]) if not is_string(preference_value) or preference_value not in choices: raise Error("Invalid value for %r: %r (%r not among valid choices)" % (command, value, "value")) if not is_string(preference_default) or preference_default not in choices: raise Error("Invalid value for %r: %r (%r not among valid choices)" % (command, value, "default")) commands.append((command, 
value)) return True
def process(value):
    value = value.strip()
    if value[0] == '"' == value[-1]:
        return json_decode(value)
    else:
        return value
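# Illustration (assumption, not from the original source): values wrapped in
# double quotes are treated as JSON string literals and decoded, everything
# else is returned stripped but otherwise untouched.
#
#   process('"quoted"')   -> 'quoted'
#   process('  plain  ')  -> 'plain'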
def renderCreateReview(req, db, user): if user.isAnonymous(): raise page.utils.NeedLogin(req) repository = req.getParameter("repository", filter=gitutils.Repository.FromParameter(db), default=None) applyparentfilters = req.getParameter("applyparentfilters", "yes" if user.getPreference(db, 'review.applyUpstreamFilters') else "no") == "yes" cursor = db.cursor() if req.method == "POST": data = json_decode(req.read()) summary = data.get("summary") description = data.get("description") review_branch_name = data.get("review_branch_name") commit_ids = data.get("commit_ids") commit_sha1s = data.get("commit_sha1s") else: summary = req.getParameter("summary", None) description = req.getParameter("description", None) review_branch_name = req.getParameter("reviewbranchname", None) commit_ids = None commit_sha1s = None commits_arg = req.getParameter("commits", None) remote = req.getParameter("remote", None) upstream = req.getParameter("upstream", "master") branch_name = req.getParameter("branch", None) if commits_arg: try: commit_ids = map(int, commits_arg.split(",")) except: commit_sha1s = [repository.revparse(ref) for ref in commits_arg.split(",")] elif branch_name: cursor.execute("""SELECT commit FROM reachable JOIN branches ON (branch=id) WHERE repository=%s AND name=%s""", (repository.id, branch_name)) commit_ids = [commit_id for (commit_id,) in cursor] if len(commit_ids) > configuration.limits.MAXIMUM_REVIEW_COMMITS: raise page.utils.DisplayMessage( "Too many commits!", (("<p>The branch <code>%s</code> contains %d commits. Reviews can" "be created from branches that contain at most %d commits.</p>" "<p>This limit can be adjusted by modifying the system setting" "<code>configuration.limits.MAXIMUM_REVIEW_COMMITS</code>.</p>") % (htmlutils.htmlify(branch_name), len(commit_ids), configuration.limits.MAXIMUM_REVIEW_COMMITS)), html=True) else: return renderSelectSource(req, db, user) req.content_type = "text/html; charset=utf-8" if commit_ids: commits = [gitutils.Commit.fromId(db, repository, commit_id) for commit_id in commit_ids] elif commit_sha1s: commits = [gitutils.Commit.fromSHA1(db, repository, commit_sha1) for commit_sha1 in commit_sha1s] else: commits = [] if not commit_ids: commit_ids = [commit.getId(db) for commit in commits] if not commit_sha1s: commit_sha1s = [commit.sha1 for commit in commits] if summary is None: if len(commits) == 1: summary = commits[0].summary() else: summary = "" if review_branch_name: invalid_branch_name = "false" default_branch_name = review_branch_name else: invalid_branch_name = htmlutils.jsify(user.name + "/") default_branch_name = user.name + "/" match = re.search("(?:^|[Ff]ix(?:e[ds])?(?: +for)?(?: +bug)? 
+)([A-Z][A-Z0-9]+-[0-9]+)", summary) if match: invalid_branch_name = "false" default_branch_name = htmlutils.htmlify(match.group(1)) changesets = [] changeset_utils.createChangesets(db, repository, commits) for commit in commits: changesets.extend(changeset_utils.createChangeset(db, None, repository, commit, do_highlight=False)) changeset_ids = [changeset.id for changeset in changesets] all_reviewers, all_watchers = reviewing.utils.getReviewersAndWatchers( db, repository, changesets=changesets, applyparentfilters=applyparentfilters) document = htmlutils.Document(req) html = document.html() head = html.head() document.addInternalScript(user.getJS(db)) if branch_name: document.addInternalScript("var fromBranch = %s;" % htmlutils.jsify(branch_name)) if remote: document.addInternalScript("var trackedbranch = { remote: %s, name: %s };" % (htmlutils.jsify(remote), htmlutils.jsify(branch_name))) head.title().text("Create Review") body = html.body(onload="document.getElementById('branch_name').focus()") page.utils.generateHeader(body, db, user, lambda target: target.button(onclick="submitReview();").text("Submit Review")) document.addExternalStylesheet("resource/createreview.css") document.addExternalScript("resource/createreview.js") document.addExternalScript("resource/reviewfilters.js") document.addExternalScript("resource/autocomplete.js") document.addInternalScript(""" var invalid_branch_name = %s; var review_data = { commit_ids: %r, commit_sha1s: %r, changeset_ids: %r };""" % (invalid_branch_name, commit_ids, commit_sha1s, changeset_ids)) document.addInternalScript(repository.getJS()) main = body.div("main") table = main.table("basic paleyellow", align="center") table.tr().td("h1", colspan=3).h1().text("Create Review") row = table.tr("line") row.td("heading").text("Branch Name:") row.td("value").text("r/").input("value", id="branch_name", value=default_branch_name) row.td("status") row = table.tr() if not remote: row.td("help", colspan=3).div().text("""\ This is the main identifier of the review. It will be created in the review repository to contain the commits below. Reviewers can fetch it from there, and additional commits can be added to the review later by pushing them to this branch in the review repository.""") else: row.td("help", colspan=3).div().text("""\ This is the main identifier of the review. It will be created in the review repository to contain the commits below, and reviewers can fetch it from there.""") if remote: row = table.tr("line") row.td("heading").text("Tracked Branch:") value = row.td("value") value.code("branch inset").text(branch_name, linkify=linkify.Context(remote=remote)) value.text(" in ") value.code("remote inset").text(remote, linkify=linkify.Context()) row.td("status") row = table.tr() row.td("help", colspan=3).div().text("""\ Rather than pushing directly to the review branch in Critic's repository to add commits to the review, you will be pushing to this branch (in a separate repository,) from which Critic will fetch commits and add them to the review automatically.""") row = table.tr("line") row.td("heading").text("Summary:") row.td("value").input("value", id="summary", value=summary) row.td("status") row = table.tr() row.td("help", colspan=3).div().text("""\ The summary should be a short summary of the changes in the review. It will appear in the subject of all emails sent about the review. 
""") row = table.tr("line description") row.td("heading").text("Description:") textarea = row.td("value").textarea(id="description", rows=12) textarea.preformatted() if description: textarea.text(description) row.td("status") row = table.tr() row.td("help", colspan=3).div().text("""\ The description should describe the changes to be reviewed. It is usually fine to leave the description empty, since the commit messages are also available in the review. """) generateReviewersAndWatchersTable(db, repository, main, all_reviewers, all_watchers, applyparentfilters=applyparentfilters) row = table.tr("line recipients") row.td("heading").text("Recipient List:") cell = row.td("value", colspan=2).preformatted() cell.span("mode").text("Everyone") cell.span("users") cell.text(".") buttons = cell.div("buttons") buttons.button(onclick="editRecipientList();").text("Edit Recipient List") row = table.tr() row.td("help", colspan=3).div().text("""\ The basic recipient list for e-mails sent about the review. """) log.html.render(db, main, "Commits", commits=commits) return document
def executeProcess(db, manifest, role_name, script, function, extension_id, user_id, argv, timeout, stdin=None, rlimit_rss=256): # If |user_id| is not the same as |db.user|, then one user's access of the # system is triggering an extension on behalf of another user. This will # for instance happen when one user is adding changes to a review, # triggering an extension filter hook set up by another user. # # In this case, we need to check that the other user can access the # extension. # # If |user_id| is the same as |db.user|, we need to use |db.profiles|, which # may contain a profile associated with an access token that was used to # authenticate the user. if user_id != db.user.id: user = dbutils.User.fromId(db, user_id) authentication_labels = auth.DATABASE.getAuthenticationLabels(user) profiles = [ auth.AccessControlProfile.forUser(db, user, authentication_labels) ] else: authentication_labels = db.authentication_labels profiles = db.profiles extension = Extension.fromId(db, extension_id) if not auth.AccessControlProfile.isAllowedExtension( profiles, "execute", extension): raise auth.AccessDenied("Access denied to extension: execute %s" % extension.getKey()) flavor = manifest.flavor if manifest.flavor not in configuration.extensions.FLAVORS: flavor = configuration.extensions.DEFAULT_FLAVOR stdin_data = "%s\n" % json_encode({ "library_path": configuration.extensions.FLAVORS[flavor]["library"], "rlimit": { "rss": rlimit_rss }, "hostname": configuration.base.HOSTNAME, "dbname": configuration.database.PARAMETERS["database"], "dbuser": configuration.database.PARAMETERS["user"], "git": configuration.executables.GIT, "python": configuration.executables.PYTHON, "python_path": "%s:%s" % (configuration.paths.CONFIG_DIR, configuration.paths.INSTALL_DIR), "repository_work_copy_path": configuration.extensions.WORKCOPY_DIR, "changeset_address": configuration.services.CHANGESET["address"], "branchtracker_pid_path": configuration.services.BRANCHTRACKER["pidfile_path"], "maildelivery_pid_path": configuration.services.MAILDELIVERY["pidfile_path"], "is_development": configuration.debug.IS_DEVELOPMENT, "extension_path": manifest.path, "extension_id": extension_id, "user_id": user_id, "authentication_labels": list(authentication_labels), "role": role_name, "script_path": script, "fn": function, "argv": argv }) if stdin is not None: stdin_data += stdin # Double the timeout. Timeouts are primarily handled by the extension runner # service, which returns an error response on timeout. This deadline here is # thus mostly to catch the extension runner service itself timing out. 
deadline = time.time() + timeout * 2 try: connection = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) connection.settimeout(max(0, deadline - time.time())) connection.connect(configuration.services.EXTENSIONRUNNER["address"]) connection.sendall( json_encode({ "stdin": stdin_data, "flavor": flavor, "timeout": timeout })) connection.shutdown(socket.SHUT_WR) data = "" while True: connection.settimeout(max(0, deadline - time.time())) try: received = connection.recv(4096) except socket.error as error: if error.errno == errno.EINTR: continue raise if not received: break data += received connection.close() except socket.timeout as error: raise ProcessTimeout(timeout) except socket.error as error: raise ProcessError("failed to read response: %s" % error) try: data = json_decode(data) except ValueError as error: raise ProcessError("failed to decode response: %s" % error) if data["status"] == "timeout": raise ProcessTimeout(timeout) if data["status"] == "error": raise ProcessError(data["error"]) if data["returncode"] != 0: raise ProcessFailure(data["returncode"], data["stderr"]) return data["stdout"]
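The loop above re-arms the socket timeout before every blocking call so that the whole exchange, not each individual recv(), stays within a single deadline. As a minimal sketch of that pattern in isolation, assuming a placeholder socket path and an arbitrary JSON payload (this is not the extension runner's actual wire format beyond what executeProcess() sends):

import errno
import json
import socket
import time

def request_with_deadline(address, payload, deadline_seconds):
    """Send a JSON payload over a UNIX socket and read the full reply,
    keeping every blocking call under one overall deadline."""
    deadline = time.time() + deadline_seconds
    connection = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    try:
        connection.settimeout(max(0, deadline - time.time()))
        connection.connect(address)
        connection.sendall(json.dumps(payload).encode())
        connection.shutdown(socket.SHUT_WR)   # signal end-of-request
        chunks = []
        while True:
            # Re-arm the timeout with whatever budget is left.
            connection.settimeout(max(0, deadline - time.time()))
            try:
                received = connection.recv(4096)
            except socket.error as error:
                if error.errno == errno.EINTR:
                    continue                  # interrupted by a signal; retry
                raise
            if not received:
                break                         # peer closed: reply is complete
            chunks.append(received)
    finally:
        connection.close()
    return json.loads(b"".join(chunks).decode())

# Hypothetical usage; "/var/run/example.unix" is a placeholder address.
# reply = request_with_deadline("/var/run/example.unix", {"ping": True}, 10)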
sys.path.insert( 0, os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), ".."))) from textutils import json_decode, json_encode sys_stdout = sys.stdout if "--slave" in sys.argv[1:]: import StringIO import traceback import index data = sys.stdin.read() request = json_decode(data) def reject(message): sys_stdout.write(json_encode({"status": "reject", "message": message})) sys.exit(0) def error(message): sys_stdout.write(json_encode({"status": "error", "error": message})) sys.exit(0) create_branches = [] delete_branches = [] update_branches = [] create_tags = [] delete_tags = []
else: raise if not connected: raise page.utils.DisplayMessage("Service manager not responding!") connection.send(textutils.json_encode({"query": "status"})) connection.shutdown(socket.SHUT_WR) data = "" while True: received = connection.recv(4096) if not received: break data += received result = textutils.json_decode(data) if result["status"] == "error": raise page.utils.DisplayMessage(result["error"]) paleyellow = page.utils.PaleYellowTable(body, "Services") def render(target): table = target.table("services", cellspacing=0, align="center") headings = table.tr("headings") headings.th("name").text("Name") headings.th("path").text("Path") headings.th("pid").text("PID") headings.th("rss").text("RSS") headings.th("uptime").text("Uptime")
def handle_input(self, data): try: data = json_decode(data) except ValueError: return message = "connection from %s:%d:" % self.__peeraddress message += "\n repository: %s" % data["repository"] if data.has_key("timeout"): message += "\n timeout: %d" % data["timeout"] if data["branches"]: message += "\n branches: %s" % ", ".join( data["branches"]) if data["tags"]: message += "\n tags: %s" % ", ".join(data["tags"]) self.server.info(message) db = dbutils.Database() try: cursor = db.cursor() notify_tracker = False wait_for_reply = False for branch in data["branches"]: cursor.execute( """SELECT id, local_name FROM trackedbranches WHERE remote=%s AND remote_name=%s AND NOT disabled AND next IS NOT NULL""", (data["repository"], branch)) row = cursor.fetchone() if row: branch_id, local_name = row cursor.execute( """UPDATE trackedbranches SET next=NULL WHERE id=%s""", (branch_id, )) notify_tracker = True self.server.debug("tracked branch: %s" % local_name) if len(data["branches"] ) == 1 and local_name.startswith("r/"): wait_for_reply = (True, branch_id) self.server.debug(" will wait for reply") if data["tags"]: cursor.execute( """SELECT id FROM trackedbranches WHERE remote=%s AND remote_name=%s AND NOT disabled AND next IS NOT NULL""", (data["repository"], "*")) row = cursor.fetchone() if row: branch_id = row[0] cursor.execute( """UPDATE trackedbranches SET next=NULL WHERE id=%s""", (branch_id, )) notify_tracker = True db.commit() if notify_tracker: if wait_for_reply: branch_id = wait_for_reply[1] cursor.execute( "SELECT COUNT(*) FROM trackedbranchlog WHERE branch=%s", (branch_id, )) log_offset = cursor.fetchone()[0] self.server.add_peer( BranchTrackerHook.WaitForUpdate( self, branch_id, data.get("timeout", 30), log_offset)) try: branchtracker_pid = int( open(configuration.services.BRANCHTRACKER[ "pidfile_path"]).read().strip()) os.kill(branchtracker_pid, signal.SIGHUP) except: self.server.exception() return if wait_for_reply: return self.close() finally: try: db.close() except: pass
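The keys this handler reads from the decoded message are repository, branches, tags and (optionally) timeout. A hedged sketch of a client sending such a notification follows; the host/port transport is only an assumption based on the peer address being formatted as host:port above, and notify_branch_hook is an illustrative name, not an existing helper:

import json
import socket

def notify_branch_hook(host, port, repository, branches=(), tags=(), timeout=None):
    """Send the notification message the handler above decodes.

    Only keys actually read by handle_input() are included; the TCP
    transport is an assumption, not confirmed by the excerpt."""
    message = {
        "repository": repository,     # remote repository as the hook knows it
        "branches": list(branches),   # updated remote branch names
        "tags": list(tags),           # updated remote tag names
    }
    if timeout is not None:
        message["timeout"] = timeout  # optional: how long to wait for a reply
    connection = socket.create_connection((host, port))
    try:
        connection.sendall(json.dumps(message).encode())
        connection.shutdown(socket.SHUT_WR)
    finally:
        connection.close()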
def executeProcess(db, manifest, role_name, script, function, extension_id, user_id, argv, timeout, stdin=None, rlimit_rss=256): # If |user_id| is not the same as |db.user|, then one user's access of the # system is triggering an extension on behalf of another user. This will # for instance happen when one user is adding changes to a review, # triggering an extension filter hook set up by another user. # # In this case, we need to check that the other user can access the # extension. # # If |user_id| is the same as |db.user|, we need to use |db.profiles|, which # may contain a profile associated with an access token that was used to # authenticate the user. if user_id != db.user.id: user = dbutils.User.fromId(db, user_id) authentication_labels = auth.DATABASE.getAuthenticationLabels(user) profiles = [auth.AccessControlProfile.forUser( db, user, authentication_labels)] else: authentication_labels = db.authentication_labels profiles = db.profiles extension = Extension.fromId(db, extension_id) if not auth.AccessControlProfile.isAllowedExtension( profiles, "execute", extension): raise auth.AccessDenied("Access denied to extension: execute %s" % extension.getKey()) flavor = manifest.flavor if manifest.flavor not in configuration.extensions.FLAVORS: flavor = configuration.extensions.DEFAULT_FLAVOR stdin_data = "%s\n" % json_encode({ "library_path": configuration.extensions.FLAVORS[flavor]["library"], "rlimit": { "rss": rlimit_rss }, "hostname": configuration.base.HOSTNAME, "dbname": configuration.database.PARAMETERS["database"], "dbuser": configuration.database.PARAMETERS["user"], "git": configuration.executables.GIT, "python": configuration.executables.PYTHON, "python_path": "%s:%s" % (configuration.paths.CONFIG_DIR, configuration.paths.INSTALL_DIR), "repository_work_copy_path": configuration.extensions.WORKCOPY_DIR, "changeset_address": configuration.services.CHANGESET["address"], "branchtracker_pid_path": configuration.services.BRANCHTRACKER["pidfile_path"], "maildelivery_pid_path": configuration.services.MAILDELIVERY["pidfile_path"], "is_development": configuration.debug.IS_DEVELOPMENT, "extension_path": manifest.path, "extension_id": extension_id, "user_id": user_id, "authentication_labels": list(authentication_labels), "role": role_name, "script_path": script, "fn": function, "argv": argv }) if stdin is not None: stdin_data += stdin # Double the timeout. Timeouts are primarily handled by the extension runner # service, which returns an error response on timeout. This deadline here is # thus mostly to catch the extension runner service itself timing out. 
deadline = time.time() + timeout * 2 try: connection = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) connection.settimeout(max(0, deadline - time.time())) connection.connect(configuration.services.EXTENSIONRUNNER["address"]) connection.sendall(json_encode({ "stdin": stdin_data, "flavor": flavor, "timeout": timeout })) connection.shutdown(socket.SHUT_WR) data = "" while True: connection.settimeout(max(0, deadline - time.time())) try: received = connection.recv(4096) except socket.error as error: if error.errno == errno.EINTR: continue raise if not received: break data += received connection.close() except socket.timeout as error: raise ProcessTimeout(timeout) except socket.error as error: raise ProcessError("failed to read response: %s" % error) try: data = json_decode(data) except ValueError as error: raise ProcessError("failed to decode response: %s" % error) if data["status"] == "timeout": raise ProcessTimeout(timeout) if data["status"] == "error": raise ProcessError(data["error"]) if data["returncode"] != 0: raise ProcessFailure(data["returncode"], data["stderr"]) return data["stdout"]
from dbutils import Database if "--json-job" in sys.argv[1:]: from resource import getrlimit, setrlimit, RLIMIT_RSS from traceback import print_exc soft_limit, hard_limit = getrlimit(RLIMIT_RSS) rss_limit = configuration.services.CHANGESET["rss_limit"] if soft_limit < rss_limit: setrlimit(RLIMIT_RSS, (rss_limit, hard_limit)) from changeset.create import createChangeset from textutils import json_decode, json_encode request = json_decode(sys.stdin.read()) try: db = Database() createChangeset(db, request) db.close() sys.stdout.write(json_encode(request)) except: print "Request:" print json_encode(request, indent=2) print print_exc(file=sys.stdout)
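The rlimit adjustment above only raises the soft RSS limit when it is below the configured value, leaving the hard limit alone. A small standalone sketch of the same idea, assuming an arbitrary 256 MB figure rather than the service's actual configuration:

from resource import getrlimit, setrlimit, RLIMIT_RSS, RLIM_INFINITY

def ensure_rss_soft_limit(minimum_bytes):
    """Raise the RSS soft limit to at least minimum_bytes, clamped to the
    hard limit, which is left untouched."""
    soft_limit, hard_limit = getrlimit(RLIMIT_RSS)
    if soft_limit != RLIM_INFINITY and soft_limit < minimum_bytes:
        if hard_limit != RLIM_INFINITY:
            minimum_bytes = min(minimum_bytes, hard_limit)
        setrlimit(RLIMIT_RSS, (minimum_bytes, hard_limit))

# Example: ask for at least 256 MB before doing heavy changeset work.
ensure_rss_soft_limit(256 * 1024 * 1024)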
while True: received = connection.recv(4096) if not received: break data += received connection.close() except EnvironmentError as error: raise HighlightBackgroundServiceError(str(error)) if async: return True if not data: raise HighlightBackgroundServiceError( "returned an invalid response (no response)") try: results = json_decode(data) except ValueError: raise HighlightBackgroundServiceError( "returned an invalid response (%r)" % data) if not isinstance(results, list): # If not a list, the result is probably an error message. raise HighlightBackgroundServiceError(str(results)) if len(results) != len(requests): raise HighlightBackgroundServiceError("didn't process all requests") return True
def main(): parser = argparse.ArgumentParser() parser.add_argument("-u", dest="user_id", type=int) parser.add_argument("-l", dest="auth_labels", action="append", default=[]) parser.add_argument("command", nargs="*") arguments = parser.parse_args() try: init(arguments.user_id, arguments.auth_labels) for command in arguments.command: pending_mails = None if command == "generate-mails-for-batch": data = json_decode(sys.stdin.readline()) batch_id = data["batch_id"] was_accepted = data["was_accepted"] is_accepted = data["is_accepted"] pending_mails = reviewing.utils.generateMailsForBatch(db, batch_id, was_accepted, is_accepted) elif command == "generate-mails-for-assignments-transaction": data = json_decode(sys.stdin.readline()) transaction_id = data["transaction_id"] pending_mails = reviewing.utils.generateMailsForAssignmentsTransaction(db, transaction_id) elif command == "apply-filters": data = json_decode(sys.stdin.readline()) filters = reviewing.filters.Filters() user = dbutils.User.fromId(db, data["user_id"]) if "user_id" in data else None if "review_id" in data: review = dbutils.Review.fromId(db, data["review_id"]) filters.setFiles(db, review=review) filters.load(db, review=review, user=user, added_review_filters=data.get("added_review_filters", []), removed_review_filters=data.get("removed_review_filters", [])) else: repository = gitutils.Repository.fromId(db, data["repository_id"]) filters.setFiles(db, file_ids=data["file_ids"]) filters.load(db, repository=repository, recursive=data.get("recursive", False), user=user) sys.stdout.write(json_encode(filters.data) + "\n") elif command == "generate-custom-mails": pending_mails = [] for data in json_decode(sys.stdin.readline()): from_user = dbutils.User.fromId(db, data["sender"]) if data.get("recipients"): recipients = [dbutils.User.fromId(db, user_id) for user_id in data["recipients"]] else: recipients = None subject = data["subject"] headers = data.get("headers") body = data["body"] if "review_id" in data: review = dbutils.Review.fromId(db, data["review_id"]) else: review = None pending_mails.extend(sendCustomMail( from_user, recipients, subject, headers, body, review)) elif command == "set-review-state": data = json_decode(sys.stdin.readline()) error = "" try: user = dbutils.User.fromId(db, data["user_id"]) review = dbutils.Review.fromId(db, data["review_id"]) if review.state != data["old_state"]: error = "invalid old state" elif data["new_state"] == "open": review.reopen(db, user) elif data["new_state"] == "closed": review.close(db, user) elif data["new_state"] == "dropped": review.drop(db, user) else: error = "invalid new state" except dbutils.NoSuchUser: error = "invalid user id" except dbutils.NoSuchReview: error = "invalid review id" except Exception as error: error = str(error) sys.stdout.write(error + "\n") elif command in HANDLERS: data_in = json_decode(sys.stdin.readline()) data_out = HANDLERS[command](data_in) sys.stdout.write(json_encode(data_out) + "\n") else: sys.stdout.write(json_encode("unknown command: %s" % command) + "\n") sys.exit(0) if pending_mails is not None: sys.stdout.write(json_encode(pending_mails) + "\n") finish() except Exception: sys.stdout.write(json_encode(traceback.format_exc()) + "\n") finally: abort()
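Each command above follows a line-delimited JSON protocol: the helper reads one JSON document per line from stdin and writes one JSON document per line to stdout. A hedged sketch of how a parent process might drive such a helper; the script path, command name and payload are placeholders, not the actual caller:

import json
import subprocess

def run_json_command(script_path, command, payload):
    """Spawn the helper, send one JSON line, and return its JSON reply."""
    child = subprocess.Popen(
        ["python", script_path, command],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE)
    stdout, _ = child.communicate(json.dumps(payload).encode() + b"\n")
    # The helper writes one JSON document per line; take the first line.
    return json.loads(stdout.splitlines()[0].decode())

# Hypothetical usage (script path and payload are illustrative only):
# result = run_json_command("background/reviewing_helper.py", "apply-filters",
#                           {"repository_id": 1, "file_ids": [1, 2, 3]})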
delay += delay else: raise if not connected: raise page.utils.DisplayMessage("Service manager not responding!") connection.send(textutils.json_encode({ "query": "status" })) connection.shutdown(socket.SHUT_WR) data = "" while True: received = connection.recv(4096) if not received: break data += received result = textutils.json_decode(data) if result["status"] == "error": raise page.utils.DisplayMessage(result["error"]) paleyellow = page.utils.PaleYellowTable(body, "Services") def render(target): table = target.table("services", cellspacing=0, align="center") headings = table.tr("headings") headings.th("name").text("Name") headings.th("module").text("Module") headings.th("pid").text("PID") headings.th("rss").text("RSS") headings.th("cpu").text("CPU")

def __call__(self, req, db, user): from operation.typechecker import TypeCheckerContext if user.isAnonymous() and not self.__accept_anonymous_user: return OperationFailureMustLogin() if req.method == "POST": data = req.read() else: data = req.getParameter("data") if not data: raise OperationError("no input") try: value = json_decode(data) except ValueError as error: raise OperationError("invalid input: %s" % str(error)) try: self.__checker(value, TypeCheckerContext(req, db, user)) return self.process(db, user, **value) except OperationError as error: return error except OperationFailure as failure: return failure except dbutils.NoSuchUser as error: return OperationFailure( code="nosuchuser", title="Who is '%s'?" % error.name, message="There is no user in Critic's database named that.") except dbutils.NoSuchReview as error: return OperationFailure( code="nosuchreview", title="Invalid review ID", message="The review ID r/%d is not valid." % error.id) except dbutils.TransactionRollbackError: return OperationFailure( code="transactionrollback", title="Transaction rolled back", message= "Your database transaction rolled back, probably due to a deadlock. Please try again." ) except extensions.extension.ExtensionError as error: return OperationFailure(code="invalidextension", title="Invalid extension", message=error.message) except: # Decode value again since the type checkers might have modified it. value = json_decode(data) error_message = ("User: %s\nReferrer: %s\nData: %s\n\n%s" % (user.name, req.getReferrer(), json_encode(self.sanitize(value), indent=2), traceback.format_exc())) db.rollback() import mailutils import configuration if not user.hasRole(db, "developer"): mailutils.sendExceptionMessage(db, "wsgi[%s]" % req.path, error_message) if configuration.debug.IS_DEVELOPMENT or user.hasRole( db, "developer"): return OperationError(error_message) else: return OperationError( "An unexpected error occurred. " + "A message has been sent to the system administrator(s) " + "with details about the problem.")
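The wrapper above decodes the request body, runs it through the operation's type checker, and then calls process() with the validated JSON fields as keyword arguments. A hedged sketch of what a concrete operation built on this pattern could look like; the checker dict passed to the constructor and the SQL are illustrative assumptions, not an existing operation:

# Assuming the Operation/OperationResult classes defined alongside the
# wrapper above; the parameter-checker dict syntax is an assumption.
from operation import Operation, OperationResult

class SetFullname(Operation):
    def __init__(self):
        # Declare the expected JSON fields; the wrapper validates them and
        # then calls process(db, user, user_id=..., fullname=...).
        Operation.__init__(self, {"user_id": int, "fullname": str})

    def process(self, db, user, user_id, fullname):
        cursor = db.cursor()
        cursor.execute("UPDATE users SET fullname=%s WHERE id=%s",
                       (fullname, user_id))
        db.commit()
        return OperationResult()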
def handleRequestInternal(critic, req): api_version = getAPIVersion(req) if not api_version: if req.method == "GET": documentation.describeRoot() else: raise UsageError("Invalid %s request" % req.method) prefix = [api_version] parameters = Parameters(critic, req) path = req.path.rstrip("/").split("/")[2:] if not path: if req.method == "GET": describe_parameter = parameters.getQueryParameter("describe") if describe_parameter: v1.documentation.describeResource(describe_parameter) v1.documentation.describeVersion() else: raise UsageError("Invalid %s request" % req.method) if req.method in ("POST", "PUT"): try: data = textutils.json_decode(req.read()) except ValueError: raise UsageError("Invalid %s request body" % req.method) context = None resource_class = None while True: next_component = path.pop(0) if resource_class and (next_component in resource_class.objects or next_component in resource_class.lists or next_component in resource_class.maps): subresource_id = [] subresource_path = [] while True: subresource_id.append(next_component) subresource_path.append(next_component) if "/".join(subresource_id) in resource_class.objects: pass elif "/".join(subresource_id) in resource_class.lists: if path: try: subresource_path.append(int(path[0])) except ValueError: raise UsageError( "Item identifier must be an integer: %r" % path[0]) else: del path[0] elif "/".join(subresource_id) in resource_class.maps: if path: subresource_path.append(path[0]) else: raise PathError( "Invalid resource: %r / %r" % ("/".join(resource_path), "/".join(subresource_id))) if not path: break next_component = path.pop(0) parameters.subresource_path = subresource_path break resource_path = prefix + [next_component] resource_class = lookup(resource_path) prefix.append(resource_class.name) value = None values = None resource_id = "/".join(resource_path) try: if path and resource_class.single: arguments = filter(None, path.pop(0).split(",")) if len(arguments) == 0 or (len(arguments) > 1 and path): raise UsageError("Invalid resource path: %s" % req.path) if len(arguments) == 1: with parameters.forResource(resource_class): value = resource_class.single(parameters, arguments[0]) assert isinstance(value, resource_class.value_class) if not path: break else: with parameters.forResource(resource_class): values = [ resource_class.single(parameters, argument) for argument in arguments ] assert all( isinstance(value, resource_class.value_class) for value in values) break elif not path: if req.method == "POST": break if not resource_class.multiple: raise UsageError("Resource requires an argument: %s" % resource_id) with parameters.forResource(resource_class): values = resource_class.multiple(parameters) if isinstance(values, resource_class.value_class): value, values = values, None elif not parameters.range_accessed: begin, end = parameters.getRange() values = itertools.islice(values, begin, end) break except resource_class.exceptions as error: raise PathError("Resource not found: %s" % error.message) if values and not isinstance(values, list): values = list(values) if req.method == "GET": return finishGET(critic, req, parameters, resource_class, value, values) elif req.method == "POST": return finishPOST(critic, req, parameters, resource_class, value, values, data) elif req.method == "PUT": return finishPUT(critic, req, parameters, resource_class, value, values, data) elif req.method == "DELETE": return finishDELETE(critic, req, parameters, resource_class, value, values)
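Resource arguments can be given singly or as a comma-separated list, e.g. .../reviews/17 versus .../reviews/17,18,19; the filter(None, ...) step silently drops empty items. A tiny sketch of just that argument-splitting step, with example inputs:

def split_arguments(component):
    """Mirror the filter(None, component.split(",")) step above: empty
    items are dropped, and a single item means a single resource."""
    return [item for item in component.split(",") if item]

assert split_arguments("17") == ["17"]
assert split_arguments("17,18,19") == ["17", "18", "19"]
assert split_arguments("17,,18") == ["17", "18"]   # empty items ignored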
def handle_input(self, value): self.__requests = json_decode(value) self.__results = [] self.server.add_requests(self, self.__requests)
from_user, to_user, recipients, subject, body, message_id=message_id, headers=headers) files.append(filename) return files try: if len(sys.argv) > 1: init() for command in sys.argv[1:]: pending_mails = None if command == "generate-mails-for-batch": data = json_decode(sys.stdin.readline()) batch_id = data["batch_id"] was_accepted = data["was_accepted"] is_accepted = data["is_accepted"] pending_mails = reviewing.utils.generateMailsForBatch(db, batch_id, was_accepted, is_accepted) elif command == "generate-mails-for-assignments-transaction": data = json_decode(sys.stdin.readline()) transaction_id = data["transaction_id"] pending_mails = reviewing.utils.generateMailsForAssignmentsTransaction(db, transaction_id) elif command == "apply-filters": data = json_decode(sys.stdin.readline()) filters = reviewing.filters.Filters() user = dbutils.User.fromId(db, data["user_id"]) if "user_id" in data else None if "review_id" in data: review = dbutils.Review.fromId(db, data["review_id"], load_commits=False) filters.setFiles(db, review=review)
def slave(): import StringIO import traceback import index def reject(message): sys_stdout.write(json_encode({"status": "reject", "message": message})) sys.exit(0) def error(message): sys_stdout.write(json_encode({"status": "error", "error": message})) sys.exit(0) try: data = sys.stdin.read() request = json_decode(data) create_branches = [] delete_branches = [] update_branches = [] create_tags = [] delete_tags = [] update_tags = [] user_name = request["user_name"] repository_name = request["repository_name"] if request[ "flags"] and user_name == configuration.base.SYSTEM_USER_NAME: flags = dict( flag.split("=", 1) for flag in request["flags"].split(",")) else: flags = {} sys.stdout = StringIO.StringIO() index.init() commits_to_process = set() for ref in request["refs"]: name = ref["name"] old_sha1 = ref["old_sha1"] new_sha1 = ref["new_sha1"] if "//" in name: reject("invalid ref name: '%s'" % name) if not name.startswith("refs/"): reject("unexpected ref name: '%s'" % name) if new_sha1 != '0000000000000000000000000000000000000000': commits_to_process.add(new_sha1) name = name[len("refs/"):] if name.startswith("heads/"): name = name[len("heads/"):] if new_sha1 == '0000000000000000000000000000000000000000': delete_branches.append((name, old_sha1)) elif old_sha1 == '0000000000000000000000000000000000000000': create_branches.append((name, new_sha1)) else: update_branches.append((name, old_sha1, new_sha1)) elif name.startswith("tags/"): name = name[len("tags/"):] if old_sha1 == '0000000000000000000000000000000000000000': create_tags.append((name, new_sha1)) elif new_sha1 == '0000000000000000000000000000000000000000': delete_tags.append(name) else: update_tags.append((name, old_sha1, new_sha1)) elif name.startswith("temporary/") or name.startswith( "keepalive/"): # len("temporary/") == len("keepalive/") name = name[len("temporary/"):] if name != new_sha1: reject("invalid update of '%s'; value is not %s" % (ref["name"], name)) else: reject("unexpected ref name: '%s'" % ref["name"]) multiple = (len(delete_branches) + len(update_branches) + len(create_branches) + len(delete_tags) + len(update_tags) + len(create_tags)) > 1 info = [] for sha1 in commits_to_process: index.processCommits(repository_name, sha1) for name, old in delete_branches: index.deleteBranch(user_name, repository_name, name, old) info.append("branch deleted: %s" % name) for name, old, new in update_branches: index.updateBranch(user_name, repository_name, name, old, new, multiple, flags) info.append("branch updated: %s (%s..%s)" % (name, old[:8], new[:8])) index.createBranches(user_name, repository_name, create_branches, flags) for name, new in create_branches: info.append("branch created: %s (%s)" % (name, new[:8])) for name in delete_tags: index.deleteTag(repository_name, name) info.append("tag deleted: %s" % name) for name, old, new in update_tags: index.updateTag(repository_name, name, old, new) info.append("tag updated: %s (%s..%s)" % (name, old[:8], new[:8])) for name, new in create_tags: index.createTag(repository_name, name, new) info.append("tag created: %s (%s)" % (name, new[:8])) sys_stdout.write( json_encode({ "status": "ok", "accept": True, "output": sys.stdout.getvalue(), "info": info })) index.finish() except index.IndexException as exception: sys_stdout.write( json_encode({ "status": "ok", "accept": False, "output": exception.message, "info": info })) except SystemExit: raise except: exception = traceback.format_exc() message = """\ %s Request: %s %s""" % (exception.splitlines()[-1], json_encode( request, indent=2), 
traceback.format_exc()) sys_stdout.write(json_encode({"status": "error", "error": message})) finally: index.abort()
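The ref classification above relies on git's convention that an all-zeros SHA-1 marks the missing side of a create or delete. A small standalone sketch of that rule, separate from the branch/tag/temporary namespace handling:

ZERO_SHA1 = "0" * 40

def classify_ref_update(old_sha1, new_sha1):
    """Classify one ref update the way the hook above does: an all-zeros
    SHA-1 on one side marks creation or deletion."""
    if old_sha1 == ZERO_SHA1 and new_sha1 == ZERO_SHA1:
        raise ValueError("both sides zero: not a valid update")
    if old_sha1 == ZERO_SHA1:
        return "create"
    if new_sha1 == ZERO_SHA1:
        return "delete"
    return "update"

assert classify_ref_update(ZERO_SHA1, "a" * 40) == "create"
assert classify_ref_update("a" * 40, ZERO_SHA1) == "delete"
assert classify_ref_update("a" * 40, "b" * 40) == "update"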
except gitutils.GitReferenceError: return "invalid commit id" except Exception as exception: return str(exception) HANDLERS = { "propagate-comment": propagateComment } try: if len(sys.argv) > 1: init() for command in sys.argv[1:]: pending_mails = None if command == "generate-mails-for-batch": data = json_decode(sys.stdin.readline()) batch_id = data["batch_id"] was_accepted = data["was_accepted"] is_accepted = data["is_accepted"] pending_mails = reviewing.utils.generateMailsForBatch(db, batch_id, was_accepted, is_accepted) elif command == "generate-mails-for-assignments-transaction": data = json_decode(sys.stdin.readline()) transaction_id = data["transaction_id"] pending_mails = reviewing.utils.generateMailsForAssignmentsTransaction(db, transaction_id) elif command == "apply-filters": data = json_decode(sys.stdin.readline()) filters = reviewing.filters.Filters() user = dbutils.User.fromId(db, data["user_id"]) if "user_id" in data else None if "review_id" in data: review = dbutils.Review.fromId(db, data["review_id"]) filters.setFiles(db, review=review)
# License for the specific language governing permissions and limitations under # the License. import sys import os import signal import time sys.path.insert( 0, os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), ".."))) import dbutils from textutils import json_encode, json_decode if "--wait-for-update" in sys.argv: data = json_decode(sys.stdin.read()) branch_id = data["branch_id"] timeout = data["timeout"] log_offset = data["log_offset"] db = dbutils.Database() cursor = db.cursor() cursor.execute("SELECT MAX(time) FROM trackedbranchlog WHERE branch=%s", (branch_id, )) last_log_entry = cursor.fetchone()[0] start = time.time() status = None
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import sys import os import signal import time sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), ".."))) import dbutils from textutils import json_encode, json_decode if "--wait-for-update" in sys.argv: data = json_decode(sys.stdin.read()) branch_id = data["branch_id"] timeout = data["timeout"] log_offset = data["log_offset"] db = dbutils.Database() cursor = db.cursor() cursor.execute("SELECT MAX(time) FROM trackedbranchlog WHERE branch=%s", (branch_id,)) last_log_entry = cursor.fetchone()[0] start = time.time() status = None output = ""
def handleRequestInternal(critic, req): api_version = getAPIVersion(req) if not api_version: if req.method == "GET": documentation.describeRoot() else: raise UsageError("Invalid %s request" % req.method) prefix = [api_version] parameters = Parameters(critic, req) path = req.path.rstrip("/").split("/")[2:] if not path: if req.method == "GET": describe_parameter = parameters.getQueryParameter("describe") if describe_parameter: v1.documentation.describeResource(describe_parameter) v1.documentation.describeVersion() else: raise UsageError("Invalid %s request" % req.method) if req.method in ("POST", "PUT"): try: data = textutils.json_decode(req.read()) except ValueError: raise UsageError("Invalid %s request body" % req.method) context = None resource_class = None while True: next_component = path.pop(0) if resource_class and (next_component in resource_class.objects or next_component in resource_class.lists or next_component in resource_class.maps): subresource_id = [] subresource_path = [] while True: subresource_id.append(next_component) subresource_path.append(next_component) if "/".join(subresource_id) in resource_class.objects: pass elif "/".join(subresource_id) in resource_class.lists: if path: try: subresource_path.append(int(path[0])) except ValueError: raise UsageError( "Item identifier must be an integer: %r" % path[0]) else: del path[0] elif "/".join(subresource_id) in resource_class.maps: if path: subresource_path.append(path[0]) else: raise PathError("Invalid resource: %r / %r" % ("/".join(resource_path), "/".join(subresource_id))) if not path: break next_component = path.pop(0) parameters.subresource_path = subresource_path break resource_path = prefix + [next_component] resource_class = lookup(resource_path) prefix.append(resource_class.name) value = None values = None resource_id = "/".join(resource_path) try: if path and resource_class.single: arguments = filter(None, path.pop(0).split(",")) if len(arguments) == 0 or (len(arguments) > 1 and path): raise UsageError("Invalid resource path: %s" % req.path) if len(arguments) == 1: with parameters.forResource(resource_class): value = resource_class.single(parameters, arguments[0]) assert isinstance(value, resource_class.value_class) if not path: break else: with parameters.forResource(resource_class): values = [resource_class.single(parameters, argument) for argument in arguments] assert all(isinstance(value, resource_class.value_class) for value in values) break elif not path: if req.method == "POST": break if not resource_class.multiple: raise UsageError("Resource requires an argument: %s" % resource_id) with parameters.forResource(resource_class): values = resource_class.multiple(parameters) if isinstance(values, resource_class.value_class): value, values = values, None elif not parameters.range_accessed: begin, end = parameters.getRange() values = itertools.islice(values, begin, end) break except resource_class.exceptions as error: raise PathError("Resource not found: %s" % error.message) if values and not isinstance(values, list): values = list(values) if req.method == "GET": return finishGET(critic, req, parameters, resource_class, value, values) elif req.method == "POST": return finishPOST( critic, req, parameters, resource_class, value, values, data) elif req.method == "PUT": return finishPUT( critic, req, parameters, resource_class, value, values, data) elif req.method == "DELETE": return finishDELETE( critic, req, parameters, resource_class, value, values)
def slave(): import StringIO import traceback import index def reject(message): sys_stdout.write(json_encode({ "status": "reject", "message": message })) sys.exit(0) def error(message): sys_stdout.write(json_encode({ "status": "error", "error": message })) sys.exit(0) try: data = sys.stdin.read() request = json_decode(data) create_branches = [] delete_branches = [] update_branches = [] create_tags = [] delete_tags = [] update_tags = [] user_name = request["user_name"] repository_name = request["repository_name"] if request["flags"] and user_name == configuration.base.SYSTEM_USER_NAME: flags = dict(flag.split("=", 1) for flag in request["flags"].split(",")) else: flags = {} sys.stdout = StringIO.StringIO() index.init() commits_to_process = set() for ref in request["refs"]: name = ref["name"] old_sha1 = ref["old_sha1"] new_sha1 = ref["new_sha1"] if "//" in name: reject("invalid ref name: '%s'" % name) if not name.startswith("refs/"): reject("unexpected ref name: '%s'" % name) if new_sha1 != '0000000000000000000000000000000000000000': commits_to_process.add(new_sha1) name = name[len("refs/"):] if name.startswith("heads/"): name = name[len("heads/"):] if new_sha1 == '0000000000000000000000000000000000000000': delete_branches.append((name, old_sha1)) elif old_sha1 == '0000000000000000000000000000000000000000': create_branches.append((name, new_sha1)) else: update_branches.append((name, old_sha1, new_sha1)) elif name.startswith("tags/"): name = name[len("tags/"):] if old_sha1 == '0000000000000000000000000000000000000000': create_tags.append((name, new_sha1)) elif new_sha1 == '0000000000000000000000000000000000000000': delete_tags.append(name) else: update_tags.append((name, old_sha1, new_sha1)) elif name.startswith("temporary/") or name.startswith("keepalive/"): # len("temporary/") == len("keepalive/") name = name[len("temporary/"):] if name != new_sha1: reject("invalid update of '%s'; value is not %s" % (ref["name"], name)) else: reject("unexpected ref name: '%s'" % ref["name"]) multiple = (len(delete_branches) + len(update_branches) + len(create_branches) + len(delete_tags) + len(update_tags) + len(create_tags)) > 1 info = [] for sha1 in commits_to_process: index.processCommits(repository_name, sha1) for name, old in delete_branches: index.deleteBranch(user_name, repository_name, name, old) info.append("branch deleted: %s" % name) for name, old, new in update_branches: index.updateBranch(user_name, repository_name, name, old, new, multiple, flags) info.append("branch updated: %s (%s..%s)" % (name, old[:8], new[:8])) index.createBranches(user_name, repository_name, create_branches, flags) for name, new in create_branches: info.append("branch created: %s (%s)" % (name, new[:8])) for name in delete_tags: index.deleteTag(repository_name, name) info.append("tag deleted: %s" % name) for name, old, new in update_tags: index.updateTag(repository_name, name, old, new) info.append("tag updated: %s (%s..%s)" % (name, old[:8], new[:8])) for name, new in create_tags: index.createTag(repository_name, name, new) info.append("tag created: %s (%s)" % (name, new[:8])) sys_stdout.write(json_encode({ "status": "ok", "accept": True, "output": sys.stdout.getvalue(), "info": info })) index.finish() except index.IndexException as exception: sys_stdout.write(json_encode({ "status": "ok", "accept": False, "output": exception.message, "info": info })) except SystemExit: raise except: exception = traceback.format_exc() message = """\ %s Request: %s %s""" % (exception.splitlines()[-1], json_encode(request, indent=2), 
traceback.format_exc()) sys_stdout.write(json_encode({ "status": "error", "error": message })) finally: index.abort()
def processLine(paths, line): try: command, value = line.split(" ", 1) except ValueError: raise InjectError("Invalid line in output: %r" % line) if command not in ("link", "script", "stylesheet", "preference"): raise InjectError("Invalid command: %r" % command) value = value.strip() try: value = json_decode(value) except ValueError: raise InjectError("Invalid JSON: %r" % value) def is_string(value): return isinstance(value, basestring) if command in ("script", "stylesheet") and not is_string(value): raise InjectError("Invalid value for %r: %r (expected string)" % (command, value)) elif command == "link": if isinstance(value, dict): if "label" not in value or not is_string(value["label"]): raise InjectError("Invalid value for %r: %r (expected attribute 'label' of type string)" % (command, value)) elif "url" not in value or not is_string(value["url"]) or value["url"] is None: raise InjectError("Invalid value for %r: %r (expected attribute 'url' of type string or null)" % (command, value)) # Alternatively support [label, url] (backwards compatibility). elif not isinstance(value, list) or len(value) != 2: raise InjectError("Invalid value for %r: %r (expected object { \"label\": LABEL, \"url\": URL })" % (command, value)) elif not is_string(value[0]): raise InjectError("Invalid value for %r: %r (expected string at array[0])" % (command, value)) elif not (is_string(value[1]) or value[1] is None): raise InjectError("Invalid value for %r: %r (expected string or null at array[1])" % (command, value)) else: value = { "label": value[0], "url": value[1] } elif command == "preference": if "config" not in paths: raise InjectError("Invalid command: %r only valid on /config page" % command) elif not isinstance(value, dict): raise InjectError("Invalid value for %r: %r (expected object)" % (command, value)) for name in ("url", "name", "type", "value", "default", "description"): if name not in value: raise InjectError("Invalid value for %r: %r (missing attribute %r)" % (command, value, name)) preference_url = value["url"] preference_name = value["name"] preference_type = value["type"] preference_value = value["value"] preference_default = value["default"] preference_description = value["description"] if not is_string(preference_url): raise InjectError("Invalid value for %r: %r (expected attribute 'url' of type string)" % (command, value)) elif not is_string(preference_name): raise InjectError("Invalid value for %r: %r (expected attribute 'name' of type string)" % (command, value)) elif not is_string(preference_description): raise InjectError("Invalid value for %r: %r (expected attribute 'description' of type string)" % (command, value)) if is_string(preference_type): if preference_type not in ("boolean", "integer", "string"): raise InjectError("Invalid value for %r: %r (unsupported preference type)" % (command, value)) if preference_type == "boolean": type_check = lambda value: isinstance(value, bool) elif preference_type == "integer": type_check = lambda value: isinstance(value, int) else: type_check = is_string if not type_check(preference_value): raise InjectError("Invalid value for %r: %r (type mismatch between 'value' and 'type')" % (command, value)) if not type_check(preference_default): raise InjectError("Invalid value for %r: %r (type mismatch between 'default' and 'type')" % (command, value)) else: if not isinstance(preference_type, list): raise InjectError("Invalid value for %r: %r (invalid 'type', expected string or array)" % (command, value)) for index, choice in enumerate(preference_type): if 
not isinstance(choice, dict) \ or not isinstance(choice.get("value"), basestring) \ or not isinstance(choice.get("title"), basestring): raise InjectError("Invalid value for %r: %r (invalid preference choice: %r)" % (command, value, choice)) choices = set([choice["value"] for choice in preference_type]) if not is_string(preference_value) or preference_value not in choices: raise InjectError("Invalid value for %r: %r ('value' not among valid choices)" % (command, value)) if not is_string(preference_default) or preference_default not in choices: raise InjectError("Invalid value for %r: %r ('default' not among valid choices)" % (command, value)) return (command, value)
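Putting the protocol together: each line an extension emits is a command keyword followed by a JSON value, which processLine() validates as above. A hedged example of lines that should be accepted; the URLs, resource names and preference fields are purely illustrative:

# Example lines processLine() above should accept (illustrative values only).
EXAMPLE_LINES = [
    'script "/extension-resource/example/helper.js"',
    'stylesheet "/extension-resource/example/style.css"',
    'link {"label": "Example", "url": "/example"}',
    ('preference {"url": "/example", "name": "example.enabled", '
     '"type": "boolean", "value": true, "default": false, '
     '"description": "Enable the example extension."}'),
]

# "config" must be in |paths| for the preference command to be allowed.
results = [processLine(["config"], line) for line in EXAMPLE_LINES]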
global db if db: db.rollback() db.close() db = None try: if len(sys.argv) > 1: init() for command in sys.argv[1:]: pending_mails = None if command == "generate-mails-for-batch": data = json_decode(sys.stdin.readline()) batch_id = data["batch_id"] was_accepted = data["was_accepted"] is_accepted = data["is_accepted"] pending_mails = reviewing.utils.generateMailsForBatch(db, batch_id, was_accepted, is_accepted) elif command == "generate-mails-for-assignments-transaction": data = json_decode(sys.stdin.readline()) transaction_id = data["transaction_id"] pending_mails = reviewing.utils.generateMailsForAssignmentsTransaction(db, transaction_id) elif command == "apply-filters": data = json_decode(sys.stdin.readline()) filters = reviewing.filters.Filters() user = dbutils.User.fromId(db, data["user_id"]) if "user_id" in data else None if "review_id" in data: review = dbutils.Review.fromId(db, data["review_id"], load_commits=False) filters.setFiles(db, review=review)
def renderServices(req, db, user): req.content_type = "text/html; charset=utf-8" document = htmlutils.Document(req) document.setTitle("Services") html = document.html() head = html.head() body = html.body() page.utils.generateHeader(body, db, user, current_page="services") document.addExternalStylesheet("resource/services.css") document.addExternalScript("resource/services.js") document.addInternalScript(user.getJS()) delay = 0.5 connected = False while not connected and delay <= 10: connection = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) # This loop is for the case where we just restarted the service manager # via the /services UI. The client-side script immediately reloads the # page after restart, which typically leads to us trying to connect to # the service manager while it's in the process of restarting. So just # try a couple of times if at first the connection fails. try: connection.connect( configuration.services.SERVICEMANAGER["address"]) connected = True except socket.error as error: if error[0] in (errno.ENOENT, errno.ECONNREFUSED): time.sleep(delay) delay += delay else: raise if not connected: raise page.utils.DisplayMessage("Service manager not responding!") connection.send(textutils.json_encode({"query": "status"})) connection.shutdown(socket.SHUT_WR) data = "" while True: received = connection.recv(4096) if not received: break data += received result = textutils.json_decode(data) if result["status"] == "error": raise page.utils.DisplayMessage(result["error"]) paleyellow = page.utils.PaleYellowTable(body, "Services") def render(target): table = target.table("services callout") headings = table.tr("headings") headings.th("name").text("Name") headings.th("module").text("Module") headings.th("pid").text("PID") headings.th("rss").text("RSS") headings.th("cpu").text("CPU") headings.th("uptime").text("Uptime") headings.th("commands").text() table.tr("spacer").td("spacer", colspan=4) def formatUptime(seconds): def inner(seconds): if seconds < 60: return "%d seconds" % seconds elif seconds < 60 * 60: return "%d minutes" % (seconds / 60) elif seconds < 60 * 60 * 24: return "%d hours" % (seconds / (60 * 60)) else: return "%d days" % (seconds / (60 * 60 * 24)) return inner(int(seconds)).replace(" ", " ") def formatRSS(bytes): if bytes < 1024: return "%d B" % bytes elif bytes < 1024**2: return "%.1f kB" % (float(bytes) / 1024) elif bytes < 1024**3: return "%.1f MB" % (float(bytes) / 1024**2) else: return "%.1f GB" % (float(bytes) / 1024**3) def formatCPU(seconds): minutes = int(seconds / 60) seconds = seconds - minutes * 60 seconds = "%2.2f" % seconds if seconds.find(".") == 1: seconds = "0" + seconds return "%d:%s" % (minutes, seconds) def getProcessData(pid): try: items = open("/proc/%d/stat" % pid).read().split() return { "cpu": formatCPU( float(int(items[13]) + int(items[14])) / os.sysconf("SC_CLK_TCK")), "rss": formatRSS(int(items[23]) * os.sysconf("SC_PAGE_SIZE")) } except: return {"cpu": "N/A", "rss": "N/A"} for service_name, service_data in sorted(result["services"].items()): process_data = getProcessData(service_data["pid"]) row = table.tr("service") row.td("name").text(service_name) row.td("module").text(service_data["module"]) row.td("pid").text(service_data["pid"] if service_data["pid"] != -1 else "(not running)") row.td("rss").text(process_data["rss"]) row.td("cpu").text(process_data["cpu"]) row.td("uptime").innerHTML(formatUptime(service_data["uptime"])) commands = row.td("commands") commands.a(href="javascript:void(restartService(%s));" % 
htmlutils.jsify(service_name)).text("[restart]") commands.a(href="javascript:void(getServiceLog(%s));" % htmlutils.jsify(service_name)).text("[log]") for index, pid in enumerate( os.listdir(configuration.paths.WSGI_PIDFILE_DIR)): startup = float( open(os.path.join(configuration.paths.WSGI_PIDFILE_DIR, pid)).read()) uptime = time.time() - startup process_data = getProcessData(int(pid)) row = table.tr("service") row.td("name").text("wsgi:%d" % index) row.td("module").text() row.td("pid").text(pid) row.td("rss").text(process_data["rss"]) row.td("cpu").text(process_data["cpu"]) row.td("uptime").innerHTML(formatUptime(uptime)) commands = row.td("commands") commands.a(href="javascript:void(restartService('wsgi'));").text( "[restart]") paleyellow.addCentered(render) return document
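The helpers above turn raw /proc values into human-readable strings. A hedged restatement of two of them as standalone functions with example values, dropping only the HTML non-breaking-space substitution used for innerHTML():

def format_uptime(seconds):
    # Same bucketing as formatUptime() above, without the &nbsp; step.
    seconds = int(seconds)
    if seconds < 60:
        return "%d seconds" % seconds
    elif seconds < 60 * 60:
        return "%d minutes" % (seconds / 60)
    elif seconds < 60 * 60 * 24:
        return "%d hours" % (seconds / (60 * 60))
    else:
        return "%d days" % (seconds / (60 * 60 * 24))

def format_rss(bytes):
    # Same thresholds as formatRSS() above.
    if bytes < 1024:
        return "%d B" % bytes
    elif bytes < 1024 ** 2:
        return "%.1f kB" % (float(bytes) / 1024)
    elif bytes < 1024 ** 3:
        return "%.1f MB" % (float(bytes) / 1024 ** 2)
    else:
        return "%.1f GB" % (float(bytes) / 1024 ** 3)

assert format_uptime(90) == "1 minutes"
assert format_rss(5 * 1024 ** 2) == "5.0 MB"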