def processFile(file_id):
    """Insert |file_id| into the nested directory/file tree.

    Each tree node is a (directories, files) pair of dicts; the path of the
    file (looked up via dbutils.describe_file) decides where the leaf goes.
    Uses the enclosing scope's db, root_directories and root_files.
    """
    path_parts = dbutils.describe_file(db, file_id).split("/")
    current_dirs, current_files = root_directories, root_files
    # Walk/create intermediate directory nodes for all but the last component.
    for part in path_parts[:-1]:
        current_dirs, current_files = current_dirs.setdefault(part, ({}, {}))
    # The final component is the file name itself.
    current_files[path_parts[-1]] = file_id
def renderFiles(title, cursor):
    """Emit one "line" row into the enclosing |table| listing the changed
    files fetched from |cursor| (rows of (file_id, deleted, inserted)).

    Does nothing when the cursor yields no rows.
    """
    entries = sorted((dbutils.describe_file(db, file_id), deleted, inserted)
                     for file_id, deleted, inserted in cursor.fetchall())
    if not entries:
        return
    paths = [entry_path for entry_path, _, _ in entries]
    # Shorten shared leading directories for display purposes.
    diff.File.eliminateCommonPrefixes(paths)
    row = table.tr("line")
    row.td("heading").text(title)
    files_table = row.td().table("files callout")
    header_row = files_table.thead().tr()
    header_row.th("path").text("Changed Files")
    header_row.th("lines", colspan=2).text("Lines")
    body = files_table.tbody()
    for display_path, (_, deleted, inserted) in zip(paths, entries):
        line_row = body.tr()
        line_row.td("path").preformatted().innerHTML(display_path)
        # Zero counts render as empty cells (None) rather than "-0"/"+0".
        line_row.td("lines").preformatted().text("-%d" % deleted if deleted else None)
        line_row.td("lines").preformatted().text("+%d" % inserted if inserted else None)
    row.td("status").text()
def renderCommentChain(db, target, user, review, chain, context_lines=3, compact=False, tabify=False, original=False, changeset=None, linkify=False):
    # Render a comment chain into |target|, dispatching on what the chain is
    # anchored to: a file (code comment), a commit, or the review itself.
    chain.loadComments(db, user)
    # Resources needed by any of the three rendering paths.
    target.addExternalStylesheet("resource/changeset.css")
    target.addExternalStylesheet("resource/comment.css")
    target.addExternalStylesheet("resource/review.css")
    target.addExternalScript("resource/changeset.js")
    target.addExternalScript("resource/comment.js")
    target.addExternalScript("resource/review.js")
    # All output goes inside a per-chain container div.
    target = target.div("comment-chain", id="c%d" % chain.id)
    if chain.file_id:
        try:
            renderCodeCommentChain(db, target, user, review, chain,
                                   context_lines, compact, tabify, original,
                                   changeset, linkify)
        # NOTE(review): bare 'except:' also swallows SystemExit and
        # KeyboardInterrupt; consider narrowing to 'except Exception:'.
        except:
            # Fallback: the code rendering failed (comment data is broken in
            # the database), so render it as a plain review comment with an
            # explanatory message pointing at the original location.
            cursor = db.cursor()
            cursor.execute("SELECT first_line, last_line, commit FROM commentchainlines WHERE chain=%s ORDER BY time ASC LIMIT 1", (chain.id,))
            path = dbutils.describe_file(db, chain.file_id)
            first_line, last_line, commit_id = cursor.fetchone()
            commit = gitutils.Commit.fromId(db, review.repository, commit_id)
            if first_line == last_line:
                line = "line %d" % first_line
            else:
                line = "lines %d-%d" % (first_line, last_line)
            message = "<p><b>I'm terribly sorry, but this comment is broken in the database!</b></p><p>It was originally made against %s in some version of <code>%s</code>, in the commit <a href='%s/%s?review=%d&file=%d'>%s</a>.</p>" % (
                line, path, review.repository.name, commit.sha1, review.id,
                chain.file_id, commit.sha1)
            renderReviewCommentChain(db, target, user, review, chain, linkify,
                                     message)
    elif chain.first_commit:
        # Chain anchored to a commit (but not a file).
        renderCommitCommentChain(db, target, user, review, chain, linkify)
    else:
        # Chain anchored to the review itself.
        renderReviewCommentChain(db, target, user, review, chain, linkify)
def renderFiles(db, to_user, review, title, files_lines, commits=None, relevant_only=False, relevant_files=None, showcommit_link=False):
    """Return a plain-text listing of changed files for e-mail bodies.

    files_lines is an iterable of (file_id, delete_count, insert_count).
    When relevant_only is true, only file ids present in relevant_files are
    listed.  When commits is given, the originating commits are appended.
    When showcommit_link is truthy, per-URL "review these changes" links are
    appended; if it is a (from_sha1, to_sha1) pair, the link pins that range.
    Returns "" when there is nothing to show.
    """
    result = ""
    if files_lines:
        files = []
        for file_id, delete_count, insert_count in files_lines:
            if not relevant_only or file_id in relevant_files:
                files.append((dbutils.describe_file(db, file_id), delete_count, insert_count))
        if files:
            paths = []
            deleted = []
            inserted = []
            for path, delete_count, insert_count in sorted(files):
                paths.append(path)
                deleted.append(delete_count)
                inserted.append(insert_count)
            paths = diff.File.eliminateCommonPrefixes(paths, text=True)
            # Column widths for aligned text output.
            len_paths = max(map(len, paths))
            len_deleted = max(map(len, map(str, deleted)))
            len_inserted = max(map(len, map(str, inserted)))
            result += title + "\n"
            for path, delete_count, insert_count in zip(paths, deleted, inserted):
                if delete_count == 0 and insert_count == 0:
                    # Both counts zero means a binary file.
                    result += " %s binary file\n" % path.ljust(len_paths)
                else:
                    delete_field = delete_count > 0 and "-%d" % delete_count or ""
                    insert_field = insert_count > 0 and "+%d" % insert_count or ""
                    result += " %s %s %s\n" % (path.ljust(len_paths), delete_field.rjust(len_deleted + 1), insert_field.rjust(len_inserted + 1))
            if commits:
                if len(commits) == 1:
                    result += "from this commit:\n"
                else:
                    result += "from these commits:\n"
                for commit_id in commits:
                    commit = gitutils.Commit.fromId(db, review.repository, commit_id)
                    result += " %s %s\n" % (commit.sha1[:8], commit.niceSummary())
            if showcommit_link:
                urls = to_user.getCriticURLs(db)
                try:
                    from_sha1, to_sha1 = showcommit_link
                    url_format = " %%s/showcommit?review=%%d&from=%s&to=%s&filter=pending\n" % (from_sha1, to_sha1)
                # BUG FIX: was a bare 'except:', which also swallowed
                # SystemExit/KeyboardInterrupt.  Only unpack failures are
                # expected here: TypeError when showcommit_link is a plain
                # truthy flag (e.g. True), ValueError on wrong tuple length.
                except (TypeError, ValueError):
                    url_format = " %s/showcommit?review=%d&filter=pending\n"
                result += "\nTo review all these changes:\n"
                for url in urls:
                    result += url_format % (url, review.id)
            result += "\n\n"
    return result
def setCustom(self, review, commit, file_id, first_line, last_line):
    """Initialize for propagation of a custom (new) comment location.

    Used to propagate a new comment chain to all relevant commits that are
    currently part of the review.  Returns False when commenting at the
    given location isn't supported — i.e. when |commit| is not part of the
    review — and True otherwise.
    """
    if not review.containsCommit(self.db, commit, True):
        # The commit isn't reviewed here; nothing to propagate to.
        return False
    self.review = review
    self.rebases = review.getReviewRebases(self.db)
    self.initial_commit = commit
    self.addressed_by = []
    self.file_path = dbutils.describe_file(self.db, file_id)
    self.file_id = file_id
    self.location = Location(first_line, last_line)
    self.active = True
    # Seed the per-file-version line maps with the initial location.
    file_sha1 = commit.getFileSHA1(self.file_path)
    line_range = (first_line, last_line)
    self.all_lines = { file_sha1: line_range }
    self.new_lines = { file_sha1: line_range }
    return True
def process(self, db, creator, review_id, filters):
    """Apply a list of review-filter specifications to a review.

    Each entry of |filters| may carry "user_ids", "user_names", "paths"
    (validated glob patterns), "file_ids" and a "type" of "reviewer" or
    anything else (treated as watcher).  Adds the filters, bumps the review
    serial, commits, and sends the resulting notification mails.

    Raises OperationFailure for invalid path patterns or unknown user ids.
    """
    review = dbutils.Review.fromId(db, review_id)
    by_user = {}
    for filter in filters:
        if "user_ids" in filter:
            user_ids = set(filter["user_ids"])
        else:
            user_ids = set([])
        if "user_names" in filter:
            for user_name in filter["user_names"]:
                user_ids.add(dbutils.User.fromName(db, user_name).id)
        if "paths" in filter:
            paths = set(reviewing.filters.sanitizePath(path) for path in filter["paths"])
            for path in paths:
                try:
                    reviewing.filters.validatePattern(path)
                except reviewing.filters.PatternError as error:
                    raise OperationFailure(
                        code="invalidpattern",
                        title="Invalid path pattern",
                        message="There are invalid wild-cards in the path: %s" % error.message)
        else:
            paths = set()
        if "file_ids" in filter:
            for file_id in filter["file_ids"]:
                # BUG FIX: describe_file() takes the database connection as
                # its first argument (cf. every other call site); it was
                # previously called as describe_file(file_id), which would
                # raise a TypeError at runtime.
                paths.add(dbutils.describe_file(db, file_id))
        for user_id in user_ids:
            reviewer_paths, watcher_paths = by_user.setdefault(user_id, (set(), set()))
            if filter["type"] == "reviewer":
                reviewer_paths |= paths
            else:
                watcher_paths |= paths
    pending_mails = []
    for user_id, (reviewer_paths, watcher_paths) in by_user.items():
        user = dbutils.User.fromId(db, user_id)
        if not user:
            raise OperationFailure(code="invaliduserid",
                                   title="Invalid user ID",
                                   message="At least one of the specified user IDs was invalid.")
        pending_mails.extend(reviewing.utils.addReviewFilters(db, creator, user, review, reviewer_paths, watcher_paths))
    # Re-fetch so the serial bump sees the filters just added.
    review = dbutils.Review.fromId(db, review_id)
    review.incrementSerial(db)
    db.commit()
    mailutils.sendPendingMails(pending_mails)
    return OperationResult()
def propagateCommentChains(db, user, review, commits):
    # Re-propagate every file-anchored comment chain in |review| across the
    # newly added |commits|: record new (sha1, line-range) mappings in
    # commentchainlines, and mark open/draft issues whose commented lines
    # were modified as addressed.  Prints a summary of addressed issues
    # (Python 2 print statements).
    import reviewing.comment.propagate
    cursor = db.cursor()
    cursor.execute("""SELECT id, uid, type, state, file FROM commentchains WHERE review=%s AND file IS NOT NULL""", (review.id,))
    # file_id -> { chain_id: (owner uid, type, state) }
    chains_by_file = {}
    for chain_id, chain_user_id, chain_type, chain_state, file_id in cursor:
        chains_by_file.setdefault(file_id, {})[chain_id] = (chain_user_id, chain_type, chain_state)
    commentchainlines_values = []
    addressed_values = []
    for file_id, chains in chains_by_file.items():
        file_path = dbutils.describe_file(db, file_id)
        # Propagation starts from the file version at the branch head.
        file_sha1 = review.branch.head.getFileSHA1(file_path)
        cursor.execute("""SELECT chain, first_line, last_line FROM commentchainlines WHERE chain=ANY (%s) AND sha1=%s""", (chains.keys(), file_sha1))
        for chain_id, first_line, last_line in cursor:
            propagation = reviewing.comment.propagate.Propagation(db)
            propagation.setExisting(review, chain_id, review.branch.head, file_id, first_line, last_line)
            propagation.calculateAdditionalLines(commits)
            chain_user_id, chain_type, chain_state = chains[chain_id]
            # Lines added for a draft chain stay draft; otherwise current.
            lines_state = "draft" if chain_state == "draft" else "current"
            for sha1, (first_line, last_line) in propagation.new_lines.items():
                commentchainlines_values.append((chain_id, chain_user_id, lines_state, sha1, first_line, last_line))
            # An inactive propagation means the commented lines were changed:
            # the issue is addressed by the first modifying changeset's child.
            if chain_type == "issue" and chain_state in ("open", "draft") and not propagation.active:
                addressed_values.append((propagation.addressed_by[0].child.getId(db), chain_id))
    cursor.executemany("""INSERT INTO commentchainlines (chain, uid, state, sha1, first_line, last_line) VALUES (%s, %s, %s, %s, %s, %s)""", commentchainlines_values)
    if addressed_values:
        # Open issues transition to 'addressed'; draft issues only record
        # the addressing commit (state handled at submission time).
        cursor.executemany("UPDATE commentchains SET state='addressed', addressed_by=%s WHERE id=%s AND state='open'", addressed_values)
        cursor.executemany("UPDATE commentchains SET addressed_by=%s WHERE id=%s AND state='draft'", addressed_values)
        print "Addressed issues:"
        for commit_id, chain_id in addressed_values:
            chain = CommentChain.fromId(db, chain_id, user, review=review)
            if chain.state == 'addressed':
                chain.loadComments(db, user)
                title = " %s: " % chain.title(False)
                print "%s%s" % (title, chain.leader(max_length=80 - len(title), text=True))
def generateMailsForAssignmentsTransaction(db, transaction_id):
    # Build the pending e-mails describing a review-assignments transaction:
    # per affected user, which review filters were added/removed and which
    # file changes were assigned/unassigned as a result.  Returns the list
    # of pending mail descriptors from mail.sendAssignmentsChanged().
    cursor = db.cursor()
    cursor.execute(
        "SELECT review, assigner, note FROM reviewassignmentstransactions WHERE id=%s",
        (transaction_id, ))
    # |note| is fetched but not used in this function.
    review_id, assigner_id, note = cursor.fetchone()
    review = dbutils.Review.fromId(db, review_id)
    assigner = dbutils.User.fromId(db, assigner_id)
    cursor.execute(
        """SELECT uid, directory, file, type, created FROM reviewfilterchanges WHERE transaction=%s""",
        (transaction_id, ))
    # Per-user accumulator: (added_filters, removed_filters, unassigned, assigned).
    by_user = {}
    for reviewer_id, directory_id, file_id, filter_type, created in cursor:
        added_filters, removed_filters, unassigned, assigned = by_user.setdefault(
            reviewer_id, ([], [], [], []))
        if file_id:
            path = dbutils.describe_file(db, file_id)
        elif directory_id:
            path = dbutils.describe_directory(db, directory_id)
        else:
            # Neither file nor directory set: filter covers the whole tree.
            path = "/"
        if created:
            added_filters.append((filter_type, path))
        else:
            removed_filters.append((filter_type, path))
    cursor.execute(
        """SELECT reviewassignmentchanges.uid, reviewassignmentchanges.assigned, reviewfiles.file, SUM(reviewfiles.deleted), SUM(reviewfiles.inserted) FROM reviewfiles JOIN reviewassignmentchanges ON (reviewassignmentchanges.file=reviewfiles.id) WHERE reviewassignmentchanges.transaction=%s GROUP BY reviewassignmentchanges.uid, reviewassignmentchanges.assigned, reviewfiles.file""",
        (transaction_id, ))
    for reviewer_id, was_assigned, file_id, deleted, inserted in cursor:
        # Users seen only here get (None, None, ...) filter slots: they had
        # assignment changes but no filter changes.
        added_filters, removed_filters, unassigned, assigned = by_user.setdefault(
            reviewer_id, (None, None, [], []))
        if was_assigned:
            assigned.append((file_id, deleted, inserted))
        else:
            unassigned.append((file_id, deleted, inserted))
    pending_mails = []
    for reviewer_id, (added_filters, removed_filters, unassigned,
                      assigned) in by_user.items():
        reviewer = dbutils.User.fromId(db, reviewer_id)
        # Don't notify the assigner about their own changes.
        if assigner != reviewer:
            pending_mails.extend(
                mail.sendAssignmentsChanged(db, assigner, reviewer, review,
                                            added_filters, removed_filters,
                                            unassigned, assigned))
    return pending_mails
def formatFiles(files):
    """Return display names for |files| (an iterable of file ids).

    With "file" granularity this is the prefix-shortened file paths; with
    any other granularity, module names are substituted where a module can
    be derived, followed by the remaining (shortened) paths.
    """
    paths = sorted(dbutils.describe_file(db, file_id) for file_id in files)
    if granularity == "file":
        return diff.File.eliminateCommonPrefixes(paths)
    modules = set()
    plain_paths = []
    for path in paths:
        module = getModuleFromFile(path)
        if module:
            modules.add(module)
        else:
            plain_paths.append(path)
    return sorted(modules) + diff.File.eliminateCommonPrefixes(plain_paths)
def process(self, db, user, repository_id, changeset_id, files):
    """Annotate the requested line blocks of |files| with originating commits.

    Tries the fast LineAnnotator first; when the changeset isn't supported
    it falls back to git blame.  Returns an OperationResult carrying the
    involved commits and the annotated |files| structure (mutated in place:
    each block gains a "lines" key).
    """
    repository = gitutils.Repository.fromId(db, repository_id)
    cursor = db.cursor()
    cursor.execute("SELECT parent, child FROM changesets WHERE id=%s",
                   (changeset_id, ))
    parent_id, child_id = cursor.fetchone()
    parent = gitutils.Commit.fromId(db, repository, parent_id)
    child = gitutils.Commit.fromId(db, repository, child_id)
    try:
        annotator = LineAnnotator(db, parent, child)
        for file in files:
            for block in file["blocks"]:
                lines = annotator.annotate(file["id"], block["first"],
                                           block["last"])
                block["lines"] = [{ "offset": offset, "commit": commit }
                                  for offset, commit in lines]
        return OperationResult(commits=[{ "sha1": commit.sha1,
                                          "author_name": commit.author.name,
                                          "author_email": commit.author.email,
                                          "summary": commit.niceSummary(),
                                          "message": commit.message,
                                          "original": commit == parent,
                                          "current": commit == child }
                                        for commit in annotator.commits],
                               files=files)
    except LineAnnotator.NotSupported:
        # Fall back to git blame for changesets LineAnnotator can't handle.
        blame = gitutils.Blame(parent, child)
        # Cache file_id -> path; describe_file() hits the database.
        # FIX: removed the unused 'commits = {}' local that shadowed nothing
        # and was never read (blame.commits is used instead).
        paths = {}
        for file in files:
            file_id = file["id"]
            path = paths.get(file_id)
            if not path:
                path = paths[file_id] = dbutils.describe_file(db, file_id)
            for block in file["blocks"]:
                block["lines"] = blame.blame(db, path, block["first"],
                                             block["last"])
        return OperationResult(commits=blame.commits, files=files)
def renderCommentChain(db, target, user, review, chain,
                       context_lines=3, compact=False, tabify=False,
                       original=False, changeset=None, linkify=False):
    """Render comment chain |chain| into |target|.

    Dispatches on the chain's anchor: a file (code comment, with a fallback
    message when the stored location is broken), a commit, or the review
    itself.  Registers the stylesheets/scripts all three paths need.
    """
    chain.loadComments(db, user)
    target.addExternalStylesheet("resource/changeset.css")
    target.addExternalStylesheet("resource/comment.css")
    target.addExternalStylesheet("resource/review.css")
    target.addExternalScript("resource/changeset.js")
    target.addExternalScript("resource/comment.js")
    target.addExternalScript("resource/review.js")
    # All output goes inside a per-chain container div.
    target = target.div("comment-chain", id="c%d" % chain.id)
    if chain.file_id:
        try:
            renderCodeCommentChain(db, target, user, review, chain,
                                   context_lines, compact, tabify, original,
                                   changeset, linkify)
        # BUG FIX: was a bare 'except:', which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception while preserving the
        # intentional catch-all fallback for broken comment data.
        except Exception:
            cursor = db.cursor()
            cursor.execute(
                "SELECT first_line, last_line, commit FROM commentchainlines WHERE chain=%s ORDER BY time ASC LIMIT 1",
                (chain.id, ))
            path = dbutils.describe_file(db, chain.file_id)
            first_line, last_line, commit_id = cursor.fetchone()
            commit = gitutils.Commit.fromId(db, review.repository, commit_id)
            if first_line == last_line:
                line = "line %d" % first_line
            else:
                line = "lines %d-%d" % (first_line, last_line)
            message = "<p><b>I'm terribly sorry, but this comment is broken in the database!</b></p><p>It was originally made against %s in some version of <code>%s</code>, in the commit <a href='%s/%s?review=%d&file=%d'>%s</a>.</p>" % (
                line, path, review.repository.name, commit.sha1, review.id,
                chain.file_id, commit.sha1)
            renderReviewCommentChain(db, target, user, review, chain, linkify,
                                     message)
    elif chain.first_commit:
        renderCommitCommentChain(db, target, user, review, chain, linkify)
    else:
        renderReviewCommentChain(db, target, user, review, chain, linkify)
def generateMailsForAssignmentsTransaction(db, transaction_id):
    """Compose pending mails for one review-assignments transaction.

    Collects, per affected user, the review filters that were added or
    removed and the file changes that were assigned or unassigned, then
    hands each user's summary to mail.sendAssignmentsChanged().  The
    assigner is not mailed about their own changes.  Returns the list of
    pending mail descriptors.
    """
    cursor = db.cursor()
    cursor.execute("SELECT review, assigner, note FROM reviewassignmentstransactions WHERE id=%s", (transaction_id,))
    review_id, assigner_id, note = cursor.fetchone()
    review = dbutils.Review.fromId(db, review_id)
    assigner = dbutils.User.fromId(db, assigner_id)

    # Per-user state: (added_filters, removed_filters, unassigned, assigned).
    per_user = {}

    cursor.execute("""SELECT uid, directory, file, type, created FROM reviewfilterchanges WHERE transaction=%s""", (transaction_id,))
    for uid, directory_id, file_id, filter_type, created in cursor:
        added, removed, _, _ = per_user.setdefault(uid, ([], [], [], []))
        if file_id:
            path = dbutils.describe_file(db, file_id)
        elif directory_id:
            path = dbutils.describe_directory(db, directory_id)
        else:
            # Filter applies to the whole tree.
            path = "/"
        (added if created else removed).append((filter_type, path))

    cursor.execute("""SELECT reviewassignmentchanges.uid, reviewassignmentchanges.assigned, reviewfiles.file, SUM(reviewfiles.deleted), SUM(reviewfiles.inserted) FROM reviewfiles JOIN reviewassignmentchanges ON (reviewassignmentchanges.file=reviewfiles.id) WHERE reviewassignmentchanges.transaction=%s GROUP BY reviewassignmentchanges.uid, reviewassignmentchanges.assigned, reviewfiles.file""", (transaction_id,))
    for uid, was_assigned, file_id, deleted, inserted in cursor:
        # Users seen only here had assignment changes but no filter changes,
        # hence the (None, None, ...) filter slots.
        _, _, unassigned, assigned = per_user.setdefault(uid, (None, None, [], []))
        (assigned if was_assigned else unassigned).append((file_id, deleted, inserted))

    pending_mails = []
    for uid, (added, removed, unassigned, assigned) in per_user.items():
        reviewer = dbutils.User.fromId(db, uid)
        if assigner != reviewer:
            pending_mails.extend(mail.sendAssignmentsChanged(
                db, assigner, reviewer, review,
                added, removed, unassigned, assigned))
    return pending_mails
def setCustom(self, review, commit, file_id, first_line, last_line):
    """Initialize for propagation of a custom location.

    This mode of operation is used to propagate a new comment chain to
    all relevant commits current part of the review.

    Returns false if the creating a comment at the specified location
    is not supported, typically because the commit is not being
    reviewed in the review, the file doesn't exist in the commit, or
    the line range is out of bounds.
    """
    # Line numbers are 1-based and the range is inclusive.
    assert first_line > 0
    assert last_line >= first_line
    if not review.containsCommit(self.db, commit, True):
        return False
    self.review = review
    self.rebases = review.getReviewRebases(self.db)
    self.initial_commit = commit
    self.addressed_by = []
    self.file_path = dbutils.describe_file(self.db, file_id)
    self.file_id = file_id
    self.location = Location(first_line, last_line)
    self.active = True
    file_entry = commit.getFileEntry(self.file_path)
    if file_entry is None:
        # File doesn't exist (in the given commit.)
        return False
    # Load the commented version of the file to bounds-check the range.
    diff_file = diff.File(new_sha1=file_entry.sha1,
                          new_mode=file_entry.mode,
                          repository=review.repository)
    diff_file.loadNewLines()
    if last_line > diff_file.newCount():
        # Range of lines is out of bounds.
        return False
    # Seed the per-file-version line maps with the initial location.
    self.all_lines = { file_entry.sha1: (first_line, last_line) }
    self.new_lines = { file_entry.sha1: (first_line, last_line) }
    return True
def process(self, db, user, repository_id, changeset_id, files):
    """Annotate requested line ranges in |files| with originating commits.

    Prefers the LineAnnotator fast path; falls back to git blame when the
    changeset isn't supported.  Mutates each block in |files| by adding a
    "lines" key, and returns an OperationResult with commits and files.
    """
    repository = gitutils.Repository.fromId(db, repository_id)
    cursor = db.cursor()
    cursor.execute("SELECT parent, child FROM changesets WHERE id=%s", (changeset_id,))
    parent_id, child_id = cursor.fetchone()
    parent = gitutils.Commit.fromId(db, repository, parent_id)
    child = gitutils.Commit.fromId(db, repository, child_id)

    def describe(commit):
        # Serialize a commit for the operation result.
        return { "sha1": commit.sha1,
                 "author_name": commit.author.name,
                 "author_email": commit.author.email,
                 "summary": commit.niceSummary(),
                 "message": commit.message,
                 "original": commit == parent,
                 "current": commit == child }

    try:
        annotator = LineAnnotator(db, parent, child)
        for item in files:
            for block in item["blocks"]:
                annotated = annotator.annotate(item["id"], block["first"], block["last"])
                block["lines"] = [{ "offset": offset, "commit": commit }
                                  for offset, commit in annotated]
        return OperationResult(
            commits=[describe(commit) for commit in annotator.commits],
            files=files)
    except LineAnnotator.NotSupported:
        # Slow path: git blame.
        blame = gitutils.Blame(parent, child)
        path_cache = {}
        for item in files:
            file_id = item["id"]
            path = path_cache.get(file_id)
            if not path:
                path = path_cache[file_id] = dbutils.describe_file(db, file_id)
            for block in item["blocks"]:
                block["lines"] = blame.blame(db, path, block["first"], block["last"])
        return OperationResult(commits=blame.commits, files=files)
def process(self, db, creator, review_id, filters):
    """Apply a list of review-filter specifications to a review.

    Each entry of |filters| may carry "user_ids", "user_names", "paths",
    "file_ids" and a "type" of "reviewer" or anything else (treated as
    watcher).  Adds the collected filters per user, commits, and sends the
    resulting notification mails.
    """
    review = dbutils.Review.fromId(db, review_id)
    by_user = {}
    for filter in filters:
        if "user_ids" in filter:
            user_ids = set(filter["user_ids"])
        else:
            user_ids = set([])
        if "user_names" in filter:
            for user_name in filter["user_names"]:
                user_ids.add(dbutils.User.fromName(db, user_name).id)
        if "paths" in filter:
            paths = set(filter["paths"])
        else:
            paths = set()
        if "file_ids" in filter:
            for file_id in filter["file_ids"]:
                # BUG FIX: describe_file() takes the database connection as
                # its first argument (cf. every other call site); it was
                # previously called as describe_file(file_id), which would
                # raise a TypeError at runtime.
                paths.add(dbutils.describe_file(db, file_id))
        for user_id in user_ids:
            reviewer_paths, watcher_paths = by_user.setdefault(user_id, (set(), set()))
            if filter["type"] == "reviewer":
                reviewer_paths |= paths
            else:
                watcher_paths |= paths
    pending_mails = []
    for user_id, (reviewer_paths, watcher_paths) in by_user.items():
        user = dbutils.User.fromId(db, user_id)
        pending_mails.extend(reviewing.utils.addReviewFilters(db, creator, user, review, reviewer_paths, watcher_paths))
    db.commit()
    mailutils.sendPendingMails(pending_mails)
    return OperationResult()
def validateCommentChain(db, review, origin, parent_id, child_id, file_id,
                         offset, count):
    """Check whether the commented lines are changed by later commits in the
    review.

    Returns a (status, extra) pair:
      ("invalid", {})     - a comment can't be created here, typically
                            because the commit isn't part of the review;
      ("clean", {})       - lines are untouched and the file version matches
                            the branch head;
      ("transferred", {}) - lines survive but the file differs at the head;
      ("modified", {...}) - lines were changed; extra carries parent_sha1,
                            child_sha1 and offset of the modifying changeset.
    """
    import reviewing.comment.propagate
    # "old" means the comment targets the parent side of the diff.
    if origin == "old":
        commit = gitutils.Commit.fromId(db, review.repository, parent_id)
    else:
        commit = gitutils.Commit.fromId(db, review.repository, child_id)
    propagation = reviewing.comment.propagate.Propagation(db)
    # The commented range is [offset, offset + count - 1], inclusive.
    if not propagation.setCustom(review, commit, file_id, offset,
                                 offset + count - 1):
        return "invalid", {}
    propagation.calculateInitialLines()
    if propagation.active:
        file_path = dbutils.describe_file(db, file_id)
        # Active propagation: the lines survive to the branch head; report
        # whether the file content there differs from the commented version.
        if commit.getFileSHA1(file_path) != review.branch.head.getFileSHA1(
                file_path):
            return "transferred", {}
        else:
            return "clean", {}
    else:
        # Inactive: some changeset modified the lines; report the first one.
        addressed_by = propagation.addressed_by[0]
        return "modified", { "parent_sha1": addressed_by.parent.sha1,
                             "child_sha1": addressed_by.child.sha1,
                             "offset": addressed_by.location.first_line }
def validateCommentChain(db, review, origin, parent_id, child_id, file_id, offset, count):
    """Check whether the commented lines are changed by later commits in the
    review.

    Returns a (status, extra) pair: "invalid" when a comment can't be
    created at the location, "clean" when the lines and file version match
    the branch head, "transferred" when the lines survive but the file
    differs at the head, or "modified" (with parent/child sha1 and offset of
    the first modifying changeset) when the lines were changed.
    """
    import reviewing.comment.propagate
    # "old" origin targets the parent side of the diff, otherwise the child.
    commit_id = parent_id if origin == "old" else child_id
    commit = gitutils.Commit.fromId(db, review.repository, commit_id)
    propagation = reviewing.comment.propagate.Propagation(db)
    if not propagation.setCustom(review, commit, file_id, offset, offset + count - 1):
        return "invalid", {}
    propagation.calculateInitialLines()
    if not propagation.active:
        # Some later changeset modified the lines; report the first one.
        addressed_by = propagation.addressed_by[0]
        return (
            "modified",
            {
                "parent_sha1": addressed_by.parent.sha1,
                "child_sha1": addressed_by.child.sha1,
                "offset": addressed_by.location.first_line,
            },
        )
    file_path = dbutils.describe_file(db, file_id)
    head_sha1 = review.branch.head.getFileSHA1(file_path)
    if commit.getFileSHA1(file_path) != head_sha1:
        return "transferred", {}
    return "clean", {}
def process(self, db, creator, review_id, filters):
    """Validate |filters| and collect reviewer/watcher path sets per user.

    Each entry of |filters| may carry "user_ids", "user_names", "paths"
    (sanitized and validated as glob patterns) and "file_ids", plus a
    "type" of "reviewer" or anything else (treated as watcher).

    Raises OperationFailure for invalid path patterns.
    """
    review = dbutils.Review.fromId(db, review_id)
    by_user = {}
    for filter in filters:
        if "user_ids" in filter:
            user_ids = set(filter["user_ids"])
        else:
            user_ids = set([])
        if "user_names" in filter:
            for user_name in filter["user_names"]:
                user_ids.add(dbutils.User.fromName(db, user_name).id)
        if "paths" in filter:
            paths = set(reviewing.filters.sanitizePath(path) for path in filter["paths"])
            for path in paths:
                try:
                    reviewing.filters.validatePattern(path)
                # FIX: modernized Python-2-only 'except E, error' to
                # 'except E as error' (valid in Python 2.6+ and required in
                # Python 3), matching the sibling implementation above.
                except reviewing.filters.PatternError as error:
                    raise OperationFailure(
                        code="invalidpattern",
                        title="Invalid path pattern",
                        message="There are invalid wild-cards in the path: %s" % error.message,
                    )
        else:
            paths = set()
        if "file_ids" in filter:
            for file_id in filter["file_ids"]:
                # BUG FIX: describe_file() takes the database connection as
                # its first argument (cf. every other call site); it was
                # previously called as describe_file(file_id), which would
                # raise a TypeError at runtime.
                paths.add(dbutils.describe_file(db, file_id))
        for user_id in user_ids:
            reviewer_paths, watcher_paths = by_user.setdefault(user_id, (set(), set()))
            if filter["type"] == "reviewer":
                reviewer_paths |= paths
            else:
                watcher_paths |= paths
def renderFiles(title, cursor):
    # Render one "line" row into the enclosing |table| (closure variable)
    # listing the changed files fetched from |cursor|, whose rows are
    # (file_id, delete_count, insert_count).  Emits nothing when the cursor
    # yields no rows.
    files = []
    for file_id, delete_count, insert_count in cursor.fetchall():
        files.append(
            (dbutils.describe_file(db, file_id), delete_count, insert_count))
    paths = []
    deleted = []
    inserted = []
    # Sort by path, then split into parallel columns for rendering.
    for path, delete_count, insert_count in sorted(files):
        paths.append(path)
        deleted.append(delete_count)
        inserted.append(insert_count)
    if paths:
        # Presumably shortens shared path prefixes in place for display —
        # the return value is not captured here.
        diff.File.eliminateCommonPrefixes(paths)
        row = table.tr("line")
        row.td("heading").text(title)
        files_table = row.td().table("files callout")
        headers = files_table.thead().tr()
        headers.th("path").text("Changed Files")
        headers.th("lines", colspan=2).text("Lines")
        files = files_table.tbody()
        for path, delete_count, insert_count in zip(
                paths, deleted, inserted):
            file = files.tr()
            file.td("path").preformatted().innerHTML(path)
            # Zero counts render as empty cells (None) rather than "-0"/"+0".
            file.td("lines").preformatted().text(
                "-%d" % delete_count if delete_count else None)
            file.td("lines").preformatted().text(
                "+%d" % insert_count if insert_count else None)
        row.td("status").text()
def setExisting(self, review, chain_id, commit, file_id, first_line,
                last_line, reopening=False):
    """Initialize for propagation of an existing comment chain.

    Loads all recorded (sha1 -> line range) mappings for the chain from
    commentchainlines into self.all_lines and sets the starting location
    to (first_line, last_line) in |commit|.

    This mode of operation is used to update existing comment chains when
    adding new commits to a review.

    NOTE(review): the original docstring claimed this returns False when
    the chain isn't present in the most recent commit, but the code as
    written always returns True — confirm against callers.
    """
    self.review = review
    self.rebases = review.getReviewRebases(self.db)
    self.initial_commit = commit
    self.addressed_by = []
    self.file_path = dbutils.describe_file(self.db, file_id)
    self.file_id = file_id
    self.location = Location(first_line, last_line)
    self.active = True
    self.all_lines = {}
    self.new_lines = {}
    cursor = self.db.cursor()
    cursor.execute("""SELECT sha1, first_line, last_line FROM commentchainlines WHERE chain=%s""", (chain_id,))
    # Note: this loop deliberately rebinds the first_line/last_line
    # parameters; self.location already captured the original values.
    for file_sha1, first_line, last_line in cursor:
        self.all_lines[file_sha1] = (first_line, last_line)
    if reopening:
        # Re-record the location against the file version in |commit|.
        self.__setLines(commit.getFileSHA1(self.file_path), self.location)
    return True
def moduleFromFile(file_id):
    """Map |file_id| to its module name, falling back to the file's path
    when no module can be derived."""
    path = dbutils.describe_file(db, file_id)
    module = getModuleFromFile(repository, path)
    return module if module else path
def propagateCommentChains(db, user, review, commits, replayed_rebases={}): import reviewing.comment.propagate cursor = db.cursor() cursor.execute( """SELECT id, uid, type, state, file FROM commentchains WHERE review=%s AND file IS NOT NULL""", (review.id, )) chains_by_file = {} for chain_id, chain_user_id, chain_type, chain_state, file_id in cursor: chains_by_file.setdefault(file_id, {})[chain_id] = (chain_user_id, chain_type, chain_state) commentchainlines_values = [] addressed_values = [] for file_id, chains in chains_by_file.items(): file_path = dbutils.describe_file(db, file_id) file_sha1 = review.branch.head.getFileSHA1(file_path) cursor.execute( """SELECT chain, first_line, last_line FROM commentchainlines WHERE chain=ANY (%s) AND sha1=%s""", (chains.keys(), file_sha1)) for chain_id, first_line, last_line in cursor: assert len(commits.getHeads()) == 1 head = commits.getHeads().pop() if head in replayed_rebases: head = replayed_rebases[head] propagation = reviewing.comment.propagate.Propagation(db) propagation.setExisting(review, chain_id, review.branch.head, file_id, first_line, last_line) propagation.calculateAdditionalLines(commits, head) chain_user_id, chain_type, chain_state = chains[chain_id] lines_state = "draft" if chain_state == "draft" else "current" for sha1, (first_line, last_line) in propagation.new_lines.items(): commentchainlines_values.append( (chain_id, chain_user_id, lines_state, sha1, first_line, last_line)) if chain_type == "issue" and chain_state in ( "open", "draft") and not propagation.active: addressed_values.append( (propagation.addressed_by[0].child.getId(db), chain_id)) cursor.executemany( """INSERT INTO commentchainlines (chain, uid, state, sha1, first_line, last_line) VALUES (%s, %s, %s, %s, %s, %s)""", commentchainlines_values) if addressed_values: cursor.executemany( "UPDATE commentchains SET state='addressed', addressed_by=%s WHERE id=%s AND state='open'", addressed_values) cursor.executemany( "UPDATE commentchains SET 
addressed_by=%s WHERE id=%s AND state='draft'", addressed_values) print "Addressed issues:" for commit_id, chain_id in addressed_values: chain = CommentChain.fromId(db, chain_id, user, review=review) if chain.state == 'addressed': chain.loadComments(db, user) title = " %s: " % chain.title(False) print "%s%s" % ( title, chain.leader(max_length=80 - len(title), text=True))
def renderWatchers(target):
    # Render the "watchers" section of the review page into |target|:
    # the list of watching users, any custom watcher filters, and the
    # watch/unwatch buttons.  Uses the enclosing scope's review, cursor,
    # user, db, diff and htmlutils.
    if review.watchers:
        for index, watcher in enumerate(review.watchers):
            if index != 0:
                target.text(", ")
            span = target.span("user %s" % watcher.status)
            span.span("name").text(watcher.fullname)
            # Annotate absent/retired watchers inline.
            if watcher.status == 'absent':
                span.span("status").text(" (%s)" % watcher.getAbsence(db))
            elif watcher.status == 'retired':
                span.span("status").text(" (retired)")
    else:
        target.i().text("No watchers.")
    # Custom watcher filters, excluding those owned by retired users.
    cursor.execute("""SELECT reviewfilters.id, reviewfilters.uid, reviewfilters.directory, reviewfilters.file FROM reviewfilters JOIN users ON (reviewfilters.uid=users.id) WHERE reviewfilters.review=%s AND reviewfilters.type='watcher' AND users.status!='retired'""", (review.id,))
    rows = cursor.fetchall()
    watcher_filters_hidden = []
    if rows:
        table = target.table("reviewfilters watchers")
        row = table.thead().tr("h1")
        row.th("h1", colspan=4).text("Custom filters:")
        # (fullname, path) -> (filter_id, user), for the remove links below.
        filter_data = {}
        # fullname -> list of watched paths.
        reviewfilters = {}
        for filter_id, user_id, directory_id, file_id in rows:
            filter_user = dbutils.User.fromId(db, user_id)
            if file_id:
                path = dbutils.describe_file(db, file_id)
            else:
                # Directory filters get a trailing "/" for display.
                path = dbutils.describe_directory(db, directory_id) + "/"
            reviewfilters.setdefault(filter_user.fullname, []).append(path)
            filter_data[(filter_user.fullname, path)] = (filter_id, filter_user)
        count = 0
        tbody = table.tbody()
        for fullname in sorted(reviewfilters.keys()):
            original_paths = sorted(reviewfilters[fullname])
            # Copy before shortening: the originals key into filter_data.
            trimmed_paths = diff.File.eliminateCommonPrefixes(original_paths[:])
            first = True
            for original_path, trimmed_path in zip(original_paths, trimmed_paths):
                row = tbody.tr("filter")
                if first:
                    # Name/role cells span all of this user's filter rows.
                    row.td("username", rowspan=len(original_paths)).text(fullname)
                    row.td("reviews", rowspan=len(original_paths)).text("watches")
                    first = False
                row.td("path").span().innerHTML(trimmed_path)
                filter_id, filter_user = filter_data[(fullname, original_path)]
                # The final argument tells the client whether the filter
                # belongs to someone other than the current user.
                href = "javascript:removeReviewFilter(%d, %s, 'watcher', %s, %s);" % (filter_id, filter_user.getJSConstructor(), htmlutils.jsify(original_path), "true" if filter_user != user else "false")
                row.td("remove").a(href=href).text("[remove]")
                count += 1
        tfoot = table.tfoot()
        tfoot.tr().td(colspan=4).text("%d line%s hidden" % (count, "s" if count > 1 else ""))
        # Collapse long filter lists by default; short lists hide the
        # "N lines hidden" footer instead.
        if count > 10:
            tbody.setAttribute("class", "hidden")
            watcher_filters_hidden.append(True)
        else:
            tfoot.setAttribute("class", "hidden")
            watcher_filters_hidden.append(False)
    buttons = target.div("buttons")
    if watcher_filters_hidden:
        buttons.button("showfilters", onclick="toggleReviewFilters('watchers', $(this));").text("%s Custom Filters" % ("Show" if watcher_filters_hidden[0] else "Hide"))
    buttons.button("addwatcher", onclick="addWatcher();").text("Add Watcher")
    # Owners and reviewers can't additionally watch; everyone else can
    # start watching, or stop if they are watching manually.
    if user not in review.reviewers and user not in review.owners:
        if user not in review.watchers:
            buttons.button("watch", onclick="watchReview();").text("Watch Review")
        elif review.watchers[user] == "manual":
            buttons.button("watch", onclick="unwatchReview();").text("Stop Watching Review")
def createCommentChain(db, user, review, chain_type, commit_id=None, origin=None, file_id=None, parent_id=None, child_id=None, old_sha1=None, new_sha1=None, offset=None, count=None):
    """Create a comment chain ("issue" or "note") in `review` and return its id.

    Three addressing modes, chosen by the arguments:
      * file_id with parent_id == child_id (or no parent): comment on a single
        file version; requires that the file was actually changed in the review.
      * file_id with a parent/child commit pair: comment on a diff; the commented
        line range is propagated backwards to its origin and forwards through
        every later version of the file, recording one commentchainlines row
        per version it transfers to cleanly.
      * commit_id only: comment on a commit message.
      * none of the above: general review comment.

    Raises OperationFailure if the review is closed (for issues) or if the
    single-version mode is used on a file the review never changed.

    NOTE(review): this definition is dead code — an identical copy of this
    function appears later in the file and shadows this one.
    """
    if chain_type == "issue" and review.state != "open":
        raise OperationFailure(code="reviewclosed", title="Review is closed!", message="You need to reopen the review before you can raise new issues.")
    cursor = db.cursor()
    if file_id is not None and (parent_id == child_id or parent_id is None):
        # Single-version mode only works if some reviewed changeset modified
        # the file (neither side of the change being the null SHA-1).
        cursor.execute("""SELECT 1 FROM reviewchangesets JOIN fileversions USING (changeset) WHERE reviewchangesets.review=%s AND fileversions.file=%s AND fileversions.old_sha1!='0000000000000000000000000000000000000000' AND fileversions.new_sha1!='0000000000000000000000000000000000000000'""", (review.id, file_id))
        if cursor.fetchone():
            # The file was changed in the review, but not in a way this code
            # path supports: point the user at a diff view instead and bail.
            cursor.execute("""SELECT parent, child FROM changesets JOIN reviewchangesets ON (reviewchangesets.changeset=changesets.id) JOIN fileversions ON (fileversions.changeset=changesets.id) WHERE fileversions.file=%s AND fileversions.new_sha1=%s""", (file_id, new_sha1))
            rows = cursor.fetchall()
            if not rows:
                cursor.execute("""SELECT parent, child FROM changesets JOIN reviewchangesets ON (reviewchangesets.changeset=changesets.id) JOIN fileversions ON (fileversions.changeset=changesets.id) WHERE fileversions.file=%s AND fileversions.old_sha1=%s""", (file_id, new_sha1))
                rows = cursor.fetchall()
            parent = child = None
            # Prefer a changeset whose child is the commented commit; fall back
            # to one whose parent is (i.e. the commented version is the old side).
            for row_parent_id, row_child_id in rows:
                if row_child_id == child_id:
                    parent = gitutils.Commit.fromId(db, review.repository, row_parent_id)
                    child = gitutils.Commit.fromId(db, review.repository, row_child_id)
                    break
                elif row_parent_id == child_id and parent is None:
                    parent = gitutils.Commit.fromId(db, review.repository, row_parent_id)
                    child = gitutils.Commit.fromId(db, review.repository, row_child_id)
            if parent and child:
                url = "/%s/%s..%s?review=%d&file=%d" % (review.repository.name, parent.sha1[:8], child.sha1[:8], review.id, file_id)
                link = ("<p>The link below goes to a diff that can be use to create the comment:</p>" + "<p style='padding-left: 2em'><a href='%s'>%s%s</a></p>") % (url, dbutils.getURLPrefix(db), url)
            else:
                link = ""
            raise OperationFailure(code="notsupported", title="File changed in review", message=("<p>Due to limitations in the code used to create comments, " + "it's only possible to create comments via a diff view if " + "the commented file has been changed in the review.</p>" + link), is_html=True)
        cursor.execute("""INSERT INTO commentchains (review, uid, type, file, first_commit, last_commit) VALUES (%s, %s, %s, %s, %s, %s) RETURNING id""", (review.id, user.id, chain_type, file_id, child_id, child_id))
        chain_id = cursor.fetchone()[0]
        cursor.execute("""INSERT INTO commentchainlines (chain, uid, commit, sha1, first_line, last_line) VALUES (%s, %s, %s, %s, %s, %s)""", (chain_id, user.id, child_id, new_sha1, offset, offset + count - 1))
    elif file_id is not None:
        # Diff mode: build maps of the file's version chain within the review.
        parents_returned = set()

        def getFileParent(new_sha1):
            # Returns (changeset_id, old_sha1) for the changeset whose new side
            # is `new_sha1`, or (None, None) when there is none / on a cycle.
            cursor.execute("""SELECT changesets.id, fileversions.old_sha1 FROM changesets, reviewchangesets, fileversions WHERE reviewchangesets.review=%s AND reviewchangesets.changeset=changesets.id AND fileversions.changeset=changesets.id AND fileversions.file=%s AND fileversions.new_sha1=%s""", [review.id, file_id, new_sha1])
            try:
                changeset_id, old_sha1 = cursor.fetchone()
                if old_sha1 in parents_returned:
                    return None, None
                parents_returned.add(old_sha1)
                return changeset_id, old_sha1
            except:
                # NOTE(review): bare except also hides unexpected errors, not
                # just the "no row" TypeError it is presumably meant to catch.
                return None, None

        children_returned = set()

        def getFileChild(old_sha1):
            # Mirror of getFileParent, walking towards newer versions.
            cursor.execute("""SELECT changesets.id, fileversions.new_sha1 FROM changesets, reviewchangesets, fileversions WHERE reviewchangesets.review=%s AND reviewchangesets.changeset=changesets.id AND fileversions.changeset=changesets.id AND fileversions.file=%s AND fileversions.old_sha1=%s""", [review.id, file_id, old_sha1])
            try:
                changeset_id, new_sha1 = cursor.fetchone()
                if new_sha1 in children_returned:
                    return None, None
                children_returned.add(new_sha1)
                return changeset_id, new_sha1
            except:
                return None, None

        # Find the changeset the comment was actually made against.
        cursor.execute("""SELECT changesets.id FROM changesets, reviewchangesets, fileversions WHERE reviewchangesets.review=%s AND reviewchangesets.changeset=changesets.id AND changesets.child=%s AND fileversions.changeset=changesets.id AND fileversions.file=%s AND fileversions.old_sha1=%s AND fileversions.new_sha1=%s""", [review.id, child_id, file_id, old_sha1, new_sha1])
        row = cursor.fetchone()
        if not row:
            # Fall back to matching on just the commented side's SHA-1.
            if origin == "old":
                cursor.execute("""SELECT changesets.id FROM changesets, reviewchangesets, fileversions WHERE reviewchangesets.review=%s AND reviewchangesets.changeset=changesets.id AND fileversions.changeset=changesets.id AND fileversions.file=%s AND fileversions.old_sha1=%s""", [review.id, file_id, old_sha1])
            else:
                cursor.execute("""SELECT changesets.id FROM changesets, reviewchangesets, fileversions WHERE reviewchangesets.review=%s AND reviewchangesets.changeset=changesets.id AND fileversions.changeset=changesets.id AND fileversions.file=%s AND fileversions.new_sha1=%s""", [review.id, file_id, new_sha1])
            row = cursor.fetchone()
        primary_changeset_id = row[0]
        # sha1 -> (changeset_id, adjacent sha1), towards older/newer versions.
        sha1s_older = { }
        sha1s_newer = { old_sha1: (primary_changeset_id, new_sha1) }
        sha1 = new_sha1
        while True:
            changeset_id, next_sha1 = getFileParent(sha1)
            if changeset_id:
                sha1s_older[sha1] = changeset_id, next_sha1
                sha1s_newer[next_sha1] = changeset_id, sha1
                sha1 = next_sha1
            else:
                break
        sha1 = new_sha1
        while True:
            changeset_id, next_sha1 = getFileChild(sha1)
            if changeset_id:
                sha1s_newer[sha1] = changeset_id, next_sha1
                sha1 = next_sha1
            else:
                break
        commentchainlines_values = []
        processed = set()

        def searchOrigin(changeset_id, sha1, search_space, first_line, last_line):
            # Walk the chain translating the commented line range until the
            # range is modified or the walk leaves the search space.
            try:
                while sha1 not in processed:
                    processed.add(sha1)
                    changeset_id, next_sha1 = search_space[sha1]
                    changeset = changeset_load.loadChangeset(db, review.repository, changeset_id, filtered_file_ids=set([file_id]))
                    if len(changeset.child.parents) > 1:
                        break
                    # NOTE(review): `forward` is not defined anywhere in this
                    # function — unless it exists at module level, this raises
                    # NameError on the first iteration and the bare `except`
                    # below silently swallows it.  TODO confirm.
                    verdict, next_first_line, next_last_line = updateCommentChain(first_line, last_line, changeset.files[0].chunks, forward)
                    if verdict == "modified":
                        break
                    sha1 = next_sha1
                    first_line = next_first_line
                    last_line = next_last_line
            except:
                pass
            return changeset_id, sha1, first_line, last_line

        first_line = offset
        last_line = offset + count - 1
        if origin == 'old':
            changeset_id, sha1, first_line, last_line = searchOrigin(primary_changeset_id, old_sha1, sha1s_older, first_line, last_line)
            commit_id = diff.Changeset.fromId(db, review.repository, changeset_id).parent.id
        else:
            # NOTE(review): both branches search `sha1s_older` — origin search
            # always walks towards older versions regardless of side.
            changeset_id, sha1, first_line, last_line = searchOrigin(primary_changeset_id, new_sha1, sha1s_older, first_line, last_line)
            commit_id = diff.Changeset.fromId(db, review.repository, changeset_id).child.id
        commentchainlines_values.append((user.id, commit_id, sha1, first_line, last_line))
        processed = set()
        processed.add(sha1)
        # Propagate the (possibly relocated) range forward through every newer
        # version, stopping when the commented lines were modified.
        while sha1 in sha1s_newer:
            changeset_id, sha1 = sha1s_newer[sha1]
            if sha1 in processed:
                break
            else:
                processed.add(sha1)
            changeset = changeset_load.loadChangeset(db, review.repository, changeset_id, filtered_file_ids=set([file_id]))
            if len(changeset.child.parents) != 1:
                # Merge commit: the stored changeset isn't usable; re-diff.
                chunks = diff.parse.parseDifferences(review.repository, from_commit=changeset.parent, to_commit=changeset.child, selected_path=dbutils.describe_file(db, file_id)).chunks
            else:
                chunks = changeset.files[0].chunks
            verdict, first_line, last_line = updateCommentChain(first_line, last_line, chunks)
            if verdict == "transfer":
                commentchainlines_values.append((user.id, changeset.child.getId(db), sha1, first_line, last_line))
            else:
                break
        cursor.execute("INSERT INTO commentchains (review, uid, type, origin, file, first_commit, last_commit) VALUES (%s, %s, %s, %s, %s, %s, %s) RETURNING id", [review.id, user.id, chain_type, origin, file_id, parent_id, child_id])
        chain_id = cursor.fetchone()[0]
        try:
            cursor.executemany("INSERT INTO commentchainlines (chain, uid, commit, sha1, first_line, last_line) VALUES (%s, %s, %s, %s, %s, %s)", [(chain_id,) + values for values in commentchainlines_values])
        except:
            # Python 2 raise syntax; re-raises with the values for debugging
            # but discards the original exception and traceback.
            raise Exception, repr(commentchainlines_values)
    elif commit_id is not None:
        # Commit-message comment: one line range against the commit itself.
        commit = gitutils.Commit.fromId(db, review.repository, commit_id)
        cursor.execute("INSERT INTO commentchains (review, uid, type, first_commit, last_commit) VALUES (%s, %s, %s, %s, %s) RETURNING id", [review.id, user.id, chain_type, commit_id, commit_id])
        chain_id = cursor.fetchone()[0]
        cursor.execute("INSERT INTO commentchainlines (chain, uid, commit, sha1, first_line, last_line) VALUES (%s, %s, %s, %s, %s, %s)", (chain_id, user.id, commit_id, commit.sha1, offset, offset + count - 1))
    else:
        # General review comment, not attached to any file or commit.
        cursor.execute("INSERT INTO commentchains (review, uid, type) VALUES (%s, %s, %s) RETURNING id", [review.id, user.id, chain_type])
        chain_id = cursor.fetchone()[0]
    # Associate the author, all review owners, and (for file comments) every
    # user whose filters match the file, with the new chain.
    commentchainusers = set([user.id] + map(int, review.owners))
    if file_id is not None:
        filters = Filters()
        filters.load(db, review=review)
        for user_id in filters.listUsers(db, file_id):
            commentchainusers.add(user_id)
    cursor.executemany("INSERT INTO commentchainusers (chain, uid) VALUES (%s, %s)", [(chain_id, user_id) for user_id in commentchainusers])
    return chain_id
def createCommentChain(db, user, review, chain_type, commit_id=None, origin=None, file_id=None, parent_id=None, child_id=None, old_sha1=None, new_sha1=None, offset=None, count=None):
    """Create a comment chain ("issue" or "note") in `review` and return its id.

    NOTE(review): this is a byte-for-byte duplicate of a definition that
    appears earlier in the file; being later, this copy is the one that takes
    effect.  One of the two should be deleted.

    Addressing modes, chosen by the arguments:
      * file_id with parent_id == child_id (or no parent): comment on a single
        file version; requires that the file was changed in the review.
      * file_id with a parent/child commit pair: comment on a diff; the line
        range is propagated back to its origin and forward through later
        versions of the file, one commentchainlines row per clean transfer.
      * commit_id only: comment on a commit message.
      * none of the above: general review comment.

    Raises OperationFailure if the review is closed (for issues) or if the
    single-version mode is used on a file the review never changed.
    """
    if chain_type == "issue" and review.state != "open":
        raise OperationFailure(code="reviewclosed", title="Review is closed!", message="You need to reopen the review before you can raise new issues.")
    cursor = db.cursor()
    if file_id is not None and (parent_id == child_id or parent_id is None):
        # Single-version mode requires a reviewed changeset that modified the
        # file (neither side being the all-zero null SHA-1).
        cursor.execute("""SELECT 1 FROM reviewchangesets JOIN fileversions USING (changeset) WHERE reviewchangesets.review=%s AND fileversions.file=%s AND fileversions.old_sha1!='0000000000000000000000000000000000000000' AND fileversions.new_sha1!='0000000000000000000000000000000000000000'""", (review.id, file_id))
        if cursor.fetchone():
            # Unsupported case: direct the user to an equivalent diff view.
            cursor.execute("""SELECT parent, child FROM changesets JOIN reviewchangesets ON (reviewchangesets.changeset=changesets.id) JOIN fileversions ON (fileversions.changeset=changesets.id) WHERE fileversions.file=%s AND fileversions.new_sha1=%s""", (file_id, new_sha1))
            rows = cursor.fetchall()
            if not rows:
                cursor.execute("""SELECT parent, child FROM changesets JOIN reviewchangesets ON (reviewchangesets.changeset=changesets.id) JOIN fileversions ON (fileversions.changeset=changesets.id) WHERE fileversions.file=%s AND fileversions.old_sha1=%s""", (file_id, new_sha1))
                rows = cursor.fetchall()
            parent = child = None
            # Prefer a changeset whose child is the commented commit; fall back
            # to one whose parent is.
            for row_parent_id, row_child_id in rows:
                if row_child_id == child_id:
                    parent = gitutils.Commit.fromId(db, review.repository, row_parent_id)
                    child = gitutils.Commit.fromId(db, review.repository, row_child_id)
                    break
                elif row_parent_id == child_id and parent is None:
                    parent = gitutils.Commit.fromId(db, review.repository, row_parent_id)
                    child = gitutils.Commit.fromId(db, review.repository, row_child_id)
            if parent and child:
                url = "/%s/%s..%s?review=%d&file=%d" % (review.repository.name, parent.sha1[:8], child.sha1[:8], review.id, file_id)
                link = ("<p>The link below goes to a diff that can be use to create the comment:</p>" + "<p style='padding-left: 2em'><a href='%s'>%s%s</a></p>") % (url, dbutils.getURLPrefix(db), url)
            else:
                link = ""
            raise OperationFailure(code="notsupported", title="File changed in review", message=("<p>Due to limitations in the code used to create comments, " + "it's only possible to create comments via a diff view if " + "the commented file has been changed in the review.</p>" + link), is_html=True)
        cursor.execute("""INSERT INTO commentchains (review, uid, type, file, first_commit, last_commit) VALUES (%s, %s, %s, %s, %s, %s) RETURNING id""", (review.id, user.id, chain_type, file_id, child_id, child_id))
        chain_id = cursor.fetchone()[0]
        cursor.execute("""INSERT INTO commentchainlines (chain, uid, commit, sha1, first_line, last_line) VALUES (%s, %s, %s, %s, %s, %s)""", (chain_id, user.id, child_id, new_sha1, offset, offset + count - 1))
    elif file_id is not None:
        # Diff mode: build maps of the file's version chain within the review.
        parents_returned = set()

        def getFileParent(new_sha1):
            # (changeset_id, old_sha1) for the changeset producing `new_sha1`,
            # or (None, None) on no match or when a cycle is detected.
            cursor.execute("""SELECT changesets.id, fileversions.old_sha1 FROM changesets, reviewchangesets, fileversions WHERE reviewchangesets.review=%s AND reviewchangesets.changeset=changesets.id AND fileversions.changeset=changesets.id AND fileversions.file=%s AND fileversions.new_sha1=%s""", [review.id, file_id, new_sha1])
            try:
                changeset_id, old_sha1 = cursor.fetchone()
                if old_sha1 in parents_returned:
                    return None, None
                parents_returned.add(old_sha1)
                return changeset_id, old_sha1
            except:
                # NOTE(review): bare except — also hides unexpected errors.
                return None, None

        children_returned = set()

        def getFileChild(old_sha1):
            # Mirror of getFileParent, walking towards newer versions.
            cursor.execute("""SELECT changesets.id, fileversions.new_sha1 FROM changesets, reviewchangesets, fileversions WHERE reviewchangesets.review=%s AND reviewchangesets.changeset=changesets.id AND fileversions.changeset=changesets.id AND fileversions.file=%s AND fileversions.old_sha1=%s""", [review.id, file_id, old_sha1])
            try:
                changeset_id, new_sha1 = cursor.fetchone()
                if new_sha1 in children_returned:
                    return None, None
                children_returned.add(new_sha1)
                return changeset_id, new_sha1
            except:
                return None, None

        # Locate the changeset the comment was actually made against.
        cursor.execute("""SELECT changesets.id FROM changesets, reviewchangesets, fileversions WHERE reviewchangesets.review=%s AND reviewchangesets.changeset=changesets.id AND changesets.child=%s AND fileversions.changeset=changesets.id AND fileversions.file=%s AND fileversions.old_sha1=%s AND fileversions.new_sha1=%s""", [review.id, child_id, file_id, old_sha1, new_sha1])
        row = cursor.fetchone()
        if not row:
            # Fall back to matching on just the commented side's SHA-1.
            if origin == "old":
                cursor.execute("""SELECT changesets.id FROM changesets, reviewchangesets, fileversions WHERE reviewchangesets.review=%s AND reviewchangesets.changeset=changesets.id AND fileversions.changeset=changesets.id AND fileversions.file=%s AND fileversions.old_sha1=%s""", [review.id, file_id, old_sha1])
            else:
                cursor.execute("""SELECT changesets.id FROM changesets, reviewchangesets, fileversions WHERE reviewchangesets.review=%s AND reviewchangesets.changeset=changesets.id AND fileversions.changeset=changesets.id AND fileversions.file=%s AND fileversions.new_sha1=%s""", [review.id, file_id, new_sha1])
            row = cursor.fetchone()
        primary_changeset_id = row[0]
        # sha1 -> (changeset_id, adjacent sha1), towards older/newer versions.
        sha1s_older = {}
        sha1s_newer = {old_sha1: (primary_changeset_id, new_sha1)}
        sha1 = new_sha1
        while True:
            changeset_id, next_sha1 = getFileParent(sha1)
            if changeset_id:
                sha1s_older[sha1] = changeset_id, next_sha1
                sha1s_newer[next_sha1] = changeset_id, sha1
                sha1 = next_sha1
            else:
                break
        sha1 = new_sha1
        while True:
            changeset_id, next_sha1 = getFileChild(sha1)
            if changeset_id:
                sha1s_newer[sha1] = changeset_id, next_sha1
                sha1 = next_sha1
            else:
                break
        commentchainlines_values = []
        processed = set()

        def searchOrigin(changeset_id, sha1, search_space, first_line, last_line):
            # Walk the chain translating the commented line range until the
            # range is modified or the walk leaves the search space.
            try:
                while sha1 not in processed:
                    processed.add(sha1)
                    changeset_id, next_sha1 = search_space[sha1]
                    changeset = changeset_load.loadChangeset(db, review.repository, changeset_id, filtered_file_ids=set([file_id]))
                    if len(changeset.child.parents) > 1:
                        break
                    # NOTE(review): `forward` is not defined in this function;
                    # unless it exists at module level this raises NameError,
                    # silently swallowed by the bare except.  TODO confirm.
                    verdict, next_first_line, next_last_line = updateCommentChain(first_line, last_line, changeset.files[0].chunks, forward)
                    if verdict == "modified":
                        break
                    sha1 = next_sha1
                    first_line = next_first_line
                    last_line = next_last_line
            except:
                pass
            return changeset_id, sha1, first_line, last_line

        first_line = offset
        last_line = offset + count - 1
        if origin == 'old':
            changeset_id, sha1, first_line, last_line = searchOrigin(primary_changeset_id, old_sha1, sha1s_older, first_line, last_line)
            commit_id = diff.Changeset.fromId(db, review.repository, changeset_id).parent.id
        else:
            # NOTE(review): both branches search `sha1s_older` — origin search
            # always walks towards older versions regardless of side.
            changeset_id, sha1, first_line, last_line = searchOrigin(primary_changeset_id, new_sha1, sha1s_older, first_line, last_line)
            commit_id = diff.Changeset.fromId(db, review.repository, changeset_id).child.id
        commentchainlines_values.append((user.id, commit_id, sha1, first_line, last_line))
        processed = set()
        processed.add(sha1)
        # Propagate the (possibly relocated) range forward through newer
        # versions, stopping once the commented lines were modified.
        while sha1 in sha1s_newer:
            changeset_id, sha1 = sha1s_newer[sha1]
            if sha1 in processed:
                break
            else:
                processed.add(sha1)
            changeset = changeset_load.loadChangeset(db, review.repository, changeset_id, filtered_file_ids=set([file_id]))
            if len(changeset.child.parents) != 1:
                # Merge commit: the stored changeset isn't usable; re-diff.
                chunks = diff.parse.parseDifferences(review.repository, from_commit=changeset.parent, to_commit=changeset.child, selected_path=dbutils.describe_file(db, file_id)).chunks
            else:
                chunks = changeset.files[0].chunks
            verdict, first_line, last_line = updateCommentChain(first_line, last_line, chunks)
            if verdict == "transfer":
                commentchainlines_values.append((user.id, changeset.child.getId(db), sha1, first_line, last_line))
            else:
                break
        cursor.execute("INSERT INTO commentchains (review, uid, type, origin, file, first_commit, last_commit) VALUES (%s, %s, %s, %s, %s, %s, %s) RETURNING id", [review.id, user.id, chain_type, origin, file_id, parent_id, child_id])
        chain_id = cursor.fetchone()[0]
        try:
            cursor.executemany("INSERT INTO commentchainlines (chain, uid, commit, sha1, first_line, last_line) VALUES (%s, %s, %s, %s, %s, %s)", [(chain_id, ) + values for values in commentchainlines_values])
        except:
            # Python 2 raise syntax; keeps the values for debugging but
            # discards the original exception and traceback.
            raise Exception, repr(commentchainlines_values)
    elif commit_id is not None:
        # Commit-message comment: one line range against the commit itself.
        commit = gitutils.Commit.fromId(db, review.repository, commit_id)
        cursor.execute("INSERT INTO commentchains (review, uid, type, first_commit, last_commit) VALUES (%s, %s, %s, %s, %s) RETURNING id", [review.id, user.id, chain_type, commit_id, commit_id])
        chain_id = cursor.fetchone()[0]
        cursor.execute("INSERT INTO commentchainlines (chain, uid, commit, sha1, first_line, last_line) VALUES (%s, %s, %s, %s, %s, %s)", (chain_id, user.id, commit_id, commit.sha1, offset, offset + count - 1))
    else:
        # General review comment, not attached to any file or commit.
        cursor.execute("INSERT INTO commentchains (review, uid, type) VALUES (%s, %s, %s) RETURNING id", [review.id, user.id, chain_type])
        chain_id = cursor.fetchone()[0]
    # Associate the author, all review owners, and (for file comments) every
    # user whose filters match the file, with the new chain.
    commentchainusers = set([user.id] + map(int, review.owners))
    if file_id is not None:
        filters = Filters()
        filters.load(db, review=review)
        for user_id in filters.listUsers(db, file_id):
            commentchainusers.add(user_id)
    cursor.executemany("INSERT INTO commentchainusers (chain, uid) VALUES (%s, %s)", [(chain_id, user_id) for user_id in commentchainusers])
    return chain_id
def renderCodeCommentChain(db, target, user, review, chain, context_lines=3, compact=False, tabify=False, original=False, changeset=None, linkify=False):
    """Render a code comment chain (commented lines + comments) into `target`.

    Shows the commented line range of the file with `context_lines` of context,
    either as a plain excerpt of one file version (when the chain spans a single
    commit and is not "addressed") or as a diff between two versions, and hooks
    up the client-side comment markers.

    `original=True` forces rendering against the originally commented version
    even for addressed chains; `changeset`, if given, must match the one this
    function would otherwise create.
    """
    repository = review.repository
    old_sha1 = None
    new_sha1 = None
    # Sentinels identifying which side of a diff the chain lives on.
    old = 1
    new = 2
    cursor = db.cursor()
    file_id = chain.file_id
    file_path = dbutils.describe_file(db, file_id)
    if (chain.state != "addressed" or original) and chain.first_commit == chain.last_commit:
        # Single-version display: fabricate one all-CONTEXT macro chunk holding
        # the commented lines plus context.
        sha1 = chain.first_commit.getFileSHA1(file_path)
        cursor.execute("SELECT first_line, last_line FROM commentchainlines WHERE chain=%s AND sha1=%s", (chain.id, sha1))
        first_line, last_line = cursor.fetchone()
        file = diff.File(file_id, file_path, sha1, sha1, review.repository, chunks=[])
        file.loadNewLines(True)
        start = max(1, first_line - context_lines)
        end = min(file.newCount(), last_line + context_lines)
        count = end + 1 - start
        lines = file.newLines(True)
        # Line numbers are 1-based; the lines list is 0-based, hence the -1.
        lines = [diff.Line(diff.Line.CONTEXT, start + index, lines[start + index - 1], start + index, lines[start + index - 1]) for index in range(count)]
        file.macro_chunks = [diff.MacroChunk([], lines)]
        use = new
        display_type = "new"
        commit_url_component = "sha1=%s" % chain.first_commit.sha1
    else:
        # Diff display: pick the parent/child pair to diff.
        if chain.state == "addressed" and not original and review.containsCommit(db, chain.addressed_by):
            # Show the change that addressed the chain; the commented lines
            # are on the old side of that change.
            parent = gitutils.Commit.fromSHA1(db, review.repository, chain.addressed_by.parents[0])
            child = chain.addressed_by
            use = old
        else:
            parent = chain.first_commit
            child = chain.last_commit
            if parent == child:
                if chain.origin == "old":
                    # Chain was made on the old side: diff against the next
                    # reviewed changeset, or fall back to the commit's parent.
                    cursor.execute("""SELECT changesets.child FROM changesets, reviewchangesets WHERE changesets.parent=%s AND reviewchangesets.changeset=changesets.id AND reviewchangesets.review=%s""", [child.getId(db), review.id])
                    try:
                        child = gitutils.Commit.fromId(db, repository, cursor.fetchone()[0])
                    except:
                        parent = gitutils.Commit.fromSHA1(db, repository, child.parents[0])
                else:
                    parent = gitutils.Commit.fromSHA1(db, repository, child.parents[0])
            if chain.origin == "old":
                use = old
            else:
                use = new
        # A direct parent->child step can be loaded as a single commit;
        # anything else is an explicit two-commit diff.
        if parent.sha1 in child.parents and len(child.parents) == 1:
            commit = child
            from_commit = None
            to_commit = None
        else:
            commit = None
            from_commit = parent
            to_commit = child
        if changeset:
            # Caller-supplied changeset must match what we would have created.
            assert ((changeset.parent == from_commit and changeset.child == to_commit) if commit is None else (changeset.parent.sha1 == commit.parents[0] and changeset.child == commit))
            assert changeset.getFile(file_id)
        else:
            changeset = changeset_utils.createChangeset(db, user, repository, commit=commit, from_commit=from_commit, to_commit=to_commit, filtered_file_ids=set((file_id, )))[0]
        file = changeset.getFile(file_id)
        if not file:
            if chain.state == "addressed" and not original:
                # Retry against the originally commented version.
                renderCodeCommentChain(db, target, user, review, chain, context_lines, compact, tabify, original=True)
                return
            else:
                # NOTE(review): bare `raise` with no active exception being
                # handled — this itself raises TypeError/RuntimeError rather
                # than any meaningful error.  Presumably a placeholder.
                raise
        # Commit so that the diff and its analysis, written to the database by
        # createChangeset(), can be reused later.
        db.commit()
        old_sha1 = file.old_sha1
        new_sha1 = file.new_sha1
        # If the chosen side of the diff doesn't exist (null SHA-1: file was
        # added or removed), switch to the side that does.
        if use == old and old_sha1 == '0' * 40:
            use = new
        elif use == new and new_sha1 == '0' * 40:
            use = old
        if use == old:
            sha1 = old_sha1
        else:
            sha1 = new_sha1
        cursor.execute("SELECT first_line, last_line FROM commentchainlines WHERE chain=%s AND sha1=%s", [chain.id, sha1])
        first_line, last_line = cursor.fetchone()

        def readChunks():
            # NOTE(review): appears unused within this function.
            return [diff.Chunk(delete_offset, delete_count, insert_offset, insert_count, analysis=analysis, is_whitespace=is_whitespace) for delete_offset, delete_count, insert_offset, insert_count, analysis, is_whitespace in cursor.fetchall()]

        first_context_line = first_line - context_lines
        last_context_line = last_line + context_lines

        def includeChunk(chunk):
            # True if the chunk (on the commented side) overlaps the context
            # window.  NOTE(review): appears unused within this function.
            if use == old:
                chunk_first_line, chunk_last_line = chunk.delete_offset, chunk.delete_offset + chunk.delete_count - 1
            else:
                chunk_first_line, chunk_last_line = chunk.insert_offset, chunk.insert_offset + chunk.insert_count - 1
            return chunk_last_line >= first_context_line and chunk_first_line <= last_context_line

        def lineFilter(line):
            # Keep only lines whose number (on the commented side) falls inside
            # the context window, excluding an inserted/deleted line that would
            # sit exactly at the window's top edge on the opposite side.
            if use == old:
                linenr = line.old_offset
                if linenr == first_context_line and line.type == diff.Line.INSERTED:
                    return False
            else:
                linenr = line.new_offset
                if linenr == first_context_line and line.type == diff.Line.DELETED:
                    return False
            return first_context_line <= linenr <= last_context_line

        file.loadOldLines(True)
        file.loadNewLines(True)
        context = diff.context.ContextLines(file, file.chunks, [(chain, use == old)])
        file.macro_chunks = context.getMacroChunks(context_lines, highlight=True, lineFilter=lineFilter)
        try:
            macro_chunk = file.macro_chunks[0]
        except:
            # NOTE(review): raising a string is invalid (string exceptions were
            # removed); at runtime this line itself fails with a TypeError.
            # Presumably meant: raise Exception(repr((parent.sha1, child.sha1)))
            raise repr((parent.sha1, child.sha1))
        display_type = "both"
        if chain.state != "addressed":
            # If every line in the excerpt is of one kind (all context, or all
            # on the commented side), collapse the display to a single column.
            first_line_type = macro_chunk.lines[0].type
            if first_line_type == diff.Line.CONTEXT or (use == old and first_line_type == diff.Line.DELETED) or (use == new and first_line_type == diff.Line.INSERTED):
                for line in macro_chunk.lines[1:]:
                    if first_line_type != line.type:
                        break
                else:
                    display_type = "old" if use == old else "new"
        commit_url_component = "from=%s&to=%s" % (parent.sha1, child.sha1)

    def renderHeaderLeft(db, target, file):
        # Chain title linking to the stand-alone comment page.
        target.span("comment-chain-title").a(href="/showcomment?chain=%d" % chain.id).text(chain.title())

    def renderHeaderRight(db, target, file):
        # File path linking into the full commit/diff view, anchored at the
        # commented line ("o"/"n" selects the old/new side).
        side = use == old and "o" or "n"
        uri = "showcommit?%s&review=%d&file=%d#f%d%s%d" % (commit_url_component, review.id, file.id, file.id, side, first_line)
        target.span("filename").a(href=uri).text(file.path)

    def renderCommentsLocal(db, target, **kwargs):
        # Place comments under the commented side's column.
        if display_type == "both":
            if use == old:
                position = "left"
            else:
                position = "right"
        else:
            position = "center"
        renderComments(db, target, user, chain, position, linkify)

    def lineId(base):
        return "c%d%s" % (chain.id, base)

    def lineCellId(base):
        return "c%d%s" % (chain.id, base)

    target.addInternalScript("commentChainById[%d] = %s;" % (chain.id, chain.getJSConstructor(sha1)), here=True)
    changeset_html.renderFile(db, target, user, review, file, options={"support_expand": False, "display_type": display_type, "header_left": renderHeaderLeft, "header_right": renderHeaderRight, "content_after": renderCommentsLocal, "show": True, "expand": True, "line_id": lineId, "line_cell_id": lineCellId, "compact": compact, "tabify": tabify, "include_deleted": True})
    # Wire up client-side markers spanning the first..last commented line.
    data = (chain.id, file_id, use == old and "o" or "n", first_line, chain.id, file_id, use == old and "o" or "n", last_line, htmlutils.jsify(chain.type), htmlutils.jsify(chain.state), chain.id)
    target.addInternalScript("""$(document).ready(function () { var markers = new CommentMarkers(null); markers.setLines(document.getElementById('c%df%d%s%d'), document.getElementById('c%df%d%s%d')); markers.setType(%s, %s); commentChainById[%d].markers = markers; });""" % data, here=True)
def formatFiles(files): return diff.File.eliminateCommonPrefixes(sorted([dbutils.describe_file(db, file_id) for file_id in files]))
def processFile(file_id): components = dbutils.describe_file(db, file_id).split("/") directories, files = root_directories, root_files for directory_name in components[:-1]: directories, files = directories.setdefault(directory_name, ({}, {})) files[components[-1]] = file_id
def renderHome(req, db, user): if user.isAnonymous(): raise page.utils.NeedLogin, req cursor = db.cursor() readonly = req.getParameter("readonly", "yes" if user.name != req.user else "no") == "yes" repository = req.getParameter("repository", None, gitutils.Repository.FromParameter(db)) if not repository: repository = user.getDefaultRepository(db) title_fullname = user.fullname if title_fullname[-1] == "s": title_fullname += "'" else: title_fullname += "'s" cursor.execute("SELECT email FROM usergitemails WHERE uid=%s ORDER BY email ASC", (user.id,)) gitemails = ", ".join([email for (email,) in cursor]) document = htmlutils.Document(req) html = document.html() head = html.head() body = html.body() page.utils.generateHeader(body, db, user, current_page="home") document.addExternalStylesheet("resource/home.css") document.addExternalScript("resource/home.js") if repository: document.addInternalScript(repository.getJS()) else: document.addInternalScript("var repository = null;") if user.name != req.user and req.getUser(db).hasRole(db, "administrator"): document.addInternalScript("var administrator = true;") else: document.addInternalScript("var administrator = false;") document.addInternalScript(user.getJS()) document.addInternalScript("user.gitEmails = %s;" % htmlutils.jsify(gitemails)) document.setTitle("%s Home" % title_fullname) target = body.div("main") basic = target.table("paleyellow basic", align="center") basic.tr().td("h1", colspan=3).h1().text("%s Home" % title_fullname) def row(heading, value, help=None, status_id=None): main_row = basic.tr("line") main_row.td("heading").text("%s:" % heading) value_cell = main_row.td("value", colspan=2) if callable(value): value(value_cell) else: value_cell.text(value) basic.tr("help").td("help", colspan=3).text(help) def renderFullname(target): if readonly: target.text(user.fullname) else: target.input("value", id="user_fullname", value=user.fullname) target.span("status", id="status_fullname") 
target.button(onclick="saveFullname();").text("Save") target.button(onclick="resetFullname();").text("Reset") def renderEmail(target): if readonly: target.text(user.email) else: target.input("value", id="user_email", value=user.email) target.span("status", id="status_email") target.button(onclick="saveEmail();").text("Save") target.button(onclick="resetEmail();").text("Reset") def renderGitEmails(target): if readonly: target.text(gitemails) else: target.input("value", id="user_gitemails", value=gitemails) target.span("status", id="status_gitemails") target.button(onclick="saveGitEmails();").text("Save") target.button(onclick="resetGitEmails();").text("Reset") def renderPassword(target): target.text("****") if not readonly: target.button(onclick="changePassword();").text("Change") row("User ID", str(user.id)) row("User Name", user.name) row( "Display Name", renderFullname, "This is the name used when displaying commits or comments.", status_id="status_fullname", ) row("Email", renderEmail, "This is the primary email address, to which emails are sent.", status_id="status_email") row( "Git Emails", renderGitEmails, "These email addresses are used to map Git commits to the user.", status_id="status_gitemails", ) if configuration.base.AUTHENTICATION_MODE == "critic": row("Password", renderPassword) filters = target.table("paleyellow filters", align="center") row = filters.tr() row.td("h1", colspan=2).h1().text("Filters") repositories = row.td("repositories", colspan=2).select() if not repository: repositories.option(value="-", selected="selected", disabled="disabled").text("Select a repository") cursor.execute("SELECT id, path FROM repositories ORDER BY id") for id, path in cursor: repositories.option(value=id, selected="selected" if repository and id == repository.id else None).text( "%s:%s" % (configuration.base.HOSTNAME, path) ) help = filters.tr().td("help", colspan=4) help.p().text( "Filters is the system's mechanism to connect reviews to users. 
When a review is created or updated, a set of users to associate with the review is calculated by matching the files modified by each commit in the review to the filters set up by users. Each filter selects one file or one directory (and affects all sub-directories and files,) and only the most specific filter per file and user is used when associating users with reviews." ) p = help.p() p.text("There are two types of filters: ") p.code().text("reviewer") p.text(" and ") p.code().text("watcher") p.text(". All files matched by a ") p.code().text("reviewer") p.text( " filter for a user are added to the user's to-do list, meaning the user needs to review all changes made to that file before the review is finished. However, if more than one user is matched as a reviewer for a file, only one of them needs to review the changes. A user associated with a review only by " ) p.code().text("watcher") p.text(" filters will simply receive notifications relating to the review, but isn't required to do anything.") p = help.p() p.text("For a ") p.code().text("reviewer") p.text( ' type filter, a set of "delegates" can also be defined. The delegate field should be a comma-separated list of user names. Delegates are automatically made reviewers of changes by you in the filtered files (since you can\'t review them yourself) regardless of their own filters.' ) p = help.p() p.strong().text("Note: A filter names a directory only if the path ends with a slash ('/').") p.text( " If the path doesn't end with a slash, the filter would name a specific file even if the path is a directory in some or all versions of the actual tree. However, you'll get a warning if you try to add a filter for a file whose path is registered as a directory in the database." 
) if repository: headings = filters.tr("headings") headings.td("heading type").text("Type") headings.td("heading path").text("Path") headings.td("heading delegate").text("Delegate") headings.td("heading buttons") cursor.execute( "SELECT directory, file, type, delegate FROM filters WHERE uid=%s AND repository=%s", [user.id, repository.id], ) all_filters = [] for directory_id, file_id, filter_type, delegate in cursor.fetchall(): if file_id == 0: path = dbutils.describe_directory(db, directory_id) + "/" else: path = dbutils.describe_file(db, file_id) all_filters.append((path, directory_id, file_id, filter_type, delegate)) all_filters.sort() empty = filters.tr("empty").td("empty", colspan=4).span(id="empty").text("No filters configured") if filters: empty.setAttribute("style", "display: none") for path, directory_id, file_id, filter_type, delegate in all_filters: row = filters.tr("filter") row.td("filter type").text(filter_type.capitalize()) row.td("filter path").text(path) row.td("filter delegate").text(delegate) buttons = row.td("filter buttons") if readonly: buttons.text() else: buttons.button(onclick="editFilter(this, %d, %d, false);" % (directory_id, file_id)).text("Edit") buttons.button(onclick="deleteFilter(this, %d, %d);" % (directory_id, file_id)).text("Delete") if not readonly: filters.tr("buttons").td("buttons", colspan=4).button(onclick="addFilter(this);").text("Add Filter") return document
def commitRangeFromReview(db, user, review, filter, file_ids):
    # Compute which range of commits in |review| to display, restricted by
    # |filter| ("pending", "reviewable", "relevant" or "files").
    #
    # Returns a 4-tuple (from_sha1_or_commit, to_sha1, all_commits,
    # listed_commits).  NOTE(review): in the single-commit early return below
    # the third element is a list of commit *ids*, whereas the later returns
    # use gitutils.Commit objects — callers apparently tolerate both; confirm.
    #
    # 'edges' defaults to the cursor itself (iterating it yields the
    # (parent, child) rows of the executed query); only the "relevant"
    # branch rebinds it to an explicitly filtered set.
    edges = cursor = db.cursor()
    if filter == "pending":
        # Changesets with changes still pending review by |user|.
        cursor.execute("""SELECT DISTINCT changesets.parent, changesets.child FROM changesets JOIN reviewfiles ON (reviewfiles.changeset=changesets.id) JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id) WHERE reviewfiles.review=%s AND reviewuserfiles.uid=%s AND reviewfiles.state='pending'""",
                       (review.id, user.id))
    elif filter == "reviewable":
        # Changesets with any changes assigned to |user| (pending or not).
        cursor.execute("""SELECT DISTINCT changesets.parent, changesets.child FROM changesets JOIN reviewfiles ON (reviewfiles.changeset=changesets.id) JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id) WHERE reviewfiles.review=%s AND reviewuserfiles.uid=%s""",
                       (review.id, user.id))
    elif filter == "relevant":
        # Changesets touching files assigned to |user| or matched by the
        # user's review filters.
        filters = review_filters.Filters()
        filters.load(db, review=review, user=user)
        cursor.execute("""SELECT DISTINCT changesets.parent, changesets.child, reviewfiles.file, reviewuserfiles.uid IS NOT NULL FROM changesets JOIN reviewfiles ON (reviewfiles.changeset=changesets.id) LEFT OUTER JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id AND reviewuserfiles.uid=%s) WHERE reviewfiles.review=%s""",
                       (user.id, review.id))
        edges = set()
        for parent_id, child_id, file_id, is_reviewer in cursor:
            if is_reviewer or filters.isRelevant(db, user, file_id):
                edges.add((parent_id, child_id))
    elif filter == "files":
        # Changesets touching any of the explicitly requested files.
        assert len(file_ids) != 0
        cursor.execute("""SELECT DISTINCT changesets.parent, changesets.child FROM changesets JOIN reviewchangesets ON (reviewchangesets.changeset=changesets.id) JOIN fileversions ON (fileversions.changeset=changesets.id) WHERE reviewchangesets.review=%s AND fileversions.file=ANY (%s)""",
                       (review.id, list(file_ids)))
    else:
        raise Exception, "invalid filter: %s" % filter
    listed_commits = set()
    with_pending = set()
    for parent_id, child_id in edges:
        listed_commits.add(child_id)
        with_pending.add((parent_id, child_id))
    if len(listed_commits) == 1:
        # Single commit: show it as a plain commit, not a range.
        # NOTE(review): relies on 'child_id' still being bound from the loop
        # above (valid since listed_commits is non-empty) — confirm intended.
        return None, gitutils.Commit.fromId(db, review.repository, child_id).sha1, list(listed_commits), listed_commits
    if filter in ("reviewable", "relevant", "files"):
        cursor.execute("SELECT child FROM changesets JOIN reviewchangesets ON (changeset=id) WHERE review=%s", (review.id,))
        all_commits = [gitutils.Commit.fromId(db, review.repository, commit_id) for (commit_id,) in cursor]
    # NOTE(review): for filter=="pending" with multiple commits,
    # 'all_commits' is never assigned before the returns below — looks like
    # it would raise NameError; confirm whether that path is reachable.
    commitset = CommitSet(review.branch.commits)
    tails = commitset.getFilteredTails(review.repository)
    if len(commitset) == 0:
        raise Exception, "empty commit-set"
    elif len(tails) > 1:
        # Multiple upstream tails: a plain A..B diff would include unrelated
        # upstream changes.  Check whether any tail's upstream changes
        # overlap the files actually changed in the review.
        ancestor = review.repository.getCommonAncestor(tails)
        paths = []
        cursor.execute("SELECT DISTINCT file FROM reviewfiles WHERE review=%s", (review.id,))
        files_in_review = set(file_id for (file_id,) in cursor)
        if filter == "files":
            files_in_review &= file_ids
        paths_in_review = set(dbutils.describe_file(db, file_id) for file_id in files_in_review)
        paths_in_upstreams = set()
        for tail in tails:
            paths_in_upstream = set(review.repository.run("diff", "--name-only", "%s..%s" % (ancestor, tail)).splitlines())
            paths_in_upstreams |= paths_in_upstream
            paths.append((tail, paths_in_upstream))
        overlapping_changes = paths_in_review & paths_in_upstreams
        if overlapping_changes:
            # Keep only tails whose *other* tails' upstream changes do not
            # overlap the review's files (for-else: no break => candidate).
            candidates = []
            for index1, data in enumerate(paths):
                for index2, (tail, paths_in_upstream) in enumerate(paths):
                    if index1 != index2 and paths_in_upstream & paths_in_review:
                        break
                else:
                    candidates.append(data)
        else:
            candidates = paths
        if not candidates:
            # No safe tail exists: offer a URL that shows the diff anyway,
            # anchored at the tail with the fewest upstream changes.
            paths.sort(cmp=lambda a, b: cmp(len(a[1]), len(b[1])))
            url = "/%s/%s..%s?file=%s" % (review.repository.name, paths[0][0][:8], review.branch.head.sha1[:8], ",".join(map(str, sorted(files_in_review))))
            message = """\
<p>It is not possible to generate a diff of the requested set of commits that contains only changes from those commits.</p>
<p>The following files would contain unrelated changes:<p>
<pre style='padding-left: 2em'>%s</pre>
<p>You can use the URL below if you want to view this diff anyway, including the unrelated changes.</p>
<pre style='padding-left: 2em'><a href='%s'>%s%s</a></pre>""" % ("\n".join(sorted(overlapping_changes)), url, dbutils.getURLPrefix(db), url)
            raise page.utils.DisplayMessage(title="Impossible Diff", body=message, review=review, html=True)
        else:
            # Prefer the candidate tail with the *most* upstream changes
            # excluded from the diff.
            candidates.sort(cmp=lambda a, b: cmp(len(b[1]), len(a[1])))
            return candidates[0][0], review.branch.head.sha1, all_commits, listed_commits
    elif len(tails) == 0:
        raise Exception, "impossible commit-set (%r)" % commitset
    # Exactly one tail: the simple case.
    return tails.pop(), review.branch.head.sha1, all_commits, listed_commits
if current_state == 'pending': pending_files.add(file_id) reviewable_files.add(file_id) profiler.check("reviewfiles query") for changeset in changesets: all_files_local = all_files.copy() for file in changeset.files: if file.id in all_files_local: all_files_local.remove(file.id) for file_id in all_files_local: if not file_ids or file_id in file_ids: changeset.files.append(diff.File(file_id, dbutils.describe_file(db, file_id), None, None, repository)) if review_filter == "pending": def isPending(file): return file.id in pending_files changeset.files = filter(isPending, changeset.files) elif review_filter == "reviewable": def isReviewable(file): return file.id in reviewable_files changeset.files = filter(isReviewable, changeset.files) elif review_filter == "relevant": filters = review_filters.Filters() filters.load(db, review=review, user=user) def isRelevant(file): if file.id in reviewable_files: return True
def renderHome(req, db, user): if user.isAnonymous(): raise page.utils.NeedLogin, req cursor = db.cursor() readonly = req.getParameter( "readonly", "yes" if user.name != req.user else "no") == "yes" repository = req.getParameter("repository", None, gitutils.Repository.FromParameter(db)) if not repository: repository = user.getDefaultRepository(db) title_fullname = user.fullname if title_fullname[-1] == 's': title_fullname += "'" else: title_fullname += "'s" cursor.execute( "SELECT email FROM usergitemails WHERE uid=%s ORDER BY email ASC", (user.id, )) gitemails = ", ".join([email for (email, ) in cursor]) document = htmlutils.Document(req) html = document.html() head = html.head() body = html.body() page.utils.generateHeader(body, db, user, current_page="home") document.addExternalStylesheet("resource/home.css") document.addExternalScript("resource/home.js") if repository: document.addInternalScript(repository.getJS()) else: document.addInternalScript("var repository = null;") if user.name != req.user and req.getUser(db).hasRole(db, "administrator"): document.addInternalScript("var administrator = true;") else: document.addInternalScript("var administrator = false;") document.addInternalScript(user.getJS()) document.addInternalScript("user.gitEmails = %s;" % htmlutils.jsify(gitemails)) document.setTitle("%s Home" % title_fullname) target = body.div("main") basic = target.table('paleyellow basic', align='center') basic.tr().td('h1', colspan=3).h1().text("%s Home" % title_fullname) def row(heading, value, help=None, status_id=None): main_row = basic.tr('line') main_row.td('heading').text("%s:" % heading) value_cell = main_row.td('value', colspan=2) if callable(value): value(value_cell) else: value_cell.text(value) basic.tr('help').td('help', colspan=3).text(help) def renderFullname(target): if readonly: target.text(user.fullname) else: target.input("value", id="user_fullname", value=user.fullname) target.span("status", id="status_fullname") 
target.button(onclick="saveFullname();").text("Save") target.button(onclick="resetFullname();").text("Reset") def renderEmail(target): if readonly: target.text(user.email) else: target.input("value", id="user_email", value=user.email) target.span("status", id="status_email") target.button(onclick="saveEmail();").text("Save") target.button(onclick="resetEmail();").text("Reset") def renderGitEmails(target): if readonly: target.text(gitemails) else: target.input("value", id="user_gitemails", value=gitemails) target.span("status", id="status_gitemails") target.button(onclick="saveGitEmails();").text("Save") target.button(onclick="resetGitEmails();").text("Reset") def renderPassword(target): target.text("****") if not readonly: target.button(onclick="changePassword();").text("Change") row("User ID", str(user.id)) row("User Name", user.name) row("Display Name", renderFullname, "This is the name used when displaying commits or comments.", status_id="status_fullname") row("Email", renderEmail, "This is the primary email address, to which emails are sent.", status_id="status_email") row("Git Emails", renderGitEmails, "These email addresses are used to map Git commits to the user.", status_id="status_gitemails") if configuration.base.AUTHENTICATION_MODE == "critic": row("Password", renderPassword) filters = target.table('paleyellow filters', align='center') row = filters.tr() row.td("h1", colspan=2).h1().text("Filters") repositories = row.td("repositories", colspan=2).select() if not repository: repositories.option(value="-", selected="selected", disabled="disabled").text("Select a repository") cursor.execute("SELECT id, path FROM repositories ORDER BY id") for id, path in cursor: repositories.option(value=id, selected="selected" if repository and id == repository.id else None).text( "%s:%s" % (configuration.base.HOSTNAME, path)) help = filters.tr().td("help", colspan=4) help.p().text( "Filters is the system's mechanism to connect reviews to users. 
When a review is created or updated, a set of users to associate with the review is calculated by matching the files modified by each commit in the review to the filters set up by users. Each filter selects one file or one directory (and affects all sub-directories and files,) and only the most specific filter per file and user is used when associating users with reviews." ) p = help.p() p.text("There are two types of filters: ") p.code().text("reviewer") p.text(" and ") p.code().text("watcher") p.text(". All files matched by a ") p.code().text("reviewer") p.text( " filter for a user are added to the user's to-do list, meaning the user needs to review all changes made to that file before the review is finished. However, if more than one user is matched as a reviewer for a file, only one of them needs to review the changes. A user associated with a review only by " ) p.code().text("watcher") p.text( " filters will simply receive notifications relating to the review, but isn't required to do anything." ) p = help.p() p.text("For a ") p.code().text("reviewer") p.text( " type filter, a set of \"delegates\" can also be defined. The delegate field should be a comma-separated list of user names. Delegates are automatically made reviewers of changes by you in the filtered files (since you can't review them yourself) regardless of their own filters." ) p = help.p() p.strong().text( "Note: A filter names a directory only if the path ends with a slash ('/')." ) p.text( " If the path doesn't end with a slash, the filter would name a specific file even if the path is a directory in some or all versions of the actual tree. However, you'll get a warning if you try to add a filter for a file whose path is registered as a directory in the database." 
) if repository: headings = filters.tr("headings") headings.td("heading type").text("Type") headings.td("heading path").text("Path") headings.td("heading delegate").text("Delegate") headings.td("heading buttons") cursor.execute( "SELECT directory, file, type, delegate FROM filters WHERE uid=%s AND repository=%s", [user.id, repository.id]) all_filters = [] for directory_id, file_id, filter_type, delegate in cursor.fetchall(): if file_id == 0: path = dbutils.describe_directory(db, directory_id) + "/" else: path = dbutils.describe_file(db, file_id) all_filters.append( (path, directory_id, file_id, filter_type, delegate)) all_filters.sort() empty = filters.tr("empty").td( "empty", colspan=4).span(id="empty").text("No filters configured") if filters: empty.setAttribute("style", "display: none") for path, directory_id, file_id, filter_type, delegate in all_filters: row = filters.tr("filter") row.td("filter type").text(filter_type.capitalize()) row.td("filter path").text(path) row.td("filter delegate").text(delegate) buttons = row.td("filter buttons") if readonly: buttons.text() else: buttons.button(onclick="editFilter(this, %d, %d, false);" % (directory_id, file_id)).text("Edit") buttons.button(onclick="deleteFilter(this, %d, %d);" % (directory_id, file_id)).text("Delete") if not readonly: filters.tr("buttons").td("buttons", colspan=4).button( onclick="addFilter(this);").text("Add Filter") return document
def createChangeset(db, user, repository, commit=None, from_commit=None, to_commit=None, rescan=False, reanalyze=False, conflicts=False, filtered_file_ids=None, review=None, do_highlight=True, load_chunks=True):
    """Load or create changeset(s) for a commit or a commit range.

    Either |commit| (direct/merge changeset; one changeset per parent) or
    |from_commit|/|to_commit| (direct or custom changeset) must be given.
    With conflicts=True a merge replay is looked up (or produced) and diffed
    against the merge commit.  Existing changesets are loaded from the
    database; missing ones are requested from the changeset service, except
    that a filtered range crossing a merge falls back to an in-process "thin"
    diff.  Returns a list of diff.Changeset objects, or None if a required
    merge replay could not be produced."""
    cursor = db.cursor()
    if conflicts:
        # Diff the merge commit against a replay of the merge, exposing
        # conflict resolutions.  Replays are cached in 'mergereplays'.
        assert commit and len(commit.parents) > 1
        cursor.execute("SELECT replay FROM mergereplays WHERE original=%s", (commit.getId(db),))
        row = cursor.fetchone()
        if row:
            replay = gitutils.Commit.fromId(db, repository, row[0])
        else:
            replay = repository.replaymerge(db, user, commit)
            if not replay:
                return None
            cursor.execute("INSERT INTO mergereplays (original, replay) VALUES (%s, %s)", (commit.getId(db), replay.getId(db)))
        from_commit = replay
        to_commit = commit
        parents = [replay]
        changeset_type = 'conflicts'
    elif commit:
        # One changeset per parent; a root commit yields [None].
        parents = [gitutils.Commit.fromSHA1(db, repository, sha1) for sha1 in commit.parents] or [None]
        changeset_type = 'merge' if len(parents) > 1 else 'direct'
    else:
        parents = [from_commit]
        commit = to_commit
        # A range that is exactly parent..child is a plain direct changeset.
        changeset_type = 'direct' if len(to_commit.parents) == 1 and from_commit == to_commit.parents[0] else 'custom'
    changes = None
    changesets = []
    fileversions_values = []
    chunks_values = []
    thin_diff = False
    # Look for already-computed changesets; stop at the first miss (the
    # assert below checks it is all-or-nothing per parent).
    changeset_ids = []
    for parent in parents:
        if parent:
            cursor.execute("SELECT id FROM changesets WHERE parent=%s AND child=%s AND type=%s", (parent.getId(db), commit.getId(db), changeset_type))
        else:
            cursor.execute("SELECT id FROM changesets WHERE parent IS NULL AND child=%s AND type=%s", (commit.getId(db), changeset_type))
        row = cursor.fetchone()
        if row:
            changeset_ids.append(row[0])
        else:
            break
    assert len(changeset_ids) in (0, len(parents))
    if changeset_ids:
        if rescan and user.hasRole(db, "developer"):
            # Developer-only: drop cached changesets and recompute below.
            cursor.executemany("DELETE FROM changesets WHERE id=%s", [(changeset_id,) for changeset_id in changeset_ids])
            db.commit()
            changeset_ids = []
        else:
            for changeset_id in changeset_ids:
                if changeset_type == 'custom':
                    # Touch custom changesets so cache eviction keeps them.
                    cursor.execute("UPDATE customchangesets SET time=NOW() WHERE changeset=%s", (changeset_id,))
                changeset = load.loadChangeset(db, repository, changeset_id, filtered_file_ids=filtered_file_ids, load_chunks=load_chunks)
                changeset.conflicts = conflicts
                if reanalyze and user.hasRole(db, "developer"):
                    # Developer-only: re-run chunk analysis, persisting
                    # changed results only when reanalyze == "commit".
                    analysis_values = []
                    for file in changeset.files:
                        if not filtered_file_ids or file.id in filtered_file_ids:
                            for index, chunk in enumerate(file.chunks):
                                old_analysis = chunk.analysis
                                chunk.analyze(file, index == len(file.chunks) - 1, True)
                                if old_analysis != chunk.analysis:
                                    analysis_values.append((chunk.analysis, chunk.id))
                    if reanalyze == "commit" and analysis_values:
                        cursor.executemany("UPDATE chunks SET analysis=%s WHERE id=%s", analysis_values)
                changesets.append(changeset)
    if not changesets:
        # A filtered single-parent range that crosses a merge commit cannot
        # be served by stored per-commit changesets; use a thin diff instead.
        if len(parents) == 1 and from_commit and to_commit and filtered_file_ids:
            iter_commit = to_commit
            while iter_commit != from_commit:
                if len(iter_commit.parents) > 1:
                    thin_diff = True
                    break
                iter_commit = gitutils.Commit.fromSHA1(db, repository, iter_commit.parents[0])
        if not thin_diff:
            # Ask the changeset service to compute and store the changeset,
            # then load it from the database.
            if changeset_type == "direct":
                request = { "changeset_type": "direct", "child_sha1": commit.sha1 }
            elif changeset_type == "custom":
                request = { "changeset_type": "custom", "parent_sha1": from_commit.sha1, "child_sha1": to_commit.sha1 }
            elif changeset_type == "merge":
                request = { "changeset_type": "merge", "child_sha1": commit.sha1 }
            else:
                request = { "changeset_type": "conflicts", "parent_sha1": replay.sha1, "child_sha1": commit.sha1 }
            request["repository_name"] = repository.name
            client.requestChangesets([request])
            db.commit()
            for parent in parents:
                if parent:
                    cursor.execute("SELECT id FROM changesets WHERE parent=%s AND child=%s AND type=%s", (parent.getId(db), commit.getId(db), changeset_type))
                else:
                    cursor.execute("SELECT id FROM changesets WHERE parent IS NULL AND child=%s AND type=%s", (commit.getId(db), changeset_type))
                changeset_id = cursor.fetchone()[0]
                changeset = load.loadChangeset(db, repository, changeset_id, filtered_file_ids=filtered_file_ids, load_chunks=load_chunks)
                changeset.conflicts = conflicts
                changesets.append(changeset)
        else:
            # Thin diff: parse the differences in-process, restricted to the
            # requested files, without persisting a changeset.
            # BUGFIX: describe_file was called unqualified (NameError);
            # every other call site uses dbutils.describe_file(db, ...).
            changes = diff.parse.parseDifferences(repository, from_commit=from_commit, to_commit=to_commit, filter_paths=[dbutils.describe_file(db, file_id) for file_id in filtered_file_ids])[from_commit.sha1]
            dbutils.find_files(db, changes)
            for file in changes:
                for index, chunk in enumerate(file.chunks):
                    chunk.analyze(file, index == len(file.chunks) - 1)
            changeset = diff.Changeset(None, from_commit, to_commit, changeset_type)
            changeset.conflicts = conflicts
            changeset.files = diff.File.sorted(changes)
            changesets.append(changeset)
    if do_highlight:
        # Request syntax highlighting of every non-null blob version touched
        # ('0' * 40 is the null SHA-1 for added/deleted sides).
        highlights = {}
        for changeset in changesets:
            for file in changeset.files:
                if file.canHighlight():
                    if file.old_sha1 and file.old_sha1 != '0' * 40:
                        highlights[file.old_sha1] = (file.path, file.getLanguage())
                    if file.new_sha1 and file.new_sha1 != '0' * 40:
                        highlights[file.new_sha1] = (file.path, file.getLanguage())
        syntaxhighlight.request.requestHighlights(repository, highlights)
    return changesets
def process(self, db, creator, review_id, filters):
    """Add review filters to an existing review on behalf of |creator|.

    Each entry in |filters| is a dict with "type" ("reviewer"/"watcher"),
    users given as "user_ids" and/or "user_names", and paths given as
    "paths" (validated glob patterns) and/or "file_ids".  Sends any
    resulting notification mails and bumps the review serial.

    Raises OperationFailure for invalid path patterns or user ids; returns
    OperationResult on success."""
    review = dbutils.Review.fromId(db, review_id)
    # Accumulate per-user (reviewer_paths, watcher_paths) sets.
    by_user = {}
    for filter in filters:
        if "user_ids" in filter:
            user_ids = set(filter["user_ids"])
        else:
            user_ids = set([])
        if "user_names" in filter:
            for user_name in filter["user_names"]:
                user_ids.add(dbutils.User.fromName(db, user_name).id)
        if "paths" in filter:
            paths = set(reviewing.filters.sanitizePath(path) for path in filter["paths"])
            for path in paths:
                try:
                    reviewing.filters.validatePattern(path)
                except reviewing.filters.PatternError as error:
                    raise OperationFailure(
                        code="invalidpattern",
                        title="Invalid path pattern",
                        message="There are invalid wild-cards in the path: %s" % error.message)
        else:
            paths = set()
        if "file_ids" in filter:
            for file_id in filter["file_ids"]:
                # BUGFIX: describe_file() was called without the |db|
                # argument; every other call site in this file passes db
                # first (dbutils.describe_file(db, file_id)).
                paths.add(dbutils.describe_file(db, file_id))
        for user_id in user_ids:
            # The |= below mutates the sets stored in by_user in place.
            reviewer_paths, watcher_paths = by_user.setdefault(user_id, (set(), set()))
            if filter["type"] == "reviewer":
                reviewer_paths |= paths
            else:
                watcher_paths |= paths
    pending_mails = []
    for user_id, (reviewer_paths, watcher_paths) in by_user.items():
        try:
            user = dbutils.User.fromId(db, user_id)
        except dbutils.InvalidUserId:
            raise OperationFailure(
                code="invaliduserid",
                title="Invalid user ID",
                message="At least one of the specified user IDs was invalid.")
        pending_mails.extend(reviewing.utils.addReviewFilters(db, creator, user, review, reviewer_paths, watcher_paths))
    # Re-fetch the review so the serial bump sees the updated state.
    review = dbutils.Review.fromId(db, review_id)
    review.incrementSerial(db)
    db.commit()
    mailutils.sendPendingMails(pending_mails)
    return OperationResult()
def createChangeset(db, user, repository, commit=None, from_commit=None, to_commit=None, rescan=False, reanalyze=False, conflicts=False, filtered_file_ids=None, review=None, do_highlight=True, load_chunks=True):
    """Load or create changeset(s) for a commit or a commit range.

    Either |commit| (direct/merge changeset; one changeset per parent) or
    |from_commit|/|to_commit| (direct or custom changeset) must be given.
    With conflicts=True, a merge replay is looked up or produced for a merge
    |commit|, or the given range is diffed as-is.  Existing changesets are
    loaded from the database; missing ones are requested from the changeset
    service, except that a filtered range that crosses a merge (or whose
    endpoints are not ancestor-related) falls back to an in-process "thin"
    diff.  Returns a list of diff.Changeset objects, or None if a required
    merge replay could not be produced."""
    cursor = db.cursor()
    if conflicts:
        if commit:
            # Diff the merge commit against a (cached) replay of the merge,
            # exposing conflict resolutions.
            assert len(commit.parents) > 1
            cursor.execute("SELECT replay FROM mergereplays WHERE original=%s", (commit.getId(db),))
            row = cursor.fetchone()
            if row:
                replay = gitutils.Commit.fromId(db, repository, row[0])
            else:
                replay = repository.replaymerge(db, user, commit)
                if not replay:
                    return None
                cursor.execute("INSERT INTO mergereplays (original, replay) VALUES (%s, %s)", (commit.getId(db), replay.getId(db)))
            from_commit = replay
            to_commit = commit
            parents = [replay]
        else:
            parents = [from_commit]
            commit = to_commit
        changeset_type = 'conflicts'
    elif commit:
        # One changeset per parent; a root commit yields [None].
        parents = [gitutils.Commit.fromSHA1(db, repository, sha1) for sha1 in commit.parents] or [None]
        changeset_type = 'merge' if len(parents) > 1 else 'direct'
    else:
        parents = [from_commit]
        commit = to_commit
        # A range that is exactly parent..child is a plain direct changeset.
        changeset_type = 'direct' if len(to_commit.parents) == 1 and from_commit == to_commit.parents[0] else 'custom'
    changes = None
    changesets = []
    fileversions_values = []
    chunks_values = []
    thin_diff = False
    # Look for already-computed changesets; stop at the first miss (the
    # assert below checks it is all-or-nothing per parent).
    changeset_ids = []
    for parent in parents:
        if parent:
            cursor.execute("SELECT id FROM changesets WHERE parent=%s AND child=%s AND type=%s", (parent.getId(db), commit.getId(db), changeset_type))
        else:
            cursor.execute("SELECT id FROM changesets WHERE parent IS NULL AND child=%s AND type=%s", (commit.getId(db), changeset_type))
        row = cursor.fetchone()
        if row:
            changeset_ids.append(row[0])
        else:
            break
    assert len(changeset_ids) in (0, len(parents))
    if changeset_ids:
        if rescan and user.hasRole(db, "developer"):
            # Developer-only: drop cached changesets and recompute below.
            cursor.executemany("DELETE FROM changesets WHERE id=%s", [(changeset_id,) for changeset_id in changeset_ids])
            db.commit()
            changeset_ids = []
        else:
            for changeset_id in changeset_ids:
                if changeset_type == 'custom':
                    # Touch custom changesets so cache eviction keeps them.
                    cursor.execute("UPDATE customchangesets SET time=NOW() WHERE changeset=%s", (changeset_id,))
                changeset = load.loadChangeset(db, repository, changeset_id, filtered_file_ids=filtered_file_ids, load_chunks=load_chunks)
                changeset.conflicts = conflicts
                if reanalyze and user.hasRole(db, "developer"):
                    # Developer-only: re-run chunk analysis, persisting
                    # changed results only when reanalyze == "commit".
                    analysis_values = []
                    for file in changeset.files:
                        if not filtered_file_ids or file.id in filtered_file_ids:
                            for index, chunk in enumerate(file.chunks):
                                old_analysis = chunk.analysis
                                chunk.analyze(file, index == len(file.chunks) - 1, True)
                                if old_analysis != chunk.analysis:
                                    analysis_values.append((chunk.analysis, chunk.id))
                    if reanalyze == "commit" and analysis_values:
                        cursor.executemany("UPDATE chunks SET analysis=%s WHERE id=%s", analysis_values)
                changesets.append(changeset)
    if not changesets:
        # A filtered single-parent range needs a thin diff if it crosses a
        # merge commit, or if the endpoints are not ancestor-related at all.
        if len(parents) == 1 and from_commit and to_commit and filtered_file_ids:
            if from_commit.isAncestorOf(to_commit):
                iter_commit = to_commit
                while iter_commit != from_commit:
                    if len(iter_commit.parents) > 1:
                        thin_diff = True
                        break
                    iter_commit = gitutils.Commit.fromSHA1(db, repository, iter_commit.parents[0])
            else:
                thin_diff = True
        if not thin_diff:
            # Ask the changeset service to compute and store the changeset,
            # then load it from the database.
            if changeset_type == "direct":
                request = { "changeset_type": "direct", "child_sha1": commit.sha1 }
            elif changeset_type == "custom":
                # "0" * 40 (the null SHA-1) signals a root-commit diff.
                request = { "changeset_type": "custom", "parent_sha1": from_commit.sha1 if from_commit else "0" * 40, "child_sha1": to_commit.sha1 }
            elif changeset_type == "merge":
                request = { "changeset_type": "merge", "child_sha1": commit.sha1 }
            else:
                request = { "changeset_type": "conflicts", "parent_sha1": from_commit.sha1, "child_sha1": to_commit.sha1 }
            request["repository_name"] = repository.name
            client.requestChangesets([request])
            db.commit()
            for parent in parents:
                if parent:
                    cursor.execute("SELECT id FROM changesets WHERE parent=%s AND child=%s AND type=%s", (parent.getId(db), commit.getId(db), changeset_type))
                else:
                    cursor.execute("SELECT id FROM changesets WHERE parent IS NULL AND child=%s AND type=%s", (commit.getId(db), changeset_type))
                changeset_id = cursor.fetchone()[0]
                changeset = load.loadChangeset(db, repository, changeset_id, filtered_file_ids=filtered_file_ids, load_chunks=load_chunks)
                changeset.conflicts = conflicts
                changesets.append(changeset)
        else:
            # Thin diff: parse the differences in-process, restricted to the
            # requested files, without persisting a changeset.
            # BUGFIX: describe_file was called unqualified (NameError);
            # every other call site uses dbutils.describe_file(db, ...).
            changes = diff.parse.parseDifferences(repository, from_commit=from_commit, to_commit=to_commit, filter_paths=[dbutils.describe_file(db, file_id) for file_id in filtered_file_ids])[from_commit.sha1]
            dbutils.find_files(db, changes)
            for file in changes:
                for index, chunk in enumerate(file.chunks):
                    chunk.analyze(file, index == len(file.chunks) - 1)
            changeset = diff.Changeset(None, from_commit, to_commit, changeset_type)
            changeset.conflicts = conflicts
            changeset.files = diff.File.sorted(changes)
            changesets.append(changeset)
    if do_highlight:
        # Request syntax highlighting of every non-null blob version touched
        # ('0' * 40 is the null SHA-1 for added/deleted sides).
        highlights = {}
        for changeset in changesets:
            for file in changeset.files:
                if file.canHighlight():
                    if file.old_sha1 and file.old_sha1 != '0' * 40:
                        highlights[file.old_sha1] = (file.path, file.getLanguage())
                    if file.new_sha1 and file.new_sha1 != '0' * 40:
                        highlights[file.new_sha1] = (file.path, file.getLanguage())
        syntaxhighlight.request.requestHighlights(repository, highlights)
    return changesets
def renderChainInMail(db, to_user, chain, focus_comment, new_state, new_type, line_length, context_lines):
    # Render a comment chain as plain text for a notification email sent to
    # |to_user|.  |focus_comment| (if given) is the newly added comment and
    # must be the last comment in the chain; earlier comments are rendered
    # as quoted ("> ") text according to the user's quotedComments
    # preference.  |new_state|/|new_type| trigger the trailing status lines.
    # Returns the rendered string.
    result = ""
    hr = "-" * line_length
    urls = to_user.getCriticURLs(db)
    url = "\n".join([" %s/showcomment?chain=%d" % (url, chain.id) for url in urls])
    cursor = db.cursor()
    if chain.file_id:
        # Chain is attached to file lines: quote the commented code.
        path = dbutils.describe_file(db, chain.file_id)
        if chain.first_commit == chain.last_commit:
            cursor.execute("""SELECT null, sha1 FROM commentchainlines WHERE chain=%s AND commit=%s""", (chain.id, chain.first_commit.getId(db)))
        elif chain.origin == 'old':
            cursor.execute("""SELECT old_mode, old_sha1 FROM fileversions, changesets, reviewchangesets WHERE fileversions.changeset=changesets.id AND fileversions.file=%s AND changesets.parent=%s AND reviewchangesets.changeset=changesets.id AND reviewchangesets.review=%s""", [chain.file_id, chain.first_commit.getId(db), chain.review.id])
        else:
            cursor.execute("""SELECT new_mode, new_sha1 FROM fileversions, changesets, reviewchangesets WHERE fileversions.changeset=changesets.id AND fileversions.file=%s AND changesets.child=%s AND reviewchangesets.changeset=changesets.id AND reviewchangesets.review=%s""", [chain.file_id, chain.last_commit.getId(db), chain.review.id])
        try:
            mode, sha1 = cursor.fetchone()
        except:
            # NOTE(review): bare except — presumably covers fetchone()
            # returning None (no matching file version); falls back to any
            # blob version the chain knows about.  Confirm this is the only
            # failure mode intended to be swallowed here.
            mode = None
            sha1 = chain.lines_by_sha1.keys()[0]
        first_line, count = chain.lines_by_sha1[sha1]
        context = changeset_utils.getCodeContext(db, sha1, first_line, minimized=True)
        if context:
            result += "%s in %s, %s:\n%s\n%s\n" % (chain.type.capitalize(), path, context, url, hr)
        else:
            result += "%s in %s:\n%s\n%s\n" % (chain.type.capitalize(), path, url, hr)
        try:
            file = diff.File(id=chain.file_id, path=path, new_mode=mode, new_sha1=sha1, repository=chain.review.repository)
            file.loadNewLines()
            lines = file.newLines(False)
        except:
            # Re-raise with enough context to debug the failing blob load.
            raise Exception, repr((chain.id, file.id, mode, sha1))
        # Expand the commented range by |context_lines| on each side,
        # clamped to the file.
        last_line = first_line + count - 1
        first_line = max(1, first_line - context_lines)
        last_line = min(last_line + context_lines, len(lines))
        # Right-align line numbers to the widest one shown.
        width = len(str(last_line))
        for offset, line in enumerate(lines[first_line - 1:last_line]):
            result += "%s|%s\n" % (str(first_line + offset).rjust(width), line)
        result += hr + "\n"
    elif chain.first_commit:
        # Chain is attached to a commit message: quote the message lines.
        result += "%s in commit %s by %s:\n%s\n%s\n" % (chain.type.capitalize(), chain.first_commit.sha1[:8], chain.first_commit.author.name, url, hr)
        first_line, count = chain.lines_by_sha1[chain.first_commit.sha1]
        last_line = first_line + count - 1
        lines = chain.first_commit.message.splitlines()
        for line in lines[first_line:last_line + 1]:
            result += " %s\n" % line
        result += hr + "\n"
    else:
        # Chain is attached to the review itself.
        result += "General %s:\n%s\n%s\n" % (chain.type, url, hr)
    # How many earlier comments to quote: "first", "firstlast", "last" or
    # everything (any other value).
    mode = to_user.getPreference(db, "email.updatedReview.quotedComments")
    def formatComment(comment):
        return "%s at %s:\n%s\n" % (comment.user.fullname, comment.when(), textutils.reflow(comment.comment, line_length, indent=2))
    assert not focus_comment or focus_comment == chain.comments[-1], "focus comment (#%d) is not last in chain (#%d) as expected" % (focus_comment.id, chain.id)
    if not focus_comment or len(chain.comments) > 1:
        if focus_comment: comments = chain.comments[:-1]
        else: comments = chain.comments
        # Everything rendered so far (header + quoted code) becomes quoted
        # context.
        result = "\n".join(["> " + line for line in result.splitlines()]) + "\n"
        quote1 = ""
        notshown = ""
        quote2 = ""
        if mode == "first":
            quote1 = formatComment(comments[0])
            if len(comments) > 1:
                notshown = "[%d comment%s not shown]" % (len(comments) - 1, "s" if len(comments) > 2 else "")
        elif mode == "firstlast":
            quote1 = formatComment(comments[0])
            if len(comments) > 2:
                notshown = "[%d comment%s not shown]" % (len(comments) - 2, "s" if len(comments) > 3 else "")
            if len(comments) > 1:
                quote2 = formatComment(comments[-1])
        elif mode == "last":
            if len(comments) > 1:
                notshown = "[%d comment%s not shown]" % (len(comments) - 1, "s" if len(comments) > 2 else "")
            quote2 = formatComment(comments[-1])
        else:
            # Default: quote every earlier comment.
            for comment in comments:
                quote1 += formatComment(comment)
        if quote1:
            result += "\n".join(["> " + line for line in quote1.splitlines()]) + "\n"
        if notshown:
            result += notshown + "\n"
        if quote2:
            result += "\n".join(["> " + line for line in quote2.splitlines()]) + "\n"
        if focus_comment:
            result += "\n"
    if focus_comment:
        # The new comment itself is rendered unquoted.
        result += formatComment(focus_comment)
    if new_type == "issue":
        result += "\nCONVERTED TO ISSUE!\n"
    elif new_type == "note":
        result += "\nCONVERTED TO NOTE!\n"
    if new_state == "closed":
        result += "\nISSUE RESOLVED!\n"
    elif new_state == "addressed":
        result += "\nISSUE ADDRESSED!\n"
    elif new_state == "open":
        result += "\nISSUE REOPENED!\n"
    elif chain.state == "closed":
        result += "\n(This issue is resolved.)\n"
    elif chain.state == "addressed":
        result += "\n(This issue is addressed.)\n"
    return result
def reapplyfilters(req, db, user):
    # Re-apply a user's review filters to all open reviews (optionally
    # restricted to one repository), assigning the user as reviewer of
    # matching unassigned changes and/or adding them as a watcher.  Returns
    # a plain-text summary of what was assigned/watched ("nothing\n" if
    # there were no matches).
    if user.isAnonymous(): return OperationFailureMustLogin()
    # Three cursors: cursor1 iterates reviews while cursor2/cursor3 run
    # nested queries against the same connection.
    cursor1 = db.cursor()
    cursor2 = db.cursor()
    cursor3 = db.cursor()
    # The operation may be run on behalf of another user.
    user = dbutils.User.fromName(db, req.getParameter("user", req.user))
    repository_name = req.getParameter("repository", None)
    if not repository_name:
        cursor1.execute("""SELECT reviews.id, applyfilters, applyparentfilters, branches.repository FROM reviews JOIN branches ON (reviews.branch=branches.id) WHERE reviews.state!='closed'""")
    else:
        # NOTE(review): 'fromParameter' here vs 'FromParameter' used in
        # renderHome — both presumably exist on gitutils.Repository with
        # different call conventions; confirm.
        repository = gitutils.Repository.fromParameter(db, repository_name)
        cursor1.execute("""SELECT reviews.id, applyfilters, applyparentfilters, branches.repository FROM reviews JOIN branches ON (reviews.branch=branches.id) WHERE reviews.state!='closed' AND branches.repository=%s""", (repository.id,))
    repositories = {}                  # repository id -> Repository (cache)
    assign_changes = {}                # review id -> set of (file_id, review_file_id)
    watch_reviews = set()              # review ids to add the user as watcher of
    own_commit = {}                    # commit id -> True if authored by |user| (cache)
    for review_id, applyfilters, applyparentfilters, repository_id in cursor1:
        if repository_id not in repositories:
            repositories[repository_id] = gitutils.Repository.fromId(db, repository_id)
        repository = repositories[repository_id]
        filters = review_filters.Filters()
        filters.load(db, review=review_filters.Filters.Review(review_id, applyfilters, applyparentfilters, repository), user=user)
        if filters.hasFilters():
            # Fetch the review's changes not yet assigned to |user|.
            cursor2.execute("""SELECT changesets.child, reviewfiles.file, reviewfiles.id FROM changesets JOIN reviewfiles ON (reviewfiles.changeset=changesets.id) LEFT OUTER JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id AND reviewuserfiles.uid=%s) WHERE reviewfiles.review=%s AND reviewuserfiles.uid IS NULL""", (user.id, review_id))
            for commit_id, file_id, review_file_id in cursor2:
                users = filters.listUsers(db, file_id)
                if user.id in users:
                    if commit_id not in own_commit:
                        # Users don't review their own commits; delegates
                        # handle those (see the filters help text).
                        # NOTE(review): cursor3.fetchone() would raise
                        # TypeError if the commit has no matching git user
                        # row — presumably guaranteed to exist; confirm.
                        cursor3.execute("""SELECT uid FROM usergitemails JOIN gitusers USING (email) JOIN commits ON (commits.author_gituser=gitusers.id) WHERE commits.id=%s""", (commit_id,))
                        own_commit[commit_id] = cursor3.fetchone()[0] == user.id
                    if not own_commit[commit_id]:
                        if users[user.id][0] == 'reviewer':
                            assign_changes.setdefault(review_id, set()).add((file_id, review_file_id))
                        else:
                            watch_reviews.add(review_id)
    # Associate the user with any review they weren't already part of.
    new_reviews = set()
    for review_id in itertools.chain(assign_changes, watch_reviews):
        cursor1.execute("SELECT 1 FROM reviewusers WHERE review=%s AND uid=%s", (review_id, user.id))
        if not cursor1.fetchone():
            new_reviews.add(review_id)
    cursor1.executemany("INSERT INTO reviewusers (review, uid) VALUES (%s, %s)", [(review_id, user.id) for review_id in new_reviews])
    reviewuserfiles_values = []
    for file_ids in assign_changes.values():
        reviewuserfiles_values.extend([(review_file_id, user.id) for file_id, review_file_id in file_ids])
    cursor1.executemany("INSERT INTO reviewuserfiles (file, uid) VALUES (%s, %s)", reviewuserfiles_values)
    # Build the plain-text summary consumed by the caller.
    result = ""
    for review_id in sorted(assign_changes.keys()):
        review = dbutils.Review.fromId(db, review_id, load_commits=False)
        file_ids = assign_changes[review_id]
        if review.state == 'open':
            result += "review,%s:%d:%s\n" % ("new" if review_id in new_reviews else "old", review.id, review.summary)
            paths = [dbutils.describe_file(db, file_id) for file_id, review_file_id in file_ids]
            for path in diff.File.eliminateCommonPrefixes(sorted(paths), text=True):
                result += " " + path + "\n"
    for review_id in sorted(watch_reviews & new_reviews):
        review = dbutils.Review.fromId(db, review_id, load_commits=False)
        if review.state == 'open':
            result += "watch:%d:%s\n" % (review.id, review.summary)
    db.commit()
    if not result:
        result = "nothing\n"
    return result
def renderCodeCommentChain(db, target, user, review, chain, context_lines=3, compact=False, tabify=False, original=False, changeset=None, linkify=False):
    """Render a comment chain anchored to lines in a file, with code context.

    Renders the commented lines (plus 'context_lines' of surrounding context)
    into 'target' via changeset_html.renderFile(), followed by the comments
    themselves, and registers the client-side CommentMarkers script for the
    chain.  If 'original' is true, the chain is shown at its original
    location even when it has been addressed.  An already-loaded 'changeset'
    may be passed in; otherwise one is created (and committed to the database
    for reuse).
    """
    repository = review.repository

    old_sha1 = None
    new_sha1 = None

    # Sentinels identifying which side of the diff the chain is shown on.
    old = 1
    new = 2

    cursor = db.cursor()

    file_id = chain.file_id
    file_path = dbutils.describe_file(db, file_id)

    if (chain.state != "addressed" or original) and chain.first_commit == chain.last_commit:
        # Simple case: the chain lives in a single version of the file.
        # Fabricate a context-only "diff" of the relevant lines.
        sha1 = chain.first_commit.getFileSHA1(file_path)

        cursor.execute("SELECT first_line, last_line FROM commentchainlines WHERE chain=%s AND sha1=%s", (chain.id, sha1))
        first_line, last_line = cursor.fetchone()

        file = diff.File(file_id, file_path, sha1, sha1, review.repository, chunks=[])
        file.loadNewLines(True)

        start = max(1, first_line - context_lines)
        end = min(file.newCount(), last_line + context_lines)
        count = end + 1 - start

        lines = file.newLines(True)
        # All lines are CONTEXT lines; old/new offsets and content coincide.
        lines = [diff.Line(diff.Line.CONTEXT, start + index, lines[start + index - 1], start + index, lines[start + index - 1]) for index in range(count)]

        file.macro_chunks = [diff.MacroChunk([], lines)]

        use = new
        display_type = "new"

        commit_url_component = "sha1=%s" % chain.first_commit.sha1
    else:
        # The chain spans a change (or has been addressed): pick the commit
        # pair to diff and which side the chain's lines belong to.
        if chain.state == "addressed" and not original and review.containsCommit(db, chain.addressed_by):
            # Show the change that addressed the comment; the commented
            # lines are on the old side of that diff.
            parent = gitutils.Commit.fromSHA1(db, review.repository, chain.addressed_by.parents[0])
            child = chain.addressed_by
            use = old
        else:
            parent = chain.first_commit
            child = chain.last_commit

            if parent == child:
                if chain.origin == "old":
                    # The chain was made against the old side; find the
                    # review changeset whose parent is this commit.
                    cursor.execute("""SELECT changesets.child FROM changesets, reviewchangesets WHERE changesets.parent=%s AND reviewchangesets.changeset=changesets.id AND reviewchangesets.review=%s""", [child.getId(db), review.id])

                    try: child = gitutils.Commit.fromId(db, repository, cursor.fetchone()[0])
                    # NOTE(review): bare except — any failure (including no
                    # row) falls back to diffing against the first parent.
                    except: parent = gitutils.Commit.fromSHA1(db, repository, child.parents[0])
                else:
                    parent = gitutils.Commit.fromSHA1(db, repository, child.parents[0])

            if chain.origin == "old": use = old
            else: use = new

        if parent.sha1 in child.parents and len(child.parents) == 1:
            # Plain parent->child step: a single-commit changeset suffices.
            commit = child
            from_commit = None
            to_commit = None
        else:
            # Arbitrary commit pair (or merge): a custom changeset is needed.
            commit = None
            from_commit = parent
            to_commit = child

        if changeset:
            # Caller-supplied changeset must match what we computed above.
            assert ((changeset.parent == from_commit and changeset.child == to_commit) if commit is None else (changeset.parent.sha1 == commit.parents[0] and changeset.child == commit))
            assert changeset.getFile(file_id)
        else:
            changeset = changeset_utils.createChangeset(db, user, repository, commit=commit, from_commit=from_commit, to_commit=to_commit, filtered_file_ids=set((file_id,)))[0]

        file = changeset.getFile(file_id)

        if not file:
            if chain.state == "addressed" and not original:
                # The file isn't in this changeset; retry at the chain's
                # original location instead.
                renderCodeCommentChain(db, target, user, review, chain, context_lines, compact, tabify, original=True)
                return
            else:
                # NOTE(review): bare 'raise' outside an except handler has
                # no active exception to re-raise; this path would raise
                # TypeError at runtime — confirm intent.
                raise

        # Commit so that the diff and its analysis, written to the database by createChangeset(),
        # can be reused later.
        db.commit()

        old_sha1 = file.old_sha1
        new_sha1 = file.new_sha1

        # Fall back to the side that actually exists if the file was added
        # or deleted (the null SHA-1 is forty zeroes).
        if use == old and old_sha1 == '0' * 40: use = new
        elif use == new and new_sha1 == '0' * 40: use = old

        if use == old: sha1 = old_sha1
        else: sha1 = new_sha1

        cursor.execute("SELECT first_line, last_line FROM commentchainlines WHERE chain=%s AND sha1=%s", [chain.id, sha1])
        first_line, last_line = cursor.fetchone()

        def readChunks():
            # Materialize diff chunks from a pending cursor result set.
            # NOTE(review): appears unused within this function — confirm.
            return [diff.Chunk(delete_offset, delete_count, insert_offset, insert_count, analysis=analysis, is_whitespace=is_whitespace) for delete_offset, delete_count, insert_offset, insert_count, analysis, is_whitespace in cursor.fetchall()]

        first_context_line = first_line - context_lines
        last_context_line = last_line + context_lines

        def includeChunk(chunk):
            # True if the chunk overlaps the commented region plus context,
            # measured on the side ('use') the chain belongs to.
            # NOTE(review): appears unused within this function — confirm.
            if use == old: chunk_first_line, chunk_last_line = chunk.delete_offset, chunk.delete_offset + chunk.delete_count - 1
            else: chunk_first_line, chunk_last_line = chunk.insert_offset, chunk.insert_offset + chunk.insert_count - 1

            return chunk_last_line >= first_context_line and chunk_first_line <= last_context_line

        def lineFilter(line):
            # Keep only lines within the context window; drop an inserted
            # (resp. deleted) line sitting exactly on the window's first
            # line when viewing the old (resp. new) side.
            if use == old:
                linenr = line.old_offset
                if linenr == first_context_line and line.type == diff.Line.INSERTED:
                    return False
            else:
                linenr = line.new_offset
                if linenr == first_context_line and line.type == diff.Line.DELETED:
                    return False

            return first_context_line <= linenr <= last_context_line

        file.loadOldLines(True)
        file.loadNewLines(True)

        context = diff.context.ContextLines(file, file.chunks, [chain])
        file.macro_chunks = context.getMacroChunks(context_lines, highlight=True, lineFilter=lineFilter)

        try: macro_chunk = file.macro_chunks[0]
        # NOTE(review): raising a repr() string is invalid in Python >= 2.6
        # (TypeError: exceptions must derive from BaseException) — this
        # debugging aid should raise a real exception instead.
        except: raise repr((parent.sha1, child.sha1))

        display_type = "both"

        if chain.state != "addressed":
            # If every line in the chunk has the same single-sided type,
            # collapse the display to just that side.
            first_line_type = macro_chunk.lines[0].type
            if first_line_type == diff.Line.CONTEXT or (use == old and first_line_type == diff.Line.DELETED) or (use == new and first_line_type == diff.Line.INSERTED):
                for line in macro_chunk.lines[1:]:
                    if first_line_type != line.type: break
                else: display_type = "old" if use == old else "new"

        commit_url_component = "from=%s&to=%s" % (parent.sha1, child.sha1)

    def renderHeaderLeft(db, target, file):
        # Chain title linking to the stand-alone comment page.
        target.span("comment-chain-title").a(href="/showcomment?chain=%d" % chain.id).text(chain.title())

    def renderHeaderRight(db, target, file):
        # File path linking to the commented lines in the full diff view.
        side = use == old and "o" or "n"
        uri = "showcommit?%s&review=%d&file=%d#f%d%s%d" % (commit_url_component, review.id, file.id, file.id, side, first_line)
        target.span("filename").a(href=uri).text(file.path)

    def renderCommentsLocal(db, target, **kwargs):
        # Place comments under the side the chain belongs to (or centered
        # when only one side is displayed).
        if display_type == "both":
            if use == old: position = "left"
            else: position = "right"
        else:
            position = "center"

        renderComments(db, target, user, chain, position, linkify)

    def lineId(base):
        # Per-chain DOM id for a rendered line.
        return "c%d%s" % (chain.id, base)

    def lineCellId(base):
        # Per-chain DOM id for a rendered line cell.
        return "c%d%s" % (chain.id, base)

    target.addInternalScript("commentChainById[%d] = %s;" % (chain.id, chain.getJSConstructor(sha1)), here=True)

    changeset_html.renderFile(db, target, user, review, file, options={ "support_expand": False, "display_type": display_type, "header_left": renderHeaderLeft, "header_right": renderHeaderRight, "content_after": renderCommentsLocal, "show": True, "expand": True, "line_id": lineId, "line_cell_id": lineCellId, "compact": compact, "tabify": tabify, "include_deleted": True })

    # Hook up the client-side markers spanning the first and last commented
    # line elements.
    data = (chain.id, file_id, use == old and "o" or "n", first_line, chain.id, file_id, use == old and "o" or "n", last_line, htmlutils.jsify(chain.type), htmlutils.jsify(chain.state), chain.id)

    target.addInternalScript("""$(document).ready(function () { var markers = new CommentMarkers(null); markers.setLines(document.getElementById('c%df%d%s%d'), document.getElementById('c%df%d%s%d')); markers.setType(%s, %s); commentChainById[%d].markers = markers; });""" % data, here=True)
def createCommentChain(db, user, review, chain_type, commit_id=None, origin=None, file_id=None, parent_id=None, child_id=None, old_sha1=None, new_sha1=None, offset=None, count=None):
    """Create a new comment chain in a review and return its id.

    Depending on the arguments, the chain is anchored to:
    - lines in a file version (file_id + child_id + new_sha1, with
      parent_id equal to child_id or None),
    - lines in a file changed within the review (file_id + parent_id +
      child_id + old_sha1/new_sha1), in which case the chain's location is
      propagated backward to its origin and forward through later versions
      of the file,
    - lines in a commit message (commit_id), or
    - nothing (a general review comment).

    'offset'/'count' give the 1-based first line and line count.  Raises
    Exception if an issue is opened on a non-open review, or if a
    file-version comment targets a file that has been changed in the review.
    All users matching filters for the file (plus owners and the author)
    are added to commentchainusers.  Does not commit the transaction.
    """
    if chain_type == "issue" and review.state != "open":
        raise Exception, "review not open; can't raise issue"

    cursor = db.cursor()

    if file_id is not None and (parent_id == child_id or parent_id is None):
        # Comment on a file version (not on a change): only valid if the
        # file hasn't been modified (both SHA-1s non-null) in the review.
        cursor.execute("""SELECT 1 FROM reviewchangesets JOIN fileversions USING (changeset) WHERE reviewchangesets.review=%s AND fileversions.file=%s AND fileversions.old_sha1!='0000000000000000000000000000000000000000' AND fileversions.new_sha1!='0000000000000000000000000000000000000000'""", (review.id, file_id))

        if cursor.fetchone():
            raise Exception, "file changed in review"

        cursor.execute("INSERT INTO commentchains (review, uid, type, file, first_commit, last_commit) VALUES (%s, %s, %s, %s, %s, %s) RETURNING id", [review.id, user.id, chain_type, file_id, child_id, child_id])
        chain_id = cursor.fetchone()[0]

        cursor.execute("INSERT INTO commentchainlines (chain, uid, commit, sha1, first_line, last_line) VALUES (%s, %s, %s, %s, %s, %s)", (chain_id, user.id, child_id, new_sha1, offset, offset + count - 1))
    elif file_id is not None:
        # Comment on a change to a file: walk the chain of file versions in
        # the review both backward and forward so the comment is recorded
        # against every version where the lines survive unmodified.

        parents_returned = set()

        def getFileParent(new_sha1):
            # (changeset id, old sha1) of the review changeset producing
            # this version, or (None, None) when none exists or a cycle is
            # detected.  The bare except also covers fetchone() == None.
            cursor.execute("""SELECT changesets.id, fileversions.old_sha1 FROM changesets, reviewchangesets, fileversions WHERE reviewchangesets.review=%s AND reviewchangesets.changeset=changesets.id AND fileversions.changeset=changesets.id AND fileversions.file=%s AND fileversions.new_sha1=%s""", [review.id, file_id, new_sha1])

            try:
                changeset_id, old_sha1 = cursor.fetchone()
                if old_sha1 in parents_returned: return None, None
                parents_returned.add(old_sha1)
                return changeset_id, old_sha1
            except:
                return None, None

        children_returned = set()

        def getFileChild(old_sha1):
            # (changeset id, new sha1) of the review changeset consuming
            # this version; mirror of getFileParent in the other direction.
            cursor.execute("""SELECT changesets.id, fileversions.new_sha1 FROM changesets, reviewchangesets, fileversions WHERE reviewchangesets.review=%s AND reviewchangesets.changeset=changesets.id AND fileversions.changeset=changesets.id AND fileversions.file=%s AND fileversions.old_sha1=%s""", [review.id, file_id, old_sha1])

            try:
                changeset_id, new_sha1 = cursor.fetchone()
                if new_sha1 in children_returned: return None, None
                children_returned.add(new_sha1)
                return changeset_id, new_sha1
            except:
                return None, None

        # Find the changeset the comment was actually made against.
        cursor.execute("""SELECT changesets.id FROM changesets, reviewchangesets, fileversions WHERE reviewchangesets.review=%s AND reviewchangesets.changeset=changesets.id AND changesets.child=%s AND fileversions.changeset=changesets.id AND fileversions.file=%s AND fileversions.old_sha1=%s AND fileversions.new_sha1=%s""", [review.id, child_id, file_id, old_sha1, new_sha1])
        row = cursor.fetchone()

        if not row:
            # No exact match; retry matching only the side the comment
            # originated on.
            if origin == "old":
                cursor.execute("""SELECT changesets.id FROM changesets, reviewchangesets, fileversions WHERE reviewchangesets.review=%s AND reviewchangesets.changeset=changesets.id AND fileversions.changeset=changesets.id AND fileversions.file=%s AND fileversions.old_sha1=%s""", [review.id, file_id, old_sha1])
            else:
                cursor.execute("""SELECT changesets.id FROM changesets, reviewchangesets, fileversions WHERE reviewchangesets.review=%s AND reviewchangesets.changeset=changesets.id AND fileversions.changeset=changesets.id AND fileversions.file=%s AND fileversions.new_sha1=%s""", [review.id, file_id, new_sha1])
            # NOTE(review): row may still be None here, making the
            # subscript below raise TypeError — confirm a match is
            # guaranteed by the callers.
            row = cursor.fetchone()

        primary_changeset_id = row[0]

        # Maps between adjacent file versions in the review:
        #   sha1s_older: version sha1 => (changeset id, its parent sha1)
        #   sha1s_newer: version sha1 => (changeset id, its child sha1)
        sha1s_older = { }
        sha1s_newer = { old_sha1: (primary_changeset_id, new_sha1) }

        sha1 = new_sha1

        # Walk backward from the commented version.
        while True:
            changeset_id, next_sha1 = getFileParent(sha1)
            if changeset_id:
                sha1s_older[sha1] = changeset_id, next_sha1
                sha1s_newer[next_sha1] = changeset_id, sha1
                sha1 = next_sha1
            else:
                break

        sha1 = new_sha1

        # Walk forward from the commented version.
        while True:
            changeset_id, next_sha1 = getFileChild(sha1)
            if changeset_id:
                sha1s_newer[sha1] = changeset_id, next_sha1
                sha1 = next_sha1
            else:
                break

        commentchainlines_values = []
        processed = set()

        def searchOrigin(changeset_id, sha1, search_space, first_line, last_line):
            # Follow 'search_space' as far as the commented lines survive
            # unmodified, returning the last (changeset id, sha1, first
            # line, last line) reached.  Stops at merges and at chunks that
            # modify the lines.
            # NOTE(review): 'forward' is not defined in this scope; if
            # updateCommentChain is reached, the resulting NameError is
            # swallowed by the bare except and the walk stops early —
            # confirm whether this is the intended behavior.
            try:
                while sha1 not in processed:
                    processed.add(sha1)
                    changeset_id, next_sha1 = search_space[sha1]
                    changeset = changeset_load.loadChangeset(db, review.repository, changeset_id, filtered_file_ids=set([file_id]))
                    if len(changeset.child.parents) > 1: break
                    verdict, next_first_line, next_last_line = updateCommentChain(first_line, last_line, changeset.files[0].chunks, forward)
                    if verdict == "modified": break
                    sha1 = next_sha1
                    first_line = next_first_line
                    last_line = next_last_line
            except:
                pass
            return changeset_id, sha1, first_line, last_line

        first_line = offset
        last_line = offset + count - 1

        # Trace the comment back to the oldest version containing it; the
        # recorded commit is the parent or child of that changeset
        # depending on which side the comment was made on.
        if origin == 'old':
            changeset_id, sha1, first_line, last_line = searchOrigin(primary_changeset_id, old_sha1, sha1s_older, first_line, last_line)
            commit_id = diff.Changeset.fromId(db, review.repository, changeset_id).parent.id
        else:
            changeset_id, sha1, first_line, last_line = searchOrigin(primary_changeset_id, new_sha1, sha1s_older, first_line, last_line)
            commit_id = diff.Changeset.fromId(db, review.repository, changeset_id).child.id

        commentchainlines_values.append((user.id, commit_id, sha1, first_line, last_line))
        processed = set()
        processed.add(sha1)

        # Propagate the comment forward through newer versions while the
        # lines transfer cleanly.
        while sha1 in sha1s_newer:
            changeset_id, sha1 = sha1s_newer[sha1]

            if sha1 in processed: break
            else: processed.add(sha1)

            changeset = changeset_load.loadChangeset(db, review.repository, changeset_id, filtered_file_ids=set([file_id]))

            if len(changeset.child.parents) != 1:
                # Merge commit: the stored changeset isn't a plain diff, so
                # compute the file's chunks directly.
                chunks = diff.parse.parseDifferences(review.repository, from_commit=changeset.parent, to_commit=changeset.child, selected_path=dbutils.describe_file(db, file_id)).chunks
            else:
                chunks = changeset.files[0].chunks

            verdict, first_line, last_line = updateCommentChain(first_line, last_line, chunks)

            if verdict == "transfer":
                commentchainlines_values.append((user.id, changeset.child.getId(db), sha1, first_line, last_line))
            else:
                break

        cursor.execute("INSERT INTO commentchains (review, uid, type, origin, file, first_commit, last_commit) VALUES (%s, %s, %s, %s, %s, %s, %s) RETURNING id", [review.id, user.id, chain_type, origin, file_id, parent_id, child_id])
        chain_id = cursor.fetchone()[0]

        try:
            cursor.executemany("INSERT INTO commentchainlines (chain, uid, commit, sha1, first_line, last_line) VALUES (%s, %s, %s, %s, %s, %s)", [(chain_id,) + values for values in commentchainlines_values])
        except:
            # Debugging aid: surface the values that failed to insert.
            raise Exception, repr(commentchainlines_values)
    elif commit_id is not None:
        # Comment on a commit message: anchor the lines to the commit's
        # own SHA-1.
        commit = gitutils.Commit.fromId(db, review.repository, commit_id)

        cursor.execute("INSERT INTO commentchains (review, uid, type, first_commit, last_commit) VALUES (%s, %s, %s, %s, %s) RETURNING id", [review.id, user.id, chain_type, commit_id, commit_id])
        chain_id = cursor.fetchone()[0]

        cursor.execute("INSERT INTO commentchainlines (chain, uid, commit, sha1, first_line, last_line) VALUES (%s, %s, %s, %s, %s, %s)", (chain_id, user.id, commit_id, commit.sha1, offset, offset + count - 1))
    else:
        # General review comment with no anchor.
        cursor.execute("INSERT INTO commentchains (review, uid, type) VALUES (%s, %s, %s) RETURNING id", [review.id, user.id, chain_type])
        chain_id = cursor.fetchone()[0]

    # Associate the author, the review's owners, and (for file comments)
    # every user whose filters match the file.
    commentchainusers = set([user.id] + map(int, review.owners))

    if file_id is not None:
        filters = Filters()
        filters.load(db, review=review)

        for user_id in filters.listUsers(db, file_id):
            commentchainusers.add(user_id)

    cursor.executemany("INSERT INTO commentchainusers (chain, uid) VALUES (%s, %s)", [(chain_id, user_id) for user_id in commentchainusers])

    return chain_id