Example #1
File: checkrebase.py Project: Aessy/critic
    def process(self, db, user, review_id, merge_sha1=None, new_head_sha1=None, new_upstream_sha1=None):
        review = dbutils.Review.fromId(db, review_id)

        if merge_sha1 is not None:
            merge = gitutils.Commit.fromSHA1(db, review.repository, merge_sha1)

            changesets = changeset_utils.createChangeset(
                db, user, review.repository, merge, conflicts=True, do_highlight=False)

            url = "/showcommit?repository=%d&sha1=%s&conflicts=yes" % (review.repository.id, merge.sha1)
        else:
            upstreams = review.getCommitSet(db).getFilteredTails(review.repository)

            if len(upstreams) > 1:
                return OperationResult(rebase_supported=False)

            old_head = review.branch.getHead(db)
            old_upstream = gitutils.Commit.fromSHA1(db, review.repository, upstreams.pop())
            new_head = gitutils.Commit.fromSHA1(db, review.repository, new_head_sha1)
            new_upstream = gitutils.Commit.fromSHA1(db, review.repository, new_upstream_sha1)

            replay = reviewing.rebase.replayRebase(db, review, user, old_head, old_upstream, new_head, new_upstream)

            changesets = changeset_utils.createChangeset(
                db, user, review.repository, from_commit=replay, to_commit=new_head, conflicts=True, do_highlight=False)

            url = "/showcommit?repository=%d&from=%s&to=%s&conflicts=yes" % (review.repository.id, replay.sha1, new_head.sha1)

        has_changes = False
        has_conflicts = False

        for changed_file in changesets[0].files:
            changed_file.loadOldLines()

            file_has_conflicts = False

            for chunk in changed_file.chunks:
                lines = changed_file.getOldLines(chunk)
                for line in lines:
                    if line.startswith("<<<<<<<"):
                        has_conflicts = file_has_conflicts = True
                        break
                if file_has_conflicts:
                    break

            if not file_has_conflicts:
                has_changes = True

        return OperationResult(has_conflicts=has_conflicts, has_changes=has_changes, url=url)
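
The conflict check above walks every changed file and looks for old lines that start with "<<<<<<<", the marker Git leaves at the top of a conflict block. A minimal standalone sketch of the same idea, using a hypothetical changed_files dict in place of Critic's changeset objects:

# Minimal sketch, independent of Critic's changeset API: report whether any
# changed file still contains conflict markers and whether any file changed
# cleanly.  changed_files is hypothetical input: path -> list of lines.
def summarize_conflicts(changed_files):
    has_changes = False
    has_conflicts = False
    for path, lines in changed_files.items():
        if any(line.startswith("<<<<<<<") for line in lines):
            has_conflicts = True
        else:
            # A changed file without conflict markers counts as a real change.
            has_changes = True
    return has_conflicts, has_changes

print(summarize_conflicts({
    "a.py": ["<<<<<<< HEAD", "x = 1", "=======", "x = 2", ">>>>>>> other"],
    "b.py": ["y = 3"],
}))  # -> (True, True)
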
Example #2
File: checkrebase.py Project: Aessy/critic
    def process(self, db, user, review_id, new_head_sha1, new_upstream_sha1):
        review = dbutils.Review.fromId(db, review_id)
        upstreams = log.commitset.CommitSet(review.branch.getCommits(db)).getFilteredTails(review.repository)

        if len(upstreams) > 1:
            return OperationResult(rebase_supported=False)

        old_head = review.branch.getHead(db)
        old_upstream = gitutils.Commit.fromSHA1(db, review.repository, upstreams.pop())
        new_head = gitutils.Commit.fromSHA1(db, review.repository, new_head_sha1)
        new_upstream = gitutils.Commit.fromSHA1(db, review.repository, new_upstream_sha1)

        equivalent_merge = reviewing.rebase.createEquivalentMergeCommit(
            db, review, user, old_head, old_upstream, new_head, new_upstream)

        changesets = changeset_utils.createChangeset(
            db, user, review.repository, equivalent_merge, do_highlight=False)

        for changeset in changesets:
            if changeset.files:
                has_conflicts = True
                break
        else:
            has_conflicts = False

        return OperationResult(rebase_supported=True,
                               has_conflicts=has_conflicts,
                               merge_sha1=equivalent_merge.sha1)
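
The has_conflicts flag above comes out of Python's for/else: the else branch runs only when the loop finishes without hitting break. A self-contained illustration of that pattern:

# for/else in isolation: "found" is True only if some group is non-empty,
# the same shape used above to decide has_conflicts.
def any_nonempty(groups):
    for group in groups:
        if group:
            found = True
            break
    else:
        found = False
    return found

print(any_nonempty([[], [], [1]]))  # True
print(any_nonempty([[], []]))       # False
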
Example #3
    def __init__(self, db, parent, child, file_ids=None, commits=None, changeset_cache=None):
        self.parent = parent
        self.child = child
        self.commitset = CommitSet.fromRange(db, parent, child, commits=commits)
        self.changesets = {}

        if not self.commitset: raise LineAnnotator.NotSupported

        commits = []

        if not changeset_cache: changeset_cache = {}

        for commit in self.commitset:
            if len(commit.parents) > 1: raise LineAnnotator.NotSupported

            if commit in changeset_cache:
                self.changesets[commit.sha1] = changeset_cache[commit]
            else:
                commits.append(commit)

        for changeset in loadChangesetsForCommits(db, parent.repository, commits, filtered_file_ids=file_ids):
            self.changesets[changeset.child.sha1] = changeset_cache[changeset.child] = changeset

        for commit in set(self.commitset) - set(self.changesets.keys()):
            changesets = createChangeset(db, None, commit.repository, commit=commit, filtered_file_ids=file_ids, do_highlight=False)
            assert len(changesets) == 1
            self.changesets[commit.sha1] = changeset_cache[commit] = changesets[0]

        self.commits = [parent]
        self.commit_index = { parent.sha1: 0 }

        for commit in self.commitset:
            self.commit_index[commit.sha1] = len(self.commits)
            self.commits.append(commit)
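
The constructor above answers from the shared changeset_cache first, batch-loads the misses with loadChangesetsForCommits, and creates any changesets still missing one at a time. A generic sketch of that cache/batch/fallback ordering; batch_load and load_one are hypothetical stand-ins for those two calls:

# Generic caching pattern, not Critic's API: serve hits from the cache,
# batch-load the misses, then load any stragglers individually, filling the
# cache as results arrive.
def resolve(keys, cache, batch_load, load_one):
    results = {}
    misses = []
    for key in keys:
        if key in cache:
            results[key] = cache[key]
        else:
            misses.append(key)
    for key, value in batch_load(misses).items():
        results[key] = cache[key] = value
    for key in set(keys) - set(results):
        results[key] = cache[key] = load_one(key)
    return results
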
Example #4
    def process(self, db, user, review_id, new_head_sha1, new_upstream_sha1):
        review = dbutils.Review.fromId(db, review_id)
        upstreams = log.commitset.CommitSet(
            review.branch.getCommits(db)).getFilteredTails(review.repository)

        if len(upstreams) > 1:
            return OperationResult(rebase_supported=False)

        old_head = review.branch.getHead(db)
        old_upstream = gitutils.Commit.fromSHA1(db, review.repository,
                                                upstreams.pop())
        new_head = gitutils.Commit.fromSHA1(db, review.repository,
                                            new_head_sha1)
        new_upstream = gitutils.Commit.fromSHA1(db, review.repository,
                                                new_upstream_sha1)

        equivalent_merge = reviewing.rebase.createEquivalentMergeCommit(
            db, review, user, old_head, old_upstream, new_head, new_upstream)

        changesets = changeset_utils.createChangeset(db,
                                                     user,
                                                     review.repository,
                                                     equivalent_merge,
                                                     do_highlight=False)

        for changeset in changesets:
            if changeset.files:
                has_conflicts = True
                break
        else:
            has_conflicts = False

        return OperationResult(rebase_supported=True,
                               has_conflicts=has_conflicts,
                               merge_sha1=equivalent_merge.sha1)
Example #5
def getReviewersAndWatchers(db, repository, commits=None, changesets=None, reviewfilters=None,
                            applyfilters=True, applyparentfilters=False):
    """getReviewersAndWatchers(db, commits=None, changesets=None) -> tuple

Returns a tuple containing two dictionaries, each mapping file IDs to
dictionaries mapping user IDs to sets of changeset IDs.  The first dictionary
defines the reviewers of each file, the second dictionary defines the watchers of
each file.  For any changes in a file for which no reviewer is identified, None
is used as a key in the dictionary instead of a real user ID."""

    if changesets is None:
        changesets = []
        changeset_utils.createChangesets(db, repository, commits)
        for commit in commits:
            changesets.extend(changeset_utils.createChangeset(db, None, repository, commit, do_highlight=False))

    cursor = db.cursor()

    filters = Filters()
    filters.setFiles(db, list(getFileIdsFromChangesets(changesets)))

    if applyfilters:
        filters.load(db, repository=repository, recursive=applyparentfilters)

    if reviewfilters:
        filters.addFilters(reviewfilters)

    reviewers = {}
    watchers = {}

    for changeset in changesets:
        author_user_ids = changeset.child.author.getUserIds(db) if changeset.child else set()

        cursor.execute("SELECT DISTINCT file FROM fileversions WHERE changeset=%s", (changeset.id,))

        for (file_id,) in cursor:
            reviewers_found = False

            for user_id, (filter_type, delegate) in filters.listUsers(file_id).items():
                if filter_type == 'reviewer':
                    if user_id not in author_user_ids:
                        reviewer_user_ids = [user_id]
                    elif delegate:
                        reviewer_user_ids = []
                        for delegate_user_name in delegate.split(","):
                            delegate_user = dbutils.User.fromName(db, delegate_user_name)
                            reviewer_user_ids.append(delegate_user.id)
                    else:
                        reviewer_user_ids = []

                    for reviewer_user_id in reviewer_user_ids:
                        reviewers.setdefault(file_id, {}).setdefault(reviewer_user_id, set()).add(changeset.id)
                        reviewers_found = True
                else:
                    watchers.setdefault(file_id, {}).setdefault(user_id, set()).add(changeset.id)

            if not reviewers_found:
                reviewers.setdefault(file_id, {}).setdefault(None, set()).add(changeset.id)

    return reviewers, watchers
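
The docstring above describes the return value as a mapping of file ID to user ID to a set of changeset IDs, with None standing in for changes that have no reviewer. A small sketch of consuming that structure, using made-up IDs:

# Hypothetical data shaped like the reviewers dictionary described above:
# {file_id: {user_id_or_None: set(changeset_ids)}}.
reviewers = {
    17: {42: set([301, 302]), None: set([303])},
    23: {None: set([301])},
}

for file_id, per_user in reviewers.items():
    for user_id, changeset_ids in per_user.items():
        if user_id is None:
            print("file %d: changesets %s have no reviewer" % (file_id, sorted(changeset_ids)))
        else:
            print("file %d: user %d reviews changesets %s" % (file_id, user_id, sorted(changeset_ids)))
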
Example #6
File: utils.py Project: Haster2004/critic
def getReviewersAndWatchers(db, repository, commits=None, changesets=None, reviewfilters=None,
                            applyfilters=True, applyparentfilters=False):
    """getReviewersAndWatchers(db, commits=None, changesets=None) -> tuple

Returns a tuple containing two dictionaries, each mapping file IDs to
dictionaries mapping user IDs to sets of changeset IDs.  The first dictionary
defines the reviewers of each file, the second dictionary defines the watchers of
each file.  For any changes in a file for which no reviewer is identified, None
is used as a key in the dictionary instead of a real user ID."""

    if changesets is None:
        changesets = []
        changeset_utils.createChangesets(db, repository, commits)
        for commit in commits:
            changesets.extend(changeset_utils.createChangeset(db, None, repository, commit, do_highlight=False))

    cursor = db.cursor()

    filters = Filters()
    filters.setFiles(db, list(getFileIdsFromChangesets(changesets)))

    if applyfilters:
        filters.load(db, repository=repository, recursive=applyparentfilters)

    if reviewfilters:
        filters.addFilters(reviewfilters)

    reviewers = {}
    watchers = {}

    for changeset in changesets:
        author_user_ids = changeset.child.author.getUserIds(db) if changeset.child else set()

        cursor.execute("SELECT DISTINCT file FROM fileversions WHERE changeset=%s", (changeset.id,))

        for (file_id,) in cursor:
            reviewers_found = False

            for user_id, (filter_type, delegate) in filters.listUsers(file_id).items():
                if filter_type == 'reviewer':
                    if user_id not in author_user_ids:
                        reviewer_user_ids = [user_id]
                    elif delegate:
                        reviewer_user_ids = []
                        for delegate_user_name in delegate.split(","):
                            delegate_user = dbutils.User.fromName(db, delegate_user_name)
                            reviewer_user_ids.append(delegate_user.id)
                    else:
                        reviewer_user_ids = []

                    for reviewer_user_id in reviewer_user_ids:
                        reviewers.setdefault(file_id, {}).setdefault(reviewer_user_id, set()).add(changeset.id)
                        reviewers_found = True
                else:
                    watchers.setdefault(file_id, {}).setdefault(user_id, set()).add(changeset.id)

            if not reviewers_found:
                reviewers.setdefault(file_id, {}).setdefault(None, set()).add(changeset.id)

    return reviewers, watchers
Example #7
def createChangesetsForCommits(db,
                               commits,
                               silent_if_empty=set(),
                               full_merges=set(),
                               replayed_rebases={}):
    repository = commits[0].repository
    changesets = []
    silent_commits = set()
    silent_changesets = set()

    simple_commits = []
    for commit in commits:
        if commit not in full_merges and commit not in replayed_rebases:
            simple_commits.append(commit)
    if simple_commits:
        changeset_utils.createChangesets(db, repository, simple_commits)

    for commit in commits:
        if commit in full_merges:
            commit_changesets = changeset_utils.createFullMergeChangeset(
                db, user, repository, commit, do_highlight=False)
        elif commit in replayed_rebases:
            commit_changesets = changeset_utils.createChangeset(
                db,
                user,
                repository,
                from_commit=commit,
                to_commit=replayed_rebases[commit],
                conflicts=True,
                do_highlight=False)
        else:
            commit_changesets = changeset_utils.createChangeset(
                db, user, repository, commit, do_highlight=False)

        if commit in silent_if_empty:
            for commit_changeset in commit_changesets:
                if commit_changeset.files:
                    break
            else:
                silent_commits.add(commit)
                silent_changesets.update(commit_changesets)

        changesets.extend(commit_changesets)

    return changesets, silent_commits, silent_changesets
Example #8
File: utils.py Project: Haster2004/critic
def createChangesetsForCommits(db, commits, silent_if_empty=set(), full_merges=set(), replayed_rebases={}):
    repository = commits[0].repository
    changesets = []
    silent_commits = set()
    silent_changesets = set()

    simple_commits = []
    for commit in commits:
        if commit not in full_merges and commit not in replayed_rebases:
            simple_commits.append(commit)
    if simple_commits:
        changeset_utils.createChangesets(db, repository, simple_commits)

    for commit in commits:
        if commit in full_merges:
            commit_changesets = changeset_utils.createFullMergeChangeset(
                db, user, repository, commit, do_highlight=False)
        elif commit in replayed_rebases:
            commit_changesets = changeset_utils.createChangeset(
                db, user, repository,
                from_commit=commit, to_commit=replayed_rebases[commit],
                conflicts=True, do_highlight=False)
        else:
            commit_changesets = changeset_utils.createChangeset(
                db, user, repository, commit, do_highlight=False)

        if commit in silent_if_empty:
            for commit_changeset in commit_changesets:
                if commit_changeset.files:
                    break
            else:
                silent_commits.add(commit)
                silent_changesets.update(commit_changesets)

        changesets.extend(commit_changesets)

    return changesets, silent_commits, silent_changesets
Example #9
    def __getChanges(self, from_commit, to_commit):
        changesets = createChangeset(self.db,
                                     user=None,
                                     repository=self.review.repository,
                                     from_commit=from_commit,
                                     to_commit=to_commit,
                                     filtered_file_ids=set([self.file_id]),
                                     do_highlight=False)

        assert len(changesets) == 1

        if changesets[0].files:
            assert changesets[0].files[0].id == self.file_id
            return changesets[0].files[0].chunks
        else:
            return None
Example #10
    def __getChanges(self, from_commit, to_commit):
        changesets = createChangeset(self.db,
                                     user=None,
                                     repository=self.review.repository,
                                     from_commit=from_commit,
                                     to_commit=to_commit,
                                     filtered_file_ids=set([self.file_id]),
                                     do_highlight=False)

        assert len(changesets) == 1

        if changesets[0].files:
            changed_file = changesets[0].files[0]
            assert changed_file.id == self.file_id
            removed = changed_file.new_sha1 == "0" * 40
            added = changed_file.old_sha1 == "0" * 40
            return changesets[0].files[0].chunks, removed, added
        else:
            return None, False, False
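
The added and removed flags rely on Git's all-zero object name meaning "no blob": an all-zero old_sha1 marks a file that was added in the change, an all-zero new_sha1 one that was removed. The same test in isolation:

# Classify a file change from its old/new blob SHA-1s, the way the flags
# above are derived.  The SHA-1 values here are made up.
NULL_SHA1 = "0" * 40

def classify(old_sha1, new_sha1):
    if old_sha1 == NULL_SHA1:
        return "added"
    if new_sha1 == NULL_SHA1:
        return "removed"
    return "modified"

print(classify(NULL_SHA1, "a" * 40))  # added
print(classify("a" * 40, NULL_SHA1))  # removed
print(classify("a" * 40, "b" * 40))   # modified
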
Example #11
def assignChanges(db,
                  user,
                  review,
                  commits=None,
                  changesets=None,
                  update=False):
    cursor = db.cursor()

    if changesets is None:
        assert commits is not None

        changesets = []

        for commit in commits:
            changesets.extend(
                changeset_utils.createChangeset(db, user, review.repository,
                                                commit))

    applyfilters = review.applyfilters
    applyparentfilters = review.applyparentfilters

    reviewers, watchers = getReviewersAndWatchers(
        db,
        review.repository,
        changesets=changesets,
        reviewfilters=review.getReviewFilters(db),
        applyfilters=applyfilters,
        applyparentfilters=applyparentfilters)

    cursor.execute("SELECT uid FROM reviewusers WHERE review=%s",
                   (review.id, ))

    reviewusers = set([user_id for (user_id, ) in cursor])
    reviewusers_values = set()
    reviewuserfiles_values = set()

    reviewuserfiles_existing = {}

    if update:
        cursor.execute(
            """SELECT reviewuserfiles.uid, reviewfiles.changeset, reviewfiles.file
                            FROM reviewfiles
                            JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id)
                           WHERE reviewfiles.review=%s""", (review.id, ))
        for user_id, changeset_id, file_id in cursor:
            reviewuserfiles_existing[(user_id, changeset_id, file_id)] = True

    new_reviewers = set()
    new_watchers = set()

    cursor.execute(
        """SELECT DISTINCT uid
                        FROM reviewfiles
                        JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id)
                       WHERE review=%s""", (review.id, ))
    old_reviewers = set([user_id for (user_id, ) in cursor])

    for file_id, file_users in reviewers.items():
        for user_id, user_changesets in file_users.items():
            if user_id:
                new_reviewers.add(user_id)

                if user_id not in reviewusers:
                    reviewusers.add(user_id)
                    reviewusers_values.add((review.id, user_id))
                for changeset_id in user_changesets:
                    if (user_id, changeset_id,
                            file_id) not in reviewuserfiles_existing:
                        reviewuserfiles_values.add(
                            (user_id, review.id, changeset_id, file_id))

    for file_id, file_users in watchers.items():
        for user_id, user_changesets in file_users.items():
            if user_id:
                if user_id not in reviewusers:
                    new_watchers.add(user_id)
                    reviewusers.add(user_id)
                    reviewusers_values.add((review.id, user_id))

    new_reviewers -= old_reviewers
    new_watchers -= old_reviewers | new_reviewers

    cursor.executemany("INSERT INTO reviewusers (review, uid) VALUES (%s, %s)",
                       reviewusers_values)
    cursor.executemany(
        "INSERT INTO reviewuserfiles (file, uid) SELECT id, %s FROM reviewfiles WHERE review=%s AND changeset=%s AND file=%s",
        reviewuserfiles_values)

    if configuration.extensions.ENABLED:
        cursor.execute(
            """SELECT id, uid, extension, path
                            FROM extensionhookfilters
                           WHERE repository=%s""", (review.repository.id, ))

        rows = cursor.fetchall()

        if rows:
            if commits is None:
                commits = set()
                for changeset in changesets:
                    commits.add(changeset.child)
                commits = list(commits)

            filters = Filters()
            filters.setFiles(db, list(getFileIdsFromChangesets(changesets)))

            for filter_id, user_id, extension_id, path in rows:
                filters.addFilter(user_id, path, None, None, filter_id)

            for filter_id, file_ids in filters.matched_files.items():
                extensions.role.filterhook.queueFilterHookEvent(
                    db, filter_id, review, user, commits, file_ids)

    return new_reviewers, new_watchers
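
The set arithmetic near the end (new_reviewers -= old_reviewers, then new_watchers -= old_reviewers | new_reviewers) keeps the two result sets disjoint and limited to users that are genuinely new. A tiny illustration with made-up user IDs:

# Users 1 and 2 already review; 3 would become a reviewer, 3 and 4 would
# become watchers.  Only 3 is reported as a new reviewer and only 4 as a
# new watcher.
old_reviewers = set([1, 2])
candidate_reviewers = set([2, 3])
candidate_watchers = set([3, 4])

new_reviewers = candidate_reviewers - old_reviewers
new_watchers = candidate_watchers - (old_reviewers | new_reviewers)

print(sorted(new_reviewers))  # [3]
print(sorted(new_watchers))   # [4]
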
Example #12
    def process(self,
                db,
                user,
                review_id,
                merge_sha1=None,
                new_head_sha1=None,
                new_upstream_sha1=None):
        review = dbutils.Review.fromId(db, review_id)

        if merge_sha1 is not None:
            merge = gitutils.Commit.fromSHA1(db, review.repository, merge_sha1)

            changesets = changeset_utils.createChangeset(db,
                                                         user,
                                                         review.repository,
                                                         merge,
                                                         conflicts=True,
                                                         do_highlight=False)

            url = "/showcommit?repository=%d&sha1=%s&conflicts=yes" % (
                review.repository.id, merge.sha1)
        else:
            upstreams = review.getCommitSet(db).getFilteredTails(
                review.repository)

            if len(upstreams) > 1:
                return OperationResult(rebase_supported=False)

            old_head = review.branch.getHead(db)
            old_upstream = gitutils.Commit.fromSHA1(db, review.repository,
                                                    upstreams.pop())
            new_head = gitutils.Commit.fromSHA1(db, review.repository,
                                                new_head_sha1)
            new_upstream = gitutils.Commit.fromSHA1(db, review.repository,
                                                    new_upstream_sha1)

            replay = reviewing.rebase.replayRebase(db, review, user, old_head,
                                                   old_upstream, new_head,
                                                   new_upstream)

            changesets = changeset_utils.createChangeset(db,
                                                         user,
                                                         review.repository,
                                                         from_commit=replay,
                                                         to_commit=new_head,
                                                         conflicts=True,
                                                         do_highlight=False)

            url = "/showcommit?repository=%d&from=%s&to=%s&conflicts=yes" % (
                review.repository.id, replay.sha1, new_head.sha1)

        has_changes = False
        has_conflicts = False

        for changed_file in changesets[0].files:
            changed_file.loadOldLines()

            file_has_conflicts = False

            for chunk in changed_file.chunks:
                lines = changed_file.getOldLines(chunk)
                for line in lines:
                    if line.startswith("<<<<<<<"):
                        has_conflicts = file_has_conflicts = True
                        break
                if file_has_conflicts:
                    break

            if not file_has_conflicts:
                has_changes = True

        return OperationResult(has_conflicts=has_conflicts,
                               has_changes=has_changes,
                               url=url)
Example #13
def renderCreateReview(req, db, user):
    if user.isAnonymous(): raise page.utils.NeedLogin(req)

    repository = req.getParameter("repository", filter=gitutils.Repository.FromParameter(db), default=None)
    applyparentfilters = req.getParameter("applyparentfilters", "yes" if user.getPreference(db, 'review.applyUpstreamFilters') else "no") == "yes"

    cursor = db.cursor()

    if req.method == "POST":
        data = json_decode(req.read())

        summary = data.get("summary")
        description = data.get("description")
        review_branch_name = data.get("review_branch_name")
        commit_ids = data.get("commit_ids")
        commit_sha1s = data.get("commit_sha1s")
    else:
        summary = req.getParameter("summary", None)
        description = req.getParameter("description", None)
        review_branch_name = req.getParameter("reviewbranchname", None)

        commit_ids = None
        commit_sha1s = None

        commits_arg = req.getParameter("commits", None)
        remote = req.getParameter("remote", None)
        upstream = req.getParameter("upstream", "master")
        branch_name = req.getParameter("branch", None)

        if commits_arg:
            try: commit_ids = map(int, commits_arg.split(","))
            except: commit_sha1s = [repository.revparse(ref) for ref in commits_arg.split(",")]
        elif branch_name:
            cursor.execute("""SELECT commit
                                FROM reachable
                                JOIN branches ON (branch=id)
                               WHERE repository=%s
                                 AND name=%s""",
                           (repository.id, branch_name))
            commit_ids = [commit_id for (commit_id,) in cursor]

            if len(commit_ids) > configuration.limits.MAXIMUM_REVIEW_COMMITS:
                raise page.utils.DisplayMessage(
                    "Too many commits!",
                    (("<p>The branch <code>%s</code> contains %d commits.  Reviews can"
                      "be created from branches that contain at most %d commits.</p>"
                      "<p>This limit can be adjusted by modifying the system setting"
                      "<code>configuration.limits.MAXIMUM_REVIEW_COMMITS</code>.</p>")
                     % (htmlutils.htmlify(branch_name), len(commit_ids),
                        configuration.limits.MAXIMUM_REVIEW_COMMITS)),
                    html=True)
        else:
            return renderSelectSource(req, db, user)

    req.content_type = "text/html; charset=utf-8"

    if commit_ids:
        commits = [gitutils.Commit.fromId(db, repository, commit_id) for commit_id in commit_ids]
    elif commit_sha1s:
        commits = [gitutils.Commit.fromSHA1(db, repository, commit_sha1) for commit_sha1 in commit_sha1s]
    else:
        commits = []

    if not commit_ids:
        commit_ids = [commit.getId(db) for commit in commits]
    if not commit_sha1s:
        commit_sha1s = [commit.sha1 for commit in commits]

    if summary is None:
        if len(commits) == 1:
            summary = commits[0].summary()
        else:
            summary = ""

    if review_branch_name:
        invalid_branch_name = "false"
        default_branch_name = review_branch_name
    else:
        invalid_branch_name = htmlutils.jsify(user.name + "/")
        default_branch_name = user.name + "/"

        match = re.search("(?:^|[Ff]ix(?:e[ds])?(?: +for)?(?: +bug)? +)([A-Z][A-Z0-9]+-[0-9]+)", summary)
        if match:
            invalid_branch_name = "false"
            default_branch_name = htmlutils.htmlify(match.group(1))

    changesets = []
    changeset_utils.createChangesets(db, repository, commits)
    for commit in commits:
        changesets.extend(changeset_utils.createChangeset(db, None, repository, commit, do_highlight=False))
    changeset_ids = [changeset.id for changeset in changesets]

    all_reviewers, all_watchers = reviewing.utils.getReviewersAndWatchers(
        db, repository, changesets=changesets, applyparentfilters=applyparentfilters)

    document = htmlutils.Document(req)
    html = document.html()
    head = html.head()

    document.addInternalScript(user.getJS(db))

    if branch_name:
        document.addInternalScript("var fromBranch = %s;" % htmlutils.jsify(branch_name))

    if remote:
        document.addInternalScript("var trackedbranch = { remote: %s, name: %s };" % (htmlutils.jsify(remote), htmlutils.jsify(branch_name)))

    head.title().text("Create Review")

    body = html.body(onload="document.getElementById('branch_name').focus()")

    page.utils.generateHeader(body, db, user, lambda target: target.button(onclick="submitReview();").text("Submit Review"))

    document.addExternalStylesheet("resource/createreview.css")
    document.addExternalScript("resource/createreview.js")
    document.addExternalScript("resource/reviewfilters.js")
    document.addExternalScript("resource/autocomplete.js")

    document.addInternalScript("""
var invalid_branch_name = %s;
var review = { commit_ids: %r,
               commit_sha1s: %r,
               changeset_ids: %r };""" % (invalid_branch_name, commit_ids, commit_sha1s, changeset_ids))
    document.addInternalScript(repository.getJS())

    main = body.div("main")

    table = main.table("basic paleyellow", align="center")
    table.tr().td("h1", colspan=3).h1().text("Create Review")

    row = table.tr("line")
    row.td("heading").text("Branch Name:")
    row.td("value").text("r/").input("value", id="branch_name", value=default_branch_name)
    row.td("status")

    row = table.tr()

    if not remote:
        row.td("help", colspan=3).div().text("""\
This is the main identifier of the review.  It will be created in the review
repository to contain the commits below.  Reviewers can fetch it from there, and
additional commits can be added to the review later by pushing them to this
branch in the review repository.""")
    else:
        row.td("help", colspan=3).div().text("""\
This is the main identifier of the review.  It will be created in the review
repository to contain the commits below, and reviewers can fetch it from there.""")

    if remote:
        row = table.tr("line")
        row.td("heading").text("Tracked Branch:")
        value = row.td("value")
        value.code("branch inset").text(branch_name, linkify=linkify.Context(remote=remote))
        value.text(" in ")
        value.code("remote inset").text(remote, linkify=linkify.Context())
        row.td("status")

        row = table.tr()
        row.td("help", colspan=3).div().text("""\
Rather than pushing directly to the review branch in Critic's repository to add
commits to the review, you will be pushing to this branch (in a separate
repository) from which Critic will fetch commits and add them to the review
automatically.""")

    row = table.tr("line")
    row.td("heading").text("Summary:")
    row.td("value").input("value", id="summary", value=summary)
    row.td("status")

    row = table.tr()
    row.td("help", colspan=3).div().text("""\
The summary should be a short summary of the changes in the review.  It will
appear in the subject of all emails sent about the review.
""")

    row = table.tr("line description")
    row.td("heading").text("Description:")
    textarea = row.td("value").textarea(id="description", rows=12)
    textarea.preformatted()
    if description: textarea.text(description)
    row.td("status")

    row = table.tr()
    row.td("help", colspan=3).div().text("""\
The description should describe the changes to be reviewed.  It is usually fine
to leave the description empty, since the commit messages are also available in
the review.
""")

    generateReviewersAndWatchersTable(db, repository, main, all_reviewers, all_watchers, applyparentfilters=applyparentfilters)

    row = table.tr("line recipients")
    row.td("heading").text("Recipient List:")
    cell = row.td("value", colspan=2).preformatted()
    cell.span("mode").text("Everyone")
    cell.span("users")
    cell.text(".")
    buttons = cell.div("buttons")
    buttons.button(onclick="editRecipientList();").text("Edit Recipient List")

    row = table.tr()
    row.td("help", colspan=3).div().text("""\
The basic recipient list for e-mails sent about the review.
""")

    log.html.render(db, main, "Commits", commits=commits)

    return document
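
The default branch name above falls back to an issue-tracker key (e.g. "ABC-123") found in the summary via the regular expression in the middle of this example. That heuristic in isolation, with made-up summaries:

# The pattern is copied from renderCreateReview above: an upper-case issue
# key either at the start of the summary or after "fix"/"fixes"/"fixed",
# optionally followed by "for" and/or "bug".
import re

PATTERN = "(?:^|[Ff]ix(?:e[ds])?(?: +for)?(?: +bug)? +)([A-Z][A-Z0-9]+-[0-9]+)"

for summary in ("CRITIC-42 handle empty diffs",
                "Fixes bug ABC-123 in the parser",
                "no issue key here"):
    match = re.search(PATTERN, summary)
    print("%s -> %s" % (summary, match.group(1) if match else None))
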
Example #14
        title += "%s (%s)" % (commit.niceSummary(), commit.describe(db))
    else:
        title += "%s..%s" % (from_commit.describe(db), to_commit.describe(db))

    document.setTitle(title)

    if review_filter == "pending":
        document.setLink("next", "javascript:submitChanges();")

    commits = None
    rebases = None

    profiler.check("prologue")

    if from_commit and to_commit:
        changesets = changeset_utils.createChangeset(db, user, repository, from_commit=from_commit, to_commit=to_commit, rescan=rescan, reanalyze=reanalyze, filtered_file_ids=file_ids)
        assert len(changesets) == 1

        if review and review_filter in ("reviewable", "relevant", "files"):
            cursor.execute("""SELECT old_head, new_head, new_upstream, uid, branch
                                FROM reviewrebases
                               WHERE review=%s AND new_head IS NOT NULL""",
                           (review.id,))

            all_rebases = [(None,
                            gitutils.Commit.fromId(db, repository, old_head),
                            gitutils.Commit.fromId(db, repository, new_head),
                            dbutils.User.fromId(db, user_id),
                            gitutils.Commit.fromId(db, repository, new_upstream) if new_upstream is not None else None,
                            branch_name)
                           for old_head, new_head, new_upstream, user_id, branch_name in cursor]
Example #15
def addCommitsToReview(db,
                       user,
                       review,
                       commits,
                       new_review=False,
                       commitset=None,
                       pending_mails=None,
                       silent_if_empty=set(),
                       full_merges=set(),
                       tracked_branch=False):
    cursor = db.cursor()

    if not new_review:
        import index

        new_commits = log_commitset.CommitSet(commits)
        old_commits = log_commitset.CommitSet(review.branch.commits)
        merges = new_commits.getMerges()

        for merge in merges:
            # We might have stripped it in a previous pass.
            if not merge in new_commits: continue

            tails = filter(
                lambda sha1: sha1 not in old_commits and sha1 not in merge.
                parents, new_commits.getTailsFrom(merge))

            if tails:
                if tracked_branch:
                    raise index.IndexException, """\
Merge %s adds merged-in commits.  Please push the merge manually
and follow the instructions.""" % merge.sha1[:8]

                cursor.execute(
                    "SELECT id, confirmed, tail FROM reviewmergeconfirmations WHERE review=%s AND uid=%s AND merge=%s",
                    (review.id, user.id, merge.getId(db)))

                row = cursor.fetchone()

                if not row or not row[1]:
                    if not row:
                        cursor.execute(
                            "INSERT INTO reviewmergeconfirmations (review, uid, merge) VALUES (%s, %s, %s) RETURNING id",
                            (review.id, user.id, merge.getId(db)))
                        confirmation_id = cursor.fetchone()[0]

                        merged = set()

                        for tail_sha1 in tails:
                            children = new_commits.getChildren(tail_sha1)

                            while children:
                                child = children.pop()
                                if child not in merged and new_commits.isAncestorOf(
                                        child, merge):
                                    merged.add(child)
                                    children.update(
                                        new_commits.getChildren(child) -
                                        merged)

                        merged_values = [(confirmation_id, commit.getId(db))
                                         for commit in merged]
                        cursor.executemany(
                            "INSERT INTO reviewmergecontributions (id, merged) VALUES (%s, %s)",
                            merged_values)
                        db.commit()
                    else:
                        confirmation_id = row[0]

                    message = "Merge %s adds merged-in commits:" % merge.sha1[:
                                                                              8]

                    for tail_sha1 in tails:
                        for parent_sha1 in merge.parents:
                            if parent_sha1 in new_commits:
                                parent = new_commits.get(parent_sha1)
                                if tail_sha1 in new_commits.getTailsFrom(
                                        parent):
                                    message += "\n  %s..%s" % (tail_sha1[:8],
                                                               parent_sha1[:8])

                    message += """
Please confirm that this is intended by loading:
  %s/confirmmerge?id=%d""" % (dbutils.getURLPrefix(db), confirmation_id)

                    raise index.IndexException, message
                elif row[2] is not None:
                    if row[2] == merge.getId(db):
                        cursor.execute(
                            "SELECT merged FROM reviewmergecontributions WHERE id=%s",
                            (row[0], ))

                        for (merged_id, ) in cursor:
                            merged = gitutils.Commit.fromId(
                                db, review.repository, merged_id)
                            if merged.sha1 in merge.parents:
                                new_commits = new_commits.without([merged])
                                break
                    else:
                        tail = gitutils.Commit.fromId(db, review.repository,
                                                      row[2])
                        cut = [
                            gitutils.Commit.fromSHA1(db, review.repository,
                                                     sha1)
                            for sha1 in tail.parents if sha1 in new_commits
                        ]
                        new_commits = new_commits.without(cut)

        if commitset:
            commitset &= set(new_commits)
            commits = [commit for commit in commits if commit in commitset]

    changesets = []
    silent_changesets = set()

    simple_commits = []
    for commit in commits:
        if commit not in full_merges:
            simple_commits.append(commit)
    if simple_commits:
        changeset_utils.createChangesets(db, review.repository, simple_commits)

    for commit in commits:
        if commit in full_merges:
            commit_changesets = changeset_utils.createFullMergeChangeset(
                db, user, review.repository, commit)
        else:
            commit_changesets = changeset_utils.createChangeset(
                db, user, review.repository, commit)

        if commit in silent_if_empty:
            for commit_changeset in commit_changesets:
                if commit_changeset.files:
                    break
            else:
                silent_changesets.update(commit_changesets)

        changesets.extend(commit_changesets)

    if not new_review:
        print "Adding %d commit%s to the review at:\n  %s" % (
            len(commits), len(commits) > 1 and "s" or "", review.getURL(db))

    reviewchangesets_values = [(review.id, changeset.id)
                               for changeset in changesets]

    cursor.executemany(
        """INSERT INTO reviewchangesets (review, changeset) VALUES (%s, %s)""",
        reviewchangesets_values)
    cursor.executemany(
        """INSERT INTO reviewfiles (review, changeset, file, deleted, inserted)
                               SELECT reviewchangesets.review, reviewchangesets.changeset, fileversions.file, COALESCE(SUM(chunks.deleteCount), 0), COALESCE(SUM(chunks.insertCount), 0)
                                 FROM reviewchangesets
                                 JOIN fileversions USING (changeset)
                      LEFT OUTER JOIN chunks USING (changeset, file)
                                WHERE reviewchangesets.review=%s
                                  AND reviewchangesets.changeset=%s
                             GROUP BY reviewchangesets.review, reviewchangesets.changeset, fileversions.file""",
        reviewchangesets_values)

    new_reviewers, new_watchers = assignChanges(db,
                                                user,
                                                review,
                                                changesets=changesets)

    cursor.execute(
        "SELECT include FROM reviewrecipientfilters WHERE review=%s AND uid=0",
        (review.id, ))

    try:
        opt_out = cursor.fetchone()[0] is True
    except:
        opt_out = True

    if not new_review:
        for user_id in new_reviewers:
            new_reviewuser = dbutils.User.fromId(db, user_id)
            print "Added reviewer: %s <%s>" % (new_reviewuser.fullname,
                                               new_reviewuser.email)

            if opt_out:
                # If the user has opted out from receiving e-mails about this
                # review while only watching it, clear the opt-out now that the
                # user becomes a reviewer.
                cursor.execute(
                    "DELETE FROM reviewrecipientfilters WHERE review=%s AND uid=%s AND include=FALSE",
                    (review.id, user_id))

        for user_id in new_watchers:
            new_reviewuser = dbutils.User.fromId(db, user_id)
            print "Added watcher:  %s <%s>" % (new_reviewuser.fullname,
                                               new_reviewuser.email)

        review.incrementSerial(db)

    for changeset in changesets:
        review_comment.updateCommentChains(db, user, review, changeset)

    if pending_mails is None: pending_mails = []

    notify_changesets = filter(
        lambda changeset: changeset not in silent_changesets, changesets)

    if not new_review and notify_changesets:
        recipients = review.getRecipients(db)
        for to_user in recipients:
            pending_mails.extend(
                mail.sendReviewAddedCommits(db,
                                            user,
                                            to_user,
                                            recipients,
                                            review,
                                            notify_changesets,
                                            tracked_branch=tracked_branch))

    mail.sendPendingMails(pending_mails)

    review.reviewers.extend(
        [User.fromId(db, user_id) for user_id in new_reviewers])

    for user_id in new_watchers:
        review.watchers[User.fromId(db, user_id)] = "automatic"

    return True
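
The merge-confirmation block above collects the commits a merge brings in by walking child edges out from each tail and keeping only commits that are ancestors of the merge (the "while children" loop). A sketch of that traversal over a plain adjacency dict; is_ancestor_of is a hypothetical stand-in for CommitSet.isAncestorOf:

# Breadth-first collection of merged-in commits.  children_of maps a sha1 to
# the set of its child sha1s; tails are the starting points outside the
# review branch.
def collect_merged(tails, children_of, is_ancestor_of, merge):
    merged = set()
    for tail in tails:
        pending = set(children_of.get(tail, ()))
        while pending:
            commit = pending.pop()
            if commit not in merged and is_ancestor_of(commit, merge):
                merged.add(commit)
                pending.update(children_of.get(commit, set()) - merged)
    return merged
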
Example #16
def renderShowComments(req, db, user):
    context_lines = req.getParameter("context", user.getPreference(db, "comment.diff.contextLines"), filter=int)

    default_compact = "yes" if user.getPreference(db, "commit.diff.compactMode") else "no"
    compact = req.getParameter("compact", default_compact) == "yes"

    default_tabify = "yes" if user.getPreference(db, "commit.diff.visualTabs") else "no"
    tabify = req.getParameter("tabify", default_tabify) == "yes"

    original = req.getParameter("original", "no") == "yes"

    review_id = req.getParameter("review", filter=int)
    batch_id = req.getParameter("batch", None, filter=int)
    filter = req.getParameter("filter", "all")
    blame = req.getParameter("blame", None)

    profiler = profiling.Profiler()

    review = dbutils.Review.fromId(db, review_id)
    review.repository.enableBlobCache()

    cursor = db.cursor()

    profiler.check("create review")

    if blame is not None:
        blame_user = dbutils.User.fromName(db, blame)

        cursor.execute("""SELECT commentchains.id
                            FROM commentchains
                            JOIN commentchainlines ON (commentchainlines.chain=commentchains.id)
                            JOIN fileversions ON (fileversions.new_sha1=commentchainlines.sha1)
                            JOIN changesets ON (changesets.id=fileversions.changeset)
                            JOIN commits ON (commits.id=changesets.child)
                            JOIN gitusers ON (gitusers.id=commits.author_gituser)
                            JOIN usergitemails USING (email)
                            JOIN reviewchangesets ON (reviewchangesets.changeset=changesets.id AND reviewchangesets.review=commentchains.review)
                           WHERE commentchains.review=%s
                             AND usergitemails.uid=%s
                             AND commentchains.state!='empty'
                             AND (commentchains.state!='draft' OR commentchains.uid=%s)
                        ORDER BY commentchains.file, commentchainlines.commit, commentchainlines.first_line""",
                       (review.id, blame_user.id, user.id))

        include_chain_ids = set([chain_id for (chain_id,) in cursor])

        profiler.check("initial blame filtering")
    else:
        include_chain_ids = None

    if filter == "toread":
        query = "SELECT commentchains.id FROM commentchains JOIN comments ON (comments.chain=commentchains.id) JOIN commentstoread ON (commentstoread.comment=comments.id) LEFT OUTER JOIN commentchainlines ON (commentchainlines.chain=commentchains.id) WHERE review=%s AND commentstoread.uid=%s ORDER BY file, commit, first_line"

        cursor.execute(query, (review.id, user.id))
    else:
        query = "SELECT id FROM commentchains LEFT OUTER JOIN commentchainlines ON (chain=id) WHERE review=%s AND commentchains.state!='empty'"
        arguments = [review.id]

        if filter == "issues":
            query += " AND type='issue' AND (commentchains.state!='draft' OR commentchains.uid=%s)"
            arguments.append(user.id)
        elif filter == "draft-issues":
            query += " AND type='issue' AND commentchains.state='draft' AND commentchains.uid=%s"
            arguments.append(user.id)
        elif filter == "open-issues":
            query += " AND type='issue' AND commentchains.state='open'"
        elif filter == "addressed-issues":
            query += " AND type='issue' AND commentchains.state='addressed'"
        elif filter == "closed-issues":
            query += " AND type='issue' AND commentchains.state='closed'"
        elif filter == "notes":
            query += " AND type='note' AND (commentchains.state!='draft' OR commentchains.uid=%s)"
            arguments.append(user.id)
        elif filter == "draft-notes":
            query += " AND type='note' AND commentchains.state='draft' AND commentchains.uid=%s"
            arguments.append(user.id)
        elif filter == "open-notes":
            query += " AND type='note' AND commentchains.state='open'"
        else:
            query += " AND (commentchains.state!='draft' OR commentchains.uid=%s)"
            arguments.append(user.id)

        if batch_id is not None:
            query += " AND batch=%s"
            arguments.append(batch_id)

        # This ordering is inaccurate if comments apply to the same file but
        # different commits, but then, in that case there isn't really a
        # well-defined natural order either.  Two comments that apply to the
        # same file and commit will at least be ordered by line number, and that's
        # better than nothing.
        query += " ORDER BY file, commit, first_line"

        cursor.execute(query, arguments)

    profiler.check("main query")

    if include_chain_ids is None:
        chain_ids = [chain_id for (chain_id,) in cursor]
    else:
        chain_ids = [chain_id for (chain_id,) in cursor if chain_id in include_chain_ids]

    profiler.check("query result")

    document = htmlutils.Document(req)

    html = document.html()
    head = html.head()
    body = html.body()

    document.addInternalScript(user.getJS(db))
    document.addInternalScript(review.getJS())

    page.utils.generateHeader(body, db, user, lambda target: review_utils.renderDraftItems(db, user, review, target), extra_links=[("r/%d" % review.id, "Back to Review", True)])

    profiler.check("page header")

    target = body.div("main")

    if chain_ids and not user.isAnonymous() and user.name == req.user:
        document.addInternalScript("$(function () { markChainsAsRead([%s]); });" % ", ".join(map(str, chain_ids)))

    #yield document.render(stop=target, pretty=not compact)

    if chain_ids:
        processed = set()

        chains = []
        file_ids = set()
        changesets_files = {}
        changesets = {}

        if blame is not None:
            annotators = {}
            review.branch.loadCommits(db)
            commits = log.commitset.CommitSet(review.branch.commits)

        for chain_id in chain_ids:
            if chain_id in processed:
                continue
            else:
                processed.add(chain_id)

                chain = review_comment.CommentChain.fromId(db, chain_id, user, review=review)
                chains.append(chain)

                if chain.file_id is not None:
                    file_ids.add(chain.file_id)
                    changesets_files.setdefault((chain.first_commit, chain.last_commit), set()).add(chain.file_id)

        profiler.check("load chains")

        changeset_cache = {}

        for (from_commit, to_commit), filtered_file_ids in changesets_files.items():
            changesets[(from_commit, to_commit)] = changeset_utils.createChangeset(db, user, review.repository, from_commit=from_commit, to_commit=to_commit, filtered_file_ids=filtered_file_ids)[0]
            profiler.check("create changesets")

            if blame is not None:
                annotators[(from_commit, to_commit)] = operation.blame.LineAnnotator(db, from_commit, to_commit, file_ids=file_ids, commits=commits, changeset_cache=changeset_cache)
                profiler.check("create annotators")

        for chain in chains:
            if blame is not None and chain.file_id is not None:
                changeset = changesets[(chain.first_commit, chain.last_commit)]
                annotator = annotators[(chain.first_commit, chain.last_commit)]

                offset, count = chain.lines_by_sha1[changeset.getFile(chain.file_id).new_sha1]

                if not annotator.annotate(chain.file_id, offset, offset + count - 1, check_user=blame_user):
                    continue

            profiler.check("detailed blame filtering")

            if chain.file_id is not None:
                from_commit, to_commit = review_html.getCodeCommentChainChangeset(db, chain, original)
                changeset = changesets.get((from_commit, to_commit))
            else:
                changeset = None

            review_html.renderCommentChain(db, target, user, review, chain,
                                           context_lines=context_lines,
                                           compact=compact,
                                           tabify=tabify,
                                           original=original,
                                           changeset=changeset,
                                           linkify=linkify.Context(db=db, request=req, review=review))

            profiler.check("rendering")

            yield document.render(stop=target, pretty=not compact) + "<script>console.log((new Date).toString());</script>"

            profiler.check("transfer")

        page.utils.renderShortcuts(target, "showcomments")
    else:
        target.h1(align="center").text("No comments.")

    profiler.output(db, user, document)

    yield document.render(pretty=not compact)
Example #17
File: utils.py Project: ahockersten/critic
def addCommitsToReview(db, user, review, commits, new_review=False, commitset=None, pending_mails=None, silent_if_empty=set(), full_merges=set(), replayed_rebases={}, tracked_branch=False):
    cursor = db.cursor()

    if not new_review:
        import index

        new_commits = log_commitset.CommitSet(commits)
        old_commits = log_commitset.CommitSet(review.branch.commits)
        merges = new_commits.getMerges()

        for merge in merges:
            # We might have stripped it in a previous pass.
            if not merge in new_commits: continue

            tails = filter(lambda sha1: sha1 not in old_commits and sha1 not in merge.parents, new_commits.getTailsFrom(merge))

            if tails:
                if tracked_branch:
                    raise index.IndexException("""\
Merge %s adds merged-in commits.  Please push the merge manually
and follow the instructions.""" % merge.sha1[:8])

                cursor.execute("SELECT id, confirmed, tail FROM reviewmergeconfirmations WHERE review=%s AND uid=%s AND merge=%s", (review.id, user.id, merge.getId(db)))

                row = cursor.fetchone()

                if not row or not row[1]:
                    if not row:
                        cursor.execute("INSERT INTO reviewmergeconfirmations (review, uid, merge) VALUES (%s, %s, %s) RETURNING id", (review.id, user.id, merge.getId(db)))
                        confirmation_id = cursor.fetchone()[0]

                        merged = set()

                        for tail_sha1 in tails:
                            children = new_commits.getChildren(tail_sha1)

                            while children:
                                child = children.pop()
                                if child not in merged and new_commits.isAncestorOf(child, merge):
                                    merged.add(child)
                                    children.update(new_commits.getChildren(child) - merged)

                        merged_values = [(confirmation_id, commit.getId(db)) for commit in merged]
                        cursor.executemany("INSERT INTO reviewmergecontributions (id, merged) VALUES (%s, %s)", merged_values)
                        db.commit()
                    else:
                        confirmation_id = row[0]

                    message = "Merge %s adds merged-in commits:" % merge.sha1[:8]

                    for tail_sha1 in tails:
                        for parent_sha1 in merge.parents:
                            if parent_sha1 in new_commits:
                                parent = new_commits.get(parent_sha1)
                                if tail_sha1 in new_commits.getTailsFrom(parent):
                                    message += "\n  %s..%s" % (tail_sha1[:8], parent_sha1[:8])

                    message += """
Please confirm that this is intended by loading:
  %s/confirmmerge?id=%d""" % (dbutils.getURLPrefix(db, user), confirmation_id)

                    raise index.IndexException(message)
                elif row[2] is not None:
                    if row[2] == merge.getId(db):
                        cursor.execute("SELECT merged FROM reviewmergecontributions WHERE id=%s",
                                       (row[0],))

                        for (merged_id,) in cursor:
                            merged = gitutils.Commit.fromId(db, review.repository, merged_id)
                            if merged.sha1 in merge.parents:
                                new_commits = new_commits.without([merged])
                                break
                    else:
                        tail = gitutils.Commit.fromId(db, review.repository, row[2])
                        cut = [gitutils.Commit.fromSHA1(db, review.repository, sha1)
                               for sha1 in tail.parents if sha1 in new_commits]
                        new_commits = new_commits.without(cut)

        if commitset:
            commitset &= set(new_commits)
            commits = [commit for commit in commits if commit in commitset]

    changesets = []
    silent_commits = set()
    silent_changesets = set()

    simple_commits = []
    for commit in commits:
        if commit not in full_merges and commit not in replayed_rebases:
            simple_commits.append(commit)
    if simple_commits:
        changeset_utils.createChangesets(db, review.repository, simple_commits)

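    # Create the changesets to add to the review: full merges get a full-merge
    # changeset, replayed rebases are diffed against their replay (with
    # conflicts marked), and ordinary commits get a regular changeset.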
    for commit in commits:
        if commit in full_merges:
            commit_changesets = changeset_utils.createFullMergeChangeset(
                db, user, review.repository, commit, do_highlight=False)
        elif commit in replayed_rebases:
            commit_changesets = changeset_utils.createChangeset(
                db, user, review.repository,
                from_commit=commit, to_commit=replayed_rebases[commit],
                conflicts=True, do_highlight=False)
        else:
            commit_changesets = changeset_utils.createChangeset(
                db, user, review.repository, commit, do_highlight=False)

        if commit in silent_if_empty:
            for commit_changeset in commit_changesets:
                if commit_changeset.files:
                    break
            else:
                silent_commits.add(commit)
                silent_changesets.update(commit_changesets)

        changesets.extend(commit_changesets)

    if not new_review:
        print "Adding %d commit%s to the review at:\n  %s" % (len(commits), len(commits) > 1 and "s" or "", review.getURL(db))

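    # Two bulk inserts: first associate each new changeset with the review,
    # then record per-file deleted/inserted line counts in reviewfiles.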
    reviewchangesets_values = [(review.id, changeset.id) for changeset in changesets]

    cursor.executemany("""INSERT INTO reviewchangesets (review, changeset) VALUES (%s, %s)""", reviewchangesets_values)
    cursor.executemany("""INSERT INTO reviewfiles (review, changeset, file, deleted, inserted)
                               SELECT reviewchangesets.review, reviewchangesets.changeset, fileversions.file,
                                      COALESCE(SUM(chunks.deleteCount), 0), COALESCE(SUM(chunks.insertCount), 0)
                                 FROM reviewchangesets
                                 JOIN fileversions USING (changeset)
                      LEFT OUTER JOIN chunks USING (changeset, file)
                                WHERE reviewchangesets.review=%s
                                  AND reviewchangesets.changeset=%s
                             GROUP BY reviewchangesets.review, reviewchangesets.changeset, fileversions.file""",
                       reviewchangesets_values)

    new_reviewers, new_watchers = assignChanges(db, user, review, changesets=changesets)

    cursor.execute("SELECT include FROM reviewrecipientfilters WHERE review=%s AND uid IS NULL", (review.id,))

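    # The review-wide recipient filter (uid IS NULL) says whether everyone is
    # included by default.  If so (or if no such filter exists), individual
    # users may have opted out with include=FALSE rows, which are cleared below
    # when such a user becomes a reviewer.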
    try: opt_out = cursor.fetchone()[0] is True
    except: opt_out = True

    if not new_review:
        for user_id in new_reviewers:
            new_reviewuser = dbutils.User.fromId(db, user_id)
            print "Added reviewer: %s <%s>" % (new_reviewuser.fullname, new_reviewuser.email)

            if opt_out:
                # If the user has opted out from receiving e-mails about this
                # review while only watching it, clear the opt-out now that the
                # user becomes a reviewer.
                cursor.execute("DELETE FROM reviewrecipientfilters WHERE review=%s AND uid=%s AND include=FALSE", (review.id, user_id))

        for user_id in new_watchers:
            new_reviewuser = dbutils.User.fromId(db, user_id)
            print "Added watcher:  %s <%s>" % (new_reviewuser.fullname, new_reviewuser.email)

        review.incrementSerial(db)

        reviewing.comment.propagateCommentChains(db, user, review, new_commits, replayed_rebases)

    if pending_mails is None: pending_mails = []

    notify_commits = filter(lambda commit: commit not in silent_commits, commits)
    notify_changesets = filter(lambda changeset: changeset not in silent_changesets, changesets)

    if not new_review and notify_changesets:
        recipients = review.getRecipients(db)
        for to_user in recipients:
            pending_mails.extend(mail.sendReviewAddedCommits(
                    db, user, to_user, recipients, review, notify_commits,
                    notify_changesets, tracked_branch=tracked_branch))

    mail.sendPendingMails(pending_mails)

    review.reviewers.extend([dbutils.User.fromId(db, user_id) for user_id in new_reviewers])

    for user_id in new_watchers:
        review.watchers[dbutils.User.fromId(db, user_id)] = "automatic"

    return True
Example #18
0
File: showcommit.py Project: KurSh/critic
        title += "%s (%s)" % (commit.niceSummary(), commit.describe(db))
    else:
        title += "%s..%s" % (from_commit.describe(db), to_commit.describe(db))

    document.setTitle(title)

    if review_filter == "pending":
        document.setLink("next", "javascript:submitChanges();")

    commits = None
    rebases = None

    profiler.check("prologue")

    if from_commit and to_commit:
        changesets = changeset_utils.createChangeset(db, user, repository, from_commit=from_commit, to_commit=to_commit, rescan=rescan, reanalyze=reanalyze, filtered_file_ids=file_ids)
        assert len(changesets) == 1

        if review and review_filter in ("reviewable", "relevant", "files"):
            cursor.execute("""SELECT old_head, new_head, new_upstream, uid, branch
                                FROM reviewrebases
                               WHERE review=%s AND new_head IS NOT NULL""",
                           (review.id,))

            all_rebases = [(None,
                            gitutils.Commit.fromId(db, repository, old_head),
                            gitutils.Commit.fromId(db, repository, new_head),
                            dbutils.User.fromId(db, user_id),
                            gitutils.Commit.fromId(db, repository, new_upstream) if new_upstream is not None else None,
                            branch_name)
                           for old_head, new_head, new_upstream, user_id, branch_name in cursor]
Example #19
0
File: utils.py Project: Haster2004/critic
def assignChanges(db, user, review, commits=None, changesets=None, update=False):
    cursor = db.cursor()

    if changesets is None:
        assert commits is not None

        changesets = []

        for commit in commits:
            changesets.extend(changeset_utils.createChangeset(db, user, review.repository, commit))

    applyfilters = review.applyfilters
    applyparentfilters = review.applyparentfilters

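    # Map each changed file to the users whose filters make them reviewers or
    # watchers of it, taking the review's own filters into account.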
    reviewers, watchers = getReviewersAndWatchers(db, review.repository, changesets=changesets, reviewfilters=review.getReviewFilters(db),
                                                  applyfilters=applyfilters, applyparentfilters=applyparentfilters)

    cursor.execute("SELECT uid FROM reviewusers WHERE review=%s", (review.id,))

    reviewusers = set([user_id for (user_id,) in cursor])
    reviewusers_values = set()
    reviewuserfiles_values = set()

    reviewuserfiles_existing = {}

    if update:
        cursor.execute("""SELECT reviewuserfiles.uid, reviewfiles.changeset, reviewfiles.file
                            FROM reviewfiles
                            JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id)
                           WHERE reviewfiles.review=%s""", (review.id,))
        for user_id, changeset_id, file_id in cursor:
            reviewuserfiles_existing[(user_id, changeset_id, file_id)] = True

    new_reviewers = set()
    new_watchers = set()

    cursor.execute("""SELECT DISTINCT uid
                        FROM reviewfiles
                        JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id)
                       WHERE review=%s""", (review.id,))
    old_reviewers = set([user_id for (user_id,) in cursor])

    for file_id, file_users in reviewers.items():
        for user_id, user_changesets in file_users.items():
            if user_id:
                new_reviewers.add(user_id)

                if user_id not in reviewusers:
                    reviewusers.add(user_id)
                    reviewusers_values.add((review.id, user_id))
                for changeset_id in user_changesets:
                    if (user_id, changeset_id, file_id) not in reviewuserfiles_existing:
                        reviewuserfiles_values.add((user_id, review.id, changeset_id, file_id))

    for file_id, file_users in watchers.items():
        for user_id, user_changesets in file_users.items():
            if user_id:
                if user_id not in reviewusers:
                    new_watchers.add(user_id)
                    reviewusers.add(user_id)
                    reviewusers_values.add((review.id, user_id))

    new_reviewers -= old_reviewers
    new_watchers -= old_reviewers | new_reviewers

    cursor.executemany("INSERT INTO reviewusers (review, uid) VALUES (%s, %s)", reviewusers_values)
    cursor.executemany("INSERT INTO reviewuserfiles (file, uid) SELECT id, %s FROM reviewfiles WHERE review=%s AND changeset=%s AND file=%s", reviewuserfiles_values)

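    # If extensions are enabled, queue filter hook events for any extension
    # filters matching the changed files.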
    if configuration.extensions.ENABLED:
        cursor.execute("""SELECT id, uid, extension, path
                            FROM extensionhookfilters
                           WHERE repository=%s""",
                       (review.repository.id,))

        rows = cursor.fetchall()

        if rows:
            if commits is None:
                commits = set()
                for changeset in changesets:
                    commits.add(changeset.child)
                commits = list(commits)

            filters = Filters()
            filters.setFiles(db, list(getFileIdsFromChangesets(changesets)))

            for filter_id, user_id, extension_id, path in rows:
                filters.addFilter(user_id, path, None, None, filter_id)

            for filter_id, file_ids in filters.matched_files.items():
                extensions.role.filterhook.queueFilterHookEvent(
                    db, filter_id, review, user, commits, file_ids)

    return new_reviewers, new_watchers
Example #20
0
File: utils.py Project: ahockersten/critic
def assignChanges(db, user, review, commits=None, changesets=None, update=False):
    cursor = db.cursor()

    if changesets is None:
        assert commits is not None

        changesets = []

        for commit in commits:
            changesets.extend(changeset_utils.createChangeset(db, user, review.repository, commit))

    applyfilters = review.applyfilters
    applyparentfilters = review.applyparentfilters

    reviewers, watchers = getReviewersAndWatchers(db, review.repository, changesets=changesets, reviewfilters=review.getReviewFilters(db),
                                                  applyfilters=applyfilters, applyparentfilters=applyparentfilters)

    cursor.execute("SELECT uid FROM reviewusers WHERE review=%s", (review.id,))

    reviewusers = set([user_id for (user_id,) in cursor])
    reviewusers_values = set()
    reviewuserfiles_values = set()

    reviewuserfiles_existing = {}

    if update:
        cursor.execute("""SELECT reviewuserfiles.uid, reviewfiles.changeset, reviewfiles.file
                            FROM reviewfiles
                            JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id)
                           WHERE reviewfiles.review=%s""", (review.id,))
        for user_id, changeset_id, file_id in cursor:
            reviewuserfiles_existing[(user_id, changeset_id, file_id)] = True

    new_reviewers = set()
    new_watchers = set()

    cursor.execute("""SELECT DISTINCT uid
                        FROM reviewfiles
                        JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id)
                       WHERE review=%s""", (review.id,))
    old_reviewers = set([user_id for (user_id,) in cursor])

    for file_id, file_users in reviewers.items():
        for user_id, user_changesets in file_users.items():
            if user_id:
                new_reviewers.add(user_id)

                if user_id not in reviewusers:
                    reviewusers.add(user_id)
                    reviewusers_values.add((review.id, user_id))
                for changeset_id in user_changesets:
                    if (user_id, changeset_id, file_id) not in reviewuserfiles_existing:
                        reviewuserfiles_values.add((user_id, review.id, changeset_id, file_id))

    for file_id, file_users in watchers.items():
        for user_id, user_changesets in file_users.items():
            if user_id:
                if user_id not in reviewusers:
                    new_watchers.add(user_id)
                    reviewusers.add(user_id)
                    reviewusers_values.add((review.id, user_id))

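    # Only report users that were not already reviewing or watching the review.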
    new_reviewers -= old_reviewers
    new_watchers -= old_reviewers | new_reviewers

    cursor.executemany("INSERT INTO reviewusers (review, uid) VALUES (%s, %s)", reviewusers_values)
    cursor.executemany("INSERT INTO reviewuserfiles (file, uid) SELECT id, %s FROM reviewfiles WHERE review=%s AND changeset=%s AND file=%s", reviewuserfiles_values)

    return new_reviewers, new_watchers
Example #21
0
File: html.py Project: yanlimin9/critic
def renderCodeCommentChain(db,
                           target,
                           user,
                           review,
                           chain,
                           context_lines=3,
                           compact=False,
                           tabify=False,
                           original=False,
                           changeset=None,
                           linkify=False):
    repository = review.repository

    old_sha1 = None
    new_sha1 = None

    old = 1
    new = 2

    cursor = db.cursor()

    file_id = chain.file_id
    file_path = dbutils.describe_file(db, file_id)

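    # A comment chain anchored to a single commit (and not yet addressed,
    # unless the original version is requested) is rendered as plain context
    # lines from that commit; otherwise a diff between the relevant commits is
    # rendered.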
    if (chain.state != "addressed"
            or original) and chain.first_commit == chain.last_commit:
        sha1 = chain.first_commit.getFileSHA1(file_path)

        cursor.execute(
            "SELECT first_line, last_line FROM commentchainlines WHERE chain=%s AND sha1=%s",
            (chain.id, sha1))
        first_line, last_line = cursor.fetchone()

        file = diff.File(file_id,
                         file_path,
                         sha1,
                         sha1,
                         review.repository,
                         chunks=[])
        file.loadNewLines(True)

        start = max(1, first_line - context_lines)
        end = min(file.newCount(), last_line + context_lines)
        count = end + 1 - start

        lines = file.newLines(True)
        lines = [
            diff.Line(diff.Line.CONTEXT, start + index,
                      lines[start + index - 1], start + index,
                      lines[start + index - 1]) for index in range(count)
        ]

        file.macro_chunks = [diff.MacroChunk([], lines)]

        use = new
        display_type = "new"
        commit_url_component = "sha1=%s" % chain.first_commit.sha1
    else:
        if chain.state == "addressed" and not original and review.containsCommit(
                db, chain.addressed_by):
            parent = gitutils.Commit.fromSHA1(db, review.repository,
                                              chain.addressed_by.parents[0])
            child = chain.addressed_by
            use = old
        else:
            parent = chain.first_commit
            child = chain.last_commit

            if parent == child:
                if chain.origin == "old":
                    cursor.execute(
                        """SELECT changesets.child
                                        FROM changesets, reviewchangesets
                                       WHERE changesets.parent=%s
                                         AND reviewchangesets.changeset=changesets.id
                                         AND reviewchangesets.review=%s""",
                        [child.getId(db), review.id])

                    try:
                        child = gitutils.Commit.fromId(db, repository,
                                                       cursor.fetchone()[0])
                    except:
                        parent = gitutils.Commit.fromSHA1(
                            db, repository, child.parents[0])
                else:
                    parent = gitutils.Commit.fromSHA1(db, repository,
                                                      child.parents[0])

            if chain.origin == "old": use = old
            else: use = new

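        # If 'child' is a non-merge commit whose parent is 'parent', a plain
        # single-commit changeset suffices; otherwise an explicit from/to diff
        # is needed.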
        if parent.sha1 in child.parents and len(child.parents) == 1:
            commit = child
            from_commit = None
            to_commit = None
        else:
            commit = None
            from_commit = parent
            to_commit = child

        if changeset:
            assert ((changeset.parent == from_commit
                     and changeset.child == to_commit) if commit is None else
                    (changeset.parent.sha1 == commit.parents[0]
                     and changeset.child == commit))
            assert changeset.getFile(file_id)
        else:
            changeset = changeset_utils.createChangeset(
                db,
                user,
                repository,
                commit=commit,
                from_commit=from_commit,
                to_commit=to_commit,
                filtered_file_ids=set((file_id, )))[0]

        file = changeset.getFile(file_id)

        if not file:
            if chain.state == "addressed" and not original:
                renderCodeCommentChain(db,
                                       target,
                                       user,
                                       review,
                                       chain,
                                       context_lines,
                                       compact,
                                       tabify,
                                       original=True)
                return
            else:
                raise

        # Commit so that the diff and its analysis, written to the database by createChangeset(),
        # can be reused later.
        db.commit()

        old_sha1 = file.old_sha1
        new_sha1 = file.new_sha1

        if use == old and old_sha1 == '0' * 40: use = new
        elif use == new and new_sha1 == '0' * 40: use = old

        if use == old: sha1 = old_sha1
        else: sha1 = new_sha1

        cursor.execute(
            "SELECT first_line, last_line FROM commentchainlines WHERE chain=%s AND sha1=%s",
            [chain.id, sha1])

        first_line, last_line = cursor.fetchone()

        def readChunks():
            return [
                diff.Chunk(delete_offset,
                           delete_count,
                           insert_offset,
                           insert_count,
                           analysis=analysis,
                           is_whitespace=is_whitespace)
                for delete_offset, delete_count, insert_offset, insert_count,
                analysis, is_whitespace in cursor.fetchall()
            ]

        first_context_line = first_line - context_lines
        last_context_line = last_line + context_lines

        def includeChunk(chunk):
            if use == old:
                chunk_first_line, chunk_last_line = chunk.delete_offset, chunk.delete_offset + chunk.delete_count - 1
            else:
                chunk_first_line, chunk_last_line = chunk.insert_offset, chunk.insert_offset + chunk.insert_count - 1

            return chunk_last_line >= first_context_line and chunk_first_line <= last_context_line

        def lineFilter(line):
            if use == old:
                linenr = line.old_offset
                if linenr == first_context_line and line.type == diff.Line.INSERTED:
                    return False
            else:
                linenr = line.new_offset
                if linenr == first_context_line and line.type == diff.Line.DELETED:
                    return False

            return first_context_line <= linenr <= last_context_line

        file.loadOldLines(True)
        file.loadNewLines(True)

        context = diff.context.ContextLines(file, file.chunks,
                                            [(chain, use == old)])
        file.macro_chunks = context.getMacroChunks(context_lines,
                                                   highlight=True,
                                                   lineFilter=lineFilter)

        try:
            macro_chunk = file.macro_chunks[0]
        except:
            raise Exception(repr((parent.sha1, child.sha1)))

        display_type = "both"

        if chain.state != "addressed":
            first_line_type = macro_chunk.lines[0].type
            if first_line_type == diff.Line.CONTEXT or (
                    use == old and first_line_type == diff.Line.DELETED) or (
                        use == new and first_line_type == diff.Line.INSERTED):
                for line in macro_chunk.lines[1:]:
                    if first_line_type != line.type:
                        break
                else:
                    display_type = "old" if use == old else "new"

        commit_url_component = "from=%s&to=%s" % (parent.sha1, child.sha1)

    def renderHeaderLeft(db, target, file):
        target.span("comment-chain-title").a(href="/showcomment?chain=%d" %
                                             chain.id).text(chain.title())

    def renderHeaderRight(db, target, file):
        side = use == old and "o" or "n"
        uri = "showcommit?%s&review=%d&file=%d#f%d%s%d" % (
            commit_url_component, review.id, file.id, file.id, side,
            first_line)
        target.span("filename").a(href=uri).text(file.path)

    def renderCommentsLocal(db, target, **kwargs):
        if display_type == "both":
            if use == old: position = "left"
            else: position = "right"
        else:
            position = "center"

        renderComments(db, target, user, chain, position, linkify)

    def lineId(base):
        return "c%d%s" % (chain.id, base)

    def lineCellId(base):
        return "c%d%s" % (chain.id, base)

    target.addInternalScript("commentChainById[%d] = %s;" %
                             (chain.id, chain.getJSConstructor(sha1)),
                             here=True)

    changeset_html.renderFile(db,
                              target,
                              user,
                              review,
                              file,
                              options={
                                  "support_expand": False,
                                  "display_type": display_type,
                                  "header_left": renderHeaderLeft,
                                  "header_right": renderHeaderRight,
                                  "content_after": renderCommentsLocal,
                                  "show": True,
                                  "expand": True,
                                  "line_id": lineId,
                                  "line_cell_id": lineCellId,
                                  "compact": compact,
                                  "tabify": tabify,
                                  "include_deleted": True
                              })

    data = (chain.id, file_id, use == old and "o"
            or "n", first_line, chain.id, file_id, use == old and "o"
            or "n", last_line, htmlutils.jsify(chain.type),
            htmlutils.jsify(chain.state), chain.id)

    target.addInternalScript("""$(document).ready(function ()
  {
    var markers = new CommentMarkers(null);
    markers.setLines(document.getElementById('c%df%d%s%d'), document.getElementById('c%df%d%s%d'));
    markers.setType(%s, %s);
    commentChainById[%d].markers = markers;
  });""" % data,
                             here=True)
Example #22
0
def renderShowComments(req, db, user):
    context_lines = req.getParameter("context",
                                     user.getPreference(
                                         db, "comment.diff.contextLines"),
                                     filter=int)

    default_compact = "yes" if user.getPreference(
        db, "commit.diff.compactMode") else "no"
    compact = req.getParameter("compact", default_compact) == "yes"

    default_tabify = "yes" if user.getPreference(
        db, "commit.diff.visualTabs") else "no"
    tabify = req.getParameter("tabify", default_tabify) == "yes"

    original = req.getParameter("original", "no") == "yes"

    review_id = req.getParameter("review", filter=int)
    batch_id = req.getParameter("batch", None, filter=int)
    filter = req.getParameter("filter", "all")
    blame = req.getParameter("blame", None)

    profiler = profiling.Profiler()

    review = dbutils.Review.fromId(db, review_id)
    review.repository.enableBlobCache()

    cursor = db.cursor()

    profiler.check("create review")

    if blame is not None:
        blame_user = dbutils.User.fromName(db, blame)

        cursor.execute(
            """SELECT commentchains.id
                            FROM commentchains
                            JOIN commentchainlines ON (commentchainlines.chain=commentchains.id)
                            JOIN fileversions ON (fileversions.new_sha1=commentchainlines.sha1)
                            JOIN changesets ON (changesets.id=fileversions.changeset)
                            JOIN commits ON (commits.id=changesets.child)
                            JOIN gitusers ON (gitusers.id=commits.author_gituser)
                            JOIN usergitemails USING (email)
                            JOIN reviewchangesets ON (reviewchangesets.changeset=changesets.id AND reviewchangesets.review=commentchains.review)
                           WHERE commentchains.review=%s
                             AND usergitemails.uid=%s
                             AND commentchains.state!='empty'
                             AND (commentchains.state!='draft' OR commentchains.uid=%s)
                        ORDER BY commentchains.file, commentchainlines.first_line""",
            (review.id, blame_user.id, user.id))

        include_chain_ids = set([chain_id for (chain_id, ) in cursor])

        profiler.check("initial blame filtering")
    else:
        include_chain_ids = None

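    # Build the chain-selection query: "toread" uses the unread-comments
    # tables, while the other filters restrict by chain type and state.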
    if filter == "toread":
        query = """SELECT commentchains.id
                     FROM commentchains
                     JOIN comments ON (comments.chain=commentchains.id)
                     JOIN commentstoread ON (commentstoread.comment=comments.id)
          LEFT OUTER JOIN commentchainlines ON (commentchainlines.chain=commentchains.id)
                    WHERE review=%s
                      AND commentstoread.uid=%s
                 ORDER BY file, first_line"""

        cursor.execute(query, (review.id, user.id))
    else:
        query = """SELECT commentchains.id
                     FROM commentchains
          LEFT OUTER JOIN commentchainlines ON (chain=id)
                    WHERE review=%s
                      AND commentchains.state!='empty'"""

        arguments = [review.id]

        if filter == "issues":
            query += " AND type='issue' AND (commentchains.state!='draft' OR commentchains.uid=%s)"
            arguments.append(user.id)
        elif filter == "draft-issues":
            query += " AND type='issue' AND commentchains.state='draft' AND commentchains.uid=%s"
            arguments.append(user.id)
        elif filter == "open-issues":
            query += " AND type='issue' AND commentchains.state='open'"
        elif filter == "addressed-issues":
            query += " AND type='issue' AND commentchains.state='addressed'"
        elif filter == "closed-issues":
            query += " AND type='issue' AND commentchains.state='closed'"
        elif filter == "notes":
            query += " AND type='note' AND (commentchains.state!='draft' OR commentchains.uid=%s)"
            arguments.append(user.id)
        elif filter == "draft-notes":
            query += " AND type='note' AND commentchains.state='draft' AND commentchains.uid=%s"
            arguments.append(user.id)
        elif filter == "open-notes":
            query += " AND type='note' AND commentchains.state='open'"
        else:
            query += " AND (commentchains.state!='draft' OR commentchains.uid=%s)"
            arguments.append(user.id)

        if batch_id is not None:
            query += " AND batch=%s"
            arguments.append(batch_id)

        # This ordering is inaccurate if comments apply to the same file but
        # different commits, but in that case there isn't really a well-defined
        # natural order either.  Two comments that apply to the same file and
        # commit will at least be ordered by line number, and that's better
        # than nothing.
        query += " ORDER BY file, first_line"

        cursor.execute(query, arguments)

    profiler.check("main query")

    if include_chain_ids is None:
        chain_ids = [chain_id for (chain_id, ) in cursor]
    else:
        chain_ids = [
            chain_id for (chain_id, ) in cursor
            if chain_id in include_chain_ids
        ]

    profiler.check("query result")

    document = htmlutils.Document(req)

    html = document.html()
    head = html.head()
    body = html.body()

    document.addInternalScript(user.getJS(db))
    document.addInternalScript(review.getJS())

    page.utils.generateHeader(
        body,
        db,
        user,
        lambda target: review_utils.renderDraftItems(db, user, review, target),
        extra_links=[("r/%d" % review.id, "Back to Review")])

    profiler.check("page header")

    target = body.div("main")

    if chain_ids and not user.isAnonymous() and user.name == req.user:
        document.addInternalScript(
            "$(function () { markChainsAsRead([%s]); });" %
            ", ".join(map(str, chain_ids)))

    if chain_ids:
        processed = set()

        chains = []
        file_ids = set()
        changesets_files = {}
        changesets = {}

        if blame is not None:
            annotators = {}
            review.branch.loadCommits(db)
            commits = log.commitset.CommitSet(review.branch.commits)

        for chain_id in chain_ids:
            if chain_id in processed:
                continue
            else:
                processed.add(chain_id)

                chain = review_comment.CommentChain.fromId(db,
                                                           chain_id,
                                                           user,
                                                           review=review)
                chains.append(chain)

                if chain.file_id is not None:
                    file_ids.add(chain.file_id)
                    parent, child = review_html.getCodeCommentChainChangeset(
                        db, chain, original)
                    if parent and child:
                        changesets_files.setdefault((parent, child),
                                                    set()).add(chain.file_id)

        profiler.check("load chains")

        changeset_cache = {}

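        # Create one changeset per (from_commit, to_commit) pair, restricted to
        # the files that actually carry comments, plus a line annotator per
        # pair when filtering by blame.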
        for (from_commit,
             to_commit), filtered_file_ids in changesets_files.items():
            changesets[(from_commit,
                        to_commit)] = changeset_utils.createChangeset(
                            db,
                            user,
                            review.repository,
                            from_commit=from_commit,
                            to_commit=to_commit,
                            filtered_file_ids=filtered_file_ids)[0]
            profiler.check("create changesets")

            if blame is not None:
                annotators[(from_commit,
                            to_commit)] = operation.blame.LineAnnotator(
                                db,
                                from_commit,
                                to_commit,
                                file_ids=file_ids,
                                commits=commits,
                                changeset_cache=changeset_cache)
                profiler.check("create annotators")

        for chain in chains:
            if blame is not None and chain.file_id is not None:
                try:
                    changeset = changesets[(chain.first_commit,
                                            chain.last_commit)]
                    annotator = annotators[(chain.first_commit,
                                            chain.last_commit)]
                except KeyError:
                    # Most likely a comment created via /showfile.  Such a
                    # comment could be in code that 'blame_user' modified in the
                    # review, but for now, let's skip the comment.
                    continue
                else:
                    file_in_changeset = changeset.getFile(chain.file_id)

                    if not file_in_changeset:
                        continue

                    try:
                        offset, count = chain.lines_by_sha1[
                            file_in_changeset.new_sha1]
                    except KeyError:
                        # Probably a chain raised against the "old" side of the diff.
                        continue
                    else:
                        if not annotator.annotate(chain.file_id,
                                                  offset,
                                                  offset + count - 1,
                                                  check_user=blame_user):
                            continue

            profiler.check("detailed blame filtering")

            if chain.file_id is not None:
                from_commit, to_commit = review_html.getCodeCommentChainChangeset(
                    db, chain, original)
                changeset = changesets.get((from_commit, to_commit))
            else:
                changeset = None

            review_html.renderCommentChain(db,
                                           target,
                                           user,
                                           review,
                                           chain,
                                           context_lines=context_lines,
                                           compact=compact,
                                           tabify=tabify,
                                           original=original,
                                           changeset=changeset,
                                           linkify=linkify.Context(
                                               db=db,
                                               request=req,
                                               review=review))

            profiler.check("rendering")

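            # Stream the chain just rendered to the client before processing
            # the next one.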
            yield document.render(
                stop=target, pretty=not compact
            ) + "<script>console.log((new Date).toString());</script>"

            profiler.check("transfer")

        page.utils.renderShortcuts(target, "showcomments", review=review)
    else:
        target.h1(align="center").text("No comments.")

    profiler.output(db, user, document)

    yield document.render(pretty=not compact)
Example #23
0
def renderCreateReview(req, db, user):
    if user.isAnonymous(): raise page.utils.NeedLogin(req)

    repository = req.getParameter("repository", filter=gitutils.Repository.FromParameter(db), default=None)
    applyparentfilters = req.getParameter("applyparentfilters", "yes" if user.getPreference(db, 'review.applyUpstreamFilters') else "no") == "yes"

    cursor = db.cursor()

    if req.method == "POST":
        data = json_decode(req.read())

        summary = data.get("summary")
        description = data.get("description")
        review_branch_name = data.get("review_branch_name")
        commit_ids = data.get("commit_ids")
        commit_sha1s = data.get("commit_sha1s")
    else:
        summary = req.getParameter("summary", None)
        description = req.getParameter("description", None)
        review_branch_name = req.getParameter("reviewbranchname", None)

        commit_ids = None
        commit_sha1s = None

        commits_arg = req.getParameter("commits", None)
        remote = req.getParameter("remote", None)
        upstream = req.getParameter("upstream", "master")
        branch_name = req.getParameter("branch", None)

        if commits_arg:
            try: commit_ids = map(int, commits_arg.split(","))
            except: commit_sha1s = [repository.revparse(ref) for ref in commits_arg.split(",")]
        elif branch_name:
            cursor.execute("""SELECT commit
                                FROM reachable
                                JOIN branches ON (branch=id)
                               WHERE repository=%s
                                 AND name=%s""",
                           (repository.id, branch_name))
            commit_ids = [commit_id for (commit_id,) in cursor]

            if len(commit_ids) > configuration.limits.MAXIMUM_REVIEW_COMMITS:
                raise page.utils.DisplayMessage(
                    "Too many commits!",
                    (("<p>The branch <code>%s</code> contains %d commits.  Reviews can"
                      "be created from branches that contain at most %d commits.</p>"
                      "<p>This limit can be adjusted by modifying the system setting"
                      "<code>configuration.limits.MAXIMUM_REVIEW_COMMITS</code>.</p>")
                     % (htmlutils.htmlify(branch_name), len(commit_ids),
                        configuration.limits.MAXIMUM_REVIEW_COMMITS)),
                    html=True)
        else:
            return renderSelectSource(req, db, user)

    req.content_type = "text/html; charset=utf-8"

    if commit_ids:
        commits = [gitutils.Commit.fromId(db, repository, commit_id) for commit_id in commit_ids]
    elif commit_sha1s:
        commits = [gitutils.Commit.fromSHA1(db, repository, commit_sha1) for commit_sha1 in commit_sha1s]
    else:
        commits = []

    if not commit_ids:
        commit_ids = [commit.getId(db) for commit in commits]
    if not commit_sha1s:
        commit_sha1s = [commit.sha1 for commit in commits]

    if summary is None:
        if len(commits) == 1:
            summary = commits[0].summary()
        else:
            summary = ""

    if review_branch_name:
        invalid_branch_name = "false"
        default_branch_name = review_branch_name
    else:
        invalid_branch_name = htmlutils.jsify(user.name + "/")
        default_branch_name = user.name + "/"

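        # Try to derive a default branch name from an issue key (e.g. ABC-123)
        # mentioned in the summary.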
        match = re.search("(?:^|[Ff]ix(?:e[ds])?(?: +for)?(?: +bug)? +)([A-Z][A-Z0-9]+-[0-9]+)", summary)
        if match:
            invalid_branch_name = "false"
            default_branch_name = htmlutils.htmlify(match.group(1))

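    # Precompute changesets for all commits so that reviewers and watchers can
    # be previewed before the review is actually created.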
    changesets = []
    changeset_utils.createChangesets(db, repository, commits)
    for commit in commits:
        changesets.extend(changeset_utils.createChangeset(db, None, repository, commit, do_highlight=False))
    changeset_ids = [changeset.id for changeset in changesets]

    all_reviewers, all_watchers = reviewing.utils.getReviewersAndWatchers(
        db, repository, changesets=changesets, applyparentfilters=applyparentfilters)

    document = htmlutils.Document(req)
    html = document.html()
    head = html.head()

    document.addInternalScript(user.getJS(db))

    if branch_name:
        document.addInternalScript("var fromBranch = %s;" % htmlutils.jsify(branch_name))

    if remote:
        document.addInternalScript("var trackedbranch = { remote: %s, name: %s };" % (htmlutils.jsify(remote), htmlutils.jsify(branch_name)))

    head.title().text("Create Review")

    body = html.body(onload="document.getElementById('branch_name').focus()")

    page.utils.generateHeader(body, db, user, lambda target: target.button(onclick="submitReview();").text("Submit Review"))

    document.addExternalStylesheet("resource/createreview.css")
    document.addExternalScript("resource/createreview.js")
    document.addExternalScript("resource/reviewfilters.js")
    document.addExternalScript("resource/autocomplete.js")

    document.addInternalScript("""
var invalid_branch_name = %s;
var review_data = { commit_ids: %r,
                    commit_sha1s: %r,
                    changeset_ids: %r };""" % (invalid_branch_name,
                                               commit_ids,
                                               commit_sha1s,
                                               changeset_ids))
    document.addInternalScript(repository.getJS())

    main = body.div("main")

    table = main.table("basic paleyellow", align="center")
    table.tr().td("h1", colspan=3).h1().text("Create Review")

    row = table.tr("line")
    row.td("heading").text("Branch Name:")
    row.td("value").text("r/").input("value", id="branch_name", value=default_branch_name)
    row.td("status")

    row = table.tr()

    if not remote:
        row.td("help", colspan=3).div().text("""\
This is the main identifier of the review.  It will be created in the review
repository to contain the commits below.  Reviewers can fetch it from there, and
additional commits can be added to the review later by pushing them to this
branch in the review repository.""")
    else:
        row.td("help", colspan=3).div().text("""\
This is the main identifier of the review.  It will be created in the review
repository to contain the commits below, and reviewers can fetch it from there.""")

    if remote:
        row = table.tr("line")
        row.td("heading").text("Tracked Branch:")
        value = row.td("value")
        value.code("branch inset").text(branch_name, linkify=linkify.Context(remote=remote))
        value.text(" in ")
        value.code("remote inset").text(remote, linkify=linkify.Context())
        row.td("status")

        row = table.tr()
        row.td("help", colspan=3).div().text("""\
Rather than pushing directly to the review branch in Critic's repository to add
commits to the review, you will be pushing to this branch (in a separate
repository,) from which Critic will fetch commits and add them to the review
automatically.""")

    row = table.tr("line")
    row.td("heading").text("Summary:")
    row.td("value").input("value", id="summary", value=summary)
    row.td("status")

    row = table.tr()
    row.td("help", colspan=3).div().text("""\
The summary should be a short summary of the changes in the review.  It will
appear in the subject of all emails sent about the review.
""")

    row = table.tr("line description")
    row.td("heading").text("Description:")
    textarea = row.td("value").textarea(id="description", rows=12)
    textarea.preformatted()
    if description: textarea.text(description)
    row.td("status")

    row = table.tr()
    row.td("help", colspan=3).div().text("""\
The description should describe the changes to be reviewed.  It is usually fine
to leave the description empty, since the commit messages are also available in
the review.
""")

    generateReviewersAndWatchersTable(db, repository, main, all_reviewers, all_watchers, applyparentfilters=applyparentfilters)

    row = table.tr("line recipients")
    row.td("heading").text("Recipient List:")
    cell = row.td("value", colspan=2).preformatted()
    cell.span("mode").text("Everyone")
    cell.span("users")
    cell.text(".")
    buttons = cell.div("buttons")
    buttons.button(onclick="editRecipientList();").text("Edit Recipient List")

    row = table.tr()
    row.td("help", colspan=3).div().text("""\
The basic recipient list for e-mails sent about the review.
""")

    log.html.render(db, main, "Commits", commits=commits)

    return document
Example #24
0
File: html.py Project: ryfow/critic
def renderCodeCommentChain(db, target, user, review, chain, context_lines=3, compact=False, tabify=False, original=False, changeset=None, linkify=False):
    repository = review.repository

    old_sha1 = None
    new_sha1 = None

    old = 1
    new = 2

    cursor = db.cursor()

    file_id = chain.file_id
    file_path = dbutils.describe_file(db, file_id)

    if (chain.state != "addressed" or original) and chain.first_commit == chain.last_commit:
        sha1 = chain.first_commit.getFileSHA1(file_path)

        cursor.execute("SELECT first_line, last_line FROM commentchainlines WHERE chain=%s AND sha1=%s", (chain.id, sha1))
        first_line, last_line = cursor.fetchone()

        file = diff.File(file_id, file_path, sha1, sha1, review.repository, chunks=[])
        file.loadNewLines(True)

        start = max(1, first_line - context_lines)
        end = min(file.newCount(), last_line + context_lines)
        count = end + 1 - start

        lines = file.newLines(True)
        lines = [diff.Line(diff.Line.CONTEXT, start + index, lines[start + index - 1], start + index, lines[start + index - 1]) for index in range(count)]

        file.macro_chunks = [diff.MacroChunk([], lines)]

        use = new
        display_type = "new"
        commit_url_component = "sha1=%s" % chain.first_commit.sha1
    else:
        if chain.state == "addressed" and not original and review.containsCommit(db, chain.addressed_by):
            parent = gitutils.Commit.fromSHA1(db, review.repository, chain.addressed_by.parents[0])
            child = chain.addressed_by
            use = old
        else:
            parent = chain.first_commit
            child = chain.last_commit

            if parent == child:
                if chain.origin == "old":
                    cursor.execute("""SELECT changesets.child
                                        FROM changesets, reviewchangesets
                                       WHERE changesets.parent=%s
                                         AND reviewchangesets.changeset=changesets.id
                                         AND reviewchangesets.review=%s""",
                                   [child.getId(db), review.id])

                    try:
                        child = gitutils.Commit.fromId(db, repository, cursor.fetchone()[0])
                    except:
                        parent = gitutils.Commit.fromSHA1(db, repository, child.parents[0])
                else:
                    parent = gitutils.Commit.fromSHA1(db, repository, child.parents[0])

            if chain.origin == "old": use = old
            else: use = new

        if parent.sha1 in child.parents and len(child.parents) == 1:
            commit = child
            from_commit = None
            to_commit = None
        else:
            commit = None
            from_commit = parent
            to_commit = child

        if changeset:
            assert ((changeset.parent == from_commit and changeset.child == to_commit)
                    if commit is None else
                    (changeset.parent.sha1 == commit.parents[0] and changeset.child == commit))
            assert changeset.getFile(file_id)
        else:
            changeset = changeset_utils.createChangeset(db, user, repository, commit=commit, from_commit=from_commit, to_commit=to_commit, filtered_file_ids=set((file_id,)))[0]

        file = changeset.getFile(file_id)

        if not file:
            if chain.state == "addressed" and not original:
                renderCodeCommentChain(db, target, user, review, chain, context_lines, compact, tabify, original=True)
                return
            else:
                raise

        # Commit so that the diff and its analysis, written to the database by createChangeset(),
        # can be reused later.
        db.commit()

        old_sha1 = file.old_sha1
        new_sha1 = file.new_sha1

        if use == old and old_sha1 == '0' * 40: use = new
        elif use == new and new_sha1 == '0' * 40: use = old

        if use == old: sha1 = old_sha1
        else: sha1 = new_sha1

        cursor.execute("SELECT first_line, last_line FROM commentchainlines WHERE chain=%s AND sha1=%s", [chain.id, sha1])

        first_line, last_line = cursor.fetchone()

        def readChunks():
            return [diff.Chunk(delete_offset, delete_count, insert_offset, insert_count, analysis=analysis, is_whitespace=is_whitespace)
                    for delete_offset, delete_count, insert_offset, insert_count, analysis, is_whitespace
                    in cursor.fetchall()]

        first_context_line = first_line - context_lines
        last_context_line = last_line + context_lines

        def includeChunk(chunk):
            if use == old: chunk_first_line, chunk_last_line = chunk.delete_offset, chunk.delete_offset + chunk.delete_count - 1
            else: chunk_first_line, chunk_last_line = chunk.insert_offset, chunk.insert_offset + chunk.insert_count - 1

            return chunk_last_line >= first_context_line and chunk_first_line <= last_context_line

        def lineFilter(line):
            if use == old:
                linenr = line.old_offset
                if linenr == first_context_line and line.type == diff.Line.INSERTED:
                    return False
            else:
                linenr = line.new_offset
                if linenr == first_context_line and line.type == diff.Line.DELETED:
                    return False

            return first_context_line <= linenr <= last_context_line

        file.loadOldLines(True)
        file.loadNewLines(True)

        context = diff.context.ContextLines(file, file.chunks, [chain])
        file.macro_chunks = context.getMacroChunks(context_lines, highlight=True, lineFilter=lineFilter)

        try: macro_chunk = file.macro_chunks[0]
        except: raise Exception(repr((parent.sha1, child.sha1)))

        display_type = "both"

        if chain.state != "addressed":
            first_line_type = macro_chunk.lines[0].type
            if first_line_type == diff.Line.CONTEXT or (use == old and first_line_type == diff.Line.DELETED) or (use == new and first_line_type == diff.Line.INSERTED):
                for line in macro_chunk.lines[1:]:
                    if first_line_type != line.type:
                        break
                else:
                    display_type = "old" if use == old else "new"

        commit_url_component = "from=%s&to=%s" % (parent.sha1, child.sha1)

    def renderHeaderLeft(db, target, file):
        target.span("comment-chain-title").a(href="/showcomment?chain=%d" % chain.id).text(chain.title())

    def renderHeaderRight(db, target, file):
        side = use == old and "o" or "n"
        uri = "showcommit?%s&review=%d&file=%d#f%d%s%d" % (commit_url_component, review.id, file.id, file.id, side, first_line)
        target.span("filename").a(href=uri).text(file.path)

    def renderCommentsLocal(db, target, **kwargs):
        if display_type == "both":
            if use == old: position = "left"
            else: position = "right"
        else:
            position = "center"

        renderComments(db, target, user, chain, position, linkify)

    def lineId(base):
        return "c%d%s" % (chain.id, base)

    def lineCellId(base):
        return "c%d%s" % (chain.id, base)

    target.addInternalScript("commentChainById[%d] = %s;" % (chain.id, chain.getJSConstructor(sha1)), here=True)

    changeset_html.renderFile(db, target, user, review, file, options={ "support_expand": False, "display_type": display_type, "header_left": renderHeaderLeft, "header_right": renderHeaderRight, "content_after": renderCommentsLocal, "show": True, "expand": True, "line_id": lineId, "line_cell_id": lineCellId, "compact": compact, "tabify": tabify, "include_deleted": True })

    data = (chain.id, file_id, use == old and "o" or "n", first_line,
            chain.id, file_id, use == old and "o" or "n", last_line,
            htmlutils.jsify(chain.type), htmlutils.jsify(chain.state),
            chain.id)

    target.addInternalScript("""$(document).ready(function ()
  {
    var markers = new CommentMarkers(null);
    markers.setLines(document.getElementById('c%df%d%s%d'), document.getElementById('c%df%d%s%d'));
    markers.setType(%s, %s);
    commentChainById[%d].markers = markers;
  });""" % data, here=True)
Example #25
0
def assignChanges(db,
                  user,
                  review,
                  commits=None,
                  changesets=None,
                  update=False,
                  parentfiltersonly=False):
    cursor = db.cursor()

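    # Assignment is changeset-based; if only commits were given, create (or
    # load) their changesets first.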
    if changesets is None:
        assert commits is not None

        changesets = []

        for commit in commits:
            changesets.extend(
                changeset_utils.createChangeset(db, user, review.repository,
                                                commit))

    applyfilters = review.applyfilters
    applyparentfilters = review.applyparentfilters

    # Doesn't make sense to apply only parent filters if they're not supposed to
    # be applied in the first place.
    assert not parentfiltersonly or applyparentfilters

    reviewers, watchers = getReviewersAndWatchers(
        db,
        review.repository,
        changesets=changesets,
        reviewfilters=review.getReviewFilters(db),
        applyfilters=applyfilters,
        applyparentfilters=applyparentfilters,
        parentfiltersonly=parentfiltersonly)

    cursor.execute("SELECT uid FROM reviewusers WHERE review=%s",
                   (review.id, ))

    reviewusers = set([user_id for (user_id, ) in cursor])
    reviewusers_values = set()
    reviewuserfiles_values = set()

    reviewuserfiles_existing = {}

    if update:
        cursor.execute(
            """SELECT reviewuserfiles.uid, reviewfiles.changeset, reviewfiles.file
                            FROM reviewfiles
                            JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id)
                           WHERE reviewfiles.review=%s""", (review.id, ))
        for user_id, changeset_id, file_id in cursor:
            reviewuserfiles_existing[(user_id, changeset_id, file_id)] = True

    new_reviewers = set()
    new_watchers = set()

    cursor.execute(
        """SELECT DISTINCT uid
                        FROM reviewfiles
                        JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id)
                       WHERE review=%s""", (review.id, ))
    old_reviewers = set([user_id for (user_id, ) in cursor])

    for file_id, file_users in reviewers.items():
        for user_id, user_changesets in file_users.items():
            if user_id:
                new_reviewers.add(user_id)

                if user_id not in reviewusers:
                    reviewusers.add(user_id)
                    reviewusers_values.add((review.id, user_id))
                for changeset_id in user_changesets:
                    if (user_id, changeset_id,
                            file_id) not in reviewuserfiles_existing:
                        reviewuserfiles_values.add(
                            (user_id, review.id, changeset_id, file_id))

    for file_id, file_users in watchers.items():
        for user_id, user_changesets in file_users.items():
            if user_id:
                if user_id not in reviewusers:
                    new_watchers.add(user_id)
                    reviewusers.add(user_id)
                    reviewusers_values.add((review.id, user_id))

    new_reviewers -= old_reviewers
    new_watchers -= old_reviewers | new_reviewers

    cursor.executemany("INSERT INTO reviewusers (review, uid) VALUES (%s, %s)",
                       reviewusers_values)
    cursor.executemany(
        "INSERT INTO reviewuserfiles (file, uid) SELECT id, %s FROM reviewfiles WHERE review=%s AND changeset=%s AND file=%s",
        reviewuserfiles_values)

    return new_reviewers, new_watchers
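
All of the examples above revolve around changeset_utils.createChangeset().  The sketch below is a minimal, hypothetical summary of the two calling patterns they use; it is not taken from any project above.  db, user and repository are assumed to have been obtained elsewhere, and the SHA-1s and the file id are placeholders.

# Hypothetical sketch only -- 'db', 'user' and 'repository' are assumed to
# exist already; the SHA-1s and the file id 42 are placeholders.

commit = gitutils.Commit.fromSHA1(db, repository, "0" * 40)

# Pattern 1: a single-commit changeset (the commit diffed against its parent).
changesets = changeset_utils.createChangeset(
    db, user, repository, commit, do_highlight=False)

# Pattern 2: a two-commit changeset restricted to a single file, as used when
# rendering code comments.
changesets = changeset_utils.createChangeset(
    db, user, repository,
    from_commit=gitutils.Commit.fromSHA1(db, repository, "a" * 40),
    to_commit=commit,
    filtered_file_ids=set([42]))

for changeset in changesets:
    for file in changeset.files:
        print file.path, file.old_sha1, file.new_sha1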