Beispiel #1
0
    def as_html(self):
        """Render the commit table for a review request.

        Builds the template context with the parent/child relationship,
        the commit author, and ``hg import``/``hg pull`` command strings
        for the reviewed commit(s).
        """
        user = self.request.user

        commit_data = fetch_commit_data(self.review_request_details)
        commit_id = commit_data.get_for(self.review_request_details,
                                        COMMIT_ID_KEY)

        review_request = self.review_request_details.get_review_request()
        parent = get_parent_rr(review_request)
        parent_details = parent.get_draft(user) or parent

        author = commit_data.extra_data.get(AUTHOR_KEY)

        # If a user can view the parent draft they should also have
        # permission to view every child. We check if the child is
        # accessible anyways in case it has been restricted for other
        # reasons.
        children_details = [
            child for child in gen_child_rrs(parent_details, user=user)
            if child.is_accessible_by(user)
        ]

        # Generate the import and pull input field contents
        import_text = pull_text = ""
        repo_path = review_request.repository.path

        if commit_id:
            import_text = "hg import %s/rev/%s" % (repo_path, commit_id)

        # For a parent (squashed) review request, pull up to the last
        # child's commit so the entire series is fetched. Guard against
        # an empty children list, which previously raised IndexError.
        last_child_commit_id = commit_id
        if (children_details and
                is_parent(self.review_request_details,
                          commit_data=commit_data)):
            last_child_commit_data = fetch_commit_data(children_details[-1])
            last_child_commit_id = (
                last_child_commit_data.extra_data.get(COMMIT_ID_KEY))

        pull_text = "hg pull -r %s %s" % (last_child_commit_id, repo_path)

        # Get just the extended commit message details for display
        commit_message_detail = "\n".join(
            self.review_request_details.description.splitlines()[1:]).strip()

        return get_template('mozreview/commit-main.html').render(
            Context({
                'review_request_details': self.review_request_details,
                'parent_details': parent_details,
                'user': user,
                'author': author,
                'pull_text': pull_text,
                'import_text': import_text,
                'commit_message_detail': commit_message_detail,
            }))
    def as_html(self):
        """Render the ``hg pull`` command snippet for this review request."""
        commit_id = self.commit_data.extra_data.get(COMMIT_ID_KEY)

        if is_parent(self.review_request_details, self.commit_data):
            # A parent (squashed) review request is pulled via its last
            # child's commit, which brings in the entire series.
            user = self.request.user
            squashed_rr = self.review_request_details.get_review_request()
            parent = get_parent_rr(squashed_rr, self.commit_data)
            parent_details = parent.get_draft() or parent
            accessible_children = [
                rr for rr in gen_child_rrs(parent_details, user=user)
                if rr.is_accessible_by(user)]

            last_child_data = fetch_commit_data(accessible_children[-1])
            commit_id = last_child_data.extra_data.get(COMMIT_ID_KEY)

        review_request = self.review_request_details.get_review_request()
        repo_path = review_request.repository.path

        if not commit_id:
            logger.error('No commit_id for review request: %d' % (
                review_request.id))
            return ''

        return get_template('mozreview/hg-pull.html').render(Context({
                'commit_id': commit_id,
                'repo_path': repo_path,
        }))
def _close_child_review_requests(user, review_request, status,
                                 child_close_description, commit_data=None):
    """Closes all child review requests for a squashed review request."""
    commit_data = fetch_commit_data(review_request, commit_data)

    # At the point of closing, it's possible that if this review
    # request was never published, that most of the fields are empty
    # (See https://code.google.com/p/reviewboard/issues/detail?id=3465).
    # Luckily, the extra_data is still around, and more luckily, it's
    # not exposed in the UI for user-meddling. We can find all of the
    # child review requests via extra_data.p2rb.commits.
    for child_rr in gen_child_rrs(review_request, commit_data=commit_data):
        child_rr.close(status, user=user,
                       description=child_close_description)

    # We want to discard any review requests that this squashed review
    # request never got to publish, so were never part of its "commits"
    # list.
    never_published = gen_rrs_by_extra_data_key(review_request,
                                                UNPUBLISHED_KEY,
                                                commit_data=commit_data)
    for child_rr in never_published:
        child_rr.close(ReviewRequest.DISCARDED,
                       user=user,
                       description=NEVER_USED_DESCRIPTION)

    # Reset the bookkeeping lists now that everything is closed.
    commit_data.extra_data[UNPUBLISHED_KEY] = '[]'
    commit_data.extra_data[DISCARD_ON_PUBLISH_KEY] = '[]'
    commit_data.save(update_fields=['extra_data'])
Beispiel #4
0
def on_review_request_closed_discarded(user, review_request, type, **kwargs):
    """Signal handler fired when a review request is closed as discarded.

    For a parent (squashed) review request this discards every child
    review request as well; otherwise it obsoletes the associated review
    attachments in Bugzilla.
    """
    if type != ReviewRequest.DISCARDED:
        return

    commit_data = fetch_commit_data(review_request)

    if is_parent(review_request, commit_data):
        # Clear the commit ID so it can be restored on a later revive
        # (commit IDs must be unique; see on_review_request_reopened).
        # close_child_review_requests will call save on this review request, so
        # we don't have to worry about it.
        review_request.commit = None

        _close_child_review_requests(user,
                                     review_request,
                                     ReviewRequest.DISCARDED,
                                     AUTO_CLOSE_DESCRIPTION,
                                     commit_data=commit_data)
    else:
        # TODO: Remove this once we properly prevent users from closing
        # commit review requests.
        b = Bugzilla(get_bugzilla_api_key(user))
        # assumes the review request has at least one bug — IndexError
        # otherwise; TODO confirm callers guarantee this.
        bug = int(review_request.get_bug_list()[0])
        attachment_updates = BugzillaAttachmentUpdates(b, bug)
        attachment_updates.obsolete_review_attachments(
            get_diff_url(review_request))
        attachment_updates.do_updates()
Beispiel #5
0
def _close_child_review_requests(user,
                                 review_request,
                                 status,
                                 child_close_description,
                                 commit_data=None):
    """Closes all child review requests for a squashed review request.

    Published children are closed with ``status`` and
    ``child_close_description``. Children that were created but never
    published (tracked under ``UNPUBLISHED_KEY``) are discarded with
    ``NEVER_USED_DESCRIPTION``. Both bookkeeping lists in the commit
    data's extra_data are then reset to empty JSON lists and saved.
    """
    commit_data = fetch_commit_data(review_request, commit_data)
    # At the point of closing, it's possible that if this review
    # request was never published, that most of the fields are empty
    # (See https://code.google.com/p/reviewboard/issues/detail?id=3465).
    # Luckily, the extra_data is still around, and more luckily, it's
    # not exposed in the UI for user-meddling. We can find all of the
    # child review requests via extra_data.p2rb.commits.
    for child in gen_child_rrs(review_request, commit_data=commit_data):
        child.close(status, user=user, description=child_close_description)

    # We want to discard any review requests that this squashed review
    # request never got to publish, so were never part of its "commits"
    # list.
    for child in gen_rrs_by_extra_data_key(review_request,
                                           UNPUBLISHED_KEY,
                                           commit_data=commit_data):
        child.close(ReviewRequest.DISCARDED,
                    user=user,
                    description=NEVER_USED_DESCRIPTION)

    commit_data.extra_data[UNPUBLISHED_KEY] = '[]'
    commit_data.extra_data[DISCARD_ON_PUBLISH_KEY] = '[]'
    commit_data.save(update_fields=['extra_data'])
Beispiel #6
0
def get_discard_on_publish_rids(squashed_rr, commit_data=None):
    """A list of review request ids that should be discarded when publishing.
    Adding to this list will mark a review request as to-be-discarded when
    the squashed draft is published on Review Board.
    """
    commit_data = fetch_commit_data(squashed_rr, commit_data)
    rids = json.loads(commit_data.extra_data[DISCARD_ON_PUBLISH_KEY])
    return [int(rid) for rid in rids]
Beispiel #7
0
def get_previous_commits(squashed_rr, commit_data=None):
    """Retrieve the previous commits from a squashed review request.

    This will return a list of tuples specifying the previous commit
    id as well as the review request it is represented by. ex::

        [
            # (<commit-id>, <review-request-id>),
            ('d4bd89322f54', 13),
            ('373537353134', 14),
        ]
    """
    commit_data = fetch_commit_data(squashed_rr, commit_data)

    # Unpublished review requests keep their commit list in the draft
    # extra data rather than the published extra data.
    extra_data = (commit_data.extra_data if squashed_rr.public
                  else commit_data.draft_extra_data)

    if COMMITS_KEY not in extra_data:
        return []

    def _as_str(node):
        # JSON decoding likes to give us unicode types. We speak str
        # internally, so convert.
        if isinstance(node, unicode):
            node = node.encode('utf-8')

        assert isinstance(node, str)
        return node

    return [(_as_str(node), int(rid))
            for node, rid in json.loads(extra_data[COMMITS_KEY])]
    def as_html(self):
        """Render the ``hg pull`` snippet for this review request."""
        commit_id = self.commit_data.extra_data.get(COMMIT_ID_KEY)

        if is_parent(self.review_request_details, self.commit_data):
            # A parent (squashed) review request should be pulled via the
            # last child's commit so the whole series is fetched.
            user = self.request.user
            parent = get_parent_rr(
                self.review_request_details.get_review_request(),
                self.commit_data)
            details = parent.get_draft() or parent
            visible_children = [rr
                                for rr in gen_child_rrs(details, user=user)
                                if rr.is_accessible_by(user)]

            tip_data = fetch_commit_data(visible_children[-1])
            commit_id = tip_data.extra_data.get(COMMIT_ID_KEY)

        review_request = self.review_request_details.get_review_request()
        repo_path = review_request.repository.path

        if not commit_id:
            logger.error('No commit_id for review request: %d' % (
                review_request.id))
            return ''

        return get_template('mozreview/hg-pull.html').render(Context({
                'commit_id': commit_id,
                'repo_path': repo_path,
        }))
def get_previous_commits(squashed_rr, commit_data=None):
    """Retrieve the previous commits from a squashed review request.

    This will return a list of tuples specifying the previous commit
    id as well as the review request it is represented by. ex::

        [
            # (<commit-id>, <review-request-id>),
            ('d4bd89322f54', 13),
            ('373537353134', 14),
        ]
    """
    commit_data = fetch_commit_data(squashed_rr, commit_data)

    # Unpublished review requests keep their commit list in the draft
    # extra data rather than the published extra data.
    if not squashed_rr.public:
        extra_data = commit_data.draft_extra_data
    else:
        extra_data = commit_data.extra_data

    if COMMITS_KEY not in extra_data:
        return []

    commits = []
    for node, rid in json.loads(extra_data[COMMITS_KEY]):
        # JSON decoding likes to give us unicode types. We speak str
        # internally, so convert.
        if isinstance(node, unicode):
            node = node.encode('utf-8')

        assert isinstance(node, str)

        commits.append((node, int(rid)))

    return commits
def get_discard_on_publish_rids(squashed_rr, commit_data=None):
    """A list of review request ids that should be discarded when publishing.

    Adding to this list will mark a review request as to-be-discarded when
    the squashed draft is published on Review Board.

    Note: raises KeyError when DISCARD_ON_PUBLISH_KEY is absent from the
    commit data's extra_data.
    """
    commit_data = fetch_commit_data(squashed_rr, commit_data)
    return map(int, json.loads(
               commit_data.extra_data[DISCARD_ON_PUBLISH_KEY]))
def get_unpublished_rids(squashed_rr, commit_data=None):
    """A list of review request ids created for individual commits but not
    yet published. If this list contains an item, it should be re-used for
    individual commits instead of creating a brand new review request.
    """
    commit_data = fetch_commit_data(squashed_rr, commit_data)
    unpublished = json.loads(commit_data.extra_data[UNPUBLISHED_KEY])
    return [int(rid) for rid in unpublished]
Beispiel #12
0
def get_unpublished_rids(squashed_rr, commit_data=None):
    """A list of review request ids created for individual commits but not
    yet published.

    If this list contains an item, it should be re-used for individual
    commits instead of creating a brand new review request.

    Note: raises KeyError when UNPUBLISHED_KEY is absent from the commit
    data's extra_data.
    """
    commit_data = fetch_commit_data(squashed_rr, commit_data)
    return map(int, json.loads(commit_data.extra_data[UNPUBLISHED_KEY]))
def get_commit_table_context(request, review_request_details):
    """Get the data needed to display the commits table.

    Information provided includes the parent and child review requests,
    as well as autoland information.
    """
    commit_data = fetch_commit_data(review_request_details)

    user = request.user
    parent = get_parent_rr(review_request_details.get_review_request(), commit_data=commit_data)
    parent_details = parent.get_draft(user) or parent

    # If a user can view the parent draft they should also have
    # permission to view every child. We check if the child is
    # accessible anyways in case it has been restricted for other
    # reasons.
    children_details = [
        child for child in gen_child_rrs(parent_details, user=user)
        if child.is_accessible_by(user)]
    n_children = len(children_details)
    current_child_num = prev_child = next_child = None

    # For a child review request, locate it among its siblings so the
    # template can render previous/next navigation links.
    if not is_parent(review_request_details, commit_data=commit_data):
        # NOTE(review): .index() raises ValueError if the current request
        # is not in the accessible-children list — TODO confirm callers
        # guarantee the current request is always accessible.
        cur_index = children_details.index(review_request_details)
        current_child_num = cur_index + 1
        next_child = (children_details[cur_index + 1]
                      if cur_index + 1 < n_children else None)
        prev_child = (children_details[cur_index - 1]
                      if cur_index - 1 >= 0 else None)

    latest_autoland_requests = []
    try_syntax = ''
    repo_urls = set()
    autoland_requests = AutolandRequest.objects.filter(
        review_request_id=parent.id).order_by('-autoland_id')

    # We would like to fetch the latest AutolandRequest for each
    # different repository.
    for land_request in autoland_requests:
        if land_request.repository_url in repo_urls:
            continue

        repo_urls.add(land_request.repository_url)
        latest_autoland_requests.append(land_request)
        # Keep the first non-empty try_syntax seen (requests are ordered
        # newest-first by autoland_id above).
        try_syntax = try_syntax or land_request.extra_data.get('try_syntax', '')

    return {
        'review_request_details': review_request_details,
        'parent_details': parent_details,
        'children_details': children_details,
        'num_children': n_children,
        'current_child_num': current_child_num,
        'next_child': next_child,
        'prev_child': prev_child,
        'latest_autoland_requests': latest_autoland_requests,
        'user': user,
        'try_syntax': try_syntax,
    }
def on_review_request_closed_submitted(user, review_request, type, **kwargs):
    """Signal handler fired when a review request is closed as submitted.

    For a parent (squashed) review request, submit all of its children
    as well; other review requests are ignored.
    """
    if type != ReviewRequest.SUBMITTED:
        return

    commit_data = fetch_commit_data(review_request)

    if is_parent(review_request, commit_data):
        _close_child_review_requests(user,
                                     review_request,
                                     ReviewRequest.SUBMITTED,
                                     AUTO_SUBMITTED_DESCRIPTION,
                                     commit_data=commit_data)
def on_review_request_reopened(user, review_request, **kwargs):
    """Signal handler fired when a review request is reopened.

    Only parent (squashed) review requests are handled: every child is
    reopened, and the squashed commit ID field (cleared when the request
    was discarded) is restored on both the review request and any draft.

    Raises:
        Exception: If another review request already uses this commit ID,
            since commit IDs are unique.
    """
    if not is_parent(review_request):
        return

    commit_data = fetch_commit_data(review_request)
    identifier = commit_data.extra_data[IDENTIFIER_KEY]

    # If we're reviving a squashed review request that was discarded, it means
    # we're going to want to restore the commit ID field back, since we remove
    # it on discarding. This might be a problem if there's already a review
    # request with the same commit ID somewhere on Review Board, since commit
    # IDs are unique.
    #
    # When this signal fires, the state of the review request has already
    # changed, so we query for a review request with the same commit ID that is
    # not equal to the revived review request.
    try:
        preexisting_review_request = ReviewRequest.objects.get(
            commit_id=identifier, repository=review_request.repository)
        if preexisting_review_request != review_request:
            logger.error(
                'Could not revive review request with ID %s because its '
                'commit id (%s) is already being used by a review request '
                'with ID %s.' % (
                    review_request.id,
                    identifier,
                    preexisting_review_request.id))
            # TODO: We need Review Board to recognize exceptions in these
            # signal handlers so that the UI can print out a useful message.
            raise Exception(
                'Revive failed because a review request with commit ID %s '
                'already exists.' % identifier)
    except ReviewRequest.DoesNotExist:
        # Great! This is a success case.
        pass

    for child in gen_child_rrs(review_request):
        child.reopen(user=user)

    # If the review request had been discarded, then the commit ID would
    # have been cleared out. If the review request had been submitted,
    # this is a no-op, since the commit ID would have been there already.
    review_request.commit = identifier
    review_request.save()

    # If the review request has a draft, we have to set the commit ID there as
    # well, otherwise it'll get overwritten on publish.
    draft = review_request.get_draft(user)
    if draft:
        draft.commit = identifier
        draft.save()
Beispiel #16
0
    def _summarize_families(self, request, families):
        """Returns a list of dicts summarizing a parent and its children.

        'families' is a dict (iterated via itervalues()) whose values are
        dicts, each containing a 'parent' key mapping to a single
        ReviewRequest and a 'children' key mapping child review request
        ids to ReviewRequests.

        Each dict in the returned list also has a 'parent' key, mapped to a
        summarized ReviewRequest, and a 'children' key, mapped to a list of
        summarized ReviewRequests. See the docstring for
        _summarize_review_request() for an example of a summarized
        ReviewRequest.
        """
        summaries = []

        for family in families.itervalues():
            # The parent's COMMITS_KEY holds a JSON list of
            # (commit-id, review-request-id) pairs; preserve that order
            # for the children.
            child_rrids = [
                int(rrid) for commit_id, rrid in
                json.loads(fetch_commit_data(family['parent']).get_for(
                    family['parent'], COMMITS_KEY))
            ]
            summaries.append({
                'parent': self._summarize_review_request(
                    request, family['parent']),
                'children': [
                    self._summarize_review_request(
                        request,
                        family['children'][child_rrid],
                        fetch_commit_data(
                            family['children'][child_rrid]
                        ).extra_data[COMMIT_ID_KEY]
                    )
                    # Skip commits whose review request isn't present in
                    # this family's children mapping.
                    for child_rrid in child_rrids
                    if child_rrid in family['children']
                ]
            })

        return summaries
    def _summarize_families(self, request, families):
        """Returns a list of dicts summarizing a parent and its children.

        'families' is a dict (iterated via itervalues()) whose values are
        dicts, each containing a 'parent' key mapping to a single
        ReviewRequest and a 'children' key mapping child review request
        ids to ReviewRequests.

        Each dict in the returned list also has a 'parent' key, mapped to a
        summarized ReviewRequest, and a 'children' key, mapped to a list of
        summarized ReviewRequests. See the docstring for
        _summarize_review_request() for an example of a summarized
        ReviewRequest.
        """
        summaries = []

        for family in families.itervalues():
            # COMMITS_KEY is a JSON list of (commit-id, review-request-id)
            # pairs; preserve that (commit series) ordering.
            child_rrids = [
                int(rrid) for commit_id, rrid in
                json.loads(fetch_commit_data(family['parent']).get_for(
                    family['parent'], COMMITS_KEY))
            ]
            summaries.append({
                'parent': self._summarize_review_request(
                    request, family['parent']),
                'children': [
                    self._summarize_review_request(
                        request,
                        family['children'][child_rrid],
                        fetch_commit_data(
                            family['children'][child_rrid]
                        ).extra_data[COMMIT_ID_KEY]
                    )
                    # Only summarize children present in this family's
                    # children mapping.
                    for child_rrid in child_rrids
                    if child_rrid in family['children']
                ]
            })

        return summaries
Beispiel #18
0
def on_review_request_closed_submitted(user, review_request, type, **kwargs):
    """Signal handler fired when a review request is closed as submitted.

    For a parent (squashed) review request, submit all of its children
    as well; other review requests are ignored.
    """
    if type != ReviewRequest.SUBMITTED:
        return

    commit_data = fetch_commit_data(review_request)

    if not is_parent(review_request, commit_data):
        return

    _close_child_review_requests(user,
                                 review_request,
                                 ReviewRequest.SUBMITTED,
                                 AUTO_SUBMITTED_DESCRIPTION,
                                 commit_data=commit_data)
Beispiel #19
0
def on_review_request_reopened(user, review_request, **kwargs):
    """Signal handler fired when a review request is reopened.

    Only parent (squashed) review requests are handled: every child is
    reopened, and the squashed commit ID field (cleared when the request
    was discarded) is restored on both the review request and any draft.

    Raises:
        Exception: If another review request already uses this commit ID,
            since commit IDs are unique.
    """
    if not is_parent(review_request):
        return

    commit_data = fetch_commit_data(review_request)
    identifier = commit_data.extra_data[IDENTIFIER_KEY]

    # If we're reviving a squashed review request that was discarded, it means
    # we're going to want to restore the commit ID field back, since we remove
    # it on discarding. This might be a problem if there's already a review
    # request with the same commit ID somewhere on Review Board, since commit
    # IDs are unique.
    #
    # When this signal fires, the state of the review request has already
    # changed, so we query for a review request with the same commit ID that is
    # not equal to the revived review request.
    try:
        preexisting_review_request = ReviewRequest.objects.get(
            commit_id=identifier, repository=review_request.repository)
        if preexisting_review_request != review_request:
            logger.error(
                'Could not revive review request with ID %s because its '
                'commit id (%s) is already being used by a review request '
                'with ID %s.' %
                (review_request.id, identifier, preexisting_review_request.id))
            # TODO: We need Review Board to recognize exceptions in these
            # signal handlers so that the UI can print out a useful message.
            raise Exception(
                'Revive failed because a review request with commit ID %s '
                'already exists.' % identifier)
    except ReviewRequest.DoesNotExist:
        # Great! This is a success case.
        pass

    for child in gen_child_rrs(review_request):
        child.reopen(user=user)

    # If the review request had been discarded, then the commit ID would
    # have been cleared out. If the review request had been submitted,
    # this is a no-op, since the commit ID would have been there already.
    review_request.commit = identifier
    review_request.save()

    # If the review request has a draft, we have to set the commit ID there as
    # well, otherwise it'll get overwritten on publish.
    draft = review_request.get_draft(user)
    if draft:
        draft.commit = identifier
        draft.save()
Beispiel #20
0
def commits_summary_table_fragment(request, parent_id=None, child_id=None):
    """Return the #mozreview-child-requests table."""

    # Load and validate the parent review request.
    try:
        parent_request = ReviewRequest.objects.get(id=parent_id)
    except ReviewRequest.DoesNotExist:
        return HttpResponseNotFound('Parent Not Found')

    if not parent_request.is_accessible_by(request.user):
        return HttpResponseNotAllowed('Permission denied')

    commit_data = fetch_commit_data(parent_request)

    if not is_parent(parent_request, commit_data):
        return HttpResponseNotAllowed('Invalid parent')

    if COMMITS_KEY not in commit_data.extra_data:
        logging.error('Parent review request %s missing COMMITS_KEY' %
                      parent_request.id)
        return HttpResponseNotAllowed('Invalid parent')

    # Load and validate the requested child review request.
    try:
        child_request = ReviewRequest.objects.get(id=child_id)
    except ReviewRequest.DoesNotExist:
        return HttpResponseNotFound('Child Not Found')

    if is_parent(child_request):
        return HttpResponseNotAllowed('Invalid child')

    # The requested child must actually belong to this parent.
    children_details = list(gen_child_rrs(parent_request, user=request.user))
    if not any(rr.id == child_request.id for rr in children_details):
        return HttpResponseNotAllowed('Invalid child')

    # Render the commits table for the child in the context of its
    # siblings.
    return render(
        request, 'mozreview/commits-requests.html', {
            'user': request.user,
            'review_request_details': child_request,
            'children_details': children_details,
        })
Beispiel #21
0
    def get_list(self, request, *args, **kwargs):
        """Return the list resource: review-request families grouped and
        summarized for the requested queryset.

        Families that reference children (or a parent) missing from the
        initial queryset are re-fetched so every returned family is
        complete.
        """
        if not self.has_list_access_permissions(request, *args, **kwargs):
            return self.get_no_access_error(request, *args, **kwargs)

        try:
            queryset = self.get_queryset(request, is_list=True, *args,
                                         **kwargs)
        except ReviewRequest.DoesNotExist:
            return DOES_NOT_EXIST

        # Sort out the families.
        families = self._sort_families(request, queryset)
        missing_rrids = set()

        # Verify that we aren't missing any review requests.  We want
        # complete families, even if some do not match the requested bug,
        # i.e., if some children are associated with different bugs.
        # Because a few old review requests are currently in weird states,
        # and since all review requests in a given family should currently
        # have the same bug ID; we skip this part if we can't get COMMITS_KEY
        # out of the parent's extra_data.
        for parent_id, family in families.iteritems():
            parent_commit_data = fetch_commit_data(family['parent'])
            commits_json = parent_commit_data.get_for(family['parent'],
                                                      COMMITS_KEY)

            if family['parent'] and commits_json is not None:
                # Collect children listed in the parent's commits that are
                # not yet part of this family. (A plain loop, not a
                # side-effect list comprehension.)
                for sha, child_rrid in json.loads(commits_json):
                    if child_rrid not in family['children']:
                        missing_rrids.add(child_rrid)
            else:
                missing_rrids.add(parent_id)

        # Fetch the missing review requests and fold them into the
        # existing families.
        self._sort_families(
            request, ReviewRequest.objects.filter(id__in=missing_rrids),
            families=families)

        summaries = self._summarize_families(request, families)

        data = {
            self.list_result_key: summaries,
            'total_results': len(summaries),
            'links': self.get_links(request=request)
        }

        return 200, data
    def get_list(self, request, *args, **kwargs):
        """Return the list resource: review-request families grouped and
        summarized for the requested queryset.
        """
        if not self.has_list_access_permissions(request, *args, **kwargs):
            return self.get_no_access_error(request, *args, **kwargs)

        try:
            queryset = self.get_queryset(request, is_list=True, *args,
                                         **kwargs)
        except ReviewRequest.DoesNotExist:
            return DOES_NOT_EXIST

        # Sort out the families.
        families = self._sort_families(request, queryset)
        missing_rrids = set()

        # Verify that we aren't missing any review requests.  We want
        # complete families, even if some do not match the requested bug,
        # i.e., if some children are associated with different bugs.
        # Because a few old review requests are currently in weird states,
        # and since all review requests in a given family should currently
        # have the same bug ID; we skip this part if we can't get COMMITS_KEY
        # out of the parent's extra_data.
        for parent_id, family in families.iteritems():
            parent_commit_data = fetch_commit_data(family['parent'])
            commits_json = parent_commit_data.get_for(family['parent'],
                                                      COMMITS_KEY)

            if family['parent'] and commits_json is not None:
                commit_tuples = json.loads(commits_json)
                # NOTE(review): list comprehension used purely for its
                # side effect (set.add); a plain for loop would be clearer.
                [missing_rrids.add(child_rrid) for sha, child_rrid in
                 commit_tuples if child_rrid not in family['children']]
            else:
                missing_rrids.add(parent_id)

        # Fetch the missing review requests and fold them into the
        # existing families.
        self._sort_families(
            request, ReviewRequest.objects.filter(id__in=missing_rrids),
            families=families)

        summaries = self._summarize_families(request, families)

        data = {
            self.list_result_key: summaries,
            'total_results': len(summaries),
            'links': self.get_links(request=request)
        }

        return 200, data
Beispiel #23
0
    def filter(self, files):
        """Filter out commit message FileDiff."""
        if not files:
            return []

        history = files[0].diffset.history
        if not history:
            # No history, no commit message FileDiff has been created yet
            return files

        review_request = history.review_request.all()[0]
        data = fetch_commit_data(review_request)
        msg_ids_json = data.get_for(review_request,
                                    COMMIT_MSG_FILEDIFF_IDS_KEY)

        # ReviewRequests created before implementing commit message FileDiff
        # have no COMMIT_MSG_FILEDIFF_IDS_KEY in their extra_data.
        if not msg_ids_json:
            return files

        excluded_ids = json.loads(msg_ids_json).values()
        return [fd for fd in files if fd.id not in excluded_ids]
def on_review_request_closed_discarded(user, review_request, type, **kwargs):
    """Signal handler fired when a review request is closed as discarded.

    For a parent (squashed) review request this discards every child
    review request and clears the commit ID; otherwise it obsoletes the
    associated review attachments in Bugzilla.
    """
    if type != ReviewRequest.DISCARDED:
        return

    commit_data = fetch_commit_data(review_request)

    if is_parent(review_request, commit_data):
        # close_child_review_requests will call save on this review request, so
        # we don't have to worry about it.
        review_request.commit = None

        _close_child_review_requests(user, review_request,
                                     ReviewRequest.DISCARDED,
                                     AUTO_CLOSE_DESCRIPTION,
                                     commit_data=commit_data)
    else:
        # TODO: Remove this once we properly prevent users from closing
        # commit review requests.
        b = Bugzilla(get_bugzilla_api_key(user))
        # assumes the review request has at least one bug — IndexError
        # otherwise; TODO confirm callers guarantee this.
        bug = int(review_request.get_bug_list()[0])
        diff_url = '%sdiff/#index_header' % get_obj_url(review_request)
        b.obsolete_review_attachments(bug, diff_url)
Beispiel #25
0
    def is_approved_parent(self, review_request):
        """Check approval for a parent review request"""
        children = list(gen_child_rrs(review_request))

        if not children:
            # This parent review request had no children, so it's either
            # private or something has gone seriously wrong.
            logger.error('Review request %s has no children' %
                         review_request.id)
            return False, 'Review request has no children.'

        # Report the first child that is not approved, if any.
        for child in children:
            if child.approved:
                continue

            child_data = fetch_commit_data(child)
            node = child_data.extra_data.get(COMMIT_ID_KEY, None)

            if node is None:
                logger.error('Review request %s missing commit_id' % child.id)
                return False, 'A Commit is not approved.'

            return False, 'Commit %s is not approved.' % node

        return True
Beispiel #26
0
    def is_approved_parent(self, review_request):
        """Check approval for a parent review request.

        Returns True when every child is approved; otherwise returns a
        (False, reason) tuple.

        NOTE(review): the success and failure paths return different
        types (bare bool vs. tuple) — presumably the caller accepts both;
        verify against the approval-hook contract.
        """
        children = list(gen_child_rrs(review_request))

        if not children:
            # This parent review request had no children, so it's either
            # private or something has gone seriously wrong.
            logger.error('Review request %s has no children' %
                         review_request.id)
            return False, 'Review request has no children.'

        for rr in children:
            if not rr.approved:
                commit_data = fetch_commit_data(rr)
                commit_id = commit_data.extra_data.get(COMMIT_ID_KEY, None)

                if commit_id is None:
                    logger.error('Review request %s missing commit_id'
                                 % rr.id)
                    return False, 'A Commit is not approved.'

                return False, 'Commit %s is not approved.' % commit_id

        return True
Beispiel #27
0
def commit_id(review_request_details):
    """Return the commit id of a review request or review request draft."""
    data = fetch_commit_data(review_request_details)
    commit = data.get_for(review_request_details, COMMIT_ID_KEY)
    return str(commit)
Beispiel #28
0
def update_review_request(local_site, request, privileged_user, reviewer_cache,
                          rr, commit, create_commit_msg_filediff):
    """Synchronize the state of a review request with a commit.

    Updates the commit message, refreshes the diff, etc.

    ``commit`` is a dict carrying at least 'message', 'bug', 'id',
    'author', 'first_public_ancestor' and 'diff_b64', plus optionally
    'base_commit_id', 'reviewers' and 'requal_reviewers'.

    When ``create_commit_msg_filediff`` is truthy, an extra FileDiff
    containing the commit message itself is created and attached to the
    new DiffSet so the message can be reviewed like a file.

    Returns a (draft, warnings) tuple where ``warnings`` is a list of
    human-readable strings to surface to the pusher.

    Raises DiffProcessingException if the diff cannot be processed.

    NOTE(review): ``local_site`` is not used in this body — presumably
    kept for interface parity with callers; confirm before removing.
    """
    # Reuse the existing draft when there is one, otherwise create one.
    try:
        draft = rr.draft.get()
    except ReviewRequestDraft.DoesNotExist:
        draft = ReviewRequestDraft.create(rr)

    # First line of the commit message becomes the summary.
    draft.summary = commit['message'].splitlines()[0]
    draft.description = commit['message']
    draft.bugs_closed = commit['bug']

    commit_data = fetch_commit_data(draft)

    reviewer_users, unrecognized_reviewers = \
        resolve_reviewers(reviewer_cache, commit.get('reviewers', []))
    requal_reviewer_users, unrecognized_requal_reviewers = \
        resolve_reviewers(reviewer_cache, commit.get('requal_reviewers', []))

    warnings = []

    for reviewer in unrecognized_reviewers | unrecognized_requal_reviewers:
        warnings.append('unrecognized reviewer: %s' % reviewer)
        logger.info('unrecognized reviewer: %s' % reviewer)

    # "requal" reviewers only re-qualify an earlier ship-it; if a named
    # user never granted one, warn and request review on their behalf.
    if requal_reviewer_users:
        pr = previous_reviewers(rr)
        for user in requal_reviewer_users:
            if not pr.get(user.username, False):
                warnings.append('commit message for %s has r=%s but they '
                                'have not granted a ship-it. review will be '
                                'requested on your behalf' %
                                (commit['id'][:12], user.username))

        reviewer_users |= requal_reviewer_users

    # Commit message FileDiff creation.
    base_commit_id = commit.get('base_commit_id')
    commit_message_filediff = None

    if create_commit_msg_filediff:
        # Prepare commit message data
        commit_message_name = commit_data.draft_extra_data.get(
            COMMIT_MSG_FILENAME_KEY, 'commit-message-%s' % base_commit_id[0:5])
        commit_message_lines = commit['message'].split('\n')
        # Render the whole message as an all-added unified diff body.
        commit_message_diff = COMMIT_MSG_DIFF_FORMAT % {
            'source_filename': commit_message_name,
            'target_filename': commit_message_name,
            'num_lines': len(commit_message_lines),
            'diff':
            '%s\n' % '\n'.join(['+%s' % l for l in commit_message_lines])
        }

        commit_data.extra_data[COMMIT_MSG_FILENAME_KEY] = commit_message_name

        # Commit message FileDiff has to be displayed as the first one.
        # Therefore it needs to be created before other FileDiffs in
        # the DiffSet.
        # FileDiff object has a required DiffSet field. Because target DiffSet
        # is created along with other FileDiffs, there is a need to
        # create a temporary one.
        # Later in the code temporary DiffSet is replaced in the commit
        # message FileDiff with the target one.
        temp_diffset = get_temp_diffset(rr.repository)

        commit_message_filediff = FileDiff.objects.create(
            diffset=temp_diffset,
            source_file=commit_message_name,
            dest_file=commit_message_name,
            source_revision=PRE_CREATION,
            dest_detail='',
            parent_diff='',
            binary=False,
            status='M',
            diff=commit_message_diff)

    # Carry over from last time unless commit message overrules.
    if reviewer_users:
        draft.target_people.clear()
    for user in sorted(reviewer_users):
        draft.target_people.add(user)
        logger.debug('adding reviewer %s to #%d' % (user.username, rr.id))

    try:
        # NOTE: .decode('base64') is the Python 2 codec; this would need
        # base64.b64decode if this code is ever ported to Python 3.
        diffset = DiffSet.objects.create_from_data(
            repository=rr.repository,
            diff_file_name='diff',
            diff_file_contents=commit['diff_b64'].encode('ascii').decode(
                'base64'),
            parent_diff_file_name='diff',
            parent_diff_file_contents=None,
            diffset_history=None,
            basedir='',
            request=request,
            base_commit_id=base_commit_id,
            save=True,
        )

        update_diffset_history(rr, diffset)
        diffset.save()

        DiffSetVerification(diffset=diffset).save(
            authorized_user=privileged_user, force_insert=True)
    except Exception:
        logger.exception('error processing diff')
        raise DiffProcessingException()

    # Now that the proper DiffSet has been created, re-assign
    # the commit message FileDiff we created to the new DiffSet.
    if commit_message_filediff:
        commit_message_filediff.diffset = diffset
        commit_message_filediff.save()

        # Map diffset revision -> commit-message FileDiff pk, stored as
        # a JSON dict in draft_extra_data.
        commit_msg_filediff_ids = json.loads(
            commit_data.draft_extra_data.get(COMMIT_MSG_FILEDIFF_IDS_KEY,
                                             '{}'))
        commit_msg_filediff_ids[str(
            diffset.revision)] = commit_message_filediff.pk
        # Store commit message FileDiffs ids in extra_data
        commit_data.draft_extra_data[COMMIT_MSG_FILEDIFF_IDS_KEY] = json.dumps(
            commit_msg_filediff_ids)

    commit_data.draft_extra_data.update({
        AUTHOR_KEY:
        commit['author'],
        COMMIT_ID_KEY:
        commit['id'],
        FIRST_PUBLIC_ANCESTOR_KEY:
        commit['first_public_ancestor'],
    })
    commit_data.save(update_fields=['draft_extra_data', 'extra_data'])

    update_review_request_draft_diffset(rr, diffset, draft=draft)

    return draft, warnings
Beispiel #29
0
def on_draft_pre_delete(sender, instance, using, **kwargs):
    """Handle draft discards.

    Connected to the Django pre_delete signal: ``sender`` is the model
    class being deleted and ``instance`` the object itself.

    There are no handy signals built into Review Board (yet) for us to detect
    when a squashed Review Request Draft is discarded. Instead, we monitor for
    deletions of models, and handle cases where the models being deleted are
    ReviewRequestDrafts. We then do some processing to ensure that the draft
    is indeed a draft of a squashed review request that we want to handle,
    and then propagate the discard down to the child review requests.
    """
    if not sender == ReviewRequestDraft:
        return

    # Drafts can get deleted for a number of reasons. They get deleted when
    # drafts are discarded, obviously, but also whenever review requests are
    # published, because the data gets copied over to the review request, and
    # then the draft is blown away. Unfortunately, on_pre_delete doesn't give
    # us too many clues about which scenario we're in, so we have to infer it
    # based on other things attached to the model. This is a temporary fix
    # until we get more comprehensive draft deletion signals built into Review
    # Board.
    #
    # In the case where the review request is NOT public yet, the draft will
    # not have a change description. In this case, we do not need to
    # differentiate between publish and discard because discards of non-public
    # review request's drafts will always cause the review request to be closed
    # as discarded, and this case is handled by on_review_request_closed().
    #
    # In the case where the review request has a change description, but it's
    # set to public, we must have just published this draft before deleting it,
    # so there's nothing to do here.
    if (instance.changedesc is None or instance.changedesc.public):
        return

    review_request = instance.review_request

    if not review_request:
        return

    commit_data = fetch_commit_data(review_request)

    # Only squashed (parent) review requests are handled here.
    if not is_parent(review_request, commit_data):
        return

    # If the review request is marked as discarded, then we must be closing
    # it, and so the on_review_request_closed() handler will take care of it.
    if review_request.status == ReviewRequest.DISCARDED:
        return

    user = review_request.submitter

    # Discard the drafts of all child review requests.
    for child in gen_child_rrs(review_request, commit_data=commit_data):
        draft = child.get_draft()
        if draft:
            draft.delete()

    # Close children that were created by this draft but never published.
    for child in gen_rrs_by_extra_data_key(review_request,
                                           UNPUBLISHED_KEY,
                                           commit_data=commit_data):
        child.close(ReviewRequest.DISCARDED,
                    user=user,
                    description=NEVER_USED_DESCRIPTION)

    # Reset the bookkeeping lists now that the discard has propagated.
    commit_data.extra_data[DISCARD_ON_PUBLISH_KEY] = '[]'
    commit_data.extra_data[UNPUBLISHED_KEY] = '[]'
    commit_data.save(update_fields=['extra_data'])
def handle_commits_published(extension=None, **kwargs):
    """Handle sending 'mozreview.commits.published'.

    This message is only sent when the parent review request, in a set of
    pushed review requests, is published with new commit information.

    This is a useful message for consumers who care about new or modified
    commits being published for review.

    Expects ``review_request`` (and optionally ``changedesc``) in kwargs,
    as supplied by the publish signal.
    """
    review_request = kwargs.get('review_request')

    if review_request is None:
        return

    commit_data = fetch_commit_data(review_request)

    # Only pushed parent review requests carry the commit set we publish.
    if (not is_pushed(review_request, commit_data)
            or not is_parent(review_request, commit_data)):
        return

    # Check the change description and only continue if it contains a change
    # to the commit information. Currently change descriptions won't include
    # information about our extra data field, so we'll look for a change to
    # the diff which is mandatory if the commits changed. TODO: Properly use
    # the commit information once we start populating the change description
    # with it.
    #
    # A change description will not exist if this is the first publish of the
    # review request. In that case we know there must be commits since this
    # is a pushed request.
    cd = kwargs.get('changedesc')
    if (cd is not None and ('diff' not in cd.fields_changed
                            or 'added' not in cd.fields_changed['diff'])):
        return

    # We publish both the review repository url as well as the landing
    # ("inbound") repository url. This gives consumers which perform hg
    # operations the option to avoid cloning the review repository, which may
    # be large.
    repo = review_request.repository
    repo_url = repo.path
    landing_repo_url = repo.extra_data.get('landing_repository_url')

    child_rrids = []
    commits = []
    # COMMITS_KEY holds a JSON list of (revision, review_request_id) pairs.
    ext_commits = json.loads(commit_data.extra_data.get(COMMITS_KEY, '[]'))

    for rev, rrid in ext_commits:
        child_rrids.append(int(rrid))
        commits.append({
            'rev': rev,
            'review_request_id': int(rrid),
            'diffset_revision': None
        })

    # In order to retrieve the diff revision for each commit we need to fetch
    # their correpsonding child review request.
    review_requests = dict(
        (obj.id, obj)
        for obj in ReviewRequest.objects.filter(pk__in=child_rrids))

    for commit_info in commits:
        # TODO: Every call to get_latest_diffset() makes its own query to the
        # database. It is probably possible to retrieve the diffsets we care
        # about using a single query through Django's ORM, but it's not trivial.
        commit_info['diffset_revision'] = review_requests[
            commit_info['review_request_id']].get_latest_diffset().revision

    msg = base.GenericMessage()
    msg.routing_parts.append('mozreview.commits.published')
    msg.data['parent_review_request_id'] = review_request.id
    msg.data['parent_diffset_revision'] = review_request.get_latest_diffset(
    ).revision
    msg.data['commits'] = commits
    msg.data['repository_url'] = repo_url
    msg.data['landing_repository_url'] = landing_repo_url

    # TODO: Make work with RB localsites.
    msg.data['review_board_url'] = get_server_url()

    publish_message(extension, msg)
def update_review_request(local_site, request, privileged_user, reviewer_cache,
                          rr, commit):
    """Synchronize the state of a review request with a commit.

    Updates the commit message, refreshes the diff, etc.

    ``commit`` is a dict carrying at least 'message', 'bug', 'id',
    'author', 'first_public_ancestor' and 'diff_b64', plus optionally
    'base_commit_id', 'reviewers' and 'requal_reviewers'.

    Returns a (draft, warnings) tuple where ``warnings`` is a list of
    human-readable strings to surface to the pusher.

    Raises DiffProcessingException if the diff cannot be processed.

    NOTE(review): ``local_site`` is not used in this body — presumably
    kept for interface parity with callers; confirm before removing.
    """
    # Reuse the existing draft when there is one, otherwise create one.
    try:
        draft = rr.draft.get()
    except ReviewRequestDraft.DoesNotExist:
        draft = ReviewRequestDraft.create(rr)

    # First line of the commit message becomes the summary.
    draft.summary = commit['message'].splitlines()[0]
    draft.description = commit['message']
    draft.bugs_closed = commit['bug']

    commit_data = fetch_commit_data(draft)
    commit_data.draft_extra_data.update({
        AUTHOR_KEY:
        commit['author'],
        COMMIT_ID_KEY:
        commit['id'],
        FIRST_PUBLIC_ANCESTOR_KEY:
        commit['first_public_ancestor'],
    })
    commit_data.save(update_fields=['draft_extra_data'])

    reviewer_users, unrecognized_reviewers = \
        resolve_reviewers(reviewer_cache, commit.get('reviewers', []))
    requal_reviewer_users, unrecognized_requal_reviewers = \
        resolve_reviewers(reviewer_cache, commit.get('requal_reviewers', []))

    warnings = []

    for reviewer in unrecognized_reviewers | unrecognized_requal_reviewers:
        warnings.append('unrecognized reviewer: %s' % reviewer)
        logger.info('unrecognized reviewer: %s' % reviewer)

    # "requal" reviewers only re-qualify an earlier ship-it; if a named
    # user never granted one, warn and request review on their behalf.
    if requal_reviewer_users:
        pr = previous_reviewers(rr)
        for user in requal_reviewer_users:
            if not pr.get(user.username, False):
                warnings.append('commit message for %s has r=%s but they '
                                'have not granted a ship-it. review will be '
                                'requested on your behalf' %
                                (commit['id'][:12], user.username))

        reviewer_users |= requal_reviewer_users

    # Carry over from last time unless commit message overrules.
    if reviewer_users:
        draft.target_people.clear()
    for user in sorted(reviewer_users):
        draft.target_people.add(user)
        logger.debug('adding reviewer %s to #%d' % (user.username, rr.id))

    try:
        # NOTE: .decode('base64') is the Python 2 codec; this would need
        # base64.b64decode if this code is ever ported to Python 3.
        diffset = DiffSet.objects.create_from_data(
            repository=rr.repository,
            diff_file_name='diff',
            diff_file_contents=commit['diff_b64'].encode('ascii').decode(
                'base64'),
            parent_diff_file_name='diff',
            parent_diff_file_contents=None,
            diffset_history=None,
            basedir='',
            request=request,
            base_commit_id=commit.get('base_commit_id'),
            save=True,
        )
        update_diffset_history(rr, diffset)
        diffset.save()

        DiffSetVerification(diffset=diffset).save(
            authorized_user=privileged_user, force_insert=True)
    except Exception:
        logger.exception('error processing diff')
        raise DiffProcessingException()

    update_review_request_draft_diffset(rr, diffset, draft=draft)

    return draft, warnings
 def load_value(self, review_request_details):
     """Return this field's value for the given review request details.

     Deliberately fetches a fresh CommitData for
     ``review_request_details`` instead of reusing ``self.commit_data``;
     see the comment on BaseReviewRequestField.
     """
     data = fetch_commit_data(review_request_details)
     return data.get_for(review_request_details, self.field_id)
    def __init__(self, review_request_details, *args, **kwargs):
        # Fetch and cache CommitData before invoking the superclass
        # __init__ (presumably so it is available during base-class
        # initialization -- TODO confirm).
        self.commit_data = fetch_commit_data(review_request_details)

        super(PullCommitField, self).__init__(review_request_details,
                                              *args, **kwargs)
    def create(self, request, review_request_id, commit_descriptions, *args, **kwargs):
        """Trigger an Autoland landing for a pushed parent review request.

        Validates that the review request is a pushed parent, that the
        repository and extension are configured for autolanding, then
        submits the series' tip revision to the Autoland service.

        Returns a webapi-style response: an error object, or
        ``(200, {})`` once the job has been scheduled.
        """
        try:
            rr = ReviewRequest.objects.get(pk=review_request_id)
        except ReviewRequest.DoesNotExist:
            return DOES_NOT_EXIST

        commit_data = fetch_commit_data(rr)

        # Only the parent review request of a pushed series may land.
        if not is_pushed(rr, commit_data) or not is_parent(rr, commit_data):
            logger.error(
                "Failed triggering Autoland because the review "
                "request is not pushed, or not the parent review "
                "request."
            )
            return NOT_PUSHED_PARENT_REVIEW_REQUEST

        target_repository = rr.repository.extra_data.get("landing_repository_url")
        push_bookmark = rr.repository.extra_data.get("landing_bookmark")

        if not target_repository:
            return AUTOLAND_CONFIGURATION_ERROR.with_message(
                "Autoland has not been configured with a proper landing URL."
            )

        # The last entry in the pushed commit list is the tip revision.
        last_revision = json.loads(commit_data.extra_data.get(COMMITS_KEY))[-1][0]

        ext = get_extension_manager().get_enabled_extension("mozreview.extension.MozReviewExtension")

        logger.info(
            "Submitting a request to Autoland for review request "
            "ID %s for revision %s destination %s" % (review_request_id, last_revision, target_repository)
        )

        autoland_url = ext.get_settings("autoland_url")
        if not autoland_url:
            return AUTOLAND_CONFIGURATION_ERROR

        autoland_user = ext.get_settings("autoland_user")
        autoland_password = ext.get_settings("autoland_password")

        if not autoland_user or not autoland_password:
            return AUTOLAND_CONFIGURATION_ERROR

        pingback_url = autoland_request_update_resource.get_uri(request)

        # Serialize landing attempts for the same (request, repo, rev).
        lock_id = get_autoland_lock_id(rr.id, target_repository, last_revision)
        if not acquire_lock(lock_id):
            return AUTOLAND_REQUEST_IN_PROGRESS
        try:
            response = requests.post(
                autoland_url + "/autoland",
                data=json.dumps(
                    {
                        "ldap_username": request.mozreview_profile.ldap_username,
                        "tree": rr.repository.name,
                        "pingback_url": pingback_url,
                        "rev": last_revision,
                        "destination": target_repository,
                        "push_bookmark": push_bookmark,
                        "commit_descriptions": json.loads(commit_descriptions),
                    }
                ),
                headers={"content-type": "application/json"},
                timeout=AUTOLAND_REQUEST_TIMEOUT,
                auth=(autoland_user, autoland_password),
            )
        # Timeout is a subclass of RequestException, so it must be
        # caught first or the AUTOLAND_TIMEOUT branch is unreachable.
        except requests.exceptions.Timeout:
            logger.error("We timed out when submitting a request to " "Autoland")
            release_lock(lock_id)
            return AUTOLAND_TIMEOUT
        except requests.exceptions.RequestException:
            logger.error("We hit a RequestException when submitting a " "request to Autoland")
            release_lock(lock_id)
            return AUTOLAND_ERROR

        if response.status_code != 200:
            release_lock(lock_id)
            return AUTOLAND_ERROR, {"status_code": response.status_code, "message": response.json().get("error")}

        # We succeeded in scheduling the job.
        # Initialize the id before the try so the finally clause cannot
        # hit a NameError if response.json() or int() raises.
        autoland_request_id = None
        try:
            autoland_request_id = int(response.json().get("request_id", 0))
        finally:
            # Returning from finally deliberately swallows any in-flight
            # exception, converting a malformed response into an error
            # result for the caller.
            if autoland_request_id is None:
                release_lock(lock_id)
                return AUTOLAND_ERROR, {"status_code": response.status_code, "request_id": None}

        AutolandRequest.objects.create(
            autoland_id=autoland_request_id,
            push_revision=last_revision,
            repository_url=target_repository,
            review_request_id=rr.id,
            user_id=request.user.id,
        )

        AutolandEventLogEntry.objects.create(
            status=AutolandEventLogEntry.REQUESTED, autoland_request_id=autoland_request_id
        )

        self.save_autolandrequest_id("p2rb.autoland", rr, autoland_request_id)

        return 200, {}
Beispiel #35
0
    def create(self, request, review_request_id, try_syntax, *args, **kwargs):
        """Trigger an Autoland push to try for a pushed parent review request.

        Validates the try syntax and that the review request is a pushed
        parent with try autolanding enabled, then submits the series'
        tip revision to the Autoland service with a hard-coded try
        destination.

        Returns a webapi-style response: an error object, or
        ``(200, {})`` once the job has been scheduled.
        """
        try:
            rr = ReviewRequest.objects.get(pk=review_request_id)
        except ReviewRequest.DoesNotExist:
            return DOES_NOT_EXIST

        if not try_syntax.startswith('try: '):
            return INVALID_FORM_DATA, {
                'fields': {
                    'try_syntax': ['The provided try syntax was invalid']
                }
            }

        commit_data = fetch_commit_data(rr)

        # Only the parent review request of a pushed series may land.
        if not is_pushed(rr, commit_data) or not is_parent(rr, commit_data):
            logger.error('Failed triggering Autoland because the review '
                         'request is not pushed, or not the parent review '
                         'request.')
            return NOT_PUSHED_PARENT_REVIEW_REQUEST

        enabled = rr.repository.extra_data.get('autolanding_to_try_enabled')

        if not enabled:
            return AUTOLAND_CONFIGURATION_ERROR.with_message(
                'Autolanding to try not enabled.')

        target_repository = rr.repository.extra_data.get('try_repository_url')

        if not target_repository:
            return AUTOLAND_CONFIGURATION_ERROR.with_message(
                'Autoland has not been configured with a proper try URL.')

        # The last entry in the pushed commit list is the tip revision.
        last_revision = json.loads(
            commit_data.extra_data.get(COMMITS_KEY))[-1][0]
        ext = get_extension_manager().get_enabled_extension(
            'mozreview.extension.MozReviewExtension')

        logger.info('Submitting a request to Autoland for review request '
                    'ID %s for revision %s destination try' %
                    (review_request_id, last_revision))

        autoland_url = ext.get_settings('autoland_url')

        if not autoland_url:
            return AUTOLAND_CONFIGURATION_ERROR

        autoland_user = ext.get_settings('autoland_user')
        autoland_password = ext.get_settings('autoland_password')

        if not autoland_user or not autoland_password:
            return AUTOLAND_CONFIGURATION_ERROR

        pingback_url = autoland_request_update_resource.get_uri(request)

        # Serialize landing attempts for the same (request, repo, rev).
        lock_id = get_autoland_lock_id(rr.id, target_repository, last_revision)

        if not acquire_lock(lock_id):
            return AUTOLAND_REQUEST_IN_PROGRESS

        try:
            # We use a hard-coded destination here. If we ever open this up
            # to make the destination a parameter to this resource, we need to
            # verify that the destination is in fact an "scm_level_1"
            # repository to ensure that people don't try to land to inbound
            # using this resource.
            response = requests.post(
                autoland_url + '/autoland',
                data=json.dumps({
                    'ldap_username': request.mozreview_profile.ldap_username,
                    'tree': rr.repository.name,
                    'pingback_url': pingback_url,
                    'rev': last_revision,
                    'destination': TRY_AUTOLAND_DESTINATION,
                    'trysyntax': try_syntax,
                }),
                headers={
                    'content-type': 'application/json',
                },
                timeout=AUTOLAND_REQUEST_TIMEOUT,
                auth=(autoland_user, autoland_password))
        # Timeout is a subclass of RequestException, so it must be
        # caught first or the AUTOLAND_TIMEOUT branch is unreachable.
        except requests.exceptions.Timeout:
            logger.error('We timed out when submitting a request to '
                         'Autoland')
            release_lock(lock_id)
            return AUTOLAND_TIMEOUT
        except requests.exceptions.RequestException:
            logger.error('We hit a RequestException when submitting a '
                         'request to Autoland')
            release_lock(lock_id)
            return AUTOLAND_ERROR

        if response.status_code != 200:
            release_lock(lock_id)
            return AUTOLAND_ERROR, {
                'status_code': response.status_code,
                'message': response.json().get('error'),
            }

        # We succeeded in scheduling the job.
        # Initialize the id before the try so the finally clause cannot
        # hit a NameError if response.json() or int() raises.
        autoland_request_id = None
        try:
            autoland_request_id = int(response.json().get('request_id', 0))
        finally:
            # Returning from finally deliberately swallows any in-flight
            # exception, converting a malformed response into an error
            # result for the caller.
            if autoland_request_id is None:
                release_lock(lock_id)
                return AUTOLAND_ERROR, {
                    'status_code': response.status_code,
                    'request_id': None,
                }

        AutolandRequest.objects.create(autoland_id=autoland_request_id,
                                       push_revision=last_revision,
                                       repository_url=target_repository,
                                       review_request_id=rr.id,
                                       user_id=request.user.id,
                                       extra_data=json.dumps(
                                           {'try_syntax': try_syntax}))

        AutolandEventLogEntry.objects.create(
            status=AutolandEventLogEntry.REQUESTED,
            autoland_request_id=autoland_request_id)

        self.save_autolandrequest_id('p2rb.autoland_try', rr,
                                     autoland_request_id)

        return 200, {}
Beispiel #36
0
    def create(self, request, review_request_id, commit_descriptions, *args,
               **kwargs):
        """Trigger an Autoland landing for a pushed parent review request.

        Validates that the review request is a pushed parent, that
        autolanding is enabled and configured for the repository, then
        submits the series' tip revision to the Autoland service.

        Returns a webapi-style response: an error object, or
        ``(200, {})`` once the job has been scheduled.
        """
        try:
            rr = ReviewRequest.objects.get(pk=review_request_id)
        except ReviewRequest.DoesNotExist:
            return DOES_NOT_EXIST

        commit_data = fetch_commit_data(rr)

        # Only the parent review request of a pushed series may land.
        if not is_pushed(rr, commit_data) or not is_parent(rr, commit_data):
            logger.error('Failed triggering Autoland because the review '
                         'request is not pushed, or not the parent review '
                         'request.')
            return NOT_PUSHED_PARENT_REVIEW_REQUEST

        enabled = rr.repository.extra_data.get('autolanding_enabled')

        if not enabled:
            return AUTOLAND_CONFIGURATION_ERROR.with_message(
                'Autolanding not enabled.')

        target_repository = rr.repository.extra_data.get(
            'landing_repository_url')
        push_bookmark = rr.repository.extra_data.get('landing_bookmark')

        if not target_repository:
            return AUTOLAND_CONFIGURATION_ERROR.with_message(
                'Autoland has not been configured with a proper landing URL.')

        # The last entry in the pushed commit list is the tip revision.
        last_revision = json.loads(
            commit_data.extra_data.get(COMMITS_KEY))[-1][0]
        ext = get_extension_manager().get_enabled_extension(
            'mozreview.extension.MozReviewExtension')

        logger.info('Submitting a request to Autoland for review request '
                    'ID %s for revision %s destination %s' %
                    (review_request_id, last_revision, target_repository))

        autoland_url = ext.get_settings('autoland_url')

        if not autoland_url:
            return AUTOLAND_CONFIGURATION_ERROR

        autoland_user = ext.get_settings('autoland_user')
        autoland_password = ext.get_settings('autoland_password')

        if not autoland_user or not autoland_password:
            return AUTOLAND_CONFIGURATION_ERROR

        pingback_url = autoland_request_update_resource.get_uri(request)
        # Serialize landing attempts for the same (request, repo, rev).
        lock_id = get_autoland_lock_id(rr.id, target_repository, last_revision)

        if not acquire_lock(lock_id):
            return AUTOLAND_REQUEST_IN_PROGRESS

        try:
            response = requests.post(
                autoland_url + '/autoland',
                data=json.dumps({
                    'ldap_username':
                    request.mozreview_profile.ldap_username,
                    'tree':
                    rr.repository.name,
                    'pingback_url':
                    pingback_url,
                    'rev':
                    last_revision,
                    'destination':
                    target_repository,
                    'push_bookmark':
                    push_bookmark,
                    'commit_descriptions':
                    json.loads(commit_descriptions),
                }),
                headers={
                    'content-type': 'application/json',
                },
                timeout=AUTOLAND_REQUEST_TIMEOUT,
                auth=(autoland_user, autoland_password))
        # Timeout is a subclass of RequestException, so it must be
        # caught first or the AUTOLAND_TIMEOUT branch is unreachable.
        except requests.exceptions.Timeout:
            logger.error('We timed out when submitting a request to '
                         'Autoland')
            release_lock(lock_id)
            return AUTOLAND_TIMEOUT
        except requests.exceptions.RequestException:
            logger.error('We hit a RequestException when submitting a '
                         'request to Autoland')
            release_lock(lock_id)
            return AUTOLAND_ERROR

        if response.status_code != 200:
            release_lock(lock_id)

            # The error body may not be JSON; fall back to raw text.
            try:
                error_message = response.json().get('error')
            except ValueError:
                error_message = response.text

            return AUTOLAND_ERROR, {
                'status_code': response.status_code,
                'message': error_message,
            }

        # We succeeded in scheduling the job.
        # Initialize the id before the try so the finally clause cannot
        # hit a NameError if response.json() or int() raises.
        autoland_request_id = None
        try:
            autoland_request_id = int(response.json().get('request_id', 0))
        finally:
            # Returning from finally deliberately swallows any in-flight
            # exception, converting a malformed response into an error
            # result for the caller.
            if autoland_request_id is None:
                release_lock(lock_id)
                return AUTOLAND_ERROR, {
                    'status_code': response.status_code,
                    'request_id': None,
                }

        AutolandRequest.objects.create(
            autoland_id=autoland_request_id,
            push_revision=last_revision,
            repository_url=target_repository,
            review_request_id=rr.id,
            user_id=request.user.id,
        )

        AutolandEventLogEntry.objects.create(
            status=AutolandEventLogEntry.REQUESTED,
            autoland_request_id=autoland_request_id)

        self.save_autolandrequest_id('p2rb.autoland', rr, autoland_request_id)

        return 200, {}
def commit_id(review_request_details):
    """Return the commit id of a review request or review request draft."""
    commit_data = fetch_commit_data(review_request_details)
    value = commit_data.get_for(review_request_details, COMMIT_ID_KEY)
    return str(value)
def handle_commits_published(extension=None, **kwargs):
    """Handle sending 'mozreview.commits.published'.

    This message is only sent when the parent review request, in a set of
    pushed review requests, is published with new commit information.

    This is a useful message for consumers who care about new or modified
    commits being published for review.

    Args:
        extension: Extension instance handed to ``publish_message``.
        **kwargs: Signal arguments. ``review_request`` (required) and
            ``changedesc`` (absent on first publish) are read.
    """
    review_request = kwargs.get('review_request')

    if review_request is None:
        return

    commit_data = fetch_commit_data(review_request)

    # Only parent review requests of pushed series carry the commit map
    # this message describes; bail for everything else.
    if (not is_pushed(review_request, commit_data) or
            not is_parent(review_request, commit_data)):
        return

    # Check the change description and only continue if it contains a change
    # to the commit information. Currently change descriptions won't include
    # information about our extra data field, so we'll look for a change to
    # the diff which is mandatory if the commits changed. TODO: Properly use
    # the commit information once we start populating the change description
    # with it.
    #
    # A change description will not exist if this is the first publish of the
    # review request. In that case we know there must be commits since this
    # is a pushed request.
    cd = kwargs.get('changedesc')
    if (cd is not None and ('diff' not in cd.fields_changed or
                            'added' not in cd.fields_changed['diff'])):
        return

    # We publish both the review repository url as well as the landing
    # ("inbound") repository url. This gives consumers which perform hg
    # operations the option to avoid cloning the review repository, which may
    # be large.
    repo = review_request.repository
    repo_url = repo.path
    landing_repo_url = repo.extra_data.get('landing_repository_url')

    child_rrids = []
    commits = []
    # COMMITS_KEY holds a JSON list of [revision, review_request_id] pairs.
    ext_commits = json.loads(commit_data.extra_data.get(COMMITS_KEY, '[]'))

    for rev, rrid in ext_commits:
        child_rrids.append(int(rrid))
        commits.append({
            'rev': rev,
            'review_request_id': int(rrid),
            'diffset_revision': None
        })

    # In order to retrieve the diff revision for each commit we need to fetch
    # their corresponding child review request.
    review_requests = dict(
        (obj.id, obj) for obj in
        ReviewRequest.objects.filter(pk__in=child_rrids))

    for commit_info in commits:
        # TODO: Every call to get_latest_diffset() makes its own query to the
        # database. It is probably possible to retrieve the diffsets we care
        # about using a single query through Django's ORM, but it's not trivial.
        commit_info['diffset_revision'] = review_requests[
            commit_info['review_request_id']
        ].get_latest_diffset().revision

    msg = base.GenericMessage()
    msg.routing_parts.append('mozreview.commits.published')
    msg.data['parent_review_request_id'] = review_request.id
    msg.data['parent_diffset_revision'] = review_request.get_latest_diffset().revision
    msg.data['commits'] = commits
    msg.data['repository_url'] = repo_url
    msg.data['landing_repository_url'] = landing_repo_url

    # TODO: Make work with RB localsites.
    msg.data['review_board_url'] = get_server_url()

    publish_message(extension, msg)
    def _process_submission(self, request, local_site, user, privileged_user,
                            repo, identifier, commits):
        """Create or update review requests for a pushed batch submission.

        Maps each commit in ``commits['individual']`` onto an existing child
        review request where possible (exact node match, then obsolescence
        precursor, then MozReview-Commit-ID), reuses or creates review
        requests for the remainder, and rebuilds the squashed (parent)
        review request's draft and diffset from ``commits['squashed']``.

        Returns a ``(squashed_rr, node_to_rid, review_data, warnings)``
        tuple.

        Raises SubmissionException if the squashed review request exists but
        is not editable, and DiffProcessingException if the squashed diff
        cannot be processed.
        """
        logger.info('processing batch submission %s to %s with %d commits' % (
                    identifier, repo.name, len(commits['individual'])))

        try:
            squashed_rr = ReviewRequest.objects.get(commit_id=identifier,
                                                    repository=repo)
            if not squashed_rr.is_mutable_by(user):
                logger.warn('%s not mutable by %s' % (squashed_rr.id, user))
                raise SubmissionException(self.get_no_access_error(request))

            if squashed_rr.status != ReviewRequest.PENDING_REVIEW:
                logger.warn('%s is not a pending review request; cannot edit' %
                            squashed_rr.id)
                raise SubmissionException((INVALID_FORM_DATA, {
                    'fields': {
                        'identifier': ['Parent review request is '
                               'submitted or discarded']}}))

            logger.info('using squashed review request %d' % squashed_rr.id)

        except ReviewRequest.DoesNotExist:
            # First push for this identifier: create the squashed (parent)
            # review request and seed its MozReview bookkeeping keys.
            squashed_rr = ReviewRequest.objects.create(
                    user=user, repository=repo, commit_id=identifier,
                    local_site=local_site)

            squashed_commit_data = fetch_commit_data(squashed_rr)
            squashed_commit_data.extra_data.update({
                MOZREVIEW_KEY: True,
                IDENTIFIER_KEY: identifier,
                FIRST_PUBLIC_ANCESTOR_KEY: (
                    commits['squashed']['first_public_ancestor']),
                SQUASHED_KEY: True,
                DISCARD_ON_PUBLISH_KEY: '[]',
                UNPUBLISHED_KEY: '[]',
            })
            squashed_commit_data.draft_extra_data.update({
                IDENTIFIER_KEY: identifier,
            })
            squashed_commit_data.save(
                update_fields=['extra_data', 'draft_extra_data'])

            logger.info('created squashed review request #%d' % squashed_rr.id)

        # The diffs on diffsets can't be updated, only replaced. So always
        # construct the diffset.

        try:
            # TODO consider moving diffset creation outside of the transaction
            # since it can be quite time consuming.
            # Calling create_from_data() instead of create_from_upload() skips
            # diff size validation. We allow unlimited diff sizes, so no biggie.
            logger.info('%s: generating squashed diffset for %d' % (
                        identifier, squashed_rr.id))
            # NOTE(review): .decode('base64') is a Python 2-only codec; this
            # path presumably assumes Python 2 — confirm before porting.
            diffset = DiffSet.objects.create_from_data(
                repository=repo,
                diff_file_name='diff',
                # The original value is a unicode instance. Python 3 can't
                # .encode() a unicode instance, so go to str first.
                diff_file_contents=commits['squashed']['diff_b64'].encode('ascii').decode('base64'),
                parent_diff_file_name=None,
                parent_diff_file_contents=None,
                diffset_history=None,
                basedir='',
                request=request,
                base_commit_id=commits['squashed'].get('base_commit_id'),
                save=True,
                )

            update_diffset_history(squashed_rr, diffset)
            diffset.save()

            # We pass `force_insert=True` to save to make sure Django generates
            # an SQL INSERT rather than an UPDATE if the DiffSetVerification
            # already exists. It should never already exist so we want the
            # exception `force_insert=True` will cause if that's the case.
            DiffSetVerification(diffset=diffset).save(
                authorized_user=privileged_user, force_insert=True)
        except Exception:
            logger.exception('error processing squashed diff')
            raise DiffProcessingException()

        update_review_request_draft_diffset(squashed_rr, diffset)
        logger.info('%s: updated squashed diffset for %d' % (
                    identifier, squashed_rr.id))

        # TODO: We need to take into account the commits data from the squashed
        # review request's draft. This data represents the mapping from commit
        # to rid in the event that we would have published. We're overwritting
        # this data. This will only come into play if we start trusting the server
        # instead of the client when matching review request ids. Bug 1047516

        squashed_commit_data = fetch_commit_data(squashed_rr)
        previous_commits = get_previous_commits(squashed_rr,
                                                squashed_commit_data)
        remaining_nodes = get_remaining_nodes(previous_commits)
        discard_on_publish_rids = get_discard_on_publish_rids(
            squashed_rr, squashed_commit_data)
        unpublished_rids = get_unpublished_rids(
            squashed_rr, squashed_commit_data)
        unclaimed_rids = get_unclaimed_rids(previous_commits,
                                            discard_on_publish_rids,
                                            unpublished_rids)

        logger.info('%s: %d previous commits; %d discard on publish; '
                    '%d unpublished' % (identifier, len(previous_commits),
                                        len(discard_on_publish_rids),
                                        len(unpublished_rids)))

        # Previously pushed nodes which have been processed and had their review
        # request updated or did not require updating.
        processed_nodes = set()

        # Maps a processed commit node to its review request id.
        node_to_rid = {}

        # A mapping from review request id to the corresponding ReviewRequest.
        review_requests = {}

        # A mapping of review request id to dicts of additional metadata.
        review_data = {}

        squashed_reviewers = set()
        reviewer_cache = ReviewerCache(request)

        warnings = []

        # Do a pass and find all commits that map cleanly to old review requests.
        for commit in commits['individual']:
            node = commit['id']

            if node not in remaining_nodes:
                continue

            # If the commit appears in an old review request, by definition of
            # commits deriving from content, the commit has not changed and there
            # is nothing to update. Update our accounting and move on.
            rid = remaining_nodes[node]
            logger.info('%s: commit %s unchanged; using existing request %d' % (
                        identifier, node, rid))

            del remaining_nodes[node]
            unclaimed_rids.remove(rid)
            processed_nodes.add(node)
            node_to_rid[node] = rid

            rr = ReviewRequest.objects.get(pk=rid)
            review_requests[rid] = rr
            review_data[rid] = get_review_request_data(rr)

            try:
                discard_on_publish_rids.remove(rid)
            except ValueError:
                pass

        logger.info('%s: %d/%d commits mapped exactly' % (
                    identifier, len(processed_nodes),
                    len(commits['individual'])))

        # Find commits that map to a previous version.
        for commit in commits['individual']:
            node = commit['id']
            if node in processed_nodes:
                continue

            # The client may have sent obsolescence data saying which commit this
            # commit has derived from. Use that data (if available) to try to find
            # a mapping to an old review request.
            for precursor in commit['precursors']:
                rid = remaining_nodes.get(precursor)
                if not rid:
                    continue

                logger.info('%s: found precursor to commit %s; '
                            'using existing review request %d' % (
                            identifier, node, rid))

                del remaining_nodes[precursor]
                unclaimed_rids.remove(rid)

                rr = ReviewRequest.objects.get(pk=rid)
                draft, warns = update_review_request(local_site, request,
                                                     privileged_user,
                                                     reviewer_cache, rr,
                                                     commit)
                squashed_reviewers.update(u for u in draft.target_people.all())
                warnings.extend(warns)
                processed_nodes.add(node)
                node_to_rid[node] = rid
                review_requests[rid] = rr
                review_data[rid] = get_review_request_data(rr)

                try:
                    discard_on_publish_rids.remove(rid)
                except ValueError:
                    pass

                break

        logger.info('%s: %d/%d mapped exactly or to precursors' % (
                    identifier, len(processed_nodes),
                    len(commits['individual'])))

        # Clients should add "MozReview-Commit-ID" unique identifiers to
        # commit messages. Search for them and match up accordingly.

        unclaimed_rrs = [ReviewRequest.objects.get(pk=rid)
                         for rid in unclaimed_rids]

        for commit in commits['individual']:
            node = commit['id']
            if node in processed_nodes:
                continue

            commit_id = parse_commit_id(commit['message'])
            if not commit_id:
                logger.warn('%s: commit %s does not have commit id' % (
                            identifier, node))
                continue

            for rr in unclaimed_rrs:
                rr_commit_id = parse_commit_id(rr.description)
                if commit_id != rr_commit_id:
                    continue

                # commit ID in commit found in existing review request. Map
                # it up.
                logger.info('%s: commit ID %s for %s found in review request %d' % (
                            identifier, commit_id, node, rr.id))

                try:
                    del remaining_nodes[node]
                except KeyError:
                    pass

                unclaimed_rids.remove(rr.id)
                unclaimed_rrs.remove(rr)
                draft, warns = update_review_request(local_site, request,
                                                     privileged_user,
                                                     reviewer_cache, rr,
                                                     commit)
                squashed_reviewers.update(u for u in draft.target_people.all())
                warnings.extend(warns)
                processed_nodes.add(node)
                node_to_rid[node] = rr.id
                review_requests[rr.id] = rr
                review_data[rr.id] = get_review_request_data(rr)
                try:
                    discard_on_publish_rids.remove(rr.id)
                except ValueError:
                    pass

                break

        logger.info('%s: %d/%d mapped after commit ID matching' % (
                    identifier, len(processed_nodes),
                    len(commits['individual'])))

        logger.info('%s: %d unclaimed review requests' % (identifier, len(unclaimed_rids)))

        # Now do a pass over the commits that didn't map cleanly.
        for commit in commits['individual']:
            node = commit['id']
            if node in processed_nodes:
                continue

            # We haven't seen this commit before *and* our mapping above didn't
            # do anything useful with it.

            # This is where things could get complicated. We could involve
            # heuristic based matching (comparing commit messages, changed
            # files, etc). We may do that in the future.

            # For now, match the commit up against the next one in the index.
            # The unclaimed rids list contains review requests which were created
            # when previously updating this review identifier, but not published.
            # If we have more commits than were previously published we'll start
            # reusing these private review requests before creating new ones.
            #
            # We don't reuse existing review requests when obsolescence data is
            # available because the lack of a clean commit mapping (from above)
            # means that the commit is logically new and shouldn't be
            # associated with a review request that belonged to a different
            # logical commit.
            if unclaimed_rids and not commits.get('obsolescence', False):
                assumed_old_rid = unclaimed_rids.pop(0)

                logger.info('%s: mapping %s to unclaimed request %d' % (
                            identifier, node, assumed_old_rid))

                rr = ReviewRequest.objects.get(pk=assumed_old_rid)
                draft, warns = update_review_request(local_site, request,
                                                     privileged_user,
                                                     reviewer_cache, rr,
                                                     commit)
                squashed_reviewers.update(u for u in draft.target_people.all())
                warnings.extend(warns)
                processed_nodes.add(commit['id'])
                node_to_rid[node] = assumed_old_rid
                review_requests[assumed_old_rid] = rr
                review_data[assumed_old_rid] = get_review_request_data(rr)

                try:
                    discard_on_publish_rids.remove(assumed_old_rid)
                except ValueError:
                    pass

                continue

            # There are no more unclaimed review request IDs. This means we have
            # more commits than before. Create new review requests as appropriate.
            rr = ReviewRequest.objects.create(user=user,
                                              repository=repo,
                                              commit_id=None,
                                              local_site=local_site)

            commit_data = fetch_commit_data(rr)
            commit_data.extra_data.update({
                MOZREVIEW_KEY: True,
                IDENTIFIER_KEY: identifier,
                SQUASHED_KEY: False,
            })
            commit_data.draft_extra_data.update({
                AUTHOR_KEY: commit['author'],
                IDENTIFIER_KEY: identifier,
            })
            commit_data.save(
                update_fields=['extra_data', 'draft_extra_data'])

            logger.info('%s: created review request %d for commit %s' % (
                        identifier, rr.id, node))
            draft, warns = update_review_request(local_site, request,
                                                 privileged_user,
                                                 reviewer_cache, rr, commit)
            squashed_reviewers.update(u for u in draft.target_people.all())
            warnings.extend(warns)
            processed_nodes.add(commit['id'])
            node_to_rid[node] = rr.id
            review_requests[rr.id] = rr
            review_data[rr.id] = get_review_request_data(rr)
            unpublished_rids.append(rr.id)

        # At this point every incoming commit has been accounted for.
        # If there are any remaining review requests, they must belong to
        # deleted commits. (Or, we made a mistake and updated the wrong review
        # request)
        logger.info('%s: %d unclaimed review requests left over' % (
                    identifier, len(unclaimed_rids)))
        for rid in unclaimed_rids:
            rr = ReviewRequest.objects.get(pk=rid)

            if rr.public and rid not in discard_on_publish_rids:
                # This review request has already been published so we'll need to
                # discard it when we publish the squashed review request.
                discard_on_publish_rids.append(rid)
            elif not rr.public and rid not in unpublished_rids:
                # We've never published this review request so it may be reused in
                # the future for *any* commit. Keep track of it.
                unpublished_rids.append(rid)
            else:
                # This means we've already marked the review request properly
                # in a previous push, so do nothing.
                pass

        commit_list = []
        for commit in commits['individual']:
            node = commit['id']
            commit_list.append([node, node_to_rid[node]])

        # We need to refresh the squashed rr and draft because post save hooks
        # in ReviewBoard result in magical changes to some of its fields.
        squashed_rr = ReviewRequest.objects.get(pk=squashed_rr.id)
        squashed_draft = squashed_rr.draft.get()
        squashed_commit_data = fetch_commit_data(squashed_rr)

        squashed_draft.summary = identifier

        # Reviewboard does not allow review requests with empty descriptions to
        # be published, so we insert some filler here.
        squashed_draft.description = 'This is the parent review request'
        squashed_draft.bugs_closed = ','.join(sorted(set(commit['bug'] for commit in commits['individual'])))

        squashed_draft.depends_on.clear()
        for rrid in sorted(node_to_rid.values()):
            rr = ReviewRequest.objects.for_id(rrid, local_site)
            squashed_draft.depends_on.add(rr)

        squashed_draft.target_people.clear()
        for user in sorted(squashed_reviewers):
            squashed_draft.target_people.add(user)

        squashed_commit_data.draft_extra_data[COMMITS_KEY] = json.dumps(
            commit_list)

        if 'base_commit_id' in commits['squashed']:
            squashed_commit_data.draft_extra_data[BASE_COMMIT_KEY] = (
                commits['squashed']['base_commit_id'])

        squashed_commit_data.extra_data.update({
            DISCARD_ON_PUBLISH_KEY: json.dumps(discard_on_publish_rids),
            FIRST_PUBLIC_ANCESTOR_KEY: (
                commits['squashed']['first_public_ancestor']),
            UNPUBLISHED_KEY: json.dumps(unpublished_rids),
        })

        squashed_draft.save()
        squashed_rr.save(update_fields=['extra_data'])
        squashed_commit_data.save(
            update_fields=['extra_data', 'draft_extra_data'])

        review_requests[squashed_rr.id] = squashed_rr
        review_data[squashed_rr.id] = get_review_request_data(squashed_rr)

        return squashed_rr, node_to_rid, review_data, warnings
Beispiel #40
0
def commit_message_filediff_ids(review_request_details):
    """Return the commit message FileDiff ids of a ReviewRequest or Draft."""
    data = fetch_commit_data(review_request_details)
    return data.get_for(
        review_request_details, COMMIT_MSG_FILEDIFF_IDS_KEY, default='[]')
    def create(self, request, review_request_id, try_syntax, *args, **kwargs):
        """Schedule an Autoland "try" landing for a review request.

        Validates that the review request is the pushed parent of a series
        and that ``try_syntax`` is well formed, acquires the landing lock,
        submits the tip revision to the Autoland service, and records the
        resulting request locally.

        Returns a web API response tuple: ``(200, {})`` on success, or an
        error code (optionally with a payload) describing the failure.
        """
        try:
            rr = ReviewRequest.objects.get(pk=review_request_id)
        except ReviewRequest.DoesNotExist:
            return DOES_NOT_EXIST

        if not try_syntax.startswith("try: "):
            return INVALID_FORM_DATA, {"fields": {"try_syntax": ["The provided try syntax was invalid"]}}

        commit_data = fetch_commit_data(rr)

        # Only the parent (squashed) review request of a pushed series may
        # trigger a landing.
        if not is_pushed(rr, commit_data) or not is_parent(rr, commit_data):
            logger.error(
                "Failed triggering Autoland because the review "
                "request is not pushed, or not the parent review "
                "request."
            )
            return NOT_PUSHED_PARENT_REVIEW_REQUEST

        target_repository = rr.repository.extra_data.get("try_repository_url")

        if not target_repository:
            return AUTOLAND_CONFIGURATION_ERROR.with_message("Autoland has not been configured with a proper try URL.")

        # COMMITS_KEY holds JSON [revision, review_request_id] pairs; the last
        # entry's revision is the tip of the series, which is what we land.
        last_revision = json.loads(commit_data.extra_data.get(COMMITS_KEY))[-1][0]

        ext = get_extension_manager().get_enabled_extension("mozreview.extension.MozReviewExtension")

        logger.info(
            "Submitting a request to Autoland for review request "
            "ID %s for revision %s destination try" % (review_request_id, last_revision)
        )

        autoland_url = ext.get_settings("autoland_url")
        if not autoland_url:
            return AUTOLAND_CONFIGURATION_ERROR

        autoland_user = ext.get_settings("autoland_user")
        autoland_password = ext.get_settings("autoland_password")

        if not autoland_user or not autoland_password:
            return AUTOLAND_CONFIGURATION_ERROR

        pingback_url = autoland_request_update_resource.get_uri(request)

        # Serialize landing attempts for this (review request, repository,
        # revision) triple; every early return below must release this lock.
        lock_id = get_autoland_lock_id(rr.id, target_repository, last_revision)
        if not acquire_lock(lock_id):
            return AUTOLAND_REQUEST_IN_PROGRESS

        try:
            # We use a hard-coded destination here. If we ever open this up
            # to make the destination a parameter to this resource, we need to
            # verify that the destination is in fact an "scm_level_1"
            # repository to ensure that people don't try to land to inbound
            # using this resource.
            response = requests.post(
                autoland_url + "/autoland",
                data=json.dumps(
                    {
                        "ldap_username": request.mozreview_profile.ldap_username,
                        "tree": rr.repository.name,
                        "pingback_url": pingback_url,
                        "rev": last_revision,
                        "destination": TRY_AUTOLAND_DESTINATION,
                        "trysyntax": try_syntax,
                    }
                ),
                headers={"content-type": "application/json"},
                timeout=AUTOLAND_REQUEST_TIMEOUT,
                auth=(autoland_user, autoland_password),
            )
        except requests.exceptions.Timeout:
            # Timeout subclasses RequestException, so it must be caught first
            # or this branch would be unreachable.
            logger.error("We timed out when submitting a request to " "Autoland")
            release_lock(lock_id)
            return AUTOLAND_TIMEOUT
        except requests.exceptions.RequestException:
            logger.error("We hit a RequestException when submitting a " "request to Autoland")
            release_lock(lock_id)
            return AUTOLAND_ERROR

        if response.status_code != 200:
            release_lock(lock_id)

            # The error body is not guaranteed to be JSON.
            try:
                error_message = response.json().get("error")
            except ValueError:
                error_message = response.text

            return AUTOLAND_ERROR, {"status_code": response.status_code, "message": error_message}

        # We succeeded in scheduling the job. A 200 response whose body lacks
        # a usable integer request id is still an error; `int()` raises rather
        # than returning None, so convert failures to None explicitly.
        try:
            autoland_request_id = int(response.json().get("request_id", 0))
        except (ValueError, TypeError):
            autoland_request_id = None

        if autoland_request_id is None:
            release_lock(lock_id)
            return AUTOLAND_ERROR, {"status_code": response.status_code, "request_id": None}

        AutolandRequest.objects.create(
            autoland_id=autoland_request_id,
            push_revision=last_revision,
            repository_url=target_repository,
            review_request_id=rr.id,
            user_id=request.user.id,
            extra_data=json.dumps({"try_syntax": try_syntax}),
        )

        AutolandEventLogEntry.objects.create(
            status=AutolandEventLogEntry.REQUESTED, autoland_request_id=autoland_request_id
        )

        self.save_autolandrequest_id("p2rb.autoland_try", rr, autoland_request_id)

        return 200, {}
    def get(self, request, *args, **kwargs):
        """Return the landable commits of a parent review request.

        For each child commit this reports the reviewers who granted ship-it,
        whether a ship-it was carried forward over a changed commit, and a
        summary with the reviewer list substituted into the description.
        """
        try:
            parent_request = get_parent_rr(ReviewRequest.objects.get(
                id=kwargs[self.uri_object_key]))
        except ReviewRequest.DoesNotExist:
            return DOES_NOT_EXIST
        if parent_request is None:
            return DOES_NOT_EXIST

        commit_data = fetch_commit_data(parent_request)

        if not is_parent(parent_request, commit_data):
            return NOT_PARENT
        if not parent_request.is_accessible_by(request.user):
            return PERMISSION_DENIED
        if COMMITS_KEY not in commit_data.extra_data:
            # Use the module logger (not the root logger) for consistency
            # with the rest of this file, and name the key actually checked.
            logger.error('Parent review request %s missing COMMITS_KEY'
                         % parent_request.id)
            return NOT_PARENT

        result = []
        # COMMITS_KEY is a JSON list of [commit, review_request_id] pairs.
        children = json.loads(commit_data.extra_data[COMMITS_KEY])
        for child in children:
            try:
                child_request = ReviewRequest.objects.get(id=child[1])
            except ReviewRequest.DoesNotExist:
                return DOES_NOT_EXIST
            if not child_request.approved:
                return AUTOLAND_REVIEW_NOT_APPROVED

            reviewers = [
                r.user.username for r in gen_latest_reviews(child_request) if
                r.ship_it and
                r.user != child_request.submitter
            ]

            if not reviewers and child_request.approved:
                # This review request is approved (the repeated check is
                # to ensure this is guaranteed if other parts of the code
                # change) but we have an empty list of reviewers. We'll
                # assume the author has just approved this themself and
                # set r=me
                reviewers.append('me')

            # Detect if the commit has been changed since the last review.
            shipit_carryforward = has_shipit_carryforward(child_request)

            result.append({
                'commit': child[0],
                'id': child[1],
                'reviewers': reviewers,
                'shipit_carryforward': shipit_carryforward,
                'summary': replace_reviewers(child_request.description,
                                             reviewers)
            })

        return 200, {
            'commits': result,
            'total_results': len(result),
            'links': self.get_links(request=request),
        }
def on_review_request_publishing(user, review_request_draft, **kwargs):
    """Enforce MozReview publishing rules when a review request draft publishes.

    Verifies that any new DiffSet on the draft was created through a
    verified push, only lets a child (commit) review request publish with
    its parent's approval, validates the bug ID parsed from the review
    identifier (including a Bugzilla confidentiality check when Bugzilla
    is the auth backend), and — for parent (squashed) review requests —
    publishes every child draft and discards obsolete or never-published
    children.

    Raises:
        PublishError: the draft carries an unverified (manually uploaded)
            DiffSet.
        CommitPublishProhibited: a child publish was not approved by its
            parent.
        InvalidBugIdError: the review identifier yields no usable bug ID,
            or Bugzilla reports the bug alias/ID as invalid.
        ConfidentialBugError: the bug is confidential in Bugzilla.
    """
    # There have been strange cases (all local, and during development), where
    # when attempting to publish a review request, this handler will fail
    # because the draft does not exist. This is a really strange case, and not
    # one we expect to happen in production. However, since we've seen it
    # locally, we handle it here, and log.
    if not review_request_draft:
        logger.error('Strangely, there was no review request draft on the '
                     'review request we were attempting to publish.')
        return

    # If the review request draft has a new DiffSet we will only allow
    # publishing if that DiffSet has been verified. It is important to
    # do this for every review request, not just pushed ones, because
    # we can't trust the storage mechanism which indicates it was pushed.
    # TODO: This will be fixed when we transition away from extra_data.
    if review_request_draft.diffset:
        try:
            DiffSetVerification.objects.get(
                diffset=review_request_draft.diffset)
        except DiffSetVerification.DoesNotExist:
            logger.error(
                'An attempt was made by User %s to publish an unverified '
                'DiffSet with id %s',
                user.id,
                review_request_draft.diffset.id)

            raise PublishError(
                'This review request draft contained a manually uploaded '
                'diff, which is prohibited. Please push to the review server '
                'to create review requests. If you believe you received this '
                'message in error, please file a bug.')

    review_request = review_request_draft.get_review_request()
    commit_data = fetch_commit_data(review_request)

    # skip review requests that were not pushed
    if not is_pushed(review_request, commit_data=commit_data):
        return

    if not is_parent(review_request, commit_data):
        # Send a signal asking for approval to publish this review request.
        # We only want to publish this commit request if we are in the middle
        # of publishing the parent. If the parent is publishing it will be
        # listening for this signal to approve it.
        approvals = commit_request_publishing.send_robust(
            sender=review_request,
            user=user,
            review_request_draft=review_request_draft)

        # for/else: the else arm runs only if no receiver approved
        # (i.e. the loop completed without hitting ``break``).
        for receiver, approved in approvals:
            if approved:
                break
        else:
            # This publish is not approved by the parent review request.
            raise CommitPublishProhibited()

    # The reviewid passed through p2rb is, for Mozilla's instance anyway,
    # bz://<bug id>/<irc nick>.
    reviewid = commit_data.draft_extra_data.get(IDENTIFIER_KEY, None)
    # NOTE(review): if IDENTIFIER_KEY is absent, reviewid is None and
    # match() would raise TypeError — presumably pushed requests always
    # carry the identifier; confirm against the push path.
    m = REVIEWID_RE.match(reviewid)

    if not m:
        raise InvalidBugIdError('<unknown>')

    bug_id = m.group(1)

    try:
        bug_id = int(bug_id)
    except (TypeError, ValueError):
        raise InvalidBugIdError(bug_id)

    siteconfig = SiteConfiguration.objects.get_current()
    using_bugzilla = (
        siteconfig.settings.get("auth_backend", "builtin") == "bugzilla")

    if using_bugzilla:
        b = Bugzilla(get_bugzilla_api_key(user))

        try:
            if b.is_bug_confidential(bug_id):
                raise ConfidentialBugError
        except BugzillaError as e:
            # Special cases:
            #   100: Invalid Bug Alias
            #   101: Bug does not exist
            if e.fault_code and (e.fault_code == 100 or e.fault_code == 101):
                raise InvalidBugIdError(bug_id)
            raise

    # Note that the bug ID has already been set when the review was created.

    # If this is a squashed/parent review request, automatically publish all
    # relevant children.
    if is_parent(review_request, commit_data):
        # NOTE(review): relies on Python 2 map() returning a list —
        # unpublished_rids.remove() is called below. Under Python 3 these
        # would need to be wrapped in list().
        unpublished_rids = map(int, json.loads(
            commit_data.extra_data[UNPUBLISHED_KEY]))
        discard_on_publish_rids = map(int, json.loads(
            commit_data.extra_data[DISCARD_ON_PUBLISH_KEY]))
        child_rrs = list(gen_child_rrs(review_request_draft))

        # Create or update Bugzilla attachments for each draft commit.  This
        # is done before the children are published to ensure that MozReview
        # doesn't get into a strange state if communication with Bugzilla is
        # broken or attachment creation otherwise fails.  The Bugzilla
        # attachments will then, of course, be in a weird state, but that
        # should be fixed by the next successful publish.
        if using_bugzilla:
            for child in child_rrs:
                child_draft = child.get_draft(user=user)

                if child_draft:
                    if child.id in discard_on_publish_rids:
                        b.obsolete_review_attachments(
                            bug_id, get_obj_url(child))
                    post_bugzilla_attachment(b, bug_id, child_draft, child)

        # Publish draft commits. This will already include items that are in
        # unpublished_rids, so we'll remove anything we publish out of
        # unpublished_rids.
        for child in child_rrs:
            if child.get_draft(user=user) or not child.public:
                # Approve only the child we are about to publish. The
                # closure captures the loop variable, but it is connected
                # and disconnected within this same iteration, so late
                # binding is not an issue here.
                def approve_publish(sender, user, review_request_draft,
                                    **kwargs):
                    return child is sender

                # Setup the parent signal handler to approve the publish
                # and then publish the child.
                commit_request_publishing.connect(approve_publish,
                                                  sender=child,
                                                  weak=False)
                try:
                    child.publish(user=user)
                finally:
                    # Always disconnect, even if publish raised, so the
                    # handler cannot leak into later publishes.
                    commit_request_publishing.disconnect(
                        receiver=approve_publish,
                        sender=child,
                        weak=False)

                if child.id in unpublished_rids:
                    unpublished_rids.remove(child.id)

        # The remaining unpubished_rids need to be closed as discarded because
        # they have never been published, and they will appear in the user's
        # dashboard unless closed.
        for child in gen_rrs_by_rids(unpublished_rids):
            child.close(ReviewRequest.DISCARDED,
                        user=user,
                        description=NEVER_USED_DESCRIPTION)

        # We also close the discard_on_publish review requests because, well,
        # we don't need them anymore. We use a slightly different message
        # though.
        for child in gen_rrs_by_rids(discard_on_publish_rids):
            child.close(ReviewRequest.DISCARDED,
                        user=user,
                        description=OBSOLETE_DESCRIPTION)

        # Both bookkeeping lists have been fully processed; reset them.
        commit_data.extra_data[UNPUBLISHED_KEY] = '[]'
        commit_data.extra_data[DISCARD_ON_PUBLISH_KEY] = '[]'

    # Copy any drafted CommitData from draft_extra_data to extra_data.
    for key in DRAFTED_COMMIT_DATA_KEYS:
        if key in commit_data.draft_extra_data:
            commit_data.extra_data[key] = commit_data.draft_extra_data[key]

    commit_data.save(update_fields=['extra_data'])

    review_request.save()
    def __init__(self, review_request_details, *args, **kwargs):
        """Initialize the field, caching CommitData for the given details.

        ``self.commit_data`` is assigned before calling the base
        constructor — presumably because the base class may touch field
        state during initialization; confirm against
        BaseReviewRequestField before reordering.
        """
        self.commit_data = fetch_commit_data(review_request_details)

        super(PullCommitField, self).__init__(review_request_details,
                                              *args, **kwargs)
 def save_value(self, value):
     """Persist ``value`` for this field on the review request's CommitData.

     Both ``extra_data`` and ``draft_extra_data`` are named in the save
     call so the write lands regardless of which store ``set_for``
     targeted.
     """
     data = fetch_commit_data(self.review_request_details)
     data.set_for(self.review_request_details, self.field_id, value)
     data.save(update_fields=['extra_data', 'draft_extra_data'])
 def load_value(self, review_request_details):
     """Return this field's stored value for ``review_request_details``.

     A fresh CommitData is fetched for ``review_request_details`` rather
     than reusing ``self.commit_data``; see the comment on
     BaseReviewRequestField.
     """
     data = fetch_commit_data(review_request_details)
     return data.get_for(review_request_details, self.field_id)
def on_draft_pre_delete(sender, instance, using, **kwargs):
    """Handle draft discards.

    There are no handy signals built into Review Board (yet) for us to detect
    when a squashed Review Request Draft is discarded. Instead, we monitor for
    deletions of models, and handle cases where the models being deleted are
    ReviewRequestDrafts. We then do some processing to ensure that the draft
    is indeed a draft of a squashed review request that we want to handle,
    and then propagate the discard down to the child review requests.

    Args:
        sender: the model class being deleted; anything other than
            ReviewRequestDraft is ignored.
        instance: the ReviewRequestDraft being deleted.
        using: the database alias (unused; part of the signal signature).
    """
    # Class identity check — ``is not`` is the idiomatic (and cheaper)
    # comparison for model classes.
    if sender is not ReviewRequestDraft:
        return

    # Drafts can get deleted for a number of reasons. They get deleted when
    # drafts are discarded, obviously, but also whenever review requests are
    # published, because the data gets copied over to the review request, and
    # then the draft is blown away. Unfortunately, on_pre_delete doesn't give
    # us too many clues about which scenario we're in, so we have to infer it
    # based on other things attached to the model. This is a temporary fix
    # until we get more comprehensive draft deletion signals built into Review
    # Board.
    #
    # In the case where the review request is NOT public yet, the draft will
    # not have a change description. In this case, we do not need to
    # differentiate between publish and discard because discards of non-public
    # review request's drafts will always cause the review request to be closed
    # as discarded, and this case is handled by on_review_request_closed().
    #
    # In the case where the review request has a change description, but it's
    # set to public, we must have just published this draft before deleting it,
    # so there's nothing to do here.
    if instance.changedesc is None or instance.changedesc.public:
        return

    review_request = instance.review_request

    if not review_request:
        return

    commit_data = fetch_commit_data(review_request)

    # Only parent (squashed) review requests propagate discards to children.
    if not is_parent(review_request, commit_data):
        return

    # If the review request is marked as discarded, then we must be closing
    # it, and so the on_review_request_closed() handler will take care of it.
    if review_request.status == ReviewRequest.DISCARDED:
        return

    user = review_request.submitter

    # Drop any pending drafts on the children to mirror the parent discard.
    for child in gen_child_rrs(review_request, commit_data=commit_data):
        draft = child.get_draft()
        if draft:
            draft.delete()

    # Children that were created but never published would otherwise linger
    # in the dashboard; close them as discarded.
    for child in gen_rrs_by_extra_data_key(review_request,
                                           UNPUBLISHED_KEY,
                                           commit_data=commit_data):
        child.close(ReviewRequest.DISCARDED,
                    user=user,
                    description=NEVER_USED_DESCRIPTION)

    # Reset the bookkeeping lists now that they have been processed.
    commit_data.extra_data[DISCARD_ON_PUBLISH_KEY] = '[]'
    commit_data.extra_data[UNPUBLISHED_KEY] = '[]'
    commit_data.save(update_fields=['extra_data'])
def update_review_request(local_site, request, privileged_user, reviewer_cache,
                          rr, commit, create_comm_msg_filediff=False):
    """Synchronize the state of a review request with a commit.

    Updates the commit message, refreshes the diff, resolves reviewers,
    and verifies the newly created DiffSet.

    Args:
        local_site: the LocalSite the review request belongs to.
        request: the HTTP request, forwarded to diff creation.
        privileged_user: user authorized to verify the new DiffSet.
        reviewer_cache: ReviewerCache used to resolve reviewer names.
        rr: the ReviewRequest to update.
        commit: dict describing the commit; this function reads
            'message', 'bug', 'author', 'id', 'first_public_ancestor',
            'diff_b64' and optionally 'reviewers', 'requal_reviewers'
            and 'base_commit_id'.
        create_comm_msg_filediff: whether a commit-message FileDiff
            should be created for this review request. Callers in
            _process_submission pass this positionally, so it must be
            accepted here; it is currently not consumed in this function
            — TODO confirm where the commit-message FileDiff is built.

    Returns:
        tuple of (draft, warnings): the updated ReviewRequestDraft and a
        list of human-readable warning strings.

    Raises:
        DiffProcessingException: creating or verifying the DiffSet failed.
    """
    # Reuse the existing draft if one exists; otherwise start a new one.
    try:
        draft = rr.draft.get()
    except ReviewRequestDraft.DoesNotExist:
        draft = ReviewRequestDraft.create(rr)

    # First line of the commit message is the summary; the whole message
    # becomes the description.
    draft.summary = commit['message'].splitlines()[0]
    draft.description = commit['message']
    draft.bugs_closed = commit['bug']

    commit_data = fetch_commit_data(draft)
    commit_data.draft_extra_data.update({
        AUTHOR_KEY: commit['author'],
        COMMIT_ID_KEY: commit['id'],
        FIRST_PUBLIC_ANCESTOR_KEY: commit['first_public_ancestor'],
    })
    commit_data.save(
        update_fields=['draft_extra_data'])

    reviewer_users, unrecognized_reviewers = \
        resolve_reviewers(reviewer_cache, commit.get('reviewers', []))
    requal_reviewer_users, unrecognized_requal_reviewers = \
        resolve_reviewers(reviewer_cache, commit.get('requal_reviewers', []))

    warnings = []

    for reviewer in unrecognized_reviewers | unrecognized_requal_reviewers:
        warnings.append('unrecognized reviewer: %s' % reviewer)
        logger.info('unrecognized reviewer: %s' % reviewer)

    # r=<nick> in the commit message only counts if that reviewer actually
    # granted a ship-it last time; otherwise warn and re-request review.
    if requal_reviewer_users:
        pr = previous_reviewers(rr)
        for user in requal_reviewer_users:
            if not pr.get(user.username, False):
                warnings.append('commit message for %s has r=%s but they '
                                'have not granted a ship-it. review will be '
                                'requested on your behalf' % (
                                commit['id'][:12], user.username))

        reviewer_users |= requal_reviewer_users

    # Carry over from last time unless commit message overrules.
    if reviewer_users:
        draft.target_people.clear()
    for user in sorted(reviewer_users):
        draft.target_people.add(user)
        logger.debug('adding reviewer %s to #%d' % (user.username, rr.id))

    try:
        # create_from_data() skips diff-size validation (unlimited diffs
        # are allowed here).
        diffset = DiffSet.objects.create_from_data(
            repository=rr.repository,
            diff_file_name='diff',
            diff_file_contents=commit['diff_b64'].encode('ascii').decode('base64'),
            parent_diff_file_name='diff',
            parent_diff_file_contents=None,
            diffset_history=None,
            basedir='',
            request=request,
            base_commit_id=commit.get('base_commit_id'),
            save=True,
        )
        update_diffset_history(rr, diffset)
        diffset.save()

        # Mark the diff as server-generated so publishing is permitted.
        DiffSetVerification(diffset=diffset).save(
            authorized_user=privileged_user, force_insert=True)
    except Exception:
        logger.exception('error processing diff')
        raise DiffProcessingException()

    update_review_request_draft_diffset(rr, diffset, draft=draft)

    return draft, warnings
    def get(self, request, *args, **kwargs):
        """Return commit and reviewer details for an Autoland request.

        Resolves the parent (squashed) review request for the given id,
        checks accessibility and approval of every child, and returns a
        200 payload listing each child commit with its ship-it reviewers.

        Returns:
            Either a webapi error (DOES_NOT_EXIST, NOT_PARENT,
            PERMISSION_DENIED, AUTOLAND_REVIEW_NOT_APPROVED) or a
            (200, payload) tuple with 'commits', 'total_results' and
            'links' keys.
        """
        try:
            parent_request = get_parent_rr(
                ReviewRequest.objects.get(id=kwargs[self.uri_object_key]))
        except ReviewRequest.DoesNotExist:
            return DOES_NOT_EXIST
        if parent_request is None:
            return DOES_NOT_EXIST

        commit_data = fetch_commit_data(parent_request)

        if not is_parent(parent_request, commit_data):
            return NOT_PARENT
        if not parent_request.is_accessible_by(request.user):
            return PERMISSION_DENIED
        if COMMITS_KEY not in commit_data.extra_data:
            # The key actually being tested is COMMITS_KEY (the old
            # message misnamed it "COMMIT_KEY").
            logging.error('Parent review request %s missing COMMITS_KEY' %
                          parent_request.id)
            return NOT_PARENT

        result = []
        # COMMITS_KEY holds a JSON list of [commit_sha, review_request_id]
        # pairs.
        children = json.loads(commit_data.extra_data[COMMITS_KEY])
        for child in children:
            try:
                child_request = ReviewRequest.objects.get(id=child[1])
            except ReviewRequest.DoesNotExist:
                return DOES_NOT_EXIST
            if not child_request.approved:
                return AUTOLAND_REVIEW_NOT_APPROVED

            reviewers = [
                r.user.username for r in gen_latest_reviews(child_request)
                if r.ship_it and r.user != child_request.submitter
            ]

            if not reviewers and child_request.approved:
                # This review request is approved (the repeated check is
                # to ensure this is guaranteed if other parts of the code
                # change) but we have an empty list of reviewers. We'll
                # assume the author has just approved this themself.
                reviewers.append(child_request.submitter.username)

            # Detect if the commit has been changed since the last review.
            shipit_carryforward = has_shipit_carryforward(child_request)

            result.append({
                'commit': child[0],
                'id': child[1],
                'reviewers': reviewers,
                'shipit_carryforward': shipit_carryforward,
                'summary': replace_reviewers(child_request.description,
                                             reviewers),
            })

        return 200, {
            'commits': result,
            'total_results': len(result),
            'links': self.get_links(request=request),
        }
Beispiel #50
0
    def _process_submission(self, request, local_site, user, privileged_user,
                            repo, identifier, commits):
        logger.info('processing batch submission %s to %s with %d commits' %
                    (identifier, repo.name, len(commits['individual'])))

        try:
            squashed_rr = ReviewRequest.objects.get(commit_id=identifier,
                                                    repository=repo)
            if not squashed_rr.is_mutable_by(user):
                logger.warn('%s not mutable by %s' % (squashed_rr.id, user))
                raise SubmissionException(self.get_no_access_error(request))

            if squashed_rr.status != ReviewRequest.PENDING_REVIEW:
                logger.warn('%s is not a pending review request; cannot edit' %
                            squashed_rr.id)
                raise SubmissionException(
                    REVIEW_REQUEST_UPDATE_NOT_ALLOWED.with_message(
                        SUBMITTED_OR_DISCARDED_ERROR))

            logger.info('using squashed review request %d' % squashed_rr.id)

        except ReviewRequest.DoesNotExist:
            squashed_rr = ReviewRequest.objects.create(user=user,
                                                       repository=repo,
                                                       commit_id=identifier,
                                                       local_site=local_site)

            squashed_commit_data = fetch_commit_data(squashed_rr)
            squashed_commit_data.extra_data.update({
                MOZREVIEW_KEY:
                True,
                IDENTIFIER_KEY:
                identifier,
                FIRST_PUBLIC_ANCESTOR_KEY:
                (commits['squashed']['first_public_ancestor']),
                HAS_COMMIT_MSG_FILEDIFF_KEY:
                True,
                SQUASHED_KEY:
                True,
                DISCARD_ON_PUBLISH_KEY:
                '[]',
                UNPUBLISHED_KEY:
                '[]',
            })
            squashed_commit_data.draft_extra_data.update({
                IDENTIFIER_KEY:
                identifier,
            })
            squashed_commit_data.save(
                update_fields=['extra_data', 'draft_extra_data'])

            logger.info('created squashed review request #%d' % squashed_rr.id)

        # The diffs on diffsets can't be updated, only replaced. So always
        # construct the diffset.

        try:
            # TODO consider moving diffset creation outside of the transaction
            # since it can be quite time consuming.
            # Calling create_from_data() instead of create_from_upload() skips
            # diff size validation. We allow unlimited diff sizes, so no biggie.
            logger.info('%s: generating squashed diffset for %d' %
                        (identifier, squashed_rr.id))
            diffset = DiffSet.objects.create_from_data(
                repository=repo,
                diff_file_name='diff',
                # The original value is a unicode instance. Python 3 can't
                # .encode() a unicode instance, so go to str first.
                diff_file_contents=commits['squashed']['diff_b64'].encode(
                    'ascii').decode('base64'),
                parent_diff_file_name=None,
                parent_diff_file_contents=None,
                diffset_history=None,
                basedir='',
                request=request,
                base_commit_id=commits['squashed'].get('base_commit_id'),
                save=True,
            )

            update_diffset_history(squashed_rr, diffset)
            diffset.save()

            # We pass `force_insert=True` to save to make sure Django generates
            # an SQL INSERT rather than an UPDATE if the DiffSetVerification
            # already exists. It should never already exist so we want the
            # exception `force_insert=True` will cause if that's the case.
            DiffSetVerification(diffset=diffset).save(
                authorized_user=privileged_user, force_insert=True)
        except Exception:
            logger.exception('error processing squashed diff')
            raise DiffProcessingException()

        update_review_request_draft_diffset(squashed_rr, diffset)
        logger.info('%s: updated squashed diffset for %d' %
                    (identifier, squashed_rr.id))

        # TODO: We need to take into account the commits data from the squashed
        # review request's draft. This data represents the mapping from commit
        # to rid in the event that we would have published. We're overwritting
        # this data. This will only come into play if we start trusting the server
        # instead of the client when matching review request ids. Bug 1047516

        squashed_commit_data = fetch_commit_data(squashed_rr)
        previous_commits = get_previous_commits(squashed_rr,
                                                squashed_commit_data)
        remaining_nodes = get_remaining_nodes(previous_commits)
        discard_on_publish_rids = get_discard_on_publish_rids(
            squashed_rr, squashed_commit_data)
        unpublished_rids = get_unpublished_rids(squashed_rr,
                                                squashed_commit_data)
        unclaimed_rids = get_unclaimed_rids(previous_commits,
                                            discard_on_publish_rids,
                                            unpublished_rids)

        logger.info('%s: %d previous commits; %d discard on publish; '
                    '%d unpublished' %
                    (identifier, len(previous_commits),
                     len(discard_on_publish_rids), len(unpublished_rids)))

        # Previously pushed nodes which have been processed and had their review
        # request updated or did not require updating.
        processed_nodes = set()

        node_to_rid = {}

        # A mapping from review request id to the corresponding ReviewRequest.
        review_requests = {}

        # A mapping of review request id to dicts of additional metadata.
        review_data = {}

        squashed_reviewers = set()
        reviewer_cache = ReviewerCache(request)

        warnings = []

        # Do a pass and find all commits that map cleanly to old review requests.
        for commit in commits['individual']:
            node = commit['id']

            if node not in remaining_nodes:
                continue

            # If the commit appears in an old review request, by definition of
            # commits deriving from content, the commit has not changed and there
            # is nothing to update. Update our accounting and move on.
            rid = remaining_nodes[node]
            logger.info('%s: commit %s unchanged; using existing request %d' %
                        (identifier, node, rid))

            del remaining_nodes[node]
            unclaimed_rids.remove(rid)
            processed_nodes.add(node)
            node_to_rid[node] = rid

            rr = ReviewRequest.objects.get(pk=rid)
            review_requests[rid] = rr
            review_data[rid] = get_review_request_data(rr)

            try:
                discard_on_publish_rids.remove(rid)
            except ValueError:
                pass

        logger.info(
            '%s: %d/%d commits mapped exactly' %
            (identifier, len(processed_nodes), len(commits['individual'])))

        # Commit msg FileDiff should be created only if this is a completely
        # new ReviewRequest, or if the ReviewRequest we're updating
        # already had commit message FileDiff.
        create_comm_msg_filediff = squashed_commit_data.extra_data.get(
            HAS_COMMIT_MSG_FILEDIFF_KEY, False)

        # Find commits that map to a previous version.
        for commit in commits['individual']:
            node = commit['id']
            if node in processed_nodes:
                continue

            # The client may have sent obsolescence data saying which commit this
            # commit has derived from. Use that data (if available) to try to find
            # a mapping to an old review request.
            for precursor in commit['precursors']:
                rid = remaining_nodes.get(precursor)
                if not rid:
                    continue

                logger.info('%s: found precursor to commit %s; '
                            'using existing review request %d' %
                            (identifier, node, rid))

                del remaining_nodes[precursor]
                unclaimed_rids.remove(rid)

                rr = ReviewRequest.objects.get(pk=rid)
                draft, warns = update_review_request(local_site, request,
                                                     privileged_user,
                                                     reviewer_cache, rr,
                                                     commit,
                                                     create_comm_msg_filediff)
                squashed_reviewers.update(u for u in draft.target_people.all())
                warnings.extend(warns)
                processed_nodes.add(node)
                node_to_rid[node] = rid
                review_requests[rid] = rr
                review_data[rid] = get_review_request_data(rr)

                try:
                    discard_on_publish_rids.remove(rid)
                except ValueError:
                    pass

                break

        logger.info(
            '%s: %d/%d mapped exactly or to precursors' %
            (identifier, len(processed_nodes), len(commits['individual'])))

        # Clients should add "MozReview-Commit-ID" unique identifiers to
        # commit messages. Search for them and match up accordingly.

        unclaimed_rrs = [
            ReviewRequest.objects.get(pk=rid) for rid in unclaimed_rids
        ]

        for commit in commits['individual']:
            node = commit['id']
            if node in processed_nodes:
                continue

            commit_id = parse_commit_id(commit['message'])
            if not commit_id:
                logger.warn('%s: commit %s does not have commit id' %
                            (identifier, node))
                continue

            for rr in unclaimed_rrs:
                rr_commit_id = parse_commit_id(rr.description)
                if commit_id != rr_commit_id:
                    continue

                # commit ID in commit found in existing review request. Map
                # it up.
                logger.info(
                    '%s: commit ID %s for %s found in review request %d' %
                    (identifier, commit_id, node, rr.id))

                try:
                    del remaining_nodes[node]
                except KeyError:
                    pass

                unclaimed_rids.remove(rr.id)
                unclaimed_rrs.remove(rr)
                draft, warns = update_review_request(local_site, request,
                                                     privileged_user,
                                                     reviewer_cache, rr,
                                                     commit,
                                                     create_comm_msg_filediff)
                squashed_reviewers.update(u for u in draft.target_people.all())
                warnings.extend(warns)
                processed_nodes.add(node)
                node_to_rid[node] = rr.id
                review_requests[rr.id] = rr
                review_data[rr.id] = get_review_request_data(rr)
                try:
                    discard_on_publish_rids.remove(rr.id)
                except ValueError:
                    pass

                break

        logger.info(
            '%s: %d/%d mapped after commit ID matching' %
            (identifier, len(processed_nodes), len(commits['individual'])))

        logger.info('%s: %d unclaimed review requests' %
                    (identifier, len(unclaimed_rids)))

        # Now do a pass over the commits that didn't map cleanly.
        for commit in commits['individual']:
            node = commit['id']
            if node in processed_nodes:
                continue

            # We haven't seen this commit before *and* our mapping above didn't
            # do anything useful with it.

            # This is where things could get complicated. We could involve
            # heuristic based matching (comparing commit messages, changed
            # files, etc). We may do that in the future.

            # For now, match the commit up against the next one in the index.
            # The unclaimed rids list contains review requests which were created
            # when previously updating this review identifier, but not published.
            # If we have more commits than were previously published we'll start
            # reusing these private review requests before creating new ones.
            #
            # We don't reuse existing review requests when obsolescence data is
            # available because the lack of a clean commit mapping (from above)
            # means that the commit is logically new and shouldn't be
            # associated with a review request that belonged to a different
            # logical commit.
            if unclaimed_rids and not commits.get('obsolescence', False):
                assumed_old_rid = unclaimed_rids.pop(0)

                logger.info('%s: mapping %s to unclaimed request %d' %
                            (identifier, node, assumed_old_rid))

                rr = ReviewRequest.objects.get(pk=assumed_old_rid)
                draft, warns = update_review_request(local_site, request,
                                                     privileged_user,
                                                     reviewer_cache, rr,
                                                     commit,
                                                     create_comm_msg_filediff)
                squashed_reviewers.update(u for u in draft.target_people.all())
                warnings.extend(warns)
                processed_nodes.add(commit['id'])
                node_to_rid[node] = assumed_old_rid
                review_requests[assumed_old_rid] = rr
                review_data[assumed_old_rid] = get_review_request_data(rr)

                try:
                    discard_on_publish_rids.remove(assumed_old_rid)
                except ValueError:
                    pass

                continue

            # There are no more unclaimed review request IDs. This means we have
            # more commits than before. Create new review requests as appropriate.
            rr = ReviewRequest.objects.create(user=user,
                                              repository=repo,
                                              commit_id=None,
                                              local_site=local_site)

            commit_data = fetch_commit_data(rr)
            commit_data.extra_data.update({
                MOZREVIEW_KEY: True,
                IDENTIFIER_KEY: identifier,
                SQUASHED_KEY: False,
            })
            commit_data.draft_extra_data.update({
                AUTHOR_KEY: commit['author'],
                IDENTIFIER_KEY: identifier,
            })
            commit_data.save(update_fields=['extra_data', 'draft_extra_data'])

            logger.info('%s: created review request %d for commit %s' %
                        (identifier, rr.id, node))
            draft, warns = update_review_request(local_site, request,
                                                 privileged_user,
                                                 reviewer_cache, rr, commit,
                                                 create_comm_msg_filediff)
            squashed_reviewers.update(u for u in draft.target_people.all())
            warnings.extend(warns)
            processed_nodes.add(commit['id'])
            node_to_rid[node] = rr.id
            review_requests[rr.id] = rr
            review_data[rr.id] = get_review_request_data(rr)
            unpublished_rids.append(rr.id)

        # At this point every incoming commit has been accounted for.
        # If there are any remaining review requests, they must belong to
        # deleted commits. (Or, we made a mistake and updated the wrong review
        # request)
        logger.info('%s: %d unclaimed review requests left over' %
                    (identifier, len(unclaimed_rids)))
        for rid in unclaimed_rids:
            rr = ReviewRequest.objects.get(pk=rid)

            if rr.public and rid not in discard_on_publish_rids:
                # This review request has already been published so we'll need to
                # discard it when we publish the squashed review request.
                discard_on_publish_rids.append(rid)
            elif not rr.public and rid not in unpublished_rids:
                # We've never published this review request so it may be reused in
                # the future for *any* commit. Keep track of it.
                unpublished_rids.append(rid)
            else:
                # This means we've already marked the review request properly
                # in a previous push, so do nothing.
                pass

        commit_list = []
        for commit in commits['individual']:
            node = commit['id']
            commit_list.append([node, node_to_rid[node]])

        # We need to refresh the squashed rr and draft because post save hooks
        # in ReviewBoard result in magical changes to some of its fields.
        squashed_rr = ReviewRequest.objects.get(pk=squashed_rr.id)
        squashed_draft = squashed_rr.draft.get()
        squashed_commit_data = fetch_commit_data(squashed_rr)

        squashed_draft.summary = identifier

        # Reviewboard does not allow review requests with empty descriptions to
        # be published, so we insert some filler here.
        squashed_draft.description = 'This is the parent review request'
        squashed_draft.bugs_closed = ','.join(
            sorted(set(commit['bug'] for commit in commits['individual'])))

        squashed_draft.depends_on.clear()
        for rrid in sorted(node_to_rid.values()):
            rr = ReviewRequest.objects.for_id(rrid, local_site)
            squashed_draft.depends_on.add(rr)

        squashed_draft.target_people.clear()
        for user in sorted(squashed_reviewers):
            squashed_draft.target_people.add(user)

        squashed_commit_data.draft_extra_data[COMMITS_KEY] = json.dumps(
            commit_list)

        if 'base_commit_id' in commits['squashed']:
            squashed_commit_data.draft_extra_data[BASE_COMMIT_KEY] = (
                commits['squashed']['base_commit_id'])

        squashed_commit_data.extra_data.update({
            DISCARD_ON_PUBLISH_KEY:
            json.dumps(discard_on_publish_rids),
            FIRST_PUBLIC_ANCESTOR_KEY:
            (commits['squashed']['first_public_ancestor']),
            UNPUBLISHED_KEY:
            json.dumps(unpublished_rids),
        })

        squashed_draft.save()
        squashed_rr.save(update_fields=['extra_data'])
        squashed_commit_data.save(
            update_fields=['extra_data', 'draft_extra_data'])

        review_requests[squashed_rr.id] = squashed_rr
        review_data[squashed_rr.id] = get_review_request_data(squashed_rr)

        return squashed_rr, node_to_rid, review_data, warnings
 def save_value(self, value):
     """Persist ``value`` for this field on the request's CommitData.

     The value is routed through ``CommitData.set_for``, which decides
     whether it lands in ``extra_data`` (published) or
     ``draft_extra_data`` (draft) based on the state of
     ``self.review_request_details``; both columns are saved.
     """
     details = self.review_request_details
     data = fetch_commit_data(details)
     data.set_for(details, self.field_id, value)
     data.save(update_fields=['extra_data', 'draft_extra_data'])
# Beispiel #52
# 0
def on_review_request_publishing(user, review_request_draft, **kwargs):
    """Validate and orchestrate publishing of a MozReview review request.

    Connected to Review Board's ``review_request_publishing`` signal.

    For every draft this enforces that any new DiffSet was pushed (has a
    ``DiffSetVerification``) and, for pushed requests, that the identifier
    matches ``REVIEWID_RE`` and references a valid, non-confidential
    Bugzilla bug (when the Bugzilla auth backend is active).

    A child (non-parent) request may only be published while its parent is
    publishing: approval is requested via the
    ``commit_request_publishing`` signal and ``CommitPublishProhibited``
    is raised if no receiver approves.

    For a parent (squashed) request, this publishes all child drafts,
    updates Bugzilla attachments for them first, and discards review
    requests that are no longer backed by a commit.

    Args:
        user: The User performing the publish.
        review_request_draft: The ReviewRequestDraft being published (may
            be falsy in rare local-dev cases; handled and logged).
        **kwargs: Remaining signal arguments (unused).

    Raises:
        PublishError: A manually uploaded (unverified) diff was found.
        CommitPublishProhibited: Child publish not approved by a parent.
        InvalidBugIdError: The identifier's bug ID is missing or invalid.
        ConfidentialBugError: The bug is confidential.
    """
    # There have been strange cases (all local, and during development), where
    # when attempting to publish a review request, this handler will fail
    # because the draft does not exist. This is a really strange case, and not
    # one we expect to happen in production. However, since we've seen it
    # locally, we handle it here, and log.
    if not review_request_draft:
        logger.error('Strangely, there was no review request draft on the '
                     'review request we were attempting to publish.')
        return

    # If the review request draft has a new DiffSet we will only allow
    # publishing if that DiffSet has been verified. It is important to
    # do this for every review request, not just pushed ones, because
    # we can't trust the storage mechanism which indicates it was pushed.
    # TODO: This will be fixed when we transition away from extra_data.
    if review_request_draft.diffset:
        try:
            DiffSetVerification.objects.get(
                diffset=review_request_draft.diffset)
        except DiffSetVerification.DoesNotExist:
            logger.error(
                'An attempt was made by User %s to publish an unverified '
                'DiffSet with id %s', user.id, review_request_draft.diffset.id)

            raise PublishError(
                'This review request draft contained a manually uploaded '
                'diff, which is prohibited. Please push to the review server '
                'to create review requests. If you believe you received this '
                'message in error, please file a bug.')

    review_request = review_request_draft.get_review_request()
    commit_data = fetch_commit_data(review_request)

    # skip review requests that were not pushed
    if not is_pushed(review_request, commit_data=commit_data):
        return

    if not is_parent(review_request, commit_data):
        # Send a signal asking for approval to publish this review request.
        # We only want to publish this commit request if we are in the middle
        # of publishing the parent. If the parent is publishing it will be
        # listening for this signal to approve it.
        approvals = commit_request_publishing.send_robust(
            sender=review_request,
            user=user,
            review_request_draft=review_request_draft)

        for receiver, approved in approvals:
            if approved:
                break
        else:
            # This publish is not approved by the parent review request.
            raise CommitPublishProhibited()

    # The reviewid passed through p2rb is, for Mozilla's instance anyway,
    # bz://<bug id>/<irc nick>.
    # NOTE(review): if IDENTIFIER_KEY is absent, reviewid is None and
    # re.match() raises TypeError rather than InvalidBugIdError — presumably
    # pushed requests always carry an identifier; confirm upstream.
    reviewid = commit_data.draft_extra_data.get(IDENTIFIER_KEY, None)
    m = REVIEWID_RE.match(reviewid)

    if not m:
        raise InvalidBugIdError('<unknown>')

    bug_id = m.group(1)

    try:
        bug_id = int(bug_id)
    except (TypeError, ValueError):
        raise InvalidBugIdError(bug_id)

    siteconfig = SiteConfiguration.objects.get_current()
    using_bugzilla = (siteconfig.settings.get("auth_backend",
                                              "builtin") == "bugzilla")

    if using_bugzilla:
        commit_data = fetch_commit_data(review_request_draft)
        # PUBLISH_AS_KEY lets a privileged caller publish using another
        # user's Bugzilla credentials (e.g. Autoland).
        publish_as_id = commit_data.draft_extra_data.get(PUBLISH_AS_KEY)
        if publish_as_id:
            u = User.objects.get(id=publish_as_id)
            b = Bugzilla(get_bugzilla_api_key(u))
        else:
            b = Bugzilla(get_bugzilla_api_key(user))

        try:
            if b.is_bug_confidential(bug_id):
                raise ConfidentialBugError
        except BugzillaError as e:
            # Special cases:
            #   100: Invalid Bug Alias
            #   101: Bug does not exist
            if e.fault_code and (e.fault_code == 100 or e.fault_code == 101):
                raise InvalidBugIdError(bug_id)
            raise

    # Note that the bug ID has already been set when the review was created.

    # If this is a squashed/parent review request, automatically publish all
    # relevant children.
    if is_parent(review_request, commit_data):
        # Wrap in list(): on Python 3 map() returns a one-shot iterator,
        # which would break the `in` membership tests and the
        # unpublished_rids.remove() call below. On Python 2 this is a no-op.
        unpublished_rids = list(map(
            int, json.loads(commit_data.extra_data[UNPUBLISHED_KEY])))
        discard_on_publish_rids = list(map(
            int, json.loads(commit_data.extra_data[DISCARD_ON_PUBLISH_KEY])))
        child_rrs = list(gen_child_rrs(review_request_draft))

        # Create or update Bugzilla attachments for each draft commit.  This
        # is done before the children are published to ensure that MozReview
        # doesn't get into a strange state if communication with Bugzilla is
        # broken or attachment creation otherwise fails.  The Bugzilla
        # attachments will then, of course, be in a weird state, but that
        # should be fixed by the next successful publish.
        if using_bugzilla:
            children_to_post = []
            children_to_obsolete = []

            for child in child_rrs:
                child_draft = child.get_draft(user=user)

                if child_draft:
                    if child.id in discard_on_publish_rids:
                        children_to_obsolete.append(child)

                    children_to_post.append((child_draft, child))

            if children_to_post or children_to_obsolete:
                update_bugzilla_attachments(b, bug_id, children_to_post,
                                            children_to_obsolete)

        # Publish draft commits. This will already include items that are in
        # unpublished_rids, so we'll remove anything we publish out of
        # unpublished_rids.
        for child in child_rrs:
            if child.get_draft(user=user) or not child.public:

                def approve_publish(sender, user, review_request_draft,
                                    **kwargs):
                    # Approve only the child we are publishing right now.
                    return child is sender

                # Setup the parent signal handler to approve the publish
                # and then publish the child.
                commit_request_publishing.connect(approve_publish,
                                                  sender=child,
                                                  weak=False)
                try:
                    child.publish(user=user)
                except NotModifiedError:
                    # As we create empty drafts as part of allowing reviewer
                    # delegation, delete these empty drafts instead of
                    # throwing an error.
                    child.get_draft(user=user).delete()
                finally:
                    commit_request_publishing.disconnect(
                        receiver=approve_publish, sender=child, weak=False)

                if child.id in unpublished_rids:
                    unpublished_rids.remove(child.id)

        # The remaining unpubished_rids need to be closed as discarded because
        # they have never been published, and they will appear in the user's
        # dashboard unless closed.
        for child in gen_rrs_by_rids(unpublished_rids):
            child.close(ReviewRequest.DISCARDED,
                        user=user,
                        description=NEVER_USED_DESCRIPTION)

        # We also close the discard_on_publish review requests because, well,
        # we don't need them anymore. We use a slightly different message
        # though.
        for child in gen_rrs_by_rids(discard_on_publish_rids):
            child.close(ReviewRequest.DISCARDED,
                        user=user,
                        description=OBSOLETE_DESCRIPTION)

        commit_data.extra_data[UNPUBLISHED_KEY] = '[]'
        commit_data.extra_data[DISCARD_ON_PUBLISH_KEY] = '[]'

    # Copy any drafted CommitData from draft_extra_data to extra_data.
    for key in DRAFTED_COMMIT_DATA_KEYS:
        if key in commit_data.draft_extra_data:
            commit_data.extra_data[key] = commit_data.draft_extra_data[key]

    commit_data.save(update_fields=['extra_data'])

    review_request.save()