Example #1
def update_pr(pr_number):
    logging.debug("Updating pull request %i" % pr_number)
    pr = Issue.get_or_create(pr_number)
    issue_response = raw_github_request(get_pulls_base() + '/%i' % pr_number,
                                        oauth_token=oauth_token, etag=pr.etag)
    if issue_response is None:
        logging.debug("PR %i hasn't changed since last visit; skipping" % pr_number)
        return "Done updating pull request %i (nothing changed)" % pr_number
    pr.pr_json = json.loads(issue_response.content)
    pr.etag = issue_response.headers["ETag"]
    pr.state = pr.pr_json['state']
    pr.user = pr.pr_json['user']['login']
    pr.updated_at = \
        parse_datetime(pr.pr_json['updated_at']).astimezone(tz.tzutc()).replace(tzinfo=None)

    for issue_number in pr.parsed_title['jiras']:
        try:
            link_issue_to_pr("%s-%s" % (app.config['JIRA_PROJECT'], issue_number), pr)
        except Exception:
            logging.exception("Exception when linking to JIRA issue %s-%s" %
                              (app.config['JIRA_PROJECT'], issue_number))
        try:
            start_issue_progress("%s-%s" % (app.config['JIRA_PROJECT'], issue_number))
        except Exception:
            logging.exception(
                "Exception when starting progress on JIRA issue %s-%s" %
                (app.config['JIRA_PROJECT'], issue_number))

    pr.put()  # Write our modifications back to the database

    subtasks = [".update_pr_comments", ".update_pr_review_comments", ".update_pr_files"]
    for task in subtasks:
        taskqueue.add(url=url_for(task, pr_number=pr_number), queue_name='fresh-prs')

    return "Done updating pull request %i" % pr_number
Example #2
def update_pr_files(pr_number):
    pr = Issue.get(pr_number)
    files_response = paginated_github_request(get_pulls_base() + "/%i/files" % pr_number,
                                              oauth_token=oauth_token, etag=pr.files_etag)
    if files_response is None:
        return "Files for PR %i are up-to-date" % pr_number
    else:
        pr.files_json, pr.files_etag = files_response
        pr.put()  # Write our modifications back to the database
        return "Done updating files for PR %i" % pr_number
Example #3
def update_pr_review_comments(pr_number):
    pr = Issue.get(pr_number)
    pr_comments_response = paginated_github_request(get_pulls_base() + '/%i/comments' % pr_number,
                                                    oauth_token=oauth_token)
    # TODO: after fixing #32, re-enable etags here: etag=pr.pr_review_comments_etag
    if pr_comments_response is None:
        return "Review comments for PR %i are up-to-date" % pr_number
    else:
        pr.pr_comments_json, pr.pr_comments_etag = pr_comments_response
        pr.cached_commenters = pr._compute_commenters()
        pr.put()  # Write our modifications back to the database
        return "Done updating review comments for PR %i" % pr_number
Example #4
def update_pr(pr_number):
    logging.debug("Updating pull request %i" % pr_number)
    pr = Issue.get_or_create(pr_number)
    try:
        issue_response = raw_github_request(get_pulls_base() + '/%i' % pr_number,
                                            oauth_token=oauth_token, etag=pr.etag)
    except HTTPError as e:
        if e.code == 404:
            logging.debug("Pull request %i has been deleted" % pr_number)
            pr.state = "deleted"
            pr.put()
            return "Done updating pull request %i (PR deleted)" % pr_number
        else:
            raise
    if issue_response is None:
        logging.debug("PR %i hasn't changed since last visit; skipping" % pr_number)
        return "Done updating pull request %i (nothing changed)" % pr_number
    pr.pr_json = json.loads(issue_response.content)
    pr.etag = issue_response.headers["ETag"]
    pr.state = pr.pr_json['state']
    pr.user = pr.pr_json['user']['login']
    pr.updated_at = \
        parse_datetime(pr.pr_json['updated_at']).astimezone(tz.tzutc()).replace(tzinfo=None)

    for issue_number in pr.parsed_title['jiras']:
        try:
            link_issue_to_pr("%s-%s" % (app.config['JIRA_PROJECT'], issue_number), pr)
        except Exception:
            logging.exception("Exception when linking to JIRA issue %s-%s" %
                              (app.config['JIRA_PROJECT'], issue_number))
        try:
            start_issue_progress("%s-%s" % (app.config['JIRA_PROJECT'], issue_number))
        except Exception:
            logging.exception(
                "Exception when starting progress on JIRA issue %s-%s" %
                (app.config['JIRA_PROJECT'], issue_number))

    pr.put()  # Write our modifications back to the database

    subtasks = [".update_pr_comments", ".update_pr_review_comments", ".update_pr_files"]
    for task in subtasks:
        taskqueue.add(url=url_for(task, pr_number=pr_number), queue_name='fresh-prs')

    return "Done updating pull request %i" % pr_number
Example #5
def update_github_prs():
    last_update_time = KVS.get("issues_since")
    if last_update_time:
        last_update_time = \
            parse_datetime(last_update_time).astimezone(tz.tzutc()).replace(tzinfo=None)
    else:
        # If no update has ever run successfully, store "now" as the watermark. If this update
        # task fails (because there are too many old PRs to load / backfill) then there's a chance
        # that this initial timestamp won't be the true watermark. If we are trying to bulk-load
        # old data then this should be done by calling /github/backfill-prs instead.
        last_update_time = datetime.min
        KVS.put('issues_since', datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"))

    def fetch_and_process(url):
        logging.debug("Following url %s" % url)
        response = raw_github_request(url, oauth_token=oauth_token)
        prs = json.loads(response.content)
        now = datetime.utcnow()
        should_continue_loading = True
        update_time = last_update_time
        for pr in prs:
            updated_at = \
                parse_datetime(pr['updated_at']).astimezone(tz.tzutc()).replace(tzinfo=None)
            update_time = max(update_time, updated_at)
            if updated_at < last_update_time:
                should_continue_loading = False
                break
            is_fresh = (now - updated_at).total_seconds() < app.config['FRESHNESS_THRESHOLD']
            queue_name = ("fresh-prs" if is_fresh else "old-prs")
            taskqueue.add(url=url_for(".update_pr", pr_number=pr['number']), queue_name=queue_name)
        if should_continue_loading:
            link_header = parse_link_header(response.headers.get('Link', ''))
            for link in link_header.links:
                if link.rel == 'next':
                    fetch_and_process(link.href)
        return update_time
    update_time = \
        fetch_and_process(get_pulls_base() + "?sort=updated&state=all&direction=desc&per_page=100")
    KVS.put('issues_since', update_time.strftime("%Y-%m-%dT%H:%M:%SZ"))
    return "Done fetching updated GitHub issues"
Example #6
def backfill_prs():
    """
    This function attempts to update every PR ever opened against the repository.
    Because that is expensive, it should only be invoked by admins when bootstrapping a new
    deployment of the PR board.
    """
    # Determine the number of PRs:
    url = get_pulls_base() + "?sort=created&state=all&direction=desc"
    response = raw_github_request(url, oauth_token=oauth_token)
    latest_prs = json.loads(response.content)
    latest_pr_number = int(latest_prs[0]['number'])
    queue = taskqueue.Queue('old-prs')
    update_tasks = []
    for num in reversed(xrange(1, latest_pr_number + 1)):
        update_tasks.append(taskqueue.Task(url=url_for(".update_pr", pr_number=num)))
    # Can only enqueue up to 100 tasks per API call
    async_call_results = []
    for group_of_tasks in chunked(update_tasks, 100):
        async_call_results.append(queue.add_async(group_of_tasks))
    # Block until the async calls are finished:
    for r in async_call_results:
        r.get_result()
    return "Enqueued tasks to backfill %i PRs" % latest_pr_number