# Example #1
async def timeout_awaiting_merger(gh: GitHubAPI, token: str,
                                  repository_name: str) -> None:
    """Process timeouts for open PRs labelled ``awaiting_merger``.

    Two passes over GitHub search results, both sorted oldest-activity
    first so we can stop at the first result that is still fresh:

    1. PRs already carrying ``timeout_pending`` whose grace period has
       elapsed are moved back to ``needs_merger``.
    2. PRs without ``timeout_pending`` that have been idle past the
       timeout get a reminder comment posted.
    """
    def _idle_seconds(pr: dict) -> float:
        # GitHub's "updated_at" is an ISO-8601 timestamp with a UTC
        # offset; parse it timezone-aware and measure idle time from now.
        touched = datetime.strptime(pr["updated_at"], "%Y-%m-%dT%H:%M:%S%z")
        return (datetime.now(timezone.utc) - touched).total_seconds()

    print("Timing out awaiting_merger PRs")
    warned_prs = gh_util.search_issues(
        gh,
        token,
        query_parameters=[
            f"repo:{repository_name}",
            "is:open",
            "is:pr",
            "label:timeout_pending",
            "label:awaiting_merger",
            "label:marvin",
            "sort:updated-asc",  # stale first
        ],
    )
    async for pr in warned_prs:
        if _idle_seconds(pr) < AFTER_WARNING_SECONDS:
            # Sorted stale-first: every following result is fresher, so
            # nothing further can be past the grace period either.
            break
        print(
            f"awaiting_merger -> needs_merger: #{pr['number']} ({pr['title']})"
        )
        await set_issue_status(pr, "needs_merger", gh, token)

    print("Posting warnings in awaiting_merger PRs")
    quiet_prs = gh_util.search_issues(
        gh,
        token,
        query_parameters=[
            f"repo:{repository_name}",
            "is:open",
            "is:pr",
            "label:awaiting_merger",
            "-label:timeout_pending",
            "label:marvin",
            "sort:updated-asc",
        ],
    )
    async for pr in quiet_prs:
        # NOTE(review): this pass reuses AWAITING_REVIEWER_TIMEOUT_SECONDS
        # for merger PRs — presumably the same timeout applies; confirm.
        if _idle_seconds(pr) < AWAITING_REVIEWER_TIMEOUT_SECONDS:
            break
        print(
            f"awaiting_merger reminder: #{pr['number']} ({pr['title']})")
        await post_comment(
            gh,
            token,
            pr["comments_url"],
            MERGE_REMINDER_TEXT,
        )
# Example #2
async def assign_reviewers(gh: GitHubAPI, token: str,
                           repository_name: str) -> None:
    """Request reviews for open PRs labelled ``needs_reviewer``.

    Searches the repository for marvin-managed PRs awaiting a reviewer
    (oldest first), asks the team module for a candidate, and — when one
    is found — requests the review and advances the PR's status to
    ``awaiting_reviewer``. PRs with no available reviewer are skipped.
    """
    print("Assigning reviewers to needs_reviewer PRs")
    pending = gh_util.search_issues(
        gh,
        token,
        query_parameters=[
            f"repo:{repository_name}",
            "is:open",
            "is:pr",
            "label:needs_reviewer",
            "label:marvin",
            "sort:created-asc",  # oldest first
        ],
    )
    async for pr in pending:
        # merge_permission_needed=False: any reviewer will do, not only
        # someone who could also merge.
        reviewer = await team.get_reviewer(
            gh, token, pr, merge_permission_needed=False
        )
        if reviewer is None:
            print(f"No reviewer found for #{pr['number']}.")
            continue
        print(f"Requesting review from {reviewer} for #{pr['number']}.")
        await gh_util.request_review_fallback(
            gh, token,
            pr["pull_request"]["url"],
            pr["comments_url"],
            reviewer,
        )
        await set_issue_status(pr, "awaiting_reviewer", gh, token)
# Example #3
    async def request_allowed(self, gh: gh_aiohttp.GitHubAPI, token: str) -> bool:
        """Determine whether a given active PR limit over a timeframe has already been reached.

        This searches GitHub for recently active nixpkgs PRs the user is involved
        in (ignoring any activity after the PR was merged) and compares the number
        of results to a limit. This is useful when you want to only get a request
        for new reviews when your current open-source work "plate" is not yet full.

        Returns True when the user is under their limit, False otherwise.
        """
        # Fast path: a previous search already told us the limit is
        # exceeded until some future instant.
        if datetime.now(timezone.utc) < self.cached_no_until:
            print(
                f"Cached: Limit ({self.limit}/{self.days}d) exceeded until {self.cached_no_until}."
            )
            return False

        # GitHub rate limits us to 30 searches per minute. This prevents us
        # from exceeding that limit. Not pretty but it works for now. Shouldn't
        # slow the reviewer search down too much due to caching.
        await asyncio.sleep(3)
        window_start = (
            datetime.now(timezone.utc) - timedelta(days=self.days)
        ).strftime("%Y-%m-%dT%H:%M:%S+00:00")
        recent_activity = gh_util.search_issues(
            gh,
            token,
            query_parameters=[
                "repo:NixOS/nixpkgs",
                f"involves:{self.gh_name}",
                f"updated:>={window_start}",
                f"-merged:<{window_start}",
            ],
        )
        seen = 0
        async for issue in recent_activity:
            seen += 1
            # Only the result that exactly reaches the limit matters; an
            # exact comparison also means a limit of 0 never triggers here.
            if seen != self.limit:
                continue
            boundary_updated = datetime.strptime(
                issue["updated_at"], "%Y-%m-%dT%H:%M:%S%z"
            )
            # Remember when the PR that pushed us over the limit will "fall
            # out" of the time window.
            self.cached_no_until = boundary_updated + timedelta(days=self.days)
            print(
                f"Limit ({self.limit}/{self.days}d) exceeded until {self.cached_no_until}."
            )
            return False

        return True