def get_pull_cached(
    self, repo: Repository, number: int, obj_updated_at: Optional[datetime] = None
) -> PullRequest:
    """Return PR ``number`` from the pickle cache when fresh, else from the API.

    A freshly fetched PR is written back to the cache file.  API calls are
    retried up to ``self.retries`` times when the rate limit is exceeded.

    :param repo: repository to fetch the pull request from
    :param number: pull request number
    :param obj_updated_at: freshness reference passed to ``_is_cache_updated``;
        ``None`` lets that helper apply its own default policy
    :raises RateLimitExceededException: when every retry hit the rate limit
    :raises ValueError: if ``self.retries`` is less than 1
    """
    cache_file = self.cache_path / f"pr-{number}.pickle"
    if cache_file.is_file():
        is_updated, cached_pr = self._is_cache_updated(cache_file, obj_updated_at)
        if is_updated:
            logger.debug("Getting PR #%s from cache", number)
            return cached_pr  # type: ignore
    # Fix: with retries < 1 the loop below never runs and `pr` was then
    # referenced while unbound (UnboundLocalError); fail explicitly instead.
    if self.retries < 1:
        raise ValueError("self.retries must be at least 1")
    logger.debug("Getting PR #%s from API", number)
    for attempt in range(self.retries):
        try:
            pr = repo.get_pull(number)
            break
        except RateLimitExceededException:
            if attempt == self.retries - 1:
                # Out of retries — surface the rate-limit error to the caller.
                raise
            self.sleep_on_rate_limit()
    logger.debug("Caching PR #%s from API in %s", number, cache_file)
    with open(cache_file, "wb") as prfd:
        self.dump(pr, prfd)  # type: ignore
    return pr
def get_pr_from_commit(repo: Repository, sha: str) -> Optional[PullRequest]:
    """Find the open pull request whose head commit is ``sha``.

    First consults the Redis head-sha -> PR-number cache; on a miss (or a
    stale cache entry) it scans the repository's pull requests, refreshing
    the cache for every head sha seen along the way.

    :param repo: repository to search
    :param sha: head commit sha to look up
    :return: the matching pull request, or ``None`` if none exists
    """
    cached_number = redis.get_int(f'github:head:{sha}')
    if cached_number:
        try:
            candidate = repo.get_pull(cached_number)
        except UnknownObjectException:
            # Cached PR number no longer resolves — fall through to the scan.
            pass
        else:
            # Only trust the cache hit if the PR still points at this sha
            # and hasn't been closed/merged in the meantime.
            if candidate.head.sha == sha and candidate.state == 'open':
                return candidate
    for candidate in repo.get_pulls():
        head = candidate.head.sha
        # Opportunistically cache every head sha for one hour.
        redis.store(f'github:head:{head}', candidate.number, ex=3600)
        if head == sha:
            return candidate
    return None
async def merge(self, context: commands.Context, repo: Repository, pr_id: int):
    """Two-step merge command for a pull request.

    First invocation from a user records a pending confirmation; a second
    valid invocation approves and squash-merges the PR.  Unmergeable PRs
    (failing checks or not open) are rejected outright.
    """
    pr = repo.get_pull(pr_id)
    embed = self.as_embed([pr])
    # Keep evaluation order: checks first, then open-state.
    mergeable = self.all_checks_passed(embed) and pr.state == "open"
    if not mergeable:
        await context.send("Bruh. Come on. I can't merge this garbage.",
                           embed=embed)
        return
    if not self.has_valid_request(pr_id, context.author.id):
        # No confirmation on record yet — ask and remember who asked, when.
        await context.send(
            "Bruh, that'll merge this god-awful pull request... are you sure you trust it? Only Tom would push this...",
            embed=embed)
        self.merge_confirmations[pr_id] = MergeConfirmation(
            context.author.id, context.message.created_at)
        return
    # Confirmed: drop the pending confirmation, approve, and squash-merge.
    self.merge_confirmations.pop(pr_id, None)
    pr.create_review(body="YOLO", event="APPROVE")
    pr.merge(commit_title=pr.title, commit_message="YOLO",
             merge_method="squash")
    await context.send("Welp. See you on the other side, brother.")
def get_pull_cached(repo: Repository, number: int,
                    updated_at: Optional[datetime] = None) -> PullRequest:
    """Fetch pull request ``number``, using an on-disk pickle cache.

    The cache file is considered fresh when its mtime is newer than
    ``updated_at`` (default: one hour ago).  On a miss the PR is fetched
    from the API — retrying indefinitely on rate limiting — and cached.

    :param repo: repository to fetch the pull request from
    :param number: pull request number
    :param updated_at: staleness cutoff; cache entries older than this are
        refetched.  ``None`` means "accept entries up to one hour old".
    """
    pr_cache_file = p.join(CACHE_PATH, f"{number}.pickle")
    if updated_at is None:
        # Bug fix: the original used `- timedelta(hours=-1)`, i.e. one hour
        # in the FUTURE, so `cache_updated > updated_at` was never true and
        # the cache was effectively dead.  Intended cutoff is one hour ago.
        updated_at = datetime.now() - timedelta(hours=1)
    if p.isfile(pr_cache_file):
        cache_updated = datetime.fromtimestamp(p.getmtime(pr_cache_file))
        if cache_updated > updated_at:
            with open(pr_cache_file, "rb") as prfd:
                return GitHub.load(prfd)  # type: ignore
    # Retry the API call until it succeeds; only rate-limit errors are
    # swallowed, anything else propagates.
    while True:
        try:
            pr = repo.get_pull(number)
            break
        except RateLimitExceededException:
            sleep_on_rate_limit()
    with open(pr_cache_file, "wb") as prfd:
        GitHub.dump(pr, prfd)  # type: ignore
    return pr
def generate_description(item: PullRequest, repo: Repository) -> Optional[Description]:
    """Build a changelog ``Description`` for a pull request.

    Backport PRs (branch ``backport/<...>/<number>``) are resolved to their
    source PR before the body is parsed.  The body is scanned for a
    "Changelog category" header and a "Short description"/"Changelog entry"
    header; the text under them becomes the category and entry.

    Returns ``None`` for categories excluded from the changelog
    (docs / non-significant / "not for changelog"); otherwise a
    ``Description`` with placeholder category/entry when the body lacks them.
    """
    backport_number = item.number
    if item.head.ref.startswith("backport/"):
        # Backport branches look like "backport/<target>/<source-pr-number>";
        # parse the category/entry from the source PR instead.
        branch_parts = item.head.ref.split("/")
        if len(branch_parts) == 3:
            item = repo.get_pull(int(branch_parts[-1]))
        else:
            logging.warning(
                "The branch %s doesn't match backport template, using PR %s as is",
                item.head.ref,
                item.number,
            )
    description = item.body
    # Don't skip empty lines because they delimit parts of description
    lines = [
        x.strip()
        for x in (description.split("\n") if description else [])
    ]
    # Collapse internal whitespace runs to single spaces.
    lines = [re.sub(r"\s+", " ", ln) for ln in lines]
    category = ""
    entry = ""
    if lines:
        i = 0
        while i < len(lines):
            if re.match(r"(?i)^[#>*_ ]*change\s*log\s*category", lines[i]):
                i += 1
                if i >= len(lines):
                    break
                # Can have one empty line between header and the category itself.
                # Filter it out.
                if not lines[i]:
                    i += 1
                    if i >= len(lines):
                        break
                # Strip leading list-bullet markup from the category line.
                category = re.sub(r"^[-*\s]*", "", lines[i])
                i += 1
            elif re.match(
                    r"(?i)^[#>*_ ]*(short\s*description|change\s*log\s*entry)",
                    lines[i]):
                i += 1
                # Can have one empty line between header and the entry itself.
                # Filter it out.
                if i < len(lines) and not lines[i]:
                    i += 1
                # All following lines until empty one are the changelog entry.
                entry_lines = []
                while i < len(lines) and lines[i]:
                    entry_lines.append(lines[i])
                    i += 1
                entry = " ".join(entry_lines)
            else:
                i += 1
    if not category:
        # Shouldn't happen, because description check in CI should catch such PRs.
        # Fall through, so that it shows up in output and the user can fix it.
        category = "NO CL CATEGORY"
    # Filter out the PR categories that are not for changelog.
    if re.match(
            r"(?i)doc|((non|in|not|un)[-\s]*significant)|(not[ ]*for[ ]*changelog)",
            category,
    ):
        return None
    if backport_number != item.number:
        # `item` now points at the source PR; record which PR backported it.
        entry = f"Backported in #{backport_number}: {entry}"
    if not entry:
        # Shouldn't happen, because description check in CI should catch such PRs.
        category = "NO CL ENTRY"
        entry = "NO CL ENTRY: '" + item.title + "'"
    entry = entry.strip()
    # Normalize: every changelog entry ends with a period.
    if entry[-1] != ".":
        entry += "."
    # Snap fuzzy-matching category text to the canonical category name.
    for c in categories_preferred_order:
        if ratio(category.lower(), c.lower()) >= 90:
            category = c
            break
    return Description(item.number, item.user, item.html_url, entry, category)