def _get_latest_run(self, workflow: dict, branch: str, commit: Optional[str] = None) -> dict:
    """Return the latest completed run of the given workflow, matching the commit if one is given."""
    workflow_id = workflow["id"]
    logger.debug("Fetching workflow runs. workflow_id=%s", workflow_id)

    if commit:
        # page through completed runs until one matches the commit SHA prefix
        paged_workflow_runs = paged(
            self.gh_api.actions.list_workflow_runs, workflow_id, status="completed"
        )
        for page in paged_workflow_runs:
            workflow_run = next(
                filter(lambda r: r["head_sha"].startswith(commit), page.workflow_runs),
                None,
            )
            if workflow_run:
                return workflow_run

    # no commit given (or no match found): fall back to the newest completed run on the branch
    return self.gh_api.actions.list_workflow_runs(
        workflow_id, branch=branch, status="completed"
    ).workflow_runs[0]
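A standalone sketch of the same lookup pattern outside the class, assuming a ghapi client; the owner, repo, and commit prefix below are placeholders:

from ghapi.all import GhApi, paged

gh_api = GhApi(owner="example-owner", repo="example-repo")   # hypothetical repository
workflow_id = gh_api.actions.list_repo_workflows().workflows[0].id
for page in paged(gh_api.actions.list_workflow_runs, workflow_id, status="completed"):
    run = next((r for r in page.workflow_runs if r.head_sha.startswith("abc1234")), None)
    if run is not None:
        print(run.id, run.conclusion)
        break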
def _get_pyfiles(self):
    """Return the files changed by the pull request that are Python sources."""
    pyfiles_pages = paged(
        self.client.pulls.list_files, *self.repo_tuple, self.pull_request["number"]
    )
    pyfiles = [
        [f for f in page if f["filename"].endswith(".py")] for page in pyfiles_pages
    ]
    # flatten the per-page lists into a single list of files
    pyfiles = [item for sublist in pyfiles for item in sublist]
    return pyfiles
def gh_issues():
    """ pytest fixture providing a dictionary with GitHub issue ids as keys
    and their state as value """
    res = {}
    if 'CI' not in os.environ or ('GITHUB_ACTIONS' in os.environ and sys.version_info.minor >= 8):
        try:
            api = GhApi(owner='atmos-cloud-sim-uj', repo='PySDM')
            pages = paged(
                api.issues.list_for_repo,
                owner='atmos-cloud-sim-uj', repo='PySDM',
                state='all', per_page=100
            )
            for page in pages:
                for item in page.items:
                    res[item.number] = item.state
        except ExceptionsHTTP[403]:
            pass
    return res
def gh_issues():
    """ pytest fixture providing a dictionary with github issue ids as keys
    and their state as value """
    res = {}
    if 'CI' not in os.environ or ('GITHUB_ACTIONS' in os.environ and sys.version_info.minor >= 8):
        try:
            api = GhApi(owner='atmos-cloud-sim-uj', repo='PyMPDATA')
            pages = paged(
                api.issues.list_for_repo,
                owner='atmos-cloud-sim-uj', repo='PyMPDATA',
                state='all', per_page=100
            )
            for page in pages:
                for item in page.items:
                    res[item.number] = item.state
        except ExceptionsHTTP[403]:
            pass
    return res
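A hedged sketch of how a test might consume such a fixture (assumes the fixture is registered in conftest.py; the test name and issue number are hypothetical):

import pytest

def test_regression_guard(gh_issues):
    # skip while the hypothetical tracking issue #1234 remains open
    if gh_issues.get(1234) == "open":
        pytest.skip("tracking issue #1234 is still open")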
def check_commits(self):
    """
    For each commit in the PR, check for the following:

    - incorrect summary line formatting
    - missing DCO / Signed-off-by line
    - missing blank line between summary line and message body

    Returns a dict indicating whether each of the above is true for any commit
    in the PR.
    """
    warns = defaultdict(bool)

    commit_pages = paged(
        self.client.pulls.list_commits,
        *self.repo_tuple,
        self.pull_request["number"],
    )
    for page in commit_pages:
        for commit in page:
            msg = commit["commit"]["message"]

            if len(msg) == 0:
                LOG.warning("[-] Zero length commit message; weird")
                continue

            if msg.startswith("Revert") or msg.startswith("Merge"):
                continue

            lines = msg.split("\n")

            # summary line must be followed by a blank line
            if len(lines) < 2 or len(lines[1]) > 0:
                warns["blankln"] = True

            # summary line must carry a "<component>:" prefix
            if ":" not in lines[0]:
                warns["bad_msg"] = True

            # every commit needs a DCO sign-off
            if not re.search(r"Signed-off-by: .* <.*@.*>", msg):
                warns["signoff"] = True

    return warns
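To see what these checks flag, here is a small standalone illustration on a hypothetical commit message:

import re

msg = "zebra: fix crash on shutdown"                       # hypothetical one-line message
lines = msg.split("\n")
print(len(lines) < 2 or len(lines[1]) > 0)                 # True  -> no blank line after summary
print(":" not in lines[0])                                 # False -> summary prefix looks fine
print(not re.search(r"Signed-off-by: .* <.*@.*>", msg))    # True  -> missing DCO sign-off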
import itertools
import os

import dateutil.parser
import numpy as np
from ghapi.all import GhApi, paged, print_summary  # print_summary assumed to be ghapi's debug helper

owner = os.environ.get("OWNER", "yamt")
repo = os.environ.get("REPO", "garbage")

api = GhApi(owner=owner, repo=repo)
api.debug = print_summary

# default: sort=created, direction=desc
# XXX should filter on target branch
# XXX esp-idf often uses an awkward way to merge PRs.
#     how can i deal with it?
#     eg. https://github.com/espressif/esp-idf/pull/8248
# XXX for some reason, state=closed often causes 502
#     for kubernetes/kubernetes.
pgs = paged(api.pulls.list, state="all", per_page=100)
prs = itertools.chain.from_iterable(pgs)
prs = filter(lambda p: p.merged_at is not None, prs)
prs = itertools.islice(prs, 500)

day_in_sec = 24 * 60 * 60.0
x = dict()
for p in prs:
    created_at = dateutil.parser.isoparse(p.created_at)
    merged_at = dateutil.parser.isoparse(p.merged_at)
    d = merged_at - created_at
    author = p.user.login
    print(f"{p.number} {author} {d}")
    if author not in x:
        x[author] = []
    x[author].append(d.total_seconds() / day_in_sec)
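numpy is imported above but unused in the excerpt; a plausible continuation that summarises the per-author merge times collected in x could look like this (a sketch only, since the original script's reporting is not shown):

# per-author time-to-merge summary, in days
for author, days in sorted(x.items(), key=lambda kv: len(kv[1]), reverse=True):
    arr = np.array(days)
    print(f"{author}: n={arr.size} median={np.median(arr):.1f} mean={arr.mean():.1f} max={arr.max():.1f}")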
def add_labels(self):
    """
    Label a pull request using component directories present in the commit
    message subject lines.
    """
    # directory -> label
    label_map = {
        "alpine": "packaging",
        "babeld": "babel",
        "bfdd": "bfd",
        "bgpd": "bgp",
        "debian": "packaging",
        "doc": "documentation",
        "docker": "docker",
        "eigrpd": "eigrp",
        "fpm": "fpm",
        "isisd": "isis",
        "ldpd": "ldp",
        "lib": "libfrr",
        "nhrpd": "nhrp",
        "ospf6d": "ospfv3",
        "ospfd": "ospf",
        "pbrd": "pbr",
        "pimd": "pim",
        "pkgsrc": "packaging",
        "python": "clippy",
        "redhat": "packaging",
        "ripd": "rip",
        "ripngd": "ripng",
        "sharpd": "sharp",
        "snapcraft": "packaging",
        "solaris": "packaging",
        "staticd": "staticd",
        "tests": "tests",
        "tools": "tools",
        "vtysh": "vtysh",
        "vrrpd": "vrrp",
        "watchfrr": "watchfrr",
        "yang": "yang",
        "zebra": "zebra",
        # files
        "configure.ac": "build",
        "makefile.am": "build",
        "bootstrap.sh": "build",
    }

    labels = set()

    commit_pages = paged(
        self.client.pulls.list_commits,
        *self.repo_tuple,
        self.pull_request["number"],
    )
    for page in commit_pages:
        for commit in page:
            msg = commit["commit"]["message"]

            # extract the "<component>[, <component>...]:" subject prefix
            match = re.match(r"^([^:\n]+):", msg)
            if match:
                lbls = match.groups()[0].split(",")
                lbls = map(lambda x: x.strip(), lbls)
                lbls = map(lambda x: x.lower(), lbls)
                lbls = filter(lambda x: x in label_map, lbls)
                lbls = map(lambda x: label_map[x], lbls)
                labels = labels | set(lbls)

            # crude heuristic for bug fixes
            lines = msg.split("\n")
            if lines[0].find(" fix ") != -1 or msg.find("Fixes:") != -1:
                labels.add("bugfix")

    if labels:
        self.client.issues.add_labels(
            *self.repo_tuple, self.pull_request["number"], list(labels)
        )
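As an illustration of the subject-line parsing above, a standalone example with a hypothetical commit message (only a small subset of label_map is reproduced here):

import re

label_map = {"ospfd": "ospf", "zebra": "zebra"}  # subset of the directory -> label map
msg = "ospfd, zebra: fix route redistribution\n\nSigned-off-by: Jane Doe <jane@example.com>"

match = re.match(r"^([^:\n]+):", msg)
prefixes = [p.strip().lower() for p in match.groups()[0].split(",")]
labels = {label_map[p] for p in prefixes if p in label_map}
print(labels)  # {'ospf', 'zebra'}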