Example #1
    def rerun_github_actions(self) -> None:
        workflow_ids = []
        for item in self.head_commit()["statusCheckRollup"]["contexts"]["nodes"]:
            if "checkSuite" in item and item["conclusion"] == "FAILURE":
                workflow_id = item["checkSuite"]["workflowRun"]["databaseId"]
                workflow_ids.append(workflow_id)

        workflow_ids = list(set(workflow_ids))
        logging.info(
            f"Rerunning GitHub Actions workflows with IDs: {workflow_ids}")
        actions_github = GitHubRepo(user=self.github.user,
                                    repo=self.github.repo,
                                    token=GH_ACTIONS_TOKEN)
        for workflow_id in workflow_ids:
            if self.dry_run:
                logging.info(f"Dry run, not restarting workflow {workflow_id}")
            else:
                try:
                    actions_github.post(
                        f"actions/runs/{workflow_id}/rerun-failed-jobs",
                        data={})
                except RuntimeError as e:
                    logging.exception(e)
                    # Ignore errors about jobs that are part of the same workflow to avoid
                    # having to figure out which jobs are in which workflows ahead of time
                    if "The workflow run containing this job is already running" in str(
                            e):
                        pass
                    else:
                        raise e
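
This snippet relies on a `GitHubRepo` helper (and a `GH_ACTIONS_TOKEN` that is allowed to restart workflow runs) that is not shown here. Below is a minimal sketch of the `post` method it calls, built on `urllib` and assuming token authentication; the real wrapper in the repository may differ, including in how it surfaces API errors.

import json
from urllib import error, request


class GitHubRepo:
    """Minimal sketch of the REST wrapper assumed above; the real class may differ."""

    def __init__(self, user: str, repo: str, token: str):
        self.token = token
        self.base = f"https://api.github.com/repos/{user}/{repo}/"

    def post(self, url: str, data: dict):
        # POST to an endpoint relative to the repo, e.g. actions/runs/<id>/rerun-failed-jobs
        req = request.Request(
            self.base + url,
            data=json.dumps(data).encode(),
            method="POST",
            headers={
                "Authorization": f"Bearer {self.token}",
                "Accept": "application/vnd.github+json",
            },
        )
        try:
            with request.urlopen(req) as response:
                body = response.read().decode()
        except error.HTTPError as e:
            # Surface the API error text so callers can match on the message,
            # as rerun_github_actions does above (an assumption of this sketch)
            raise RuntimeError(f"POST {url} failed: {e.read().decode()}") from e
        return json.loads(body) if body.strip() else {}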
Example #2
    def __init__(
        self,
        number: int,
        owner: str,
        repo: str,
        dry_run: bool = False,
        raw_data: Optional[Dict[str, Any]] = None,
    ):
        self.owner = owner
        self.number = number
        self.repo_name = repo
        self.dry_run = dry_run

        if dry_run and raw_data:
            # In test mode there is no need to fetch anything
            self.raw = raw_data
            self.github = None
        else:
            self.github = GitHubRepo(user=owner,
                                     repo=repo,
                                     token=os.environ["GITHUB_TOKEN"])
            if os.getenv("DEBUG", "0") == "1":
                # For local runs fill in the requested data but cache it for
                # later use
                cached_path = Path("pr.json")
                if not cached_path.exists():
                    self.raw = self.fetch_data()
                    with open(cached_path, "w") as f:
                        json.dump(self.raw, f, indent=2)
                else:
                    with open(cached_path) as f:
                        self.raw = json.load(f)
            else:
                # Usual path, fetch the PR's data based on the number from
                # GitHub
                self.raw = self.fetch_data()

        def checker(obj, parent_key):
            """
            Verify that any paged results don't have extra data (if so the bot
            may still work since most relevant comments will be more recent)
            """
            if parent_key == "pageInfo":
                if obj.get("hasPreviousPage", False):
                    warnings.warn(
                        f"Found {obj} with a previous page, bot may be missing data"
                    )
                if obj.get("hasNextPage", False):
                    warnings.warn(
                        f"Found {obj} with a next page, bot may be missing data"
                    )

        walk(self.raw, checker)

        logging.info(f"Verified data, running with PR {to_json_str(self.raw)}")
Example #3
    def check_pr_title():
        remote = git(["config", "--get", f"remote.{args.remote}.url"])
        user, repo = parse_remote(remote)

        if args.pr_title:
            title = args.pr_title
        else:
            github = GitHubRepo(token=os.environ["TOKEN"],
                                user=user,
                                repo=repo)
            pr = github.get(f"pulls/{args.pr}")
            title = pr["title"]
        print("pr title:", title)
        return title.startswith("[skip ci]")
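
Both `git` (a subprocess wrapper) and `parse_remote` are assumed here. A hedged sketch of `parse_remote` that splits a GitHub remote URL into `(user, repo)`, accepting both SSH and HTTPS remotes:

def parse_remote(remote: str):
    # Sketch of the helper assumed above; handles both
    # git@github.com:apache/tvm.git and https://github.com/apache/tvm.git
    if remote.startswith("git@"):
        path = remote.split(":", 1)[1]
    else:
        path = remote.split("github.com/", 1)[1]
    user, repo = path.rstrip("/").split("/", 1)
    if repo.endswith(".git"):
        repo = repo[: -len(".git")]
    return user, repo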
Example #4
    def check_pr_title():
        remote = git(["config", "--get", f"remote.{args.remote}.url"])
        user, repo = parse_remote(remote)

        if args.pr_title:
            title = args.pr_title
        else:
            github = GitHubRepo(token=os.environ["TOKEN"],
                                user=user,
                                repo=repo)
            pr = github.get(f"pulls/{args.pr}")
            title = pr["title"]
        logging.info(f"pr title: {title}")
        tags = tags_from_title(title)
        logging.info(f"Found title tags: {tags}")
        return "skip ci" in tags
Example #5
def fetch_pr_data(args, cache):
    github = GitHubRepo(user=user, repo=repo, token=GITHUB_TOKEN)

    if args.from_commit is None or args.to_commit is None:
        print(
            "--from-commit and --to-commit must be specified if --skip-query is not used"
        )
        exit(1)

    i = 0
    page_size = 80
    cursor = f"{args.from_commit} {i}"

    while True:
        r = github.graphql(
            query=PRS_QUERY,
            variables={
                "owner": user,
                "name": repo,
                "after": cursor,
                "pageSize": page_size,
            },
        )
        data = r["data"]["repository"]["defaultBranchRef"]["target"]["history"]
        if not data["pageInfo"]["hasNextPage"]:
            break
        cursor = data["pageInfo"]["endCursor"]
        results = data["nodes"]

        to_add = []
        stop = False
        for node in results:
            if node["oid"] == args.to_commit:
                print(f"Found {node['oid']}, stopping")
                stop = True
                break
            else:
                to_add.append(node)

        oids = [node["oid"] for node in to_add]
        print(oids)
        append_and_save(to_add, cache)
        if stop:
            break
        print(i)
        i += page_size
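
`PRS_QUERY`, `user`, `repo`, `GITHUB_TOKEN`, and `append_and_save` come from elsewhere in the script. A sketch of `append_and_save`, assuming `cache` is a `pathlib.Path` pointing at a JSON file (the real cache handling may differ):

import json
from pathlib import Path


def append_and_save(items, cache: Path):
    # Append newly fetched history nodes to a JSON cache on disk
    # (sketch; assumes `cache` is a Path to a JSON list)
    existing = json.loads(cache.read_text()) if cache.exists() else []
    existing.extend(items)
    cache.write_text(json.dumps(existing, indent=2))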
Example #6
        f"  time cutoff: {wait_time}\n"
        f"  number cutoff: {cutoff_pr_number}\n"
        f"  dry run: {args.dry_run}\n"
        f"  user/repo: {user}/{repo}\n",
        end="",
    )

    # [slow rollout]
    # This code is here to gate this feature to a limited set of people before
    # deploying it for everyone to avoid spamming in the case of bugs or
    # ongoing development.
    if args.allowlist:
        author_allowlist = args.allowlist.split(",")
    else:
        github = GitHubRepo(token=os.environ["GITHUB_TOKEN"],
                            user=user,
                            repo=repo)
        allowlist_issue = github.get("issues/9983")
        author_allowlist = set(find_reviewers(allowlist_issue["body"]))

    if args.pr_json:
        r = json.loads(args.pr_json)
    else:
        q = prs_query(user, repo)
        r = github.graphql(q)

    now = datetime.datetime.utcnow()
    if args.now:
        now = datetime.datetime.strptime(args.now, GIT_DATE_FORMAT)

    # Commit the updated Jenkinsfiles and push them to the bot's branch
    if args.dry_run:
        logging.info("Dry run, would have committed Jenkinsfile")
    else:
        logging.info(f"Creating git commit")
        git(["checkout", "-B", BRANCH])
        git(["add", str(JENKINSFILE.relative_to(REPO_ROOT))])
        git(["add", str(GENERATED_JENKINSFILE.relative_to(REPO_ROOT))])
        git(["config", "user.name", "tvm-bot"])
        git(["config", "user.email", "*****@*****.**"])
        git(["commit", "-m", message])
        git(["push", "--set-upstream", args.remote, BRANCH, "--force"])

    logging.info(f"Sending PR to GitHub")
    github = GitHubRepo(user=user, repo=repo, token=GITHUB_TOKEN)
    data = {
        "title": title,
        "body": body,
        "head": BRANCH,
        "base": "main",
        "maintainer_can_modify": True,
    }
    url = "pulls"
    if args.dry_run:
        logging.info(f"Dry run, would have sent {data} to {url}")
    else:
        try:
            github.post(url, data=data)
        except error.HTTPError as e:
            # Ignore the exception if the PR already exists (which gives a 422); the
            # force push above will already have refreshed the open PR
            if e.code != 422:
                raise
            logging.info("PR already exists, ignoring error")
            logging.exception(e)
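
`find_reviewers`, used above to build the author allowlist from the tracking issue, is another helper defined elsewhere. A sketch that simply collects @-mentions from the issue body (the real implementation may filter differently):

import re


def find_reviewers(body: str):
    # Collect @username mentions (GitHub usernames are alphanumeric plus hyphens)
    # and de-duplicate them while preserving order
    return list(dict.fromkeys(re.findall(r"@([A-Za-z0-9-]+)", body)))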