def main(
    pull_request: str,
    github: ghstack.github.GitHubEndpoint,
    sh: Optional[ghstack.shell.Shell] = None,
    close: bool = False,
) -> None:
    """Resolve a pull request and, when ``close`` is set, close it.

    The PR is looked up via the GitHub GraphQL API (which also validates
    that it exists); the close itself is performed with the
    ``closePullRequest`` mutation.
    """
    params = ghstack.github_utils.parse_pull_request(pull_request)
    # Resolve the PR's GraphQL node id; this also fails fast if the PR
    # does not exist.
    lookup = github.graphql(
        """
        query ($owner: String!, $name: String!, $number: Int!) {
          repository(name: $name, owner: $owner) {
            pullRequest(number: $number) {
              id
            }
          }
        }
        """,
        **params)
    node_id = lookup["data"]["repository"]["pullRequest"]["id"]

    if not close:
        return

    logging.info("Closing {owner}/{name}#{number}".format(**params))
    github.graphql(
        """
        mutation ($input: ClosePullRequestInput!) {
          closePullRequest(input: $input) {
            clientMutationId
          }
        }
        """,
        input={
            "pullRequestId": node_id,
            "clientMutationId": "A",
        })
def lookup_pr_to_orig_ref(github: ghstack.github.GitHubEndpoint,
                          *,
                          github_url: Optional[str] = None,
                          owner: str,
                          name: str,
                          number: int) -> str:
    """Return the ghstack '/orig' ref name for a pull request.

    Queries the PR's head ref via GraphQL and rewrites a trailing
    '/head' segment to '/orig'.  If the head ref has no '/head' suffix
    it is returned unchanged, with a warning that it does not look like
    a ghstack-managed branch.
    """
    response = github.graphql(
        """
        query ($owner: String!, $name: String!, $number: Int!) {
          repository(name: $name, owner: $owner) {
            pullRequest(number: $number) {
              headRefName
            }
          }
        }
        """,
        owner=owner,
        name=name,
        number=number,
    )
    branch = response["data"]["repository"]["pullRequest"]["headRefName"]
    assert isinstance(branch, str)
    target = re.sub(r'/head$', '/orig', branch)
    if target == branch:
        # Nothing was rewritten, so this is probably not a ghstack branch.
        logging.warning(
            "The ref {} doesn't look like a ghstack reference".format(
                branch))
    return target
def main(
    pull_request: str,
    github: ghstack.github.GitHubEndpoint,
    sh: ghstack.shell.Shell,
    remote_name: str,
) -> None:
    """Check out the ghstack '/orig' branch corresponding to a pull request.

    Resolves the PR's head ref via the GitHub GraphQL API, rewrites the
    trailing '/head' segment to '/orig', then fetches from ``remote_name``
    and checks out the remote-tracking ref.
    """
    params = ghstack.github_utils.parse_pull_request(pull_request)
    response = github.graphql(
        """
        query ($owner: String!, $name: String!, $number: Int!) {
          repository(name: $name, owner: $owner) {
            pullRequest(number: $number) {
              headRefName
            }
          }
        }
        """,
        **params)
    branch = response["data"]["repository"]["pullRequest"]["headRefName"]
    target = re.sub(r'/head$', '/orig', branch)
    if target == branch:
        # Nothing was rewritten: probably not a branch ghstack created.
        logging.warning(
            "The ref {} doesn't look like a ghstack reference".format(
                branch))
    # TODO: Handle remotes correctly too (so this subsumes hub)
    sh.git("fetch", "--prune", remote_name)
    sh.git("checkout", remote_name + "/" + target)
def main(
    pull_request: str,
    github: ghstack.github.GitHubEndpoint,
    sh: Optional[ghstack.shell.Shell] = None,
    close: bool = False,
) -> None:
    """Resolve a pull request URL and, when ``close`` is set, close it.

    Raises:
        RuntimeError: if ``pull_request`` does not match the expected
            PR URL pattern (``RE_PR_URL``).
    """
    parsed = RE_PR_URL.match(pull_request)
    if parsed is None:
        raise RuntimeError("Did not understand PR argument. PR must be URL")

    owner = parsed.group("owner")
    repo = parsed.group("repo")
    number = int(parsed.group("number"))

    # Fetch the PR's GraphQL node id; also validates that the PR exists.
    node_id = github.graphql(
        """
        query ($owner: String!, $repo: String!, $number: Int!) {
          repository(name: $repo, owner: $owner) {
            pullRequest(number: $number) {
              id
            }
          }
        }
        """,
        owner=owner,
        repo=repo,
        number=number,
    )["data"]["repository"]["pullRequest"]["id"]

    if not close:
        return

    logging.info("Closing {}/{}#{}".format(owner, repo, number))
    github.graphql(
        """
        mutation ($input: ClosePullRequestInput!) {
          closePullRequest(input: $input) {
            clientMutationId
          }
        }
        """,
        input={
            "pullRequestId": node_id,
            "clientMutationId": "A",
        })
def get_github_repo_info(
    *,
    github: ghstack.github.GitHubEndpoint,
    sh: ghstack.shell.Shell,
    repo_owner: Optional[str] = None,
    repo_name: Optional[str] = None,
    github_url: str,
    remote_name: str,
) -> GitHubRepoInfo:
    """Fetch basic repository metadata (id, fork status, default branch).

    If ``repo_owner``/``repo_name`` are not both supplied, they are
    derived from the git remote ``remote_name`` of the checkout in ``sh``.
    """
    if repo_owner is not None and repo_name is not None:
        name_with_owner = {"owner": repo_owner, "name": repo_name}
    else:
        # At least one piece is missing; grovel in the git remotes instead.
        name_with_owner = get_github_repo_name_with_owner(
            sh=sh,
            github_url=github_url,
            remote_name=remote_name,
        )

    # TODO: Cache this guy
    result = github.graphql(
        """
        query ($owner: String!, $name: String!) {
          repository(name: $name, owner: $owner) {
            id
            isFork
            defaultBranchRef {
              name
            }
          }
        }""",
        owner=name_with_owner["owner"],
        name=name_with_owner["name"],
    )
    repo_data = result["data"]["repository"]
    return {
        "name_with_owner": name_with_owner,
        "id": repo_data["id"],
        "is_fork": repo_data["isFork"],
        "default_branch": repo_data["defaultBranchRef"]["name"],
    }
def main(msg: Optional[str],
         username: str,
         github: ghstack.github.GitHubEndpoint,
         update_fields: bool = False,
         sh: Optional[ghstack.shell.Shell] = None,
         stack_header: str = STACK_HEADER,
         repo_owner: Optional[str] = None,
         repo_name: Optional[str] = None,
         short: bool = False,
         force: bool = False,
         no_skip: bool = False,
         draft: bool = False,
         github_url: str = "github.com",
         default_branch: str = "master",
         remote_name: str = "origin"
         ) -> List[Optional[DiffMeta]]:
    """Submit the commits between HEAD and its merge-base with the
    remote default branch as a stack of pull requests.

    Determines the target repository (from explicit arguments or by
    parsing the git remote URL), refuses to operate on forks, computes
    the commit stack, and hands everything to ``Submitter`` to create or
    update the PRs.

    Returns the per-diff metadata collected by the submitter, earliest
    commit first.
    """
    if sh is None:
        # Use CWD
        sh = ghstack.shell.Shell()

    if repo_owner is None or repo_name is None:
        # Grovel in remotes to figure it out
        remote_url = sh.git("remote", "get-url", remote_name)
        # while True is used as a one-shot breakable block: each match
        # attempt breaks out on success; falling through all of them raises.
        while True:
            # SSH-style remote, e.g. git@github.com:owner/name.git
            match = r'^git@{github_url}:([^/]+)/([^.]+)(?:\.git)?$'.format(
                github_url=github_url
            )
            m = re.match(match, remote_url)
            if m:
                repo_owner_nonopt = m.group(1)
                repo_name_nonopt = m.group(2)
                break
            # HTTPS-style remote, e.g. https://github.com/owner/name
            search = r'{github_url}/([^/]+)/([^.]+)'.format(
                github_url=github_url
            )
            m = re.search(search, remote_url)
            if m:
                repo_owner_nonopt = m.group(1)
                repo_name_nonopt = m.group(2)
                break
            raise RuntimeError(
                "Couldn't determine repo owner and name from url: {}"
                .format(remote_url))
    else:
        repo_owner_nonopt = repo_owner
        repo_name_nonopt = repo_name

    # TODO: Cache this guy
    repo = github.graphql(
        """
        query ($owner: String!, $name: String!) {
          repository(name: $name, owner: $owner) {
            id
            isFork
          }
        }""",
        owner=repo_owner_nonopt,
        name=repo_name_nonopt)["data"]["repository"]

    if repo["isFork"]:
        raise RuntimeError(
            "Cowardly refusing to upload diffs to a repository that is a "
            "fork. ghstack expects '{}' of your Git checkout to point "
            "to the upstream repository in question. If your checkout does "
            "not comply, please either adjust your remotes (by editing "
            ".git/config) or change the 'remote_name' field in your .ghstackrc "
            "file to point to the correct remote. If this message is in "
            "error, please register your complaint on GitHub issues (or edit "
            "this line to delete the check above).".format(remote_name))

    repo_id = repo["id"]
    sh.git("fetch", remote_name)
    base = GitCommitHash(sh.git("merge-base",
                                f"{remote_name}/{default_branch}", "HEAD"))

    # compute the stack of commits to process (reverse chronological order),
    stack = ghstack.git.parse_header(
        sh.git("rev-list", "--header", "^" + base, "HEAD"),
        github_url,
    )

    # compute the base commit
    # 'base^@' denotes all parents of base, so excluding them and listing
    # base yields exactly the base commit's header.
    base_obj = ghstack.git.split_header(
        sh.git("rev-list", "--header", "^" + base + "^@", base))[0]

    assert len(stack) > 0

    # Record the tip commit for crash/status reporting.
    ghstack.logging.record_status(
        "{} \"{}\"".format(stack[0].oid[:9], stack[0].title))

    submitter = Submitter(github=github,
                          sh=sh,
                          username=username,
                          repo_owner=repo_owner_nonopt,
                          repo_name=repo_name_nonopt,
                          repo_id=repo_id,
                          base_commit=base,
                          base_tree=base_obj.tree(),
                          stack_header=stack_header,
                          update_fields=update_fields,
                          msg=msg,
                          short=short,
                          force=force,
                          no_skip=no_skip,
                          draft=draft,
                          # Submitter wants earliest-first order.
                          stack=list(reversed(stack)),
                          github_url=github_url,
                          remote_name=remote_name)
    submitter.prepare_updates()
    submitter.push_updates()

    # NB: earliest first
    return submitter.stack_meta
def main(msg: Optional[str],
         username: str,
         github: ghstack.github.GitHubEndpoint,
         update_fields: bool = False,
         sh: Optional[ghstack.shell.Shell] = None,
         stack_header: str = STACK_HEADER,
         repo_owner: Optional[str] = None,
         repo_name: Optional[str] = None,
         short: bool = False,
         ) -> List[DiffMeta]:
    """Submit the commits between HEAD and its merge-base with
    origin/master as a stack of pull requests.

    Determines the target repository (from explicit arguments or by
    parsing the 'origin' remote URL), refuses to operate on forks,
    computes the commit stack, and processes each commit with
    ``Submitter``.

    Returns the per-diff metadata collected by the submitter, earliest
    commit first.
    """
    if sh is None:
        # Use CWD
        sh = ghstack.shell.Shell()

    if repo_owner is None or repo_name is None:
        # Grovel in remotes to figure it out
        origin_url = sh.git("remote", "get-url", "origin")
        # while True is used as a one-shot breakable block: each match
        # attempt breaks out on success; falling through all of them raises.
        while True:
            # SSH-style remote, e.g. git@github.com:owner/name.git
            # (BUGFIX: this literal had been mangled to an email-like
            # token '[email protected]'; restored to match the generic
            # '^git@{github_url}:' pattern used elsewhere in this file.)
            m = re.match(r'^git@github\.com:([^/]+)/([^.]+)(?:\.git)?$',
                         origin_url)
            if m:
                repo_owner_nonopt = m.group(1)
                repo_name_nonopt = m.group(2)
                break
            # HTTPS-style remote, e.g. https://github.com/owner/name
            m = re.search(r'github.com/([^/]+)/([^.]+)', origin_url)
            if m:
                repo_owner_nonopt = m.group(1)
                repo_name_nonopt = m.group(2)
                break
            raise RuntimeError(
                "Couldn't determine repo owner and name from url: {}"
                .format(origin_url))
    else:
        repo_owner_nonopt = repo_owner
        repo_name_nonopt = repo_name

    # TODO: Cache this guy
    repo = github.graphql(
        """
        query ($owner: String!, $name: String!) {
          repository(name: $name, owner: $owner) {
            id
            isFork
          }
        }""",
        owner=repo_owner_nonopt,
        name=repo_name_nonopt)["data"]["repository"]

    if repo["isFork"]:
        raise RuntimeError(
            "Cowardly refusing to upload diffs to a repository that is a "
            "fork. ghstack expects 'origin' of your Git checkout to point "
            "to the upstream repository in question. If your checkout does "
            "not comply, please adjust your remotes (by editing .git/config) "
            "to make it so. If this message is in error, please register "
            "your complaint on GitHub issues (or edit this line to delete "
            "the check above.")

    repo_id = repo["id"]
    sh.git("fetch", "origin")
    base = GitCommitHash(sh.git("merge-base", "origin/master", "HEAD"))

    # compute the stack of commits to process (reverse chronological order),
    # INCLUDING the base commit
    # 'base^@' denotes all parents of base, so excluding them keeps the
    # base commit itself in the listing.
    stack = ghstack.git.split_header(
        sh.git("rev-list", "--header", "^" + base + "^@", "HEAD"))

    assert len(stack) > 0

    # Record the tip commit for crash/status reporting.
    ghstack.logging.record_status(
        "{} \"{}\"".format(stack[0].commit_id()[:9], stack[0].title()))

    # start with the earliest commit
    g = reversed(stack)
    # The first (earliest) entry is the base commit; it is consumed here
    # and not processed as part of the stack.
    base_obj = next(g)

    submitter = Submitter(github=github,
                          sh=sh,
                          username=username,
                          repo_owner=repo_owner_nonopt,
                          repo_name=repo_name_nonopt,
                          repo_id=repo_id,
                          base_commit=base,
                          base_tree=base_obj.tree(),
                          stack_header=stack_header,
                          update_fields=update_fields,
                          msg=msg,
                          short=short)
    for s in g:
        submitter.process_commit(s)
    submitter.post_process()

    # NB: earliest first
    return submitter.stack_meta
async def main(pull_request: str,
               github: ghstack.github.GitHubEndpoint,
               circleci: ghstack.circleci.CircleCIEndpoint) -> None:
    """Print a one-line CI summary for each commit on a pull request.

    For each commit (up to the last 100) the commit statuses are scanned
    for one hard-coded CircleCI job; CircleCI is then queried to decide
    whether the job really ran, was skipped, or failed, and for failures
    the tail of the job log is appended to the printed line.
    """
    # Game plan:
    # 1. Query GitHub to find out what the current statuses are
    #    (TODO: if we got rate limited we'll miss stuff)
    # 2. For each status in parallel:
    #    a. Query CircleCI for job status
    #    b. (Future work) Query output_url to get log information
    #       (it's gzip'ed)
    #
    # For now:
    # - Print if the job actually ran, or was skipped
    #   - Easy way to determine: check if "Should run job after
    #     checkout" is last step
    #   - I inspected circleci.get('project/github/pytorch/pytorch/1773555')
    #     to see if there were other options, there did not appear
    #     to be any indication that a halt was called. So we'll
    #     have to rely on the (OS X jobs, take note!)
    params = ghstack.github_utils.parse_pull_request(pull_request)

    # TODO: stop hard-coding number of commits
    r = github.graphql(
        """
        query ($name: String!, $owner: String!, $number: Int!) {
          repository(name: $name, owner: $owner) {
            pullRequest(number: $number) {
              commits(last: 100) {
                nodes {
                  commit {
                    oid
                    messageHeadline
                    status {
                      contexts {
                        context
                        state
                        targetUrl
                      }
                    }
                  }
                }
              }
            }
          }
        }
        """, **params)
    nodes = r['data']['repository']['pullRequest']['commits']['nodes']

    async def process_node(n: Dict[str, Any]) -> str:
        # Render one commit node as "<icon> <sha> <headline> (<buildid>)<log>".
        commit = n['commit']
        status = commit['status']
        icon = "❔"
        text = ""
        buildid_text = ""
        # status is None when no CI contexts were reported for the commit.
        if status is not None:
            contexts = status['contexts']
        else:
            contexts = []
        for c in contexts:
            # TODO: Stop hard-coding me
            if c['context'] != 'ci/circleci: pytorch_linux_xenial_py3_clang5_asan_test':
                continue
            m = RE_CIRCLECI_URL.match(c['targetUrl'])
            if not m:
                # Unparseable CircleCI URL; give up on this context.
                icon = "🍆"
                break
            if c['state'] == 'SUCCESS':
                icon = "✅"
                break
            buildid = m.group(1)
            buildid_text = " ({})".format(buildid)
            # NOTE(review): this shadows the outer 'r' within process_node.
            r = await circleci.get(
                "project/github/{name}/{owner}/{buildid}".format(
                    buildid=buildid, **params))
            if not r["failed"]:
                # It was just cancelled (don't check "cancelled"; that's
                # true even if the job failed otherwise; it just means
                # workflow got cancelled)
                icon = "❔"
                break
            icon = "❌"
            # Fetch the raw log of the last action of the last step and
            # keep only the final 1500 characters.
            async with aiohttp.request(
                    'get',
                    r['steps'][-1]['actions'][-1]['output_url']) as resp:
                log_json = await resp.json()
            buf = []
            for e in log_json:
                buf.append(e["message"])
            text = "\n" + strip_sccache("\n".join(buf))
            text = text[-1500:]
        return "{} {} {}{}{}".format(icon, commit['oid'][:8],
                                     commit['messageHeadline'],
                                     buildid_text, text)

    # Commits are processed sequentially (awaited one at a time), not in
    # parallel, despite the game plan above.
    for n in nodes:
        print(await process_node(n))
async def main(pull_request: str,  # noqa: C901
               github: ghstack.github.GitHubEndpoint,
               circleci: ghstack.circleci.CircleCIEndpoint) -> None:
    """Print a one-line CI summary for every status context on the most
    recent commit of a pull request.

    CircleCI contexts are additionally resolved against the CircleCI API
    to distinguish real successes from skipped or canceled jobs; failed
    jobs get the tail of their log appended.  Results are printed sorted,
    one context per line.
    """
    # Game plan:
    # 1. Query GitHub to find out what the current statuses are
    #    (TODO: if we got rate limited we'll miss stuff)
    # 2. For each status in parallel:
    #    a. Query CircleCI for job status
    #    b. (Future work) Query output_url to get log information
    #       (it's gzip'ed)
    #
    # For now:
    # - Print if the job actually ran, or was skipped
    #   - Easy way to determine: check if "Should run job after
    #     checkout" is last step
    #   - I inspected circleci.get('project/github/pytorch/pytorch/1773555')
    #     to see if there were other options, there did not appear
    #     to be any indication that a halt was called. So we'll
    #     have to rely on the (OS X jobs, take note!)
    params = ghstack.github_utils.parse_pull_request(pull_request)

    # Shape of one entry of the GraphQL 'contexts' payload below.
    ContextPayload = TypedDict("ContextPayload", {
        "context": str,
        "state": str,
        "targetUrl": str,
    })

    r = github.graphql(
        """
        query ($name: String!, $owner: String!, $number: Int!) {
          repository(name: $name, owner: $owner) {
            pullRequest(number: $number) {
              commits(last: 1) {
                nodes {
                  commit {
                    status {
                      contexts {
                        context
                        state
                        targetUrl
                      }
                    }
                  }
                }
              }
            }
          }
        }
        """, **params)
    # Only the most recent commit (last: 1) is inspected.
    contexts = r['data']['repository']['pullRequest']['commits']['nodes'][0][
        'commit']['status']['contexts']

    async def process_context(context: ContextPayload) -> str:
        # Render one status context as "<icon> <padded name> <url><log>".
        text = ""
        if 'circleci' in context['context']:
            m = RE_CIRCLECI_URL.match(context['targetUrl'])
            if not m:
                logging.warning("Malformed CircleCI URL {}".format(
                    context['targetUrl']))
                return "INTERNAL ERROR {}".format(context['context'])
            buildid = m.group(1)
            r = await circleci.get(
                "project/github/{name}/{owner}/{buildid}".format(
                    buildid=buildid, **params))
            if context['state'] not in {'SUCCESS', 'PENDING'}:
                # Trust GitHub for terminal non-success states.
                state = context['state']
            else:
                # GitHub says SUCCESS/PENDING; refine using CircleCI's
                # view of the build.
                if r["failed"]:
                    state = "FAILURE"
                elif r["canceled"]:
                    state = "CANCELED"
                elif "Should Run Job" in r["steps"][-1]["name"]:
                    # Job bailed out right after the should-run check.
                    state = "SKIPPED"
                else:
                    state = "SUCCESS"
            if state == "FAILURE":
                # Fetch the raw log of the last action of the last step
                # and keep only the final 1500 characters.
                async with aiohttp.request(
                        'get',
                        r['steps'][-1]['actions'][-1]['output_url']) as resp:
                    log_json = await resp.json()
                buf = []
                for e in log_json:
                    buf.append(e["message"])
                text = "\n" + strip_sccache("\n".join(buf))
                text = text[-1500:]
        else:
            # Non-CircleCI context: report GitHub's state verbatim.
            state = context['state']
        # Map the state onto a display icon.
        if state == "SUCCESS":
            state = "✅"
        elif state == "SKIPPED":
            state = "❔"
        elif state == "CANCELED":
            state = "💜"
        elif state == "PENDING":
            state = "🚸"
        elif state == "FAILURE":
            state = "❌"
        name = context['context']
        url = context["targetUrl"]
        # Strip CircleCI's tracking query string from the displayed URL.
        url = url.replace(
            "?utm_campaign=vcs-integration-link&utm_medium=referral&utm_source=github-build-link",
            "")
        return "{} {} {}{}".format(state, name.ljust(70), url, text)

    # All contexts are resolved concurrently.
    results = await asyncio.gather(
        *[asyncio.ensure_future(process_context(c)) for c in contexts])
    print("\n".join(sorted(results)))