Example #1
File: common.py Project: Nik-09/oppia
def check_prs_for_current_release_are_released(
        repo: github.Repository.Repository) -> None:
    """Checks that all pull requests for current release have a
    'PR: released' label.

    Args:
        repo: github.Repository.Repository. The PyGithub object for the repo.

    Raises:
        Exception. Some pull requests for current release do not have a
            "PR: released" label.
    """
    current_release_label = repo.get_label(
        constants.release_constants.LABEL_FOR_CURRENT_RELEASE_PRS)
    current_release_prs = repo.get_issues(state='all',
                                          labels=[current_release_label])
    for pr in current_release_prs:
        label_names = [label.name for label in pr.labels]
        if constants.release_constants.LABEL_FOR_RELEASED_PRS not in (
                label_names):
            open_new_tab_in_browser_if_possible(
                'https://github.com/oppia/oppia/pulls?utf8=%E2%9C%93&q=is%3Apr'
                '+label%3A%22PR%3A+for+current+release%22+')
            raise Exception(
                'There are PRs for current release which do not have '
                'a \'PR: released\' label. Please ensure that they are '
                'released before release summary generation.')
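A minimal usage sketch (not from the Oppia codebase): it assumes a personal access token with access to oppia/oppia; the token string below is a placeholder.

import github

gh = github.Github("<personal-access-token>")  # placeholder token
oppia_repo = gh.get_repo("oppia/oppia")
check_prs_for_current_release_are_released(oppia_repo)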
Example #2
def update_packages(repo: github.Repository.Repository, branch_name: str,
                    config: dict) -> None:
    try:
        packages_content = repo.get_contents("packages.yml")
        packages = ruamel.yaml.load(
            packages_content.decoded_content,
            Loader=ruamel.yaml.RoundTripLoader,
            preserve_quotes=True,
        )

        for package in packages["packages"]:
            if "package" in package:
                name = package["package"]
                if name in config["packages"]:
                    package["version"] = config["packages"][name]
            if "git" in package:
                name = package["git"]
                if name in config["packages"]:
                    package["revision"] = config["packages"][name]

        repo.update_file(
            path=packages_content.path,
            message="Updating package dependendcies",
            content=ruamel.yaml.dump(packages,
                                     Dumper=ruamel.yaml.RoundTripDumper),
            sha=packages_content.sha,
            branch=branch_name,
        )
    except github.GithubException:
        print("'packages.yml' not found in repo.")
Example #3
def create_labels(current: List[github.Label.Label], expected: List[Label],
                  repo: github.Repository.Repository, run: bool) -> None:
    def __creation_plan(current: List[github.Label.Label],
                        expected: List[Label]) -> List[Label]:
        current_names: List[str] = [l.name for l in current]
        expected_names: List[str] = [l.name for l in expected]
        names: List[str] = list(set(expected_names) - set(current_names))
        return [l for l in expected if l.name in names]

    for l in __creation_plan(current, expected):
        print('  Create `' + l.name + '`')
        if run:
            repo.create_label(l.name, l.color, l.description)
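The expected labels are instances of a project-local Label type (distinct from github.Label.Label) that is not shown in the snippet; a minimal sketch of what such a container might look like, with field names inferred from the attributes used above:

from dataclasses import dataclass

@dataclass
class Label:
    name: str
    color: str         # hex color without the leading '#', as repo.create_label expects
    description: str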
Example #4
    def search_repo(self, repo: github.Repository.Repository,
                    repo_out: github.Repository.Repository):
        for issue_state in self._issue_state:
            issues = repo.get_issues(state=issue_state, sort='updated')
            total = 0
            for _ in issues:
                total = total + 1
            for i, issue in enumerate(issues):
                upstream_tag = False
                confirm_tag = False

                tag = ''
                for label in issue.labels:
                    if label.name == 'upstream':
                        upstream_tag = True
                    elif label.name == 'confirm':
                        confirm_tag = True
                    else:
                        tag = label.name.title()
                # only migrate unconfirmed issues that carry the 'upstream' label
                if confirm_tag or not upstream_tag:
                    continue

                # GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request.
                # You can identify pull requests by the pull_request key.
                if issue.pull_request:
                    continue

                print("--------")
                print('[ISSUE][%s][%d/%d]\t%s %s' %
                      (issue_state, i + 1, total, repo.name, issue.title))
                title = '%s: %s' % (tag, issue.title)
                print(title)
                print('Moved from: %s#%d' % (repo.full_name, issue.number))
                print('Original author: @%s' % (issue.user.login))
                issue_body = issue.body.strip()
                if not issue_body:
                    issue_body = '**No description provided.** :sleeping:'
                body = \
'''
Moved from: %s#%d
Original author: @%s

### Issue description
%s
'''% (repo.full_name, issue.number, issue.user.login, issue_body)
                print(body)

                # close issue and create a new one
                issue.edit(state='closed')
                repo_out.create_issue(title=title, body=body)
Example #5
    def search_repo(self, repo: github.Repository.Repository):
        if not self._pr and not self._issue:
            self._pr = True

        if self._pr:
            for pr_state in self._pr_state:
                prs = repo.get_pulls(state=pr_state, sort='updated')
                # totalCount does not work
                # https://github.com/PyGithub/PyGithub/issues/870
                total = 0
                for _ in prs:
                    total = total + 1
                for i, pr in enumerate(prs):
                    self.printcl('[PR][%s][%d/%d]\t\t%s' % (self._state_to_utf[pr_state], i + 1, total, repo.name))
                    # It's not me
                    if self._search_username not in pr.user.login:
                        continue

                    # Check for max age
                    if (datetime.now().date() - pr.updated_at.date()).days > self._age:
                        break

                    self.print_pr(pr)

        if self._issue:
            for issue_state in self._issue_state:
                issues = repo.get_issues(state=issue_state, sort='updated')
                total = 0
                for _ in issues:
                    total = total + 1
                for i, issue in enumerate(issues):
                    # GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request.
                    # You can identify pull requests by the pull_request key.
                    if issue.pull_request:
                        continue
                    self.printcl('[ISSUE][%s][%d/%d]\t%s' % (self._state_to_utf[issue_state], i + 1, total, repo.name))
                    # It's not me
                    check_for_name_in = [issue.user.login]
                    if issue.closed_by:
                        check_for_name_in.append(issue.closed_by.login)
                    if issue.assignee:
                        check_for_name_in.append(issue.assignee.login)
                    if self._search_username not in check_for_name_in:
                        continue

                    # Check for max age
                    if (datetime.now().date() - issue.updated_at.date()).days > self._age:
                        break

                    self.print_issue(issue)
Example #6
def phab_login_to_github_login(phab_token: str,
                               repo: github.Repository.Repository,
                               phab_login: str) -> str:
    """
    Tries to translate a Phabricator login to a github login by
    finding a commit made in Phabricator's Differential.
    The commit's SHA1 is then looked up in the github repo and
    the committer's login associated with that commit is returned.

    :param str phab_token: The Conduit API token to use for communication with Phabricator
    :param github.Repository.Repository repo: The github repo to use when looking for the SHA1 found in Differential
    :param str phab_login: The Phabricator login to be translated.
    """

    args = {
        "constraints[authors][0]": phab_login,
        # PHID for "LLVM Github Monorepo" repository
        "constraints[repositories][0]": "PHID-REPO-f4scjekhnkmh7qilxlcy",
        "limit": 1
    }
    # API documentation: https://reviews.llvm.org/conduit/method/diffusion.commit.search/
    r = phab_api_call(phab_token,
                      "https://reviews.llvm.org/api/diffusion.commit.search",
                      args)
    data = r['result']['data']
    if len(data) == 0:
        # Can't find any commits associated with this user
        return None

    commit_sha = data[0]['fields']['identifier']
    return repo.get_commit(commit_sha).committer.login
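phab_api_call is a helper that is not included in this snippet. A rough sketch of what it presumably does, assuming the usual Conduit convention of sending the token as an api.token form field (the name and signature come from the call above; the body is an assumption):

import requests

def phab_api_call(phab_token: str, url: str, args: dict) -> dict:
    # Conduit endpoints take form-encoded parameters plus an api.token field.
    data = dict(args)
    data["api.token"] = phab_token
    response = requests.post(url, data=data)
    response.raise_for_status()
    return response.json()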
Example #7
def add_base_files(moss: mosspy.Moss, base_files: Iterable,
                   repo: github.Repository.Repository):
    print(f'Adding base files from repo: {repo.full_name}')
    for path in base_files:
        print(f'\t{path}')
        content_file = repo.get_contents(path=path)
        head, tail = os.path.split(path)
        file_path = f'{repo.name}_{tail}'
        with open(file_path, 'wb') as f:
            f.write(content_file.decoded_content)
        moss.addBaseFile(file_path=file_path, display_name=file_path)
Example #8
def get_prs(
    repo: github.Repository.Repository,
    t_a: datetime.datetime,
    t_b: datetime.datetime,
):
    merged = []  # PRs merged during the month
    for pr in repo.get_pulls("closed", sort="updated",
                             direction="desc"):  # most recently merged first
        if pr.merged:
            if t_a <= pr.merged_at < t_b:  # could check `.month` instead...
                merged.append(pr)
            if pr.merged_at < t_a:
                break

    wip = []  # WIP PRs (not merged, still open at this time)
    for pr in repo.get_pulls("open"):
        if pr.created_at < t_b:  # and pr.updated_at >= t_a:
            wip.append(pr)

    return _Prs(merged, wip)
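A possible way to build the (t_a, t_b) window for a single calendar month, assuming naive UTC datetimes comparable to the timestamps PyGithub returns (the month and repository below are arbitrary placeholders):

import datetime
import github

repo = github.Github("<token>").get_repo("my-org/my-repo")  # placeholders
t_a = datetime.datetime(2023, 5, 1)   # start of the month, inclusive
t_b = datetime.datetime(2023, 6, 1)   # start of the next month, exclusive
monthly = get_prs(repo, t_a, t_b)     # returns the project's _Prs container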
Example #9
    def search_repo(self, repo: github.Repository.Repository,
                    repo_out: github.Repository.Repository):
        repo_path = "/tmp/%s" % (repo.name)
        call([
            "git", "clone",
            "https://github.com/%s" % (repo.full_name), repo_path
        ])
        call(("git --git-dir %s/.git remote add output https://github.com/%s" %
              (repo_path, repo_out.full_name)).split(' '))
        call(["ls", "-l", repo_path])

        for pr_state in self._pr_state:
            prs = repo.get_pulls(state=pr_state, sort='updated')
            total = 0
            for _ in prs:
                total = total + 1
            for i, pr in enumerate(prs):
                print('[PR][%s][%d/%d]\t%s %s' %
                      (pr_state, i + 1, total, repo.name, pr.title))
                #print('Moved from: %s#%d' % (repo.full_name, pr.number))
                #print('Original author: @%s' % (pr.user.login))
                #print('Body: %s' % (pr.body))

                call(("git --git-dir %s/.git fetch origin pull/%d/head:%d" %
                      (repo_path, pr.number, pr.number)).split(' '))
                call(("git --git-dir %s/.git push output %d" %
                      (repo_path, pr.number)).split(' '))
                head = "%s:%d" % (repo_out.owner.login, pr.number)
                body = \
'''
%s

Moved from: %s#%d
Original author: @%s
''' % (pr.body, repo.full_name, pr.number, pr.user.login)

                repo_out.create_pull(title=pr.title,
                                     head=head,
                                     base="master",
                                     body=body)
Example #10
def get_repo_clones_traffic(self,
                            repo: Union[str, github.Repository.Repository],
                            save_dir: Union[str, pathlib.Path] = 'clone_data',
                            load=False):
    """gets clones traffic for `repo` and saves as csv in `save_dir`

    Parameters
    ----------
    repo : Union[str,github.Repository.Repository]
        repository from `org`
    save_dir : Union[str, pathlib.Path], optional
        directory where output CSV should be saved, by default 'clone_data'
    load : bool, optional
        load data into a Pandas DataFrame, by default False

    Returns
    -------
    pd.DataFrame
        contains unique and total clones for `repo` with dates
        (returned only when `load` is True)
    if isinstance(repo, str):
        repo = self.org.get_repo(repo)
    clones = repo.get_clones_traffic()
    clones_dict = {
        view.timestamp: {
            "total_clones": view.count,
            "unique_clones": view.uniques,
        }
        for view in clones['clones']
    }

    try:
        old_clones_data = pd.read_csv(
            f'{save_dir}/{repo.name}_clones_traffic.csv',
            index_col="_date",
            parse_dates=["_date"]).to_dict(orient="index")
        updated_clones_dict = {**old_clones_data, **clones_dict}
        clones_frame = pd.DataFrame.from_dict(
            data=updated_clones_dict,
            orient="index",
            columns=["total_clones", "unique_clones"])
    except Exception:
        # no previous CSV to merge with (or it could not be read); start fresh
        clones_frame = pd.DataFrame.from_dict(
            data=clones_dict,
            orient="index",
            columns=["total_clones", "unique_clones"])
    clones_frame.index.name = "_date"
    if not Path(save_dir).exists():
        Path(save_dir).mkdir()
    clones_frame.to_csv(f'{save_dir}/{repo.name}_clones_traffic.csv')
    if load:
        return clones_frame
Example #11
def update_requirements(repo: github.Repository.Repository, branch_name: str,
                        config: dict) -> None:
    # Build the new requirements content up front so it is defined in both the
    # update path and the create path below.
    new_content = ""
    for requirement in config["requirements"]:
        new_content += f"{requirement['name']}=={requirement['version']}\n"
    try:
        requirements_content = repo.get_contents(
            "integration_tests/requirements.txt")
        repo.update_file(
            path=requirements_content.path,
            message="Updating dbt version in requirements.txt",
            content=new_content,
            sha=requirements_content.sha,
            branch=branch_name,
        )
    except github.GithubException:
        # The file does not exist yet on this branch, so create it instead.
        repo.create_file(
            path="integration_tests/requirements.txt",
            message="Updating dbt version in requirements.txt",
            content=new_content,
            branch=branch_name,
        )
Example #12
def repo_isempty(repo: github.Repository.Repository) -> bool:
    """
    is a GitHub repo empty?

    Parameters
    ----------
    repo : github.Repository
        handle to GitHub repo

    Returns
    -------
    empty : bool
        True if the GitHub repo is empty
    """
    try:
        repo.get_contents("/")
        empty = False
    except github.GithubException as e:
        logging.error(f"{repo.name} is empty. \n")
        empty = True
        logging.info(str(e))

    return empty
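A small usage sketch, e.g. to filter the empty repositories out of an organization (the token and organization name are placeholders):

import github

gh = github.Github("<token>")
org = gh.get_organization("my-org")
non_empty = [r for r in org.get_repos() if not repo_isempty(r)]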
Example #13
def get_prs(
    repo: github.Repository.Repository,
    t_a: datetime.datetime,
    t_b: datetime.datetime,
):
    merged = []  # PRs merged during the month
    pl = repo.get_pulls("closed", sort="updated",
                        direction="desc")  # most recently *updated* first
    for pr in _maybe_prog(pl, desc=repo.name):
        if pr.merged:
            if t_a <= pr.merged_at < t_b:
                merged.append(pr)
            # if pr.merged_at < t_a:
            #     break
            # ^ This causes some to be missed since recently updated not equiv. to recently merged.
    merged.sort(key=lambda pr: pr.merged_at)  # earliest merged first

    wip = []  # WIP PRs (not merged, still open at this time)
    for pr in repo.get_pulls("open"):
        if pr.created_at < t_b:  # and pr.updated_at >= t_a:
            wip.append(pr)

    return _Prs(merged, wip)
Example #14
def update_project(repo: github.Repository.Repository, branch_name: str,
                   config: dict) -> None:
    project_content = repo.get_contents("dbt_project.yml")
    project = ruamel.yaml.load(
        project_content.decoded_content,
        Loader=ruamel.yaml.RoundTripLoader,
        preserve_quotes=True,
    )

    project["require-dbt-version"] = config["require-dbt-version"]

    current_version = project["version"]
    bump_type = config["version-bump-type"]

    current_version_split = current_version.split(".")

    if bump_type == "patch":
        current_version_split[2] = str(int(current_version_split[2]) + 1)
    elif bump_type == "minor":
        current_version_split[1] = str(int(current_version_split[1]) + 1)
        current_version_split[2] = "0"
    elif bump_type == "major":
        current_version_split[0] = str(int(current_version_split[0]) + 1)
        current_version_split[1] = "0"
        current_version_split[2] = "0"

    new_version = ".".join(current_version_split)
    project["version"] = new_version

    repo.update_file(
        path=project_content.path,
        message="Updating require-dbt-version",
        content=ruamel.yaml.dump(project, Dumper=ruamel.yaml.RoundTripDumper),
        sha=project_content.sha,
        branch=branch_name,
    )
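The config mapping is only read at the keys used above, so a hypothetical value (keys taken from the code, values made up) might be:

config = {
    "require-dbt-version": [">=1.0.0", "<2.0.0"],
    "version-bump-type": "patch",   # one of "patch", "minor", "major"
}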
Example #15
def open_pull_request(repo: github.Repository.Repository, branch_name: str,
                      default_branch: str) -> None:
    body = """
    #### This pull request was created automatically 🎉

    Before merging this PR:
    - [ ] Verify that all the tests pass.
    - [ ] Tag a release 
    """

    pull = repo.create_pull(
        title="[MagicBot] Bumping package version",
        body=body,
        head=branch_name,
        base=default_branch,
    )

    print(pull.html_url)
Example #16
def create_scrum_issue(
        repo: github.Repository.Repository,
        date: datetime.date,
        previous_issue: github.Issue.Issue = None,
    ) -> typing.Optional[github.Issue.Issue]:
    """
    Create a scrum issue for the given date.
    If not None, previous_issue is used to set an issue body
    that refers to the previous issue.
    """
    kwargs = {'title': f"{date}: e-scrum for {date:%A, %B %-d, %Y}"}
    if previous_issue:
        kwargs['body'] = 'Preceding e-scrum in {}.'.format(previous_issue.html_url)
    print('Creating {title!r}'.format(**kwargs), file=sys.stderr)
    try:
        return repo.create_issue(**kwargs)
    except Exception:
        print('Creating issue failed:\n{}'.format(traceback.format_exc()), file=sys.stderr)
Example #17
def check_isolated_branch(repo: github.Repository.Repository,
                          branch_name: str) -> bool:
    """Check if a branch is old and not associated with open PRs."""

    # find all commits in the branch
    commits = repo.get_commits(sha=branch_name)
    # find the latest commit
    latest_commit = max(commits,
                        key=lambda commit: commit.commit.committer.date)
    # find out if the latest commit is older than our time range
    latest_commit_timestamp = latest_commit.commit.committer.date
    if datetime.today() - latest_commit_timestamp > timedelta(
            days=DAYS_DEFINED_AS_STALE):
        pull_requests = latest_commit.get_pulls()
        # not associated with any open PR
        return all([pr.state == 'closed' for pr in pull_requests])
    # ignore if branch is recent
    return False
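A sketch of how this check might be applied across every branch of a repository; the token and repository name are placeholders, and DAYS_DEFINED_AS_STALE is assumed to be defined as in the snippet above.

import github

gh = github.Github("<token>")
repo = gh.get_repo("my-org/my-repo")
stale_branches = [
    branch.name
    for branch in repo.get_branches()
    if check_isolated_branch(repo, branch.name)
]
print(stale_branches)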
Example #18
def has_python_code(repo: github.Repository.Repository):
    return LANGUAGE in repo.get_languages()
Example #19
    def check_if_pull_request_exists(self, repo: github.Repository.Repository,
                                     head: str) -> bool:
        pulls = repo.get_pulls(head=head)
        return pulls.totalCount != 0
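The GitHub API expects head in the user:ref-name (or organization:ref-name) form, so a hypothetical call might look like the following; 'bot' stands for whatever object defines the method, and all names are placeholders.

# repo is a PyGithub Repository obtained as in the other examples
exists = bot.check_if_pull_request_exists(repo, head="my-org:feature/bump-version")
if not exists:
    print("no pull request from that branch yet")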
Example #21
def fork_prober(
    repo: github.Repository.Repository,
    sess: github.Github,
    ahead: list[tuple[str, int]],
    branch: str = None,
    verbose: bool = False,
) -> list[tuple[str, int]]:
    """
    check a GitHub repo for forks

    Parameters
    ----------
    repo :
        handle to GitHub repo
    sess :
        handle to GitHub session
    ahead : list of tuple of str, int
        forked repos, with the number of commits each is ahead of your repo
    branch : str, optional
        Git branch to examine
    verbose : bool, optional
        verbosity

    Returns
    -------
    ahead : list of tuple of str, int
        forked repos, with the number of commits each is ahead of your repo
    """
    check_api_limit(sess)

    b = repo.default_branch if not branch else branch

    try:
        master = repo.get_branch(b)
    except github.GithubException as e:
        logging.error(f"{repo.full_name}  {e}")
        return ahead

    forks = repo.get_forks()
    for fork in forks:
        sleep(0.1)  # don't hammer the API, avoiding 502 errors

        check_api_limit(sess)

        try:
            fmaster = fork.get_branch(b)
        except github.GithubException as e:
            if (
                    e.data["message"] == "Not Found"
            ):  # repo/branch that they deleted  FIXME: should we check their default branch?
                continue

            logging.error(f"{repo.full_name} {fork.full_name}  {e}")
            continue

        try:
            comp = repo.compare(master.commit.sha, fmaster.commit.sha)
        except github.GithubException as excp:
            # if excp.data["message"].startswith("No common ancestor"):
            #     continue

            logging.error(f"{repo.full_name} {fork.full_name}  {excp}")
            continue

        if comp.ahead_by:
            ahead.append((fork.full_name, comp.ahead_by))
            print(f"{fork.full_name} ahead by {comp.ahead_by}", end="")
            if verbose and comp.behind_by:
                print(f"behind by {comp.behind_by}", end="")
            print()

    return ahead
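A hedged usage sketch, assuming check_api_limit is available as in the snippet above and that you are probing your own repository; the token and repository name are placeholders.

import github

sess = github.Github("<token>")
repo = sess.get_repo("my-user/my-project")
ahead = fork_prober(repo, sess, ahead=[], verbose=True)
for name, n in ahead:
    print(f"{name} is ahead by {n} commits")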