Beispiel #1
0
def get_project_and_column(
        repo: Repository, project_name: str, column_name: str,
        create_missing: bool
) -> tuple[Optional[Project], Optional[ProjectColumn]]:
    """Look up a project and one of its columns in a repo.

    When ``create_missing`` is true, absent objects are created via
    ``search_for_object``'s ``create`` callback. Returns ``(None, None)``
    when the project cannot be found or created.
    """
    found_project = search_for_object(
        repo.get_projects(),
        project_name,
        create_missing=create_missing,
        object_name="project",
        create=lambda: repo.create_project(
            name=project_name,
            body=ask("Enter the project's description..."),
        ),
    )
    if found_project is None:
        # Without a project there is nothing to search columns in.
        return None, None

    found_column = search_for_object(
        found_project.get_columns(),
        column_name,
        create_missing=create_missing,
        object_name="column",
        create=lambda: found_project.create_column(name=column_name),
    )
    return found_project, found_column
Beispiel #2
0
def get_forks_info(origin: Repository, client: Client) -> List[ForkInfo]:
    """
    Requests forks of ``origin`` and wraps them in 'ForkInfo' objects.

    Returns an empty list when the rate-limit budget is insufficient
    (previously a bare ``return`` yielded None, contradicting the
    ``List[ForkInfo]`` annotation and breaking callers that iterate).
    """
    result: List[ForkInfo] = []
    forks = origin.get_forks()
    log.info('got list of forks, total %d', forks.totalCount)
    client.count_rate_limit(1)
    try:
        rate_limits_check(client, forks.totalCount + pagination_correction(forks.totalCount, 30))
    except RateLimitError:
        # Bug fix: return the (empty) list instead of None.
        return result
    for fork in forks:
        try:
            log.info('comparing fork: %s', fork.full_name)
            comparison = origin.compare(origin.owner.login + ":master", fork.owner.login + ":master")
            # NOTE(review): assumes ``fork.updated_at`` is naive like
            # ``datetime.now()`` — confirm against the pinned PyGithub version.
            fi = ForkInfo(
                fork.html_url,
                abs(datetime.now() - fork.updated_at).days,
                fork.stargazers_count,
                comparison.ahead_by,
                comparison.behind_by,
            )
            result.append(fi)
        except UnknownObjectException as e:
            log.exception('possibly removed fork or user: %s, %d, message: %s',
                          fork.html_url, e.status, e.data.get('message', ''))
        except GithubException as e:
            message = e.data.get('message', '')
            if e.status == 404 and 'No common ancestor between ' in message:  # that can be handled
                log.error('404 %s', message)
                handle_github_exception(result, fork)
            else:
                log.exception('github error')
    client.count_rate_limit(forks.totalCount + pagination_correction(forks.totalCount, 30))
    return result
class TestOwnership(unittest.TestCase):
    """Unit tests for the Ownership report aggregation."""
    # One parameterized case: three repos whose metadata declares two distinct
    # owner teams ("make-an-lpa" and "use-an-lpa"), so the expected owner
    # count passed as the last tuple element is 2.
    @parameterized.expand([("small-valid-repo-set", [{
        'repo':
        Repository(None, {}, {'html_url': "opg-lpa"}, True),
        'metadata': {
            'owners': ["make-an-lpa"],
            "dependencies": ["opg-data-lpa"]
        }
    }, {
        'repo':
        Repository(None, {}, {'html_url': "opg-use-an-lpa"}, True),
        'metadata': {
            'owners': ["use-an-lpa"],
            "dependencies":
            ["opg-pdf-service", "opg-data-lpa", "opg-data-lpa-codes"]
        }
    }, {
        'repo':
        Repository(None, {}, {'html_url': "opg-pdf-service"}, True),
        'metadata': {
            'owners': ["use-an-lpa"]
        }
    }], 2)])
    def test_add_owner_count(self, name: str, repos_with_metadata: list,
                             owner_count: int):
        """Ownership.add should register each repo under its declared owners."""
        report: Ownership = Ownership()
        for repo_meta in repos_with_metadata:
            report.add(repo_meta.get('repo'), repo_meta.get('metadata'))
        # number of owners and number of keys should match owner count
        self.assertEqual(owner_count, len(report.owners))
        self.assertEqual(owner_count, len(report.owner_repositories.keys()))
def update_changelog(version: str, github_repository: Repository) -> None:
    """Insert a heading for ``version`` under the 'Next' section of CHANGELOG.rst."""
    changelog_path = Path('CHANGELOG.rst')
    branch = 'master'
    contents = github_repository.get_contents(
        path=str(changelog_path),
        ref=branch,
    )
    # ``get_contents`` can return a single ``ContentFile`` or a list of
    # ``ContentFile``s; a changelog lookup must yield exactly one file.
    assert isinstance(contents, ContentFile)
    changelog_text = contents.decoded_content.decode('utf-8')
    updated_text = changelog_text.replace(
        'Next\n----',
        f'Next\n----\n\n{version}\n------------',
    )
    github_repository.update_file(
        path=str(changelog_path),
        message=f'Update for release {version}',
        content=updated_text,
        sha=contents.sha,
    )
def get_stale_branch_names_with_contrib(
        repo: Repository) -> List[str]:  # noqa: E999
    """Return the list of branches that have the prefix of "contrib/" without open pull requests
    and that have not been updated for 2 months (stale)

    Args:
        repo (Repository): The repository whose branches will be searched and listed

    Returns:
        (List[str]): List of branch names that are stale and have the "contrib/" prefix
    """
    # Bug fix: use UTC as the original comment intended ("GMT timezone");
    # ``timezone.min`` is UTC-23:59 and skewed the elapsed-days arithmetic.
    now = datetime.now(timezone.utc)
    organization = 'demisto'
    branch_names = []
    for branch in repo.get_branches():
        # Make sure the branch is contrib
        if branch.name.startswith('contrib/'):
            prs_with_branch_as_base = repo.get_pulls(state='OPEN',
                                                     base=branch.name)
            # Bug fix: the head filter had a stray ')' appended to the branch
            # name, so it could never match an existing head ref.
            prs_with_branch_as_head = repo.get_pulls(
                state='OPEN', head=f'{organization}:{branch.name}')

            # Make sure there are no open prs pointing to/from the branch
            if prs_with_branch_as_base.totalCount < 1 and prs_with_branch_as_head.totalCount < 1:
                # Make sure HEAD commit is stale (last_modified is an HTTP
                # date string; ``parse`` yields a timezone-aware datetime)
                if (last_modified := branch.commit.commit.last_modified) and (
                        last_commit_datetime := parse(last_modified)):
                    elapsed_days = (now - last_commit_datetime).days
                    if elapsed_days >= 60:
                        branch_names.append(branch.name)
                else:
                    print(f"Couldn't load HEAD for {branch.name}")
    # Bug fix: the function previously fell off the end and returned None
    # despite the ``List[str]`` annotation.
    return branch_names
Beispiel #6
0
def build_release_notes(repository: Repository.Repository,
                        tag: GitRef.GitRef) -> str:
    """Build release-notes text from commits between the last release and ``tag``.

    Falls back to the whole commit history when no previous release exists.
    Release commits (those pointed at by an existing tag) are excluded.
    """
    signature = (
        "\n---\nReleased with [mozilla/task-boot](https://github.com/mozilla/task-boot)"
    )

    # Get all commits between both versions using the comparison endpoint
    try:
        latest_release = repository.get_latest_release()
        diff = repository.compare(latest_release.tag_name, tag.ref)
        commits = diff.commits
    except UnknownObjectException:
        logger.info(
            "No previous release available, will use all commits on repo")
        # Idiom fix: materialize with list() instead of a no-op comprehension.
        commits = list(repository.get_commits())

    # List existing tag SHAs. The loop variable is renamed so it no longer
    # shadows the ``tag`` parameter inside the comprehension.
    tag_shas = [existing_tag.commit.sha for existing_tag in repository.get_tags()]

    # Use first line of every commit in between versions
    lines = [
        "- {}".format(commit.commit.message.splitlines()[0])
        for commit in commits if not is_release_commit(commit, tag_shas)
    ]

    return "\n".join(lines) + signature
Beispiel #7
0
 def from_gh_repo(gh_repo: Repository):
     """Adapt a PyGithub ``Repository`` into a ``RadarGithubRepo``."""
     # Build the keyword set first; dict evaluation is left-to-right, so the
     # API calls (get_topics before get_languages) happen in the same order
     # as before.
     details = dict(
         name=gh_repo.full_name,
         url=gh_repo.html_url,
         stars=gh_repo.stargazers_count,
         desc=gh_repo.description,
         topics=gh_repo.get_topics(),
         forks=gh_repo.forks_count,
         languages=gh_repo.get_languages(),
     )
     return RadarGithubRepo(**details)
Beispiel #8
0
def add_label(repo: Repository, label_filter, label_to_add: Label):
    """Add the hacktoberfest label to open issues, optionally filtered by a label.

    Args:
        repo: Repository whose open issues are labelled.
        label_filter: Optional label; when given, only issues carrying it are touched.
        label_to_add: Label object attached to each matching issue.
    """
    # PEP 8 fix: compare against None with ``is not``, never ``!=``.
    if label_filter is not None:
        issues_list = repo.get_issues(state="open", labels=[label_filter])
    else:
        issues_list = repo.get_issues(state="open")
    for issue in issues_list:
        issue.add_to_labels(label_to_add)
    print('Hacktoberfest label added')
Beispiel #9
0
def get_milestone_from_name(repo: Repository, create_missing: bool,
                            name: str) -> Optional[Milestone]:
    """Find a milestone by title, optionally creating it when absent."""

    def _create_milestone():
        # Milestones are created by title only; other fields use API defaults.
        return repo.create_milestone(title=name)

    return search_for_object(
        repo.get_milestones(),
        name,
        create_missing=create_missing,
        object_name="milestone",
        create=_create_milestone,
        get_name=lambda milestone: milestone.title,
    )
def create_github_release(version: str, gh_repo: Repository) -> None:
    """Create a new release on Github for ``version`` targeting ``main``."""
    print("Creating release...")
    gh_repo.create_git_release(
        tag=version,
        name=version,
        target_commitish="main",
        # Typo fix in the user-visible release body: "homeassitant" -> "homeassistant".
        message=f"Generated for `homeassistant {version}`.",
        # Any version with a modifier (beta/dev suffix) is marked prerelease.
        prerelease=AwesomeVersion(version).modifier is not None,
    )
Beispiel #11
0
def __execute_master_default(change: Change[str],
                             repo: Repository) -> Change[str]:
    """Enforce 'master' as the default branch; report the outcome on ``change``."""
    message = "[%s] Enforcing master as the default branch" % highlight(repo.name)
    print_debug(message)
    try:
        repo.edit(default_branch="master")
    except GithubException:
        # The edit was rejected by the API: mark the change as failed.
        return change.failure()
    return change.success()
def assert_baseline(repo: Repository):
    """Check collaborators and pending invitations against BASELINE expectations."""
    collaborators = list(repo.get_collaborators())
    pending_invitees = [
        invite.invitee for invite in repo.get_pending_invitations()
    ]

    expected_by_username = {user["username"]: user for user in BASELINE}

    # No invitations should be outstanding; collaborators must match the baseline.
    compare_invite_data(actual=pending_invitees, expected={})
    compare_collaborator_data(actual=collaborators, expected=expected_by_username)
Beispiel #13
0
    def execute_develop_default(change: Change[str],
                                repo: Repository) -> Change[str]:
        """Switch the repository default branch to 'develop', reporting via ``change``."""
        message = "[%s] Changing default branch to 'develop'" % highlight(repo.name)
        print_debug(message)
        try:
            repo.edit(default_branch="develop")
        except GithubException:
            # Edit rejected by the API: record the failure.
            return change.failure()
        return change.success()
Beispiel #14
0
def create_empty_release(repo: Repo, text_id: str):
    """Delete any existing release named ``text_id`` and create a fresh one.

    Returns the newly created release.
    """
    try:
        existing = repo.get_release(text_id)
    except UnknownObjectException:
        # No previous release with this tag: nothing to clean up.
        pass
    else:
        existing.delete_release()
    # Bug fix: creation previously lived in a ``finally`` block, so a release
    # was created even while an unexpected exception (anything other than
    # UnknownObjectException) was propagating. Create only on the normal path.
    return repo.create_git_release(text_id, text_id, f"{text_id} release")
Beispiel #15
0
 def create_label(repo: Repository.Repository, newname: str,
                  newsettings: dict):
     """Create label ``newname`` with color/description taken from ``newsettings``."""
     print(f" Creating label {newname}")
     # Missing/empty settings fall back to the API's NotSet sentinel.
     color = newsettings.get('color') or GithubObject.NotSet
     description = newsettings.get('description') or GithubObject.NotSet
     try:
         repo.create_label(name=newname, color=color, description=description)
     except Exception as e:
         # Best effort: report the failure and continue with other labels.
         print(f" Error creating label: {e}")
Beispiel #16
0
def gh_is_repo_empty(repo: Repository):
    """Return True if GitHub reports the repository as empty.

    Probes README.md: a 404 whose message is "This repository is empty."
    signals an empty repo; any other GithubException is re-raised.
    """
    from github import GithubException

    try:
        # NOTE(review): ``get_file_contents`` is the legacy PyGithub name;
        # newer versions expose ``get_contents`` — confirm the pinned version.
        repo.get_file_contents('README.md')
    except GithubException as err:
        if err.status == 404:
            return err.data['message'] == 'This repository is empty.'
        # Idiom fix: bare ``raise`` preserves the original traceback,
        # unlike ``raise err`` which restarts it from this frame.
        raise

    return False
Beispiel #17
0
def new_label(
    repo: Repository,
    label_color: str,) -> Label:
    """Return the 'hacktoberfest' Label, creating it with ``label_color`` if absent."""
    try:
        return repo.get_label('hacktoberfest')
    except UnknownObjectException:
        # Label does not exist yet: create it with the requested color.
        return repo.create_label(
            name='hacktoberfest',
            color=label_color,
            description="Issues for contributing during the period of hacktoberfest")
Beispiel #18
0
def has_file(repo: Repository.Repository, file_path: str) -> bool:
    """Return True if ``file_path`` exists in the repository."""

    RateLimiter.check()
    try:
        repo.get_contents(file_path)
    except UnknownObjectException:
        # 404 from the contents API: the file is absent.
        Out.log(f"[{repo.full_name}] No [{file_path}] file")
        return False
    Out.log(f"[{repo.full_name}] Found [{file_path}] file")
    return True
Beispiel #19
0
 def create_branch(repository: Repository, source_branch_name: str,
                   target_branch_name: str) -> Branch:
     """
     Create a branch in the forked repository.
     :param repository: Forked repository.
     :param source_branch_name: Base branch the new branch is created from.
     :param target_branch_name: Name for the new branch.
     :return: The freshly created branch.
     """
     base_sha = repository.get_branch(source_branch_name).commit.sha
     repository.create_git_ref(ref=f'refs/heads/{target_branch_name}',
                               sha=base_sha)
     return repository.get_branch(target_branch_name)
Beispiel #20
0
def process_milestone(
    repo: Repository,
    milestone: Milestone,
    in_progress_label: Optional[Label],
    blocked_label: Optional[Label],
) -> str:
    """Render a Slack-style progress summary line for a milestone.

    The line links the milestone, shows closed/total counts, completion
    percentage, optional in-progress/blocked percentages, and time-budget
    usage when a due date is set.
    """
    # NOTE(review): naive ``now``; assumes milestone.due_on/created_at are
    # naive in the same frame — confirm against the pinned PyGithub version.
    now = datetime.now()
    html_url = milestone._rawData["html_url"]  # type: ignore
    total_issues = milestone.open_issues + milestone.closed_issues
    percentage_complete = as_percentage(milestone.closed_issues, total_issues)

    detail_lines = []

    status_line = f":heavy_check_mark: {percentage_complete}% completed"
    # Decomposition: the two label-count queries shared identical logic.
    in_progress_count = _count_open_labeled(repo, milestone, in_progress_label)
    if in_progress_count:
        percentage_in_progress = as_percentage(in_progress_count, total_issues)
        status_line += (
            f" - :hourglass_flowing_sand: {percentage_in_progress}% in progress"
        )
    blocked_count = _count_open_labeled(repo, milestone, blocked_label)
    if blocked_count:
        percentage_blocked = as_percentage(blocked_count, total_issues)
        status_line += f" - :octagonal_sign: {percentage_blocked}% blocked"
    detail_lines.append(status_line)

    if milestone.due_on:
        duration = milestone.due_on - milestone.created_at
        remaining = milestone.due_on - now
        time_used = 100 - as_percentage(remaining.days, duration.days)
        detail_lines.append(
            f":date: {milestone.due_on.date().isoformat()} - :alarm_clock: {time_used}% time used"
        )

    rendered_line = (
        f"<{html_url}|{milestone.title}> - {milestone.closed_issues}/{total_issues}"
    )
    for line in detail_lines:
        rendered_line += f"\n\t{line}"

    return rendered_line


def _count_open_labeled(repo: Repository, milestone: Milestone,
                        label: Optional[Label]) -> int:
    """Count open issues in *milestone* carrying *label*; 0 when label is None."""
    if not label:
        return 0
    return len(list(repo.get_issues(milestone=milestone, labels=[label], state="open")))
Beispiel #21
0
def __get_sha_for_tag(repository: Repository, tag: str) -> str:
    """
    Resolve ``tag`` to a commit SHA, checking branches first, then tags.

    Raises ValueError when no branch or tag carries that name.
    """
    for branch in repository.get_branches():
        if branch.name == tag:
            # A branch takes precedence over a tag of the same name.
            return branch.commit.sha

    for repo_tag in repository.get_tags():
        if repo_tag.name == tag:
            return repo_tag.commit.sha

    raise ValueError("No Tag or Branch exists with that name")
Beispiel #22
0
def create_or_edit_pr(title: str, body: str, skills_repo: Repository,
                      user, branch: str):
    """Update the existing autogenerated PR for ``branch``, or open a new one."""
    base = skills_repo.default_branch
    head = '{}:{}'.format(user.login, branch)
    existing = list(skills_repo.get_pulls(base=base, head=head))
    if not existing:
        return skills_repo.create_pull(title, body, base=base, head=head)
    pull = existing[0]
    if 'mycroft-skills-kit' not in pull.body:
        # Never clobber a human-written description.
        raise PRModified('Not updating description since it was not autogenerated')
    pull.edit(title, body)
    return pull
Beispiel #23
0
def create_release(repo: Repository, pr: PullRequest) -> None:
    """Publish a GitHub release for the current version from a merged PR."""
    notes = get_release_notes(pr)
    tag_name = "v" + str(current_version())
    try:
        repo.create_git_release(
            tag=tag_name,
            name=tag_name,
            message=notes,
            target_commitish=pr.merge_commit_sha,
        )
    except GithubException as e:
        # 422 means the release/tag already exists — treat as a no-op.
        if e.status != 422:
            raise
        print("This release already exists, ignoring")
def get_pr_from_commit(repo: Repository, sha: str) -> Optional[PullRequest]:
    """Find the open pull request whose head commit is ``sha``.

    A redis cache maps head SHAs to PR numbers; on a cache miss (or a stale
    entry) every open PR is scanned and the cache repopulated.
    """
    cached_number = redis.get_int(f'github:head:{sha}')
    if cached_number:
        try:
            candidate = repo.get_pull(cached_number)
            if candidate.head.sha == sha and candidate.state == 'open':
                return candidate
        except UnknownObjectException:
            # Stale cache entry: fall through to the full scan.
            pass
    for candidate in repo.get_pulls():
        head_sha = candidate.head.sha
        redis.store(f'github:head:{head_sha}', candidate.number, ex=3600)
        if head_sha == sha:
            return candidate
    return None
Beispiel #25
0
    def process_commit(self, repo: Repository, commit_id) -> tuple:
        """Scan files touched by a commit and comment the scan results on it.

        Returns ``(validation_ok, comment_text)``. (The previous ``-> bool``
        annotation contradicted the actual tuple return.)
        """
        files_content = {}
        commit_data = repo.get_commit(sha=commit_id)
        files = commit_data.raw_data.get('files')
        for file in files:
            patch_lines = file['patch'].split("\n")
            # Scan the full file if at least one line was added or changed.
            # Bug fix: ``line[0]`` raised IndexError on empty patch lines;
            # ``startswith`` is safe. (The 'M' prefix is kept from the
            # original — NOTE(review): unified diffs normally use only
            # '+'/'-'; confirm 'M' is intentional.)
            file_scan = any(line.startswith(('+', 'M')) for line in patch_lines)
            # wfp calculation
            if file_scan:
                contents = repo.get_contents(file['filename'], ref=commit_id)
                if contents:
                    files_content[file['filename']] = contents.decoded_content

        try:
            asset_json = repo.get_contents(self.sbom_file).decoded_content
        except Exception:
            self.logger.info("No assets")
            asset_json = {}

        self.logger.debug(asset_json)
        scan_result = self.scanner.scan_files(files_content, asset_json)
        result = {'comment': 'No results', 'validation': True, 'cyclondx': {}}

        if scan_result:
            result = self.scanner.format_scan_results(scan_result)
        # Bug fix: ``full_comment`` was only assigned inside the branch below,
        # so the final return raised UnboundLocalError when no comment was
        # posted. Initialize it unconditionally.
        full_comment = result['comment'] or ''
        # Add a comment to the commit
        if (not result['validation']
                or self.comment_always) and result['comment']:
            if result['cyclondx']:
                full_comment += "\n Please find the CycloneDX component details to add to your %s to declare the missing components here:\n" % self.sbom_file
                if asset_json:
                    full_comment += "```\n" + json.dumps(
                        result['cyclondx']['components'], indent=2) + "\n```"
                else:
                    full_comment += "```\n" + json.dumps(result['cyclondx'],
                                                         indent=2) + "\n```"

            self.logger.debug(full_comment)
            commit_data.create_comment(full_comment)
        return result['validation'], full_comment
Beispiel #26
0
def find_matching_pulls(gh_repo: Repository,
                        commits: Iter[Commit]) -> Generator:
    """Yield (pull request, matched commits) pairs covering the given ``commits``.

    A pull request matches when at least one of its commits shares a git
    author triplet (name, email, date) with one of ``commits`` and the union
    of filenames touched by those matched commits equals the set of files
    the pull request changes.
    """
    LOG.debug('Fetching commits referenced in payload')
    by_author = {commit_git_author(commit): commit for commit in commits}
    cache = shared_cache()

    for pull in gh_repo.get_pulls(state='open'):
        LOG.debug("Checking pull request #%s", pull.number)

        matched = list(keep(by_author.get, pullreq_commits_authors(pull, cache)))
        if any(matched):
            matched_filenames = {f.filename for c in matched for f in c.files}
            pull_filenames = {f.filename for f in pull.get_files()}
            if matched_filenames == pull_filenames:
                del cache[pull.id]
                yield pull, matched

    LOG.debug("Cached items: %d, max size: %d" %
              (cache.currsize, cache.maxsize))
Beispiel #27
0
def find_matching_pulls(gh_repo: Repository, commits: Iter[Commit]) -> Generator:
    """Yield (pull request, matched commits) pairs covering the given ``commits``.

    Matching compares commits by git-author triplet (name, email, date): a
    pull request qualifies when it contains at least one commit from
    ``commits`` and the union of files those matched commits touch equals
    the pull request's full file set.
    """
    LOG.debug('Fetching commits referenced in payload')
    author_index = {commit_git_author(commit): commit for commit in commits}
    cache = shared_cache()

    for pullreq in gh_repo.get_pulls(state='open'):
        LOG.debug("Checking pull request #%s", pullreq.number)

        merged_commits = list(keep(author_index.get, pullreq_commits_authors(pullreq, cache)))
        if not any(merged_commits):
            continue
        merged_files = {change.filename for commit in merged_commits for change in commit.files}
        pullreq_files = {change.filename for change in pullreq.get_files()}
        if merged_files == pullreq_files:
            del cache[pullreq.id]
            yield pullreq, merged_commits

    LOG.debug("Cached items: %d, max size: %d" % (cache.currsize, cache.maxsize))
Beispiel #28
0
def add_labels(gh: github.Github, issue_number: int, affecting_changes: list,
           repo: Repository, project_board: Project):
    """Sync the 'wip' / 'ready for review' labels on an issue from its Gerrit changes.

    All changes closed -> remove both labels; any WIP/DNM commit message ->
    mark wip; otherwise mark ready for review, moving the issue on the board.
    """
    try:
        issue = repo.get_issue(issue_number)
    except github.GithubException:
        LOG.warning(f'Issue #{issue_number} not found for project')
        return

    # Assume these conditions and prove otherwise by iterating over affecting changes
    is_wip = False
    is_closed = True

    for change in affecting_changes:
        if 'WIP' in change['commitMessage'] or 'DNM' in change['commitMessage']:
            is_wip = True
        if change['status'] == 'NEW':
            is_closed = False

    if is_closed:
        LOG.debug(f'Issue #{issue_number} is closed, removing labels.')
        remove_label(issue, 'wip')
        remove_label(issue, 'ready for review')
    elif is_wip:
        # Bug fix: ``Log`` was an undefined name (NameError at runtime);
        # the module logger is ``LOG``.
        LOG.debug(f'Issue #{issue_number} is WIP, adding the "wip" label and removing ' \
                  f'the "ready for review" label.')
        remove_label(issue, 'ready for review')
        add_label(issue, 'wip')
        move_issue(project_board, issue, 'In Progress')
    else:
        LOG.debug(f'Issue #{issue_number} is ready to be reviewed, adding the "ready ' \
                  f'for review" label and removing the "wip" label.')
        remove_label(issue, 'wip')
        add_label(issue, 'ready for review')
        move_issue(project_board, issue, 'Submitted on Gerrit')
Beispiel #29
0
def add_comments(gh: github.Github, change: dict, affected_issues: dict,
           repo: Repository, skip_approvals: bool = False):
    """Post or update a bot comment on every issue affected by a Gerrit change.

    Args:
        gh: Authenticated Github client; used to resolve the bot's own login.
        change: Gerrit change payload; ``change['number']`` identifies it.
        affected_issues: Mapping of change key -> list of issue numbers.
        repo: Repository the issues live in.
        skip_approvals: Forwarded to ``get_issue_comment``.
    """
    for key, issues_list in affected_issues.items():
        for issue_number in issues_list:
            try:
                issue = repo.get_issue(issue_number)
            except github.GithubException:
                # NOTE(review): this returns from the whole function on the
                # first missing issue, skipping all remaining issues —
                # confirm that ``continue`` was not intended here.
                LOG.warning(f'Issue #{issue_number} not found for project')
                return

            comment_msg = get_issue_comment(change, key, skip_approvals)
            # Disable this feature to reopen issue on any gerrit activity on closed issue
            # Issues identified:
            #   1. When an old PS (tagged with closed issue number) is abandoned, it reopens the issue
            #   2. post/promote job events are also considered as some gerrit activity on a
            #       closed issue and is reopened in the next immediate run of bot
            #if issue.state == 'closed':
            #    LOG.debug(f'Issue #{issue_number} was closed, reopening...')

                # NOTE(howell): Reopening a closed issue will move it from the
                # "Done" column to the "In Progress" column on the project
                # board via Github automation.
            #    issue.edit(state='open')
            #    comment_msg += '\n\nIssue reopened due to new activity on Gerrit.'

            # Create a fresh comment the first time, edit the bot's existing
            # comment on subsequent runs so the issue is not spammed.
            bot_comment = github_issues.get_bot_comment(issue, gh.get_user().login, change['number'])
            if not bot_comment:
                LOG.debug(f'Comment to post on #{issue_number}: {comment_msg}')
                issue.create_comment(comment_msg)
                LOG.info(f'Comment posted to issue #{issue_number}')
            else:
                LOG.debug(f'Comment to edit on #{issue_number}: {comment_msg}')
                bot_comment.edit(comment_msg)
                LOG.info(f'Comment edited to issue #{issue_number}')
Beispiel #30
0
def analyse_pull_requests(project: Repository,
                          prev_pulls: Dict[str, Any]) -> Optional[Dict[str, Any]]:
    """Analyse every closed pull_request in repository.

    Arguments:
        project {Repository} -- currently the PyGithub lib is used because of its functionality
                                ogr unfortunatelly did not provide enough to properly analyze issues

        prev_pulls {Dict[str, Any]} -- previously accumulated pull-request knowledge

    Returns:
        The accumulated knowledge, or None when there is nothing new to store.
        (Annotation fixed: the bare return below always yielded None despite
        the previous ``-> Dict[str, Any]`` annotation.)
    """
    _LOGGER.info(
        "-------------Pull Requests Analysis (including its Reviews)-------------"
    )

    current_pulls = project.get_pulls(state="closed")
    new_pulls = get_only_new_entities(prev_pulls, current_pulls)

    # Idiom: truthiness check instead of ``len(...) == 0``.
    if not new_pulls:
        return None

    with Knowledge(entity_type="PullRequest",
                   new_entities=new_pulls,
                   accumulator=prev_pulls,
                   store_method=store_pull_request) as analysis:
        accumulated = analysis.store()
    return accumulated
Beispiel #31
0
def analyse_issues(project: Repository,
                   prev_issues: Dict[str, Any]) -> Optional[Dict[str, Any]]:
    """Analyse of every closed issue in repository.

    Arguments:
        project {Repository} -- currently the PyGithub lib is used because of its functionality
                                ogr unfortunatelly did not provide enough to properly analyze issues

        prev_issues {Dict[str, Any]} -- previously accumulated issue knowledge

    Returns:
        The accumulated knowledge, or None when there is nothing new to store.
        (Annotation fixed: the bare return below always yielded None despite
        the previous ``-> Dict[str, Any]`` annotation.)
    """
    _LOGGER.info("-------------Issues (that are not PR) Analysis-------------")

    # Closed issues that are real issues; PRs also appear in the issues API
    # but carry a non-None ``pull_request`` attribute.
    current_issues = [
        issue for issue in project.get_issues(state="closed")
        if issue.pull_request is None
    ]
    new_issues = get_only_new_entities(prev_issues, current_issues)

    # Idiom: truthiness check instead of ``len(...) == 0``.
    if not new_issues:
        return None

    with Knowledge(entity_type="Issue",
                   new_entities=new_issues,
                   accumulator=prev_issues,
                   store_method=store_issue) as analysis:
        accumulated = analysis.store()
    return accumulated