Esempio n. 1
0
def get_pulls(owner_repo, labels=None, state="open", since=None, org=False, pull_details=None):
    """
    Get a bunch of pull requests (actually issues).

    `owner_repo`: "owner/repo" string identifying the GitHub repository.
    `labels`: optional list of label names to filter by.
    `state`: issue state to request ("open" or "closed").
    `since`: optional datetime; only issues updated at or after this time.
    `org`: if True, annotate each issue with the author's organization
        (looked up in people.yaml) and sort the results by it.

    `pull_details` indicates how much information you want from the associated
    pull request document.  None means just issue information is enough. "list"
    means the information available when listing pull requests is enough. "all"
    means you need all the details.  See the GitHub API docs for the difference:
    https://developer.github.com/v3/pulls/

    """
    url = URLObject("https://api.github.com/repos/{}/issues".format(owner_repo))
    if labels:
        url = url.set_query_param('labels', ",".join(labels))
    if since:
        url = url.set_query_param('since', since.isoformat())
    if state:
        url = url.set_query_param('state', state)
    url = url.set_query_param('sort', 'updated')

    org_fn = None
    if org:
        try:
            with open("people.yaml") as fpeople:
                # safe_load: people.yaml is plain data, and yaml.load without
                # an explicit Loader is unsafe and deprecated.
                people = yaml.safe_load(fpeople)
            def_org = "other"
        except IOError:
            people = {}
            def_org = "---"

        def org_fn(issue):
            # Map the PR author to an institution; "unsigned" means the user
            # is known but has no institution entry in people.yaml.
            user_info = people.get(issue["user.login"])
            if not user_info:
                user_info = {"institution": "unsigned"}
            return user_info.get("institution", def_org)

    issues = JPullRequest.from_json(paginated_get(url), org_fn)
    if org:
        issues = sorted(issues, key=operator.itemgetter("org"))

    pulls = None
    if pull_details == "list":
        issues = list(issues)
        if issues:
            # Request a bunch of pull details up front, for joining to.  We can't
            # ask for exactly the ones we need, so make a guess.
            limit = int(len(issues) * 1.5)
            pull_url = URLObject("https://api.github.com/repos/{}/pulls".format(owner_repo))
            if state:
                pull_url = pull_url.set_query_param('state', state)
            pulls = { pr['number']: pr for pr in paginated_get(pull_url, limit=limit) }

    for issue in issues:
        if pull_details:
            issue.load_pull_details(pulls=pulls)
        issue['id'] = "{}.{}".format(owner_repo, issue['number'])
        yield issue
Esempio n. 2
0
def get_pulls(owner_repo,
              labels=None,
              state="open",
              since=None,
              org=False,
              pull_details=None):
    """
    Fetch pull requests (via the issues endpoint) for `owner_repo`.

    `pull_details` controls how much pull-request data is attached to each
    issue: None loads none, "list" uses the data available from the
    pull-request list endpoint, and "all" fetches full details.  See the
    GitHub API docs for the difference:
    https://developer.github.com/v3/pulls/

    """
    issues_url = URLObject(
        "https://api.github.com/repos/{}/issues".format(owner_repo))
    # Query parameters are applied in order: labels, since, state, sort.
    if labels:
        issues_url = issues_url.set_query_param('labels', ",".join(labels))
    if since:
        issues_url = issues_url.set_query_param('since', since.isoformat())
    if state:
        issues_url = issues_url.set_query_param('state', state)
    issues_url = issues_url.set_query_param('sort', 'updated')

    issues = PullRequest.from_json(paginated_get(issues_url))
    if org:
        issues = sorted(issues, key=operator.attrgetter("org"))

    pulls = None
    if pull_details == "list":
        issues = list(issues)
        if issues:
            # Pre-fetch pull details for joining.  We can't request exactly
            # the ones we need, so over-fetch by half as a guess.
            guess = int(len(issues) * 1.5)
            pulls_url = URLObject(
                "https://api.github.com/repos/{}/pulls".format(owner_repo))
            if state:
                pulls_url = pulls_url.set_query_param('state', state)
            pulls = {}
            for pr in paginated_get(pulls_url, limit=guess):
                pulls[pr['number']] = pr

    for issue in issues:
        if pull_details:
            issue.load_pull_details(pulls=pulls)
        issue.id = "{}.{}".format(owner_repo, issue.number)
        yield issue
Esempio n. 3
0
def show_pulls(labels=None, show_comments=False, state="open", since=None, org=False):
    """
    Print pull requests for edx/edx-platform, grouped by author organization.

    `labels`: filter PRs by labels (all are shown if None).
    `show_comments`: if True, also print the last five comments on each PR.
    `state`, `since`, `org`: passed through to `get_pulls`.
    """
    issues = get_pulls("edx/edx-platform", labels, state, since, org, pull_details="all")

    category = None
    num = 0
    for issue in issues:
        if issue.get("org") != category:
            # new category! print category header
            category = issue["org"]
            print("-- {category} ----".format(category=category))

        print(issue.format(ISSUE_FMT))
        num += 1

        if show_comments:
            # Ask for comments newest-first, then print the last five in
            # chronological order.
            comments_url = URLObject(issue['comments_url'])
            comments_url = comments_url.set_query_param("sort", "created")
            comments_url = comments_url.set_query_param("direction", "desc")
            comments = paginated_get(comments_url)
            last_five_comments = reversed(more_itertools.take(5, comments))
            for comment in last_five_comments:
                print(comment.format(COMMENT_FMT))

    # `num` counts the PRs actually shown; the previous `enumerate` index
    # raised UnboundLocalError when there were no pull requests at all.
    print()
    print("{num} pull requests".format(num=num))
Esempio n. 4
0
def get_labels(owner_repo):
    """Return the list of labels on `owner_repo`, minus their 'url' fields."""
    labels_url = LABELS_URL.format(owner_repo=owner_repo)
    all_labels = list(paginated_get(labels_url))
    for lbl in all_labels:
        lbl.pop('url')
    return all_labels
Esempio n. 5
0
def get_pulls(owner_repo, labels=None, state="open", since=None, org=False):
    """
    Return issues (pull requests) for `owner_repo`.

    `labels`: optional list of label names to filter by.
    `state`: issue state to request ("open" or "closed").
    `since`: optional datetime; only issues updated at or after this time.
    `org`: if True, annotate each issue with the author's organization
        (looked up in people.yaml) and sort the results by it.
    """
    url = URLObject("https://api.github.com/repos/{}/issues".format(owner_repo))
    if labels:
        url = url.set_query_param('labels', ",".join(labels))
    if since:
        url = url.set_query_param('since', since.isoformat())
    if state:
        url = url.set_query_param('state', state)
    url = url.set_query_param('sort', 'updated')

    org_fn = None
    if org:
        try:
            with open("people.yaml") as fpeople:
                # safe_load: people.yaml is plain data, and yaml.load without
                # an explicit Loader is unsafe and deprecated.
                people = yaml.safe_load(fpeople)
            def_org = "other"
        except IOError:
            people = {}
            def_org = "---"

        def org_fn(issue):
            # Map the PR author to an institution; "unsigned" means the user
            # is known but has no institution entry in people.yaml.
            user_info = people.get(issue["user.login"])
            if not user_info:
                user_info = {"institution": "unsigned"}
            return user_info.get("institution", def_org)

    issues = JPullRequest.from_json(paginated_get(url), org_fn)
    if org:
        issues = sorted(issues, key=operator.itemgetter("org"))

    return issues
Esempio n. 6
0
def get_pulls(owner_repo, labels=None, state="open", since=None, org=False):
    """
    Return issues (pull requests) for `owner_repo`.

    `labels`: optional list of label names to filter by.
    `state`: issue state to request ("open" or "closed").
    `since`: optional datetime; only issues updated at or after this time.
    `org`: if True, annotate each issue with the author's organization
        (looked up in people.yaml) and sort the results by it.
    """
    url = URLObject(
        "https://api.github.com/repos/{}/issues".format(owner_repo))
    if labels:
        url = url.set_query_param('labels', ",".join(labels))
    if since:
        url = url.set_query_param('since', since.isoformat())
    if state:
        url = url.set_query_param('state', state)
    url = url.set_query_param('sort', 'updated')

    org_fn = None
    if org:
        try:
            with open("people.yaml") as fpeople:
                # safe_load: people.yaml is plain data, and yaml.load without
                # an explicit Loader is unsafe and deprecated.
                people = yaml.safe_load(fpeople)
            def_org = "other"
        except IOError:
            people = {}
            def_org = "---"

        def org_fn(issue):
            # Map the PR author to an institution; "unsigned" means the user
            # is known but has no institution entry in people.yaml.
            user_info = people.get(issue["user.login"])
            if not user_info:
                user_info = {"institution": "unsigned"}
            return user_info.get("institution", def_org)

    issues = JPullRequest.from_json(paginated_get(url), org_fn)
    if org:
        issues = sorted(issues, key=operator.itemgetter("org"))

    return issues
Esempio n. 7
0
def show_pulls(labels=None,
               show_comments=False,
               state="open",
               since=None,
               org=False):
    """
    Print pull requests for edx/edx-platform, grouped by author organization.

    `labels`: filter PRs by labels (all are shown if None).
    `show_comments`: if True, also print the last five comments on each PR.
    `state`, `since`, `org`: passed through to `get_pulls`.
    """
    issues = get_pulls("edx/edx-platform", labels, state, since, org)

    category = None
    num = 0
    for issue in issues:
        issue.load_pull_details()
        if issue.get("org") != category:
            # new category! print category header
            category = issue["org"]
            print("-- {category} ----".format(category=category))

        print(issue.format(ISSUE_FMT))
        num += 1

        if show_comments:
            # Ask for comments newest-first, then print the last five in
            # chronological order.
            comments_url = URLObject(issue['comments_url'])
            comments_url = comments_url.set_query_param("sort", "created")
            comments_url = comments_url.set_query_param("direction", "desc")
            comments = paginated_get(comments_url)
            last_five_comments = reversed(more_itertools.take(5, comments))
            for comment in last_five_comments:
                print(comment.format(COMMENT_FMT))

    # `num` counts the PRs actually shown; the previous `enumerate` index
    # raised UnboundLocalError when there were no pull requests at all.
    print()
    print("{num} pull requests".format(num=num))
Esempio n. 8
0
def get_labels(owner_repo):
    """Fetch every label defined on `owner_repo`; the 'url' key is dropped."""
    fetched = list(paginated_get(LABELS_URL.format(owner_repo=owner_repo)))
    for label in fetched:
        del label['url']
    return fetched
Esempio n. 9
0
def get_pulls(owner_repo, labels=None, state="open", since=None, org=False, pull_details=None):
    """
    Get a bunch of pull requests (actually issues).

    `pull_details` says how much pull-request data to attach to each issue:
    None means issue data alone is enough, "list" means the data from the
    pull-request list endpoint is enough, and "all" means full details are
    needed.  See the GitHub API docs for the difference:
    https://developer.github.com/v3/pulls/

    """
    issues_url = URLObject("https://api.github.com/repos/{}/issues".format(owner_repo))
    # Query parameters are applied in order: labels, since, state, sort.
    if labels:
        issues_url = issues_url.set_query_param('labels', ",".join(labels))
    if since:
        issues_url = issues_url.set_query_param('since', since.isoformat())
    if state:
        issues_url = issues_url.set_query_param('state', state)
    issues_url = issues_url.set_query_param('sort', 'updated')

    issues = PullRequest.from_json(paginated_get(issues_url))
    if org:
        issues = sorted(issues, key=operator.attrgetter("org"))

    pulls = None
    if pull_details == "list":
        issues = list(issues)
        if issues:
            # Pre-fetch pull details for joining; over-fetch by half since we
            # can't request exactly the ones we need.
            fetch_limit = int(len(issues) * 1.5)
            pulls_url = URLObject("https://api.github.com/repos/{}/pulls".format(owner_repo))
            if state:
                pulls_url = pulls_url.set_query_param('state', state)
            pulls = {}
            for pr in paginated_get(pulls_url, limit=fetch_limit):
                pulls[pr['number']] = pr

    for issue in issues:
        if pull_details:
            issue.load_pull_details(pulls=pulls)
        issue.id = "{}.{}".format(owner_repo, issue.number)
        yield issue
Esempio n. 10
0
def list_():
    """Lists the teams.

    Also shows permissions, members, and repos.

    """
    for team in list(paginated_get(uritemplate.expand(TEAMS_URL, org="edX"))):
        print("{0[name]}:".format(team))
        print("    permission: {0[permission]}".format(team))

        # Members and repos are paged endpoints, fetched fully so the counts
        # can be printed before the entries.
        team_members = list(paginated_get(uritemplate.expand(team['members_url'])))
        print("    members:   # {}".format(len(team_members)))
        for person in team_members:
            print("        - {0[login]}".format(person))

        team_repos = list(paginated_get(uritemplate.expand(team['repositories_url'])))
        print("    repos:    # {}".format(len(team_repos)))
        for r in team_repos:
            print("        - {0[full_name]}".format(r))
Esempio n. 11
0
def list_():
    """Lists the teams.

    Also shows permissions, members, and repos.

    """
    def _print_section(header_fmt, url_template, item_fmt):
        # Expand the URI template, fetch every page, and print one section.
        items = list(paginated_get(uritemplate.expand(url_template)))
        print(header_fmt.format(len(items)))
        for item in items:
            print(item_fmt.format(item))

    teams = list(paginated_get(uritemplate.expand(TEAMS_URL, org="edX")))
    for team in teams:
        print("{0[name]}:".format(team))
        print("    permission: {0[permission]}".format(team))
        _print_section("    members:   # {}", team['members_url'], "        - {0[login]}")
        _print_section("    repos:    # {}", team['repositories_url'], "        - {0[full_name]}")
Esempio n. 12
0
def contributors(owner_repo):
    """
    Returns a set of Github usernames who have contributed to the given repo.

    Usernames are lower-cased, since Github usernames are case-insensitive.

    """
    entries = paginated_get(
        CONTRIBUTORS_URL.format(owner_repo=owner_repo),
        auth=(GITHUB_USER, PERSONAL_ACCESS_TOKEN),
    )
    listed = {entry["login"].lower() for entry in entries}
    # Extra contributors can be declared in REPO_INFO rather than on GitHub.
    repo_info = REPO_INFO.get(owner_repo) or {}
    hidden = set(repo_info.get("hidden-contributors", []))
    return listed | hidden
Esempio n. 13
0
def contributors(owner_repo):
    """
    Returns a set of Github usernames who have contributed to the given repo.

    Usernames are lower-cased, since Github usernames are case-insensitive.

    """
    contributors_url = CONTRIBUTORS_URL.format(owner_repo=owner_repo)
    names = set()
    for entry in paginated_get(contributors_url,
                               auth=(GITHUB_USER, PERSONAL_ACCESS_TOKEN)):
        names.add(entry["login"].lower())
    # Extra contributors can be declared in REPO_INFO rather than on GitHub.
    extra = (REPO_INFO.get(owner_repo) or {}).get("hidden-contributors", [])
    return names | set(extra)
Esempio n. 14
0
def main(hub, org, pattern=None):
    """Print the webhooks of every repo in `org`, optionally filtered by `pattern`."""
    for repo in hub.organization(org).iter_repos():
        printed_repo = False
        hooks_url = "https://api.github.com/repos/{name}/hooks".format(name=repo.full_name)
        for hook in paginated_get(hooks_url):
            # With no pattern, every hook is shown; otherwise only hooks
            # whose config contains a value matching the regex.
            if pattern:
                show_it = any(re.search(pattern, value) for value in hook['config'].values())
            else:
                show_it = True

            if not show_it:
                continue
            if not printed_repo:
                # Emit the repo header once, only when it has a hook to show.
                print("\n-- {} ---------------------".format(repo.full_name))
                print("  https://github.com/{}/settings/hooks".format(repo.full_name))
                printed_repo = True
            print("{r[name]}".format(r=hook))
            for k, v in sorted(hook['config'].items()):
                print("  {k}: {v}".format(k=k, v=v))
Esempio n. 15
0
def main(hub, org, pattern=None):
    """Print the webhooks of every repo in `org`, optionally filtered by `pattern`."""
    for repo in hub.organization(org).repositories():
        printed_repo = False
        hooks_url = f"https://api.github.com/repos/{repo.full_name}/hooks"
        for hook in paginated_get(hooks_url):
            # With no pattern, every hook is shown; otherwise only hooks
            # whose config contains a value matching the regex.
            show_it = (not pattern) or any(
                re.search(pattern, value) for value in hook['config'].values()
            )
            if not show_it:
                continue
            if not printed_repo:
                # Emit the repo header once, only when it has a hook to show.
                print(f"\n-- {repo.full_name} ---------------------")
                print(
                    f"  https://github.com/{repo.full_name}/settings/hooks"
                )
                printed_repo = True
            print("{r[name]}".format(r=hook))
            for k, v in sorted(hook['config'].items()):
                print(f"  {k}: {v}")
Esempio n. 16
0
def get_comment_data(repo, since, internal):
    """
    Print CSV-style rows for external pull requests in `repo` that received
    comments from internal people at or after `since`.

    `repo`: "owner/repo" string, passed through to `get_pulls`.
    `since`: naive datetime; only comments created at or after it count.
    `internal`: predicate taking a GitHub login, True for internal users.
    """

    # Team rosters, used to tag which internal teams commented on each PR.
    ora = set(["ormsbee", "wedaly", "stephensanchez"])
    lms = set(["sarina", "cpennington", "dianakhuang", "davestgermain","flowerhack"])
    cms = set(["cahrens", "andy-armstrong", "dmitchell","nasthagiri"])
    analytics = set(["rocha","brianhw", "mulby"])
    forums = set(["gwprice","jimabramson"])

    # NOTE(review): pull_details="get" doesn't match the "list"/"all" values
    # documented for get_pulls elsewhere -- confirm it is handled.
    pull_kwargs = dict(org=True, pull_details="get")
    open_issues = get_pulls(repo, state="open", **pull_kwargs)
    closed_issues = get_pulls(repo, state="closed", since=since, **pull_kwargs)

    for pull in itertools.chain(open_issues, closed_issues):
        #print("-"*80)
        #pprint(pull.obj)
        # Only external pull requests are reported.
        if pull['intext'] == "internal":
            continue

        # Collect internal commenters; the row's fixed columns are printed
        # when the first qualifying comment is found.
        users = set()
        for comment in paginated_get(pull['comments_url']):
            # Comment timestamps are made naive to compare against `since`.
            created_at = iso8601.parse_date(comment["created_at"]).replace(tzinfo=None)
            commenter = comment["user"]["login"]
            if created_at >= since and internal(commenter):
                if not users:
                    print(pull.format("{id},{user.login},{pull.changed_files},{pull.additions},{pull.deletions}"), end="")
                    print(pull.format(',"{title}"'), end="")
                    print(pull.format(",{url}"), end="")
                users.add(commenter)
        if users:
            # Append team-intersection columns, then the commenter logins
            # joined with ":" to finish the row.
            check_intersection(users, lms, "LMS")
            check_intersection(users, ora, "ORA")
            check_intersection(users, cms, "CMS")
            check_intersection(users, analytics, "ANALYTICS")
            check_intersection(users, forums, "FORUMS")
            print(",", end="")
            print(":".join("{}".format(user) for user in sorted(users)), end="")
            print()
Esempio n. 17
0
def get_labels(repo):
    """Return an iterator over the labels defined on `repo`."""
    return paginated_get(LABELS_URL.format(owner_repo=repo.name))
Esempio n. 18
0
def show_pulls(labels=None, show_comments=False, state="open", since=None,
               org=False, intext=None, merged=False):
    """
    `labels`: Filters PRs by labels (all are shown if None is specified)
    `show_comments`: shows the last 5 comments on each PR, if True
    `state`: Filter PRs by this state (either 'open' or 'closed')
    `since`: a datetime representing the earliest time from which to pull information.
             All PRs regardless of time are shown if None is specified.
    `org`: If True, sorts by PR author affiliation
    `intext`: specify 'int' (internal) or 'ext' (external) pull request
    `merged`: If True and state="closed", shows only PRs that were merged.
    """
    num = 0
    adds = 0
    deletes = 0
    # Only repos flagged track_pulls in the repo data are examined.
    repos = [ r for r in Repo.from_yaml() if r.track_pulls ]
    for repo in repos:
        # org info is also requested when filtering by intext -- presumably
        # "intext" is derived from the org data (confirm against get_pulls).
        issues = get_pulls(repo.name, labels, state, since, org=org or intext, pull_details="all")

        category = None
        for issue in issues:
            issue["repo"] = repo.nick
            if intext is not None:
                if issue["intext"] != intext:
                    continue
            if state == 'closed' and merged and issue['combinedstate'] != 'merged':
                # If we're filtering on closed PRs, and only want those that are merged,
                # skip ones that were closed without merge.
                continue
            if state == 'closed' and since:
                # If this PR was closed prior to the last `since` interval of days, continue on
                # (it may have been *updated* - that is, referenced or commented on - more recently,
                #  but we just want to see what's been merged or closed in the past "since" days)
                # NOTE(review): [:-1] assumes closed_at ends in a one-char TZ
                # suffix (e.g. "Z") -- confirm the timestamp format.
                closed_at = dateutil.parser.parse(issue["closed_at"][:-1])  # Remove TZ information
                if closed_at < since:
                    continue

            if org and issue.get("org") != category:
                # new category! print category header
                category = issue["org"]
                print("-- {category} ----".format(category=category))

            # Debug aid: flip to 1 to dump the raw issue JSON.
            if 0:
                import pprint
                pprint.pprint(issue.obj)
            print(issue.format(ISSUE_FMT))
            num += 1
            adds += issue['pull']['additions']
            deletes += issue['pull']['deletions']

            if show_comments:
                # Ask for comments newest-first, then show the last five in
                # chronological order.
                comments_url = URLObject(issue['comments_url'])
                comments_url = comments_url.set_query_param("sort", "created")
                comments_url = comments_url.set_query_param("direction", "desc")
                comments = paginated_get(comments_url)
                last_five_comments = reversed(more_itertools.take(5, comments))
                for comment in last_five_comments:
                    print(comment.format(COMMENT_FMT))

    print()
    print("{num} pull requests; {adds}+ {deletes}-".format(num=num, adds=adds, deletes=deletes))
Esempio n. 19
0
def get_comments(pull):
    """Return Comment objects for `pull`, requested newest-created-first."""
    comments_url = URLObject(pull.comments_url)
    comments_url = comments_url.set_query_param("sort", "created")
    comments_url = comments_url.set_query_param("direction", "desc")
    return Comment.from_json(paginated_get(comments_url))
Esempio n. 20
0
def get_comments(pull):
    """Return Comment objects for `pull`, requested newest-created-first."""
    comments_url = (URLObject(pull.comments_url)
                    .set_query_param("sort", "created")
                    .set_query_param("direction", "desc"))
    return Comment.from_json(paginated_get(comments_url))
Esempio n. 21
0
#!/usr/bin/env python
"""List repos missing from repos.yaml."""

from __future__ import print_function

import yaml

from helpers import paginated_get

REPOS_URL = "https://api.github.com/orgs/{org}/repos"

# safe_load: repos.yaml is plain data, and yaml.load without an explicit
# Loader is unsafe and deprecated.
with open("repos.yaml") as repos_yaml:
    tracked_repos = yaml.safe_load(repos_yaml)

# Report public, non-fork repos that repos.yaml doesn't track.
for r in paginated_get(REPOS_URL.format(org="edX")):
    if not r['private'] and not r['fork']:
        if r['full_name'] not in tracked_repos:
            print("{r[full_name]}: {r[description]}".format(r=r))
Esempio n. 22
0
#!/usr/bin/env python
"""List the webhooks in all the repos."""

from __future__ import print_function

import pprint

from helpers import paginated_get
from repos import Repo

# Walk every tracked repo in name order and dump its hooks and their config.
for repo_name in sorted(repo.name for repo in Repo.from_yaml()):
    print("\n-- {} ---------------------".format(repo_name))
    hooks_url = "https://api.github.com/repos/{name}/hooks".format(name=repo_name)
    for hook in paginated_get(hooks_url):
        print("{r[name]}".format(r=hook))
        for key, value in sorted(hook['config'].items()):
            print("  {k}: {v}".format(k=key, v=value))
Esempio n. 23
0
from __future__ import print_function

import pprint

import uritemplate

from helpers import paginated_get

EVENTS_URL = "https://api.github.com/repos/{owner}/{repo}/events"

# Walk the event stream for edx-platform and report push events, noting
# whether each push was forced.
events_url = uritemplate.expand(EVENTS_URL, owner='edx', repo='edx-platform')
for event in paginated_get(events_url):
    if event['type'] != 'PushEvent':
        continue
    was_forced = event['payload'].get('forced', False)
    print("{0[created_at]}: {0[type]}, {0[actor][login]} {1}".format(
        event, was_forced))
Esempio n. 24
0
def get_duration_data(
    durations, owner="edx", repo="edx-platform", since=None,
    external_label="open-source-contribution", internal_usernames=None,
    user_org_mapping=None,
):
    """
    Update `durations`, a dict of dict of lists of pull requests.

    `durations` has four lists of data, where each list contains only timedelta objects:
      age of internal open pull requests (all)
      age of external open pull requests (all)
      age of internal closed pull requests (since the `since` value)
      age of external closed pull requests (since the `since` value)

    These lists are organized into a dictionary that categorizes the lists
    by position and state.
    """
    internal_usernames = internal_usernames or set()
    user_org_mapping = user_org_mapping or {}

    url = URLObject("https://api.github.com/repos/{owner}/{repo}/issues".format(
                    owner=owner, repo=repo))
    # two separate URLs, one for open PRs, the other for closed PRs
    open_url = url.set_query_param("state", "open")
    closed_url = url.set_query_param("state", "closed")
    if since:
        closed_url = closed_url.set_query_param('since', since.isoformat())

    # Tag each issue with its state as it streams by.  Generator expressions
    # replace itertools.izip, which does not exist on Python 3, and stay lazy
    # on both Python 2 and 3.
    open_issues_generator = (
        (issue, "open") for issue in paginated_get(open_url)
    )
    closed_issues_generator = (
        (issue, "closed") for issue in paginated_get(closed_url)
    )

    for issue, state in itertools.chain(open_issues_generator, closed_issues_generator):
        # Only issues that are really pull requests count.
        if not issue.get('pull_request', {}).get('url'):
            continue

        label_names = [label["name"] for label in issue["labels"]]

        # Labelled PRs are external; otherwise classify by the author.
        if external_label and external_label in label_names:
            position = "external"
        else:
            if issue["user"]["login"] in internal_usernames:
                position = "internal"
            else:
                position = "external"

        # Durations are computed with naive datetimes on both sides; open PRs
        # are measured up to "now".
        created_at = iso8601.parse_date(issue["created_at"]).replace(tzinfo=None)
        if state == "open":
            closed_at = datetime.utcnow()
        else:
            closed_at = iso8601.parse_date(issue["closed_at"]).replace(tzinfo=None)
        issue['duration'] = closed_at - created_at
        issue['org'] = user_org_mapping.get(issue['user']['login'], "other")

        if DEBUG:
            print("{owner}/{repo}#{num}: {position} {state}".format(
                owner=owner, repo=repo, num=issue["number"],
                position=position, state=state
            ), file=sys.stderr)

        durations[state][position].append(issue)
Esempio n. 25
0
#!/usr/bin/env python
"""List repos missing from repos.yaml."""

from __future__ import print_function

import yaml

from helpers import paginated_get

REPOS_URL = "https://api.github.com/orgs/{org}/repos"

# safe_load: repos.yaml is plain data, and yaml.load without an explicit
# Loader is unsafe and deprecated.
with open("repos.yaml") as repos_yaml:
    tracked_repos = yaml.safe_load(repos_yaml)

repos = list(paginated_get(REPOS_URL.format(org="edX")))

# Report public, non-fork repos that repos.yaml doesn't track.
shown_any = False
for r in repos:
    if not r['private'] and not r['fork']:
        if r['full_name'] not in tracked_repos:
            if not shown_any:
                print("\n### Untracked repos:")
            print("{r[full_name]}: {r[description]}".format(r=r))
            shown_any = True

# Report tracked repos that no longer exist on GitHub.
shown_any = False
actual_repos = set(r['full_name'] for r in repos)
for tracked in tracked_repos:
    if tracked not in actual_repos:
        if not shown_any:
            print("\n### Disappeared repos:")
        # Previously this loop only repeated the section header: it never
        # printed the repo name nor set shown_any.
        print(tracked)
        shown_any = True
Esempio n. 26
0
def get_labels(repo):
    """Page through and return the labels for `repo` from the GitHub API."""
    labels_url = LABELS_URL.format(owner_repo=repo.name)
    return paginated_get(labels_url)
Esempio n. 27
0
from __future__ import print_function

import pprint

import uritemplate

from helpers import paginated_get


EVENTS_URL = "https://api.github.com/repos/{owner}/{repo}/events"

# Walk the event stream for edx-platform and report push events, noting
# whether each push was forced.
url = uritemplate.expand(EVENTS_URL, owner='edx', repo='edx-platform')
for event in paginated_get(url):
    if event['type'] == 'PushEvent':
        forced = event['payload'].get('forced', False)
        print("{0[created_at]}: {0[type]}, {0[actor][login]} {1}".format(event, forced))
Esempio n. 28
0
"""List repos missing from repos.yaml."""

from __future__ import print_function

import yaml

from helpers import paginated_get

REPOS_URL = "https://api.github.com/orgs/{org}/repos"

# This is hacky; you need to have repo-tools-data cloned locally one dir up.
# To do this properly, you should use yamldata.py
with open("../repo-tools-data/repos.yaml") as repos_yaml:
    tracked_repos = yaml.load(repos_yaml)

repos = list(paginated_get(REPOS_URL.format(org="edX")))

shown_any = False
for r in repos:
    if not r['private'] and not r['fork']:
        if r['full_name'] not in tracked_repos:
            if not shown_any:
                print("\n### Untracked repos:")
            print("{r[full_name]}: {r[description]}".format(r=r))
            shown_any = True

shown_any = False
actual_repos = set(r['full_name'] for r in repos)
for tracked in tracked_repos:
    if tracked not in actual_repos:
        if not shown_any:
Esempio n. 29
0
def get_duration_data(
    durations,
    owner="edx",
    repo="edx-platform",
    since=None,
    external_label="open-source-contribution",
    internal_usernames=None,
    user_org_mapping=None,
):
    """
    Update `durations`, a dict of dict of lists of pull requests.

    `durations` has four lists of data, where each list contains only timedelta objects:
      age of internal open pull requests (all)
      age of external open pull requests (all)
      age of internal closed pull requests (since the `since` value)
      age of external closed pull requests (since the `since` value)

    These lists are organized into a dictionary that categorizes the lists
    by position and state.
    """
    internal_usernames = internal_usernames or set()
    user_org_mapping = user_org_mapping or {}

    url = URLObject(
        "https://api.github.com/repos/{owner}/{repo}/issues".format(
            owner=owner, repo=repo))
    # two separate URLs, one for open PRs, the other for closed PRs
    open_url = url.set_query_param("state", "open")
    closed_url = url.set_query_param("state", "closed")
    if since:
        closed_url = closed_url.set_query_param('since', since.isoformat())

    # Tag each issue with its state as it streams by.  Generator expressions
    # replace itertools.izip, which does not exist on Python 3, and stay lazy
    # on both Python 2 and 3.
    open_issues_generator = (
        (issue, "open") for issue in paginated_get(open_url))
    closed_issues_generator = (
        (issue, "closed") for issue in paginated_get(closed_url))

    for issue, state in itertools.chain(open_issues_generator,
                                        closed_issues_generator):
        # Only issues that are really pull requests count.
        if not issue.get('pull_request', {}).get('url'):
            continue

        label_names = [label["name"] for label in issue["labels"]]

        # Labelled PRs are external; otherwise classify by the author.
        if external_label and external_label in label_names:
            position = "external"
        else:
            if issue["user"]["login"] in internal_usernames:
                position = "internal"
            else:
                position = "external"

        # Durations are computed with naive datetimes on both sides; open PRs
        # are measured up to "now".
        created_at = iso8601.parse_date(
            issue["created_at"]).replace(tzinfo=None)
        if state == "open":
            closed_at = datetime.utcnow()
        else:
            closed_at = iso8601.parse_date(
                issue["closed_at"]).replace(tzinfo=None)
        issue['duration'] = closed_at - created_at
        issue['org'] = user_org_mapping.get(issue['user']['login'], "other")

        if DEBUG:
            print("{owner}/{repo}#{num}: {position} {state}".format(
                owner=owner,
                repo=repo,
                num=issue["number"],
                position=position,
                state=state),
                  file=sys.stderr)

        durations[state][position].append(issue)
Esempio n. 30
0
def get_labels(owner_repo):
    """Yield each label on `owner_repo`, stripped of its 'url' entry."""
    labels_url = LABELS_URL.format(owner_repo=owner_repo)
    for label in paginated_get(labels_url):
        label.pop('url')
        yield label
Esempio n. 31
0
import yaml

from helpers import paginated_get

REPOS_URL = "https://api.github.com/orgs/{org}/repos"

# This is hacky; you need to have repo-tools-data cloned locally one dir up.
# To do this properly, you should use yamldata.py
# safe_load: repos.yaml is plain data, and yaml.load without an explicit
# Loader is unsafe and deprecated.
with open("../repo-tools-data/repos.yaml") as repos_yaml:
    tracked_repos = yaml.safe_load(repos_yaml)

ORGS = ["edX", "edx-solutions"]
repos = []
for org in ORGS:
    repos.extend(paginated_get(REPOS_URL.format(org=org)))

# Report public, non-fork repos that repos.yaml doesn't track.
shown_any = False
for r in repos:
    if not r['private'] and not r['fork']:
        if r['full_name'] not in tracked_repos:
            if not shown_any:
                print("\n### Untracked repos:")
            print("{r[full_name]}: {r[description]}".format(r=r))
            shown_any = True

shown_any = False
actual_repos = set(r['full_name'] for r in repos)
for tracked in tracked_repos:
    if tracked not in actual_repos:
        if not shown_any: