# Exemplo n.º 1
def get_pulls(labels=None, state="open", since=None, org=False):
    """
    Fetch issues from the edx/edx-platform GitHub repo, sorted by update time.

    Arguments:
        labels: optional list of label names; returned issues must carry all of them.
        state: issue state filter ("open", "closed", ...); skipped if falsy.
        since: optional datetime; only issues updated at or after this time.
        org: if True, annotate each issue with an organization looked up in
            mapping.yaml and sort the results by that "org" value.

    Returns an iterable of JPullRequest objects (a sorted list when org=True).
    """
    url = URLObject("https://api.github.com/repos/edx/edx-platform/issues")
    if labels:
        url = url.set_query_param('labels', ",".join(labels))
    if since:
        url = url.set_query_param('since', since.isoformat())
    if state:
        url = url.set_query_param('state', state)
    url = url.set_query_param('sort', 'updated')

    org_fn = None
    if org:
        try:
            with open("mapping.yaml") as fmapping:
                # safe_load: mapping.yaml is plain data, and yaml.load without
                # an explicit Loader is unsafe and rejected by PyYAML >= 6.
                user_mapping = yaml.safe_load(fmapping)
            def_org = "other"
        except IOError:
            # No mapping file: every user falls into the placeholder org.
            user_mapping = {}
            def_org = "---"

        def org_fn(issue):
            # "user.login" is a flattened key — presumably JPullRequest
            # flattens the GitHub payload; TODO confirm against JPullRequest.
            return user_mapping.get(issue["user.login"], {}).get("institution", def_org)

    issues = JPullRequest.from_json(paginated_get(url), org_fn)
    if org:
        issues = sorted(issues, key=operator.itemgetter("org"))

    return issues
# Exemplo n.º 2
def show_pulls(jrep, labels=None, show_comments=False, state="open", since=None, org=False):
    """
    Print matching pull requests, one per line, with category headers.

    Arguments:
        jrep: unused here; kept for interface compatibility with callers.
        labels, state, since, org: passed straight through to get_pulls().
        show_comments: if True, also print the last five comments of each issue.

    Side effects: prints to stdout; each issue triggers finish_loading(),
    and show_comments issues one extra API request per issue.
    """
    issues = get_pulls(labels, state, since, org)

    # Count explicitly: the original relied on the leaked loop variable,
    # which raised NameError when there were no issues at all.
    num_issues = 0
    category = None
    for issue in issues:
        num_issues += 1
        issue.finish_loading()
        if issue.get("org") != category:
            # new category! print category header
            category = issue["org"]
            print("-- {category} ----".format(category=category))

        print(issue.format(ISSUE_FMT))

        if show_comments:
            comments_url = URLObject(issue['comments_url'])
            comments_url = comments_url.set_query_param("sort", "created")
            comments_url = comments_url.set_query_param("direction", "desc")
            comments = paginated_get(comments_url)
            # The API gives newest-first; reverse to print oldest of the five first.
            last_five_comments = reversed(more_itertools.take(5, comments))
            for comment in last_five_comments:
                print(comment.format(COMMENT_FMT))

    print()
    print("{num} pull requests".format(num=num_issues))
# Exemplo n.º 3
def get_duration_info(since=None, labels=None, pull_requests=False):
    """
    Bucket closed edx/edx-platform issues by how long they stayed open.

    Arguments:
        since: optional datetime; only issues updated at or after this time.
        labels: optional list of label names the issues must carry.
        pull_requests: if True, skip issues that are not pull requests.

    Returns a defaultdict mapping a duration segment (from get_segment)
    to the list of issue numbers that fell into that segment.
    """
    labels = labels or []

    url = URLObject("https://api.github.com/repos/edx/edx-platform/issues")
    # we only care about closed PRs for now
    url = url.set_query_param('state', 'closed')
    if labels:
        url = url.set_query_param('labels', ",".join(labels))
    if since:
        url = url.set_query_param('since', since.isoformat())

    counter = defaultdict(list)
    for issue in paginated_get(url):
        # .get(): plain issues carry no "pull_request" key, so subscripting
        # raised KeyError — same safe form get_duration_data already uses.
        if pull_requests and not issue.get('pull_request', {}).get('url'):
            continue
        num = issue['number']
        created_at = iso8601.parse_date(issue["created_at"])
        closed_at = iso8601.parse_date(issue["closed_at"])
        duration = closed_at - created_at
        segment = get_segment(duration)
        counter[segment].append(num)

    return counter
# Exemplo n.º 4
def get_duration_data(
    durations, owner="edx", repo="edx-platform", since=None,
    external_label="open-source-contribution", internal_usernames=None,
    user_org_mapping=None,
):
    """
    Update `durations`, a dict of dict of lists of pull requests.

    `durations` has four lists of data, where each list contains only timedelta objects:
      age of internal open pull requests (all)
      age of external open pull requests (all)
      age of internal closed pull requests (since the `since` value)
      age of external closed pull requests (since the `since` value)

    These lists are organized into a dictionary that categorizes the lists
    by position and state.

    Arguments:
        durations: dict of dicts (state -> position -> list) mutated in place.
        owner, repo: GitHub repository to query.
        since: optional datetime; restricts *closed* PRs to those updated since.
        external_label: label that forces a PR to count as "external".
        internal_usernames: set of logins treated as "internal" contributors.
        user_org_mapping: login -> organization name; defaults to "other".
    """
    internal_usernames = internal_usernames or set()
    user_org_mapping = user_org_mapping or {}

    url = URLObject("https://api.github.com/repos/{owner}/{repo}/issues".format(
                    owner=owner, repo=repo))
    # two separate URLs, one for open PRs, the other for closed PRs
    open_url = url.set_query_param("state", "open")
    closed_url = url.set_query_param("state", "closed")
    if since:
        closed_url = closed_url.set_query_param('since', since.isoformat())

    # Walk open issues first, then closed, tagging each with its state.
    # (The original used itertools.izip, which does not exist on Python 3;
    # this explicit loop is equivalent and runs on both 2 and 3.)
    for state, state_url in (("open", open_url), ("closed", closed_url)):
        for issue in paginated_get(state_url):
            # Skip plain issues; only pull requests carry a pull_request URL.
            if not issue.get('pull_request', {}).get('url'):
                continue

            label_names = [label["name"] for label in issue["labels"]]

            if external_label and external_label in label_names:
                position = "external"
            elif issue["user"]["login"] in internal_usernames:
                position = "internal"
            else:
                position = "external"

            # Strip tzinfo so both endpoints are naive and subtractable
            # against the naive datetime.utcnow() used for open PRs.
            created_at = iso8601.parse_date(issue["created_at"]).replace(tzinfo=None)
            if state == "open":
                closed_at = datetime.utcnow()
            else:
                closed_at = iso8601.parse_date(issue["closed_at"]).replace(tzinfo=None)
            issue['duration'] = closed_at - created_at
            issue['org'] = user_org_mapping.get(issue['user']['login'], "other")

            if DEBUG:
                print("{owner}/{repo}#{num}: {position} {state}".format(
                    owner=owner, repo=repo, num=issue["number"],
                    position=position, state=state
                ), file=sys.stderr)

            durations[state][position].append(issue)