Example 1
def index_gh_issues(gh_type, pool, repo_name, since=None):
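    """Fetch all issues for repo_name from the GitHub API selected by gh_type
    ('GH' or 'GHE') and build an Elasticsearch-style bulk payload.

    Returns a (bulk_data, most_recent_timestamp) tuple: bulk_data alternates
    action/metadata dicts with document bodies, and most_recent_timestamp is
    the newest updated_at value seen, suitable as the next since argument.
    settings, iter_get_url and obj_type are assumed to be defined at module
    level; they are not shown in this snippet.
    """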
    fields = {'state': 'all'}
    if since:
        fields['since'] = since
    url = settings.GITHUB[gh_type].get('API_PATH', '')
    url += '/repos/%s/issues?%s' % (repo_name, urllib.urlencode(fields))
    issues = iter_get_url(url, pool)

    bulk_data = []
    most_recent_timestamp = ""
    for issue in issues:
        index = {}
        index["_index"] = "search"
        index["_type"] = obj_type
        index["_id"] = "%s/%s" % (repo_name, issue['number'])
        obj = {}
        obj['url'] = issue['html_url']
        obj['title'] = issue['title']
        obj['content'] = issue['body']
        obj['author'] = issue['user']['login']
        obj['updated_date'] = issue['updated_at']
        obj['status'] = issue['state']
        if issue['assignee']:
            obj['assignee'] = issue['assignee']['login']
        obj['path'] = '/' + repo_name
        obj['loc'] = {'GH': 'github', 'GHE': 'github enterprise'}[gh_type]

        bulk_data.append({'index': index})
        bulk_data.append(obj)

        if most_recent_timestamp < issue['updated_at']:
            most_recent_timestamp = issue['updated_at']

    return bulk_data, most_recent_timestamp
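
The _index/_type/_id metadata above follows the line format of Elasticsearch's bulk API, so the returned payload can be handed to a bulk call. This is a minimal sketch of that step, assuming the elasticsearch-py client and illustrative repository/pool names that are not part of the snippet:

from elasticsearch import Elasticsearch

es = Elasticsearch()  # connection details are an assumption for illustration
# `pool` stands in for whatever HTTP pool iter_get_url expects (module-level in the original)
bulk_data, last_seen = index_gh_issues('GH', pool, 'myorg/myrepo')
if bulk_data:
    es.bulk(body=bulk_data)  # alternating action/metadata and document dicts
# persist last_seen and pass it as since= on the next run for incremental indexing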
Example 2
def index_gh_issue_comments(gh_type, pool, repo_name, since=None):
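    """Fetch issue comments for repo_name and build an Elasticsearch-style
    bulk payload (action/metadata dicts alternating with document bodies).
    The issue number is recovered from each comment's issue_url. settings,
    iter_get_url and obj_type are assumed to be module-level names.
    """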
    fields = {}
    if since:
        fields = {'since': since}
    url = settings.GITHUB[gh_type].get('API_PATH', '')
    url += '/repos/%s/issues/comments?%s' % (repo_name,
                                             urllib.urlencode(fields))
    comments = iter_get_url(url, pool)

    bulk_data = []
    for comment in comments:
        # parse issue number from url
        # example: https://api.github.com/myorg/myname/myrepo/issues/1
        issue_id = urlparse(comment['issue_url']).path.split('/')[-1]

        index = {}
        index["_index"] = "search"
        index["_type"] = obj_type
        index["_id"] = "%s/%s/%s" % (repo_name, issue_id, comment['id'])
        obj = {}
        obj['url'] = comment['html_url']
        obj['title'] = 'Comment for %s issue %s' % (repo_name, issue_id)
        obj['content'] = comment['body']
        obj['author'] = comment['user']['login']
        obj['updated_date'] = comment['updated_at']
        obj['path'] = '/%s/%s' % (repo_name, issue_id)
        obj['loc'] = {'GH': 'github', 'GHE': 'github enterprise'}[gh_type]

        bulk_data.append({'index': index})
        bulk_data.append(obj)

    return bulk_data
Example 3
def index_gh_issue_comments(gh_type, pool, repo_name, since=None):
    fields = {}
    if since:
        fields = {'since': since}
    url = settings.GITHUB[gh_type].get('API_PATH', '')
    url += '/repos/%s/issues/comments?%s' % (repo_name, urllib.urlencode(fields))
    comments = iter_get_url(url, pool)

    bulk_data = []
    for comment in comments:
        # parse issue number from url
        # example: https://api.github.com/myorg/myname/myrepo/issues/1
        issue_id = urlparse(comment['issue_url']).path.split('/')[-1]

        index = {}
        index["_index"] = "search"
        index["_type"] = obj_type
        index["_id"] = "%s/%s/%s" % (repo_name, issue_id, comment['id'])
        obj = {}
        obj['url'] = comment['html_url']
        obj['title'] = 'Comment for %s issue %s' % (repo_name, issue_id)
        obj['content'] = comment['body']
        obj['author'] = comment['user']['login']
        obj['updated_date'] = comment['updated_at']
        obj['path'] = '/%s/%s' % (repo_name, issue_id)
        obj['source'] = {'GH': 'github', 'GHE': 'github enterprise'}[gh_type]

        bulk_data.append({'index': index})
        bulk_data.append(obj)

    return bulk_data
Example 4
def index_gh_issues(gh_type, pool, repo_name, since=None):
    fields = {'state': 'all'}
    if since:
        fields['since'] = since
    url = settings.GITHUB[gh_type].get('API_PATH', '')
    url += '/repos/%s/issues?%s' % (repo_name, urllib.urlencode(fields))
    issues = iter_get_url(url, pool)

    bulk_data = []
    most_recent_timestamp = ""
    for issue in issues:
        index = {}
        index["_index"] = "search"
        index["_type"] = obj_type
        index["_id"] = "%s/%s" % (repo_name, issue['number'])
        obj = {}
        obj['url'] = issue['html_url']
        obj['title'] = issue['title']
        obj['content'] = issue['body']
        obj['author'] = issue['user']['login']
        obj['updated_date'] = issue['updated_at']
        obj['status'] = issue['state']
        if issue['assignee']:
            obj['assignee'] = issue['assignee']['login']
        obj['path'] = '/' + repo_name
        obj['source'] = {'GH': 'github', 'GHE': 'github enterprise'}[gh_type]

        bulk_data.append({'index': index})
        bulk_data.append(obj)

        if most_recent_timestamp < issue['updated_at']:
            most_recent_timestamp = issue['updated_at']

    return bulk_data, most_recent_timestamp
Example 5
def get_repos():
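    """Return the full names of all non-fork repositories reachable through
    the GitHub Enterprise API, or an empty list when ghe_settings is not
    configured. ghe_settings, ghe_api_pool and utils are module-level names.
    """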
    if not ghe_settings:
        return []
    return [
        repo['full_name'] for repo in utils.iter_get_url(
            ghe_settings['API_PATH'] + '/repositories', ghe_api_pool)
        if not repo['fork']
    ]
Example 6
def _get_org_repos(org_name):
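    """Yield the full names of the organization's non-fork repositories from
    the github.com API. gh_api_pool and utils are assumed to be module-level
    names defined elsewhere in the module.
    """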
    return (repo['full_name']
            for repo in utils.iter_get_url('/orgs/%s/repos' %
                                           org_name, gh_api_pool)
            if not repo['fork'])
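
The repository helpers compose with the indexers above: enumerate repositories, then index each one. A minimal sketch of that loop, assuming the module-level ghe_api_pool and that 'GHE' is the matching gh_type; none of this wiring appears in the snippets themselves:

for repo_name in get_repos():
    issue_data, last_seen = index_gh_issues('GHE', ghe_api_pool, repo_name)
    comment_data = index_gh_issue_comments('GHE', ghe_api_pool, repo_name)
    # issue_data and comment_data would then be submitted to the search backend,
    # and last_seen persisted as the next since= value for this repository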