def __init__(self):
    # Authenticated GitHub client bound to one repository.
    # NOTE(review): access_token, username and myrepo are module-level
    # globals defined elsewhere in this file — confirm before reuse.
    self.gh = Github(token=access_token, user=username, repo=myrepo)
import json
import os
import pystache
import requests
import time
import netrc
from pygithub3 import Github

repos_in = 'repos.json'
index_in = 'index.mustache'
index_out = 'index.html'

# Try to authenticate via ~/.netrc for the higher API rate limit; fall back
# to an anonymous client when no credentials are available.
# Fixed: netrc.netrc() was called OUTSIDE the try block, so a missing
# ~/.netrc file crashed the script instead of falling back.
try:
    auth = netrc.netrc()
    (login, _, password) = auth.authenticators('api.github.com')
    ghclient = Github(login=login, password=password)
    logged_in = True
except Exception:  # was a bare except: don't swallow SystemExit/KeyboardInterrupt
    ghclient = Github()
    logged_in = False


def gh_repo(name):
    """Fetch repository information for *name*, throttling anonymous requests."""
    print('Fetching "%s" repo information...' % name)
    # Use the following for development so you do not hammer the GitHub API.
    #return {'name': name, 'html_url': 'http://google.com', 'homepage': 'http://example.com', 'description': 'Description!'}
    if not logged_in:
        time.sleep(2.0)  # Take a nap so GitHub doesn't aggressively throttle us.
def update_coverage_statistics(token):
    """Regenerate the conversion-coverage report and post it to the tracking issue.

    Runs the coverage tool as a subprocess, then overwrites the body of the
    configured GitHub issue with the freshly generated markdown.
    """
    raw = sp.check_output(["python3", "tools/conversion_coverage.py", "--markdown"])
    report = raw.decode("utf-8")
    tracking_issue = Github(token).get_repo(REPOPATH).get_issue(TRANSLATIONISSUE)
    tracking_issue.edit(body=TRANSLATIONBODY.format(report))
import MySQLdb as mdb
import time
from pygithub3 import Github

# NOTE(review): connection and login strings were redacted in this source.
con = mdb.connect('', '', '', '')
cur = con.cursor(mdb.cursors.DictCursor)
gh = Github(login='', password='')

cur.execute("SELECT * FROM first_pull_requests")
pull_requests = cur.fetchall()

for pr in pull_requests:
    merged = 0 if pr["merged_at"] is None else 1

    # Activity of the submitter before this PR was opened.
    # Fixed: two interpolated values require two placeholders (the original
    # had one, raising TypeError at runtime).
    # SECURITY NOTE(review): string-built SQL — prefer cur.execute(sql, params)
    # parameter binding if these values can contain quotes.
    cur.execute(
        "SELECT COUNT(*) FROM pr_comments WHERE user_login = '%s' AND created_at < '%s'"
        % (pr["submitted_by_name"], pr["submitted_on"].strftime('%Y-%m-%d %H:%M:%S')))
    user_activity = cur.fetchone()["COUNT(*)"]

    pr_id = pr["id"]

    # Reputation: total watchers across repos the submitter owned before the PR.
    repos_watch_counts = [
        repo.watchers
        for repo in gh.repos.list(user=pr["submitted_by_name"], type="owner").all()
        if repo.created_at < pr["submitted_on"]
    ]
    reputation = sum(repos_watch_counts)

    cur.execute("SELECT COUNT(*) FROM pr_comments WHERE pr_id = %s" % pr["id"])
    number_comments = cur.fetchone()["COUNT(*)"]

    # Fixed: five inserted values need five placeholders (the original had four).
    sql = "INSERT INTO feature_set (merged, user_activity, reputation, number_comments, pr_id) VALUES (%s, %s, %s, %s, %s)" % (
        merged, user_activity, reputation, number_comments, pr_id)
def _gh():
    """Build and return a Github client bound to the ossec-hids repository."""
    return Github(user="******", repo="ossec-hids")
# usage send_to_bloom jsk-ros-pkg jsk_common import sys if len(sys.argv) > 4: print "usage: %s jsk-ros-pkg jsk_common" % sys.argv[0] exit(-1) organization = sys.argv[1] repository = sys.argv[2] if len(sys.argv) == 4: repository_name = sys.argv[3] else: repository_name = repository # if it already pull request for ros/rosdistro gh = Github(user='******', repo='rosdistro') if filter(lambda x: repository_name in x.title, gh.pull_requests.list().all()) != []: print "pull requests exists in ros/rosdistro" exit(0) import bloom from bloom.util import disable_git_clone from bloom.util import quiet_git_clone_warning from bloom.commands.release import perform_release import time disable_git_clone(True) quiet_git_clone_warning(True) repo_v = get_repository_version(organization, repository)
#!/usr/bin/env python # Generate commit aliases for jsk-ros-pkg developers import subprocess from pygithub3 import Github # use raw_input for python2 c.f. https://stackoverflow.com/questions/5868506/backwards-compatible-input-calls-in-python if hasattr(__builtins__, 'raw_input'): input = raw_input from getpass import getpass user = input('GitHub User name: ') pw = getpass('Password: '******'jsk-ros-pkg') for page in result: for member in page: user = gh.users.get(member.login) try: name = user.name alias_name = name email = user.email if not email or email == "": raise Exception("No email specified") if len(alias_name.split(" ")) > 0: alias_name = name.split(" ")[-1] alias_command = "commit-%s" % alias_name.lower() alias = "jsk-commit --author='%s <%s>'" % (name, email) subprocess.check_call([ "git", "config", "--global",
def load_repo_url(url): print "Loading %s" % url user, repo_name = _get_user_repo_name_from_url(url) gh = Github(user=GITHUB_AUTH_USER, token=GITHUB_AUTH_TOKEN) repo = gh.repos.get(user=user, repo=repo_name) return repo
def load_date_from_commit(repo_url, sha):
    """Return the author date of commit *sha* in the repository at *repo_url*."""
    owner, repo_name = _get_user_repo_name_from_url(repo_url)
    client = Github(user=GITHUB_AUTH_USER, token=GITHUB_AUTH_TOKEN)
    commit = client.git_data.commits.get(sha=sha, user=owner, repo=repo_name)
    return commit.author.date
def _initialize_git_client(self):
    """Prompt for a password and return an authenticated Github client."""
    secret = getpass.getpass('Insert your password: ')
    return Github(login_or_token=get_github_username(), password=secret)
def __update_gh(backlog, new_state):
    """Push the backlog's new state to its linked GitHub milestone."""
    client = Github(token=settings.GITHUB_TOKEN,
                    user=settings.GITHUB_OWNER,
                    repo=backlog.github_repo)
    payload = create_milestone_data(backlog, new_state)
    client.issues.milestones.update(backlog.github_number, payload)
if domain == 'github.com': api = proto + '/api.github.com/' else: api = tld + '/api/v3/' try: #are we running in CI so use the credentials inCI = False if inCI: pass else: auth = netrc.netrc() (login, _, password) = auth.authenticators(domain) ghclient = Github(login=login, password=password, base_url=api) logged_in = True except: print "Unexpected error:", sys.exc_info()[0] ghclient = Github(base_url=api) logged_in = False def gh_repo(name): print('Fetching "%s%s/%s" repo information...' % (api, owner, name)) # Use the following for development so you do not hammer the GitHub API. #return {'name': name, 'html_url': 'http://google.com', 'homepage': 'http://example.com', 'description': 'Description!'} if not logged_in: time.sleep( 2.0) # Take a nap so GitHub doesn't aggressively throttle us.
def auth(login='******', password='******'):
    """Authenticate against GitHub and return the client.

    Performs one authenticated request so bad credentials fail here
    rather than later.
    """
    client = Github(login=login, password=password)
    client.users.get()  # Auth required
    return client
import sys from pygithub3 import Github if len(sys.argv) < 2: print 'Usage: python {} userid'.format(sys.argv[0]) print 'For example: python {} mattn'.format(sys.argv[0]) print '****************************************************************' print '' print 'This script list out the public repositories created by an user' print '' print 'IMPORTANT!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!' print 'We need to install pygithub3 package' print ' $ pip install pygithub3' print '' print '****************************************************************' sys.exit(1) gh = Github() repos = gh.repos.list(sys.argv[1], type='owner').all() created_repos = [x for x in repos if not x.fork] stared = sorted((i for i in created_repos), key=lambda x: -x.stargazers_count) for repo in stared: print '{}, star:{}, forks:{}'.format(repo.name, repo.stargazers_count, repo.forks)
def load_versions(repo_url, release_type):
    """Load up released versions of bundle

    If tags are used in the repo, we simply use the tags to list the
    various releases available. If tags are not used, we snapshot the
    head commit of the branch named by *release_type* as a datestamp.

    @todo: we should be using releases not tags.
    """
    user, repo_name = _get_user_repo_name_from_url(repo_url)
    gh = Github(user=GITHUB_AUTH_USER, token=GITHUB_AUTH_TOKEN)
    releases = []
    if release_type == "tags":
        # Walk every tag, keeping only the ones that parse as semantic versions.
        for tag in gh.repos.list_tags(user=user, repo=repo_name).iterator():
            # for testing purposes, we'll allow the 'v' but this isn't technically valid.
            candidate = tag.name
            if candidate.startswith('v'):
                candidate = tag.name[1:]
            try:
                semantic_version.Version(candidate)
            except ValueError:
                continue  # invalid tag, skip it
            releases.append({
                'name': candidate,
                'sha': tag.commit.sha,
                'date': load_date_from_commit(repo_url, tag.commit.sha),
                'download': tag.zipball_url,
                'url': 'https://github.com/%s/%s/tree/%s' % (user, repo_name, tag.name)
            })
    else:
        # Treat release_type as a branch name and snapshot its head commit.
        matches = [b for b in gh.repos.list_branches(user=user, repo=repo_name).all()
                   if b.name == release_type]
        if matches:
            head = matches[0]
            release_date = load_date_from_commit(repo_url, head.commit.sha)
            releases = [{
                'name': datetime.datetime.strftime(release_date, "%Y.%m.%d.%H.%M.%S"),
                'sha': head.commit.sha,
                'date': release_date,
                'url': 'https://github.com/%s/%s/tree/%s' % (user, repo_name, release_type)
            }]
    return releases
GH_USER) LOGGER.info("Starting...") # Check for destination to save repositories to LOGGER.info("Checking to ensure %s exists.", LOCAL_REPOS_DIR) if not os.path.exists(LOCAL_REPOS_DIR): LOGGER.info("%s not found, creating...", LOCAL_REPOS_DIR) os.makedirs(LOCAL_REPOS_DIR) LOGGER.info("%s has been successfully created.", LOCAL_REPOS_DIR) else: LOGGER.info("%s found, skipping creation.", LOCAL_REPOS_DIR) # Authenticate LOGGER.info("Authenticating with GitHub API.") GH = Github(user=GH_USER, token=GH_USER_TOKEN) LOGGER.info("Successfully authenticated with GitHub API.") # List all of users repositories LOGGER.info("Capturing all of users repos from GitHub.") REPOS = GH.repos.list().all() LOGGER.info("All of users repos have been successfully captured.") # Setup list to collect repo names for further checking if directories are in # your local_repos_dir directory that are not in your GitHub repo list REPO_NAMES = [] # Iterate through list of users repositories and clone them for repo in REPOS: LOGGER.info("Processing repo %s ...", repo.name) REPO_NAMES.append(repo.name)
format_comment(comment).encode("utf-8"), gh_username.encode("utf-8"), gh_repository) retry(create_comment, comment['api_url'], comment['url']) print u"Created: {} [{} comments]".format(issue['title'], len(comments)) if __name__ == "__main__": options = read_arguments() bb_url = "https://api.bitbucket.org/1.0/repositories/{}/{}/issues".format( options.bitbucket_username, options.bitbucket_repo) # push them in GitHub (issues comments are fetched here) github_password = getpass.getpass("Please enter your GitHub password\n") github = Github(login=options.github_username, password=github_password) gh_username, gh_repository = options.github_repo.split('/') # fetch issues from Bitbucket issues = get_issues(bb_url, options.start) # Sort issues, to sync issue numbers on freshly created GitHub projects. # Note: not memory efficient, could use too much memory on large projects. for issue in sorted(issues, key=lambda issue: issue['local_id']): comments = get_comments(bb_url, issue) if options.dry_run: print "Title: {}".format(issue.get('title').encode('utf-8')) print "Body: {}".format( format_body(options, issue).encode('utf-8')) print "Comments", [
def update_translations(token):
    """Rebuild the string-table diagnostics and publish them on the tracking issue."""
    output = sp.check_output(["python3", "tools/stringtablediag.py", "--markdown"])
    markdown = output.decode("utf-8")
    repo = Github(token).get_repo(REPOPATH)
    repo.get_issue(TRANSLATIONISSUE).edit(body=TRANSLATIONBODY.format(markdown))
#coding: utf-8
from pygithub3 import Github
import getpass
import datetime
import sys

reload(sys)
sys.setdefaultencoding("utf-8")

# NOTE(review): the credential prompts were redacted in this source; restored
# as a visible username prompt plus a hidden password prompt (getpass is
# already imported above).
auth = {
    'login': raw_input("Input github username:"),
    'password': getpass.getpass("Input Password:")
}
gh = Github(**auth)

# Collect one row of metadata per repository owned by the authenticated user.
rlist = gh.repos.list()
lines = []
for repo in rlist.iterator():
    name = repo.name
    description = repo.description
    size = repo.size
    clone_url = repo.clone_url
    ctime = datetime.datetime.strftime(repo.created_at, format='%Y-%m-%d %H:%M:%S')
    mtime = datetime.datetime.strftime(repo.pushed_at, format='%Y-%m-%d %H:%M:%S')
    line = [name, description, size, clone_url, ctime, mtime]
    lines.append([str(i) for i in line])
#Make sure pygithub3 is installed in python libraries #sudo pip intsall pygithub3 from pygithub3 import Github source = Github().repos.list_contributors(user="******", repo="python") for x in source: for resource in x: print resource
from pygithub3 import Github

# Credentials were redacted in this source.
gh = Github(login='******', password='******')

me = gh.users.get()               # Auth required
davazp_user = gh.users.get('davazp')  # copitux = <User (copitux)>

# Follower lists: no argument means "the authenticated user".
my_followers = gh.users.followers.list().all()
davazp_followers = gh.users.followers.list('davazp').all()

print("davidam followers:")
print(my_followers)
print("davazp followers:")
print(davazp_followers)

# for repo in gh.get_user().get_repos():
#     print(repo.name)
#     repo.edit(has_wiki=False)
# Turn on verbose logging when requested on the command line.
if args['debug']:
    LOG.setLevel(logging.DEBUG)

# Make sure the directory that holds the cache file exists.
cache_dir = os.path.dirname(CACHE_FILEPATH)
if not os.path.exists(cache_dir):
    LOG.debug("Creating cache directory '%s'" % cache_dir)
    os.makedirs(cache_dir)

## ----------------------------------------------
# Download the schedule CSV and figure out who is the czar this month
current_czar = get_current_czar(args['schedule'], cache=args['cache'])
## ----------------------------------------------

# Token-authenticated client and a handle on the monitored repository.
gh = Github(token=args['token'])
r = gh.repos.get(user=GITHUB_USER, repo=GITHUB_REPO)

## ----------------------------------------------
# Get the mapping of Github acctnames to emails
user_emails = get_user_emails(gh, args["emails"], cache=args["cache"])

## --------------------------------
## PR MONITOR
## --------------------------------
# List of pull requests (open only):
pull_requests = gh.pull_requests.list(state='open', user=GITHUB_USER, repo=GITHUB_REPO).all()
from pygithub3 import Github

# Credentials were redacted in this source.
gh = Github(login='******', password='******')

authed_user = gh.users.get()      # Auth required
davazp = gh.users.get('davazp')   # copitux = <User (copitux)>

# No argument means "followers of the authenticated user".
followers_of_me = gh.users.followers.list().all()
followers_of_davazp = gh.users.followers.list('davazp').all()

print("davidam followers:")
print(followers_of_me)
print("davazp followers:")
print(followers_of_davazp)
# Template issues for a new plug-in: each entry is "title__sep__body".
# NOTE(review): the third entry appears to contain TWO fused templates
# ("...Implementation root... - Implementation - Developer Class__sep__...");
# a missing list separator is suspected — verify against the original file.
rootIssues = [" - Plug-In__sep__This is the root of your issue structure and must be labeled as SUPER ISSUE. It is closed only when all its children and grand children are closed.",
              " - Analisys__sep__This is the Analisys root. It is closed whenever all analisys is done. This issue must be linked to the root of the issue structure.",
              " - Implementation__sep__This is the Implementation root. It is closed whenever all implementation is done. This issue must be linked to the root of the issue structure. - Implementation - Developer Class__sep__This issue is closed when this class if fully implemented.",
              " - Implementation - Plug-in Root__sep__This issue is closed when this class if fully implemented.",
              " - Implementation - Database__sep__This issue is closed when all database classes are fully implemented.",
              " - Implementation - Database - Dao Class__sep__This issue is closed when this class if fully implemented.",
              " - Implementation - Database - Database Factory Class__sep__This issue is closed when this class if fully implemented.",
              " - Implementation - Database - Database Constants Class__sep__This issue is closed when this class if fully implemented.",
              " - Implementation - Database - Developer Database Factory Class__sep__This issue is closed when this class if fully implemented.",
              " - Implementation - Database - Database Factory Exceptions Class__sep__This issue is closed when this class if fully implemented."]

print("------------------ISSUES-------------------")
print("Root Issues--------------------------------\n")

# Authenticated connection; credentials come from variables defined elsewhere.
githubConnection = Github(login=githubLogin, password=githubPassword)
issues = githubConnection.issues

# Create one GitHub issue per template; team leaders get assignee + labels.
for rootIssue in rootIssues:
    rootIssueSep = rootIssue.split("__sep__", 2)
    titleGenerated = platform+" - "+layer+" - "+pluginName+" "+rootIssueSep[0]
    bodyGenerated = rootIssueSep[1]+"\n"+teamLeaderMessage
    print titleGenerated+"\n"+bodyGenerated
    print "------------------------------------------\n"
    if isTeamLeader:
        issues.create(dict(title=titleGenerated, body=bodyGenerated, assignee=githubLogin, labels=labels), user=repositoryUser, repo=repository)
    else:
        issues.create(dict(title=titleGenerated, body=bodyGenerated), user=repositoryUser, repo=repository)

# Public interfaces issues
titleGenerated = platform+" - "+layer+" - "+pluginName+" - Implementation - Public Interfaces"
bodyGenerated = "This issue is closed when all public interface's code is written."+"\n"+teamLeaderMessage
def _initialize_client(self):
    """Create the authenticated Github client scoped to the configured repository."""
    self._gh = Github(
        login=self.github_user,
        user=self.github_user,
        password=self.github_password,
        repo=self.github_repo,
    )
from pygithub3 import Github
import requests
import json

# Credentials were redacted in this source; this client is never used below.
gh = Github(login='******', password='******')

# NOTE(review): this is the HTML issues page, not an API root — appending the
# /stats/* paths below likely does not hit the GitHub statistics API; verify.
_url = 'https://github.com/Shippable/support/issues'

# Relative paths of the repository statistics endpoints.
commit_activity = '/stats/commit_activity'
contributors_activity = '/stats/contributors'
participants = '/stats/participation'
# Get the number of commits per hour in each day
daily_report = '/stats/punch_card'
# issue = '/issues'

"""
Each array contains the day number, hour number, and number of commits:
0-6: Sunday - Saturday
0-23: Hour of day
Number of commits
For example, [2, 14, 25] indicates that there were 25 total commits, during the 2:00pm hour on Tuesdays. All times are based on the time zone of individual commits.
"""

# Fetch the punch-card data and dump the parsed JSON when the request succeeds.
r = requests.get(_url + daily_report)
print r
if (r.ok):
    print r.text.encode('utf-8')
    repoItem = json.loads(r.text or r.content)
    print repoItem
import urllib
import json
import sys
import os
import io
import zipfile
import time

from pygithub3 import Github  # was missing: Github is used but never imported

if __name__ == '__main__':
    project_name = sys.argv[1]
    user_name = sys.argv[2]
    # NOTE(review): these prints were redacted in the source; restored to
    # echo the two command-line arguments.
    print('Username = ' + user_name)
    print('Project = ' + project_name)

    # Credentials were redacted in this source.
    gh = Github(user=user_name, repo=project_name, login='******', password='******')
    # SECURITY NOTE(review): hard-coded access token committed to source —
    # it should be revoked and loaded from the environment instead.
    token = '?access_token=4fca22d76a306f11c5cedec029ab719ab4490ede'
    # token could be gained from
    #   curl -u "zichaoqi" https://api.github.com/authorizations

    # Full commit history for the repository.
    cm_list = gh.repos.commits.list().all()

    # Per-project output directory: <cwd>/<project>_<user>
    cur_dir = os.getcwd()
    pro_dir = os.path.join(cur_dir, project_name + '_' + user_name)
    if not os.path.isdir(pro_dir):
        os.makedirs(pro_dir)

    print('API Limit = ' + str(gh.remaining_requests))
    print("#commit = " + str(len(cm_list)))
from pygithub3 import Github
import subprocess
import time
import sys

# NOTE(review): the credential prompts were redacted in this source; restored
# as plain prompts (getpass is not imported in this file, so the password is
# echoed — consider getpass.getpass instead).
username = raw_input("Username: ")
password = raw_input("Password: ")

gh = Github(login=username, password=password)

# Create the repository on GitHub, then wire up the local working copy.
repo = gh.repos.create({
    "name": raw_input("RepoName: "),
    "description": raw_input("Description: "),
    "homepage": ""
})
subprocess.call(["git", "init"])
subprocess.call(["git", "remote", "add", "origin", repo.clone_url])
from pygithub3 import Github
import os
import sys
import yaml
import operator

# Location of the Jekyll _data directory relative to this script.
DATA_DIR = os.path.join(os.path.dirname( __file__ ), '../_data')
# Repositories that make up the Atomix stack.
ATOMIX_STACK = ['atomix', 'copycat', 'catalyst', 'atomix.github.io']

# Authenticated client; credentials come from the command line (login, password).
gh = Github(login=sys.argv[1], password=sys.argv[2])


def delete_keys(dic, keys):
    # Remove the listed keys from dic in place.
    # NOTE(review): mutating while iterating .keys() is only safe on
    # Python 2 (this file uses iteritems below, so that appears intended).
    for key in dic.keys():
        if key in keys:
            del dic[key]


def retain_keys(dic, valid_keys):
    # Return a new dict containing only the valid_keys entries of dic.
    return dict(zip(valid_keys, [dic[k] for k in valid_keys]))


def get_user(login):
    # Fetch a GitHub user and keep just the displayable fields that are set.
    user = gh.users.get(login)
    valid_keys = ["name", "blog"]
    user = retain_keys(user.__dict__, valid_keys)
    user = dict((k, v) for k, v in user.iteritems() if v is not None)
    return user


# Returns all contributors sorted by number of contributions
def get_users(repos, valid_keys, fetcher):
    # NOTE(review): this function is truncated in this snippet — the body
    # below is incomplete relative to the original file.
    result = dict()
    for repo in repos:
        contributors = fetcher(repo)
import sendgrid
from auth import github_auth, sendgrid_auth
from datetime import datetime, timedelta
from dateutil import tz
from pygithub3 import Github
from pygithub3.services.repos import Commits

# Client authenticated with the (login, password) pair from auth.py.
gh = Github(login=github_auth[0], password=github_auth[1])

####################################################################
#                    Summarizer Configurations                     #
####################################################################
organization = 'agiliq'

# mailing configurations
sender = "*****@*****.**"
subject = "Agiliq-Github Summary for the day "
####################################################################


def get_repos(organization):
    """Return every repository belonging to the given organization."""
    return gh.repos.list(organization).all()