def set_pull_request_status(pr, state, target_url="", description='', user=None, credfile='gh.cred'):
    """Sets a state for every commit associated with a pull request.

    Parameters
    ----------
    pr : PullRequest or len-3 sequence
        A github3 pull request object or a tuple of (owner, repository, number).
    state : str
        Accepted values are 'pending', 'success', 'error', 'failure'.
    target_url : str, optional
        URL to link with this status.
    description : str, optional
        Flavor text.
    user : str, None, or NotSpecified, optional
        The username to log into github with.
    credfile : str, optional
        The github credentials file name.

    Returns
    -------
    status
        The status object created for the PR's head commit, so callers can
        detect failure (previously the result was silently discarded).
    """
    gh = GitHub()
    ensure_logged_in(gh, user=user, credfile=credfile)
    if isinstance(pr, Sequence):
        # Tuple form: (owner, repository, number).
        r = gh.repository(*pr[:2])
        pr = gh.pull_request(*pr)
    else:
        # r = gh.repository(*pr.repository)  Broken on github3.py v0.8+
        r = gh.repository(*pr.base.repo)
    status = r.create_status(pr.head.sha, state=state, target_url=target_url,
                             description=description)
    return status
def _get_latest_release(self):
    # Return Release object if new release available, else, return None
    # Alpha-channel users are offered development releases first.
    if 'alpha' in settings.update_channel:
        dev_release = self._get_dev_release()
        if dev_release:
            return dev_release
    else:
        dev_release = None
    # Check for new beta or release versions
    if not dev_release:
        owner = OWNER
        repo_name = REPO_NAME
        # If there is an issue accessing the GitHub API, return None
        try:
            gh = GitHub()
            repo = gh.repository(owner, repo_name)
        except GitHubError:
            return None
        # NOTE(review): the else-branch below returns None as soon as a
        # release is NOT newer than the current version -- this assumes
        # iter_releases() yields newest-first. Confirm against github3 docs.
        for release in repo.iter_releases():
            latest_version = release.name
            if Version(current_version) < Version(latest_version):
                # Only offer versions matching the user's update channel.
                if (((is_beta(latest_version) or is_release(latest_version)) and is_beta_channel())
                        or (is_release(latest_version) and is_release_channel())):
                    release_url = (
                        'https://github.com/%s/%s/archive/%s.tar.gz'
                        % (owner, repo_name, release.tag_name))
                    release_notes = release.body
                    return Release(latest_version, release_url, release_notes)
            else:
                return None
        # Falls through (implicit None) when no channel-appropriate release
        # newer than current_version was found.
class Selfapprove(BotPlugin):
    """Self approve your github PR via a bot"""

    def activate(self):
        """Triggers on plugin activation"""
        token = self.config["GITHUB_TOKEN"]
        self.gh = GitHub(token=token)
        # test connection, will raise if connection fails
        self.gh.is_starred("github", "gitignore")
        super(Selfapprove, self).activate()

    def get_configuration_template(self):
        """Defines the configuration structure this plugin supports"""
        return {"GITHUB_TOKEN": "tokenvalue"}

    @arg_botcmd(
        "pr",
        help="The URL of the Pull Request you would like to self approve",
        type=str,
    )
    @arg_botcmd(
        "--reason",
        help=
        "The reason you are self approving this PR instead of having a teammate approve",
        type=str,
        required=True,
    )
    @arg_botcmd("--test-only-changes",
                help="Are these test only changes?",
                action="store_true")
    def selfapprove(self, message, pr, reason, test_only_changes):
        """Approve the given PR on behalf of the requester, leaving an audit trail.

        Only runs from the #self_approve channel, only for test-only changes,
        and only against the apm_bundle repository.
        """
        if not message.to.name == "self_approve":
            return "!selfapprove command must be run from #self_approve channel"
        if not test_only_changes:
            return "Changes that touch more than just tests must be approved by another engineer."
        match = re.search(r"https://github.com/(.+)/(.+)/pull/([0-9]+)", pr)
        if not match:
            return f"PR URL {pr} did not match regex r'https://github.com/(.+)/(.+)/pull/([0-9]+)'"
        org = match.group(1)
        repo_name = match.group(2)
        if not repo_name == "apm_bundle":
            # f-prefix removed: the message has no placeholders
            return "selfapprove currently only works with apm_bundle"
        pr_num = match.group(3)
        repo = self.gh.repository(org, repo_name)
        pr = repo.pull_request(int(pr_num))
        pr.create_review(
            # typo fixed: "self-appoval" -> "self-approval"
            f"This PR was self-approved by {message.frm.fullname}. The stated reason for self-approval was: {reason}",
            event="APPROVE",
        )
        # typo fixed: "succesfully" -> "successfully"
        return f"{pr} was successfully self-approved by {message.frm.fullname}. Reason: {reason}"
def run(self):
    """
    Main worker.
    """
    name = None                # Update name, i.e: Eddy v0.9.1
    version = self.current     # Update version, i.e: 0.9.1
    url = None                 # Update HTML url, i.e: http://github.com/...
    try:
        LOGGER.info('Connecting to GitHub to retrieve update information (channel: %s)', self.channel.value)
        repository = GitHub(token='6a417ccfe9a7c526598e77a74cbf1cba6e688f0e').repository('danielepantaleone', 'eddy')
        for release in repository.releases():
            # Pre-releases are only considered on the Beta channel.
            if release.prerelease and self.channel is not Channel.Beta:
                continue
            try:
                tag_version = release.tag_name[1:]
                if NormalizedVersion(tag_version) > NormalizedVersion(version):
                    name = release.name
                    version = tag_version
                    url = release.html_url
            except IrrationalVersionError as e:
                LOGGER.warning('Failed to parse version number from TAG: %s', e)
        if version != self.current:
            LOGGER.info('Update available: %s', name)
            self.sgnUpdateAvailable.emit(name, url)
        else:
            LOGGER.info('No update available')
            self.sgnNoUpdateAvailable.emit()
    except Exception as e:
        LOGGER.warning('Failed to retrieve update data: %s', e)
        self.sgnNoUpdateDataAvailable.emit()
    self.finished.emit()
def run(self):
    """
    Main worker.
    """
    update_name = None              # i.e: Eddy v0.9.1
    update_version = self.current   # i.e: 0.9.1
    update_url = None               # i.e: http://github.com/...
    try:
        LOGGER.info('Connecting to GitHub to retrieve update information (channel: %s)', self.channel.value)
        github = GitHub(token='6a417ccfe9a7c526598e77a74cbf1cba6e688f0e')
        repo = github.repository('danielepantaleone', 'eddy')
        # On the Beta channel every release counts; otherwise skip pre-releases.
        wanted = (r for r in repo.releases()
                  if self.channel is Channel.Beta or not r.prerelease)
        for release in wanted:
            try:
                candidate = release.tag_name[1:]
                if NormalizedVersion(candidate) > NormalizedVersion(update_version):
                    update_name = release.name
                    update_version = candidate
                    update_url = release.html_url
            except IrrationalVersionError as e:
                LOGGER.warning('Failed to parse version number from TAG: %s', e)
        if update_version == self.current:
            LOGGER.info('No update available')
            self.sgnNoUpdateAvailable.emit()
        else:
            LOGGER.info('Update available: %s', update_name)
            self.sgnUpdateAvailable.emit(update_name, update_url)
    except Exception as e:
        LOGGER.warning('Failed to retrieve update data: %s', e)
        self.sgnNoUpdateDataAvailable.emit()
    self.finished.emit()
def main(args):
    """Create GitHub issues for parsed deprecations, optionally adding them
    to a project board's TODO column.

    Parameters
    ----------
    args
        Parsed CLI arguments with open file handles ``github_token``,
        ``problems``, ``template`` and a ``project_name`` string.

    Returns
    -------
    int
        0 on success (shell exit code convention).
    """
    try:
        from github3 import GitHub
    except ImportError:
        raise errors.DependencyError(
            'This command needs the github3.py library installed to work')

    token = get_token(args.github_token)
    args.github_token.close()

    deprecated = parse_deprecations(args.problems)
    args.problems.close()

    body_tmpl = args.template.read()
    args.template.close()

    project_name = args.project_name.strip().lower()
    gh_conn = GitHub(token=token)
    repo = gh_conn.repository('abadger', 'ansible')

    # Bug fix: project_column was only assigned inside the `if project_name:`
    # branch, leaving it unbound (NameError) when no project name was given.
    project_column = find_project_todo_column(repo, project_name) if project_name else None

    issues = create_issues(deprecated, body_tmpl, repo)
    if project_column:
        for issue in issues:
            project_column.create_card_with_issue(issue)
            time.sleep(0.5)  # stay clear of GitHub's abuse-rate limits
    return 0
def _get_repo(gh: github3.GitHub, owner: str, repository: str):
    """Fetch owner/repository via the given client, raising a friendly error on 404."""
    try:
        return gh.repository(owner=owner, repository=repository)
    except github3.exceptions.NotFoundError:
        message = (
            f"Can't find repository {owner}/{repository}. "
            "Maybe you need to authorize"
        )
        raise GitHubCardsException(message)
def starred(username, token, sort, repository, message):
    """GitHub starred

    creating your own Awesome List used GitHub stars!

    example: starred --username yrqgithub --sort > README.md
    """
    if repository:
        if not token:
            click.secho('Error: create repository need set --token', fg='red')
            return
        # Capture everything click.echo prints so it can later be pushed to
        # the repository's README instead of the terminal.
        # NOTE(review): sys.stdout is replaced here and never restored.
        file = BytesIO()
        sys.stdout = file
    else:
        file = None
    gh = GitHub(token=token)
    stars = gh.starred_by(username)
    click.echo(desc)  # `desc` is a module-level header blurb defined elsewhere
    repo_dict = {}
    # Bucket starred repos by primary language; flatten blurbs to one line.
    for s in stars:
        language = s.language or 'Others'
        description = html_escape(s.description).replace(
            '\n', '') if s.description else ''
        if language not in repo_dict:
            repo_dict[language] = []
        repo_dict[language].append([s.name, s.html_url, description.strip()])
    if sort:
        repo_dict = OrderedDict(sorted(repo_dict.items(), key=lambda l: l[0]))
    # Emit a table of contents linking to each language section.
    for language in repo_dict.keys():
        data = u' - [{}](#{})'.format(language, '-'.join(language.lower().split()))
        click.echo(data)
    click.echo('')
    # One markdown section per language, one bullet per repo.
    for language in repo_dict:
        click.echo('## {} \n'.format(language.replace('#', '# #')))
        for repo in repo_dict[language]:
            data = u'- [{}]({}) - {}'.format(*repo)
            click.echo(data)
        click.echo('')
    click.echo(license_.format(username=username))
    if file:
        # Push the captured markdown to the target repo's README, creating
        # the repository on first use.
        try:
            rep = gh.repository(username, repository)
            readme = rep.readme()
            readme.update(message, file.getvalue())
        except NotFoundError:
            rep = gh.create_repository(repository, 'A curated list of my GitHub stars!')
            rep.create_file('README.md', 'starred initial commit', file.getvalue())
        click.launch(rep.html_url)
def deactivate_hook(token, owner, repo):
    """Deactivate goodtables hook for GitHub repo.
    """
    repository = GitHub(token=token).repository(owner, repo)
    goodtables_hooks = (h for h in repository.iter_hooks()
                        if h.config.get('is_goodtables_hook'))
    for hook in goodtables_hooks:
        hook.delete()
def __init__(self, repo_user='******', repo_name='semanario'):
    """
    @params
        repo_user - name of organization or user
        repo_name - repository name
    """
    # Anonymous client is sufficient for read-only repository access.
    self.repo = GitHub().repository(repo_user, repo_name)
class GitHubHandler(BaseHandler):
    """Fetches repository metadata and commit activity from the GitHub API."""

    title = "Github"
    url_regex = '(http|https|git)://github.com/'
    url = 'https://github.com'
    # Captures the repository-name segment of a GitHub URL.
    repo_regex = r'(?:http|https|git)://github.com/[^/]*/([^/]*)/{0,1}'
    slug_regex = repo_regex

    def __init__(self):
        # Authenticated access gets a much higher API rate limit.
        if settings.GITHUB_USERNAME:
            self.github = login(settings.GITHUB_USERNAME, settings.GITHUB_PASSWORD)
        else:
            self.github = GitHub()

    def manage_ratelimit(self):
        # Busy-wait (1s per poll) until the remaining API budget recovers.
        while self.github.ratelimit_remaining < 10:
            sleep(1)

    def fetch_metadata(self, package):
        """Populate watcher/fork/description/participant fields on *package*."""
        self.manage_ratelimit()
        username, repo_name = package.repo_name().split('/')
        repo = self.github.repository(username, repo_name)
        if repo is None:
            return package
        package.repo_watchers = repo.watchers
        package.repo_forks = repo.forks
        package.repo_description = repo.description
        contributors = [x.login for x in repo.iter_contributors()]
        if contributors:
            # Deduplicated, comma-joined contributor logins.
            package.participants = ','.join(uniquer(contributors))
        return package

    def fetch_commits(self, package):
        """Store a year's worth (52 weeks) of commit-activity totals on *package*."""
        self.manage_ratelimit()
        username, repo_name = package.repo_name().split('/')
        repo = self.github.repository(username, repo_name)
        if repo is None:
            return package
        package.commit_list = str([x['total'] for x in repo.iter_commit_activity(number=52)])
        # Don't persist when there was no activity data at all.
        if package.commit_list.strip() == '[]':
            return package
        package.save()
        return package
def deactivate_hook(token, owner, repo):
    """Deactivate goodtables hook for GitHub repo.
    """
    client = GitHub(token=token)
    repository = client.repository(owner, repo)
    for hook in repository.iter_hooks():
        # Only remove the hook that points at our configured endpoint.
        if hook.config.get('url') != settings.GITHUB_HOOK_URL:
            continue
        hook.delete()
def __init__(self, repo_user='******', repo_name='semanario'):
    """
    @params
        repo_user - name of organization or user
        repo_name - repository name
    """
    gh_client = GitHub()  # anonymous, read-only access
    self.repo = gh_client.repository(repo_user, repo_name)
def starred(username, token, sort, repository, message):
    """GitHub starred

    creating your own Awesome List used GitHub stars!

    example: starred --username maguowei --sort > README.md
    """
    if repository:
        if not token:
            click.secho('Error: create repository need set --token', fg='red')
            return
        # Capture all click output in memory so it can be committed to the
        # repo's README. NOTE(review): sys.stdout is never restored.
        file = BytesIO()
        sys.stdout = file
    else:
        file = None
    gh = GitHub(token=token)
    stars = gh.starred_by(username)
    click.echo(desc)  # module-level header text defined elsewhere in the file
    repo_dict = {}
    # Group starred repos by language; each entry is
    # [full_name, name, html_url, description, homepage].
    for s in stars:
        language = s.language or 'Others'
        description = html_escape(s.description).replace('\n', '') if s.description else ''
        if language not in repo_dict:
            repo_dict[language] = []
        repo_dict[language].append([s.full_name, s.name, s.html_url, description.strip(), s.homepage or ''])
    if sort:
        # Sort languages alphabetically, then repos by short name within each.
        repo_dict = OrderedDict(sorted(repo_dict.items(), key=lambda l: l[0]))
        for language in repo_dict:
            repo_dict[language] = sorted(repo_dict[language], key=lambda l: l[1])
    # Table of contents linking to each language section.
    for language in repo_dict.keys():
        data = u' - [{}](#{})'.format(language, '-'.join(language.lower().split()))
        click.echo(data)
    click.echo('')
    for language in repo_dict:
        click.echo('## {} \n'.format(language.replace('#', '# #')))
        for repo in repo_dict[language]:
            # Entry indexes: 0=full_name, 2=html_url, 3=description,
            # 4=homepage (index 1, the short name, is only used for sorting).
            data = u'* [{0}]({2}) - {3} [{4}]({4})'.format(*repo)
            click.echo(data)
        click.echo('')
    click.echo(license_.format(username=username))
    if file:
        # NOTE(review): on github3.py v1+, a missing repo raises NotFoundError
        # instead of returning None, so the create-branch may be unreachable
        # -- confirm against the pinned github3 version.
        rep = gh.repository(username, repository)
        if rep:
            readme = rep.readme()
            readme.update(message, file.getvalue())
        else:
            rep = gh.create_repository(repository, 'A curated list of my GitHub stars!')
            rep.create_file('README.md', 'starred initial commit', file.getvalue())
        click.launch(rep.html_url)
def one_repo_only(owner, reponame, gh_token=''):
    """
    Go back and see why this or that denied request.

    Provide a valid token.
    """
    gh = GitHub(token=gh_token)
    issue_body = ISSUE_BODY.format(gh.me())
    target_repo = gh.repository(owner, reponame)
    created = target_repo.create_issue(ISSUE_TITLE, body=issue_body)
    print(created)
def find_update():
    """Return the latest tag of the phac-nml/irida-miseq-uploader repository.

    Raises
    ------
    Exception
        Re-raises whatever prevented contacting GitHub (after logging).
    """
    logging.debug("Checking remote for new updates.")
    try:
        gh = GitHub()
        repo = gh.repository("phac-nml", "irida-miseq-uploader")
        # get the latest tag from github
        return next(repo.iter_tags(number=1))
    except Exception:
        # Was a bare `except:`, which also swallowed KeyboardInterrupt and
        # SystemExit; narrowed to Exception. (logging.warn is a deprecated
        # alias of logging.warning.)
        logging.warning("Couldn't reach github to check for new version.")
        raise
class GitHubIssues: def __init__(self, default_owner: str = DEFAULT_REPO_OWNER, default_repo: str = DEFAULT_REPO_NAME) -> None: self.session = GitHub() self.default_owner = default_owner self.default_repo = default_repo self.logger = logging.getLogger(self.__class__.__qualname__) self.repos = RepoDict(self.default_owner, self.session) self.issues: Dict[int, Issue] = {} self.issue_iterator: Optional[Iterable[Issue]] = None self.ptbcontribs: Dict[str, PTBContrib] = {} self.issues_lock = threading.Lock() self.ptbcontrib_lock = threading.Lock() def set_auth(self, client_id: str, client_secret: str) -> None: self.session = login(client_id, client_secret) self.repos.update_session(self.session) @property def all_ptbcontribs(self) -> List[PTBContrib]: with self.ptbcontrib_lock: return list(self.ptbcontribs.values()) @property def all_issues(self) -> List[Issue]: with self.issues_lock: return list(self.issues.values()) def get_issue(self, number: int, owner: str = None, repo: str = None) -> Optional[Issue]: if owner or repo: self.logger.info( "Getting issue %d for %s/%s", number, owner or self.default_owner, repo or self.default_repo, ) try: if owner is not None: repository = self.session.repository(owner, repo or self.default_repo) gh_issue = repository.issue(number) else: repository = self.repos[repo or self.default_repo] if repo is None: if issue := self.issues.get(number): return issue gh_issue = repository.issue(number) else: gh_issue = repository.issue(number)
class QAzz(object):
    """docstring for ClassName"""
    # Heuristic quality-assurance summary for a GitHub repository (Python 2).

    def __init__(self):
        # Anonymous API access; swap for login(username, pwd) to raise limits.
        self.gh = GitHub()  # login(username,pwd)
        self.repo = self._get_repository('gbrindisi', 'wordpot')
        self.contributors = []
        self.last_commit = None
        self._get_contributors_statistics()

    def _get_repository(self, owner, repository):
        return self.gh.repository(owner, repository)

    def _get_contributors_statistics(self):
        # Collect per-contributor totals and the most recent week with activity.
        for c in self.repo.iter_contributor_statistics():
            contributor = {'name': c.author, 'total': c.total, 'last': None}
            # Walk weeks newest-first; the first non-zero week is the last commit.
            for w in reversed(c.alt_weeks):
                if w['commits'] != 0:
                    contributor['last'] = w['start of week']
                    break
            self.contributors.append(contributor)
        # Sort by date
        self.contributors = sorted(self.contributors, key=lambda k: k['last'])
        # Store last commit
        self.last_commit = self.contributors[-1]['last']

    def _print_banner(self):
        # ASCII-art banner (unicode literal for the box-drawing glyphs).
        print u"""
 ██████╗  █████╗ ███████╗███████╗    ..??
██╔═══██╗██╔══██╗╚══███╔╝╚══███╔╝
██║   ██║███████║  ███╔╝   ███╔╝
██║▄▄ ██║██╔══██║ ███╔╝   ███╔╝
╚██████╔╝██║  ██║███████╗███████╗
 ╚══▀▀═╝ ╚═╝  ╚═╝╚══════╝╚══════╝

 GitHub Repositories Heuristic Quality Assurance Helper
 by @gbrindisi
"""

    def go(self):
        # Print the banner followed by the repo's vital statistics.
        self._print_banner()
        print "[*] Created: %s" % self.repo.created_at
        print "[*] Last Commit: %s" % self.last_commit
        print "[*] Stargazers: %s" % self.repo.stargazers
        print "[*] Watchers: %s" % self.repo.watchers
        print "[+] Contributors: "
        for c in self.contributors:
            print "     |- %s with %d commits (last: %s)" % (c['name'], c['total'], c['last'])
def manage_daily_builds():
    """Keep only the two newest nightly releases; prune stale assets elsewhere."""
    # obtain git handle
    gh = GitHub(GITHUB_API, token=AUTH_TOKEN)
    repository = gh.repository(REPOSITORY_OWNER, REPOSITORY_REPO)
    # get list of releases
    # NOTE(review): releases() may return a one-shot iterator on some github3
    # versions; it is consumed twice below -- confirm it re-iterates cleanly.
    releases = repository.releases()
    # extract keys and sort by build date
    release_keys = {
        x.id: x.created_at
        for x in releases if "Nightly development build" in x.title
    }
    sorted_keys = sorted(release_keys.items(), reverse=True, key=lambda x: x[1])
    print('%s' % (pformat(sorted_keys)))
    # filter to obtain the keys to delete (everything after the two newest)
    delete_keys = [v[0] for v in sorted_keys[2:]]
    print('Deleting releases: %s' % (pformat(delete_keys)))
    # iterate, deleting the releases and corresponding tags
    for rel in releases:
        print('examining rel %d from %s...' % (rel.id, str(rel.created_at)))
        if rel.id in delete_keys:
            print(' deleting release id %d and tag %s.' % (rel.id, rel.tag_name))
            # Fetch the tag ref before deleting the release that owns it.
            rel_tag_ref = repository.ref('tags/%s' % (rel.tag_name))
            rel.delete()
            if rel_tag_ref is not None:
                print(' deleting tag %s' % (rel_tag_ref.ref))
                rel_tag_ref.delete()
        else:
            # Look for stale files in the release
            assets = rel.assets()
            print('In release %s found assets:' % (rel.name))
            for asset in assets:
                match = PRODUCT_ASSET_PATTERN.search(asset.name)
                print(' asset named %s matches %s' % (asset.name, match.group(1) if match is not None else 'None'))
            # Group-1 of the pattern is treated as a sortable build timestamp.
            build_times = sorted([
                PRODUCT_ASSET_PATTERN.search(x.name).group(1)
                for x in assets if PRODUCT_ASSET_PATTERN.search(x.name)
            ])
            latest_build_time = build_times[-1] if build_times else None
            print('Lastest build time is %s' % (latest_build_time))
            # Delete every matching asset that is older than the newest build.
            for asset in assets:
                match = PRODUCT_ASSET_PATTERN.search(asset.name)
                # print(' asset named %s matches %s' % (asset.name, match.group(1) if match is not None else 'None'))
                if match is not None:
                    asset_build_time = match.group(1)
                    if asset_build_time != latest_build_time:
                        print('deleting stale asset %s' % (asset.name))
                        asset.delete()
def callAnonymous():
    """Demonstrate anonymous (unauthenticated) GitHub API access."""
    api = GitHub()  # no credentials supplied
    queried_user = api.user('QueensGambit')
    barcode_repo = api.repository('QueensGambit', 'Barcode-App')
    for header, value in (('usrQG.msg:', queried_user),
                          ('repos:', barcode_repo)):
        print(header)
        print(value)
    emoji_map = api.emojis()
    print('lstEmojis:')
    print(emoji_map)
def run(self):
    """Return fork and star counts for the corda/corda repository.

    Returns
    -------
    dict
        ``{"github_forks": <int>, "github_stars": <int>}``

    Raises
    ------
    ImportError
        If github3.py is not installed (propagates naturally; the previous
        ``except ImportError as ie: raise ie`` was a no-op re-raise).
    """
    from github3 import GitHub

    corda = GitHub().repository("corda", "corda")
    return {
        "github_forks": corda.fork_count,
        "github_stars": corda.stargazers,
    }
def verify_hook(owner, repo, url, events, user=None, credfile='gh.cred'):
    """Ensures that the github WebURL API hook has been set up properly.

    Parameters
    ----------
    owner : str
        The GitHub repository owner.
    repo : str
        The GitHub repository name.
    url : str
        The url of the hook.
    events : list of str
        The list GitHub events that this hook should trigger on. GitHub
        defaults this to ['pull'] but ['pull_request'] is a more reasonable
        value.
    user : str, None, or NotSpecified, optional
        The username to log into github with.
    credfile : str, optional
        The github credentials file name.

    Raises
    ------
    RuntimeError
        If the hook could not be created or updated.
    """
    gh = GitHub()
    ensure_logged_in(gh, user=user, credfile=credfile)
    r = gh.repository(owner, repo)
    # Look for an existing 'web' hook already pointing at our url.
    for hook in r.iter_hooks():
        if hook.name != 'web':
            continue
        elif hook.config['url'] == url:
            break
    else:
        # for/else: no matching hook found (or no hooks at all) -- create one.
        hook = r.create_hook(name='web',
                             config={"url": url, "content_type": "json"},
                             events=events, active=True)
    if hook is None:
        msg = ("failed to create github webhook for {0}/{1} pointing to {2} with "
               "the {3} events").format(owner, repo, url, ", ".join(events))
        raise RuntimeError(msg)
    # Bring the found-or-created hook's settings in line with what we want;
    # only send fields that actually differ.
    update = {}
    if hook.config['url'] != url:
        update['url'] = url
    if hook.config['content_type'] != 'json':
        update['content_type'] = 'json'
    if hook.events is None or set(hook.events) != set(events):
        update['events'] = events
    if not hook.active:
        update['active'] = True
    if len(update) > 0:
        status = hook.edit(**update)
        if not status:
            msg = ("failed to update github webhook for {0}/{1} pointing to {2} with "
                   "the {3} events").format(owner, repo, url, ", ".join(events))
            raise RuntimeError(msg)
def _get_files(owner, repo, sha, tokens):
    """Get repo file paths
    """
    # TODO: use other tokens if first fails
    api = GitHub(token=tokens[0])
    repo_api = api.repository(owner, repo)
    # Prefer the Tree API; fall back to the Contents API when truncated.
    files = _get_files_tree_api(repo_api, sha)
    if files is None:
        files = _get_files_contents_api(repo_api, sha)
    remaining = api.rate_limit()['rate']['remaining']
    log.debug('Remaining GitHub API calls: %s', remaining)
    return files
def uploadAsset(token, repository, release_id, asset, file_name, label=''):
    '''Upload given asset to a release.'''
    # Split unified owner/repo string into separated variables
    owner, repo = repository.split('/')
    gh = GitHub(token=token)
    release = gh.repository(owner, repo).release(release_id)
    return release.upload_asset(
        content_type='application/zip',
        name=file_name,
        asset=asset,
    )
def get_repository_info(owner, name):
    """
    Get the relevant information needed for the repository from
    its owner login and name.
    """
    LOGGER.info("Getting info for %s/%s", owner, name)
    client = GitHub()  # anonymous client
    # if the repo is not found, raise an error
    try:
        repository = client.repository(owner, name)
        info = {
            "name": name,
            "owner": owner,
            "language": repository.language,
            "url": repository.html_url,
            "stars": repository.stargazers_count,
            "stars_display": numerize.numerize(repository.stargazers_count),
            "last_modified": repository.last_modified,
            "id": str(repository.id),
            "objectID": str(repository.id),  # for indexing on algolia
        }
        # the latest issues with the tag
        info["issues"] = [
            {
                "title": issue.title,
                "url": issue.html_url,
                "number": issue.number,
                "created_at": issue.created_at.isoformat(),
            }
            for issue in repository.issues(
                labels=ISSUE_LABELS,
                state=ISSUE_STATE,
                number=ISSUE_LIMIT,
                sort=ISSUE_SORT,
                direction=ISSUE_SORT_DIRECTION,
            )
        ]
        return info
    except exceptions.NotFoundError:
        raise RepoNotFoundException()
def activate_hook(token, owner, repo):
    """Activate goodtables hook for GitHub repo.
    """
    hook_config = {
        'content_type': 'json',
        'secret': settings.GITHUB_HOOK_SECRET,
        'url': settings.GITHUB_HOOK_URL,
    }
    hook_events = ['pull_request', 'push']
    repository = GitHub(token=token).repository(owner, repo)
    repository.create_hook('web', config=hook_config, events=hook_events)
async def git(self, ctx: SlashContext):
    """Reply with repository statistics for vulcan-dev/vulcan-bot."""
    repo = GitHub().repository('vulcan-dev', 'vulcan-bot')
    embed = Embed(title='Git', color=0x01dae9)
    embed.set_author(name=self.bot.user.name, icon_url=self.bot.user.avatar_url)
    fields = (
        ('Git URL', 'https://github.com/vulcan-dev/vulcan-bot', False),
        ('Created At', repo.created_at, False),
        ('Last Update', repo.updated_at, True),
        ('Stars', repo.stargazers_count, True),
        ('Size', repo.size, True),
    )
    for field_name, field_value, inline in fields:
        embed.add_field(name=field_name, value=field_value, inline=inline)
    await ctx.respond()
    await ctx.send(embed=embed)
def activate_hook(token, owner, repo):
    """Activate goodtables hook for GitHub repo.
    """
    repository = GitHub(token=token).repository(owner, repo)
    repository.create_hook(
        'web',
        config={
            'content_type': 'json',
            'secret': settings.GITHUB_HOOK_SECRET,
            # We can't use url_for here because there is no app context
            'url': '%s/github/hook' % settings.BASE_URL,
            'is_goodtables_hook': True,
        },
        events=['pull_request', 'push'],
    )
def go_forth_and_multiply(owner, listfile, gh_token=''):
    """
    Open issues!

    Provide a valid token.
    """
    gh = GitHub(token=gh_token)
    body = ISSUE_BODY.format(gh.me())
    # Only allowed to spam pre-approved list now.
    with open(listfile) as fin:
        repositories = [line.strip() for line in fin.readlines()
                        if not line.startswith('#')]
    total = len(repositories)
    if total == 0:
        print('No repository to process!')
        return
    chunk_size = 30
    n_chunks = math.ceil(total / chunk_size)
    for chunk_index in range(n_chunks):
        start = chunk_index * chunk_size
        for reponame in repositories[start:start + chunk_size]:
            time.sleep(0.5)  # Prevent API limit but not abuse detection
            repo = gh.repository(owner, reponame)
            if repo.archived:
                print('Skipped {0} -- archived'.format(repo.name))
                continue
            try:
                issue = repo.create_issue('Update the help', body=body)
            except Exception as e:  # denied!
                print('Skipped {0} -- {1}'.format(repo.name, str(e)))
            else:
                print(issue)
        if chunk_index + 1 < n_chunks:
            time.sleep(10)  # Prevent abuse detection, maybe
def check_for_newer_wp_release():
    """Assert that the template supports the newest WordPress tag on GitHub."""
    init()  # colorama: enable ANSI colors on all platforms
    most_recent_version_supported = latest_template_wordpress_version()
    g = GitHub(token=os.environ.get("GITHUB_TOKEN"))
    repo = g.repository("wordpress", "wordpress")
    tags = [t.name for t in repo.tags()]
    version = parse_version(most_recent_version_supported)
    # Fail loudly if any non-beta tag is newer than what the template supports.
    # NOTE(review): assert is stripped under `python -O` -- confirm this runs
    # only in test/CI contexts.
    for tag in tags:
        if not re.search("[Bb]eta", tag):
            assert version >= parse_version(tag), (
                Fore.RED
                + Style.BRIGHT
                + "\nIt would seem there is a newer version of "
                "WordPress available: {}-- update the template "
                "to support it!\n" + Fore.RESET + Style.RESET_ALL
            ).format(tag)
    print Fore.GREEN + Style.BRIGHT + "Looks like {} is the most".format(
        most_recent_version_supported
    ), "recent version of WordPress. The template supports that, so we're good!" + "\n" + Fore.RESET + Style.RESET_ALL
    deinit()
def go_forth_and_multiply(owner, listfile, gh_token=''):
    """
    Open issues!

    Provide a valid token.
    """
    client = GitHub(token=gh_token)
    issue_body = ISSUE_BODY.format(client.me())
    # Only allowed to spam pre-approved list now.
    with open(listfile) as handle:
        targets = [entry.strip() for entry in handle.readlines()
                   if not entry.startswith('#')]
    if len(targets) == 0:
        print('No repository to process!')
        return
    per_chunk = 30
    chunks = [targets[i:i + per_chunk] for i in range(0, len(targets), per_chunk)]
    for idx, chunk in enumerate(chunks):
        for reponame in chunk:
            time.sleep(0.5)  # Prevent API limit but not abuse detection
            repo = client.repository(owner, reponame)
            if repo.archived:
                print('Skipped {0} -- archived'.format(repo.name))
                continue
            try:
                opened = repo.create_issue('Update the help', body=issue_body)
            except Exception as e:  # denied!
                print('Skipped {0} -- {1}'.format(repo.name, str(e)))
            else:
                print(opened)
        if idx + 1 < len(chunks):
            time.sleep(10)  # Prevent abuse detection, maybe
class GithubAvroSchemaRetriever(AvroSchemaRetriever):
    """Retrieves and parses Avro schemas stored in a GitHub repository."""

    def __init__(self, **kwargs):
        # Imported lazily so github3 is only required when this retriever is used.
        from github3 import login, GitHub
        self.repo_owner = kwargs.get('repo_owner')
        self.repo_name = kwargs.get('repo_name')
        self.branch = kwargs.get('branch')
        self.base_path = kwargs.get('basepath')
        self.extension = kwargs.get('extension')
        # Authentication preference: user/password, then token, then anonymous.
        if 'git_user' in kwargs and 'git_password' in kwargs:
            self.git_user = kwargs.get('git_user')
            self.git_password = kwargs.get('git_password')
            self.git = login(self.git_user, self.git_password)
        elif 'token' in kwargs:
            self.token = kwargs.get('token')
            self.git = login(token=self.token)
        else:
            self.git_user = None
            self.git_password = None
            self.git = GitHub()
        super().__init__()

    def get_all_schemas(self, **kwargs) -> ExpiringDict:
        """Walk the repository tree and return parsed schemas keyed by file stem."""
        repo = self.git.repository(self.repo_owner, self.repo_name)
        contents = repo.directory_contents(self.base_path, self.branch, return_as=dict)
        # Cached results expire after one day (86400 s).
        schemas = ExpiringDict(max_len=100, max_age_seconds=86400)
        schemas_final = self.get_schema_content(repo, contents, schemas)
        return schemas_final

    def get_schema_content(self, repo, contents, schema_dict):
        """Recursively collect schema files matching self.extension into schema_dict."""
        for key, content in contents.items():
            if content.type == 'file' and self.extension in key:
                # read schema; the key is the filename with the extension stripped
                schema_dict[key.split(f"{self.extension}")[0]] = parse_schema(loads(
                    repo.file_contents(content.path, self.branch).decoded))
            elif content.type == 'dir':
                # Descend into subdirectories.
                self.get_schema_content(repo, repo.directory_contents(content.path, self.branch, return_as=dict), schema_dict)
        return schema_dict
def check_for_newer_es_release():
    """Assert that the template supports the newest elasticsearch tag on GitHub."""
    init()  # colorama: enable ANSI colors on all platforms
    most_recent_version_supported = latest_template_es_version()
    g = GitHub(token=os.environ.get('GITHUB_TOKEN'))
    repo = g.repository('elasticsearch', 'elasticsearch')
    tags = [t.name for t in repo.tags()]
    version = parse_version(most_recent_version_supported)
    # Only release tags (vX.Y.Z, excluding betas) are compared.
    # NOTE(review): assert is stripped under `python -O` -- confirm this runs
    # only in test/CI contexts.
    for tag in tags:
        if tag.startswith('v') and not re.search('[Bb]eta', tag):
            assert version >= parse_version(tag), \
                (Fore.RED + Style.BRIGHT + \
                 '\nIt would seem there is a newer version of '
                 'elasticsearch available: {}-- update the template '
                 'to support it!\n' + Fore.RESET + Style.RESET_ALL).format(tag)
    print Fore.GREEN + Style.BRIGHT + \
        "Looks like {} is the most".format(most_recent_version_supported), \
        "recent version of es. The template supports that, so we're good!" + \
        "\n" + Fore.RESET + Style.RESET_ALL
    deinit()
def check_for_newer_wp_release():
    """Assert that the template supports the newest WordPress tag on GitHub."""
    init()  # colorama: enable ANSI colors on all platforms
    most_recent_version_supported = latest_template_wordpress_version()
    g = GitHub(token=os.environ.get('GITHUB_TOKEN'))
    repo = g.repository('wordpress', 'wordpress')
    tags = [t.name for t in repo.tags()]
    version = parse_version(most_recent_version_supported)
    # Fail loudly if any non-beta tag is newer than the supported version.
    # NOTE(review): assert is stripped under `python -O` -- confirm this runs
    # only in test/CI contexts.
    for tag in tags:
        if not re.search('[Bb]eta', tag):
            assert version >= parse_version(tag), \
                (Fore.RED + Style.BRIGHT + \
                 '\nIt would seem there is a newer version of '
                 'WordPress available: {}-- update the template '
                 'to support it!\n' + Fore.RESET + Style.RESET_ALL).format(tag)
    print Fore.GREEN + Style.BRIGHT + \
        "Looks like {} is the most".format(most_recent_version_supported), \
        "recent version of WordPress. The template supports that, so we're good!" + \
        "\n" + Fore.RESET + Style.RESET_ALL
    deinit()
def get_pr_obj_from_pr_json(
    pr_json: Union[Dict, LazyJson],
    gh: github3.GitHub,
) -> github3.pulls.PullRequest:
    """Produce a github3 pull request object from pr_json.

    Parameters
    ----------
    pr_json : dict-like
        A dict-like object with the current PR information.
    gh : github3 object
        The github3 object for interacting with the GitHub API.

    Returns
    -------
    pr_obj : github3.pulls.PullRequest
        The pull request object.
    """
    reponame = pr_json["base"]["repo"]["name"]
    number = pr_json["number"]
    return gh.repository("conda-forge", reponame).pull_request(number)
def upload(options, info):
    """Upload the release artifacts to GitHub.

    Creates two pre-releases (plain and WordPress flavours) tagged with
    ``options.version`` and attaches the corresponding zip from ``bin/``.

    Raises
    ------
    GitHubError
        Propagated unchanged from release creation / asset upload (the
        previous ``except GitHubError as e: raise e`` blocks were no-ops).
    """
    # read the github token
    config = configparser.ConfigParser()
    config.read("personal.properties")
    myToken = config.get("GitHub", "token")
    # Fix: the token is no longer printed -- leaking credentials to
    # stdout/CI logs is a security problem.
    gh = GitHub(token=myToken)
    repo = gh.repository("toomasr", "jspgnviewer")

    def _release_with_asset(release_name, tag_name, zip_path, asset_name):
        # One pre-release per artifact; the asset is the zip itself.
        release = repo.create_release(tag_name, name=release_name,
                                      prerelease=True)
        # Fix: open in binary mode (zips are not text) and close the file
        # deterministically via the context manager.
        with open(zip_path, "rb") as f:
            release.upload_asset(content_type='application/zip',
                                 name=asset_name, asset=f)

    _release_with_asset("JsPgnViewer %s" % options.version,
                        "jspgnviewer-%s" % options.version,
                        "bin/jspgnviewer-%s.zip" % options.version,
                        "jspgnviewer-%s.zip" % options.version)

    _release_with_asset("JsPgnViewer WordPress %s" % options.version,
                        "jspgnviewer-wordpress-%s" % options.version,
                        "bin/pgnviewer-%s.zip" % options.version,
                        "pgnviewer-%s.zip" % options.version)
def ebooks_in_github_release(repo_owner, repo_name, tag, token=None):
    """
    returns a list of (book_type, book_name) for a given GitHub release
    (specified by owner, name, tag). token is a GitHub authorization
    token -- useful for accessing higher rate limit in the GitHub API
    """
    # map mimetype to file extension
    EBOOK_FORMATS = {mimetype: ext for (ext, mimetype) in settings.CONTENT_TYPES.items()}
    # anonymous access unless a token was supplied
    gh = login(token=token) if token is not None else GitHub()
    repo = gh.repository(repo_owner, repo_name)
    release = release_from_tag(repo, tag)
    results = []
    for asset in release.iter_assets():
        book_type = EBOOK_FORMATS.get(asset.content_type)
        if book_type is not None:
            results.append((book_type, asset.name))
    return results
def update(trans, user=None, credfile='gh.cred'):
    """Takes a list of transactions and updates it with the latest PRs."""
    hist = trans['history']
    # PR numbers already tallied on earlier runs; prevents double counting.
    previous_prs = set(trans.get('pull_requests', set()))
    gh = GitHub()
    ensure_logged_in(gh, user=user)
    r = gh.repository(trans['owner'], trans['repo'])
    for pr in r.iter_pulls(state='closed'):
        if pr.number in previous_prs:
            # prevents excessive queries
            continue
        pr.refresh()  # pull the full PR record (merged_by, body, ...)
        if pr.merged_by is None:
            # Closed without being merged: nothing to tally.
            continue
        num = pr.number
        print("Tallying PR {0}".format(num))
        previous_prs.add(num)
        is_self_merge = (pr.user == pr.merged_by)
        # Everyone who commented, plus the requester, earns one cone each.
        unames = {pr.user.login}
        for comment in pr.iter_comments():
            unames.add(comment.user.login)
        if is_self_merge:
            # no cones awarded for self-merges
            unames.discard(pr.user.login)
        for u in unames:
            tran = {'player': u, 'pr': num, 'cones': 1}
            tran['kind'] = 'requester' if u == pr.user.login else 'reviewer'
            hist.append(tran)
        # apply bounties: a marker in the PR body (format defined by
        # BOUNTY_RE elsewhere) transfers cones from poster to merger.
        m = BOUNTY_RE.search(pr.body_text)
        if m is not None and not is_self_merge:
            bounty = int(m.group(1))
            hist.append({'player': pr.user.login, 'pr': num, 'cones': -bounty,
                         'target': pr.merged_by.login, 'kind': 'bounty_poster'})
            hist.append({'player': pr.merged_by.login, 'pr': num, 'cones': bounty,
                         'target': pr.user.login, 'kind': 'bounty_winner'})
    # Persist the updated tally set, sorted for stable serialization.
    trans['pull_requests'] = sorted(previous_prs)
def get_default_repo_details(owner, repo, token):
    """Fetch details about the head of the default branch of ``owner/repo``.

    Returns a dict with the branch name, head commit sha, author login and
    commit message, or None if anything fails while talking to the API.
    """
    try:
        gh = GitHub(token=token)
        gh_repo = gh.repository(owner, repo)
        head = gh_repo.branch(gh_repo.default_branch).to_json()
        # Build the result inside the try-block so that missing keys are
        # also treated as a failure (logged, None returned).
        details = {
            'is_pr': False,
            'owner': owner,
            'repo': repo,
            'sha': head['commit']['sha'],
            'branch_name': head['name'],
            'author_name': head['commit']['author']['login'],
            'commit_message': head['commit']['commit']['message'],
        }
    except Exception as exc:
        # Best-effort: any API or parsing failure is logged and swallowed.
        log.exception(exc)
        return None
    return details
class GitHubHandler(BaseHandler):
    """Fetches package metadata, contributor accounts, and commits from GitHub."""

    title = "Github"
    url_regex = '(http|https|git)://github.com/'
    url = 'https://github.com'
    repo_regex = r'(?:http|https|git)://github.com/[^/]*/([^/]*)/{0,1}'
    slug_regex = repo_regex

    def __init__(self):
        # Authenticated access when a token is configured (higher rate
        # limit); anonymous otherwise.
        if settings.GITHUB_TOKEN:
            self.github = login(token=settings.GITHUB_TOKEN)
        else:
            self.github = GitHub()

    def manage_ratelimit(self):
        """Sleep until the GitHub API rate limit has headroom again."""
        while self.github.ratelimit_remaining < 10:
            sleep(1)

    def _get_repo(self, package):
        """Resolve ``package`` to a github3 repository object.

        Returns None when the stored repo name is not of the form
        ``owner/name``.
        """
        repo_name = package.repo_name()
        if repo_name.endswith("/"):
            repo_name = repo_name[:-1]
        try:
            # BUG FIX: split the trailing-slash-stripped name. Previously the
            # raw package.repo_name() was re-split, making the strip above
            # dead code, so names like "owner/name/" always returned None.
            username, repo_name = repo_name.split('/')
        except ValueError:
            return None
        return self.github.repository(username, repo_name)

    def fetch_metadata(self, package):
        """Populate watcher/fork/description/contributor fields on ``package``."""
        self.manage_ratelimit()
        repo = self._get_repo(package)
        if repo is None:
            return package
        package.repo_watchers = repo.watchers
        package.repo_forks = repo.forks
        package.repo_description = repo.description
        contributors = []
        github_account_type = AccountType.objects.get(name="GITHUB")
        for contributor in repo.iter_contributors():
            account, created = Account.objects.get_or_create(
                account_type=github_account_type,
                name=contributor.login)
            contributors.append(account)
            # assumed per-iteration throttle -- TODO confirm placement
            self.manage_ratelimit()
        package.contributors.set(contributors)
        package.save()
        return package

    def fetch_commits(self, package):
        """Record new commits for ``package``, stopping at the first known one."""
        self.manage_ratelimit()
        repo = self._get_repo(package)
        if repo is None:
            return package
        from package.models import Commit  # Added here to avoid circular imports
        for commit in repo.iter_commits():
            self.manage_ratelimit()
            try:
                commit_record, created = Commit.objects.get_or_create(
                    package=package,
                    commit_date=commit.commit.committer['date'])
                if not created:
                    # If the commit record already exists, it means we are at
                    # the end of the list we want to import.
                    break
            except Commit.MultipleObjectsReturned:
                continue
        package.save()
        return package
class Command(object):
    """Abstract base for github-cli commands.

    Subclasses set ``name``/``usage`` and implement :meth:`run`.  Creating
    an instance registers it in the module-level ``commands`` mapping.
    """
    __metaclass__ = ABCMeta

    # Subclass-provided command identity and option-parser usage string.
    name = None
    usage = None
    # Optional (owner, repo) tuple; empty means no repository bound.
    repository = ()
    user = ''
    subcommands = {}

    # Process exit codes.
    SUCCESS = 0
    FAILURE = 1
    COMMAND_UNKNOWN = 127

    def __init__(self):
        super(Command, self).__init__()
        assert self.name
        # Self-register so the dispatcher can find this command by name.
        commands[self.name] = self
        self.gh = GitHub()
        self.gh.set_user_agent('github-cli/{0} (http://git.io/MEmEmw)'.format(
            __version__
        ))
        self.parser = CustomOptionParser(usage=self.usage)

    @abstractmethod
    def run(self, options, args):
        """Execute the command; return one of the exit codes above."""
        return self.FAILURE

    def get_repo(self, options):
        """Bind ``self.repo`` from ``self.repository``; error out when a
        repository is required but none is available."""
        self.repo = None
        if self.repository:
            self.repo = self.gh.repository(*self.repository)
        if not (self.repo or options.loc_aware):
            self.parser.error('A repository is required.')

    def get_user(self):
        """Lazily authenticate and cache the authenticated user."""
        if not self.user:
            self.login()
            self.user = self.gh.user()

    def login(self):
        """Authenticate ``self.gh`` from the config file, prompting for and
        persisting a new token when no usable config exists."""
        # Get the full path to the configuration file
        config = github_config()
        parser = ConfigParser()
        # Check to make sure the file exists and we are allowed to read it
        if os.path.isfile(config) and os.access(config, os.R_OK | os.W_OK):
            parser.readfp(open(config))
            self.gh.login(token=parser.get('github', 'token'))
        else:
            # Either the file didn't exist or we didn't have the correct
            # permissions
            user = ''
            while not user:
                # We will not stop until we are given a username
                # NOTE(review): the span below is corrupted in the source
                # (the credential prompts and the authorize() call appear
                # scrubbed with '******'); preserved verbatim -- it is not
                # valid Python as-is. It presumably prompted for a password
                # and called self.gh.authorize(...) binding ``auth``.
                user = input('Username: '******'' while not pw: # Nor will we stop until we're given a password pw = getpass('Password: '******'user', 'repo', 'gist'], 'github-cli', 'http://git.io/MEmEmw' )
            parser.add_section('github')
            # ``auth`` comes from the scrubbed authorize() call above.
            parser.set('github', 'token', auth.token)
            self.gh.login(token=auth.token)
            # Create the file if it doesn't exist. Otherwise completely blank
            # out what was there before. Kind of dangerous and destructive but
            # somewhat necessary
            parser.write(open(config, 'w+'))

    def help(self):
        """Print usage plus any subcommands, then exit successfully."""
        self.parser.print_help()
        if self.subcommands:
            print('\nSubcommands:')
            for command in sorted(self.subcommands.keys()):
                print(' {0}:\n\t{1}'.format(
                    command, self.subcommands[command]
                ))
        sys.exit(0)
class Github:
    """AerisCloud wrapper around the github3.py client.

    Handles token-based authentication -- generating and persisting an
    authorization token on first use -- plus organization listing and
    repository lookup/forking helpers.
    """

    def __init__(self, _ask_credentials=None, _ask_2fa=None):
        # Optional callbacks, used only when a new token must be generated
        # interactively (credentials prompt and two-factor prompt).
        self.last_error = None
        self._ask_credentials = _ask_credentials
        self._ask_2fa = _ask_2fa
        self.gh = GitHub(token=self._get_authorization_token())
        self.user = self.gh.user()

    def _get_authorization_token(self):
        # Return the stored token; generate and persist a new one via the
        # credential callbacks when the config has none.
        if not config.has('github', 'token') or \
                not config.get('github', 'token'):
            if not self._ask_credentials:
                raise RuntimeError('Github Token is not set in the '
                                   'configuration and no function was set to '
                                   'ask for credentials')
            token = self._gen_authorization_token()
            config.set('github', 'token', token)
            config.save()
        return config.get('github', 'token')

    def _gen_authorization_token(self, counter=0, creds=None):
        """
        This function creates the authorization token for AerisCloud.
        If an existing token exists for this computer, it adds a #N counter
        next to the name.
        """
        if creds:
            user, pwd = creds['user'], creds['pwd']
        else:
            (user, pwd) = self._ask_credentials()
        # The token note identifies this machine on github.com.
        note = 'AerisCloud on %s' % (node())
        if counter > 0:
            note += ' #%d' % counter
        try:
            auth = authorize(user, pwd, ['repo', 'read:org'], note=note,
                             two_factor_callback=self._ask_2fa)
            return auth.token
        except GitHubError as e:
            if not e.errors or e.errors[0]['code'] != 'already_exists':
                raise
            # token exists, increment counter and retry with the same creds
            # (recursion terminates once a free "#N" note is found)
            counter += 1
            return self._gen_authorization_token(counter=counter,
                                                 creds={'user': user,
                                                        'pwd': pwd})

    def get_organizations(self):
        # All organizations visible to the authenticated user.
        return [org for org in self.gh.iter_orgs()]

    def get_repo(self, name, fork=False):
        """
        Find a repository in the available organizations, if fork is set
        to True it will try forking it to the user's account
        """
        # check on the user
        repo = self.gh.repository(self.user.login, name)
        if repo:
            return repo, None
        if not config.has('github', 'organizations'):
            return False, None
        # then on configured organization
        organizations = config.get('github', 'organizations').split(',')
        for org in organizations:
            repo = self.gh.repository(org, name)
            if repo:
                break
        if not repo:
            return False, None
        if not fork:
            return repo, None
        # Fork to the user's account; return (fork, upstream).
        return repo.create_fork(), repo
# --- Code-of-conduct sync script: constants and module-level setup ---------

CODE_OF_CONDUCT_FILENAME = 'CODE_OF_CONDUCT.md'
GITHUB_API_HOST = 'api.github.com'

gh = GitHub(token='')  # NOTE: Insert a valid token
# The authenticated user -- owns the forks and is mentioned in the PR body.
user = gh.me()

# Collapse single newlines into spaces so the PR body reflows as paragraphs.
# NOTE(review): the original line breaks inside this triple-quoted template
# were lost in the source; the literal is preserved as found.
PR_MESSAGE_BODY = re.sub('(\S+)\n', r'\1 ', """ This is an automated addition of spacetelescope code of conduct that is copied from https://github.com/{0}/{1}/. *If this is opened in error, please let {2} know!* """).strip()

# Get CoC content from template
_r = gh.repository(*CODE_OF_CONDUCT_REPO)
_c = _r.file_contents(CODE_OF_CONDUCT_FILENAME).decoded.decode('utf-8')


def run_command(command):
    """Run ``command`` in a shell, echoing it first; raise on non-zero exit."""
    print('-' * 72)
    print("Running '{0}'".format(command))
    ret = subprocess.call(command, shell=True)
    if ret != 0:
        raise Exception("Command '{0}' failed".format(command))


def ensure_fork_exists(repo):
    # Fork the repo to the authenticated user unless they already own it.
    # NOTE(review): this definition is truncated in the source -- the
    # ``else`` branch body is missing. Preserved as found.
    if repo.owner.login != user.login:
        return repo.create_fork()
    else:
class GitHubHandler(BaseHandler):
    """Fetches package metadata via github3.py and commits via the raw
    GitHub REST API (basic-auth variant)."""

    title = "Github"
    url_regex = '(http|https|git)://github.com/'
    url = 'https://github.com'
    repo_regex = r'(?:http|https|git)://github.com/[^/]*/([^/]*)/{0,1}'
    slug_regex = repo_regex

    def __init__(self):
        # Basic-auth access when credentials are configured; anonymous
        # otherwise (lower rate limit).
        if settings.GITHUB_USERNAME:
            self.github = login(settings.GITHUB_USERNAME,
                                settings.GITHUB_PASSWORD)
        else:
            self.github = GitHub()

    def manage_ratelimit(self):
        # Block until the GitHub API rate limit allows more requests.
        while self.github.ratelimit_remaining < 10:
            sleep(1)

    def fetch_metadata(self, package):
        """Populate watcher/fork/description/participant fields on ``package``."""
        self.manage_ratelimit()
        repo_name = package.repo_name()
        if repo_name.endswith("/"):
            repo_name = repo_name[:-1]
        try:
            # NOTE(review): this re-splits the raw repo name, so the
            # trailing-slash strip above has no effect here.
            username, repo_name = package.repo_name().split('/')
        except ValueError:
            return package
        repo = self.github.repository(username, repo_name)
        if repo is None:
            return package
        package.repo_watchers = repo.watchers
        package.repo_forks = repo.forks
        package.repo_description = repo.description
        contributors = [x.login for x in repo.iter_contributors()]
        if contributors:
            package.participants = ','.join(uniquer(contributors))
        return package

    def fetch_commits(self, package):
        """Record commits for ``package`` from the GitHub REST commits API."""
        self.manage_ratelimit()
        repo_name = package.repo_name()
        if repo_name.endswith("/"):
            repo_name = repo_name[:-1]
        try:
            username, repo_name = package.repo_name().split('/')
        except ValueError:
            # TODO error #248
            return package
        if settings.GITHUB_USERNAME:
            r = requests.get(
                url='https://api.github.com/repos/{}/{}/commits?per_page=100'.format(username, repo_name),
                auth=(settings.GITHUB_USERNAME, settings.GITHUB_PASSWORD)
            )
        else:
            r = requests.get(
                url='https://api.github.com/repos/{}/{}/commits?per_page=100'.format(username, repo_name)
            )
        if r.status_code == 200:
            from package.models import Commit  # Added here to avoid circular imports
            for commit in [x['commit'] for x in r.json()]:
                try:
                    commit, created = Commit.objects.get_or_create(
                        package=package,
                        commit_date=commit['committer']['date']
                    )
                except Commit.MultipleObjectsReturned:
                    pass
        # NOTE(review): the commented-out block below was garbled across a
        # line break in the source; reconstructed as commented-out code.
        #package.commit_list = str([x['total'] for x in repo.iter_commit_activity(number=52)])
        #if package.commit_list.strip() == '[]':
        #    return package
        # NOTE(review): indentation of the final three statements was lost in
        # the source; placed at method level (always executed) -- confirm.
        package.last_fetched = timezone.now()
        package.save()
        return package
class GitHubAdaptor(object):
    """Adapts a GitHub repository's issue tracker to a simple task model.

    Tasks are dicts of the form {'title': ..., 'status': ...} where status
    is one of 'pending', 'in progress', 'complete', or 'wontfix'.
    """

    # status -> (issue state, label list) used when creating/updating issues
    _STATUS_TABLE = {
        'complete': ('closed', []),
        'pending': ('open', []),
        'in progress': ('open', ['in progress']),
        'wontfix': ('closed', ['wontfix']),
    }

    def __init__(self, owner, repository, token=None):
        # Fall back to the WOMBLE_TOKEN environment variable when no token
        # is supplied explicitly.
        self.token = token if token else os.environ.get('WOMBLE_TOKEN')
        self.github = GitHub(token=self.token)
        self.repo = self.github.repository(owner, repository)

    def get_issues(self):
        """Return {issue_number: {'title', 'status'}} for all issues."""
        catalogue = {}
        all_issues = list(self.repo.iter_issues(state='closed'))
        all_issues.extend(self.repo.iter_issues(state='open'))
        for issue in all_issues:
            label_names = {label.name for label in issue.labels}
            if issue.state == 'closed':
                status = 'wontfix' if 'wontfix' in label_names else 'complete'
            else:
                status = ('in progress' if 'in progress' in label_names
                          else 'pending')
            catalogue[issue.number] = {'title': issue.title, 'status': status}
        return catalogue

    def create_issue(self, title, status):
        """Create an issue reflecting ``status``; return its number."""
        state, labels = self._STATUS_TABLE[status]
        issue = self.repo.create_issue(title, labels=labels)
        if state == 'closed':
            # Issues cannot be created closed, so close it with a follow-up
            # edit.
            self.update_issue(issue.number, title, status)
        return issue.number

    def update_issue(self, number, title, status):
        """Sync an existing issue's title, state, and labels to ``status``."""
        state, labels = self._STATUS_TABLE[status]
        self.repo.issue(number).edit(title, state=state, labels=labels)
class GitHubHelper(RepositoryHelper):
    """Repository helper for GitHub URLs (clone URLs and web URLs)."""

    def __init__(self, repo_url=None):
        self.repo_url = repo_url
        self.user_name = None   # owner parsed from repo_url by login()
        self.repo_name = None   # repository parsed from repo_url by login()
        self.github = None  # GitHub Object from github3.py
        self.repo = None    # connected repository, set after login()
        self.api_token = config.github_api_token

    def can_process(self, url):
        """Claim ``url`` (and remember it) if it points at github.com."""
        if "github.com" in url:
            self.repo_url = url
            return True
        else:
            return False

    def login(self):
        """Authenticate and parse owner/repo out of ``self.repo_url``."""
        # Log in
        logging.info('Logging in to GitHub')
        try:
            # Try connecting with the supplied API token
            # ToDo: Check what actual happens when wrong credentials supplied - I suspect nothing
            self.github = GitHub(token=self.api_token)
        except:
            logging.warning('Login to GitHub failed')
        # Check supplied repo url
        # if 'http' in submitted_repo: we've got a full Github URL
        if ".git" in self.repo_url:
            # We're dealing with a clone URL
            # NOTE(review): the "git@..." host literal below appears scrubbed
            # ('*****@*****.**') in the source; preserved as found.
            if "*****@*****.**" in self.repo_url:
                # SSH clone URL: owner/repo follow the colon
                splitted = self.repo_url.split(':')[1].split('/')
                self.user_name = splitted[-2]
                self.repo_name = splitted[-1]
            elif "https" in self.repo_url:
                # HTTPS URL: owner/repo are the last two path segments
                splitted = self.repo_url.split('/')
                self.user_name = splitted[-2]
                self.repo_name = splitted[-1]
            self.repo_name = self.repo_name.replace('.git', '')
        else:
            if self.repo_url.endswith("/"):
                self.repo_url = self.repo_url[:-1]  # Remove trailing slash
            splitted = self.repo_url.split('/')
            self.user_name = splitted[-2]
            self.repo_name = splitted[-1]
        # NOTE(review): the span below is corrupted in the source -- a
        # logging call, the repository() connection, and at least one method
        # header (with docstring mentioning candidate filenames such as
        # 'COPYING', 'LICENSE') appear scrubbed with '******'. Preserved
        # verbatim; it is not valid Python as-is.
        logging.info( 'Trying to connect to repository with Username: '******'COPYING','LICENSE'] :return: A Dictionary of the form {'filename':file_contents,...} """
        found_files = {}
        # Get all files in the root directory of the repo
        root_files = self.repo.contents('/')
        root_files_iter = root_files.items()
        for name, contents in root_files_iter:
            for poss_name in candidate_filenames:
                if poss_name in name.upper():
                    logging.info("Found a candidate file: " + name)
                    found_files[name] = self.repo.contents(name).decoded.decode('UTF-8')
        return found_files

    def get_commits(self, sha=None, path=None, author=None, number=-1,
                    etag=None, since=None, until=None):
        """
        Return a list of all commits in a repository
        :params: Parameters:
        sha (str) -- (optional), sha or branch to start listing commits from
        path (str) -- (optional), commits containing this path will be listed
        author (str) -- (optional), GitHub login, real name, or email to
            filter commits by (using commit author)
        number (int) -- (optional), number of commits to return.
            Default: -1 returns all commits
        etag (str) -- (optional), ETag from a previous request to the same
            endpoint
        since (datetime or string) -- (optional), Only commits after this
            date will be returned. This can be a datetime or an ISO8601
            formatted date string.
        until (datetime or string) -- (optional), Only commits before this
            date will be returned. This can be a datetime or an ISO8601
            formatted date string.
        :return: a list of Commit
        """
        # TODO: Should investigate proper use of GitHubIterator to help ratelimiting: https://github3py.readthedocs.io/en/master/examples/iterators.html
        list_commits = []
        for c in self.repo.iter_commits(sha, path, author, number, etag,
                                        since, until):
            list_commits.append(c)
        logging.info('Retrieved ' + str(len(list_commits)) + ' commits from repository with Username: ' + self.user_name + " / Repo: " + self.repo_name + "...")
        return list_commits
def main(argv):
    """Create a GitHub release of the Matlab xdf importer and upload assets.

    Python 2 script (uses print statements). Expects -u/--username and
    -p/--password arguments for GitHub basic auth.
    """
    # Parse input arguments into username and password
    username = ''
    password = ''
    try:
        # NOTE(review): the span below is corrupted in the source (the
        # getopt long-option list, an except clause, and the option loop
        # header appear scrubbed with '******'); preserved verbatim -- it
        # is not valid Python as-is.
        opts, args = getopt.getopt(argv,"u:p:",["username="******"password="******"-u", "--username"): username = arg elif opt in ("-p", "--password"): password = arg
    # Scan the xdf/load_xdf.m file and pull out the version
    filename = inspect.getframeinfo(inspect.currentframe()).filename
    root_path = os.path.dirname(os.path.abspath(filename))
    mfile_path = os.path.join(root_path, 'Matlab', 'xdf', 'load_xdf.m')
    version_found = False
    with open(mfile_path) as f:
        for line in f:
            if not version_found:
                # First LIBVERSION match wins.
                m = re.match("LIBVERSION\s=\s'(\d+\.\d+)';", line)
                if m:
                    mfile_version = m.group(1)
                    version_found = True
    # Create a GitHub instance and access the repository
    g = GitHub(username, password)
    repo = g.repository('sccn', 'xdf')
    if version_found:
        #Create the release
        #http://github3py.readthedocs.org/en/latest/repos.html#github3.repos.repo.Repository.create_release
        try:
            release = repo.create_release('v'+mfile_version, target_commitish='master')
        except GitHubError:
            print 'Release for version ' + mfile_version + ' already exists or validation failed.'
            return None
        #zip contents of Matlab\* into xdfimport<version#>.zip
        eeglabfn = 'xdfimport' + mfile_version + '.zip'
        zf = zipfile.ZipFile(eeglabfn, mode='w', compression=zipfile.ZIP_DEFLATED)
        zipdir('Matlab/', zf, 'xdfimport' + mfile_version)
        zf.close()
        #zip contents of Matlab\xdf\* into xdf.zip
        zf2 = zipfile.ZipFile('xdf.zip', mode='w', compression=zipfile.ZIP_DEFLATED)
        zipdir('Matlab/xdf/', zf2, 'xdf')
        zf2.close()
        #http://github3py.readthedocs.org/en/latest/repos.html#github3.repos.release.Release.upload_asset
        #Upload zip files as release assets
        with open(eeglabfn) as fd:
            content = fd.read()
        release.upload_asset('application/zip', eeglabfn, content)
        with open('xdf.zip') as fd:
            content = fd.read()
        release.upload_asset('application/zip', 'xdf.zip', content)
        #Upload mex files as release assets
        for fn in os.listdir('Matlab/xdf/'):
            fname, fext = os.path.splitext(fn)
            if len(fext) > 3 and fext[:4] == '.mex':
                with open(os.path.join('Matlab', 'xdf', fn)) as fd:
                    content = fd.read()
                release.upload_asset('application/octet-stream', fn, content)
class SourceObject:
    """Mirror-source plugin for github.com.

    Uses anonymous GitHub API access (hence the enforced delay between API
    calls, so that github won't ban us) to enumerate key repositories and
    check repository existence.
    """

    def __init__(self):
        self.name = "github.com"
        self.helpers_needed = ["git_sync_flow"]
        self.githubApiInterval = 30  # in seconds, so that github won't ban us
        # self.lastRun = None
        self.logFile = None
        self.api = None      # host API object, set by init2()
        self.anon = None     # anonymous github3 client, set by init2()

    def init2(self, api):
        # Deferred initialization: called once the host API object exists.
        self.api = api
        self.anon = GitHub()

    def getSyncTimeRange(self):
        # Not supported by this source.
        assert False

    def getSyncThread(self):
        # if self.lastRun is not None and datetime.now() - self.lastRun < datetime.timedelta(hours=1):
        #     return None
        return _SyncThread(self)

    @property
    def protocols(self):
        return [
            "git",
            "ssh",
            "http",
            "https",
        ]

    @property
    def capabilities(self):
        return [
            "list-important",
            "url-for-read",
            "url-for-write",
        ]

    def get_important_repos(self):
        """Return the expanded key-repository list as 'user/repo' strings."""
        # get key repository list configuration
        keyRepoList = []
        keyRepoListFile = os.path.join(self.api.getCfgDir(), "key-repo.list")
        if os.path.exists(keyRepoListFile):
            with open(keyRepoListFile, "r") as f:
                for line in f.read().split("\n"):
                    if line.startswith("#") or line.strip(" ") == "":
                        continue
                    keyRepoList.append(McUtil.splitToTuple(line, "/"))
        # expand key repository list (glob patterns in the repo part)
        realKeyRepoList = []
        for userName, repoName in keyRepoList:
            if "*" in repoName:
                for repo in self.anon.iter_user_repos(userName):
                    repoName2 = repo.full_name.split("/")[1]
                    if fnmatch.fnmatch(repoName2, repoName):
                        realKeyRepoList.append("%s/%s" % (userName, repoName2))
                    # throttle between API-backed iterations
                    time.sleep(self.githubApiInterval)
            else:
                realKeyRepoList.append("%s/%s" % (userName, repoName))
        return realKeyRepoList

    def has_repo(self, repo_id):
        """Return True if ``repo_id`` ('user/repo') exists on github.com.

        Consecutive calls are throttled so they are at least
        ``githubApiInterval`` seconds apart.
        """
        # BUG FIX: the original mixed ``datetime.now()`` (class-style import)
        # with ``datetime.timedelta(...)`` (module-style import); one of the
        # two must raise AttributeError at runtime regardless of how datetime
        # was imported at module level. Import both names locally so the
        # method works either way.
        from datetime import datetime, timedelta
        if hasattr(self, "_has_repo_last_run"):
            timediff = datetime.now() - self._has_repo_last_run
            interval = timedelta(seconds=self.githubApiInterval)
            if timediff < interval:
                wait_seconds = (interval - timediff).seconds
                time.sleep(wait_seconds)
        ret = self.anon.repository(*self._split_repo_id(repo_id)) is not None
        self._has_repo_last_run = datetime.now()
        return ret

    def get_repo_url(self, repo_id, for_write=False):
        """Return the clone URL: git:// for reads, https:// for writes."""
        if not for_write:
            return "git://github.com/%s" % (repo_id)
        else:
            return "https://github.com/%s" % (repo_id)

    def _split_repo_id(self, repo_id):
        # 'user/repo' -> ('user', 'repo')
        t = repo_id.split("/")
        assert len(t) == 2
        return (t[0], t[1])
class GitHubHandler(BaseHandler):
    """Fetches package metadata, participants, and commits from GitHub."""

    title = "Github"
    url_regex = '(http|https|git)://github.com/'
    url = 'https://github.com'
    repo_regex = r'(?:http|https|git)://github.com/[^/]*/([^/]*)/{0,1}'
    slug_regex = repo_regex

    def __init__(self):
        # Authenticated access when a token is configured (higher rate
        # limit); anonymous otherwise.
        if settings.GITHUB_TOKEN:
            self.github = login(token=settings.GITHUB_TOKEN)
        else:
            self.github = GitHub()

    def manage_ratelimit(self):
        """Sleep until the GitHub API rate limit has headroom again."""
        while self.github.ratelimit_remaining < 10:
            sleep(1)

    def _get_repo(self, package):
        """Resolve ``package`` to a github3 repository object.

        Returns None when the stored repo name is not of the form
        ``owner/name``.
        """
        repo_name = package.repo_name()
        if repo_name.endswith("/"):
            repo_name = repo_name[:-1]
        try:
            # BUG FIX: split the trailing-slash-stripped name. Previously the
            # raw package.repo_name() was re-split, making the strip above
            # dead code, so names like "owner/name/" always returned None.
            username, repo_name = repo_name.split('/')
        except ValueError:
            return None
        return self.github.repository(username, repo_name)

    def fetch_metadata(self, package):
        """Populate watcher/fork/description/participant fields on ``package``."""
        self.manage_ratelimit()
        repo = self._get_repo(package)
        if repo is None:
            return package
        package.repo_watchers = repo.watchers
        package.repo_forks = repo.forks
        package.repo_description = repo.description
        contributors = []
        for contributor in repo.iter_contributors():
            contributors.append(contributor.login)
            # assumed per-iteration throttle -- TODO confirm placement
            self.manage_ratelimit()
        if contributors:
            package.participants = ','.join(uniquer(contributors))
        return package

    def fetch_commits(self, package):
        """Record new commits for ``package``, stopping at the first known one."""
        self.manage_ratelimit()
        repo = self._get_repo(package)
        if repo is None:
            return package
        from package.models import Commit  # Added here to avoid circular imports
        for commit in repo.iter_commits():
            self.manage_ratelimit()
            try:
                commit_record, created = Commit.objects.get_or_create(
                    package=package,
                    commit_date=commit.commit.committer['date']
                )
                if not created:
                    # If the commit record already exists, it means we are at
                    # the end of the list we want to import.
                    break
            except Commit.MultipleObjectsReturned:
                continue
        package.save()
        return package