def main():
    username = raw_input("Enter username: ")
    # ****** -- the credential prompt and GitHub login were redacted in the source;
    # githubObj, ssh_url_list and html_url_list are assumed to be set up here.
    choice = raw_input("Do you want to clone the repositories of your organization? [Y/N] ")
    if choice == 'y' or choice == 'Y':
        all_repos = githubObj.repositories()
        for repo in all_repos:
            ssh_url_list.append(repo.ssh_url)
            html_url_list.append(repo.html_url)
    else:
        user_repos = githubObj.repositories_by(username)
        for repo in user_repos:
            ssh_url_list.append(repo.ssh_url)
            html_url_list.append(repo.html_url)
    clone_choice = raw_input("Do you want to clone via SSH [Y/N] ")
    if clone_choice == 'y' or clone_choice == 'Y':
        print "Initiating Cloning via SSH"
        for url in ssh_url_list:
            os.system("git clone " + url)
    else:
        print "Initiating Cloning via HTTPS"
        for url in html_url_list:
            os.system("git clone " + url)
def retrieve_repos_by_keyword(keywords, language, username="", password=""):
    gh = None
    if username and password:
        gh = login(username, password=password)
    else:
        gh = GitHub()
    try:
        for keyword in keywords:
            for page in range(1, 11):
                repos = gh.search_repos(keyword, language=language, start_page=page)
                for repo in repos:
                    r = repo.to_json()
                    if r["language"] == language:
                        result = r
                        try:
                            user = gh.user(r["owner"]).to_json()
                            result["owner_email"] = user.get("email", "")
                            result["owner_blog"] = user.get("blog", "")
                            result["owner_location"] = user.get("location", "")
                            result["owner_avatar"] = user.get("avatar_url", "")
                        except Exception as e:
                            pass
                        yield result
    except Exception as e:
        print e
        pass
def fixPullRequestTitle(pullRequestId):
    gho = GitHub(githubUser, githubPassword)
    oPull = gho.pull_request(githubOwner, githubRepository, str(pullRequestId))
    branchName = oPull.head.ref
    issueKey = extractIssueKey(branchName)
    title = oPull.title
    foundIndex = title.find(issueKey)
    updateRequired = 0
    if foundIndex == 0:
        if issueKey == title:
            updateRequired = 1
            print 'Issue Key ' + issueKey + ' Found in Title but Update Required for ' + title
        else:
            print 'Issue Key ' + issueKey + ' found in Title for ' + title
            return
    else:
        updateRequired = 1
        print 'Issue Key ' + issueKey + ' NOT Found in Title for ' + title
    if updateRequired == 1:
        jiraIssue = jiraGetIssueInfo(issueKey, config)
        title = issueKey + ' ' + jiraIssue.fields.summary
        print title
        oPull.update(title)
        print 'Updated the Title for the Pull Request ' + oPull.html_url
def starred(username, sort):
    """ """
    gh = GitHub()
    stars = gh.starred_by(username)
    click.echo(desc)
    repo_dict = {}
    for s in stars:
        language = s.language or 'Others'
        description = s.description or ''
        if language not in repo_dict:
            repo_dict[language] = []
        repo_dict[language].append([s.name, s.html_url, description.strip()])
    if sort:
        repo_dict = OrderedDict(sorted(repo_dict.items(), key=lambda l: l[0]))
    for language in repo_dict.keys():
        data = ' - [{}](#{})'.format(language, language.lower())
        click.echo(data)
    click.echo('')
    for language in repo_dict:
        click.echo('## %s\n' % language)
        for repo in repo_dict[language]:
            data = '* [{}]({}) - {}'.format(*repo)
            click.echo(data)
        click.echo('')
def run(self):
    """
    Main worker.
    """
    update_name = None             # Store update name, i.e: Eddy v0.9.1
    update_version = self.current  # Store update version, i.e: 0.9.1
    update_url = None              # Store update HTML url, i.e: http://github.com/...
    try:
        LOGGER.info('Connecting to GitHub to retrieve update information (channel: %s)', self.channel.value)
        github = GitHub(token='6a417ccfe9a7c526598e77a74cbf1cba6e688f0e')
        repository = github.repository('danielepantaleone', 'eddy')
        for release in repository.releases():
            if self.channel is Channel.Beta or not release.prerelease:
                try:
                    if NormalizedVersion(release.tag_name[1:]) > NormalizedVersion(update_version):
                        update_name = release.name
                        update_version = release.tag_name[1:]
                        update_url = release.html_url
                except IrrationalVersionError as e:
                    LOGGER.warning('Failed to parse version number from TAG: %s', e)
        if update_version != self.current:
            LOGGER.info('Update available: %s', update_name)
            self.sgnUpdateAvailable.emit(update_name, update_url)
        else:
            LOGGER.info('No update available')
            self.sgnNoUpdateAvailable.emit()
    except Exception as e:
        LOGGER.warning('Failed to retrieve update data: %s', e)
        self.sgnNoUpdateDataAvailable.emit()
    self.finished.emit()
def get_session():
    """Fetch and/or load API authorization token for GITHUB."""
    ensure_config_dir()
    credential_file = os.path.join(CONFIG_DIR, 'github_auth')
    if os.path.isfile(credential_file):
        with open(credential_file) as fd:
            token = fd.readline().strip()
        gh = GitHub(token=token)
        try:  # Test connection before starting
            gh.is_starred('github', 'gitignore')
            return gh
        except GitHubError as exc:
            raise_unexpected(exc.code)
            sys.stderr.write('Invalid saved credential file.\n')
    from getpass import getpass
    from github3 import authorize
    user = prompt('GITHUB Username')
    try:
        auth = authorize(
            user, getpass('Password for {0}: '.format(user)),
            'repo', 'Farcy Code Reviewer',
            two_factor_callback=lambda: prompt('Two factor token'))
    except GitHubError as exc:
        raise_unexpected(exc.code)
        raise FarcyException(exc.message)
    with open(credential_file, 'w') as fd:
        fd.write('{0}\n{1}\n'.format(auth.token, auth.id))
    return GitHub(token=auth.token)
def _get_latest_release(self):
    # Return Release object if new release available, else, return None
    if 'alpha' in settings.update_channel:
        dev_release = self._get_dev_release()
        if dev_release:
            return dev_release
    else:
        dev_release = None
    # Check for new beta or release versions
    if not dev_release:
        owner = OWNER
        repo_name = REPO_NAME
        # If there is an issue accessing the GitHub API, return None
        try:
            gh = GitHub()
            repo = gh.repository(owner, repo_name)
        except GitHubError:
            return None
        for release in repo.iter_releases():
            latest_version = release.name
            if Version(current_version) < Version(latest_version):
                if (((is_beta(latest_version) or is_release(latest_version)) and
                        is_beta_channel()) or
                        (is_release(latest_version) and is_release_channel())):
                    release_url = (
                        'https://github.com/%s/%s/archive/%s.tar.gz'
                        % (owner, repo_name, release.tag_name))
                    release_notes = release.body
                    return Release(latest_version, release_url, release_notes)
            else:
                return None
def issues():
    testToIssue = {}
    try:
        # user = GitHub("<user>", token="<token>")
        user = GitHub()
        issues = user.issues_on("assaft", "caevo", state="open")
        regex = r"\btest\b ([tT]\d+)"
        group = 1
        for issue in issues:
            text = "Issue by " + issue.user.login + ":\n" + issue.title + "\n" + issue.body + "\n\n"
            for comment in issue.comments():
                text = text + "commit by " + comment.user.login + ":\n" + comment.body + "\n\n"
            matches = re.finditer(regex, text)
            for match in matches:
                test = match.group(group)
                # print test
                if test not in testToIssue:
                    testToIssue[test] = set()
                testIssues = testToIssue[test]
                testIssues.add(issue.number)
    except ForbiddenError as err:
        print ('Issues cannot be fetched from GitHub because connection as an anonymous user has failed. Wait for GitHub counter to reset or put your username and password/token in the script. Message from GitHub:\n' + format(err))
    except:
        sys.exit('Unexpected error fetching issues from GitHub')
    return testToIssue
def gen(token):
    gh = GitHub(token=token)
    stars = gh.starred()
    taged_star_tree = Tree()
    try:
        with open('tag.json', 'r') as tag_file:
            repo_tag_dict = json.load(tag_file, object_hook=RepoTagDict)
    except FileNotFoundError:
        print("invalid tag", file=sys.stderr)
        repo_tag_dict = RepoTagDict()
    for star in stars:
        repo = Repo(star)
        if len(repo_tag_dict[repo.full_name]) == 0:
            taged_star_tree['Other'].nodes.append(repo)
        else:
            for tag in repo_tag_dict[repo.full_name]:
                taged_star_tree[tag].nodes.append(repo)
    with open('tag.json', 'w+') as tag_file:
        json.dump(repo_tag_dict, tag_file, indent=' ', sort_keys=True)
    print("# TOC")
    taged_star_tree.mdtoc(name="Star")
    print()
    taged_star_tree.mdprint(
        name='Star',
        node_md=lambda x: '[{}]({}): {}'.format(str(x), x.url, x.description))
def set_pull_request_status(pr, state, target_url="", description='', user=None,
                            credfile='gh.cred'):
    """Sets a state for every commit associated with a pull request.

    Parameters
    ----------
    pr : PullRequest or len-3 sequence
        A github3 pull request object or a tuple of (owner, repository, number).
    state : str
        Accepted values are 'pending', 'success', 'error', 'failure'.
    target_url : str, optional
        URL to link with this status.
    description : str, optional
        Flavor text.
    user : str, None, or NotSpecified, optional
        The username to log into github with.
    credfile : str, optional
        The github credentials file name.
    """
    gh = GitHub()
    ensure_logged_in(gh, user=user, credfile=credfile)
    if isinstance(pr, Sequence):
        r = gh.repository(*pr[:2])
        pr = gh.pull_request(*pr)
    else:
        # r = gh.repository(*pr.repository)  Broken on github3.py v0.8+
        r = gh.repository(*pr.base.repo)
    status = r.create_status(pr.head.sha, state=state, target_url=target_url,
                             description=description)
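# A minimal usage sketch of set_pull_request_status() above, assuming a
# hypothetical owner/repo/PR number and that gh.cred holds valid credentials.
set_pull_request_status(
    ('some-owner', 'some-repo', 42),  # (owner, repository, number) -- hypothetical
    'success',
    target_url='https://ci.example.com/builds/1',
    description='All checks passed',
)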
class GitHubApi:
    """ GitHub API interaction using github3 """

    def __init__(self, repo_owner, repo_name, token):
        self.repo_owner = repo_owner
        self.repo_name = repo_name
        self.gh = GitHub(token=token)

    def get_issues(self, label=None):
        """Returns all issues (matching label)"""
        return self.gh.iter_repo_issues(self.repo_owner, self.repo_name, labels=label)

    def get_issue(self, issue_id):
        """Returns issue"""
        return self.gh.issue(self.repo_owner, self.repo_name, issue_id)

    def get_matching_pull_requests(self, label=None):
        """
        Returns all matching issues

        Pull requests are treated as issues
        """
        pull_request_list = []
        for issue in self.get_issues(label):
            # Get Pull Request
            pull_request_list.append(self.pull_request_information(issue.number))
        return pull_request_list

    def get_pull_request_status(self, label=None):
        """Returns string containing status of pull requests"""
        pull_requests = self.get_matching_pull_requests(label)
        pull_requests_information = "Pull Requests - %s\n\n" % label
        for pr in pull_requests:
            pull_requests_information += "Title: %s\nBranch: %s\nLink: %s\nMergeable: %s\n\n" \
                % (pr.title, pr.head.ref, pr.html_url, pr.mergeable)
        return pull_requests_information

    def pull_request_information(self, pull_request_id):
        """Returns specified pull request"""
        pull_request = self.gh.pull_request(self.repo_owner, self.repo_name, pull_request_id)
        return pull_request

    def assign_new_label_to_issue(self, branch, label, who):
        """Update issue label"""
        # Find issue
        issue = self.filter_on_branch(self.get_matching_pull_requests(), branch)
        # Remove all existing labels
        issue.remove_all_labels()
        # Add label 'release'
        issue.add_labels(label)
        # Add comment for tracking
        issue.create_comment("%s assigned label '%s' via hipchat" % (who, label))

    def filter_on_branch(self, pull_requests, branch):
        for pull_request in pull_requests:
            if pull_request.head.ref == branch:
                # Get issue
                return self.get_issue(pull_request.number)
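# A minimal usage sketch of the GitHubApi wrapper above; the owner, repository
# name, token and label values are hypothetical placeholders.
api = GitHubApi('some-owner', 'some-repo', token='<personal-access-token>')
print(api.get_pull_request_status(label='release'))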
def __init__(self, repo_user='******', repo_name='semanario'):
    """
    @params
        repo_user - name of organization or user
        repo_name - repository name
    """
    github = GitHub()
    self.repo = github.repository(repo_user, repo_name)
def starred(username, token, sort, repository, message):
    """GitHub starred

    creating your own Awesome List used GitHub stars!

    example:
        starred --username maguowei --sort > README.md
    """
    if repository:
        if not token:
            click.secho('Error: create repository need set --token', fg='red')
            return
        file = BytesIO()
        sys.stdout = file
    else:
        file = None

    gh = GitHub(token=token)
    stars = gh.starred_by(username)
    click.echo(desc)
    repo_dict = {}

    for s in stars:
        language = s.language or 'Others'
        description = html_escape(s.description).replace('\n', '') if s.description else ''
        if language not in repo_dict:
            repo_dict[language] = []
        repo_dict[language].append([s.full_name, s.name, s.html_url, description.strip(), s.homepage or ''])

    if sort:
        repo_dict = OrderedDict(sorted(repo_dict.items(), key=lambda l: l[0]))
        for language in repo_dict:
            repo_dict[language] = sorted(repo_dict[language], key=lambda l: l[1])

    for language in repo_dict.keys():
        data = u' - [{}](#{})'.format(language, '-'.join(language.lower().split()))
        click.echo(data)
    click.echo('')

    for language in repo_dict:
        click.echo('## {} \n'.format(language.replace('#', '# #')))
        for repo in repo_dict[language]:
            data = u'* [{0}]({2}) - {3} [{4}]({4})'.format(*repo)
            click.echo(data)
        click.echo('')

    click.echo(license_.format(username=username))

    if file:
        rep = gh.repository(username, repository)
        if rep:
            readme = rep.readme()
            readme.update(message, file.getvalue())
        else:
            rep = gh.create_repository(repository, 'A curated list of my GitHub stars!')
            rep.create_file('README.md', 'starred initial commit', file.getvalue())
        click.launch(rep.html_url)
def one_repo_only(owner, reponame, gh_token=''):
    """
    Go back and see why this or that denied request.

    Provide a valid token.
    """
    gh = GitHub(token=gh_token)
    user = gh.me()
    body = ISSUE_BODY.format(user)
    repo = gh.repository(owner, reponame)
    i = repo.create_issue(ISSUE_TITLE, body=body)
    print(i)
class GitHubDB(object):
    def __init__(self, ghtoken):
        # Get handle to Github API
        if ghtoken is not None and ghtoken != '':
            self.gh = login(token=ghtoken)
        else:
            log.warning('Using unauthenticated access to Github API. This will result in severe rate limiting.')
            self.gh = GitHub()

    def waitForRateLimit(self, resourceType):
        """resourceType can be 'search' or 'core'."""
        try:
            rateLimitInfo = self.gh.rate_limit()['resources']
            while rateLimitInfo[resourceType]['remaining'] < (1 if resourceType == 'search' else 12):
                waitTime = max(1, rateLimitInfo[resourceType]['reset'] - time.time())
                log.warning('Waiting %s seconds for Github rate limit...', waitTime)
                time.sleep(waitTime)
                rateLimitInfo = self.gh.rate_limit()['resources']
        except ConnectionError as e:
            log.error("Connection error while querying GitHub rate limit. Retrying...")
            self.waitForRateLimit(resourceType)

    def refreshGithubUser(self, ghUserObject):
        self.waitForRateLimit('core')
        return ghUserObject.refresh(True)

    def getGithubUserForLogin(self, login, session):
        """Uses the Github API to find the user for the given username.
        Returns NullObject if the user was not found for any reason."""
        # Try to use cached result to avoid hitting rate limit
        cachedUser = session.query(GitHubUserCache).filter(GitHubUserCache.login == login).first()
        if cachedUser is not None:
            return cachedUser if not cachedUser.fake else NullObject()
        log.debug('Querying GitHub API for login %s', login)
        try:
            self.waitForRateLimit('core')
            potentialUser = self.gh.user(login)
            if potentialUser is None:
                # store login as fake
                session.add(GitHubUserCache(login=login, fake=True))
                return NullObject()
            actualUser = self.refreshGithubUser(potentialUser)
            if isinstance(potentialUser, NullObject):
                # store login as fake
                session.add(GitHubUserCache(login=login, fake=True))
            else:
                # cache user
                session.add(GitHubUserCache(login=login, name=actualUser.name,
                                            email=actualUser.email,
                                            company=actualUser.company,
                                            location=actualUser.location))
            return actualUser
        except ConnectionError:
            log.error("github query failed when attempting to verify username %s", login)
            return NullObject()

    def searchGithubUsers(self, query):
        self.waitForRateLimit('search')
        return self.gh.search_users(query)
def _client(self, auth, api_url):
    client = GitHub()
    if auth.token.exists():
        client = GitHub(token=auth.token())
    elif auth.username.exists() and auth.password.exists():
        client = GitHub(username=auth.username(), password=auth.password())
    # TODO: hacky - might want to just open up a PR to upstream?
    if api_url is not None:
        client._github_url = api_url
        client.session.base_url = api_url
    return client
def _get_pr_numbers(last_deploy, current_deploy):
    repo = GitHub().repository('dimagi', 'commcare-hq')
    comparison = repo.compare_commits(last_deploy, current_deploy)
    pr_numbers = map(
        lambda repo_commit: int(
            re.search(r'Merge pull request #(\d+)', repo_commit.commit.message).group(1)),
        filter(
            lambda repo_commit: repo_commit.commit.message.startswith('Merge pull request'),
            comparison.commits
        )
    )
    return pr_numbers
def response(self, rc):
    rawdata = json.loads(request.data)
    if 'pull_request' not in rawdata:
        return "\n", None
    action = rawdata['action']
    if action not in self._action_to_event:
        # Can be one of 'opened', 'closed', 'synchronize', or 'reopened',
        # but we only care about "opened" and "synchronize".
        return "\n", None
    gh = GitHub()
    pr = gh.pull_request(rc.github_owner, rc.github_repo, rawdata['number'])
    event = Event(name=self._action_to_event[action], data=pr)
    return request.method + ": github\n", event
def verify_hook(owner, repo, url, events, user=None, credfile='gh.cred'):
    """Ensures that the github WebURL API hook has been set up properly.

    Parameters
    ----------
    owner : str
        The GitHub repository owner.
    repo : str
        The GitHub repository name.
    url : str
        The url of the hook.
    events : list of str
        The list of GitHub events that this hook should trigger on. GitHub
        defaults this to ['pull'] but ['pull_request'] is a more reasonable value.
    user : str, None, or NotSpecified, optional
        The username to log into github with.
    credfile : str, optional
        The github credentials file name.
    """
    gh = GitHub()
    ensure_logged_in(gh, user=user, credfile=credfile)
    r = gh.repository(owner, repo)
    for hook in r.iter_hooks():
        if hook.name != 'web':
            continue
        elif hook.config['url'] == url:
            break
    else:
        hook = r.create_hook(name='web', config={"url": url, "content_type": "json"},
                             events=events, active=True)
        if hook is None:
            msg = ("failed to create github webhook for {0}/{1} pointing to {2} with "
                   "the {3} events").format(owner, repo, url, ", ".join(events))
            raise RuntimeError(msg)
    update = {}
    if hook.config['url'] != url:
        update['url'] = url
    if hook.config['content_type'] != 'json':
        update['content_type'] = 'json'
    if hook.events is None or set(hook.events) != set(events):
        update['events'] = events
    if not hook.active:
        update['active'] = True
    if len(update) > 0:
        status = hook.edit(**update)
        if not status:
            msg = ("failed to update github webhook for {0}/{1} pointing to {2} with "
                   "the {3} events").format(owner, repo, url, ", ".join(events))
            raise RuntimeError(msg)
def __init__(self, token=None):
    self.scope = ['public_repo']
    self.REQ_TOKEN = 'https://github.com/login/oauth/authorize'
    self.AUTH_URL = 'https://github.com/login/oauth/authorize'
    self.ACC_TOKEN = 'https://github.com/login/oauth/access_token'
    self.username = settings.TH_GITHUB['username']
    self.password = settings.TH_GITHUB['password']
    self.consumer_key = settings.TH_GITHUB['consumer_key']
    self.consumer_secret = settings.TH_GITHUB['consumer_secret']
    self.token = token
    if self.token:
        token_key, token_secret = self.token.split('#TH#')
        self.gh = GitHub(token=token_key)
    else:
        self.gh = GitHub(username=self.username, password=self.password)
def __init__(self, ghtoken):
    # Get handle to Github API
    if ghtoken is not None and ghtoken != '':
        self.gh = login(token=ghtoken)
    else:
        log.warning('Using unauthenticated access to Github API. This will result in severe rate limiting.')
        self.gh = GitHub()
class GitHubConnector(object):
    def __init__(self):
        self.git_hub = GitHub(settings.GITHUB_USER, settings.GITHUB_PASS)
        self.orgs = list(self.git_hub.iter_orgs())
        # As of 10/3/2013, the default rate limit was 5000/hr.
        # Should your code loop infinitely, this exception will
        # leave enough requests to debug the problem without
        # having to wait an hour.
        if self.git_hub.ratelimit_remaining < 500:
            raise Exception('You have only 500 GitHub requests left for this hour')

    def get_issues_for_repo(self, repo_name=None, github_id=None, repo_tuple=None):
        repos = self.git_hub.iter_all_repos()
        for repo in repos:
            if repo_name and repo_name == repo.name:
                return list(repo.iter_issues())
            elif github_id and github_id == repo.id:
                return list(repo.iter_issues())
            elif repo_tuple and repo_tuple[1] == repo.name:
                return list(repo.iter_issues())

    def get_all_issues(self):
        issues = []
        for org in self.orgs:
            org_repos = org.iter_repos()
            for repo in org_repos:
                issues += list(repo.iter_issues(state='open'))
                issues += list(repo.iter_issues(state='closed'))
        return issues

    def get_repos_for_org(self, org_login=None, org_id=None):
        for org in self.orgs:
            org_json = org.to_json()
            if org_json["login"] == org_login or org_json["id"] == org_id:
                return list(org.iter_repos())

    def get_all_repos(self):
        repos = []
        for org in self.orgs:
            org_repos = org.iter_repos()
            for repo in org_repos:
                repos.append(repo)
        return repos
class GitHubHandler(BaseHandler):
    title = "Github"
    url_regex = '(http|https|git)://github.com/'
    url = 'https://github.com'
    repo_regex = r'(?:http|https|git)://github.com/[^/]*/([^/]*)/{0,1}'
    slug_regex = repo_regex

    def __init__(self):
        if settings.GITHUB_USERNAME:
            self.github = login(settings.GITHUB_USERNAME, settings.GITHUB_PASSWORD)
        else:
            self.github = GitHub()

    def manage_ratelimit(self):
        while self.github.ratelimit_remaining < 10:
            sleep(1)

    def fetch_metadata(self, package):
        self.manage_ratelimit()
        username, repo_name = package.repo_name().split('/')
        repo = self.github.repository(username, repo_name)
        if repo is None:
            return package
        package.repo_watchers = repo.watchers
        package.repo_forks = repo.forks
        package.repo_description = repo.description
        contributors = [x.login for x in repo.iter_contributors()]
        if contributors:
            package.participants = ','.join(uniquer(contributors))
        return package

    def fetch_commits(self, package):
        self.manage_ratelimit()
        username, repo_name = package.repo_name().split('/')
        repo = self.github.repository(username, repo_name)
        if repo is None:
            return package
        package.commit_list = str([x['total'] for x in repo.iter_commit_activity(number=52)])
        if package.commit_list.strip() == '[]':
            return package
        package.save()
        return package
def create(self, validated_data):
    gh = GitHub()
    github_name = validated_data['github']
    user_data = validated_data.pop('user')
    user_data['username'] = user_data.get('email')
    if github_name:
        gh_id = gh.user(github_name).id
        validated_data['github_id'] = gh_id
    user = User.objects.create(**user_data)
    member = Member.objects.create(user=user, **validated_data)
    return member
def __init__(self, _ask_credentials=None, _ask_2fa=None):
    self.last_error = None
    self._ask_credentials = _ask_credentials
    self._ask_2fa = _ask_2fa
    self.gh = GitHub(token=self._get_authorization_token())
    self.user = self.gh.user()
def validate(self, attrs):
    code = attrs.get('code')
    state = attrs.get('state')
    user = None

    if not CLIENT_ID or not CLIENT_SECRET:
        msg = 'Lifebelt not configured properly. Please contact administrators'
        raise exceptions.ValidationError(msg)

    if code and state:
        headers = {'Accept': 'application/json'}
        data = {"client_id": CLIENT_ID,
                "client_secret": CLIENT_SECRET,
                "code": code,
                "state": state}
        url = 'https://github.com/login/oauth/access_token'
        r = requests.post(url=url, headers=headers, data=data)

        if 'access_token' not in r.json():
            msg = 'This smells...'
            if 'error' in r.json():
                msg = r.json()['error']
            raise exceptions.ValidationError(msg)

        token = r.json()['access_token']

        # https://github3py.readthedocs.org/en/master/
        gh = GitHub(token=token)
        user = Member.objects.get(github_id=gh.me().id)

        if not user:
            msg = 'User with this GitHub name is not found'
            raise exceptions.ValidationError(msg)

        user.avatar_url = gh.me().as_dict().get('avatar_url')
        user.github_token = token
        user.save()
    else:
        msg = ('You must provide a valid email and a special code to authenticate')
        raise exceptions.ValidationError(msg)

    attrs['user'] = user.user
    return attrs
def get_gh_repos(user, repo_count):
    """
    Get a user's repositories with a limit
    :param user:
    :param repo_count:
    :return:
    """
    repos = None
    if user and repo_count:
        try:
            gh = GitHub()
            repo_limit = int(repo_count) if repo_count else 5
            # The middle of the search expression was redacted in the source (******);
            # 'user:' + user with sort='updated' is an assumed reconstruction.
            repos = list(map(lambda r: r.repository,
                             list(gh.search_repositories('user:' + user, sort='updated'))[:repo_limit]))
        except GitHubError:
            repos = None
    return repos
def __init__(self):
    super(Command, self).__init__()
    assert self.name
    commands[self.name] = self
    self.gh = GitHub()
    self.gh.set_user_agent('github-cli/{0} (http://git.io/MEmEmw)'.format(
        __version__
    ))
    self.parser = CustomOptionParser(usage=self.usage)
def check_for_newer_wp_release():
    init()
    most_recent_version_supported = latest_template_wordpress_version()
    g = GitHub(token=os.environ.get("GITHUB_TOKEN"))
    repo = g.repository("wordpress", "wordpress")
    tags = [t.name for t in repo.tags()]
    version = parse_version(most_recent_version_supported)
    for tag in tags:
        if not re.search("[Bb]eta", tag):
            assert version >= parse_version(tag), (
                Fore.RED + Style.BRIGHT +
                "\nIt would seem there is a newer version of "
                "WordPress available: {}-- update the template "
                "to support it!\n" + Fore.RESET + Style.RESET_ALL
            ).format(tag)
    print Fore.GREEN + Style.BRIGHT + "Looks like {} is the most".format(
        most_recent_version_supported
    ), "recent version of WordPress. The template supports that, so we're good!" + "\n" + Fore.RESET + Style.RESET_ALL
    deinit()
def go_forth_and_multiply(owner, listfile, gh_token=''):
    """
    Open issues!

    Provide a valid token.
    """
    gh = GitHub(token=gh_token)
    user = gh.me()
    body = ISSUE_BODY.format(user)

    # Only allowed to spam pre-approved list now.
    with open(listfile) as fin:
        repositories = [s.strip() for s in fin.readlines() if not s.startswith('#')]

    tot_n = len(repositories)
    if tot_n == 0:
        print('No repository to process!')
        return

    max_n_per_chunk = 30
    n_chunks = math.ceil(tot_n / max_n_per_chunk)
    i_chunk = 0
    while i_chunk < n_chunks:
        i_start = i_chunk * max_n_per_chunk
        i_end = min(i_start + max_n_per_chunk, tot_n)
        for reponame in repositories[i_start:i_end]:
            time.sleep(0.5)  # Prevent API limit but not abuse detection
            repo = gh.repository(owner, reponame)
            if repo.archived:
                print('Skipped {0} -- archived'.format(repo.name))
                continue
            try:
                i = repo.create_issue('Update the help', body=body)
            except Exception as e:
                # denied!
                print('Skipped {0} -- {1}'.format(repo.name, str(e)))
            else:
                print(i)
        i_chunk += 1
        if i_chunk < n_chunks:
            time.sleep(10)  # Prevent abuse detection, maybe
def get_session():
    """Fetch and/or load API authorization token for GitHub."""
    token = None
    if os.path.isfile(TOKEN_FILE):
        with open(TOKEN_FILE) as fd:
            token = fd.readline().strip()
    elif "GITHUB_TOKEN" in os.environ:
        token = os.environ["GITHUB_TOKEN"]
    if token is None:
        raise OppReleaseError(
            "Please write a GitHub token to .token or set GITHUB_TOKEN env var"
        )
    gh = GitHub(token=token)
    try:
        # Test connection before starting
        gh.is_starred("github", "gitignore")
        return gh
    except GitHubError:
        raise OppReleaseError("Invalid token found")
def _get_github_api(self):
    config_dir = os.path.dirname(os.path.realpath(__file__))
    config_path = os.path.join(config_dir, 'gh_config.json')
    logging.info(
        'Reading Github config from "{path}"'.format(path=config_path))
    with open(config_path) as f:
        config = json.loads(f.read())
    logging.info('Attempting to connect to Github...')
    gh = GitHub(config['login'], config['password'])
    logging.info("Ratelimit remaining: %s" % gh.ratelimit_remaining)
    return gh
def check_for_newer_wp_release():
    init()
    most_recent_version_supported = latest_template_wordpress_version()
    g = GitHub(token=os.environ.get('GITHUB_TOKEN'))
    repo = g.repository('wordpress', 'wordpress')
    tags = [t.name for t in repo.tags()]
    version = parse_version(most_recent_version_supported)
    for tag in tags:
        if not re.search('[Bb]eta', tag):
            assert version >= parse_version(tag), \
                (Fore.RED + Style.BRIGHT +
                 '\nIt would seem there is a newer version of '
                 'WordPress available: {}-- update the template '
                 'to support it!\n' + Fore.RESET + Style.RESET_ALL).format(tag)
    print Fore.GREEN + Style.BRIGHT + \
        "Looks like {} is the most".format(most_recent_version_supported), \
        "recent version of WordPress. The template supports that, so we're good!" + \
        "\n" + Fore.RESET + Style.RESET_ALL
    deinit()
def manage_daily_builds():
    # obtain git handle
    gh = GitHub(GITHUB_API, token=AUTH_TOKEN)
    repository = gh.repository(REPOSITORY_OWNER, REPOSITORY_REPO)

    # get list of releases
    releases = repository.releases()

    # extract keys and sort by build date
    release_keys = {x.id: x.created_at for x in releases}
    sorted_keys = {k: v for k, v in sorted(release_keys.items(), key=lambda x: x[1])}
    print('%s' % (pformat(sorted_keys)))

    # filter to obtain the keys to delete
    delete_keys = [x for x in sorted_keys.keys()][2:]
    print('Deleting releases: %s' % (pformat(delete_keys)))

    # iterate, deleting the releases and corresponding tags
    for rel in releases:
        print('examining rel %d from %s...' % (rel.id, str(rel.created_at)))
        if rel.id in delete_keys:
            print(' deleting %d.' % (rel.id))
            rel_tag_ref = repository.ref('tags/%s' % (rel.tag_name))
            rel.delete()
            if rel_tag_ref is not None:
                print(' deleting tag %s' % (rel_tag_ref.ref))
                rel_tag_ref.delete()
        else:
            # Look for stale files in the release
            assets = rel.assets()
            print('In release %s found assets:' % (rel.name))
            for asset in assets:
                match = PRODUCT_ASSET_PATTERN.search(asset.name)
                print(' asset named %s matches %s' %
                      (asset.name, match.group(1) if match is not None else 'None'))
            build_times = sorted([PRODUCT_ASSET_PATTERN.search(x.name).group(1)
                                  for x in assets if PRODUCT_ASSET_PATTERN.search(x.name)])
            latest_build_time = build_times[-1] if build_times else None
            print('Latest build time is %s' % (latest_build_time))
            for asset in assets:
                match = PRODUCT_ASSET_PATTERN.search(asset.name)
                # print(' asset named %s matches %s' % (asset.name, match.group(1) if match is not None else 'None'))
                if match is not None:
                    asset_build_time = match.group(1)
                    if asset_build_time != latest_build_time:
                        print('deleting stale asset %s' % (asset.name))
                        asset.delete()
def _authorize_github(self):
    """
    :rtype: None
    :raises werkzeug.exceptions.Forbidden: If cannot login to GitHub using
        `auth` credentials
    """
    self.github_api = GitHubApi(**self._auth)
    try:
        self.current_user = self.github_api.me()
    except gh_exc.AuthenticationFailed:
        raise exceptions.Forbidden()
    if self.current_user is None:
        raise exceptions.Forbidden()
def get_github_handler(self):
    userid, passwd = self.get_accounts()
    try:
        if '@' in userid:
            userid = userid.split('@')[0]
        from github3 import GitHub
        return GitHub(userid, passwd)
    except Exception as e:
        logger.error(
            "Failed to get github handler using accounts(%s, %s): %s",
            userid, passwd, str(e))
    return None
def determine_email_address(
    github_user_name: str,
    github_api: GitHub,
) -> typing.Optional[str]:
    not_none(github_user_name)
    try:
        user = github_api.user(github_user_name)
    except NotFoundError:
        logger.warning(f'failed to lookup {github_user_name=} {github_api._github_url=}')
        return None
    return user.email
def __init__(self, **kwargs):
    from github3 import login, GitHub
    self.repo_owner = kwargs.get('repo_owner')
    self.repo_name = kwargs.get('repo_name')
    self.branch = kwargs.get('branch')
    self.base_path = kwargs.get('basepath')
    self.extension = kwargs.get('extension')
    if 'git_user' in kwargs and 'git_password' in kwargs:
        self.git_user = kwargs.get('git_user')
        self.git_password = kwargs.get('git_password')
        self.git = login(self.git_user, self.git_password)
    elif 'token' in kwargs:
        self.token = kwargs.get('token')
        self.git = login(token=self.token)
    else:
        self.git_user = None
        self.git_password = None
        self.git = GitHub()
    super().__init__()
def get_cached_user(gh: GitHub, username: str) -> users.User:
    """
    Get a GitHub user by username.

    Results are cached to stay under API limits.
    """
    key = f"gh_user_{username}"
    user_dict = cache.get(key)
    if user_dict is not None:
        return users.User.from_dict(user_dict, gh.session)
    user = gh.user(username)
    cache.set(key, user.as_dict(), timeout=60 * 60 * 24)  # 1 day
    return user
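# A minimal usage sketch of get_cached_user() above; the token and username are
# placeholders. The second call is expected to be served from the cache rather
# than the GitHub API.
gh = GitHub(token='<personal-access-token>')
first = get_cached_user(gh, 'octocat')
second = get_cached_user(gh, 'octocat')  # cache hit, no extra API request
print(first.login, second.login)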
def submit_issue(title, body, score):
    log.debug("Failed test: {test}".format(test=title))

    # Create Github issue
    gh_username = '******'
    gh_password = '******'
    log.debug("Logging in as {user}".format(user=gh_username))
    github = GitHub(gh_username, gh_password)
    log.debug(
        "Ratelimit remaining: {rate}".format(rate=github.ratelimit_remaining))

    # create_issue(owner, repository, title, body=None, assignee=None, milestone=None, labels=[])
    # TRAVIS_REPO_SLUG (owner_name/repo_name)
    # https://docs.travis-ci.com/user/environment-variables/
    owner, repo = os.environ.get('TRAVIS_REPO_SLUG').split('/')
    log.debug("Repo: {owner}/{repo}".format(owner=owner, repo=repo))

    found = False
    # If there is already an open issue, create a comment instead of a new issue
    for issue in github.iter_repo_issues(owner, repo, state='open'):
        if issue.title == title:
            log.debug(
                "Found existing open ticket: {url}".format(url=issue.html_url))
            comment = issue.create_comment(body)
            log.debug("Created comment: {comment}".format(comment=comment))
            found = True
            break
    if not found:
        log.debug("Attempting to create issue...")
        resp = github.create_issue(owner, repo, title, body, owner)
        log.debug("Created ticket: {resp}".format(resp=resp))

    # Post results
    log.debug("Attempting to post score ({score})...".format(score=score))
    url = "https://tdd-chart.herokuapp.com/score/add?user={owner}/{repo}&score={score}"
    resp = get(url.format(owner=owner, repo=repo, score=score))
    log.debug("TDD-chart response: {code}".format(code=resp.status_code))
def upload(options, info):
    """Lets upload the release to GitHub"""
    # read the github token
    config = configparser.ConfigParser()
    config.read("personal.properties")
    myToken = config.get("GitHub", "token")

    # lets log in
    print("'%s'" % myToken)
    gh = GitHub(token=myToken)
    repo = gh.repository("toomasr", "jspgnviewer")

    # lets do the releases
    try:
        release_name = "JsPgnViewer %s" % options.version
        tag_name = "jspgnviewer-%s" % options.version
        release = repo.create_release(tag_name, name=release_name, prerelease=True)
        f = open("bin/jspgnviewer-%s.zip" % options.version)
        release.upload_asset(content_type='application/zip',
                             name="jspgnviewer-%s.zip" % options.version,
                             asset=f)
    except GitHubError as e:
        raise e

    try:
        release_name = "JsPgnViewer WordPress %s" % options.version
        tag_name = "jspgnviewer-wordpress-%s" % options.version
        release = repo.create_release(tag_name, name=release_name, prerelease=True)
        f = open("bin/pgnviewer-%s.zip" % options.version)
        release.upload_asset(content_type='application/zip',
                             name="pgnviewer-%s.zip" % options.version,
                             asset=f)
    except GitHubError as e:
        raise e
def ebooks_in_github_release(repo_owner, repo_name, tag, token=None):
    """
    returns a list of (book_type, book_name) for a given GitHub release (specified by
    owner, name, tag).

    token is a GitHub authorization token -- useful for accessing higher rate limit
    in the GitHub API
    """
    # map mimetype to file extension
    EBOOK_FORMATS = dict([(v, k) for (k, v) in settings.CONTENT_TYPES.items()])

    if token is not None:
        gh = login(token=token)
    else:
        # anonymous access
        gh = GitHub()

    repo = gh.repository(repo_owner, repo_name)
    release = release_from_tag(repo, tag)

    return [(EBOOK_FORMATS.get(asset.content_type), asset.name)
            for asset in release.iter_assets()
            if EBOOK_FORMATS.get(asset.content_type) is not None]
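# A minimal usage sketch of ebooks_in_github_release() above; the owner,
# repository and tag names are hypothetical placeholders.
for book_type, book_name in ebooks_in_github_release('some-owner', 'some-book-repo',
                                                     tag='v1.0', token=None):
    print(book_type, book_name)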
def prepare_sync(key: bytes, app_id: int, installation_id: int) -> Sync:
    gh = GitHub()
    gh.login_as_app_installation(key, app_id, installation_id)
    auth = "x-access-token:" + gh.session.auth.token
    repos = {}
    print(f"Checking GitHub repositories for installation {installation_id}")
    for r in _app_installation_repositories(gh):
        if not r.homepage:
            print(f"NOTE: Skipping repository {r.full_name} (no homepage)")
            continue
        assert r.name not in repos, 'Duplicate repository name: ' + r.name
        # FIXME: Should we always append .git?
        src = parse_url(r.homepage + ".git")
        dest = parse_url(r.clone_url)._replace(auth=auth)
        repos[r.name] = Repo(src, dest)
    return Sync(repos)
def __init__(self, config, dry_run=False, only_from_cache=False):
    self._dry_run = dry_run
    self.only_from_cache = only_from_cache
    self._mapping = config['mapping']
    self._template = config['template']
    self._gh = GitHub(token=config['token'])
    # Everything is done via _repo
    self._repo = self._gh.repository(config['owner'], config['repository'])
    self._upstream_repo = self._gh.repository(
        config['upstream_owner'], config['upstream_repository'])
    # get current set of available milestones
    self._milestones = dict({
        milestone.title: milestone.number
        for milestone in self._repo.iter_milestones()
    })
    self._users = dict()
    self._user_cache = config.get('user_cache', None)
    self._load_user_cache()
def check_previous(self, error, user=None):
    """Check whether the error was previously reported.

    Rather than try to do fuzzy matching, we simply check whether there is a
    PyTattle section of the first message of the issue, and if so, compare the
    fingerprint against that of the error.
    """
    api = None
    if user is not None:
        cache = user.get_cache('github')
        if 'api' in cache:
            api = cache['api']
    if api is None:
        api = GitHub()
def starred(username, sort, token):
    """GitHub starred

    make your own awesome lists page by GitHub star!

    example:
        starred --username maguowei --sort > README.md
    """
    gh = GitHub(token=token)
    stars = gh.starred_by(username)
    click.echo(desc)
    repo_dict = {}
    for s in stars:
        language = s.language or 'Others'
        description = html_escape(s.description) if s.description else ''
        if language not in repo_dict:
            repo_dict[language] = []
        repo_dict[language].append([s.name, s.html_url, description.strip()])
    if sort:
        repo_dict = OrderedDict(sorted(repo_dict.items(), key=lambda l: l[0]))
    for language in repo_dict.keys():
        data = u' - [{}](#{})'.format(language, language.lower())
        click.echo(data)
    click.echo('')
    for language in repo_dict:
        click.echo('## %s\n' % language)
        for repo in repo_dict[language]:
            data = u'* [{}]({}) - {}'.format(*repo)
            click.echo(data)
        click.echo('')
def get_default_repo_details(owner, repo, token):
    """Return default repo details."""
    try:
        client = GitHub(token=token)
        repo_client = client.repository(owner, repo)
        branch_client = repo_client.branch(repo_client.default_branch)
        branch_data = branch_client.to_json()
        branch_name = branch_data['name']
        author_name = branch_data['commit']['author']['login']
        commit_message = branch_data['commit']['commit']['message']
        sha = branch_data['commit']['sha']
    except Exception as exception:
        log.exception(exception)
        return None
    return {
        'is_pr': False,
        'owner': owner,
        'repo': repo,
        'sha': sha,
        'branch_name': branch_name,
        'author_name': author_name,
        'commit_message': commit_message,
    }
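# A minimal usage sketch of get_default_repo_details() above; owner, repo and
# token are hypothetical placeholders.
details = get_default_repo_details('some-owner', 'some-repo', token='<personal-access-token>')
if details is not None:
    print(details['branch_name'], details['sha'], details['commit_message'])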
def resolve_team_members(
    github_team_name: str,
    github_api: GitHub,
) -> typing.Union[typing.Generator[str, None, None], list]:
    not_none(github_team_name)
    org_name, team_name = github_team_name.split('/')  # always of form 'org/name'
    organisation = github_api.organization(org_name)
    # unfortunately, we have to look-up the team (no api to retrieve it by name)
    team_or_none = _first(filter(lambda team: team.slug == team_name, organisation.teams()))
    if not team_or_none:
        logger.warning('failed to lookup team {t}'.format(t=team_name))
        return []
    for member in map(github_api.user, team_or_none.members()):
        if member.email:
            yield member.email
        else:
            logger.warning(f'no email found for GitHub user {member}')
def get_github_api_for_repo(keychain, owner, repo):
    gh = GitHub()
    # Apply retry policy
    gh.session.mount("http://", adapter)
    gh.session.mount("https://", adapter)

    APP_KEY = os.environ.get("GITHUB_APP_KEY", "").encode("utf-8")
    APP_ID = os.environ.get("GITHUB_APP_ID")
    if APP_ID and APP_KEY:
        installation = INSTALLATIONS.get((owner, repo))
        if installation is None:
            gh.login_as_app(APP_KEY, APP_ID, expire_in=120)
            try:
                installation = gh.app_installation_for_repository(owner, repo)
            except github3.exceptions.NotFoundError:
                raise GithubException(
                    f"Could not access {owner}/{repo} using GitHub app. "
                    "Does the app need to be installed for this repository?")
            INSTALLATIONS[(owner, repo)] = installation
        gh.login_as_app_installation(APP_KEY, APP_ID, installation.id)
    else:
        github_config = keychain.get_service("github")
        gh.login(github_config.username, github_config.password)
    return gh
class GithubAvroSchemaRetriever(AvroSchemaRetriever):
    def __init__(self, **kwargs):
        from github3 import login, GitHub
        self.repo_owner = kwargs.get('repo_owner')
        self.repo_name = kwargs.get('repo_name')
        self.branch = kwargs.get('branch')
        self.base_path = kwargs.get('basepath')
        self.extension = kwargs.get('extension')
        if 'git_user' in kwargs and 'git_password' in kwargs:
            self.git_user = kwargs.get('git_user')
            self.git_password = kwargs.get('git_password')
            self.git = login(self.git_user, self.git_password)
        elif 'token' in kwargs:
            self.token = kwargs.get('token')
            self.git = login(token=self.token)
        else:
            self.git_user = None
            self.git_password = None
            self.git = GitHub()
        super().__init__()

    def get_all_schemas(self, **kwargs) -> ExpiringDict:
        repo = self.git.repository(self.repo_owner, self.repo_name)
        contents = repo.directory_contents(self.base_path, self.branch, return_as=dict)
        schemas = ExpiringDict(max_len=100, max_age_seconds=86400)
        schemas_final = self.get_schema_content(repo, contents, schemas)
        return schemas_final

    def get_schema_content(self, repo, contents, schema_dict):
        for key, content in contents.items():
            if content.type == 'file' and self.extension in key:
                # read schema
                schema_dict[key.split(f"{self.extension}")[0]] = parse_schema(loads(
                    repo.file_contents(content.path, self.branch).decoded))
            elif content.type == 'dir':
                self.get_schema_content(
                    repo,
                    repo.directory_contents(content.path, self.branch, return_as=dict),
                    schema_dict)
        return schema_dict
def get_pr_obj_from_pr_json(
    pr_json: Union[Dict, LazyJson],
    gh: github3.GitHub,
) -> github3.pulls.PullRequest:
    """Produce a github3 pull request object from pr_json.

    Parameters
    ----------
    pr_json : dict-like
        A dict-like object with the current PR information.
    gh : github3 object
        The github3 object for interacting with the GitHub API.

    Returns
    -------
    pr_obj : github3.pulls.PullRequest
        The pull request object.
    """
    feedstock_reponame = pr_json["base"]["repo"]["name"]
    repo = gh.repository("conda-forge", feedstock_reponame)
    return repo.pull_request(pr_json["number"])
def is_github_api_limit_reached(e: github3.GitHubError, gh: github3.GitHub) -> bool:
    """Prints diagnostic information about a github exception.

    Returns
    -------
    out_of_api_credits
        A flag to indicate that the api limit has been exhausted
    """
    print(e)
    print(e.response)
    print(e.response.url)

    try:
        c = gh.rate_limit()["resources"]["core"]
    except Exception:
        # if we can't connect to the rate limit API, let's assume it has been reached
        return True

    if c["remaining"] == 0:
        ts = c["reset"]
        print("API timeout, API returns at")
        print(datetime.datetime.utcfromtimestamp(ts).strftime("%Y-%m-%dT%H:%M:%SZ"))
        return True

    return False
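# A minimal usage sketch tying the two helpers above together: build a PR
# object from stored PR JSON and back off gracefully if the API limit is hit.
# The token, feedstock name and PR number are hypothetical placeholders.
gh = github3.login(token='<personal-access-token>')
pr_json = {"base": {"repo": {"name": "some-feedstock"}}, "number": 1}
try:
    pr_obj = get_pr_obj_from_pr_json(pr_json, gh)
    print(pr_obj.state)
except github3.GitHubError as e:
    if is_github_api_limit_reached(e, gh):
        print("rate limited -- try again later")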
def __init__(self):
    if settings.GITHUB_TOKEN:
        self.github = login(token=settings.GITHUB_TOKEN)
    else:
        self.github = GitHub()
class GitHubHandler(BaseHandler):
    title = "Github"
    url_regex = '(http|https|git)://github.com/'
    url = 'https://github.com'
    repo_regex = r'(?:http|https|git)://github.com/[^/]*/([^/]*)/{0,1}'
    slug_regex = repo_regex

    def __init__(self):
        if settings.GITHUB_TOKEN:
            self.github = login(token=settings.GITHUB_TOKEN)
        else:
            self.github = GitHub()

    def manage_ratelimit(self):
        while self.github.ratelimit_remaining < 10:
            sleep(1)

    def _get_repo(self, package):
        repo_name = package.repo_name()
        if repo_name.endswith("/"):
            repo_name = repo_name[:-1]
        try:
            username, repo_name = package.repo_name().split('/')
        except ValueError:
            return None
        return self.github.repository(username, repo_name)

    def fetch_metadata(self, package):
        self.manage_ratelimit()
        repo = self._get_repo(package)
        if repo is None:
            return package
        package.repo_watchers = repo.watchers
        package.repo_forks = repo.forks
        package.repo_description = repo.description

        contributors = []
        github_account_type = AccountType.objects.get(name="GITHUB")
        for contributor in repo.iter_contributors():
            account, created = Account.objects.get_or_create(
                account_type=github_account_type,
                name=contributor.login)
            contributors.append(account)
            self.manage_ratelimit()
        package.contributors.set(contributors)
        package.save()
        return package

    def fetch_commits(self, package):
        self.manage_ratelimit()
        repo = self._get_repo(package)
        if repo is None:
            return package

        from package.models import Commit  # Added here to avoid circular imports

        for commit in repo.iter_commits():
            self.manage_ratelimit()
            try:
                commit_record, created = Commit.objects.get_or_create(
                    package=package,
                    commit_date=commit.commit.committer['date'])
                if not created:
                    break
            except Commit.MultipleObjectsReturned:
                continue
            # If the commit record already exists, it means we are at the end of the
            # list we want to import
        package.save()
        return package
import subprocess
from datetime import datetime, date
from glob import glob
from os import remove, system, environ, path, getcwd, chdir, rename

import yaml
from github3 import GitHub, exceptions
from hurry.filesize import size, alternative
from requests import get
from xiaomi_flashable_firmware_creator.firmware_creator import FlashableFirmwareCreator

from helpers import set_region, set_version, md5_check, set_folder
from post_updates import post_updates

GIT_OAUTH_TOKEN = environ['XFU']
GIT = GitHub(token=GIT_OAUTH_TOKEN)
WORK_DIR = getcwd()
ARB_DEVICES = [
    'nitrogen', 'nitrogen_global', 'sakura', 'sakura_india_global', 'wayne'
]
STABLE = {}
WEEKLY = {}
VARIANTS = ['stable']


def initialize():
    """ Initial loading and preparing """
    with open('devices/stable_devices.yml', 'r') as stable_json:
class ServiceGithub(ServicesMgr):
    """
        Service Github
    """
    def __init__(self, token=None, **kwargs):
        super(ServiceGithub, self).__init__(token, **kwargs)
        self.scope = ['public_repo']
        self.REQ_TOKEN = 'https://github.com/login/oauth/authorize'
        self.AUTH_URL = 'https://github.com/login/oauth/authorize'
        self.ACC_TOKEN = 'https://github.com/login/oauth/access_token'
        self.username = settings.TH_GITHUB_KEY['username']
        self.password = settings.TH_GITHUB_KEY['password']
        self.consumer_key = settings.TH_GITHUB_KEY['consumer_key']
        self.consumer_secret = settings.TH_GITHUB_KEY['consumer_secret']
        self.token = token
        self.oauth = 'oauth1'
        self.service = 'ServiceGithub'
        if self.token:
            token_key, token_secret = self.token.split('#TH#')
            self.gh = GitHub(token=token_key)
        else:
            self.gh = GitHub(username=self.username, password=self.password)

    def gh_footer(self, trigger, issue):
        link = 'https://github.com/{0}/{1}/issues/{2}'.format(
            trigger.repo, trigger.project, issue.id)
        provided_by = _('Provided by')
        provided_from = _('from')
        footer_from = "<br/><br/>{} <em>{}</em> {} <a href='{}'>{}</a>"
        return footer_from.format(provided_by, trigger.trigger.description,
                                  provided_from, link, link)

    def read_data(self, **kwargs):
        """
            get the data from the service

            :param kwargs: contain keyword args : trigger_id at least
            :type kwargs: dict
            :rtype: list
        """
        trigger_id = kwargs.get('trigger_id')
        date_triggered = str(kwargs.get('date_triggered')).replace(' ', 'T')
        data = list()
        if self.token:
            # check if it remains more than 1 access
            # then we can create an issue
            if self.gh.ratelimit_remaining > 1:
                import pypandoc
                trigger = Github.objects.get(trigger_id=trigger_id)
                issues = self.gh.issues_on(trigger.repo, trigger.project,
                                           since=date_triggered)
                for issue in issues:
                    content = pypandoc.convert(issue.body, 'md', format='html')
                    content += self.gh_footer(trigger, issue)
                    data.append({'title': issue.title, 'content': content})
                    # digester
                    self.send_digest_event(trigger_id, issue.title, '')
                cache.set('th_github_' + str(trigger_id), data)
            else:
                # rate limit reached, do nothing right now
                logger.warning("Rate limit reached")
                update_result(trigger_id, msg="Rate limit reached", status=True)
        else:
            logger.critical("no token provided")
            update_result(trigger_id, msg="No token provided", status=True)
        return data

    def save_data(self, trigger_id, **data):
        """
            let's save the data

            :param trigger_id: trigger ID from which to save data
            :param data: the data to check to be used and save
            :type trigger_id: int
            :type data: dict
            :return: the status of the save statement
            :rtype: boolean
        """
        if self.token:
            title = self.set_title(data)
            body = self.set_content(data)
            # get the details of this trigger
            trigger = Github.objects.get(trigger_id=trigger_id)

            # check if it remains more than 1 access
            # then we can create an issue
            limit = self.gh.ratelimit_remaining
            if limit > 1:
                # repo goes to "owner"
                # project goes to "repository"
                r = self.gh.create_issue(trigger.repo, trigger.project, title, body)
            else:
                # rate limit reached
                logger.warn("Rate limit reached")
                update_result(trigger_id, msg="Rate limit reached", status=True)
                # put again in cache the data that could not be
                # published in Github yet
                cache.set('th_github_' + str(trigger_id), data, version=2)
                return True
            sentence = str('github {} created').format(r)
            logger.debug(sentence)
            status = True
        else:
            sentence = "no token or link provided for " \
                       "trigger ID {} ".format(trigger_id)
            logger.critical(sentence)
            update_result(trigger_id, msg=sentence, status=False)
            status = False
        return status

    def auth(self, request):
        """
            let's auth the user to the Service

            :param request: request object
            :return: callback url
            :rtype: string that contains the url to redirect after auth
        """
        auth = self.gh.authorize(self.username, self.password, self.scope,
                                 '', '', self.consumer_key, self.consumer_secret)
        request.session['oauth_token'] = auth.token
        request.session['oauth_id'] = auth.id
        return self.callback_url(request)

    def callback(self, request, **kwargs):
        """
            Called from the Service when the user accept to activate it

            :param request: request object
            :return: callback url
            :rtype: string , path to the template
        """
        access_token = request.session['oauth_token'] + "#TH#"
        access_token += str(request.session['oauth_id'])
        kwargs = {'access_token': access_token}
        return super(ServiceGithub, self).callback(request, **kwargs)
class Command(object):
    __metaclass__ = ABCMeta
    name = None
    usage = None
    repository = ()
    user = ''
    subcommands = {}

    SUCCESS = 0
    FAILURE = 1
    COMMAND_UNKNOWN = 127

    def __init__(self):
        super(Command, self).__init__()
        assert self.name
        commands[self.name] = self
        self.gh = GitHub()
        self.gh.set_user_agent('github-cli/{0} (http://git.io/MEmEmw)'.format(
            __version__
        ))
        self.parser = CustomOptionParser(usage=self.usage)

    @abstractmethod
    def run(self, options, args):
        return self.FAILURE

    def get_repo(self, options):
        self.repo = None
        if self.repository:
            self.repo = self.gh.repository(*self.repository)
        if not (self.repo or options.loc_aware):
            self.parser.error('A repository is required.')

    def get_user(self):
        if not self.user:
            self.login()
            self.user = self.gh.user()

    def login(self):
        # Get the full path to the configuration file
        config = github_config()
        parser = ConfigParser()
        # Check to make sure the file exists and we are allowed to read it
        if os.path.isfile(config) and os.access(config, os.R_OK | os.W_OK):
            parser.readfp(open(config))
            self.gh.login(token=parser.get('github', 'token'))
        else:
            # Either the file didn't exist or we didn't have the correct
            # permissions
            user = ''
            while not user:
                # We will not stop until we are given a username
                user = input('Username: ')
            # ****** -- part of the prompt/authorization code here was redacted
            # in the source; `pw = ''` and the self.gh.authorize(...) call are
            # an assumed reconstruction.
            pw = ''
            while not pw:
                # Nor will we stop until we're given a password
                pw = getpass('Password: ')
            auth = self.gh.authorize(
                user, pw,
                ['user', 'repo', 'gist'],
                'github-cli',
                'http://git.io/MEmEmw'
            )
            parser.add_section('github')
            parser.set('github', 'token', auth.token)
            self.gh.login(token=auth.token)
            # Create the file if it doesn't exist. Otherwise completely blank
            # out what was there before. Kind of dangerous and destructive but
            # somewhat necessary
            parser.write(open(config, 'w+'))

    def help(self):
        self.parser.print_help()
        if self.subcommands:
            print('\nSubcommands:')
            for command in sorted(self.subcommands.keys()):
                print(' {0}:\n\t{1}'.format(
                    command, self.subcommands[command]
                ))
        sys.exit(0)
def starred(username, token, sort, repository, message, output, http_proxy, https_proxy,
            launch, type):
    """GitHub starred

    creating your own Awesome List used GitHub stars!

    example:
        starred --username 1132719438 --output README.md
    """
    if output.strip():
        output = output.strip()
        output_path = os.path.split(output)[0]
        if output_path and not os.path.isdir(output_path):
            os.makedirs(output_path)
        output_file = open(output, "w", encoding='utf-8')
    else:
        output_file = None

    if repository:
        if not token:
            click.secho('Error: create repository need set --token',
                        fg='red', file=sys.stderr)
            return
        repo_file = BytesIO()
        sys.stdout = repo_file
        # do not output to file when update repository
        output_file = None
    else:
        repo_file = None

    try:
        gh = GitHub(token=token)
        if http_proxy:
            gh.session.proxies['http://'] = http_proxy
            if https_proxy:
                gh.session.proxies['https://'] = https_proxy
            else:
                gh.session.proxies['https://'] = http_proxy
        stars = gh.starred_by(username)
    except ForbiddenError as e:
        click.secho('Error: talk to Github failed: {}'.format(e),
                    fg='red', file=sys.stderr)
        return

    today = str(datetime.date.today())
    month = datetime.date.today().strftime('%Y%m')

    repo_dict = {}
    new_dict = {}

    # starred order
    star_order = 0
    for s in stars:
        language = s.language or 'Others'
        description = html_escape(s.description).replace('\n', '') if s.description else ''
        if language not in repo_dict:
            repo_dict[language] = []
        repo_dict[language].append([
            s.name, s.html_url, description.strip(), s.owner.login,
            s.stargazers_count, star_order
        ])
        if language not in new_dict:
            new_dict[language] = []
        new_dict[language].append([s.name, s.html_url])
        star_order += 1

    repo_dict = OrderedDict(sorted(repo_dict.items(), key=lambda l: l[0]))
    new_dict = OrderedDict(sorted(new_dict.items(), key=lambda l: l[0]))

    # load prev repo dict and compare with new repo dict
    save_pkl = True
    cur_path = os.path.split(os.path.realpath(__file__))[0]
    repo_pkl_path = os.path.join(cur_path, 'starred-repo.pkl')
    if os.path.isfile(repo_pkl_path):
        with open(repo_pkl_path, 'rb') as file:
            old_dict = pickle.load(file, encoding='utf-8')
        if operator.eq(old_dict, new_dict):
            save_pkl = False
            if repo_file:
                click.secho(
                    'Error: starred repositories not change in {}'.format(today),
                    fg='red', file=sys.stderr)
                return
    if save_pkl:
        with open(repo_pkl_path, 'wb') as file:
            pickle.dump(new_dict, file)

    total = 0
    # sort by language and date
    if sort == 'date':
        for language in repo_dict:
            repo_dict[language] = sorted(repo_dict[language], key=lambda l: l[5])
            total += len(repo_dict[language])
    # sort by language and name
    elif sort == 'name':
        for language in repo_dict:
            repo_dict[language] = sorted(repo_dict[language], key=lambda l: l[0])
            total += len(repo_dict[language])
    # sort by language and stars
    else:
        for language in repo_dict:
            repo_dict[language] = sorted(repo_dict[language], key=lambda l: l[4],
                                         reverse=True)
            total += len(repo_dict[language])

    # desc
    count_badge_url = count_badge.format(count=total, color='green')
    date_badge_url = date_badge.format(today=today.replace('-', '--'), color='blue')
    click.echo(desc.format(badge_url=badge_url, awesome_url=awesome_url,
                           github_url=github_url, count_badge_url=count_badge_url,
                           date_badge_url=date_badge_url),
               file=output_file)

    # contents
    title_dict = {}
    for language in repo_dict.keys():
        title = '{} ({})'.format(language, len(repo_dict[language]))
        title_url = title2url(title)
        if title_url not in title_dict:
            title_dict[title_url] = 1
        else:
            cnt = title_dict[title_url]
            title_dict[title_url] += 1
            title_url = title_url + '-' + str(cnt)
        data = u' - [{}](#{})'.format(title, title_url)
        click.echo(data, file=output_file)
    click.echo('', file=output_file)

    info_dict = {}
    for language in repo_dict:
        info_dict[language] = [
            [
                index + 1,                            # index
                '[{}]({})'.format(repo[0], repo[1]),  # name with url
                repo[2],                              # description
                repo[3],                              # owner
                repo[4]                               # stars
            ]
            for index, repo in enumerate(repo_dict[language])
        ]
    info_dict = OrderedDict(sorted(info_dict.items(), key=lambda l: l[0]))

    # repo
    for language in info_dict:
        count = len(info_dict[language])
        info_dict[language].insert(0, ['', 'Name', 'Description', 'Owner', 'Stars'])
        click.echo('## {} ({}) \n'.format(language, count), file=output_file)
        if type == 'table':
            table = GithubFlavoredMarkdownTable(info_dict[language])
            click.echo(table.table, file=output_file)
        else:
            for repo in repo_dict[language]:
                data = u'- [{}]({}) - {}'.format(*repo)
                click.echo(data, file=output_file)
        click.echo('', file=output_file)

    # license
    click.echo(license_.format(username=username), file=output_file)

    if repo_file:
        if not message:
            message = 'Add starred {}'.format(today)
        try:
            rep = gh.repository(username, repository)
            try:
                rep.file_contents('/Archives/{}/README-{}.md'.format(month, today))
                click.secho(
                    'Error: already commit [/Archives/{}/README-{}.md]'.format(month, today),
                    fg='red', file=sys.stderr)
            except NotFoundError:
                readme = rep.readme()
                readme.update(message, repo_file.getvalue())
                rep.create_file(
                    'Archives/{}/README-{}.md'.format(month, today),
                    'Archive starred {}'.format(today),
                    repo_file.getvalue())
        except NotFoundError:
            rep = gh.create_repository(repository, 'A curated list of my GitHub stars!')
            rep.create_file('README.md', 'Add starred {}'.format(today),
                            repo_file.getvalue())
            rep.create_file('Archives/{}/README-{}.md'.format(month, today),
                            'Archive starred {}'.format(today),
                            repo_file.getvalue())
        if launch:
            click.launch(rep.html_url)
class GitHubRepository:
    """Wrapper around GitHub API. Used to access public data."""

    def __init__(self, owner, repo_name, token=""):
        """Build the GitHub API URL which points to the definition of the repository.

        Args:
            owner (str): the owner's GitHub username
            repo_name (str): the name of the repository
            token (str): the GitHub API token

        Returns:
            dict: a representation of the repo definition

        """
        self._github_repository = GitHub(token=token).repository(owner, repo_name)

    @property
    def definition(self):
        """Fetch the definition of the repository, exposed by the GitHub API.

        Returns:
            dict: a representation of the repo definition

        """
        return self._github_repository.as_dict()

    @retry_async_decorator(retry_exceptions=GitHubException)
    async def get_commit(self, commit_hash):
        """Fetch the definition of the commit, exposed by the GitHub API.

        Args:
            commit_hash (str): the hash of the git commit

        Returns:
            dict: a representation of the commit

        """
        return self._github_repository.commit(commit_hash).as_dict()

    @retry_async_decorator(retry_exceptions=GitHubException)
    async def get_pull_request(self, pull_request_number):
        """Fetch the definition of the pull request, exposed by the GitHub API.

        Args:
            pull_request_number (int): the ID of the pull request

        Returns:
            dict: a representation of the pull request

        """
        return self._github_repository.pull_request(pull_request_number).as_dict()

    @retry_async_decorator(retry_exceptions=GitHubException)
    async def get_release(self, tag_name):
        """Fetch the definition of the release matching the tag name.

        Args:
            tag_name (str): the tag linked to the release

        Returns:
            dict: a representation of the tag

        """
        return self._github_repository.release_from_tag(tag_name).as_dict()

    @retry_async_decorator(retry_exceptions=GitHubException)
    async def get_tag_hash(self, tag_name):
        """Fetch the commit hash that was tagged with ``tag_name``.

        Args:
            tag_name (str): the name of the tag

        Returns:
            str: the commit hash linked by the tag

        """
        tag_object = get_single_item_from_sequence(
            sequence=self._github_repository.tags(),
            condition=lambda tag: tag.name == tag_name,
            no_item_error_message='No tag "{}" exist'.format(tag_name),
            too_many_item_error_message='Too many tags "{}" found'.format(tag_name),
        )
        return tag_object.commit.sha

    async def has_commit_landed_on_repository(self, context, revision):
        """Tell if a commit was landed on the repository or if it just comes from a pull request.

        Args:
            context (scriptworker.context.Context): the scriptworker context.
            revision (str): the commit hash or the tag name.

        Returns:
            bool: True if the commit is present in one of the branches of the main repository

        """
        if any(vcs_rule.get("require_secret")
               for vcs_rule in context.config["trusted_vcs_rules"]):
            # This check uses unofficial API on github, which we can't easily
            # check for private repos, assume its true in the private case.
            log.info("has_commit_landed_on_repository() not implemented for private"
                     "repositories, assume True")
            return True

        # Revision may be a tag name. `branch_commits` doesn't work on tags
        if not _is_git_full_hash(revision):
            revision = await self.get_tag_hash(tag_name=revision)

        html_text = await _fetch_github_branch_commits_data(
            context, self._github_repository.html_url, revision)

        # https://github.com/{repo_owner}/{repo_name}/branch_commits/{revision} just returns some \n
        # when the commit hasn't landed on the origin repo. Otherwise, some HTML data is returned - it
        # represents the branches on which the given revision is present.
        return html_text != ""
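# A minimal usage sketch of the GitHubRepository wrapper above; the owner,
# repository name, token and commit hash are hypothetical placeholders.
import asyncio

repo = GitHubRepository('some-owner', 'some-repo', token='<personal-access-token>')
print(repo.definition['full_name'])
commit = asyncio.run(repo.get_commit('0123456789abcdef0123456789abcdef01234567'))
print(commit['sha'])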