def _json_message_response(success, text):
    """Serialize a success flag and message into a JSON HttpResponse."""
    payload = simplejson.dumps({'success': success, 'message': text})
    return HttpResponse(payload, mimetype='application/json')


def add_repo(request):
    """View for adding a repo.

    Looks up the requesting user's GitHub OAuth token, fetches the repo
    named in POST['name'], and records it (plus its primary language).
    Always responds with JSON of the form {'success': bool, 'message': str}.
    """
    user = UserSocialAuth.objects.filter(provider='github').get(user_id=request.user.id)
    github = Github(user.tokens[u'access_token'])
    name = request.POST['name']
    repo = github.get_user().get_repo(name)
    try:
        language_str = repo.language.lower()
    except AttributeError:
        # repo.language is None when GitHub could not detect a language.
        return _json_message_response(
            False,
            "Could not add repo because it doesn't have a language. "
            "Try specifying the language in your Github repo settings.")
    # TODO: exception handling here
    language_obj, created_lang = Language.objects.get_or_create(name=language_str)
    repo_obj, created_repo = Repo.objects.get_or_create(
        full_name=repo.full_name, description=repo.description, language=language_obj)
    if not created_repo:
        return _json_message_response(False, '{} is already in our collection'.format(name))
    return _json_message_response(True, 'Successfully added {}.'.format(name))
def change_issue_label(user, lbl_from, lbl_to, repo_name, issue_number):
    """Replace label `lbl_from` with `lbl_to` on a GitHub issue.

    If the destination label is "DONE" the issue is closed instead of
    relabeled. Returns the newly applied label object, or None when the
    issue was closed (the original could hit a NameError on that path).
    """
    # retrieve Github objects
    github = Github(login_or_token=user.github_access_token)
    git_user = github.get_user()
    repo = get_a_repo(git_user, repo_name)
    # retrieve the issue
    issue = repo.get_issue(int(issue_number))
    if lbl_to == "DONE":
        # DONE means close the issue; no label gets applied.
        issue.edit(state='closed')
        return None
    # remove the old label (quoted: label names may contain spaces, etc.)
    issue.remove_from_labels(urllib.quote(lbl_from))
    # reuse the destination label when it already exists, else create it
    if any(label.name == lbl_to for label in repo.get_labels()):
        new_label = repo.get_label(lbl_to)
    else:
        new_label = repo.create_label(lbl_to, "00ff00")
    issue.add_to_labels(new_label)
    return new_label
def _get_issues_in_milestone(self, milestone, login, password):
    """Yield non-ignored Issue wrappers for every issue in the milestone."""
    repo = Github(login_or_token=login, password=password).get_repo(self.repository)
    target = self._get_milestone(repo, milestone)
    for raw_issue in repo.get_issues(milestone=target, state="all"):
        wrapped = Issue(raw_issue)
        if wrapped.ignored:
            continue
        yield wrapped
class GitHub(object):
    """Thin wrapper around PyGithub for cloning an owner's repositories."""

    def __init__(self):
        # Token may legitimately be empty: PyGithub then runs unauthenticated.
        self.gh = Github(os.environ.get('SGIT_GITHUB_TOKEN', ''))

    def clone_repositories(self, args):
        """Clone every repository returned by get_repositories(args) into cwd."""
        for clone_url in self.get_repositories(args):
            basename = clone_url.split('/')[-1][:-4]  # strip trailing ".git"
            if exists(join(abspath(os.curdir), basename)):
                pprinter('Repository already exists:', basename)
            else:
                pprinter('Cloning Repository', clone_url)
                check_call(['git', 'clone', clone_url])

    def get_repositories(self, args):
        """Return clone URLs for an "-o <org>" or "-u <user>" argument pair."""
        kind, owner = args[0], args[1]
        if kind == "-o":
            return [r.clone_url for r in self.gh.get_organization(owner).get_repos()]
        elif kind == "-u":
            return [r.clone_url for r in self.gh.get_user(owner).get_repos('owner')]
def nonSocialConnect(USER,REPO): filename = "./static/bootstrap/data/" +USER +REPO+ ".json" if os.path.exists(filename): return 1 else: username,password=readcfg() client = Github(login_or_token=username,password=password, per_page=100) user = client.get_user(USER) repo = user.get_repo(REPO) print 'chgithub.SocialConnect->get start' stargazers = [ s for s in repo.get_stargazers() ] #获得关注者,通常这个人数比较多 contributors = [ s for s in repo.get_contributors() ] #获得贡献者 g = nx.DiGraph() g.add_node(repo.name + '(r)', type='repo', lang=repo.language, owner=user.login) for sg in stargazers: g.add_node(sg.login + '(u)', type='user') g.add_edge(sg.login + '(u)', repo.name + '(r)', type='gazes') print 'chgithub.SocialConnect->finish add stargazers' for sg in contributors: g.add_node(sg.login + '(u)', type='user') g.add_edge(sg.login + '(u)', repo.name + '(r)', type='conbs') print 'chgithub.SocialConnect->finish add contributors' d = json_graph.node_link_data(g) json.dump(d, open(filename, 'w')) print 'chgithub.SocialConnect->DONE' return 1
def authenticate(self):
    """Log in to GitHub with the stored credentials and cache the user object.

    Raises a generic Exception wrapping whatever PyGithub raised.
    """
    logger.info("User authentication.")
    try:
        client = Github(self._username, self._password)
        self._user = client.get_user()
    except Exception as e:
        raise Exception(
            "Can not authenticate user {}.\n{}".format(self._username, str(e)))
def main(args, loglevel):
    """Download every file matching args.wildcard from each repo in args.repo_file."""
    logging.basicConfig(format="%(levelname)s: %(message)s", level=loglevel)
    socket.setdefaulttimeout(args.timeout)
    g = Github()
    with open(args.repo_file, 'r') as f:
        file_counter = 0
        for line in f.readlines():
            logging.info('Fetching repository: %s' % line)
            try:
                repo_str = line.rstrip().split('github.com/')[-1]
                repo = g.get_repo(repo_str)
                tree = repo.get_git_tree('master', recursive=True)
                files_to_download = []
                for entry in tree.tree:
                    if fnmatch.fnmatch(entry.path, args.wildcard):
                        files_to_download.append(
                            'https://github.com/%s/raw/master/%s' % (repo_str, entry.path))
                for file_url in files_to_download:
                    logging.info('Downloading %s' % file_url)
                    file_counter += 1
                    filename = posixpath.basename(urlparse.urlsplit(file_url).path)
                    output_path = os.path.join(args.output_dir, filename)
                    if os.path.exists(output_path):
                        # avoid clobbering an earlier download with the same name
                        output_path += "-" + str(file_counter)
                    try:
                        urllib.urlretrieve(file_url, output_path)
                    except Exception:
                        # Was a bare `except:` -- keep best-effort semantics but
                        # stop swallowing SystemExit/KeyboardInterrupt.
                        logging.error('Error downloading %s' % file_url)
            except Exception:
                logging.error('Error fetching repository %s' % line)
def _identify(self, token=None, login=None):
    """Create the PyGithub client: token auth, login + prompted password, or anonymous."""
    if token:
        self.github = Github(token)
        return
    if login:
        # Password is prompted interactively, never passed as a parameter.
        self.github = Github(login, getpass())
        return
    self.github = Github()
def __init__(self, db_name="codeneurohackathon", repo_name="CodeNeuro/neurofinder",
             s3_name='code.neuro/neurofinder', dry=False):
    """Connect to Mongo, the GitHub repo, and the S3 bucket used for evaluation.

    Credentials come from the MONGO_CONNECT_URL, GITHUB_BOT_USERNAME and
    GITHUB_BOT_PASS environment variables; raises Exception when any is unset.
    """
    # Use .get(): os.environ['X'] raises KeyError before the guard could ever
    # run, which made the original "not set" checks dead code.
    mongo_connect_url = os.environ.get('MONGO_CONNECT_URL')
    if not mongo_connect_url:
        raise Exception("Mongo connect environment variable not set")
    github_bot_username = os.environ.get('GITHUB_BOT_USERNAME')
    if not github_bot_username:
        raise Exception("Github bot username environment variable not set")
    github_bot_pass = os.environ.get('GITHUB_BOT_PASS')
    if not github_bot_pass:
        raise Exception("Github bot password environment variable not set")
    mongo = MongoClient(mongo_connect_url)
    self.db = getattr(mongo, db_name)
    gitbot = Github(github_bot_username, github_bot_pass)
    self.repo = gitbot.get_repo(repo_name)
    conn = boto.connect_s3()
    bucket_name, folder = s3_name.split('/')
    bucket = conn.get_bucket(bucket_name)
    self.bucket = bucket
    self.folder = folder
    self.dry = dry
    print("\nConnected to databases and repo, starting evaluation...\n")
def send_to_ckan(mod):
    # Generate a .netkan metadata file for `mod`, commit it to a fresh branch
    # of the local NetKAN checkout, push it, and open a pull request against
    # KSP-CKAN/NetKAN on behalf of the mod author.
    # No-op unless a NetKAN checkout path has been configured.
    if not _cfg("netkan_repo_path"):
        return
    json_blob = {
        'spec_version': 1,
        'identifier': re.sub(r'\W+', '', mod.name),  # strip non-word characters
        '$kref': '#/ckan/kerbalstuff/' + str(mod.id),
        'x_netkan_license_ok': True
    }
    wd = _cfg("netkan_repo_path")
    # De-duplicate the identifier by appending the first free numeric suffix.
    if os.path.exists(os.path.join(wd, 'NetKAN', json_blob['identifier'] + '.netkan')):
        num = 1
        while os.path.exists(os.path.join(wd, 'NetKAN', json_blob['identifier'] + str(num) + '.netkan')):
            num += 1
        json_blob['identifier'] = json_blob['identifier'] + str(num)
    path = os.path.join(wd, 'NetKAN', json_blob['identifier'] + '.netkan')
    with open(path, 'w') as f:
        f.write(json.dumps(json_blob, indent=4))
    # Commit the new file on a branch based on upstream/master and push it.
    subprocess.call(['git', 'fetch', 'upstream'], cwd=wd)
    subprocess.call(['git', 'checkout', '-b', 'add-' + json_blob['identifier'], 'upstream/master'], cwd=wd)
    subprocess.call(['git', 'add', '-A'], cwd=wd)
    subprocess.call(['git', 'commit', '-m', 'Add {0} from Kerbal Stuff\n\nThis is an automated commit on behalf of {1}'\
        .format(mod.name, mod.user.username), '--author={0} <{1}>'.format(mod.user.username, mod.user.email)], cwd=wd)
    subprocess.call(['git', 'push', '-u', 'origin', 'add-' + json_blob['identifier']], cwd=wd)
    # Open the pull request against the canonical NetKAN repository.
    g = Github(_cfg('github_user'), _cfg('github_pass'))
    r = g.get_repo("KSP-CKAN/NetKAN")
    r.create_pull(title="Add {0} from Kerbal Stuff".format(mod.name), base=r.default_branch,
                  head="KerbalStuffBot:add-" + json_blob['identifier'], body=\
"""\
This pull request was automatically generated by Kerbal Stuff on behalf of {0}, to add [{1}]({4}{2}) to CKAN.

Please direct questions about this pull request to [{0}]({4}{3}).
""".format(mod.user.username, mod.name,\
           url_for('mods.mod', mod_name=mod.name, id=mod.id),\
           url_for("profile.view_profile", username=mod.user.username),\
           _cfg("protocol") + "://" + _cfg("domain")))
def main(token, revision_range):
    """Print the contributor list and merged-PR list for a release range.

    `revision_range` is "<last_release>..<current_release>".
    """
    lst_release, cur_release = [r.strip() for r in revision_range.split('..')]
    github = Github(token)
    github_repo = github.get_repo('numpy/numpy')

    # document authors
    authors = get_authors(revision_range)
    heading = u"Contributors to {0}".format(cur_release)
    print()
    print(heading)
    print(u"-" * len(heading))
    print(author_msg % len(authors))
    for s in authors:
        print(u'- ' + s)

    # document pull requests
    pull_requests = get_pull_requests(github_repo, revision_range)
    heading = u"Pull requests merged for {0}".format(cur_release)
    print()
    print(heading)
    print(u"-" * len(heading))
    print(pull_request_msg % len(pull_requests))
    for pull in pull_requests:
        pull_msg = u"- `#{0} <{1}>`__: {2}"
        # collapse internal whitespace (escaped backslash: \s is a regex class)
        title = re.sub(u"\\s+", u" ", pull.title.strip())
        if len(title) > 60:
            # try to cut at a word boundary just past 60 characters
            remainder = re.sub(u"\\s.*$", u"...", title[60:])
            if len(remainder) > 20:
                # BUG FIX: the truncated form was assigned to `remainder` and
                # then discarded, so over-long titles printed untruncated.
                title = title[:80] + u"..."
            else:
                title = title[:60] + remainder
        print(pull_msg.format(pull.number, pull.html_url, title))
def authenticate_user():
    """Authenticate against GitHub, caching a personal token in ~/.letgithub.

    Reuses the cached token when present; otherwise prompts for credentials,
    creates an authorization via the GitHub API, and stores the new token.
    """
    import getpass  # local: used only on the interactive fallback path
    token_file = os.path.expanduser('~/.letgithub/token.txt')
    while True:
        try:
            with open(token_file, 'r') as f:
                token = f.read()
            g = Github(token)
            config.update(GITHUB=g)
            config.update(PROMT_MSG='[{}]> '.format(g.get_user().login))
            return
        except FileNotFoundError:
            # NOTE(review): the credential prompts were redacted ('******') in
            # the original source; reconstructed below -- confirm against VCS.
            username = input('Github username: ')
            password = getpass.getpass('Github password: ')
            url = '{}/{}'.format(GITHUB_API, 'authorizations')
            payload = {
                'note': 'token for letgithub',
                'scopes': ['repo']
            }
            res = requests.post(url, auth=(username, password), data=json.dumps(payload))
            data = json.loads(res.text)
            if res.status_code != 201:
                msg = data.get('message', res.status_code)
                print('ERROR: {}'.format(msg))
                continue
            config.update(GITHUB=Github(data['token']))
            config.update(PROMT_MSG='[{}]> '.format(username))
            os.makedirs(os.path.dirname(token_file), exist_ok=True)
            with open(token_file, 'w') as f:
                f.write(data['token'])
            return
def build_pr_index(filename, gh_org='conda-forge', staged_recipes_repo='staged-recipes'):
    "Iterate over open pull requests in staged_recipes and return dict of pr:pkg-name"
    token = smithy_github.gh_token()
    gh = Github(token)
    org = gh.get_organization(gh_org)
    repo = org.get_repo(staged_recipes_repo)
    pkg_index = {}
    for pr in list(repo.get_pulls()):
        for f in pr.get_files():
            if not f.filename.lower().endswith('meta.yaml'):
                continue
            try:
                meta = requests.get(f.raw_url).content
                pkg_name = _extract_package_name(meta)
                idx = 'pr {} ({}) /{}'.format(pr.number, pkg_name, f.filename)
                pkg_index[idx] = pkg_name
            except (AttributeError, ScannerError) as err:
                # BUG FIX: on a parse failure `idx` was never assigned for
                # this file, so the handler raised UnboundLocalError (or
                # silently overwrote the previous file's entry).
                idx = 'pr {} (unparsed) /{}'.format(pr.number, f.filename)
                pkg_index[idx] = None
                print('Unable to parse meta.yaml for pr #{}'.format(pr.number))
                print('setting pkg_name to None')
                print('Traceback: \n', err)
    with open(filename, 'w') as f:
        json.dump(pkg_index, f)
    print('pull requests index written to {}'.format(filename))
def emit(self, record):
    """Log-handler hook: file the log record as a GitHub issue.

    Builds a title from the record (annotated with the request origin when a
    request is attached) and a body from the stack trace plus a filtered
    request repr.
    """
    try:
        request = record.request
        title = '%s (%s IP): %s' % (
            record.levelname,
            (request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS
             and 'internal' or 'EXTERNAL'),
            record.getMessage()
        )
        # renamed from `filter` -- that shadowed the builtin
        exc_filter = get_exception_reporter_filter(request)
        request_repr = exc_filter.get_request_repr(request)
    except Exception:
        title = '%s: %s' % (
            record.levelname,
            record.getMessage()
        )
        request = None
        request_repr = "Request repr() unavailable."
    title = self.format_title(title)
    if record.exc_info:
        exc_info = record.exc_info
        stack_trace = '\n'.join(traceback.format_exception(*record.exc_info))
    else:
        exc_info = (None, record.getMessage(), None)
        stack_trace = 'No stack trace available'
    message = "%s\n\n%s" % (stack_trace, request_repr)
    # create github issue
    github = Github(conf.GITHUB_TOKEN)
    user = github.get_user(conf.GITHUB_USER)
    repository = user.get_repo(conf.GITHUB_REPOSITORY_NAME)
    # BUG FIX: the original passed `self` as the first positional argument,
    # making the handler object the issue title and colliding with body=.
    issue = repository.create_issue(title, body=message)
def github_get_all_issues():
    """ Get all Github issues, note this only returns open issues """
    client = Github(settings.GITHUB_USER, settings.GITHUB_PASSWORD)
    return client.get_repo(settings.ISSUES_REPO).get_issues()
def main(): username = raw_input("Username: "******"open") closed_issues = repo.get_issues(state="closed") issues = [] [issues.append(i) for i in open_issues] [issues.append(i) for i in closed_issues] today = datetime.today() completed_articles, late_articles, due_articles = ParseIssues(issues) print "HTML5 Rocks Quarter Report for %s" % today.date() print "=========================================\n" print "Articles due this quater" print "------------------------\n" if len(due_articles) == 0: print "There are no articles due this quarter, either all is good, or something messed up!\n" else: print "|Author|Article|Delivery date|Tech Writer|State|" print "|------|-------|-------------|-----------|-----|" for article in due_articles: print "|%s|[%s](%s)|%s|%s|%s" % ((article.assignee or article.user).name, article.title, article.html_url, article.due_on.date(), article.tech_writer, article.state)
def check_update(window=None):
    """
    Function to check for GSF Parser updates by checking tags
    and opening a window if an update is available
    """
    if not settings["misc"]["autoupdate"]:
        return
    try:
        repo = Github().get_user("RedFantom").get_repo("gsf-parser")
        installed = Version(settings["misc"]["version"].replace("v", ""))
        for tag in repo.get_tags():
            try:
                tagged = Version(tag.name.replace("v", ""))
                if tagged > installed:
                    UpdateWindow(window, tag.name)
                    break
                if tagged < installed:
                    # Tags arrive newest-first; once we see one older than the
                    # installed version no update exists, so stop early to
                    # preserve the API rate limit.
                    break
            except ValueError as e:
                print(e)
                continue
    except (GithubException, socket.timeout, socket.error, socket.gaierror):
        # Network problems or API errors: silently skip the update check.
        pass
def getRespond(user1 = 'edx',repo1 = 'edx-documentation'): ''' 获取原始仓库或者用户的一切API请求,参数是配置查找的用户以及公开仓库 :return: client,repo,stargazers,user ''' url = "https://api.github.com/repos/%s/%s/stargazers" % (user1, repo1) response = requests.get(url) print json.dumps(response.json()[0], indent=1) print for (k,v) in response.headers.items(): print k, "=>", v # ACCESS_TOKEN = '9ebc1b3f8357b7b5a208daafd8a65a7ead7eba19' ACCESS_TOKEN = '1161b718b9555cd76bf7ff9070c8f1ba300ea885' # 这里配置查找的用户以及公开仓库 USER = user1 REPO = repo1 client = Github(ACCESS_TOKEN, per_page=100) user = client.get_user(USER) repo = user.get_repo(REPO) stargazers = [ s for s in repo.get_stargazers() ] #可以先对这些人数进行分类限制 print "关注人的数目: ", len(stargazers) #人数众多,速度太慢 print return client,repo,stargazers,user #在这里可以控制人数
def is_bug_open(cls, issue_id):
    """Checks whether the GitHub issue for the given issue id is open.

    An issue is considered open if its state is open and it does not
    have a label that contains 'done' or 'complete.'
    """
    config = GitHubConfig()
    github = Github(login_or_token=config.token)
    log = logging.getLogger('RunnerLog')
    issue = None
    try:
        repo = github.get_repo(config.repo)
        issue = repo.get_issue(int(issue_id))
    except UnknownObjectException as error:
        # Bad issue number or repo slug: falls through to the `issue is None`
        # check below and reports the bug as not open.
        log.info('Invalid issue number or GitHub repo. '
                 'UnknownObjectException: {0}'.format(error))
    except RateLimitExceededException as error:
        log.info('Rate limit for API calls exceeded.'
                 'GithubException: {0}'.format(error))
    except Exception as error:
        # Need to blanket error catch due to the wide variety of issues
        # in PyGitHub. Using info as a normal user would need to know this
        # NOTE(review): error.message is a Python-2-ism -- on Python 3 this
        # line itself raises AttributeError; confirm the target runtime.
        log.info(error.message)
    if issue is None or issue.state == 'closed':
        return False
    labels = issue.get_labels()
    for label in labels:
        label = label.name.lower()
        # A "done"/"complete" label marks the work finished even if the
        # issue is technically still open.
        if 'done' in label or 'complete' in label:
            return False
    return True
def main(): username = raw_input("Username: "******"open") today = datetime.today() print "Parsing Issues" late_articles, due_articles = ParseIssues(issues) print "\n\nHTML5 Rocks Weekly Report for %s" % today.date() print "========================================\n" print "Overdue articles" print "----------------\n" for article in late_articles: print "%s - '%s' was due on %s" % ((article.assignee or article.user).name, article.title, article.due_on.date()) print "\nArticles due this week" print "----------------------\n" for article in due_articles: print "%s - '%s' is due on %s" % ((article.assignee or article.user).name, article.title, article.due_on.date())
def build_package_from_pr_number(gh_token, sdk_id, pr_number, output_folder, *, with_comment=False):
    """Clone the given PR branch and build every azure-* package it touches.

    Built artifacts land in `output_folder`. When `with_comment` is set, a
    dashboard comment with install/download instructions is posted on the PR.
    """
    con = Github(gh_token)
    repo = con.get_repo(sdk_id)
    sdk_pr = repo.get_pull(pr_number)
    # "get_files" of Github only download the first 300 files. Might not be enough.
    package_names = {f.filename.split('/')[0] for f in sdk_pr.get_files()
                     if f.filename.startswith("azure")}
    absolute_output_folder = Path(output_folder).resolve()
    # Clone the PR branch into a throwaway directory and build each package.
    with tempfile.TemporaryDirectory() as temp_dir, \
            manage_git_folder(gh_token, Path(temp_dir) / Path("sdk"), sdk_id,
                              pr_number=pr_number) as sdk_folder:
        for package_name in package_names:
            _LOGGER.debug("Build {}".format(package_name))
            execute_simple_command(
                ["python", "./build_package.py", "--dest", str(absolute_output_folder), package_name],
                cwd=sdk_folder
            )
            _LOGGER.debug("Build finished: {}".format(package_name))
    if with_comment:
        files = [f.name for f in absolute_output_folder.iterdir()]
        comment_message = None
        dashboard = DashboardCommentableObject(sdk_pr, "(message created by the CI based on PR content)")
        try:
            installation_message = build_installation_message(sdk_pr)
            download_message = build_download_message(sdk_pr, files)
            comment_message = installation_message + "\n\n" + download_message
            dashboard.create_comment(comment_message)
        except Exception:
            # Commenting is best-effort; the build itself already succeeded.
            _LOGGER.critical("Unable to do PR comment:\n%s", comment_message)
def checkfork():
    """Flask step: verify the signed-in user has forked the homework repo.

    Stores the fork's full name in the session and advances to the travis
    check, or redirects to the "please fork" page.
    """
    try:
        token = session['oauth_token']['access_token']
        github = Github(token)
        user = github.get_user()
        session['username'] = user.login
        if session.get('fork'):
            return redirect(url_for('.checktravis'))
        # look for a fork whose parent is the canonical homework repo
        forked = None
        for repo in user.get_repos():
            if not repo.fork:
                continue
            if repo.parent.full_name == HOMEWORK_REPO:
                forked = repo.full_name
                break
        if forked:
            session['fork'] = forked
            return redirect(url_for('.checktravis'))
        return redirect(url_for('.askfork'))
    except Exception:
        # Was a bare `except:` which also caught SystemExit/KeyboardInterrupt.
        # NOTE(review): returning the traceback to the client leaks internals
        # -- acceptable for a classroom tool, not for production.
        exc_type, exc_value, exc_traceback = sys.exc_info()
        return 'checkfork: %s\n%s\n%s' % (exc_type, exc_value, exc_traceback)
def do_pr(gh_token, sdk_git_id, sdk_pr_target_repo_id, branch_name, base_branch):
    "Do the PR"
    # Without credentials this is a silent no-op (useful for local runs).
    if not gh_token:
        _LOGGER.info("Skipping the PR, no token found")
        return
    github_con = Github(gh_token)
    sdk_pr_target_repo = github_con.get_repo(sdk_pr_target_repo_id)
    # Cross-fork PR: the head ref must be qualified as "<owner>:<branch>".
    if "/" in sdk_git_id:
        sdk_git_owner = sdk_git_id.split("/")[0]
        _LOGGER.info("Do the PR from %s", sdk_git_owner)
        head_name = "{}:{}".format(sdk_git_owner, branch_name)
    else:
        head_name = branch_name
    body = ""
    # Link back to the originating REST API PR when running under Travis.
    rest_api_pr = get_pr_object_from_travis(gh_token)
    if rest_api_pr:
        body += "Generated from RestAPI PR: {}".format(rest_api_pr.html_url)
    try:
        github_pr = sdk_pr_target_repo.create_pull(
            title="Automatic PR from {}".format(branch_name),
            body=body,
            head=head_name,
            base=base_branch
        )
    except GithubException as err:
        # 422 "A pull request already exists": the push landed on an open PR,
        # so there is nothing new to create.
        if err.status == 422 and err.data["errors"][0]["message"].startswith("A pull request already exists"):
            _LOGGER.info("PR already exists, it was a commit on an open PR")
            return
        raise
    _LOGGER.info("Made PR %s", github_pr.html_url)
    comment = compute_pr_comment_with_sdk_pr(github_pr.html_url, sdk_git_id, branch_name)
    add_comment_to_initial_pr(gh_token, comment)
def get_repo_pull_requests(repo_name, user_name=None, organization_name="cornell-cs5220-f15"):
    """Get the pull requests associated with a given repository.

    Inputs:
        repo_name (str) - The name of the repository.
        user_name (str) - The name of the user that owns the repository.
        organization_name (str) - The name of the organization that owns
            the repository.
    """
    # NOTE(review): the credential/lookup lines were redacted ('******') in
    # the original source; reconstructed below -- confirm against VCS. The
    # surviving error string fixes the missing-owner branch.
    import getpass
    username = raw_input("Username: ")
    password = getpass.getpass("Password: ")
    github = Github(username, password)
    if user_name is not None:
        repo = github.get_user(user_name).get_repo(repo_name)
    elif organization_name is not None:
        repo = github.get_organization(organization_name).get_repo(repo_name)
    else:
        raise Exception("get_pull_request_users: Both user_name and organization_name are None")
    return repo.get_pulls()
def run(u, p):
    """Write the html_urls of up to 10 g0v-organization repositories to
    ./data/url_list.json."""
    g = Github(u, p)
    g0v = None
    # get orgs and extract g0v
    for o in g.get_user().get_orgs():
        if o.name == 'g0v':
            g0v = o
            break
    if not g0v:
        raise Exception('g0v is None')
    # collect at most 10 repo URLs
    urls = []
    count = 10
    for r in g0v.get_repos():
        if count <= 0:
            break
        count -= 1
        urls.append(r.html_url)
    os.makedirs('./data/', mode=0o755, exist_ok=True)
    out_fpath = './data/url_list.json'
    # 'w' truncates, making the original remove-then-open-'a+' dance
    # (and its OSError guard) unnecessary.
    with open(out_fpath, 'w') as f:
        f.write(json.dumps(urls))
def get_issue_details():
    """Return one GitHub issue plus all of its comments as JSON.

    Expects a JSON body containing 'repo' and 'issue_number'; responds 400
    when either is missing. Each object is annotated with repo info and
    unix timestamps before serialization.
    """
    payload = request.json
    if not payload.get('repo') or not payload.get('issue_number'):
        abort(400)
    repo = payload.get('repo')
    issue_number = payload.get('issue_number')
    gh = Github(login_or_token=g.github_token, per_page=100)
    issue = gh.get_repo(repo).get_issue(issue_number)
    issue.repo = repo
    issue.unix_updated_at = arrow.get(issue.updated_at).timestamp
    issue.unix_created_at = arrow.get(issue.created_at).timestamp
    all_comments = []
    for comment in issue.get_comments():
        comment.repo = repo
        comment.issue_number = issue.number
        comment.unix_updated_at = arrow.get(comment.updated_at).timestamp
        comment.unix_created_at = arrow.get(comment.created_at).timestamp
        all_comments.append(comment)
    issue_result = Issue().dump(issue)
    comments_result = Comment(many=True).dump(all_comments)
    return jsonify(issue=issue_result.data, comments=comments_result.data)
def create_comment():
    """Create a GitHub issue comment, optionally attaching uploaded images.

    JSON body: repo, issue_number, body, optional images (base64 JPEG data).
    Images are written under scratch/ and linked at the end of the comment.
    """
    repo = request.json.get('repo')
    issue_number = request.json.get('issue_number')
    body = request.json.get('body')
    # Consistency fix: validate issue_number as well (the sibling
    # get_issue_details endpoint already requires both).
    if not repo or not issue_number:
        abort(400)
    if request.json.get('images'):
        body += '\n\n'
        for image in request.json.get('images'):
            image_bytes = base64.b64decode(image)
            # content-addressed filename de-duplicates identical uploads
            filename = '{}.jpg'.format(hashlib.md5(image_bytes).hexdigest())
            with open('scratch/{}'.format(filename), 'wb') as output:
                output.write(image_bytes)
            body += '{}/{}\n'.format(current_app.config.get('STATIC_ASSET_URL'), filename)
    gh = Github(login_or_token=g.github_token, per_page=100)
    gh_repo = gh.get_repo(repo)
    r = gh_repo.get_issue(issue_number)
    comment = r.create_comment(body)
    comment.repo = repo
    comment.issue_number = issue_number
    comment.unix_created_at = arrow.get(comment.created_at).timestamp
    comment.unix_updated_at = arrow.get(comment.updated_at).timestamp
    comment_schema = Comment()
    comment_result = comment_schema.dump(comment)
    return jsonify(created_comment=comment_result.data)
def get_pr_from_travis_commit_sha(gh_token):
    """Try to determine the initial PR using #<number> in the current commit
    comment. Will check if the found number is really a merged PR"""
    if not gh_token:
        return
    github_con = Github(gh_token)
    github_repo = github_con.get_repo(os.environ["TRAVIS_REPO_SLUG"])
    local_commit = github_repo.get_commit(os.environ["TRAVIS_COMMIT"])
    commit_message = local_commit.commit.message
    # raw string: \d is a regex class, not a Python escape
    issues_in_message = re.findall(r"#(\d+)", commit_message)
    issue_object = None
    for issue in issues_in_message:
        try:
            _LOGGER.info("Check if %s is a PR", issue)
            issue_object = github_repo.get_pull(int(issue))
            if not issue_object.is_merged():
                continue
            break
        except Exception as err:
            # Best-effort: #NNN may reference an issue rather than a PR.
            # Log instead of silently dropping the error (was `pass`).
            _LOGGER.debug("Not a usable PR reference %s: %s", issue, err)
    if not issue_object:
        _LOGGER.warning("Was not able to found PR commit message")
        return
    return issue_object
def render_gist(gist_id):
    # Render an embeddable HTML snippet for a public gist: its description
    # followed by the standard gist embed <script> tag.
    g = Github()
    gist = g.get_gist(gist_id)
    embed_url = "<script src=\"https://gist.github.com/%s/%s.js\"></script>" % (gist.user.login, gist_id)
    # NOTE(review): gist.description is interpolated into HTML unescaped --
    # escape it (html.escape) if gist ids can come from untrusted users.
    output = "<p>%s</p> %s" % (gist.description, embed_url)
    return output
def test_throws_an_exception_if_the_key_is_not_a_real_key(self):
    """The API must reject a malformed SSH key with a GithubException."""
    github_user = Github(username, account_password).get_user()
    try:
        github_user.create_key('someKeyAlias', 'keyValue')
    except Exception as exception:
        self.assertIsInstance(exception, GithubException)
        self.assertIn('key is invalid', exception.data['errors'][0]['message'])
    else:
        # BUG FIX: the original test passed silently when create_key did
        # NOT raise -- exactly the regression it is meant to catch.
        self.fail('create_key accepted an invalid key without raising')
class Requester:
    """Facade over PyGithub + dulwich/requests whose calls are retried with
    exponential backoff (tenacity), using limits from SETTINGS."""

    def __init__(self, access_token, bot_access_token=None):
        # Main API client; optional second client acting as a bot account.
        self.github = Github(access_token)
        if bot_access_token:
            self.bot = Github(bot_access_token)

    @retry(stop=stop_after_attempt(SETTINGS['attempt']),
           wait=wait_exponential(multiplier=SETTINGS['multiplier'],
                                 max=SETTINGS['max']))
    def get_repositories(self, username):
        """Return all repositories of `username`."""
        return self.github.get_user(username).get_repos()

    @retry(stop=stop_after_attempt(SETTINGS['attempt']),
           wait=wait_exponential(multiplier=SETTINGS['multiplier'],
                                 max=SETTINGS['max']))
    def create_pull_request_hook(self, username, repository_name, callback_url):
        """Register a JSON webhook firing on pull_request events."""
        self.github.get_user(username).get_repo(repository_name)\
            .create_hook('web', {'url': callback_url, 'content_type': 'json'},
                         ['pull_request'], True)

    @retry(stop=stop_after_attempt(SETTINGS['attempt']),
           wait=wait_exponential(multiplier=SETTINGS['multiplier'],
                                 max=SETTINGS['max']))
    def delete_pull_request_hook(self, username, repository_name, callback_url):
        """Delete every hook of the repo whose URL matches `callback_url`."""
        hooks = self.github.get_user(username).get_repo(
            repository_name).get_hooks()
        for hook in hooks:
            if hook.config['url'] == callback_url:
                hook.delete()

    @retry(stop=stop_after_attempt(SETTINGS['attempt']),
           wait=wait_exponential(multiplier=SETTINGS['multiplier'],
                                 max=SETTINGS['max']))
    def clone_repository(self, clone_url, save_path, bytes_io):
        """Clone into save_path, replacing any existing checkout."""
        if os.path.exists(save_path):
            shutil.rmtree(save_path)
        porcelain.clone(clone_url, save_path, errstream=bytes_io)

    @retry(stop=stop_after_attempt(SETTINGS['attempt']),
           wait=wait_exponential(multiplier=SETTINGS['multiplier'],
                                 max=SETTINGS['max']))
    def get_file(self, file_url):
        """GET a raw file; returns the requests Response."""
        return requests.get(file_url)

    @retry(stop=stop_after_attempt(SETTINGS['attempt']),
           wait=wait_exponential(multiplier=SETTINGS['multiplier'],
                                 max=SETTINGS['max']))
    def get_pull_request(self, username, repository_name, pull_request_number):
        """Fetch one pull request by number."""
        return self.github.get_user(username).get_repo(
            repository_name).get_pull(pull_request_number)

    @retry(stop=stop_after_attempt(SETTINGS['attempt']),
           wait=wait_exponential(multiplier=SETTINGS['multiplier'],
                                 max=SETTINGS['max']))
    def get_latest_commit_from_pull_request(self, pull_request):
        """Return the newest commit of the PR."""
        return pull_request.get_commits().reversed[0]

    @retry(stop=stop_after_attempt(SETTINGS['attempt']),
           wait=wait_exponential(multiplier=SETTINGS['multiplier'],
                                 max=SETTINGS['max']))
    def create_status(self, commit, state, target_url, description, context):
        """Set a commit status (pending/success/failure/...)."""
        commit.create_status(state, target_url, description, context)

    @retry(stop=stop_after_attempt(SETTINGS['attempt']),
           wait=wait_exponential(multiplier=SETTINGS['multiplier'],
                                 max=SETTINGS['max']))
    def create_issue_comment(self, pull_request, text):
        """Post a conversation-level comment on the PR."""
        pull_request.create_issue_comment(text)

    @retry(stop=stop_after_attempt(SETTINGS['attempt']),
           wait=wait_exponential(multiplier=SETTINGS['multiplier'],
                                 max=SETTINGS['max']))
    def create_comment(self, pull_request, text, commit, file, line):
        """Post an inline review comment at file/line of a commit."""
        pull_request.create_comment(text, commit, file, line)

    @retry(stop=stop_after_attempt(SETTINGS['attempt']),
           wait=wait_exponential(multiplier=SETTINGS['multiplier'],
                                 max=SETTINGS['max']))
    def create_issue_comment_bot(self, username, repository_name,
                                 pull_request_number, text):
        """Same as create_issue_comment but authored by the bot account."""
        self.bot.get_user(username).get_repo(repository_name).get_pull(
            pull_request_number).create_issue_comment(text)

    @retry(stop=stop_after_attempt(SETTINGS['attempt']),
           wait=wait_exponential(multiplier=SETTINGS['multiplier'],
                                 max=SETTINGS['max']))
    def create_comment_bot(self, username, repository_name, pull_request_number,
                           text, commit, file, line):
        """Same as create_comment but authored by the bot account."""
        self.bot.get_user(username).get_repo(repository_name).get_pull(
            pull_request_number).create_comment(text, commit, file, line)
import os import time from datetime import datetime import requests import xlrd import xlsxwriter from requests.auth import HTTPBasicAuth from github import Github from colorama import Fore, Back, Style import zlib g_counter = 0 username = [ 'soghac'] password = [ '8cfc66cbda6c52ca0f7c4866e0b909fa18d31cdb'] g = Github(username[g_counter % 3], password[g_counter % 3]) class Repository: parent_commits_dict = dict() parent_pulls_requests = dict() parent_pulls_requests_authors = dict() list_authors = set() june_2019 = datetime.strptime('2019-June', '%Y-%B') @staticmethod def get_full_name_from_url(url): return url.replace('https://api.github.com/repos/', '') @staticmethod def convert_message_to_number(message):
class Organization: __token = "" __organizaton = "" __url = "https://api.github.com/graphql" __base_uri = "https://api.github.com" def __init__(self): self.__map = {} self.__users = {} self.__invitations = {} self.__teams = {} self.__client = Github(self.__token) self.__client = self.__client.get_organization(self.__organizaton) self.__transport = AIOHTTPTransport( url=self.__url, headers={"Authorization": f"token {self.__token}"}) self.__setup_complete = False def __link_teams(self): """ Convert team.parent from id/name to object """ for team_id_or_name, team_data in self.__teams.items(): if "parent" in team_data: parent = team_data["parent"] if parent is None: del team_data["parent"] if isinstance(parent, int) or isinstance(parent, str): team_data["parent"] = self.__teams[parent] async def __populate_teams(self, api_url=None): """ Populate existing teams from github """ data = [] link = None if api_url is None: api_url = f"{self.__base_uri}/orgs/{self.__organizaton}/teams" async with aiohttp.ClientSession() as session: async with session.get(api_url, headers={ "Authorization": f"token {self.__token}" }) as response: assert response.status == 200 data = await response.json() link = response.links for team in data: team_data = { "id": team["id"], "name": team["name"], "parent": team["parent"], "node": team["node_id"], } self.__teams[team["name"]] = team_data self.__teams[team["id"]] = team_data api_url = link.get("next", {}).get("url", None) if api_url is not None: await self.__populate_teams(api_url) async def __populate_members(self, after=None): """ Populate existing members from github """ results = None async with Client(transport=self.__transport, fetch_schema_from_transport=True) as session: params = {"orgname": self.__organizaton} if after: params["after"] = after query = gql(""" query ($orgname: String!, $after: String) { organization(login: $orgname) { membersWithRole(first: 100, after: $after) { nodes { organizationVerifiedDomainEmails(login: $orgname) } 
pageInfo { endCursor } } } } """) results = await session.execute(query, variable_values=params) # import ipdb # ipdb.set_trace() if results: members_with_role = results.get("organization", {}).get("membersWithRole", {}) for user in members_with_role.get("nodes", []): for email in user["organizationVerifiedDomainEmails"]: self.__users[email.lower()] = True after = members_with_role.get("pageInfo", {}).get("endCursor") if after: await self.__populate_members(after) async def __populate_invitees(self, api_url=None): """ Populate existing invitations from github """ data = [] link = None if api_url is None: api_url = f"{self.__base_uri}/orgs/{self.__organizaton}/invitations" async with aiohttp.ClientSession() as session: async with session.get(api_url, headers={ "Authorization": f"token {self.__token}" }) as response: assert response.status == 200 data = await response.json() link = response.links for invitation in data: invitee = { "email": invitation["email"].lower(), "id": invitation["id"], } self.__invitations[invitee["email"].lower()] = invitee api_url = link.get("next", {}).get("url", None) if api_url is not None: await self.__populate_invitees(api_url) async def setup(self): """ Populate existing data from github """ print("Populating members") await self.__populate_members() print(f"{len(self.__users)} members populated") print("Populating teams") await self.__populate_teams() print(f"{len(self.__teams)} teams populated") self.__link_teams() print("Populating invitations") await self.__populate_invitees() print(f"{len(self.__invitations)} invitations populated") self.__notify_duplicates() def __notify_duplicates(self): """ Notify of emails that have duplicate invitations """ duplicates = [] for email, invitation in self.__invitations.items(): if email in self.__users: duplicates.append(invitation) print("Here are the duplicate invitations: ", json.dumps(duplicates)) print("Press any key to continue: ") async def __add_teams(self, teams): """ Add a team 
hierarchy into github Returns the leaf team """ if teams: team = teams[-1] if team in self.__teams: return self.__teams[team] parent = await self.__add_teams(teams[:-1]) api_url = f"{self.__base_uri}/orgs/{self.__organizaton}/teams" kwargs = {"name": team, "privacy": "closed"} data = None if parent: kwargs["parent_team_id"] = parent["id"] async with aiohttp.ClientSession() as session: async with session.post( api_url, headers={"Authorization": f"token {self.__token}"}, json=kwargs, ) as response: assert response.status == 201 data = await response.json() team_data = { "id": data["id"], "name": team, "node": data["node_id"], } if parent: team_data["parent"] = parent self.__teams[team] = team_data self.__teams[team_data["id"]] = team_data return None async def __add_user(self, email, team=None): """ Add a user to organization """ api_url = f"{self.__base_uri}/orgs/{self.__organizaton}/invitations" kwargs = { "email": email.lower(), } data = {} if team: kwargs["team_ids"] = [team["id"]] async with aiohttp.ClientSession() as session: async with session.post( api_url, headers={"Authorization": f"token {self.__token}"}, json=kwargs, ) as response: assert response.status == 201 data = await response.json() self.__invitations[email.lower()] = { "email": data["email"].lower(), "id": data["id"], } async def add_members(self, members): extras = [] member_emails = [member["email"].lower() for member in members] for member in self.__users.keys(): if member.lower() not in member_emails: extras.append(member) for member in self.__invitations.keys(): if member.lower() not in member_emails: extras.append(member) print(f"Extras: {', '.join(extras)}") input("Press enter when extras are cleared: ") for member in members: team = await self.__add_teams(member["teams"]) email = member["email"].lower() if email in self.__users: print(f"Member '{email}' already in organization") elif email in self.__invitations: print(f"Member '{email}' already invited") else: print(f"Adding new member 
'{email}': {member}") await self.__add_user(email.lower(), team) def show(self): print( f"Total {len(self.__users) + len(self.__invitations)} members: {len(self.__users)} active {len(self.__invitations)} invited" ) def show_invitations(self): print(f"Invites: {', '.join(self.__invitations.keys())}")
import os
from github import Github
import csv

# SECURITY: a fallback personal-access token is hard-coded here. It should be
# revoked and the fallback removed so the script only runs with GITHUB_TOKEN set.
token = os.getenv('GITHUB_TOKEN', '4d349bfd02f405b839d80cc15aca5e81b0a67f7c')
g = Github(token)


def extracting_repos(path='../dataset/topJavaMavenProjects.csv'):
    """Read the project list CSV and return GitHub "owner/name" slugs.

    Args:
        path: CSV file whose first column holds full GitHub URLs; the first
            row is a header and is skipped. Defaults to the original dataset
            location, so existing callers are unaffected.

    Returns:
        list[str]: repository slugs with the "https://github.com/" prefix
        stripped from each URL.
    """
    prefix = "https://github.com/"
    repos = []
    with open(path, 'r') as file:
        reader = csv.reader(file)
        # Idiomatic header skip instead of counting rows manually.
        next(reader, None)
        for row in reader:
            repos.append(row[0][len(prefix):])
    return repos


def extracting_attributes(repos):
    """Print 1-based index, size and fork count for each repo.

    'b3log/solo' is skipped (kept from the original logic — presumably a
    known-bad entry; TODO confirm why). Counting still includes skipped rows,
    matching the original counter behavior.
    """
    for count, repo in enumerate(repos, 1):
        if repo == 'b3log/solo':
            continue
        r = g.get_repo(repo)
        print(count, r.size, r.forks)
# Script: print the name of every repository owned by a given GitHub user.
from github import Github

# SECURITY NOTE(review): personal access token committed to source — it should
# be revoked and loaded from an environment variable instead.
API_KEY = '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'

# Authenticated client against the public GitHub API endpoint.
githubInstance = Github(base_url="https://api.github.com", login_or_token=API_KEY)

# Iterate the target user's repositories and print each name.
for repo in githubInstance.get_user('angeloevangelista').get_repos():
    print(repo.name)
# Optional label filter: when LABELS is unset or empty, no filtering is done.
if get_env_var_name("LABELS") in os.environ:
    if get_env_var("LABELS") == "":
        print("LABELS is empty string, won't filter")
        labels = []
    else:
        # Comma-separated list of label names to filter on.
        labels = get_env_var("LABELS").split(",")
else:
    print("LABELS not specified, won't filter")
    labels = []

# Slack client and notification destinations.
slack = WebClient(token=get_env_var("SLACK_TOKEN"))
channel = get_env_var("SLACK_CHANNEL")
slack_webhook = get_env_var("SLACK_WEBHOOK")

try:
    # Subject to GitHub RateLimitExceededException
    # Prefer the PAT, falling back to GITHUB_SCRIPT_TOKEN.
    github = Github(get_env_var("PAT") or os.getenv("GITHUB_SCRIPT_TOKEN"))
    repo = github.get_repo(get_env_var("REPO_FOR_DATA"))
    transport = AIOHTTPTransport(
        url='https://api.github.com/graphql',
        headers={'Authorization': 'Bearer %s' % get_env_var("PAT")})
    # Create a GraphQL client using the defined transport
    gql_client = Client(transport=transport, fetch_schema_from_transport=True)
    project_dict = resolve_url(gql_client, get_env_var("PROJECT_URL"))
    main(repo, project_dict)
except RateLimitExceededException:
    # Best effort: skip this scheduled run instead of failing the job.
    print("Hit GitHub RateLimitExceededException. Skipping this run.")
def __init__(self, access_token, bot_access_token=None):
    """Create the primary GitHub client, plus an optional bot client.

    `self.bot` is only set when a bot token is provided, matching the
    original behavior (callers must not assume the attribute exists).
    """
    self.github = Github(access_token)
    if not bot_access_token:
        return
    self.bot = Github(bot_access_token)
Command | Description -- | -- `@hacs-bot no` | Will close the issue, and not publish a new release. `@hacs-bot close` | Same as `@hacs-bot no`. `@hacs-bot yes` | Will create a new release and close the issue. `@hacs-bot LGTM` | Same as `@hacs-bot yes`. `@hacs-bot release x.xx.x` | Same as `@hacs-bot yes` but will change the release number to the one specified. </details> """ CHANGE = "- [{line}]({link}) @{author}\n" NOCHANGE = "_No changes in this release._" GITHUB = Github(sys.argv[2]) def new_commits(repo, sha): """Get new commits in repo.""" from datetime import datetime dateformat = "%a, %d %b %Y %H:%M:%S GMT" release_commit = repo.get_commit(sha) since = datetime.strptime(release_commit.last_modified, dateformat) commits = repo.get_commits(since=since) if len(list(commits)) == 1: return False return reversed(list(commits)[:-1])
def make_gh_client_by_name(user_name, password):
    """Return a Github client authenticated with a username/password pair."""
    client = Github(user_name, password)
    return client
from github import Github, GithubException from decouple import config import base64 g = Github(config('GITHUB_USER'), config('GITHUB_PASS')) def get_user_info(user_link): num_of_repos = 0 repos = [] followers = 0 try: username = user_link.strip('/').split("/")[-1] user = g.get_user(username) for r in user.get_repos(): temp = [] temp.append(r.name) temp.append(r.language) try: temp.append(base64.b64decode(r.get_readme().content)) except GithubException: temp.append('') repos.append(temp) num_of_repos += 1 followers = user.followers except GithubException: print("No user found") return None return { "num_of_repos": num_of_repos, "repos": repos,
def login(token):
    """Return a Github API client authenticated with the given token."""
    gh_client = Github(token)
    return gh_client
def make_default_gh_client():
    """Return a Github client using the credentials held in github_config."""
    user = github_config.GITHUB_USER
    password = github_config.GITHUB_PWS
    return Github(user, password)
skos = Namespace("http://www.w3.org/2004/02/skos/core#") dct = Namespace('http://purl.org/dc/terms/') isothes = Namespace('http://purl.org/iso25964/skos-thes#') foaf = Namespace('http://xmlns.com/foaf/0.1/') ysa = Namespace('http://www.yso.fi/onto/ysa/') if len(sys.argv) < 3: print >>sys.stderr, "Usage: %s GitHub-credentials-file Finto-data-path" % sys.argv[0] sys.exit() secrets = json.load(open(sys.argv[1])) finto_data = sys.argv[2] ysa_file = finto_data + '/vocabularies/ysa/ysa-skos.ttl' yse_file = finto_data + '/vocabularies/yse/yse-skos.ttl' g = Github(secrets['username'], secrets['password']) repo = g.get_user('Finto-ehdotus').get_repo('YSE') label = repo.get_label('uusi') need_inspection = repo.get_issues(state='closed', labels=[label]) ysa_skos = Graph().parse(ysa_file, format='turtle') yse_skos = Graph().parse(yse_file, format='turtle') def delete_triples(uri): yse_skos.remove((URIRef(uri), None, None)) yse_skos.remove((None, None, URIRef(uri))) def replaced_by(uri, title): new_uri = lookup(title) if(new_uri != ''): yse_skos.remove((None, None, URIRef(uri))) yse_skos.add((URIRef(uri), dct.isReplacedBy, URIRef(new_uri)))
def get_repo(user: Github, repo: str):
    """Look up and return the repository named `repo` via the `user` client."""
    repository = user.get_repo(repo)
    return repository
import os import discord from github import Github client = discord.Client() gh = Github(os.environ.get("GH_TOKEN")) @client.event async def on_ready(): print(f'Logged in as {client.user}') @client.event async def on_message(message): def check(reaction, user): return user == message.author and (str(reaction.emoji) == "✅" or srt(reaction.emoji) == "❎") if str(message.channel.id) != os.environ.get('CHECK_CHANNEL'): print(os.environ.get("CHECK_CHANNEL")) print(message.channel.id) print(message.channel.id != os.environ.get("CHECK_CHANNEL")) return if message.author == client.user: return if len(message.content.split()) == 1: x = await message.channel.send("React to Confirm: Invite `" + message.content +
return i GH = "https://github.com" GH_USER = '******' GH_REPO = 'dask-image' GH_TOKEN = os.environ.get('GH_TOKEN') if GH_TOKEN is None: raise RuntimeError( "It is necessary that the environment variable `GH_TOKEN` " "be set to avoid running into problems with rate limiting. " "One can be acquired at https://github.com/settings/tokens.\n\n" "You do not need to select any permission boxes while generating " "the token.") g = Github(GH_TOKEN) repository = g.get_repo(f'{GH_USER}/{GH_REPO}') parser = argparse.ArgumentParser(usage=__doc__) parser.add_argument('from_commit', help='The starting tag.') parser.add_argument('to_commit', help='The head branch.') parser.add_argument('--version', help="Version you're about to release.", default='0.2.0') args = parser.parse_args() for tag in repository.get_tags(): if tag.name == args.from_commit: previous_tag = tag break
'formatter': 'raw', 'stream': 'ext://sys.stdout', }, }, 'loggers': { '': { 'level': 'INFO', 'handlers': ['file_handler', 'stream_handler', ], 'propagate': False, }, }, }) repository = [ o for o in Github(GITHUB_TOKEN).get_user().get_orgs() if o.login=='ooi-integration' ][0].get_repo('ingestion-csvs') log = logging.getLogger('Main') def get_csvs(repo, filepath, csv_files={}): for item in repo.get_dir_contents(filepath): if item.type == "dir": csv_files = get_csvs(repo, item.path) elif item.path.endswith(".csv"): csv_files[item.path] = StringIO(item.decoded_content) log.info("Found CSV file: %s" % item.path) return csv_files def commented(row):
def main(): """Handles External PRs (PRs from forks) Performs the following operations: 1. If the external PR's base branch is master we create a new branch and set it as the base branch of the PR. 2. Labels the PR with the "Contribution" label. (Adds the "Hackathon" label where applicable.) 3. Assigns a Reviewer. 4. Creates a welcome comment Will use the following env vars: - CONTENTBOT_GH_ADMIN_TOKEN: token to use to update the PR - EVENT_PAYLOAD: json data from the pull_request event """ t = Terminal() payload_str = get_env_var('EVENT_PAYLOAD') if not payload_str: raise ValueError('EVENT_PAYLOAD env variable not set or empty') payload = json.loads(payload_str) print(f'{t.cyan}Processing PR started{t.normal}') org_name = 'demisto' repo_name = 'content' gh = Github(get_env_var('CONTENTBOT_GH_ADMIN_TOKEN'), verify=False) content_repo = gh.get_repo(f'{org_name}/{repo_name}') pr_number = payload.get('pull_request', {}).get('number') pr = content_repo.get_pull(pr_number) # Add 'Contribution' Label to PR contribution_label = 'Contribution' pr.add_to_labels(contribution_label) print(f'{t.cyan}Added "Contribution" label to the PR{t.normal}') # check base branch is master if pr.base.ref == 'master': print(f'{t.cyan}Determining name for new base branch{t.normal}') branch_prefix = 'contrib/' new_branch_name = f'{branch_prefix}{pr.head.label.replace(":", "_")}' existant_branches = content_repo.get_git_matching_refs( f'heads/{branch_prefix}') potential_conflicting_branch_names = [ branch.ref.lstrip('refs/heads/') for branch in existant_branches ] # make sure new branch name does not conflict with existing branch name while new_branch_name in potential_conflicting_branch_names: # append or increment digit if not new_branch_name[-1].isdigit(): new_branch_name += '-1' else: digit = str(int(new_branch_name[-1]) + 1) new_branch_name = f'{new_branch_name[:-1]}{digit}' master_branch_commit_sha = content_repo.get_branch('master').commit.sha # create new branch 
print(f'{t.cyan}Creating new branch "{new_branch_name}"{t.normal}') content_repo.create_git_ref(f'refs/heads/{new_branch_name}', master_branch_commit_sha) # update base branch of the PR pr.edit(base=new_branch_name) print( f'{t.cyan}Updated base branch of PR "{pr_number}" to "{new_branch_name}"{t.normal}' ) # assign reviewers / request review from reviewer_to_assign = determine_reviewer(REVIEWERS, content_repo) pr.add_to_assignees(reviewer_to_assign) pr.create_review_request(reviewers=[reviewer_to_assign]) print(f'{t.cyan}Assigned user "{reviewer_to_assign}" to the PR{t.normal}') print( f'{t.cyan}Requested review from user "{reviewer_to_assign}"{t.normal}') # create welcome comment (only users who contributed through Github need to have that contribution form filled) message_to_send = WELCOME_MSG if pr.user.login == MARKETPLACE_CONTRIBUTION_PR_AUTHOR else WELCOME_MSG_WITH_GFORM body = message_to_send.format(selected_reviewer=reviewer_to_assign) pr.create_issue_comment(body) print(f'{t.cyan}Created welcome comment{t.normal}')
def get_vimrcs(VERBOSE=False, TEST=False): # Open existing data file if it exists if os.path.isfile(search_result_file): with open(search_result_file, "rb") as f: data = pickle.load(f) all_vimrcs = data["all_vimrcs"] repos_seen = data["repos_seen"] else: all_vimrcs = {} repos_seen = 0 g = Github(github_api_key, per_page=max_per_page) repos = g.search_repositories( "dotfiles OR vimrc", sort="stars", order="desc", **{ "in": "name,description", "stars": star_threshold }, ) try: print(f"Starting at repo {repos_seen}") for repo in repos[repos_seen:]: # Get .vimrc files in this repo vimrcs = g.search_code("", repo=repo.full_name, filename=".vimrc") if VERBOSE: print("Repo: ", repo.full_name) print("Vimrc Count: ", vimrcs.totalCount) for vimrc in vimrcs: # Try to decode content and save info try: decoded = vimrc.decoded_content except: print("Couldn't decode ", vimrc.download_url) else: # Save vimrc info all_vimrcs[vimrc.download_url] = { "repo": repo.full_name, "stars": repo.stargazers_count, "content": decoded, } sleep(1) check_rate_limit(g, repos_seen, all_vimrcs, VERBOSE, TEST) sleep(1) repos_seen += 1 check_rate_limit(g, repos_seen, all_vimrcs, VERBOSE, TEST) except Exception as e: print(e) save_results(search_result_file, repos_seen, all_vimrcs) else: print("Made it through all of the results") save_results(search_result_file, repos_seen, all_vimrcs)
help="Ensure that migrated issues keep the same ID", default=False) options, args = parser.parse_args() if len(args) != 3: parser.print_help() sys.exit() label_cache = {} # Cache Github tags, to avoid unnecessary API requests google_project_name, github_user_name, github_project = args github_password = getpass.getpass("Github password: "******"/" in github_project: owner_name, github_project = github_project.split("/") try: github_owner = github.get_user(owner_name) except GithubException: try: github_owner = github.get_organization(owner_name) except GithubException: github_owner = github_user
from github import Github from github import GithubException import matplotlib.pyplot as plt import numpy as np import getpass quantity=[] label=['HTML','JavaScript','Java','C++','C','Python','C#','Other'] username = raw_input("Please enter your Github Username:"******"Please enter username of Github user you would like to see data on:") def computation(g): count1=0 count2=0 count3=0 count4=0 count5=0 count6=0 count7=0 other=0 for repo in g.get_user(gitUser).get_repos(): if repo.language == "HTML": count1+=1 elif repo.language == "JavaScript": count2+=1
from collections import defaultdict from github import Github from github import PullRequest from settings import GITHUB_USERNAME, GITHUB_PASSWORD, REPOS, USERS from slack_helper import send_message PR_STATE_OPEN = 'open' PR_STATE_CLOSED = 'closed' PR_STATE_ALL = 'all' g = Github(GITHUB_USERNAME, GITHUB_PASSWORD) pending_notifications = set() def check_repos(): repos = get_repos(REPOS) for githubname in repos: for repo in repos.get(githubname): open_pulls = repo.get_pulls(state=PR_STATE_OPEN) check_pulls(open_pulls) process_pending_notifications() def get_repos(repo_names): """ Get each repo in repo_names for each user in settings.USERS Return: {"github_username": [github.Repository.Repository]} """ repos = defaultdict(list) for user in USERS:
def github():
    """Build a Github client from the GITHUB_AUTH_TOKEN environment variable."""
    token = os.getenv("GITHUB_AUTH_TOKEN")
    return Github(token)
# Clear screen os.system('cls' if os.name == 'nt' else 'clear') print "Organization Ego-network analysis" print "" userlogin = raw_input("Login: Enter your username: "******"Login: Enter yor password: "******"Enter the username you want to analyse: ") print "" status_forcelist = (500, 502, 504, 403) retry_data = urllib3.Retry(total=1000, read=300, connect=300, backoff_factor=0.5, status_forcelist=status_forcelist) g = Github(userlogin, password, retry=retry_data) print "ORGANIZATIONS:" for i in g.get_user(username).get_orgs(): print "-", i.login print "" org_to_mine = raw_input( "Enter the name of the Organization you want to analyse: ") print "" org = g.get_organization(org_to_mine) graph = nx.DiGraph() for i in org.get_members(): print "Member:", i.login
import constants import notify_chat import utils PULL_REQUEST_TITLE = '[Automated] Bump Ballerina Lang version' COMMIT_MESSAGE_PREFIX = '[Automated] Update ballerina lang to ' SLEEP_INTERVAL = 30 # 30s MAX_WAIT_CYCLES = 180 # script timeout is 90 minutes ballerina_bot_username = os.environ[constants.ENV_BALLERINA_BOT_USERNAME] ballerina_bot_token = os.environ[constants.ENV_BALLERINA_BOT_TOKEN] ballerina_bot_email = os.environ[constants.ENV_BALLERINA_BOT_EMAIL] ballerina_reviewer_bot_token = os.environ[ constants.ENV_BALLERINA_REVIEWER_BOT_TOKEN] github = Github(ballerina_bot_token) def main(): branch_name = sys.argv[1] lang_version = sys.argv[2] update_lang_version(branch_name, lang_version) def update_lang_version(branch_name, lang_version): dist_repo = github.get_repo(constants.BALLERINA_ORG_NAME + '/ballerina-distribution', ref=branch_name) properties_content = dist_repo.get_contents( constants.GRADLE_PROPERTIES_FILE) properties_content = properties_content.decoded_content.decode(
import flask from github import Github from flask import Flask, request, render_template, jsonify, send_file import requests import pandas as pd import datetime from datetime import timedelta import dateutil.parser import json # using an access token g = Github("93e27cd53dd1f8f4ba1ce9b120dfa62f8892fa3b") mytoken = "93e27cd53dd1f8f4ba1ce9b120dfa62f8892fa3b" app = flask.Flask(__name__) app.config["DEBUG"] = True #download git hub analysis file of my repo @app.route('/public_repo_download', methods=['GET']) def public_repo_download(): txtfile = 'publicgithubanalysis.txt' print("Saved commits information to commits_info.csv") return send_file(txtfile, as_attachment=True, cache_timeout=0) #download git hub analysis file of public repo @app.route('/my_repo_download', methods=['GET']) def my_repo_download(): txtfile = 'githubanalysis.txt' print("Saved commits information to commits_info.csv")
def post(self):
    """Create a GitHub repo named `self.repo_name` and push eligible files.

    Returns:
        dict: {'repoName', 'successfullyAdded', 'failed'} on success, or the
        payload produced by `handleGithubError` when repo creation fails
        (e.g. a repo with that name already exists).
    """
    repo_name = self.repo_name
    g = Github(self.token)
    # Since creating a repo happens on the user object, we must fetch the user first.
    user = g.get_user()
    # Try to create the repo. Creation will fail if a repo has already been
    # created with that name.
    try:
        print('Trying to create new repo with name: {}'.format(repo_name))
        repo = user.create_repo(repo_name)
    except GithubException as repo_creation_error:
        data = {
            'repoName': repo_name,
        }
        return handleGithubError(repo_creation_error, data=data)

    # Collect every file in this app to add to the new repo.
    files = getAllFilesWPathsInDirectory(
        '.', dirsToAvoid=DEFAULT_DIRS_TO_AVOID,
        extensionsToAvoid=DEFAULT_EXTENSIONS_TO_AVOID)
    files_added_successfully = []
    files_failed = []
    for i, file_path in enumerate(files):
        # Strip the leading './' so paths are repo-relative.
        # BUG FIX: this assignment previously happened AFTER the read attempt,
        # so the except-branch below raised NameError instead of recording the
        # failed file.
        file_path_formatted = file_path[2:]
        # Try to read the file's content.
        try:
            with open(file_path, 'rb') as file:
                file_content = file.read()
        except IOError:
            files_failed.append(file_path_formatted)
            continue
        commit_message = 'Committing file {file_num} of {num_files}: {file_path}'.format(
            file_num=i + 1, num_files=len(files), file_path=file_path_formatted)
        print(commit_message)
        try:
            # Ideally Github would allow us to add our files in batches, rather
            # than one at a time, so that we can reduce the number of API calls
            # required. However, based on this discussion, it does not appear
            # to be possible. https://github.com/isaacs/github/issues/199
            repo.create_file(file_path_formatted, commit_message, file_content)
            files_added_successfully.append(file_path_formatted)
        except GithubException:
            # (The original captured e.args[1]['message'] into an unused
            # local; dropped.)
            files_failed.append(file_path_formatted)
    results = {
        'repoName': repo_name,
        'successfullyAdded': files_added_successfully,
        'failed': files_failed,
    }
    return results
repo_path = os.getenv("REPO_COPY", os.path.abspath("../../")) reports_path = os.getenv("REPORTS_PATH", "./reports") check_name = sys.argv[1] if not os.path.exists(temp_path): os.makedirs(temp_path) with open(os.getenv('GITHUB_EVENT_PATH'), 'r', encoding='utf-8') as event_file: event = json.load(event_file) is_flaky_check = 'flaky' in check_name pr_info = PRInfo(event, need_changed_files=is_flaky_check) gh = Github(get_best_robot_token()) images = get_images_with_versions(temp_path, IMAGES) images_with_versions = {i.name: i.version for i in images} result_path = os.path.join(temp_path, "output_dir") if not os.path.exists(result_path): os.makedirs(result_path) work_path = os.path.join(temp_path, "workdir") if not os.path.exists(work_path): os.makedirs(work_path) build_path = os.path.join(temp_path, "build") if not os.path.exists(build_path): os.makedirs(build_path)
def _connect(self):
    """Connects to GitHub's API.

    Prefers token auth; falls back to username (with password) when no
    token is configured.
    """
    credential = self.github_token or self.github_user
    return Github(login_or_token=credential, password=self.github_pass)
def create_pull_request(token, repo, head, base, title, body):
    """Open a pull request on `repo` merging `head` into `base`."""
    repository = Github(token).get_repo(repo)
    return repository.create_pull(title=title, body=body, base=base, head=head)