def _make_message(self, message, user, repo):
    """Build a Reddit-markdown blurb describing *user*'s GitHub *repo*.

    NOTE(review): the ``message`` parameter was immediately overwritten in the
    original and is never read; it is kept only for caller compatibility --
    confirm before removing it from the signature.

    :param message: unused (see note above).
    :param user: GitHub user/owner name.
    :param repo: repository name.
    :return: formatted markdown string, or False when the user or repo
             cannot be fetched from GitHub.
    """
    github = GitHub()
    try:
        git_user = github.user(user)
    except BadUser:
        logging.error("Uh oh! {} doesn't seem to be a valid GitHub user.".format(user))
        return False
    try:
        git_repo = github.repo(user, repo)
    except BadRepo:
        logging.error("Uh oh! Can't connect to {}'s repo {}.".format(user, repo))
        return False
    # TODO - this is ugly, setup Jinja or some other templating here instead.
    # Use a fresh local name instead of clobbering the 'message' parameter.
    text = """
**[{title}]({url})** - *A {language} repository written by [{author}]({author_url})*

> {description}

{stars} stars, {forks} forks

Clone this repository: `{clone}`

---

[I'm a bot](https://github.com/naiyt/autogithubbot) that posts info about GitHub repositories. [Create your own bot](https://github.com/naiyt/reddit-replier)! (Just be nice about it.)
""".format(title=git_repo.name, url=git_repo.url, language=git_repo.language,
           description=git_repo.description, author=git_repo.author,
           author_url=git_user.url, stars=git_repo.stars, forks=git_repo.forks,
           clone=git_repo.clone_url)
    return text
def get(self):
    """Render the GitHub-credentials admin form (webapp2 / GAE, Python 2).

    Redirects anonymous users to login; non-admins get a plain-text denial.
    For admins, shows the stored owner/repo and reports push access.
    """
    user = users.get_current_user()
    if not user:
        self.redirect(users.create_login_url(self.request.uri))
    else:
        if validAdmin(user):
            creds = get_githubcredentials()
            # Token field is never pre-filled ("always hidden").
            self.response.write("""
<form action='%s' method="POST">
Github Repository Owner: <input name="github_owner" type="text" value="%s" /><br>
Github Repository Name: <input name="github_repo" type="text" value="%s" /><br>
Github Access Token (always hidden): <input name="github_token" type="text" placeholder="access token" /><br>
<input type="submit" />
</form>
"""%(self.request.uri, creds.owner, creds.repo))
            hasAccess = False
            # NOTE(review): 'ghcreds' is assigned but never used; 'creds' is
            # used below instead -- confirm which was intended.
            ghcreds = get_githubcredentials()
            gh = GitHub(creds.owner, creds.repo, creds.token)
            try:
                hasAccess = gh.hasPushAccess()
            except:
                # Best-effort check: any API failure is treated as no access.
                pass
            if hasAccess:
                self.response.write("<p>Push Access Granted by GitHub.com</p>")
            else:
                self.response.write("<p>WARNING: No push access with the stored token and repo information. Please provide valid information</p>")
        else:
            self.response.headers['Content-Type'] = 'text/plain; charset=utf-8'
            self.response.write("No write access for user %s"%user.email())
def main(org_name):
    """Print watcher/star/fork counts for every repo in a GitHub org, plus totals.

    :param org_name: name of the GitHub organization to report on.
    """
    # Use an authenticated client when a token is available (higher rate
    # limits); the original also created a throwaway unauthenticated client
    # before this conditional, which has been removed.
    if "GITHUB_API_TOKEN" in os.environ:
        gh = GitHub(os.environ["GITHUB_API_TOKEN"])
    else:
        gh = GitHub()
    org = gh.get_organization(org_name)
    totals = {
        "watchers": 0,
        "stars": 0,
        "forks": 0,
    }
    print(f"{'repo':40} {'watch'} {'stars'} {'forks'}")
    for repo in org.get_repos():
        print(f"{repo.full_name:40}"
              f" {repo.watchers_count:5}"
              f" {repo.stargazers_count:5}"
              f" {repo.forks_count:5}")
        totals["watchers"] += repo.watchers_count
        totals["stars"] += repo.stargazers_count
        totals["forks"] += repo.forks_count
    print("\nTotals:")
    for key, value in totals.items():
        print(f"{key:8}: {value:4}")
def get_gh_data():
    """Return a JSON array of event times-of-day for the authenticated user.

    Each GitHub event's creation time is converted to minutes-past-midnight,
    shifted back 300 minutes and wrapped into [0, 1440).
    """
    token = request.cookies.get('gh_access_token')
    gh = GitHub(access_token=token)
    login = gh.user().get()['login']
    # Page through the user's events until an empty page comes back.
    events = gh.users(login).events().get(page=1)
    page = 1
    while True:
        page += 1
        batch = gh.users(login).events().get(page=page)
        if not batch:
            break
        events.extend(batch)
    timestamps = []
    for event in events:
        # Strip the trailing 'Z' before parsing the ISO-8601 timestamp.
        parsed = time.strptime(event['created_at'][:-1], "%Y-%m-%dT%H:%M:%S")
        timestamps.append((parsed.tm_hour * 60 + parsed.tm_min - 300) % 1440)
    print("GH: " + str(len(timestamps)))
    return json.dumps(timestamps)
def webhook_handler(payload, signature):
    """Respond to Travis webhook."""
    travis = Travis()
    github = GitHub()
    # The payload comes in the request, but we need to make sure it is
    # really signed by Travis CI. If not, respond to this request with
    # an error.
    verified_payload = travis.get_verified_payload(payload, signature)
    error = verified_payload.get('error')
    if error:
        return error.get('message'), error.get('code')
    issue_number = int(verified_payload.get('pull_request_number'))
    logs = travis.get_logs(verified_payload)
    comments = parse_logs(logs)
    # Create a separate comment for every job.
    # (Python 2: dict.iteritems -- would be .items() on Python 3.)
    for title, comment in comments.iteritems():
        try:
            github.post_comment(issue_number, comment, title)
        except requests.RequestException as err:
            # Surface GitHub's error body to the webhook caller.
            logging.error(err.response.text)
            return err.response.text, 500
    return "OK", 200
def setUp(self):
    """Create GitHub/DB/Config fixtures and seed library data.

    Skipped on Travis CI, where external credentials are unavailable
    (idiomatic ``is None`` instead of ``== None``).
    """
    if os.environ.get('TRAVIS') is None:
        self.github = GitHub()
        self.db = DBConnector()
        self.config = Config()
        self.github.update_library_data(self.config.github_user,
                                        self.config.github_repos[0])
        self.filename = "./csv/{}.csv".format(GitHubData.__tablename__)
def upload_to_github(file_path, nwjs_version):
    """Upload *file_path* as a zip asset to the release draft for *nwjs_version*.

    :param file_path: local path of the artifact to upload.
    :param nwjs_version: version string used to locate/create the release draft.
    """
    github = GitHub(auth_token())
    releases = github.repos(GITHUB_REPO).releases.get()
    release = create_or_get_release_draft(github, releases, nwjs_version)
    params = {'name': os.path.basename(file_path) }
    headers = {'Content-Type': 'application/zip'}
    with open(file_path, 'rb') as f:
        # NOTE(review): verify=False disables TLS certificate verification --
        # confirm this is intentional.
        github.repos(GITHUB_REPO).releases(release['id']).assets.post(
            params=params, headers=headers, data=f, verify=False)
def get_issues_api():
    """Return the GitHub issues endpoint for the configured repository.

    Returns None when parsing-issue reporting is disabled in the app config.
    """
    if not app.config['REPORT_PARSING_ISSUES']:
        return None
    gh = GitHub(access_token=app.config['GITHUB_ACCESS_TOKEN'])
    owner = app.config['GITHUB_REPO_OWNER']
    name = app.config['GITHUB_REPO_NAME']
    return gh.repos(owner)(name).issues
def upload_to_github(file_path, nwjs_version):
    """Upload *file_path* as a zip asset to the release draft for *nwjs_version*.

    :param file_path: local path of the artifact to upload.
    :param nwjs_version: version string used to locate/create the release draft.
    """
    github = GitHub(auth_token())
    releases = github.repos(GITHUB_REPO).releases.get()
    release = create_or_get_release_draft(github, releases, nwjs_version)
    params = {'name': os.path.basename(file_path)}
    headers = {'Content-Type': 'application/zip'}
    with open(file_path, 'rb') as f:
        # NOTE(review): verify=False disables TLS certificate verification --
        # confirm this is intentional.
        github.repos(GITHUB_REPO).releases(release['id']).assets.post(
            params=params, headers=headers, data=f, verify=False)
def main():
    """Print a ``git pull`` command for each Apache Storm pull-request number
    given on the command line.  (Python 2: print statement.)
    """
    parser = OptionParser(usage="usage: %prog [options] [pull number]")
    parser.add_option("-g", "--github-user", dest="gituser",
                      type="string", help="github user, if not supplied no auth is used", metavar="USER")
    (options, args) = parser.parse_args()
    github = GitHub(options)
    for pullNumber in args:
        pull = github.pull("apache", "storm", pullNumber)
        # Emits e.g. "git pull <source-repo> <source-branch>".
        print "git pull "+pull.from_repo()+" "+pull.from_branch()
def main():
    """Print a ``git pull`` command for each Apache Storm pull-request number
    given on the command line.  (Python 2: print statement.)

    NOTE(review): this variant calls camelCase accessors (fromRepo/fromBranch);
    a near-identical snippet elsewhere uses snake_case -- presumably different
    versions of the same pull-object API; confirm which is current.
    """
    parser = OptionParser(usage="usage: %prog [options] [pull number]")
    parser.add_option("-g", "--github-user", dest="gituser",
                      type="string", help="github user, if not supplied no auth is used", metavar="USER")
    (options, args) = parser.parse_args()
    github = GitHub(options)
    for pullNumber in args:
        pull = github.pull("apache", "storm", pullNumber)
        print "git pull "+pull.fromRepo()+" "+pull.fromBranch()
def lambda_handler(event, context):
    """AWS Lambda entry point: sync JIRA tickets referenced by a GitHub event.

    For each ticket number extracted from the event, update its status with
    the assignee and fix version, then move it on the board.
    """
    github = GitHub(event)
    fix_version = github.get_fix_version()
    assignee = get_assignee(github)
    for number in github.get_ticket_numbers():
        jira = Jira(number)
        jira.update_status(assignee=assignee, fix_version=fix_version)
        move_ticket(jira, github)
    return ""
def main():
    """Report Apache incubator-storm pull requests against open STORM JIRAs.

    Prints tab-separated tables: PRs with no JIRA reference in the title,
    PRs referencing a closed/unknown JIRA, and open JIRAs with their PRs
    plus approximate comment votes.  (Python 2: print statements.)
    """
    parser = OptionParser(usage="usage: %prog [options]")
    parser.add_option("-g", "--github-user", dest="gituser",
                      type="string", help="github user, if not supplied no auth is used", metavar="USER")
    (options, args) = parser.parse_args()
    jrepo = JiraRepo("https://issues.apache.org/jira/rest/api/2")
    github = GitHub(options)
    openPullRequests = github.openPulls("apache","incubator-storm")
    stormJiraNumber = re.compile("STORM-[0-9]+")
    openJiras = jrepo.openJiras("STORM")
    jira2Pulls = {}
    pullWithoutJira = []
    pullWithBadJira = []
    # Bucket each open PR by the STORM-NNN token found in its title.
    for pull in openPullRequests:
        found = stormJiraNumber.search(pull.title())
        if found:
            jiraNum = found.group(0)
            if not (jiraNum in openJiras):
                pullWithBadJira.append(pull)
            else:
                if jira2Pulls.get(jiraNum) == None:
                    jira2Pulls[jiraNum] = []
                jira2Pulls[jiraNum].append(pull)
        else:
            pullWithoutJira.append(pull);
    now = datetime.utcnow()
    print "Pull requests that need a JIRA:"
    print "Pull URL\tPull Title\tPull Age\tPull Update Age"
    for pull in pullWithoutJira:
        print ("%s\t%s\t%s\t%s"%(pull.html_url(), pull.title(), daydiff(now, pull.created_at()), daydiff(now, pull.updated_at()))).encode("UTF-8")
    print "\nPull with bad or closed JIRA:"
    print "Pull URL\tPull Title\tPull Age\tPull Update Age"
    for pull in pullWithBadJira:
        print ("%s\t%s\t%s\t%s"%(pull.html_url(), pull.title(), daydiff(now, pull.created_at()), daydiff(now, pull.updated_at()))).encode("UTF-8")
    print "\nOpen JIRA to Pull Requests and Possible Votes, vote detection is very approximate:"
    print "JIRA\tPull Requests\tJira Summary\tJIRA Age\tPull Age\tJIRA Update Age\tPull Update Age"
    print "\tComment Vote\tComment Author\tPull URL\tComment Age"
    for key, value in jira2Pulls.items():
        print ("%s\t%s\t%s\t%s\t%s\t%s\t%s"%(key, mstr(value), openJiras[key].getSummary(), daydiff(now, openJiras[key].getCreated()), daydiff(now, value[0].created_at()), daydiff(now, openJiras[key].getUpdated()), daydiff(now, value[0].updated_at()))).encode("UTF-8")
        for comment in openJiras[key].getComments():
            # print comment.raw()
            if comment.hasVote():
                print (("\t%s\t%s\t%s\t%s")%(comment.getVote(), comment.getAuthor(), comment.getPull(), daydiff(now, comment.getCreated()))).encode("UTF-8")
def main():
    """Report Apache storm pull requests against open STORM JIRAs.

    Prints tab-separated tables: PRs with no JIRA reference in the title,
    PRs referencing a closed/unknown JIRA, and open JIRAs with their PRs
    plus approximate comment votes.  (Python 2: print statements.)
    """
    parser = OptionParser(usage="usage: %prog [options]")
    parser.add_option("-g", "--github-user", dest="gituser",
                      type="string", help="github user, if not supplied no auth is used", metavar="USER")
    (options, args) = parser.parse_args()
    jrepo = JiraRepo("https://issues.apache.org/jira/rest/api/2")
    github = GitHub(options)
    openPullRequests = github.openPulls("apache","storm")
    stormJiraNumber = re.compile("STORM-[0-9]+")
    openJiras = jrepo.openJiras("STORM")
    jira2Pulls = {}
    pullWithoutJira = []
    pullWithBadJira = []
    # Bucket each open PR by the STORM-NNN token found in its title.
    for pull in openPullRequests:
        found = stormJiraNumber.search(pull.title())
        if found:
            jiraNum = found.group(0)
            if not (jiraNum in openJiras):
                pullWithBadJira.append(pull)
            else:
                if jira2Pulls.get(jiraNum) == None:
                    jira2Pulls[jiraNum] = []
                jira2Pulls[jiraNum].append(pull)
        else:
            pullWithoutJira.append(pull);
    now = datetime.utcnow()
    print "Pull requests that need a JIRA:"
    print "Pull URL\tPull Title\tPull Age\tPull Update Age"
    for pull in pullWithoutJira:
        print ("%s\t%s\t%s\t%s"%(pull.html_url(), pull.title(), daydiff(now, pull.created_at()), daydiff(now, pull.updated_at()))).encode("UTF-8")
    print "\nPull with bad or closed JIRA:"
    print "Pull URL\tPull Title\tPull Age\tPull Update Age"
    for pull in pullWithBadJira:
        print ("%s\t%s\t%s\t%s"%(pull.html_url(), pull.title(), daydiff(now, pull.created_at()), daydiff(now, pull.updated_at()))).encode("UTF-8")
    print "\nOpen JIRA to Pull Requests and Possible Votes, vote detection is very approximate:"
    print "JIRA\tPull Requests\tJira Summary\tJIRA Age\tPull Age\tJIRA Update Age\tPull Update Age"
    print "\tComment Vote\tComment Author\tPull URL\tComment Age"
    for key, value in jira2Pulls.items():
        print ("%s\t%s\t%s\t%s\t%s\t%s\t%s"%(key, mstr(value), openJiras[key].getSummary(), daydiff(now, openJiras[key].getCreated()), daydiff(now, value[0].created_at()), daydiff(now, openJiras[key].getUpdated()), daydiff(now, value[0].updated_at()))).encode("UTF-8")
        for comment in openJiras[key].getComments():
            # print comment.raw()
            if comment.hasVote():
                print (("\t%s\t%s\t%s\t%s")%(comment.getVote(), comment.getAuthor(), comment.getPull(), daydiff(now, comment.getCreated()))).encode("UTF-8")
def get_token(config):
    """Return a cached GitHub API token, creating and persisting one if needed.

    Prompts for credentials; if the first attempt yields no token the account
    likely has 2FA enabled, so a one-time code is requested as well.
    (Python 2: raw_input.)
    """
    cached = config.get('token')
    if cached is not None:
        return cached
    github = GitHub()
    login, password = get_user_credentials()
    token = github.create_token(login, password)
    if token is None:
        # First attempt failed: retry with a two-factor one-time code.
        otp = raw_input('Two-factor code: ')
        token = github.create_token(login, password, otp)
    config.put('token', token)
    config.save()
    return token
def cron():
    """Cron entry point: kick off the CI platform(s)."""
    from mod_ci.controllers import start_platform
    from run import config, log
    from database import create_session
    from github import GitHub

    log.info('Run the cron for kicking off CI platform(s).')
    # Fresh DB session for this run.
    db = create_session(config['DATABASE_URI'])
    gh = GitHub(access_token=config['GITHUB_TOKEN'])
    repo = gh.repos(config['GITHUB_OWNER'])(config['GITHUB_REPOSITORY'])
    start_platform(db, repo)
def lp_layout_list(self, upstream=None):
    """List layout files.

    search_order: list layouts from upstream if mentioned,
    otherwise list layouts from the core package.
    """
    if upstream is None:
        return list_files(self.base_path + "/inventory_layouts")
    return GitHub(upstream).list_files("inventory_layouts")
def test_data(self):
    """Test the api output of github.py against recorded fixture data."""
    with open('tests/actual_data.json', 'r') as json_file:
        expected = json.load(json_file)
    gh = GitHub(owner='moby',
                repositories=['moby', 'toolkit', 'tool'],
                resources=['issues', 'commits', 'pull_requests'])
    actual = gh.read()
    # The 'data' payload must match the stored fixture exactly.
    self.assertEqual(actual['data'], expected)
def lp_topo_list(self, upstream=None):
    """
    search_order : list topologies from upstream if mentioned
    list topologies from current folder
    """
    if upstream is None:
        t_files = list_files(self.base_path + "/ex_topo")
        return t_files
    else:
        # Python 2 print statement announcing the remote fetch.
        print "getting from upstream"
        g = GitHub(upstream)
        # NOTE(review): 't_files' below is assigned but never used -- left
        # in place; confirm before removing.
        t_files = []
        files = g.list_files("ex_topo")
        return files
def lp_layout_get(self, layout, upstream=None):
    """Fetch a layout file into ./layouts.

    search_order: get the layout from upstream if mentioned,
    otherwise copy it from the core package.

    :param layout: file name of the layout to fetch.
    :param upstream: optional upstream GitHub source.
    :return: the download URL when fetched from upstream, otherwise None.
    """
    if upstream is None:
        get_file(self.base_path + "/inventory_layouts/" + layout, "./layouts/")
        return None
    g = GitHub(upstream)
    files = g.list_files("inventory_layouts")
    # List comprehension instead of filter()+lambda: works identically on
    # Python 2 and 3 (filter() is lazy on 3) and no longer shadows a name.
    matches = [f for f in files if f['name'] == layout]
    link = matches[0]["download_url"]  # IndexError if the layout is missing upstream
    get_file(link, "./layouts", True)
    return link
def lp_topo_get(self, topo, upstream=None):
    """Fetch a topology file into ./topologies.

    search_order: get the topology from upstream if mentioned,
    otherwise copy it from the core package.
    # need to add checks for ./topologies

    :param topo: file name of the topology to fetch.
    :param upstream: optional upstream GitHub source.
    :return: the download URL when fetched from upstream, otherwise None.
    """
    if upstream is None:
        get_file(self.base_path + "/ex_topo/" + topo, "./topologies/")
        return None
    g = GitHub(upstream)
    files = g.list_files("ex_topo")
    # List comprehension instead of filter()+lambda: works identically on
    # Python 2 and 3 (filter() is lazy on 3) and no longer shadows a name.
    matches = [f for f in files if f['name'] == topo]
    link = matches[0]["download_url"]  # IndexError if the topology is missing upstream
    get_file(link, "./topologies", True)
    return link
def run(): github = GitHub() # 今日趋势 daily, resp = github.get_trending_repository(Since.daily) # 最近一周趋势 weekly, resp = github.get_trending_repository(Since.weekly) # 最近一个月趋势 monthly, resp = github.get_trending_repository(Since.monthly) # 最新数据 readme = generateReadme(daily, weekly, monthly) handleReadme(readme) # 归档 archiveMd = generateArchiveMd(daily, weekly, monthly) handleArchiveMd(archiveMd)
def __init__(self):
    """Connect to Zulip, subscribe to streams and build all feature handlers."""
    # SECURITY NOTE(review): the site URL and API key are hard-coded here and
    # committed to source -- they should be moved to configuration or
    # environment variables and the key rotated.
    self.client = zulip.Client(site="https://technh.zulipchat.com/api/",
                               api_key="vkEQgQYDPUgAGmXaTXdMPsMwlkkgMfM5",
                               email="*****@*****.**")
    self.subscribe_all()
    # One handler object per supported bot feature.
    self.hacknews = Hackernews()
    self.trans = Translate()
    self.movie= Movie()
    self.lyrics = Lyrics()
    self.holiday = Holiday()
    self.currency = Currency()
    self.cricket = Cricket()
    self.github = GitHub()
    self.chatbot = ChatBot(name="technehru")
    print("done init")
    # Keywords recognized as sub-commands in incoming messages.
    self.subkeys = ["use", "help", "translate", "hackernews", "hn", "hotel", "HN", "cricnews", "cricketnews", "movie", "currency", "holiday", "lyrics", "github"]
def updatePullRequests(self):
    """Fetch open GitHub pull requests (async, ETag-cached) and return them
    as a list of plain dicts via a Twisted generator.

    Returns None when GitHub answers 304 (nothing changed since last poll);
    raises on any other non-200 status.  Python 2 code: print statements and
    the ``async=`` keyword argument (a reserved word on Python >= 3.7).
    """
    print 'Updating pull requests from GitHub...'
    if not self.client:
        # Lazily build the client; reuseETag lets GitHub answer 304 cheaply.
        self.client = GitHub(userAgent=userAgent, async=True, reuseETag=True,
                             access_token=githubAccessToken)
    gh_pullrequests = yield self.client.repos(self.username)(self.repo).pulls.get(state='open', per_page=100)
    if self.client.status == 304:
        print "GitHub pull requests was not changed"
        defer.returnValue(None)
    elif self.client.status == 200:
        # Flatten each GitHub PR payload into the small dict shape used
        # elsewhere in this service.
        prs = []
        for gh_pullrequest in gh_pullrequests:
            pr = {}
            pr['id'] = gh_pullrequest['number']
            pr['branch'] = gh_pullrequest['base']['ref']
            pr['author'] = gh_pullrequest['user']['login']
            pr['assignee'] = gh_pullrequest['assignee']['login'] if gh_pullrequest['assignee'] else None
            pr['head_user'] = gh_pullrequest['head']['repo']['owner']['login']
            pr['head_repo'] = gh_pullrequest['head']['repo']['name']
            pr['head_branch'] = gh_pullrequest['head']['ref']
            pr['head_sha'] = gh_pullrequest['head']['sha']
            pr['title'] = gh_pullrequest['title']
            pr['description'] = gh_pullrequest['body']
            prs.append(pr)
        defer.returnValue(prs)
    raise Exception('invalid status', self.client.status)
def __init__(self, config):
    """Initialize lilac mail/repository settings from *config*.

    :param config: ConfigParser-style object with 'lilac', 'repository'
                   and 'smtp' sections.
    """
    self.myaddress = config.get('lilac', 'email')
    self.mymaster = config.get('lilac', 'master')
    self.repomail = config.get('repository', 'email')
    self.trim_ansi_codes = not config.getboolean(
        'smtp', 'use_ansi', fallback=False)
    self.repodir = Path(config.get('repository', 'repodir')).expanduser()
    self.ms = MailService(config)
    github_token = config.get('lilac', 'github_token', fallback=None)
    if github_token:
        # Reuse the value read above instead of querying the config twice.
        self.gh = GitHub(github_token)
    else:
        self.gh = None
def generate_image():
    """Render a Graphviz dot file from a GitHub repo as a PNG response.

    Query params: ``repository`` and ``filepath``.  On download or render
    failure a static 404 image is served instead.
    """
    repo = request.args.get('repository')
    filepath = request.args.get('filepath')
    dot_file = ""
    png_file = ""
    try:
        # Pull the file from GitHub
        dot_file = GitHub.download_file(repo, filepath)
        # Generate Graphviz image from dot file
        png_file = Graphviz.generate_png(dot_file)
    except GitHubFileDownloadException:
        app.logger.error(f'Error downloading {filepath} from {repo}')
        return send_file('static/404.png', mimetype='image/png')
    except GraphvizRenderException:
        app.logger.error(f'Error rendering {filepath} from {repo}')
        return send_file('static/404.png', mimetype='image/png')
    # Remove the png file after the file is sent
    @after_this_request
    def remove_file(response):
        # Clean up both temp files once the response has gone out.
        os.remove(dot_file)
        os.remove(png_file)
        return response
    return send_file(png_file, mimetype='image/png')
def setup_repos():
    """
    Ensure we have a matching GitHub repo for every import project
    """
    github = GitHub(access_token=GITHUB_TOKEN, scope='user,repo')
    #current_repos = github.users(GITHUB_USERNAME).repos.get()
    current_repos = github.orgs(GITHUB_ORGANIZATION).repos.get()
    repo_names = [x['name'] for x in current_repos]
    for project in PROJECTS:
        target_repo_name = GITHUB_REPO_NAME_FORMAT % (project['github_name'])
        if target_repo_name not in repo_names:
            # Create the missing mirror repo under the organization.
            github.orgs(GITHUB_ORGANIZATION).repos.post(
                name=target_repo_name, description='Mirrored repository')
            # FIXME
def __init__(self, config: Dict[str, Any]) -> None:
    """Initialize lilac mail/repository settings from a dict-like *config*.

    Expects 'lilac', 'repository' and 'smtp' sections.
    """
    self.myaddress = config['lilac']['email']
    self.mymaster = config['lilac']['master']
    self.logurl_template = config['lilac'].get('logurl')
    self.repomail = config['repository']['email']
    self.name = config['repository']['name']
    self.trim_ansi_codes = not config['smtp']['use_ansi']
    self.repodir = Path(config['repository']['repodir']).expanduser()
    self.ms = MailService(config)
    # GitHub integration is optional: only enabled when a token is configured.
    github_token = config['lilac'].get('github_token')
    if github_token:
        self.gh = GitHub(github_token)
    else:
        self.gh = None
    self.mods: LilacMods = { }  # to be filled by self.load_all_lilac_and_report()
def remote_repo() -> Repository:
    """Return the spectacles-ci/eye-exam repository via a VCR-recorded
    GitHub client (Authorization header filtered from the cassette)."""
    token = os.environ.get("GITHUB_ACCESS_TOKEN")
    gh = GitHub(token)
    cassette = vcr.use_cassette(
        "tests/cassettes/init_github.yaml",
        filter_headers=["Authorization"],
        decode_compressed_response=True,
    )
    with cassette:
        return gh.get_repo("spectacles-ci/eye-exam")
def test_tools_github():
    """Smoke-test the GitHub helpers: list repos, then print each repo's
    releases and branches."""
    # NOTE(review): 'get_secret' is subscripted -- presumably a mapping of
    # credentials rather than a function; confirm.
    GIT_AUTH = get_secret['CREDS1']['AUTH_TOKEN']
    gh = GitHub(access_token=GIT_AUTH)
    repos = get_repos(gh)
    for repo_dict in repos:
        releases = get_releases(gh, repo_dict)
        branches = get_branches(gh, repo_dict)
        pprint(releases)
        print()
        pprint(branches)
def __init__(self, config: configparser.ConfigParser):
    """Initialize lilac mail/repository settings from *config*.

    :param config: parsed configuration with 'lilac', 'repository' and
                   'smtp' sections.
    """
    self.myaddress = config.get('lilac', 'email')
    self.mymaster = config.get('lilac', 'master')
    self.repomail = config.get('repository', 'email')
    self.name = config.get('repository', 'name')
    self.trim_ansi_codes = not config.getboolean(
        'smtp', 'use_ansi', fallback=False)
    self.repodir = Path(config.get('repository', 'repodir')).expanduser()
    self.ms = MailService(config)
    github_token = config.get('lilac', 'github_token', fallback=None)
    if github_token:
        # Reuse the value read above instead of querying the config twice.
        self.gh = GitHub(github_token)
    else:
        self.gh = None
    self.mods: LilacMods = { }  # to be filled by self.load_all_lilac_and_report()
def cron(testing=False):
    """Script to run from cron for Sampleplatform.

    :param testing: when True, run only the Linux KVM processor instead of
                    starting every platform.
    """
    from mod_ci.controllers import start_platforms, kvm_processor, TestPlatform
    from flask import current_app
    from run import config, log
    from database import create_session
    from github import GitHub

    log.info('Run the cron for kicking off CI platform(s).')
    # Fresh DB session for this run.
    db = create_session(config['DATABASE_URI'])
    gh = GitHub(access_token=config['GITHUB_TOKEN'])
    repo = gh.repos(config['GITHUB_OWNER'])(config['GITHUB_REPOSITORY'])
    if testing is True:
        kvm_processor(current_app._get_current_object(), db,
                      config.get('KVM_LINUX_NAME', ''), TestPlatform.linux,
                      repo, None)
    else:
        start_platforms(db, repo)
def run(argv=None):
    """Main code body.

    Streams GitHub resource data batch by batch and writes each batch out,
    stopping when the reader reports depletion with None.

    :returns None
    """
    known_args, extra = parse_command_line_args(argv)
    # Build the github reader from the parsed CLI parameters.
    gh = GitHub(owner=known_args.owner,
                repositories=known_args.repositories.split(','),
                resources=known_args.resources.split(','))
    batch = gh.read()
    while batch is not None:
        write_to_file(data=batch)
        batch = gh.read()
def get_comments(owner, repo, user, csvfile):
    """Write all PR review comments by *user* on owner/repo to *csvfile*.

    Pages through the GitHub API until an empty page is returned.
    (Python 2: print statement, str.encode on the body.)
    """
    gh = GitHub(access_token=GITHUB_ACCESS_TOKEN)
    page = 1
    writer = DictWriter(csvfile, fieldnames=CSV_FIELD_NAMES)
    writer.writeheader()
    while True:
        print "Getting page {}".format(page)
        new_comments = gh.repos(owner)(repo).pulls.comments.get(page=page)
        if len(new_comments) == 0:
            # An empty page means we've walked past the last comment.
            break
        else:
            page = page + 1
            for comment in new_comments:
                # Keep only comments authored by the requested user.
                if comment['user']['login'] == user:
                    row = {
                        'message': comment['body'].encode('utf8'),
                        'url': comment['html_url'],
                        'username': comment['user']['login']
                    }
                    writer.writerow(row)
class TestExportTable(unittest.TestCase):
    """Tests for exporting the github_data table to CSV.

    All tests are skipped on Travis CI (external credentials unavailable);
    the None checks use idiomatic ``is None`` instead of ``== None``.
    """

    # Corresponds to schema in `db/data_schema.sql`
    header_row = "id,date_updated,language,pull_requests,open_issues,"\
                 "number_of_commits,number_of_branches,number_of_releases,"\
                 "number_of_contributors,number_of_watchers,"\
                 "number_of_stargazers,number_of_forks\n"

    def setUp(self):
        if os.environ.get('TRAVIS') is None:
            self.github = GitHub()
            self.db = DBConnector()
            self.config = Config()
            self.github.update_library_data(self.config.github_user,
                                            self.config.github_repos[0])
            self.filename = "./csv/{}.csv".format(GitHubData.__tablename__)

    def test_file_export_succeeds(self):
        if os.environ.get('TRAVIS') is None:
            self.assertFalse(os.path.exists(self.filename))
            self.db.export_table_to_csv(GitHubData)
            self.assertTrue(os.path.exists(self.filename))

    def test_file_export_has_correct_data(self):
        if os.environ.get('TRAVIS') is None:
            self.db.export_table_to_csv(GitHubData)
            with open(self.filename, 'r') as fp:
                exported_data = fp.readlines()
            # Table has correct header
            self.assertEqual(exported_data[0], self.header_row)
            # Table exported correct number of rows
            num_exported_rows = len(exported_data) - 1  # exclude header
            num_db_rows = len(self.db.get_data(GitHubData))
            self.assertEqual(num_exported_rows, num_db_rows)

    def tearDown(self):
        if os.environ.get('TRAVIS') is None:
            os.remove(self.filename)
async def cmd_gui(
    repo: Optional[str],
    exclude_check_run_names: Sequence[str],
    exclude_check_run_conclusions: Sequence[str],
):
    """Launch the GUI against *repo* (falls back to the current directory)."""
    options = CommandOptions(
        exclude_check_run_names=exclude_check_run_names,
        exclude_check_run_conclusions=exclude_check_run_conclusions,
    )
    async with GitHub(repo_path=repo or "") as gh:
        await gui_main(gh, options)
def pull_request():
    """CLI entry point: create a GitHub pull request for the current branch.

    Returns 1 on git/GitHub errors; otherwise prints the new PR's URL.
    """
    args = _make_argparser().parse_args()
    if args.debug:
        # Re-enable all logging that was disabled at import time.
        logging.disable(logging.NOTSET)
    try:
        git = Git()
    except GitException as ex:
        print('ERROR: ' + ex.message)
        return 1
    config = CmdprConfig(CONFIG)
    try:
        github = GitHub(get_token(config))
        base = args.base[0]
        title, body = '', ''
        if args.message is None:
            # No -m given: derive title/body from the commits on top of base.
            title, body = create_request_title(git.get_commits(base))
        else:
            title = args.message[0]
        if title is None:
            print('ERROR: There\'s no title for pull request')
            return 1
        repo_info = git.get_repo_info()
        bug_tracker = config.get("bug_tracker")
        if bug_tracker:
            # Link the branch's tracker task as the PR body (overrides any
            # body derived from the commits above).
            body = bug_tracker.format(task=repo_info['branch'])
        pr_url = github.create_pull_request(repo_info, title, base, body)
        print(pr_url)
    except GitHubException as ex:
        print('ERROR: ' + ex.message)
        return 1
class TestGitHub(unittest.TestCase):
    """Integration tests for GitHub.update_library_data.

    Skipped on Travis CI (external credentials unavailable); the None
    checks use idiomatic ``is None`` instead of ``== None``.
    """

    def setUp(self):
        if os.environ.get('TRAVIS') is None:
            self.github = GitHub()
            self.db = DBConnector()
            self.config = Config()

    def test_update_library_data(self):
        if os.environ.get('TRAVIS') is None:
            res = self.github.update_library_data(self.config.github_user,
                                                  self.config.github_repos[0])
            self.assertTrue(isinstance(res, GitHubData))
            # Clean up the row we just created.
            res = self.db.delete_data(res.id, 'github_data')
            self.assertTrue(res)
async def cmd_set_state(
    repo: str,
    environment: str,
    deployment_id: int,
    state: str,
    description: Optional[str],
):
    """Set the status of an existing GitHub deployment."""
    # Map the CLI state string onto the DeploymentState enum member.
    new_status = dict(
        deployment_id=deployment_id,
        state=DeploymentState[state],
        environment=environment,
        description=description,
    )
    async with GitHub(repo_path=repo) as gh:
        await gh.create_deployment_status(**new_status)
def __init__(self, debug=False):
    """
    Constructor for the Processor class.

    :param debug: If set to True, the console will also log debug messages.
    :return:
    """
    # Init GitHub with the configured access token
    self.g = GitHub(access_token=Configuration.token)
    self.debug = debug
    self.logger = LogConfiguration(self.debug).create_logger("Processor")
def start(data: Data):
    """Drive project setup: clone the repo, deploy admin files, configure
    Travis/CodeClimate and update badges.

    :param data: project configuration object answering feature questions.
    """
    admin_files = AdminFiles(data)
    github = GitHub(data)
    github.clone_repo()
    admin_files.start_deploy()
    if data.use_git_hub():
        github.execute()
    travis = Travis(data)
    if data.use_travis():
        travis.enable_repo()
        if data.base_64_file():
            travis.set_envs("GCLOUD_SERVICE_KEY_DEV", get_base_64(data), False)
        admin_files.update_travis_badge(travis.get_badge_value(), data.project_dir())
        github.commit("Update Travis Badge")
    else:
        # No Travis: clear the badge instead.
        admin_files.update_travis_badge("", data.project_dir())
        github.commit("Remove Travis Badge")
    if data.use_codeclimate():
        code_climate = CodeClimate(data)
        code_climate.enable_repo()
        if data.use_travis():
            # NOTE(review): 'get_test_reporter_id' is passed without being
            # called -- if it is a method (not a property) this stores the
            # bound-method object as the env value; confirm and add '()'.
            travis.set_envs("CC_TEST_REPORTER_ID", code_climate.get_test_reporter_id, False)
        admin_files.update_code_climate_badge(
            code_climate.get_test_coverage_badge(),
            code_climate.get_maintainability_badge())
def Run(self):
    """Refresh the local GitHub repository DB and show language aggregates.

    Builds device-specific paths to the credential/API/account databases,
    updates the local repository DB, then displays a language aggregation.
    """
    github_user_name = 'ytyaru'
    os_user_name = getpass.getuser()
    device_name = '85f78c06-a96e-4020-ac36-9419b7e456db'
    path_db_base = 'mint/root/db/Account/GitHub'
    path_db_license = '/media/{0}/{1}/{2}/private/v0/GitHub.Licenses.sqlite3'.format(os_user_name, device_name, path_db_base)
    path_db_api = "/media/{0}/{1}/{2}/public/v0/GitHub.Apis.sqlite3".format(os_user_name, device_name, path_db_base)
    path_db_account = '/media/{0}/{1}/{2}/private/v0/GitHub.Accounts.sqlite3'.format(os_user_name, device_name, path_db_base)
    # path_db_repo = '/media/{0}/{1}/{2}/public/v0/GitHub.Repositories.{3}.sqlite3'.format(os_user_name, device_name, path_db_base, github_user_name)
    # BUG FIX: the original used placeholder {3} with a single format argument,
    # which raises IndexError at runtime; the sole argument is index {0}.
    path_db_repo = './GitHub.Repositories.{0}.sqlite3'.format(github_user_name)
    g = GitHub.GitHub(path_db_account, path_db_api, path_db_repo, github_user_name)
    res = g.db.update_local_db()
    # BUG FIX: 'db_path_repo' was an undefined name (NameError); the intended
    # variable is 'path_db_repo' built above.
    aggr = github.db.repositories.LanguagesAggregate.LanguagesAggregate(path_db_repo)
    aggr.show()
class GitHubScooper(object):
    """Scoops a user's public GitHub events into normalized article dicts."""

    def __init__(self, type='GitHub', user=None, token=None):
        # 'type' is accepted for interface compatibility but not stored.
        self.user = user
        self.gh = GitHub(access_token=token)

    def scoop(self):
        """Return a list of dicts (checksum/headline/content/pub_date),
        one per public event of the configured user."""
        articles = []
        for event in self.gh.users(self.user).events.public.get():
            # SHA-256 of the event id serves as a stable dedup checksum.
            digest = sha256()
            digest.update(event['id'].encode())
            articles.append(dict(
                checksum=digest.hexdigest(),
                headline='{} event to repo {}'.format(
                    event['type'], event['repo']['name']),
                content='contentfoo',
                pub_date=datetime.strptime(
                    event['created_at'], '%Y-%m-%dT%H:%M:%SZ')))
        return articles
def post(self):
    """Handle a bug-report form POST: validate fields and create a GitHub issue.

    Responds 400 on missing/empty required fields, redirects to the created
    issue on success, 500 when issue creation fails.  (Python 2 / webapp2.)
    """
    try:
        message = {
            'title': self.request.POST['title'],
            'user': self.request.POST['user'],
            'browser': self.request.POST['browser'],
            'message': self.request.POST['message'],
            'version': self.request.POST['version'],
        }
    except KeyError as err:
        self.response.status = 400
        self.response.headers['Content-Type'] = 'text/plain; charset=utf-8'
        self.response.write('Missing Key: "%s"'%err.message)
        return
    if not message['title']:
        self.response.status = 400
        self.response.headers['Content-Type'] = 'text/plain; charset=utf-8'
        # German: "The title of the bug report was not provided"
        self.response.write('Der Titel der Fehlermeldung wurde nicht angegeben')
        return
    if not message['message']:
        self.response.status = 400
        self.response.headers['Content-Type'] = 'text/plain; charset=utf-8'
        # German: "The description of the bug report was not provided"
        self.response.write('Die Beschreibung der Fehlermeldung wurde nicht angegeben')
        return
    # savedata: data from the save button
    try:
        savedata = self.request.POST['save']
        savefilename = savedata.filename
        savedata = savedata.file.read()
    except:
        # Attachment is optional.
        savefilename = None
        savedata = None
    try:
        ghcreds = get_githubcredentials()
    except:
        # NOTE(review): 'err' here refers to the KeyError leaked from the
        # first try block (Python 2 scoping) -- or is undefined if that block
        # succeeded, which raises NameError; this message is likely wrong.
        self.response.headers['Content-Type'] = 'text/plain; charset=utf-8'
        self.response.write('Missing Key: "%s"'%err.message)
        return
    gh = GitHub(ghcreds.owner, ghcreds.repo, ghcreds.token)
    url = ''
    try:
        url = gh.createIssue(message, savedata, savefilename)
    except:
        pass
    if url:
        # everything's fine
        # self.response.status = 201
        # self.response.headers['Content-Type'] = 'text/plain; charset=utf-8'
        # self.response.write(url)
        self.redirect(url.encode('ascii', 'ignore'))
    else:
        self.response.status = 500
        self.response.headers['Content-Type'] = 'text/plain; charset=utf-8'
        self.response.write("Server Error: cannot create issue. This might be caused by missing or invalid values")
def __init__(self, script = ''):
    """Store the target script name and create a GitHub client.

    :param script: name (or URL) of the vim script this instance operates on.
    """
    self.CURR_SCRIPT = script
    self.github = GitHub()
class Bat(object):
    """Manages vim scripts as git bundles under ~/.vim/vimpyre via pathogen."""

    # Paths used to install pathogen.vim and manage vimpyre bundles.
    CURR_SCRIPT = ''
    VIM_PATH = path.join(path.expanduser('~'), '.vim')
    AUTOLOAD_PATH = path.join(VIM_PATH, 'autoload')
    VIMPYRE_PATH = path.join(VIM_PATH, 'vimpyre')

    @property
    def pathogen_url(self):
        # Allow overriding the pathogen source via environment variable,
        # falling back to the canonical raw file on GitHub.
        try:
            return environ['VIM_PATHOGEN_URL']
        except:
            return 'https://raw.github.com/tpope/vim-pathogen/master/autoload/pathogen.vim'

    def __init__(self, script = ''):
        self.CURR_SCRIPT = script
        self.github = GitHub()

    def _check_name(self):
        # Accept direct URLs untouched; otherwise search GitHub for an
        # exact-name match and return its metadata dict (or [] / None).
        if self.CURR_SCRIPT.startswith('http') or self.CURR_SCRIPT.startswith('git'):
            return self.CURR_SCRIPT
        try:
            search_ret = self.search()
            rets = [item for item in search_ret if self.CURR_SCRIPT == item['name']]
            if rets:
                rets[0]['homepage'] = 'https://github.com/' + self.CURR_SCRIPT
                return rets[0]
            return []
        except:
            pass

    def _filter_script_name(self):
        # "owner/name" -> "name"
        return self.CURR_SCRIPT.split('/')[-1]

    def _render_fetch_url(self, ret):
        # Metadata dicts carry a repo URL that needs a '.git' suffix;
        # plain strings are already clone URLs.
        if type(ret) == dict:
            fetch_url = ret['url'] + '.git'
        else:
            fetch_url = ret
        return fetch_url

    @property
    def bundles(self):
        """ List of bundles in the vimpyre path """
        try:
            with util.cd(self.VIMPYRE_PATH):
                return [item for item in listdir('.') if path.isdir(item)]
        except OSError:
            console('Cannot access your vimpyre path!')

    def install_base(self):
        """ Install pathogen.vim and create vimpyre directory.

        >>> bat = Bat()
        >>> bat.install_base()
        => => Send a bat to catch pathogen.vim ...
        Catch done! Please add the following message to your .vimrc:
        execute pathogen#infect('bundle/{}', 'vimpyre/{}')
        """
        try:
            console('=> => Send a bat to catch pathogen.vim ...')
            raw_urlopen = urllib.urlopen(self.pathogen_url)
            if raw_urlopen.getcode() == 200:
                util.mkdir_p(self.AUTOLOAD_PATH)
                util.mkdir_p(self.VIMPYRE_PATH)
                raw_pathogen = raw_urlopen.read()
                pathogen = path.join(self.AUTOLOAD_PATH, 'pathogen.vim')
                with open(pathogen, 'w') as f:
                    f.write(raw_pathogen)
                console('Catch done! Please add the following to your .vimrc:')
                console("execute pathogen#infect('bundle/{}', 'vimpyre/{}')")
            else:
                console('Pathogen vimscript not found in %s' % self.pathogen_url)
                console('You can change this url with enviroment variable VIM_PATHOGEN_URL')
                console('Catch fail! Please try again!')
        except:
            console('[Unexpected Error] Catch fail! Please try again!')

    def install(self):
        console('=> => Send a bat to catch %s' % self.CURR_SCRIPT)
        try:
            ret = self._check_name()
            if ret:
                fetch_url = self._render_fetch_url(ret)
                cmd_fetch = 'git clone --depth 1 %s' % fetch_url
                util.mkdir_p(self.VIMPYRE_PATH)
                with util.cd(self.VIMPYRE_PATH):
                    system(cmd_fetch)
            else:
                msg = ('%s not found! Please use `vimpyre search <vim-script>`'
                       ' to check the script name and install again!' % self.CURR_SCRIPT)
                console(msg)
        except:
            # NOTE(review): any failure falls back to bootstrapping pathogen;
            # confirm this catch-all is intentional.
            self.install_base()

    def update(self):
        console('=> => Send a bat to update %s' % self.CURR_SCRIPT)
        bundle_path = path.join(self.VIMPYRE_PATH, self._filter_script_name())
        if path.isdir(bundle_path):
            with util.cd(bundle_path):
                system('git pull')
            console('%s update done!' % self.CURR_SCRIPT)
        else:
            console('%s does not exist!' % self.CURR_SCRIPT)

    def update_all(self):
        console('=> => Send bats to update all installed vim-scripts ...')
        if not self.bundles:
            console('No vim-scripts! Please use `vimpyre install <vim-scripts>` first!')
            sys.exit(1)
        for item in self.bundles:
            console('=> Update %s ...' % item)
            with util.cd(path.join(self.VIMPYRE_PATH, item)):
                system('git pull')
        console('Update all vim-scripts done!')

    def remove(self):
        console('=> => Send a bat to bite %s' % self.CURR_SCRIPT)
        bundle_path = path.join(self.VIMPYRE_PATH, self._filter_script_name())
        if path.isdir(bundle_path):
            shutil.rmtree(bundle_path)
            console('%s removed!' % self.CURR_SCRIPT)
        else:
            console('%s does not exist!' % self.CURR_SCRIPT)

    def remove_all(self):
        console('=> => Send bats to clean all vimpyre files')
        try:
            with util.cd(self.VIMPYRE_PATH):
                for bundle in self.bundles:
                    shutil.rmtree(bundle)
            console('Remove vimpyre bundles done!')
        except OSError:
            console('Could not remove bundles! Please verify permissions of '
                    'your bundle directories.')
        else:
            console('Please remove %s/pathogen.vim manually!' % self.AUTOLOAD_PATH)
            console('')
            console('If you wish to use vimpyre to manage your vim scripts again, you need to use `vimpyre init` first!')

    def search(self):
        """ Search github vim-scripts, return array.

        >>> bat = Bat('xxxxxxxx')
        >>> bat.search()
        []
        >>> bat = Bat('pathogen')
        >>> bat.search() # doctest: +ELLIPSIS
        [{..., 'name': 'pathogen.vim'}]
        """
        rets = self.github.search(self.CURR_SCRIPT)
        return [item for item in rets
                if self.CURR_SCRIPT.lower() in item['name'].lower()
                or self.CURR_SCRIPT.lower() in item['description'].lower()]

    def open_homepage(self):
        console('=> => Send bats to open your browser...')
        bundle = self._check_name()
        if type(bundle) == dict and bundle['homepage']:
            webbrowser.open(bundle['homepage'])
        elif bundle:
            webbrowser.open(bundle)
        else:
            console('Sorry, no homepage found for this script.')

    def list_installed(self):
        console('=> => Send bats to collect all your vim-scripts')
        if not self.bundles:
            console('No vim-scripts found!')
            sys.exit(1)
        for bundle in self.bundles:
            bundle_path = path.join(self.VIMPYRE_PATH, bundle)
            with util.cd(bundle_path):
                if path.isfile(path.join('.git', 'config')):
                    # Extract the origin URL straight from .git/config.
                    url = subprocess.check_output(['grep', 'url', '.git/config']).replace('\turl = ', '').replace('\n', '')
                    console('\033[1m%s\033[m => %s' % (bundle, url))
                else:
                    console('\033[0;31m%s\033[m => %s' % (bundle, 'No git repository!'))
from github import GitHub if False: gh = GitHub("Test") res = gh.users('octocats').get() print res else: from twisted.internet import threads, reactor, defer gh = GitHub("Test", async=True) @defer.inlineCallbacks def f(): print "Async" res = yield gh.users('octocats').get() print res reactor.stop() f() reactor.run()
# Smoke-test script: exercises the `github` API wrapper against a few public
# repositories (user profiles, single commits, code search) and prints the
# results.
#
# Fix: removed the accumulated dead commented-out duplicates of these calls
# and normalized the `print( x )` spacing; the live calls, their order, and
# their printed output are unchanged.
from github import GitHub

gh = GitHub()

# Warm-up queries whose results are deliberately discarded (they were only
# printed during earlier debugging).
user = gh.users('stevesun112').get()
commit = gh.repos('imsure', 'hello-antares').commits(
    '083a8604a73dcb5eda83a5bdd6638a93cfa60045').get()
search = gh.search.code.get(q="addClass in:file language:js repo:jquery/jquery")
search = gh.search.code.get(q="create_table in:file language:py repo:imsure/hello-antares")

# AzNOAOTares / antares-docs: profile, one commit URL, and a code search.
user = gh.users('AzNOAOTares').get()
print(user)
commit = gh.repos('AzNOAOTares', 'antares-docs').commits(
    'ee22aff520fba4e69971c9ac86a383e0b2374bb6').get()
print(commit['html_url'])
search = gh.search.code.get(q="maketitle in:file language:tex repo:AzNOAOTares/antares-docs")
print(search['items'][0]['html_url'])

# AzNOAOTares / architecture: profile and one commit URL.
user = gh.users('AzNOAOTares').get()
print(user)
commit = gh.repos('AzNOAOTares', 'architecture').commits(
    '93d4c7d2e6d6950dbeebff0de9c33941ecf3d109').get()
print(commit['html_url'])
debug = True # Read the GitHub projects from the ./config file base_path = os.path.dirname(os.path.realpath(__file__)) config_file_path = os.path.join(base_path, 'config') config = ConfigParser.SafeConfigParser() config.read(config_file_path) # Read the GitHub auth info token_path = os.path.expanduser('~/.gluster_forge_credentials') token_file = ConfigParser.SafeConfigParser() token_file.read(token_path) token = token_file.get('personal_token', 'token') # Authenticate to GitHub using my generated Personal Access token as per https://github.com/settings/tokens gh = GitHub(access_token=token) # Open the ./db/project_stats.db SQLite3 database db_path = os.path.join(base_path, 'db/project_stats.db') conn = sqlite3.connect(db_path) # Connect to the database c = conn.cursor() # Create the SQLite3 table to store the info, if it's not already present sql = ('CREATE TABLE IF NOT EXISTS social_stats (project TEXT, time_stamp TEXT, ' 'watchers INTEGER, stars INTEGER, forks INTEGER, commits INTEGER, downloads INTEGER)') c.execute(sql) conn.commit() # Loop through the projects in the config file
class Repo:
    """Packaging-repository helper: resolves package maintainers (from
    lilac.py metadata, GitHub, or git history) and sends them report mail.
    """

    def __init__(self, config):
        # Addresses used when sending reports.
        self.myaddress = config.get('lilac', 'email')
        self.mymaster = config.get('lilac', 'master')
        self.repomail = config.get('repository', 'email')
        self.name = config.get('repository', 'name')
        # Strip ANSI escape codes from outgoing mail unless explicitly allowed.
        self.trim_ansi_codes = not config.getboolean(
            'smtp', 'use_ansi', fallback=False)

        self.repodir = Path(config.get('repository', 'repodir')).expanduser()

        self.ms = MailService(config)
        github_token = config.get('lilac', 'github_token', fallback=None)
        if github_token:
            # GitHub lookup is optional; only available when a token is set.
            self.gh = GitHub(config.get('lilac', 'github_token', fallback=None))
        else:
            self.gh = None

        self.mods: LilacMods = {}  # to be filled by self.load_all_lilac_and_report()

    # NOTE(review): lru_cache on an instance method keys on `self` and keeps
    # the instance alive for the cache's lifetime; presumably acceptable for
    # a long-lived singleton Repo — confirm.
    @lru_cache()
    def maintainer_from_github(self, username: str) -> Optional[Maintainer]:
        """Look up a maintainer's name/email from their GitHub profile.

        Returns None when the user hides their email; raises ValueError when
        no GitHub token was configured.
        """
        if self.gh is None:
            raise ValueError('未设置 github token,无法从 GitHub 取得用户 Email 地址')

        userinfo = self.gh.get_user_info(username)
        if userinfo['email']:
            return Maintainer(userinfo['name'], userinfo['email'], username)
        else:
            return None

    @lru_cache()
    def find_maintainers(self, mod: LilacMod) -> List[Maintainer]:
        """Resolve the maintainers of a package.

        Tries, in order: explicit email in lilac.py metadata, GitHub profile
        lookup, and finally the git history of the package directory. Any
        problems found in the metadata are mailed to the git-derived
        maintainer.
        """
        ret = []
        errors = []

        maintainers: Optional[List[Dict[str, str]]] = getattr(mod, 'maintainers', None)
        if maintainers is not None:
            for m in maintainers:
                if 'github' in m and 'email' in m:
                    # Both given: no lookup needed.
                    ret.append(
                        Maintainer.from_email_address(m['email'], m['github'])
                    )
                elif 'github' in m:
                    # Only a GitHub handle: try to fetch the email.
                    try:
                        u = self.maintainer_from_github(m['github'])
                    except Exception as e:
                        errors.append(f'从 GitHub 获取用户 Email 地址时出错:{e!r}')
                    else:
                        if u is None:
                            errors.append(f'GitHub 用户 {m["github"]} 未公开 Email 地址')
                        else:
                            ret.append(u)
                else:
                    logger.error('unsupported maintainer info: %r', m)
                    errors.append(f'不支持的格式:{m!r}')
                    continue

        if not ret or errors:
            # fallback to git
            # NOTE(review): `dir` shadows the builtin.
            dir = self.repodir / mod.pkgbase
            git_maintainer = self.find_maintainer_by_git(dir)

        if errors:
            # Report metadata problems to the git-derived maintainer.
            error_str = '\n'.join(errors)
            self.sendmail(
                git_maintainer,
                subject = f'{mod.pkgbase} 的 maintainers 信息有误',
                msg = f"以下 maintainers 信息有误,请修正。\n\n{error_str}\n",
            )

        if not ret:
            logger.warning("lilac doesn't give out maintainers for %s, "
                           "fallback to git.", mod.pkgbase)
            return [git_maintainer]
        else:
            return ret

    def find_maintainer_by_git(
        self,
        dir: Path = Path('.'),
        file: str = '*',
    ) -> Maintainer:
        """Return the most recent committer of *file* under *dir* whose
        address is not our own (so lilac's own commits are skipped).
        """
        me = self.myaddress
        cmd = [
            "git", "log", "--format=%H %an <%ae>", "--", file,
        ]
        p = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            universal_newlines=True,
            cwd = dir,
        )
        try:
            # Read commits newest-first until one by someone else appears.
            # NOTE(review): if every commit is ours, readline() eventually
            # returns '' and split() raises ValueError — confirm intended.
            while True:
                line = p.stdout.readline()
                commit, author = line.rstrip().split(None, 1)
                if me not in author:
                    return Maintainer.from_email_address(author)
        finally:
            # Stop `git log` early; we rarely need the full history.
            p.terminate()

    def report_error(self, subject: str, msg: str) -> None:
        """Mail an error report to the repository master."""
        self.ms.sendmail(self.mymaster, subject, msg)

    def send_error_report(
        self,
        mod: Union[LilacMod, str],
        *,
        msg: Optional[str] = None,
        exc: Optional[Tuple[Exception, str]] = None,
        subject: Optional[str] = None,
    ) -> None:
        """Assemble and send an error report to a package's maintainers.

        *mod* may be a LilacMod or a pkgbase string (then the maintainer is
        derived from git history). At least one of *msg* / *exc* must be
        given; *exc* is an (exception, formatted-traceback) pair. A '%s' in
        *subject* is filled with the pkgbase.
        """
        if msg is None and exc is None:
            raise TypeError('send_error_report received inefficient args')

        if isinstance(mod, str):
            maintainers = [self.find_maintainer_by_git(file=mod)]
            pkgbase = mod
        else:
            maintainers = self.find_maintainers(mod)
            pkgbase = mod.pkgbase

        msgs = []
        if msg is not None:
            msgs.append(msg)

        if exc is not None:
            exception, tb = exc
            # Pick a subject and body appropriate to the exception type;
            # an explicit *subject* argument always wins.
            if isinstance(exception, subprocess.CalledProcessError):
                subject_real = subject or '在编译软件包 %s 时发生错误'
                msgs.append('命令执行失败!\n\n命令 %r 返回了错误号 %d。'
                            '命令的输出如下:\n\n%s' % (
                                exception.cmd, exception.returncode, exception.output))
                msgs.append('调用栈如下:\n\n' + tb)
            elif isinstance(exception, api.AurDownloadError):
                subject_real = subject or '在获取AUR包 %s 时发生错误'
                msgs.append('获取AUR包失败!\n\n')
                msgs.append('调用栈如下:\n\n' + tb)
            else:
                subject_real = subject or '在编译软件包 %s 时发生未知错误'
                msgs.append('发生未知错误!调用栈如下:\n\n' + tb)
        else:
            if subject is None:
                raise ValueError('subject should be given but not')
            subject_real = subject

        if '%s' in subject_real:
            subject_real = subject_real % pkgbase

        # NOTE(review): `build_output` is not defined anywhere in this class
        # or the visible scope — presumably a module-level global set by the
        # build driver; confirm, otherwise this line raises NameError.
        if build_output:
            msgs.append('编译命令输出如下:\n\n' + build_output)

        msg = '\n'.join(msgs)

        if self.trim_ansi_codes:
            msg = ansi_escape_re.sub('', msg)

        addresses = [str(x) for x in maintainers]
        logger.debug('mail to %s:\nsubject: %s\nbody: %s',
                     addresses, subject_real, msg[:200])
        self.sendmail(addresses, subject_real, msg)

    def sendmail(self, who: Union[str, List[str], Maintainer],
                 subject: str, msg: str) -> None:
        """Send mail; a Maintainer is converted to its address string."""
        if isinstance(who, Maintainer):
            who = str(who)
        self.ms.sendmail(who, subject, msg)

    def send_repo_mail(self, subject: str, msg: str) -> None:
        """Send mail to the repository's configured address."""
        self.ms.sendmail(self.repomail, subject, msg)

    def manages(self, dep) -> bool:
        """True if *dep*'s package directory is one of our loaded mods."""
        return dep.pkgdir.name in self.mods

    def load_all_lilac_and_report(self) -> Set[str]:
        """Load every lilac.py in the repo; mail a report for each failure.

        Returns the set of pkgbases whose lilac.py failed to load.
        """
        self.mods, errors = lilacpy.load_all(self.repodir)
        failed = set(errors)
        for name, exc_info in errors.items():
            tb_lines = traceback.format_exception(*exc_info)
            tb = ''.join(tb_lines)
            logger.error('error while loading lilac.py for %s', name,
                         exc_info=exc_info)
            exc = exc_info[1]
            if not isinstance(exc, Exception):
                # Non-Exception (e.g. KeyboardInterrupt): propagate, don't mail.
                raise
            self.send_error_report(name, exc=(exc, tb),
                                   subject='为软件包 %s 载入 lilac.py 时失败')
            build_logger_old.error('%s failed', name)
            build_logger.exception('lilac.py error', pkgbase = name)

        return failed
# Setup phase of the Trac -> GitHub migration script: parse CLI arguments,
# configure logging, and connect to both Trac's SQLite DB and GitHub.
(options, args) = parser.parse_args()

try:
    # Positional args: trac DB path, GitHub credentials, target repo slug.
    [trac_db_path, github_username, github_password, github_repo] = args
except ValueError:
    parser.error('Wrong number of arguments')
if not '/' in github_repo:
    parser.error('Repo must be specified like "organization/project"')

# --quiet raises the threshold to INFO; default is full DEBUG output.
if options.quiet:
    logging.basicConfig(level=logging.INFO)
else:
    logging.basicConfig(level=logging.DEBUG)

trac = Trac(trac_db_path)
github = GitHub(github_username, github_password, github_repo)

# Show the Trac usernames assigned to tickets as an FYI
logging.info("Getting Trac ticket owners (will NOT be mapped to GitHub username)...")
for (username,) in trac.sql('SELECT DISTINCT owner FROM ticket'):
    if username:
        username = username.strip()  # username returned is tuple like: ('phred',)
        logging.debug("Trac ticket owner: %s" % username)

# Get GitHub labels; we'll merge Trac components and other values into them
logging.info("Getting existing GitHub labels...")
labels = {}
# NOTE(review): the body of this loop is truncated in this chunk of the file.
for label in github.labels():
(options, args) = parser.parse_args() try: [trac_db_path, github_username, github_password, github_repo] = args except ValueError: parser.error('Wrong number of arguments') if not '/' in github_repo: parser.error('Repo must be specified like "organization/project"') if options.quiet: logging.basicConfig(level=logging.INFO) else: logging.basicConfig(level=logging.DEBUG) trac = Trac(trac_db_path) github = GitHub(github_username, github_password, github_repo) # Show the Trac usernames assigned to tickets as an FYI logging.info("Getting Trac ticket owners (will NOT be mapped to GitHub username)...") for (username,) in trac.sql('SELECT DISTINCT owner FROM ticket'): if username: username = username.strip() # username returned is tuple like: ('phred',) logging.debug("Trac ticket owner: %s" % username) # Get GitHub labels; we'll merge Trac components into them logging.info("Getting existing GitHub labels...") labels = {} for label in github.labels():