def main(org_name):
    """Print watcher/star/fork counts for every repo in the GitHub org
    *org_name*, followed by column totals.
    """
    # Authenticate when a token is available; otherwise use anonymous access.
    # (The original built a throwaway anonymous client before this branch.)
    if "GITHUB_API_TOKEN" in os.environ:
        gh = GitHub(os.environ["GITHUB_API_TOKEN"])
    else:
        gh = GitHub()
    org = gh.get_organization(org_name)
    totals = {
        "watchers": 0,
        "stars": 0,
        "forks": 0,
    }
    print(f"{'repo':40} {'watch'} {'stars'} {'forks'}")
    for repo in org.get_repos():
        print(f"{repo.full_name:40}"
              f" {repo.watchers_count:5}"
              f" {repo.stargazers_count:5}"
              f" {repo.forks_count:5}")
        totals["watchers"] += repo.watchers_count
        totals["stars"] += repo.stargazers_count
        totals["forks"] += repo.forks_count
    print("\nTotals:")
    for key, value in totals.items():
        print(f"{key:8}: {value:4}")
def webhook_handler(payload, signature):
    """Respond to Travis webhook.

    Verifies the payload signature, then posts one GitHub comment per
    parsed job log. Returns a (body, status-code) tuple on error paths
    and ("OK", 200) on success.
    """
    travis = Travis()
    github = GitHub()
    # The payload comes in the request, but we need to make sure it is
    # really signed by Travis CI. If not, respond to this request with
    # an error.
    verified_payload = travis.get_verified_payload(payload, signature)
    error = verified_payload.get('error')
    if error:
        return error.get('message'), error.get('code')
    issue_number = int(verified_payload.get('pull_request_number'))
    logs = travis.get_logs(verified_payload)
    comments = parse_logs(logs)
    # Create a separate comment for every job.
    # Fixed: `.iteritems()` is Python 2-only; `.items()` works on 2 and 3.
    for title, comment in comments.items():
        try:
            github.post_comment(issue_number, comment, title)
        except requests.RequestException as err:
            logging.error(err.response.text)
            return err.response.text, 500
    return "OK", 200
def setUp(self):
    """Build GitHub/DB/config fixtures, but only outside Travis CI.

    On Travis the TRAVIS env var is set, so the fixtures (which hit the
    network via update_library_data) are skipped entirely.
    """
    # PEP 8: compare against None with `is`, not `==`.
    if os.environ.get('TRAVIS') is None:
        self.github = GitHub()
        self.db = DBConnector()
        self.config = Config()
        self.github.update_library_data(self.config.github_user,
                                        self.config.github_repos[0])
        self.filename = "./csv/{}.csv".format(GitHubData.__tablename__)
def upload_to_github(file_path, nwjs_version):
    """Upload *file_path* as a zip asset onto the release draft for
    *nwjs_version*, creating the draft if it does not exist yet.
    """
    github = GitHub(auth_token())
    releases = github.repos(GITHUB_REPO).releases.get()
    release = create_or_get_release_draft(github, releases, nwjs_version)
    asset_name = os.path.basename(file_path)
    with open(file_path, 'rb') as payload:
        # NOTE(review): verify=False disables TLS certificate checking on
        # this upload — confirm this is intentional.
        github.repos(GITHUB_REPO).releases(release['id']).assets.post(
            params={'name': asset_name},
            headers={'Content-Type': 'application/zip'},
            data=payload,
            verify=False)
def test_tools_github():
    """Smoke-test the GitHub helpers: dump releases and branches per repo."""
    token = get_secret['CREDS1']['AUTH_TOKEN']
    client = GitHub(access_token=token)
    for repo_dict in get_repos(client):
        releases = get_releases(client, repo_dict)
        branches = get_branches(client, repo_dict)
        pprint(releases)
        print()
        pprint(branches)
def remote_repo() -> Repository:
    """Return the spectacles-ci/eye-exam repository.

    Network traffic is replayed from a recorded VCR cassette, with the
    Authorization header stripped from the recording.
    """
    token = os.environ.get("GITHUB_ACCESS_TOKEN")
    client = GitHub(token)
    cassette = vcr.use_cassette(
        "tests/cassettes/init_github.yaml",
        filter_headers=["Authorization"],
        decode_compressed_response=True,
    )
    with cassette:
        return client.get_repo("spectacles-ci/eye-exam")
def main():
    """For each pull-request number on the command line, print the
    `git pull` command that would fetch that PR's branch.
    """
    parser = OptionParser(usage="usage: %prog [options] [pull number]")
    parser.add_option("-g", "--github-user", dest="gituser",
                      type="string",
                      help="github user, if not supplied no auth is used",
                      metavar="USER")
    (options, args) = parser.parse_args()
    github = GitHub(options)
    for pullNumber in args:
        pull = github.pull("apache", "storm", pullNumber)
        # Parenthesized print of a single concatenated string works
        # identically under Python 2 and 3 (was a py2-only statement).
        print("git pull " + pull.from_repo() + " " + pull.from_branch())
def lambda_handler(event, context):
    """AWS Lambda entry point: for every JIRA ticket referenced by the
    GitHub event, update its status/assignee/fix-version and move it.
    """
    github = GitHub(event)
    fix_version = github.get_fix_version()
    ticket_numbers = github.get_ticket_numbers()
    assignee = get_assignee(github)
    for ticket_number in ticket_numbers:
        jira_ticket = Jira(ticket_number)
        jira_ticket.update_status(assignee=assignee, fix_version=fix_version)
        move_ticket(jira_ticket, github)
    return ""
def main():
    """Cross-reference open Storm pull requests with open STORM JIRAs and
    print three tab-separated reports (Python 2 script: `print` statements,
    .encode("UTF-8") on printed text).
    """
    parser = OptionParser(usage="usage: %prog [options]")
    parser.add_option("-g", "--github-user", dest="gituser", type="string",
                      help="github user, if not supplied no auth is used",
                      metavar="USER")
    (options, args) = parser.parse_args()
    jrepo = JiraRepo("https://issues.apache.org/jira/rest/api/2")
    github = GitHub(options)
    openPullRequests = github.openPulls("apache","incubator-storm")
    # JIRA keys look like STORM-<digits> anywhere in the PR title.
    stormJiraNumber = re.compile("STORM-[0-9]+")
    openJiras = jrepo.openJiras("STORM")
    jira2Pulls = {}       # JIRA key -> list of pulls referencing it
    pullWithoutJira = []  # pulls whose title contains no STORM-n key
    pullWithBadJira = []  # pulls referencing a JIRA that is not open
    for pull in openPullRequests:
        found = stormJiraNumber.search(pull.title())
        if found:
            jiraNum = found.group(0)
            if not (jiraNum in openJiras):
                pullWithBadJira.append(pull)
            else:
                if jira2Pulls.get(jiraNum) == None:
                    jira2Pulls[jiraNum] = []
                jira2Pulls[jiraNum].append(pull)
        else:
            pullWithoutJira.append(pull);
    now = datetime.utcnow()
    # Report 1: pull requests with no JIRA reference at all.
    print "Pull requests that need a JIRA:"
    print "Pull URL\tPull Title\tPull Age\tPull Update Age"
    for pull in pullWithoutJira:
        print ("%s\t%s\t%s\t%s"%(pull.html_url(), pull.title(),
               daydiff(now, pull.created_at()),
               daydiff(now, pull.updated_at()))).encode("UTF-8")
    # Report 2: pull requests whose referenced JIRA is closed or unknown.
    print "\nPull with bad or closed JIRA:"
    print "Pull URL\tPull Title\tPull Age\tPull Update Age"
    for pull in pullWithBadJira:
        print ("%s\t%s\t%s\t%s"%(pull.html_url(), pull.title(),
               daydiff(now, pull.created_at()),
               daydiff(now, pull.updated_at()))).encode("UTF-8")
    # Report 3: open JIRA -> pulls mapping, plus approximate vote detection
    # pulled from JIRA comments.
    print "\nOpen JIRA to Pull Requests and Possible Votes, vote detection is very approximate:"
    print "JIRA\tPull Requests\tJira Summary\tJIRA Age\tPull Age\tJIRA Update Age\tPull Update Age"
    print "\tComment Vote\tComment Author\tPull URL\tComment Age"
    for key, value in jira2Pulls.items():
        # Ages are computed from the first pull in the list (value[0]).
        print ("%s\t%s\t%s\t%s\t%s\t%s\t%s"%(key, mstr(value),
               openJiras[key].getSummary(),
               daydiff(now, openJiras[key].getCreated()),
               daydiff(now, value[0].created_at()),
               daydiff(now, openJiras[key].getUpdated()),
               daydiff(now, value[0].updated_at()))).encode("UTF-8")
        for comment in openJiras[key].getComments():
            #print comment.raw()
            if comment.hasVote():
                print (("\t%s\t%s\t%s\t%s")%(comment.getVote(),
                       comment.getAuthor(), comment.getPull(),
                       daydiff(now, comment.getCreated()))).encode("UTF-8")
async def cmd_gui(
    repo: Optional[str],
    exclude_check_run_names: Sequence[str],
    exclude_check_run_conclusions: Sequence[str],
):
    """Launch the GUI for *repo* (current directory's repo when None/empty)."""
    options = CommandOptions(
        exclude_check_run_names=exclude_check_run_names,
        exclude_check_run_conclusions=exclude_check_run_conclusions,
    )
    async with GitHub(repo_path=repo or "") as gh:
        await gui_main(gh, options)
def lp_layout_list(self, upstream=None):
    """List layouts.

    search_order : list layouts from upstream if mentioned,
                   otherwise list layouts from the core package.
    """
    if upstream is None:
        return list_files(self.base_path + "/inventory_layouts")
    g = GitHub(upstream)
    # (removed a dead `l_files = []` local that was never used here)
    return g.list_files("inventory_layouts")
def test_data(self):
    """Test the api output of github.py"""
    # Load the recorded expected payload.
    with open('tests/actual_data.json', 'r') as json_file:
        expected = json.load(json_file)
    gh = GitHub(owner='moby',
                repositories=['moby', 'toolkit', 'tool'],
                resources=['issues', 'commits', 'pull_requests'])
    actual = gh.read()
    # The 'data' section must match the fixture exactly.
    self.assertEqual(actual['data'], expected)
def cron():
    """Entry point for cron: create a DB session and kick off the CI platform."""
    from mod_ci.controllers import start_platform
    from run import config, log
    from database import create_session
    from github import GitHub

    log.info('Run the cron for kicking off CI platform(s).')
    # Create session.
    db = create_session(config['DATABASE_URI'])
    gh = GitHub(access_token=config['GITHUB_TOKEN'])
    repository = gh.repos(config['GITHUB_OWNER'])(config['GITHUB_REPOSITORY'])
    start_platform(db, repository)
def lp_topo_list(self, upstream=None):
    """List topologies.

    search_order : list topologies from upstream if mentioned,
                   otherwise list topologies from the current folder.
    """
    if upstream is None:
        return list_files(self.base_path + "/ex_topo")
    # Parenthesized print works under both Python 2 and 3
    # (was a py2-only bare print statement).
    print("getting from upstream")
    g = GitHub(upstream)
    # (removed a dead `t_files = []` local that was never used here)
    return g.list_files("ex_topo")
async def cmd_set_state(
    repo: str,
    environment: str,
    deployment_id: int,
    state: str,
    description: Optional[str],
):
    """Record a deployment status of *state* for *deployment_id* in *repo*."""
    new_state = DeploymentState[state]
    async with GitHub(repo_path=repo) as gh:
        await gh.create_deployment_status(
            deployment_id=deployment_id,
            state=new_state,
            environment=environment,
            description=description,
        )
def lp_topo_get(self, topo, upstream=None):
    """Fetch a topology file into ./topologies.

    search_order : get topologies from upstream if mentioned,
                   otherwise get topologies from the core package.
    # need to add checks for ./topologies
    """
    if upstream is None:
        get_file(self.base_path + "/ex_topo/" + topo, "./topologies/")
    else:
        g = GitHub(upstream)
        files = g.list_files("ex_topo")
        # On Python 3 filter() returns a lazy iterator, so the original
        # filter(...)[0] subscript raised TypeError; a list comprehension
        # behaves the same on Python 2 and 3.
        matches = [f for f in files if f['name'] == topo]
        link = matches[0]["download_url"]
        get_file(link, "./topologies", True)
        return link
def __init__(self):
    """Connect the bot to Zulip, subscribe to streams, and build one helper
    object per supported command.
    """
    # SECURITY(review): the Zulip API key is hard-coded in source; move it
    # to an environment variable or config file and rotate the exposed key.
    self.client = zulip.Client(site="https://technh.zulipchat.com/api/",
                               api_key="vkEQgQYDPUgAGmXaTXdMPsMwlkkgMfM5",
                               email="*****@*****.**")
    self.subscribe_all()
    # Command helpers, one per feature the bot answers to.
    self.hacknews = Hackernews()
    self.trans = Translate()
    self.movie= Movie()
    self.lyrics = Lyrics()
    self.holiday = Holiday()
    self.currency = Currency()
    self.cricket = Cricket()
    self.github = GitHub()
    self.chatbot = ChatBot(name="technehru")
    print("done init")
    # Keywords recognized as sub-commands inside incoming messages.
    self.subkeys = ["use", "help", "translate", "hackernews", "hn", "hotel",
                    "HN", "cricnews", "cricketnews", "movie", "currency",
                    "holiday", "lyrics", "github"]
def run():
    """Fetch GitHub trending data, regenerate the README, and archive it."""
    github = GitHub()
    # Trending repositories: today, past week, past month.
    daily, resp = github.get_trending_repository(Since.daily)
    weekly, resp = github.get_trending_repository(Since.weekly)
    monthly, resp = github.get_trending_repository(Since.monthly)
    # Latest data -> README.
    readme = generateReadme(daily, weekly, monthly)
    handleReadme(readme)
    # Archive a markdown snapshot.
    archiveMd = generateArchiveMd(daily, weekly, monthly)
    handleArchiveMd(archiveMd)
def lp_layout_get(self, layout, upstream=None):
    """Fetch a layout file into ./layouts.

    search_order : get layouts from upstream if mentioned,
                   otherwise get layouts from the core package.
    """
    if upstream is None:
        get_file(self.base_path + "/inventory_layouts/" + layout,
                 "./layouts/")
    else:
        g = GitHub(upstream)
        files = g.list_files("inventory_layouts")
        # On Python 3 filter() returns a lazy iterator, so the original
        # filter(...)[0] subscript raised TypeError; a list comprehension
        # behaves the same on Python 2 and 3.
        matches = [f for f in files if f['name'] == layout]
        link = matches[0]["download_url"]
        get_file(link, "./layouts", True)
        return link
def Run(self):
    """Refresh the local GitHub repository database and display the
    per-language aggregates.
    """
    github_user_name = 'ytyaru'
    os_user_name = getpass.getuser()
    device_name = '85f78c06-a96e-4020-ac36-9419b7e456db'
    path_db_base = 'mint/root/db/Account/GitHub'
    path_db_license = '/media/{0}/{1}/{2}/private/v0/GitHub.Licenses.sqlite3'.format(os_user_name, device_name, path_db_base)
    path_db_api = "/media/{0}/{1}/{2}/public/v0/GitHub.Apis.sqlite3".format(os_user_name, device_name, path_db_base)
    path_db_account = '/media/{0}/{1}/{2}/private/v0/GitHub.Accounts.sqlite3'.format(os_user_name, device_name, path_db_base)
    # path_db_repo = '/media/{0}/{1}/{2}/public/v0/GitHub.Repositories.{3}.sqlite3'.format(os_user_name, device_name, path_db_base, github_user_name)
    # Fixed: the original used placeholder '{3}' with a single format()
    # argument, which raises IndexError at runtime.
    path_db_repo = './GitHub.Repositories.{0}.sqlite3'.format(github_user_name)
    g = GitHub.GitHub(path_db_account, path_db_api, path_db_repo, github_user_name)
    res = g.db.update_local_db()
    # Fixed: `db_path_repo` was an undefined name; the local is path_db_repo.
    aggr = github.db.repositories.LanguagesAggregate.LanguagesAggregate(path_db_repo)
    aggr.show()
def __init__(self, config):
    """Read mailer/repository settings from *config* and set up services.

    `self.gh` is None when no GitHub token is configured.
    """
    self.myaddress = config.get('lilac', 'email')
    self.mymaster = config.get('lilac', 'master')
    self.repomail = config.get('repository', 'email')
    self.trim_ansi_codes = not config.getboolean(
        'smtp', 'use_ansi', fallback=False)
    self.repodir = Path(config.get('repository', 'repodir')).expanduser()
    self.ms = MailService(config)
    github_token = config.get('lilac', 'github_token', fallback=None)
    if github_token:
        # Reuse the token just read instead of querying the config again.
        self.gh = GitHub(github_token)
    else:
        self.gh = None
def setup_repos():
    """ Ensure we have a matching GitHub repo for every import project """
    github = GitHub(access_token=GITHUB_TOKEN, scope='user,repo')
    #current_repos = github.users(GITHUB_USERNAME).repos.get()
    existing = github.orgs(GITHUB_ORGANIZATION).repos.get()
    existing_names = {entry['name'] for entry in existing}
    for project in PROJECTS:
        target = GITHUB_REPO_NAME_FORMAT % (project['github_name'])
        if target in existing_names:
            continue
        github.orgs(GITHUB_ORGANIZATION).repos.post(
            name=target,
            description='Mirrored repository')  # FIXME
def main():
    """Build and print the complete JIRA/GitHub report for Apache Storm."""
    parser = OptionParser(usage="usage: %prog [options]")
    parser.add_option("-g", "--github-user", dest="gituser",
                      type="string",
                      help="github User, if not supplied no auth is used",
                      metavar="USER")
    (options, args) = parser.parse_args()
    jira_repo = JiraRepo("https://issues.apache.org/jira/rest/api/2")
    github_repo = GitHub(options)
    # Parenthesized print of a single %-formatted string works identically
    # under Python 2 and 3 (was a py2-only bare print statement).
    print("Report generated on: %s (GMT)" % (datetime.strftime(
        datetime.utcnow(), "%Y-%m-%d %H:%M:%S")))
    report_builder = CompleteReportBuilder(jira_repo, github_repo)
    report_builder.report.print_all()
def __init__(self, config: Dict[str, Any]) -> None:
    """Read mailer/repository settings from *config* and set up services."""
    lilac_cfg = config['lilac']
    repo_cfg = config['repository']
    self.myaddress = lilac_cfg['email']
    self.mymaster = lilac_cfg['master']
    self.logurl_template = lilac_cfg.get('logurl')
    self.repomail = repo_cfg['email']
    self.name = repo_cfg['name']
    self.trim_ansi_codes = not config['smtp']['use_ansi']
    self.repodir = Path(repo_cfg['repodir']).expanduser()
    self.ms = MailService(config)
    token = lilac_cfg.get('github_token')
    self.gh = GitHub(token) if token else None
    # to be filled by self.load_all_lilac_and_report()
    self.mods: LilacMods = {}
def __init__(self, config: configparser.ConfigParser):
    """Read mailer/repository settings from *config* and set up services.

    `self.gh` is None when no GitHub token is configured.
    """
    self.myaddress = config.get('lilac', 'email')
    self.mymaster = config.get('lilac', 'master')
    self.repomail = config.get('repository', 'email')
    self.name = config.get('repository', 'name')
    self.trim_ansi_codes = not config.getboolean(
        'smtp', 'use_ansi', fallback=False)
    self.repodir = Path(config.get('repository', 'repodir')).expanduser()
    self.ms = MailService(config)
    github_token = config.get('lilac', 'github_token', fallback=None)
    if github_token:
        # Reuse the token just read instead of querying the config again.
        self.gh = GitHub(github_token)
    else:
        self.gh = None
    self.mods: LilacMods = {}  # to be filled by self.load_all_lilac_and_report()
def fetch_repos(args, testing=False):
    """ Fetch the repositories and write the raw links into an output txt file """
    gh = GitHub(args.token, password=None, timeout=args.timeout,
                per_page=args.per_page)
    raw_base = "https://raw.githubusercontent.com"
    collected = set()
    # Repeat the search nb_search times, accumulating unique raw URLs.
    for _ in range(args.nb_search):
        batch = set()
        for shell_cls in __shells__:
            batch.update(shell_cls.get_repos(gh, testing))
        collected.update(to_raw_urls(batch, raw_base))
    with open(args.output, "w") as out:
        for url in collected:
            out.write(url + "\n")
def cron(testing=False):
    """Script to run from cron for Sampleplatform."""
    from mod_ci.controllers import start_platforms, kvm_processor, TestPlatform
    from flask import current_app
    from run import config, log
    from database import create_session
    from github import GitHub

    log.info('Run the cron for kicking off CI platform(s).')
    # Create session.
    db = create_session(config['DATABASE_URI'])
    gh = GitHub(access_token=config['GITHUB_TOKEN'])
    repository = gh.repos(config['GITHUB_OWNER'])(config['GITHUB_REPOSITORY'])
    if testing is True:
        # Test mode: run only the Linux KVM processor once.
        kvm_processor(current_app._get_current_object(), db,
                      config.get('KVM_LINUX_NAME', ''), TestPlatform.linux,
                      repository, None)
    else:
        start_platforms(db, repository)
def run(argv=None):
    """Main code body

    :returns None
    """
    known_args, extra = parse_command_line_args(argv)
    # Set parameters for the GitHub object.
    gh = GitHub(owner=known_args.owner,
                repositories=known_args.repositories.split(','),
                resources=known_args.resources.split(','))
    # iter(callable, sentinel) keeps calling gh.read() until it returns None.
    for data in iter(gh.read, None):
        write_to_file(data=data)
async def cmd_inspect(repo: str, deployment_id: int):
    """Print a table of every status recorded for *deployment_id* in *repo*."""
    class Columns(Enum):
        state = "state"
        environment = "environment"
        creator = "creator"
        created = "created"
        description = "description"

    tbl = {column: [] for column in Columns}
    async with GitHub(repo_path=repo) as gh:
        statuses = await gh.get_deployment_statuses(deployment_id)
        for status in statuses:
            tbl[Columns.created].append(status.created_at)
            tbl[Columns.state].append(color_state(status.state))
            tbl[Columns.environment].append(status.environment)
            tbl[Columns.creator].append(status.creator.login)
            tbl[Columns.description].append(status.description)
    rows = {key.value: data for key, data in tbl.items()}
    print(tabulate.tabulate(rows, headers="keys"))
def foo():
    """Parse a GitHub comment webhook and, when the HELPBOT keyword is
    present, enqueue a background job that processes the user's followings.
    """
    q = Queue(connection=conn)
    print('inicio processamento')
    token = os.getenv("TOKEN")
    data = json.loads(request.data)
    user = data['comment']['user']['login']
    comentary = data['comment']['body']
    # Positional parse of the comment body: chars [1:8] = bot mention,
    # [9:15] = search type, [15:] = search term.
    # NOTE(review): fragile — breaks if the comment format shifts by a char.
    bot = comentary[1:8]
    typesearch = comentary[9:15].strip()
    search = comentary[15:].lstrip()
    result = ""
    print(bot)
    if bot.upper() == "HELPBOT":
        # return "O Robô não foi citado no comentário, por favor cite o robô"
        github = GitHub(token)
        result = q.enqueue(github.process_user_followings, user,
                           typesearch, search)
        # text_return =
        # github.response_comment(user, text_return)
    # NOTE(review): when the bot is not mentioned, the empty-string default
    # is returned — confirm this placement matches the original intent.
    return result