def _refresh(self, force: bool = False) -> None:
    """Download star-name files for every Stellarium sky culture.

    Skips the download when a completed cache already exists on disk,
    unless *force* is true.
    """
    # A marker file left by a previous successful run means the cache is good.
    if not force and os.path.exists(self._path(self._COMPLETE_FILE)):
        return

    logging.info("Refreshing star name data")

    # Always rebuild from an empty cache directory.
    shutil.rmtree(self._dirname, ignore_errors=True)
    os.makedirs(self._dirname)

    cultures: List[str] = []
    repo = Github().get_repo("Stellarium/stellarium")

    # The skycultures directory contains all of our cultures.
    for culture in repo.get_contents("skycultures"):
        if culture.type != "dir":
            continue
        try:
            names_file = repo.get_contents(culture.path + "/star_names.fab")
        except GithubException:
            # Not every culture ships a star_names.fab; skip those.
            logging.warning(
                f"No star names found for culture {culture.name}")
            continue
        cultures.append(culture.name)
        with open(self._path(culture.name + ".fab"), "wb") as fab:
            fab.write(names_file.decoded_content)

    # Write the empty marker file to record a completed refresh.
    with open(self._path(self._COMPLETE_FILE), "wb") as marker:
        marker.write(b"")
    logging.info(
        f"Finished downloading star names for cultures: {cultures}")
def update_github(data: dict, path_: str, config: dict):
    """Overwrite the file at *path_* in the configured GitHub repository.

    Args:
        data: New file content. A dict is serialized to JSON; a string is
            written as-is (backward compatible with callers passing text).
        path_: Path of the file inside the repository.
        config: Must contain "ACCESS_TOKEN" and "REPO_NAME".
    """
    import json  # local import: serialization is only needed here

    repo = Github(config["ACCESS_TOKEN"]).get_repo(config["REPO_NAME"])
    json_file = repo.get_contents(path_)
    now: str = datetime.now().isoformat(" ", "seconds")
    commit_message = f"update {json_file.name} @ {now}"
    # Bug fix: PyGithub's update_file asserts its content argument is a
    # string, so a raw dict (as the annotation suggests) would crash here.
    content = data if isinstance(data, str) else json.dumps(data)
    repo.update_file(json_file.path, commit_message, content, json_file.sha)
    logger.info("updated %s @ %s", json_file.name, now)
def __init__(self):
    """Load the ELL model gallery, using a one-hour on-disk cache."""
    self.model = None
    self.cache_file = 'gallery.json'
    # Re-download only when the cache is missing or older than an hour.
    download = True
    if os.path.isfile(self.cache_file):
        # Fix: compare mtimes directly; the old roundtrip through
        # datetime.fromtimestamp(...).timestamp() was a no-op.
        age = time.time() - os.stat(self.cache_file).st_mtime
        if age < 3600:
            download = False  # use our cache then.
    if download:
        from github import Github
        repo = Github().get_organization('Microsoft').get_repo(
            'ELL-models')
        model_dirs = repo.get_contents('models/ILSVRC2012')
        # Raw string: \d in a plain literal is an invalid escape sequence
        # (DeprecationWarning on modern Pythons).
        modelre = re.compile(
            r'(?P<src>[a-z])_[A-Z](?P<size>\d+x\d+x\d+)(?P<arch>([A-Z]\d*)+)'
        )
        # Directory names that match the pattern describe a model.
        descriptions = [{
            'modelarch': match.group('src'),
            'size': match.group('size'),
            'layers': match.group('arch'),
            'model_name': match.string
        } for match in (modelre.match(d.name) for d in model_dirs)
            if match]
        self.save_list(descriptions)
    else:
        descriptions = self.load_list()
    # Group models by their input size for display.
    self.grouped = defaultdict(list)
    for desc in descriptions:
        self.grouped[desc['size']].append(desc)
def get_config_file(filename):
    """Fetch *filename* from the GitHub repo and return its decoded content.

    Relies on module-level ``username`` and ``given_repo`` globals. Returns
    an error string when the file or repository cannot be reached.
    """
    try:
        repo = Github().get_user(username).get_repo(given_repo)
        # get_contents().content is base64-encoded; decode to raw bytes.
        # The old per-extension if/else branches were identical (all the
        # YAML/JSON formatting code was commented out), so they are gone.
        return base64.b64decode(repo.get_contents(filename).content)
    except Exception:
        # Broad on purpose (narrowed from a bare ``except:``): any API,
        # network or decoding failure maps to the same user-facing message.
        return "Can not find the file or Repository"
def getYmlContent(environment):
    """Return the decoded content of ``<environment>-config.yml``.

    Uses the module-level ``user`` and ``repo`` globals. Returns the
    sentinel string "File not found!" on any failure (kept for caller
    compatibility).
    """
    try:
        git = Github().get_user(user).get_repo(repo)
        # Fetch the file once instead of twice (the old code issued two
        # identical API requests, one via the deprecated
        # get_file_contents) and decode using the reported encoding.
        blob = git.get_contents(environment + "-config.yml")
        return blob.content.decode(blob.encoding)
    except Exception:  # narrowed from a bare except
        return "File not found!"
def __init__(self):
    """Build the list of available ELL model files, grouped by input size."""
    self.model = None
    repo = Github().get_organization('Microsoft').get_repo('ELL-models')
    model_dirs = repo.get_contents('models/ILSVRC2012')
    # Skip shared "labels" files; everything else names a model.
    self.model_files = [
        d.name for d in model_dirs if d.name.lower().find('labels') == -1
    ]
    # Raw string: \d in a plain literal is an invalid escape sequence
    # (DeprecationWarning on modern Pythons).
    self.modelre = re.compile(
        r'(?P<src>[a-z])_[A-Z](?P<size>\d+x\d+x\d+)(?P<arch>([A-Z]\d*)+)')
    # Group parsed model descriptions by their input size.
    self.grouped = defaultdict(list)
    for desc in [self.parse_name(n) for n in self.model_files]:
        self.grouped[desc['size']].append(desc)
async def getPins(self, interval):
    # Periodically mirrors a Discord channel's pinned messages into
    # market.json in a GitHub repository, on the given cron schedule.
    print("getPins Running")
    guild = self.bot.get_guild(id=int(get_section("bot").get("guild")))
    channel = guild.get_channel(
        int(get_section("api").get("pinnedChannel")))
    await self.bot.wait_until_ready()
    cron = CronTab(interval)
    repo = Github(get_section("api").get("gitKey")).get_repo(
        get_section("api").get("gitRepo"))
    obj = []
    # NOTE(review): eurl/aurl start as empty strings but become lists only
    # for messages that have embeds/attachments; the string form leaks
    # into the payload for messages without them. Confirm this is intended.
    eurl = ""
    aurl = ""
    while True:
        # Scan recent channel history for pinned messages.
        async for msg in channel.history(limit=5000):
            if (msg.pinned):
                if msg.embeds:
                    eurl = []
                    eurl.clear()  # redundant: the list was just created empty
                    for embed in msg.embeds:
                        eurl.append(str(embed.url))
                if msg.attachments:
                    aurl = []
                    for attachment in msg.attachments:
                        aurl.append(str(attachment.url))
                tmp = {
                    "user": str(msg.author),
                    "message": str(msg.content),
                    "created": str(msg.created_at),
                    "avatar_url": str(msg.author.avatar_url),
                    "message_id": str(msg.id),
                    "embeds": str(eurl),
                    "attachments": aurl
                }
                obj.append(tmp)
                # Reset the per-message accumulators.
                eurl = ""
                aurl = ""
        file = repo.get_contents("market.json")
        # Compare base64 of the new payload with the stored blob (GitHub
        # wraps its base64 with newlines) to avoid no-op commits.
        encode = base64.b64encode(str(json.dumps(obj)).encode("utf-8"))
        if str(file.content).replace('\n', '') == str(encode, "utf-8"):
            print("Data is unchanged, no commit was made")
            await asyncio.sleep(cron.next(default_utc=True))
            obj.clear()
            tmp.clear()
        else:
            repo.update_file("market.json", str(datetime.datetime.now()),
                             json.dumps(obj), file.sha)
            print("Updated market.json")
            await asyncio.sleep(cron.next(default_utc=True))
            obj.clear()
            tmp.clear()
def displayfilecontent(filename):
    """Return the content of *filename* from the repo named in sys.argv[1],
    joined with <br/> for HTML display, or an error message."""
    # sys.argv[1] is expected to look like https://github.com/<user>/<repo>,
    # so index 3 is the username and index 4 the repository name.
    cli = (sys.argv[1]).split("/")
    repo_name = cli[4]
    username = cli[3]
    repo = Github().get_user(username).get_repo(repo_name)
    # Only config-style files may be displayed.
    extensions = '.yml', '.json'
    output = []
    if (filename != None):
        if filename.endswith(extensions):
            f = repo.get_contents(filename)
            # NOTE(review): decoded_content is bytes; joining with a str
            # separator works on Python 2 but raises TypeError on Python 3.
            output.append(f.decoded_content)
            return "<br/>".join(output)
        else:
            return "file cannot open! No access rights--**Please enter files with correct extension!!**"
    else:
        return "Page not found.Wrong URL Request(Error 404)"
def __init__(self):
    """Download the ELL model directory listing and group models by size."""
    self.model = None
    repo = Github().get_organization('Microsoft').get_repo('ELL-models')
    model_dirs = repo.get_contents('models/ILSVRC2012')
    # Raw string: \d in a plain literal is an invalid escape sequence
    # (DeprecationWarning on modern Pythons).
    modelre = re.compile(
        r'(?P<src>[a-z])_[A-Z](?P<size>\d+x\d+x\d+)(?P<arch>([A-Z]\d*)+)')
    # Directory names that match the pattern describe a model.
    descriptions = [{
        'modelarch': match.group('src'),
        'size': match.group('size'),
        'layers': match.group('arch'),
        'model_name': match.string
    } for match in (modelre.match(d.name) for d in model_dirs) if match]
    # Group models by their input size for display.
    self.grouped = defaultdict(list)
    for desc in descriptions:
        self.grouped[desc['size']].append(desc)
def on_github_push(event, context, dryrun=False):
    """Webhook handler: on a push to a watched branch, publish changed
    json_schema files and send notifications; always returns HTTP 200."""
    message = _process_event(event)
    ref = message["ref"]
    # Credentials live in the secrets-manager entry named by SECRET_NAME.
    secret_name = os.environ['SECRET_NAME']
    secrets = json.loads(get_secret(secret_name))
    access_token = get_access_token(secrets)
    service_account = get_service_account(secrets)
    if ref in BRANCH_REFS:
        repo_name = message["repository"]["full_name"]
        repo = Github(access_token).get_repo(repo_name)
        branch = repo.get_branch(ref)
        pusher = message["pusher"]["name"]
        notification_message = "Commit to " + ref + " detected on " + repo_name + " branch " + branch.name + " by " + \
            pusher
        print(notification_message)
        _send_notification(notification_message, context, dryrun)
        # versions.json tracks the published version number of each schema.
        server_path = 'json_schema'
        versions_file = repo.get_contents(server_path + "/versions.json",
                                          branch.name)
        version_numbers_str = base64.b64decode(
            versions_file.content).decode("utf-8")
        version_numbers = json.loads(version_numbers_str)
        # Walk the schema directory tree and publish anything that changed.
        result = _process_directory(repo, branch.name, server_path,
                                    server_path, version_numbers, context,
                                    dryrun)
        result_str = "\n".join(result)
        result_message = ""
        if len(result) == 0:
            result_message = result_message + "No schema changes published"
        else:
            result_message = result_message + "New schema changes published:\n" + result_str
            # NOTE(review): the sleep/notify pair is placed inside this
            # branch (only fire ingest when something was published) —
            # confirm against the original indentation.
            time.sleep(5)
            notify_ingest(branch.name, service_account)
        print(result_message)
        _send_notification(result_message, context, dryrun)
    else:
        # Pushes to non-watched refs are acknowledged with an empty result.
        result = []
    response = {"statusCode": 200, "body": {"created": json.dumps(result)}}
    return response
class cheatsheet:
    """Selects a random RStudio cheatsheet and exposes its PNG/PDF URLs,
    a share message, and a download helper."""

    repo_name = "rstudio/cheatsheets"
    raw_github = "https://raw.githubusercontent.com/rstudio/cheatsheets/master/"

    def __init__(self):
        logger.info("Initialising cheatsheet")
        logger.info("Looking up the full cheatsheet list")
        self.repo = Github().get_repo(self.repo_name)
        self.cheatsheets = self.repo.get_contents("pngs")
        logger.info("Randomly selecting a cheatsheet")
        self.n_sheets = len(self.cheatsheets)
        # Bug fix: randint's bounds are inclusive, so the old
        # randint(1, n - 1) could never select the first sheet.
        self.sheet_n = randint(0, self.n_sheets - 1)
        self.sheet = self.cheatsheets[self.sheet_n]
        logger.info("Extracting the name of the selected cheatsheet")
        # "pngs/<name>.png" -> "<name>" (removesuffix/removeprefix: 3.9+).
        self.path = self.sheet.path
        self.name = self.path.removesuffix(".png")
        self.name = self.name.removeprefix("pngs/")
        logger.info("Getting the paths related to the target cheatsheet")
        self.png = "".join([self.raw_github, self.path])
        self.pdf = "".join([self.raw_github, self.name, ".pdf"])

    def message(self):
        """Build the announcement text for the selected cheatsheet."""
        logger.info("Building a message with the cheatsheet information")
        message = [
            f"Today's #rstats cheatsheet: {self.name}",
            f"Download: {self.pdf}",
            "See more: https://www.rstudio.com/resources/cheatsheets/",
            "Contribute your own: https://github.com/rstudio/cheatsheets",
        ]
        message = " \n".join(message)
        return message

    def download(self, filename):
        """Download the cheatsheet PNG to *filename*.

        Raises SystemExit when the HTTP request fails.
        """
        logger.info("Requesting selected cheatsheet")
        try:
            request = requests.get(self.png)
        except requests.exceptions.RequestException as e:
            logger.error("Error downloading cheatsheet", exc_info=True)
            raise SystemExit(e)
        # Bug fix: the old log line had a dead placeholder ("(unknown)");
        # interpolate the actual destination path.
        logger.info(f"Downloading cheatsheet to {filename}")
        with open(filename, "wb") as image:
            image.write(request.content)
class GitTestRepo:
    """Thin helper around a PyGithub repository used to seed a file and
    pin the resulting commit with an annotated git tag."""

    def __init__(self, token, repo_name):
        self.token = token
        self.repo_name = repo_name
        self.repo = Github(self.token).get_repo(repo_name)

    def create_and_tag(self, author, filename, data_frame, tag_name,
                       branch_name):
        """Create *filename* from *data_frame* as CSV, push a commit on
        *branch_name*, and attach tag *tag_name* to that commit."""
        # Seed the file, then read it back from the branch we commit to.
        self.repo.create_file(filename, "initial creation",
                              data_frame.to_csv())
        contents = self.repo.get_contents(filename, ref=branch_name)
        text = contents.decoded_content.decode("utf-8")

        commit = push(author, self.repo, filename, "commit message", text,
                      branch_name)

        # An annotated tag is two objects: the tag itself plus the ref
        # that makes it visible.
        tag_obj = self.repo.create_git_tag(tag=str(tag_name),
                                           message="initial load",
                                           object=commit.sha,
                                           type="commit",
                                           tagger=author)
        print('Created new file {} with tag {}'.format(filename, tag_name))
        self.repo.create_git_ref('refs/tags/{}'.format(tag_obj.tag),
                                 tag_obj.sha)
class GitRepo():
    """ Extension of PyGithub with a couple of other helper methods. """

    def __init__(self, repo_name, credentials=None):
        """Retrieves a Repository by its fully qualified name. If
        credentials are passed they will be used."""
        # Pick the authentication mode first, then resolve the repository.
        if not credentials:
            client = Github()
        elif credentials.token:
            client = Github(credentials.token)
        else:
            client = Github(credentials.username, credentials.password)
        self._github = client.get_repo(repo_name)

    @property
    def github(self):
        """ Direct access to the underlying PyGithub object. """
        return self._github

    def get_file(self, filename):
        """Fetch and decode the file from the master branch. Note that
        GitHub's API only supports files up to 1MB in size."""
        contents = self._github.get_contents(filename)
        return contents.decoded_content.decode('utf-8')

    def modify_and_branch(self, base_branch, new_branch_name, commit_message,
                          filename, file_content):
        """Create a new branch from base_branch, makes changes to a file,
        and commits it to the new branch."""
        # Resolve the tip of the base branch, build a one-file tree on top
        # of it, wrap that in a commit, then publish the branch ref.
        base_sha = self._github.get_git_ref(
            'heads/{}'.format(base_branch)).object.sha
        base_tree = self._github.get_git_tree(base_sha)

        blob = InputGitTreeElement(filename, '100644', 'blob', file_content)
        new_tree = self._github.create_git_tree([blob], base_tree)
        parent_commit = self._github.get_git_commit(base_sha)
        new_commit = self._github.create_git_commit(commit_message, new_tree,
                                                    [parent_commit])
        self._github.create_git_ref(
            'refs/heads/{}'.format(new_branch_name), new_commit.sha)
def getDataFromGit(fileName):
    """Fetch *fileName* from the GitHub repo named in sys.argv[1].

    sys.argv[1] is expected to be a URL ending in <user>/<repo>. Returns
    the base64-decoded file content, or an error string on failure.
    """
    commandLineInput = ((sys.argv[1]).split("/"))
    # Reversed so the repo name is element 0 and the user element 1,
    # regardless of how many leading URL components there are.
    commandLineInput.reverse()
    repositoryName = commandLineInput[0]
    userName = commandLineInput[1]
    # print() calls work on both Python 2 and 3 (the old bare print
    # statements were Python-2 only).
    print(userName)
    print(repositoryName)
    try:
        repo = Github().get_user(userName).get_repo(repositoryName)
        output = ""
        try:
            # .content is base64-encoded; decode to the raw file content.
            output = base64.b64decode(repo.get_contents(fileName).content)
        except Exception:  # narrowed from a bare except
            output = ("cannot find the config file '" + fileName +
                      "' in the repository '" + repositoryName + "'")
    except Exception:  # narrowed from a bare except
        output = "Cannot find the github repository"
    return output
class PR:
    """Wrapper around a GitHub pull request that combines PyGithub calls
    with a raw requests call for the (preview) review-submission API."""

    def __init__(self, owner, repo, pr_number, token):
        self.owner = owner
        self.repo = repo
        self.pr_number = pr_number
        self.token = token
        # NOTE(review): self.repo first holds the repo *name* and is then
        # immediately overwritten with the Repository object.
        self.repo = Github(token).get_user(owner).get_repo(repo)
        self.pr = self.repo.get_pull(pr_number)
        # Review submission goes through the REST endpoint directly; the
        # preview media type in the Accept header enables it.
        self.merge_url = (
            "https://api.github.com/repos/{}/{}/pulls/{}/reviews".format(
                owner, repo, pr_number))
        self.merge_headers = {
            'Authorization': 'token {}'.format(token),
            'Accept': 'application/vnd.github.black-cat-preview+json'
        }

    def content(self, fpath, ref=None):
        """Return the raw bytes of *fpath* at *ref* (default branch when
        ref is None)."""
        if ref is None:
            ref = GithubObject.NotSet
        content = self.repo.get_contents(fpath, ref)
        # The API is expected to return base64-encoded blobs.
        assert (content.encoding == "base64")
        return base64.b64decode(content.content)

    def create_status(self,
                      commit_sha,
                      state,
                      target_url=None,
                      description=None,
                      context=None):
        """Set a commit status; None arguments are omitted via NotSet."""
        if target_url is None:
            target_url = GithubObject.NotSet
        if description is None:
            description = GithubObject.NotSet
        if context is None:
            context = GithubObject.NotSet
        self.repo.get_commit(commit_sha).create_status(state, target_url,
                                                       description, context)

    def base_sha(self):
        # SHA of the branch the PR merges into.
        return self.pr.base.sha

    def head_sha(self):
        # SHA of the PR's latest commit.
        return self.pr.head.sha

    def files(self):
        """Filenames of every file touched by the PR."""
        return [f.filename for f in self.pr.get_files()]

    def get_patches(self):
        """Map of filename -> unified-diff patch text for the PR."""
        return {f.filename: f.patch for f in self.pr.get_files()}

    def merge(self, commit_message=None):
        """Merge the PR; always returns 0."""
        if commit_message is None:
            commit_message = GithubObject.NotSet
        self.pr.merge(commit_message)
        return 0

    def review(self, event, body=None):
        """Submit a review (*event* e.g. APPROVE/REQUEST_CHANGES/COMMENT).
        Returns 0 on HTTP 200, 1 otherwise."""
        # NOTE(review): when body is None it is replaced with NotSet and
        # still sent in the JSON payload — confirm the API tolerates this.
        if body is None:
            body = GithubObject.NotSet
        data = {'event': event, 'body': body}
        r = requests.post(self.merge_url,
                          json=data,
                          headers=self.merge_headers)
        return 0 if r.status_code == requests.codes.ok else 1
def hello(variable):
    """Return the decoded content of file *variable* from the configured
    repository (module-level ``user`` and ``repo`` globals)."""
    git = Github().get_user(user).get_repo(repo)
    # Fetch the file once (the old code downloaded it twice: once via the
    # deprecated get_file_contents and again via get_contents) and decode
    # it using the encoding the API reports.
    contents = git.get_contents(variable)
    stream = contents.content.decode(contents.encoding)
    return stream
#!/usr/bin/env python3
"""Print the Android-Components version used by each Fenix release."""
import os, re


def parse_ac_version(src):
    """Parse the Android-Components version out of the AndroidComponents.kt file.

    Returns the quoted VERSION value, or None when no VERSION line exists.
    """
    if match := re.compile(r'VERSION = "([^"]*)"', re.MULTILINE).search(src):
        return match[1]


if __name__ == "__main__":
    # Imported lazily so parse_ac_version stays importable (and testable)
    # without PyGithub installed.
    from github import Github  # https://github.com/PyGithub/PyGithub

    repo = Github(os.getenv("GITHUB_ACCESS_TOKEN")).get_repo("mozilla-mobile/fenix")
    for release in repo.get_releases():
        try:
            content_file = repo.get_contents(
                "buildSrc/src/main/java/AndroidComponents.kt",
                ref=release.tag_name)
            if version := parse_ac_version(content_file.decoded_content.decode('utf8')):
                print(release.tag_name, "=>", version)
        except Exception as e:
            print(release.tag_name, "=>", "FAILED:", str(e))
class GithubStorageMethod(StorageMethod):
    """StorageMethod that reads matrices from a GitHub repository.

    NOTE(review): reads come from GitHub, but writes/history/listing go
    through the arctic-style ``self.store`` — confirm this asymmetry is
    intended (``self.store`` is not initialised in this class).
    """

    def __init__(self, token, repo_name):
        super().__init__("github")
        self.token = token
        self.repo_name = repo_name
        self.repo = Github(self.token).get_repo(repo_name)

    def acquireContent(self, path, params, version_id=None) -> AcquireContentReturnValue:
        """Read the CSV at *path* on the branch in params['branch'] and
        return it parsed into a DataFrame with a minimal header."""
        fetched_file = self.repo.get_contents(path, ref=params['branch'])
        header = MatrixHeader(
            name=path,
            revision_id=None,  # GitHub reads carry no revision here
            storage_method=self.name,
            path=path,
            memory_style=MemStyles.DATA_FRAME,
            description=None
        )
        csv = fetched_file.decoded_content.decode("utf-8")
        df = pd.read_csv(StringIO(csv))
        return AcquireContentReturnValue(content=df, header=header)

    def storeContent(self, path, params, content, revision_info) -> Revision:
        # Writes are delegated to the arctic store, keyed by library/ticker
        # derived from the path. (No Revision is actually returned.)
        library, ticker = self._lib_ticker(path)
        _store_content(self.store[library], ticker, content, revision_info)

    def history(self, matrix_url) -> List[Revision]:
        """Return the revision history recorded in the store's metadata
        for the symbol addressed by *matrix_url*."""
        library, ticker = self._lib_ticker(matrix_url.url_components.path)
        lib = self.store[library]
        meta = lib.read_metadata(ticker)
        logging.info("attempted to get history for : {},{} result = [{}]".format(library, ticker, meta))
        if meta.metadata is None:
            return []
        else:
            return get_revisions_from_metadata(meta.metadata)

    def list(self) -> List[MatrixHeader]:
        """Enumerate every symbol in every library of the store, reporting
        the highest non-deleted version as the revision id."""
        ret_val = []
        for this_lib_name in self.store.list_libraries():
            library = self.store[this_lib_name]
            for this_symbol in library.list_symbols():
                versions = library.list_versions(this_symbol)
                filtered = [version for version in versions if not version['deleted']]
                max_version = max(map(lambda v: v['version'], filtered))
                # Store symbols use dots where matrix paths use slashes.
                symbol_with_slashes = this_symbol.replace('.', '/')
                ret_val.append(MatrixHeader(name=symbol_with_slashes,
                                            description="don't know yet",
                                            storage_method=self.name,
                                            memory_style=MemStyles.DATA_FRAME,
                                            revision_id=str(max_version),
                                            path="{}/{}".format(this_lib_name, symbol_with_slashes)))
        return ret_val