def get_csv(file_name):
    token = ''  # GitHub TOKEN
    repo_path = 'CSSEGISandData/COVID-19'
    daily_reports_dir_path = 'https://github.com/CSSEGISandData/COVID-19/raw/master/csse_covid_19_data/csse_covid_19_daily_reports/'
    time_series_dir_path = 'https://github.com/CSSEGISandData/COVID-19/raw/master/csse_covid_19_data/csse_covid_19_time_series/'
    dr_repo_file_list = 'csse_covid_19_data/csse_covid_19_daily_reports'
    git = Github(token)
    repo = git.get_repo(repo_path)
    daily_reports_file_list = repo.get_contents(dr_repo_file_list)
    if file_name == 'daily_reports':
        daily_reports_file_path = daily_reports_dir_path + str(
            daily_reports_file_list[-2]).split('/')[-1].split(".")[0] + '.csv'
        req = requests.get(daily_reports_file_path)
        url_content = req.content
        csv_file = open('daily_report.csv', 'wb')
        csv_file.write(url_content)
        csv_file.close()
    else:
        confirmed_global_file_path = time_series_dir_path + 'time_series_covid19_confirmed_global.csv'
        deaths_global_file_path = time_series_dir_path + 'time_series_covid19_deaths_global.csv'
        recovered_global_file_path = time_series_dir_path + 'time_series_covid19_recovered_global.csv'
        return confirmed_global_file_path, deaths_global_file_path, recovered_global_file_path
def find_my_contributions(cls, account):
    """Find and save the user contributions.

    A contribution is a merged Pull Request opened by the user.
    """
    connection = Github(login_or_token=account.github_token)
    # Set query to find contributions
    qualifiers = {'is': 'merged', 'type': 'pr', 'author': account.user.username}
    # Search GH API for projects.
    issues = connection.search_issues(
        query='', sort='updated', order='desc', **qualifiers)
    i = 0
    issues_page = issues.get_page(i)
    while issues_page:
        for issue in issues_page:
            attributes = {'account': account,
                          'title': issue.title,
                          'url': issue.html_url,
                          'repo': issue.repository.name,
                          'repo_url': issue.repository.html_url,
                          'merged': issue.updated_at}
            if not Contribution.objects.filter(**attributes):
                contribution = Contribution(**attributes)
                contribution.save()
        i += 1
        issues_page = issues.get_page(i)
def run(self):
    if self.GITHUB_TOKEN is None:
        logging.critical('No github OAuth token defined in the GITHUB_TOKEN env variable')
        sys.exit(1)
    if self.SSH_PKEY is None:
        logging.critical('SSH_KEY not configured, please set it to your private SSH key file')
        sys.exit(1)
    github = Github(self.GITHUB_TOKEN)
    ssh = SSHClient()
    ssh.load_system_host_keys()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    self.SSH_PKEY = os.path.expanduser(self.SSH_PKEY)
    orga = github.get_organization('nuxeo')  # type: Organization
    repo = orga.get_repo('nuxeo.com')  # type: Repository
    opened_pulls = [('/var/www/nuxeo.com/pr-%d.' % pull.number) + self.PREVIEW_DOMAIN
                    for pull in repo.get_pulls()]
    try:
        proxy = ProxyCommand(('ssh -i %s -W 10.10.0.63:22 ' % self.SSH_PKEY) + self.BASTION_IP)
        ssh.connect('10.10.0.63', username='******', sock=proxy, key_filename=self.SSH_PKEY)
        _, stdout, _ = ssh.exec_command('ls -d /var/www/nuxeo.com/pr-*')
        # Remove preview directories that no longer have an open pull request.
        [ssh.exec_command('rm -rf ' + line.strip())
         for line in stdout.readlines() if line.strip() not in opened_pulls]
        ssh.close()
    except SSHException as e:
        logging.critical('Could not work on remote: %s', e)
        sys.exit(1)
def run(self):
    if self.GITHUB_TOKEN is None:
        logging.critical('No github OAuth token defined in the GITHUB_TOKEN env variable')
        sys.exit(1)
    github = Github(self.GITHUB_TOKEN)
    qualifiers = {
        'query': '',
        'in': 'file',
        'org': 'nuxeo',
        'filename': 'README.md'
    }
    logging.info('Qualifiers: %s', qualifiers)
    search = github.search_code(**qualifiers)  # type: PaginatedList
    logging.info('Number of results: %d', search.totalCount)
    for result in search:  # type: ContentFile
        try:
            logging.info('Updating nuxeo/%s', result.repository.name)
            new_content = result.decoded_content
            commit = result.repository.update_file(
                '/' + result.path, 'NXBT-1198: Remove Company name',
                new_content, result.sha)['commit']
            logging.info('Updated nuxeo/%s: %s', result.repository.name, commit.html_url)
        except Exception as e:
            logging.warn('Failed updating nuxeo/%s: %s', result.repository.name, e)
def get_kaggle_credential_from_secret_repository(
        repository: str, location: str, gh_token: str) -> KaggleCredential:
    gh = Github(gh_token)
    cred_repo = gh.get_repo(repository)
    kaggle_cred_res = cred_repo.get_contents(location)
    kaggle_cred = json.loads(kaggle_cred_res.decoded_content)
    if 'key' not in kaggle_cred or 'username' not in kaggle_cred:
        raise RuntimeError('Cannot find username or key in kaggle.json file.')
    kaggle_username = kaggle_cred['username']
    kaggle_key = kaggle_cred['key']
    ret_cred = KaggleCredential(kaggle_username, kaggle_key)
    return ret_cred
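A minimal usage sketch for the credential helper above, assuming KaggleCredential exposes username and key attributes; the repository name and file path are hypothetical placeholders, not values from the original snippet.

import os

# Hypothetical example: fetch the Kaggle credential from a private repository
# and export it through the environment variables the Kaggle CLI reads.
cred = get_kaggle_credential_from_secret_repository(
    repository='my-org/secrets',        # placeholder private repository
    location='kaggle/kaggle.json',      # placeholder path inside the repository
    gh_token=os.environ['GITHUB_TOKEN'],
)
os.environ['KAGGLE_USERNAME'] = cred.username  # assumed attribute name
os.environ['KAGGLE_KEY'] = cred.key            # assumed attribute name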
def try_auth(self) -> Optional[Github]:
    """Create Github client object and attempt hitting API with token"""
    token = self.config.get("token")
    if token:
        try:
            gh_client = Github(token)
            _ = gh_client.get_rate_limit()
            return gh_client
        except BadCredentialsException:
            return None
def main() -> None:
    try:
        github_token: str = os.environ[ENV_GITHUB_TOKEN]
    except KeyError:
        print(f'Environment variable {ENV_GITHUB_TOKEN} must be defined.')
        sys.exit(1)
    gh = Github(login_or_token=github_token)
    GoPRMaker(gh).run()
def get_github(repo):
    login_or_token, password = None, None
    if repo.is_private:
        message = "This environment references a private repository: {}".format(
            repo.url)
        login_or_token, password = get_github_credentials(message, force=True)
        if not login_or_token and not password:
            raise EnvironmentException("Github credentials required")
    return Github(login_or_token=login_or_token, password=password)
def get_github(repo):
    token = None
    if repo.is_private:
        message = "This environment references a private repository: {}".format(
            repo.url)
        token = get_github_token(message, required=True)
        if not token:
            raise EnvironmentException("Github credentials required")
    return Github(login_or_token=token)
def github(monkeypatch, github_user, github_organization):
    target = Github(login_or_token='_test_user_', password='******', user_agent='MedusaTests')
    monkeypatch.setattr(target, 'get_user', lambda *args, **kwargs: github_user)
    monkeypatch.setattr(target, 'get_organization', lambda login: github_organization)
    return target
def repo_details(request, login, repo):
    """
    :param request: A url request.
    :param login: An argument from a url
    :param repo: An argument from a url
    :return: If the login and the repository exist, returns their statistics and adds
        the login and the name of the repository to 'visited' cookies.
        If there is no such login and/or repository, returns a 404 error message.
        If the GitHub API rate limit is exceeded, returns a 403 error message.
    """
    try:
        repository = Github().get_user(login).get_repo(repo)
        session = request.session
        owner = repository.owner.login
        name = repository.name
        if 'visited' in session and session['visited'] is not None:
            if '%s/%s' % (owner, name) not in session['visited']:
                session['visited'] += '---%s/%s' % (owner, name)
        else:
            session['visited'] = '%s/%s' % (owner, name)
        now = datetime.now().time().strftime('%H:%M:%S').split(':')
        # The day's number of seconds minus the number of seconds
        # elapsed since the start of the current day.
        seconds_to_midnight = 86400 - (int(now[0]) * 3600 + int(now[1]) * 60 + int(now[2]))
        # Setting up how much time the cookies file will exist.
        session.set_expiry(seconds_to_midnight)
        week = week_statistics(repository)
        month = month_statistics(repository)
        return render(request, 'statistics_app/repo_details.html',
                      {'repo': repository,
                       'week_commits': week[0],
                       'week_contributors': week[1],
                       'week_days_names': week[2],
                       'month_commits': month[0],
                       'month_contributors': month[1],
                       'month_intervals': month[2],
                       'rate_limit': False,
                       'not_found': False})
    except (UnknownObjectException, UnicodeEncodeError):
        return render(request, 'statistics_app/repo_details.html',
                      {'not_found': True, 'rate_limit': False})
    except GithubException:
        return render(request, 'statistics_app/repo_details.html',
                      {'rate_limit': True, 'not_found': False})
def __init__(self):
    # static basedir for github action container
    self.config['base_dir'] = '/github/workspace/'
    # config map
    config = {
        'arg_tag': 'INPUT_TAG',
        'arg_tag_pattern': 'INPUT_TAG_PATTERN',
        'arg_release_name': 'INPUT_RELEASE_NAME',
        'arg_release_desc': 'INPUT_RELEASE_DESCRIPTION',
        'arg_prerelease': 'INPUT_PRERELEASE',
        'arg_assets': 'INPUT_ASSETS',
        'arg_auto_increment': 'INPUT_AUTO_INCREMENT',
        'arg_repo_name': 'INPUT_REPO_NAME',
        'arg_get_last_tag': 'INPUT_GET_LAST_TAG',
        'base_dir': 'INPUT_BASE_DIR'
    }
    for c_key, c_value in config.items():
        value = os.environ.get(c_value)
        if not self.is_empty(value):
            self.config[c_key] = value
    # validate required config
    if (not self.validate_config("tag", self.get_config('arg_tag'))
            and not self.validate_config("assets", self.get_config('arg_assets'))
            and not self.validate_config("repo name", self.get_config('arg_repo_name'))):
        self.workflow("error", "Missing required fields")
        exit(1)
    # make sure tag pattern was supplied
    if self.validate_config("tag pattern", self.get_config('arg_tag_pattern')):
        try:
            re_pattern = self.get_config('arg_tag_pattern')
            re.compile(f'{re_pattern}')
        except re.error:
            self.workflow("error", "invalid tag pattern regex")
            exit(1)
    # validate the assets
    self.validate_assets(self.get_config('arg_assets'))
    # connect to github
    self.gh = Github(os.environ.get('INPUT_TOKEN'))
    # run app
    self.run()
def find_my_projects(cls, account, type):
    """Find projects for a user, based on her/his needs: to practice or to learn.

    Save the data in the database.
    """
    connection = Github(login_or_token=account.github_token)
    # Find user Skills
    repos = connection.get_user().get_repos()
    cls.find_account_skills(account, repos)
    # Search for projects based on what skills the user wants to practice
    wanted_skills = cls.get_account_wanted_skills(account, type)
    # If the user doesn't have any skills and she/he wants to practice,
    # return none, let him start a project on his own / learn.
    if len(wanted_skills) == 0 and type == ProjectTypes.PRACTICE:
        return
    # We need the names for the GH query.
    wanted_languages = [skill.name.lower() for skill in wanted_skills]
    # Set query to find an active project
    stars = '%d..%d' % (MIN_STARS, MAX_STARS)
    forks = '%d..%d' % (MIN_FORKS, MAX_FORKS)
    pushed = '>%s' % get_last_month()
    qualifiers = {'stars': stars, 'forks': forks, 'pushed': pushed}
    # Search GH API for projects per each language.
    repos = []
    for wanted_language in wanted_languages:
        query = 'language:%s' % wanted_language
        repos_per_lang = connection.search_repositories(
            query=query, sort='stars', order='desc', **qualifiers)
        repos.extend(repos_per_lang.get_page(0)[:MAX_PROJECTS])
    # Save the projects which are a match for the user.
    cls.find_account_projects(account, repos, type)
def run(self):
    if self.GITHUB_TOKEN is None:
        logging.critical('No github OAuth token defined in the GITHUB_TOKEN env variable')
        sys.exit(1)
    github = Github(self.GITHUB_TOKEN)
    captain_hooks = CaptainHooksClient(self.GITHUB_TOKEN)
    logging.info('Fetching organization %s', self.ORGANIZATION)
    orga = github.get_organization(self.ORGANIZATION)  # type: Organization
    logging.info('Fetching organization repositories')
    for repo in orga.get_repos():  # type: Repository
        # Log a backup of each existing hook before updating the repository.
        for hook in repo.get_hooks():  # type: Hook
            logging.info('%s/%s:backup: ' + json.dumps(hook.raw_data), orga.login, repo.name)
        logging.info('%s/%s:updating', orga.login, repo.name)
        try:
            captain_hooks.setup_webhooks(orga.login, repo.name, {
                'absent': [
                    {'url': 'http://qapreprod.in.nuxeo.com/jenkins/github-webhook/'},
                    {'url': 'https://qa.nuxeo.org/githooks/send-email'},
                    # {'url': 'https://app.review.ninja/github/webhook'},
                ],
                'present': [
                    {
                        'name': 'web',
                        'config': {
                            'content_type': 'json',
                            'url': 'https://hooks.nuxeo.org/hook/'
                        },
                        'events': ['push'],
                        'active': True
                    },
                ]
            })
            logging.info('%s/%s:done', orga.login, repo.name)
        except CaptainHooksClientException as e:
            logging.warn('%s/%s:failed: %s', orga.login, repo.name, e)
def main() -> None:
    parser = ArgumentParser()
    parser.add_argument('--update-version-only', type=bool, default=False,
                        help='Exit after updating the GO_VERSION file')
    args: Namespace = parser.parse_args()
    try:
        github_token: str = os.environ[ENV_GITHUB_TOKEN]
    except KeyError:
        print(f'Environment variable {ENV_GITHUB_TOKEN} must be defined.')
        sys.exit(1)
    gh = Github(login_or_token=github_token)
    GoPRMaker(gh).run(args.update_version_only)
def main() -> None:
    """
    The entrypoint for running the PR-maker.
    """
    parser = ArgumentParser()
    parser.add_argument('--update-version-only', action="store_true",
                        help='Exit after updating the GO_VERSION file')
    args: Namespace = parser.parse_args()
    try:
        github_token = os.environ[ENV_GITHUB_TOKEN]
    except KeyError:
        print(f'Environment variable {ENV_GITHUB_TOKEN} must be defined.')
        sys.exit(1)
    gh_api = Github(login_or_token=github_token)
    GoPRMaker(gh_api).run(args.update_version_only)
def create_pr(self, latest_go_version: str, commit_message: str, owner: str,
              source_branch_name: str, target_branch: str) -> None:
    """
    Creates the pull request to update the Go version.
    """
    prs = self.gh_api.search_issues(
        f'repo:{self.repo.full_name} is:pr is:open head:{source_branch_name}'
    )
    for list_item in prs:
        pull_request = self.repo.get_pull(list_item.number)
        if pull_request.head.ref != source_branch_name:
            continue
        print(
            f'Pull request for branch {source_branch_name} already exists:\n{pull_request.html_url}'
        )
        return
    milestone_url = self.get_go_milestone(latest_go_version)
    pr_body = _get_pr_body(latest_go_version, milestone_url)
    try:
        pr_github_token = getenv(ENV_PR_GITHUB_TOKEN)
        self.gh_api = Github(login_or_token=pr_github_token)
        self.repo = self.get_repo(getenv(ENV_GITHUB_REPOSITORY))
    except KeyError:
        print(
            f'Token in {ENV_PR_GITHUB_TOKEN} is invalid, creating the PR using the '
            f'{ENV_GITHUB_TOKEN} token')
        pass
    pull_request = self.repo.create_pull(
        title=commit_message,
        body=pr_body,
        head=f'{owner}:{source_branch_name}',
        base=target_branch,
        maintainer_can_modify=True,
    )
    try:
        go_version_label = self.repo.get_label('go version')
        pull_request.add_to_labels(go_version_label)
    except UnknownObjectException:
        print('Unable to find a label named "go version"', file=sys.stderr)
    print(f'Created pull request {pull_request.html_url}')
# getting the right logo link
# need github api token for this

import pandas as pd
import os

desired_width = 320
pd.set_option('display.width', desired_width)
pd.set_option('display.max_columns', 10)

# making path for loading
currPath = os.getcwd()
inPath = os.path.join(currPath, "output")

# import from github -- need token
from github.MainClass import Github
g = Github('PUT_YOUR_TOKEN_HERE')
repo = g.get_repo("Transcranial-Solutions/ICONProject")

# making proper logo web address & name using logo
file_list = repo.get_contents("vote_analysis/output/logos")
split_file_list = lambda x: str(x).split('"')[1]
file_list = list(map(split_file_list, file_list))

split_logo_list = lambda x: str(x).split('/')[-1].split('.')[0]
logo_list = list(map(split_logo_list, file_list))

github_dir_path = 'https://raw.githubusercontent.com/Transcranial-Solutions/ICONProject/master/'
web_list = [github_dir_path + s for s in file_list]

# dataframe - name by logo and web address
import os
from dataclasses import dataclass, field
from time import sleep
from typing import Iterator, List, Optional, Tuple, Iterable, Dict

from github import Repository, PullRequest, Issue, PullRequestReview
from github.AuthenticatedUser import AuthenticatedUser
from github.MainClass import Github
from plumbum import cli, local, FG, ProcessExecutionError
from plumbum.cli import switch
import plumbum.colors as colors

GITHUB_TOKEN = os.environ["GITHUB_TOKEN"]
if not GITHUB_TOKEN:
    raise Exception(
        "GitHub token not specified in environment. Please set GITHUB_TOKEN")

github: Github = Github(GITHUB_TOKEN)

git = local["git"]
bash = local["bash"]

verbose = colors.dim
branch_color = colors.green
sha_color = colors.yellow


class RemoteRepo:
    def __init__(self, repo: Repository):
        self._repo = repo

    def get_user_prs(self, user: AuthenticatedUser) -> List['PrInfo']:
        issues: Iterable[Issue] = github.search_issues(
def __login(self):
    if hasattr(self.__config, "git_token") and self.__config.git_token:
        self.__github = Github(self.__config.git_token)
    else:
        self.__github = Github(self.__config.git_username, self.__config.git_password)
try:
    from flask_minify import minify
except ModuleNotFoundError:
    minify_inited = False

# Init flask
app = Flask(__name__, template_folder="game")
if compress_inited:
    Compress(app)
if minify_inited:
    minify(app=app, js=False, caching_limit=0)
import before_after_req  # noqa: F401,E402

# Init github
if os.getenv("GITHUB_VERSION_PAT") is not None:
    github_instance = Github(os.getenv("GITHUB_VERSION_PAT"))
else:
    github_instance = Github()
rep = github_instance.get_repo("KTibow/scorecard")


def sleep(timefor):
    """
    Sleep, but catch interrupts and whatever.

    Args:
        timefor: The amount of time to sleep in seconds.
    """
    for _index in range(round(timefor * 16.0)):
        time.sleep(1 / 16)
def github(self):
    return Github(login_or_token=self.github_api_key)
from github.GithubException import GithubException
from github.MainClass import Github
from github.NamedUser import NamedUser as User
from github.Organization import Organization as Org
from github.Team import Team

from autobot import ORG_NAME
from autobot.concepts import Group

# GH_API_KEY = os.environ["GITHUB_API_KEY"]
_GH_API_KEY = "ca9c8efd282ec1bc79dd10e9a84d2883f0104a51"

_api: Github = Github(_GH_API_KEY, api_preview=True)
_org: Org = _api.get_organization(ORG_NAME)
_team: Team = None
_coord_team: Team = None

_gen_err = "There's something wrong surrounding the GitHub API."


# region Actual calls to the GitHub API
def get_github_user(user: str) -> User:
    return _api.get_user(user)


def create_semester_team(grp: Group) -> int:
    # TODO: Enable nested teams (waiting on PyGitHub to implement this)
    global _team
    _team = _org.create_team(grp.sem.short, privacy="public")
    return _team.id
def init(args: Namespace) -> tuple:
    """ Init some standard vars """
    g: Github = Github(args.organisation_token)
    org: Organization = g.get_organization(args.organisation_slug)
    team: Team = org.get_team_by_slug(args.team_slug)
    return g, org, team
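A hedged example call for init() above; the attribute names mirror what the function reads from the Namespace, and all values are placeholders rather than anything from the original snippet.

from argparse import Namespace

# Hypothetical invocation: build a Namespace with the fields init() expects.
args = Namespace(
    organisation_token='<token>',   # placeholder token
    organisation_slug='my-org',     # placeholder organization slug
    team_slug='my-team',            # placeholder team slug
)
g, org, team = init(args)
print(org.login, team.name)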
def read_from_github():
    # Input: no
    # Outputs: the main datasets ready to use
    # Function: reads the files from GitHub

    # Online Github path
    from github.MainClass import Github
    token = 'f1d4c930800cb3e782c1b61b86101c97deefc4c7'
    # connect to GitHub using my credentials
    g = Github(token)
    # load the repo
    repo = g.get_repo("CSSEGISandData/COVID-19")
    file_list = repo.get_contents("csse_covid_19_data/csse_covid_19_time_series")
    github_dir_path = 'https://github.com/CSSEGISandData/COVID-19/raw/master/csse_covid_19_data/csse_covid_19_time_series/'

    file_path_confirmed_US = github_dir_path + str(file_list[-5]).split('/')[-1].split(".")[0] + '.csv'
    confirmed_US = pd.read_csv(file_path_confirmed_US, error_bad_lines=False)
    confirmed_US = confirmed_US.drop(['UID', 'iso2', 'iso3', 'code3', 'FIPS', 'Admin2', 'Combined_Key'], axis=1)
    confirmed_US = confirmed_US.rename(columns={'Province_State': 'Province/State',
                                                'Country_Region': 'Country/Region',
                                                'Long_': 'Long'})
    confirmed_info = confirmed_US.groupby(["Province/State"])['Lat', 'Long', 'Country/Region'].agg({
        'Lat': lambda x: x.mean(),
        'Long': lambda x: x.mean(),
        'Country/Region': lambda x: x.unique()
    })
    cases = confirmed_US.drop(columns=['Country/Region', 'Lat', 'Long']).set_index('Province/State')
    cases = cases.groupby(["Province/State"]).sum()
    confirmed_US = confirmed_info.merge(cases, left_index=True, right_index=True).reset_index()
    confirmed_US = confirmed_US[confirmed_US['Long'].isna() == False]
    confirmed_US = confirmed_US[confirmed_US['Lat'].isna() == False]
    confirmed_US = confirmed_US[confirmed_US['Long'] != 0]
    confirmed_US = confirmed_US[confirmed_US['Lat'] != 0]

    file_path_confirmed = github_dir_path + str(file_list[-4]).split('/')[-1].split(".")[0] + '.csv'
    confirmed = pd.read_csv(file_path_confirmed, error_bad_lines=False)
    confirmed = pd.concat([confirmed_US, confirmed])

    file_path_died_US = github_dir_path + str(file_list[-3]).split('/')[-1].split(".")[0] + '.csv'
    died_US = pd.read_csv(file_path_died_US, error_bad_lines=False)
    died_US = died_US.drop(['UID', 'iso2', 'iso3', 'code3', 'FIPS', 'Admin2', 'Combined_Key', 'Population'], axis=1)
    died_US = died_US.rename(columns={'Province_State': 'Province/State',
                                      'Country_Region': 'Country/Region',
                                      'Long_': 'Long'})
    died_info = died_US.groupby(["Province/State"])['Lat', 'Long', 'Country/Region'].agg({
        'Lat': lambda x: x.mean(),
        'Long': lambda x: x.mean(),
        'Country/Region': lambda x: x.unique()
    })
    deaths = died_US.drop(columns=['Country/Region', 'Lat', 'Long']).set_index('Province/State')
    deaths = deaths.groupby(["Province/State"]).sum()
    died_US = died_info.merge(deaths, left_index=True, right_index=True).reset_index()
    died_US = died_US[died_US['Long'].isna() == False]
    died_US = died_US[died_US['Lat'].isna() == False]
    died_US = died_US[died_US['Long'] != 0]
    died_US = died_US[died_US['Lat'] != 0]

    file_path_died = github_dir_path + str(file_list[-2]).split('/')[-1].split(".")[0] + '.csv'
    died = pd.read_csv(file_path_died, error_bad_lines=False)
    died = pd.concat([died_US, died])

    file_path_recovered = github_dir_path + str(file_list[-1]).split('/')[-1].split(".")[0] + '.csv'
    recovered = pd.read_csv(file_path_recovered, error_bad_lines=False)

    # preprocessing to add total column
    confirmed = preprocessing(confirmed)
    died = preprocessing(died)
    recovered = preprocessing(recovered)

    # demographics
    population_statistics = pd.read_csv("population_statistics.csv")

    return confirmed, died, recovered, population_statistics
import sys
import os

from github.MainClass import Github

foldername = str(sys.argv[1])
path = os.environ.get('mp')  # add projects directory to the env vars
token = os.environ.get('gt')  # add github token to the env vars
_dir = path + '/' + foldername

g = Github(token)
user = g.get_user()
login = user.login
repo = user.create_repo(foldername)

commands = [
    f'echo "# {repo.name}" >> README.md',
    'git init',
    f'git remote add origin https://github.com/{login}/{foldername}.git',
    'git add .',
    'git commit -m "Initial commit"',
    'git push -u origin master'
]

os.mkdir(_dir)
os.chdir(_dir)
for c in commands:
    os.system(c)

print(f'{foldername} created locally')
os.system('code .')
class GithubService:
    """Contains convenience methods and properties for Github-related functionality.

    An adapter to the functionality of the PyGithub library.
    """

    def __init__(self, access_token: str):
        """Constructor.

        :param str access_token: the access token to use for connecting
        """
        self.client = Github(login_or_token=access_token)

    @lru_cache(maxsize=None)
    def get_repo(self, repo_name: str) -> Repository:
        """Return the repository object with the given name.

        :param str repo_name: the full name of the repository
        :return: the repository or None if not found
        :rtype: github.Repository.Repository
        """
        return self.client.get_repo(repo_name)

    @lru_cache(maxsize=None)
    def get_pr(self, repo_name: str, pr_num: int):
        """Return the pull request object with the given number.

        :param str repo_name: the name of the repository the PR is in
        :param int pr_num: the identifier of the pull request
        :return: the pull request object
        :rtype: PullRequest
        """
        repo = self.get_repo(repo_name)
        if repo:
            return repo.get_pull(pr_num)

    def create_pr_comment(self, repo_name: str, pr_num: int, body: str) -> dict:
        """Create a comment on the pull request with the given info.

        :param str repo_name: the name of the repository the PR is in
        :param int pr_num: the identifier of the pull request
        :param str body: the body of the comment to add
        :return: a dictionary with information about the created comment
        :rtype: dict
        """
        pr = self.get_pr(repo_name, pr_num)
        issue = pr.as_issue()
        comment = issue.create_comment(body)
        return {'id': comment.id, 'html_url': comment.html_url}

    def get_pr_comments(self, repo_name: str, pr_num: int) -> List[dict]:
        """Return a list of comments on the PR with the given number.

        :param str repo_name: the name of the repository the PR is in
        :param int pr_num: the identifier of the pull request
        :return: a list of all comments, formatted as:
            [
                {'id': <comment_id>, 'body': <body>, 'updated_at': <updated_at>},
                ...
            ]
        :rtype: list
        """
        pr = self.get_pr(repo_name, pr_num)
        issue = pr.as_issue()
        comments = issue.get_comments()
        return [
            {'id': comment.id, 'body': comment.body, 'updated_at': comment.updated_at}
            for comment in comments
        ]

    def delete_pr_comment(self, repo_name: str, pr_num: int, comment_id: int) -> bool:
        """Delete the PR comment with the given id.

        :param str repo_name: the name of the repository the PR is in
        :param int pr_num: the identifier of the pull request
        :param int comment_id: the ID of the comment to delete
        :return: True if found and deleted successfully, False otherwise
        :rtype: bool
        """
        pr = self.get_pr(repo_name, pr_num)
        issue = pr.as_issue()
        comment = issue.get_comment(comment_id)
        if comment is None:
            return False
        comment.delete()
        return True
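A minimal usage sketch for the GithubService adapter above; the token environment variable, repository name, and PR number are hypothetical placeholders, not values from the original snippet.

import os

# Hypothetical usage: comment on a PR, list its comments, then delete the new one.
service = GithubService(access_token=os.environ['GITHUB_TOKEN'])
created = service.create_pr_comment('octocat/Hello-World', 42, 'Looks good to me!')
for comment in service.get_pr_comments('octocat/Hello-World', 42):
    print(comment['id'], comment['body'])
service.delete_pr_comment('octocat/Hello-World', 42, created['id'])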
class GitHub:
    def __init__(self, config: Config) -> None:
        self.__config: Config = config
        self.__authenticate_git_user()
        self.__initialize_repository_object()

    def __initialize_repository_object(self):
        self.__cache = GitHubCache()
        try:
            org = self.__github.get_organization(self.__config.git_repo_org)
            if self.__config.debug:
                log_debug("Organization found.")
        except UnknownObjectException:
            if self.__config.debug:
                log_debug("Organization not found. Try interpreting " + self.__config.git_repo_org + " as user...")
            org = self.__github.get_user(self.__config.git_repo_org)
            if self.__config.debug:
                log_debug("User found.")
        self.__repo: Repository = org.get_repo(self.__config.git_repo_name)

    # This script is responsible for the authentication of git user
    def __authenticate_git_user(self):
        while True:
            if not hasattr(self.__config, "two_factor_authentication"):
                self.__config.two_factor_authentication = prompt_yesno_question(
                    "Are you using two-factor authentication on GitHub?")
            if self.__config.two_factor_authentication:
                self.__config.git_token = getpass.getpass("> Please enter your token: ")
                while not self.__config.git_token:
                    self.__config.git_token = getpass.getpass("> Please enter your token: ")
            else:
                if not hasattr(self.__config, "git_username"):
                    self.__config.git_username = prompt_enter_value("your git user name")
                else:
                    log_info("The stored Github username is {}".format(self.__config.git_username))
                self.__config.git_password = getpass.getpass("> Please enter your password: ")
                while not self.__config.git_password:
                    self.__config.git_password = getpass.getpass("> Please enter your password: ")
            try:
                # NOTE: the login attempt below was redacted in the source and is
                # reconstructed here as a plain call to __login().
                self.__login()
                log_info("Authenticated.")
                break
            except BadCredentialsException:
                log_info("Authentication error, please try again.")
                continue

    def __login(self):
        if hasattr(self.__config, "git_token") and self.__config.git_token:
            self.__github = Github(self.__config.git_token)
        else:
            self.__github = Github(self.__config.git_username, self.__config.git_password)

    def find_issue(self, issue_number: int) -> Issue:
        '''Search for the Release issue to be used, if not found, exit'''
        # caching!
        if issue_number in self.__cache.issues:
            log_info("Issue with number " + str(issue_number) + " found.")
            return self.__cache.issues[issue_number]
        else:
            log_debug("Issue not found in cache, retrieving from GitHub...")
            try:
                self.__cache.issues.update({issue_number: self.__repo.get_issue(issue_number)})
                log_info("Issue with number " + str(issue_number) + " found.")
                return self.__cache.issues[issue_number]
            except UnknownObjectException:
                return None

    def create_issue(self, title: str, milestone: Milestone, body: str) -> int:
        '''Function creates an issue in GitHub with the title, milestone, body and labels passed'''
        if self.__config.dry_run:
            log_info_dry('Skipping creation of issue with title ' + str(title))
            return 0
        if self.__config.debug and not prompt_yesno_question(
                '[DEBUG] Would now create GitHub issue with title="' + title + '", milestone=' + str(
                    milestone) + '. Continue?'):
            sys.exit()

        log_info('Create GitHub issue with title "' + title + '"...')
        try:
            issue: Issue = self.__repo.create_issue(title=title, body=body, milestone=milestone,
                                                    labels=[self.__config.issue_label_name, "CI/CD"],
                                                    assignee=self.__github.get_user().login)
            self.__config.github_issue_no = issue.number
            self.__cache.issues.update({issue.number: issue})
            return self.__config.github_issue_no
        except GithubException as e:
            print(str(e))
            return 0

    def __request_milestone_list(self) -> PaginatedList:
        # caching!
        try:
            return self.__cache.milestones
        except AttributeError:
            log_debug("Milestones not found in cache, retrieving from GitHub...")
            try:
                milestones: PaginatedList = self.__repo.get_milestones(state="all")
                self.__cache.milestones = milestones
                return milestones
            except GithubException as e:
                log_error('Could not retrieve milestones')
                print(str(e))
                sys.exit()

    def find_release_milestone(self) -> Milestone:
        milestones: PaginatedList = self.__request_milestone_list()
        for milestone in milestones:
            milestone_title_in_git = milestone.title
            if self.__config.expected_milestone_name in milestone_title_in_git:
                return milestone
        return None

    def find_milestone(self, module: str, version: str) -> Milestone:
        milestones: PaginatedList = self.__request_milestone_list()
        search_title = self.__config.expected_raw_milestone_names.get(module) + version
        log_debug("Trying to search milestone: " + search_title)
        for milestone in milestones:
            if milestone.title == search_title:
                return milestone
        return None

    def create_next_release_milestone(self) -> Milestone:
        new_mile_title = self.__config.expected_milestone_name.replace(self.__config.release_version,
                                                                       self.__config.next_version)
        if self.__config.dry_run:
            log_info_dry("Would now create a new milestone with title '" + new_mile_title + "'.")
            return None
        log_info("Creating milestone '" + new_mile_title + "' for next release...")
        try:
            milestone: Milestone = self.__repo.create_milestone(title=new_mile_title, state="open")
            log_info("New milestone created!")
            return milestone
        except GithubException as e:
            log_info("Could not create milestone!")
            print(str(e))
            return None

    def create_release(self, closed_milestone: Milestone, core_version_in_eclipse_pom: str) -> GitRelease:
        if self.__config.dry_run:
            log_info_dry("Would create a new GitHub release")
            return None

        url_milestone = self.__config.github_closed_milestone_url(closed_milestone.number)
        release_title = self.__config.cobigenwiki_title_name + " v" + self.__config.release_version
        release_text = "[ChangeLog](" + url_milestone + ")"
        if "eclipse" in self.__config.branch_to_be_released and core_version_in_eclipse_pom:
            cobigen_core_milestone: Milestone = self.find_milestone("dev_core", core_version_in_eclipse_pom)
            if cobigen_core_milestone.state == "closed":
                core_url_milestone = self.__config.github_closed_milestone_url(cobigen_core_milestone.number)
                release_text = release_text + "\n also includes \n" + "[ChangeLog CobiGen Core](" + core_url_milestone + ")"
            else:
                log_info("Core version " + core_version_in_eclipse_pom +
                         " is not yet released. This should be released before releasing cobigen-eclipse")
                sys.exit()

        try:
            release: GitRelease = self.__repo.create_git_release(self.__config.tag_name, release_title, release_text,
                                                                 draft=False, prerelease=False,
                                                                 target_commitish="master")
            content_type = "application/java-archive"
            if self.__config.branch_to_be_released == self.__config.branch_eclipseplugin:
                content_type = "application/zip"
            for root, dirs, files in os.walk(
                    os.path.join(self.__config.build_folder_abs, self.__config.build_artifacts_root_search_path)):
                dirs[:] = [d for d in dirs if d not in
                           [".settings", "src", "repository", "repository-upload", "classes", "apidocs"]]
                for fname in files:
                    fpath = os.path.join(root, fname)
                    # To prevent uploading of unnecessary zip/jar files.
if (fname.endswith("jar") or fname.endswith("zip")) and self.__config.release_version in fname and 'nexus-staging' in fpath: log_info("Uploading file " + fname + " from " + fpath + " ...") try: asset: GitReleaseAsset = release.upload_asset(path=fpath, label=fname, content_type=content_type) log_info("Uploaded " + str(asset.size) + "kb!") except GithubException as e: log_error("Upload failed!") if self.__config.debug: print(str(e)) # workaround as of https://github.com/PyGithub/PyGithub/issues/779 self.__login() self.__initialize_repository_object() return release except GithubException as e: log_error("Could not create release.") print(str(e)) sys.exit()
def __init__(self, access_token: str):
    """Constructor.

    :param str access_token: the access token to use for connecting
    """
    self.client = Github(login_or_token=access_token)
def run():
    # Init some variables
    args = docopt(__doc__)
    ctx = Context()
    ctx.debugging = args["--debug"]

    # Check for dotenv file & load variables
    deliverit.dotenv.load(ctx)

    # read config file
    config_filepath = args["--config-file"] or (".deliverit.yaml" if Path(
        ".deliverit.yaml").is_file() else ".deliverit.yml")
    config = deliverit.config.load(config_filepath, cli_args=args, has_git_remote=has_git_remote())

    # Read manifest file to get some info
    # TODO: support multiple manifest files
    (
        ctx.old_version,
        ctx.package_name,
        ctx.repository_url,
    ) = deliverit.manifest_file.load(config.manifest_file)
    if ctx.old_version is None and config.tag_name is None:
        raise ConfigurationError("Please set either manifest_file or tag_name")
    ctx.old_version = ctx.old_version or get_current_version_from_git_tag(
        tag_template=config.tag_name, fallback_version=Version(0, 1, 0))
    ctx.package_name = ctx.package_name or config.package_name
    if ctx.package_name is None:
        raise ConfigurationError(
            "Could not detect the package name. Set it explicitly with package_name"
        )
    ctx.repository_url = ctx.repository_url or config.repository_url
    if ctx.repository_url is None:
        raise ConfigurationError(
            "Could not detect the github repository's URL. Set it explicitly with repository_url"
        )

    # Check if repository is hosted on github
    if not is_hosted_on_github(ctx.repository_url):
        raise NotImplementedError("Your repository is not hosted on github")

    # Get the repository name
    ctx.repository_owner, ctx.repository_name = split_repository_name(
        ctx.repository_url)
    ctx.repository_full_name = f"{ctx.repository_owner}/{ctx.repository_name}"

    # Compute new version
    ctx.version_bump = ("major" if args["major"] else
                        "minor" if args["minor"] else
                        "patch" if args["patch"] else None)
    if ctx.version_bump is None:
        raise ValueError("No version bump specified.")
    ctx.new_version = ctx.old_version.bump(ctx.version_bump)

    # Compute some configurable values
    version_tag = config.tag_name.format(new=ctx.new_version)

    # Log info
    print(f"""\
Releasing a new {em(ctx.version_bump)} version!
Upgrading package {em(ctx.package_name)} by {em(ctx.repository_owner)}
hosted at {em(ctx.repository_url)}
published on {em(config.registry)}
from version {em(ctx.old_version)}
to version {em(ctx.new_version)}
""")

    # Make the step function
    step = make_step_function(args, config)

    # Modify the changelog
    step(
        "update_changelog",
        "Update the changelog",
        lambda: deliverit.changelog.update(
            ctx,
            ctx.apply(config.changelog),
            config.tag_name.replace("{new}", "{t}"),
        ),
    )

    # Codemods
    for declaration in config.version_declarations:
        new_content = ctx.apply(declaration.replace)
        step(
            "update_code_version",
            f"Replace {declaration.search} with {new_content} in {ctx.apply(declaration.in_)}",
            lambda: deliverit.version_declaration.update(ctx, declaration),
        )

    # Bump version
    step(
        "bump_manifest_version",
        "Bump the poetry version",
        command=ctx.apply(config.steps.bump_manifest_version),
    )

    # Add all changes
    step(
        "git_add",
        "Add changes",
        command=(
            "git",
            "add",
            *[ctx.apply(f.in_) for f in config.version_declarations],
            config.changelog,
            config.manifest_file,
        ),
    )

    # Commit
    step(
        "git_commit",
        "Commit the version bump",
        command=("git", "commit", "-m", ctx.apply(config.commit_message)),
    )

    # Add tag to commit
    try:
        latest_commit_hash = get_latest_commit_hash()
    except CalledProcessError:
        print(red("Could not get the latest commit's hash"))
        exit(1)
    step(
        "git_tag",
        f"Add tag {version_tag} to commit {latest_commit_hash[:7]}",
        command=(
            "git",
            "tag",
            "-a",
            version_tag,
            latest_commit_hash,
            "-m",
            ctx.apply(config.commit_message),
        ),
    )

    # Push
    step("git_push", "Push changes", command=("git", "push"))

    # Push tags
    step(
        "git_push_tag",
        f"Push the tag {version_tag}",
        command=("git", "push", "origin", version_tag),
    )

    # Build
    step(
        "build_for_registry",
        "Build for registry",
        command=config.steps.build_for_registry,
    )

    # Publish
    step(
        "publish_to_registry",
        f"Publish to {config.registry}",
        command=config.steps.publish_to_registry,
    )

    # Start a Github API session
    gh = Github(getenv("GITHUB_TOKEN"))

    # Get the release notes
    release_notes = get_release_notes_for_version(
        ctx.new_version, Path(config.changelog).read_text("utf-8"))

    release = step(
        "create_github_release",
        "Create a GitHub release",
        lambda: create_github_release(
            ctx,
            gh,
            ctx.apply(config.tag_name),
            ctx.apply(config.release_title),
            message=release_notes,
        ),
    )
    step(
        "add_assets_to_github_release",
        "Upload assets to the Github release",
        lambda: upload_assets_to_release(
            ctx,
            release,
            assets=config.release_assets,
        ),
    )
    step(
        "close_milestone",
        "Close the milestone",
        lambda: close_milestone(ctx, gh, ctx.apply(config.milestone_title)),
    )
import csv
import requests
import os
import sys

from github.MainClass import Github
import pandas as pd

# BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# sys.path.insert(0, BASE_DIR)
from simple_prognozer.config import TOKEN

REPO_PATH = 'CSSEGISandData/COVID-19'
GIT = Github(TOKEN)
REPO = GIT.get_repo(REPO_PATH)
DR_REPO_FILE_LIST = 'csse_covid_19_data/csse_covid_19_daily_reports'
DAILY_REPORTS_DIR_PATH = ('https://github.com/CSSEGISandData/COVID-19/raw/'
                          'master/csse_covid_19_data'
                          '/csse_covid_19_daily_reports/'
                          )


def get_csv(file_name):
    print('Getting daily report csv...')
    daily_reports_file_list = REPO.get_contents(DR_REPO_FILE_LIST)
    if file_name == 'daily_reports':
        daily_reports_file_path = DAILY_REPORTS_DIR_PATH +\