def run():
    """Delete repos and/or teams from an org"""
    args = parse_args()
    codetools.setup_logging(args.debug)

    global g
    g = pygithub.login_github(token_path=args.token_path, token=args.token)

    codetools.validate_org(args.org)
    org = g.get_organization(args.org)

    # list of exceptions
    problems = []

    if args.delete_repos:
        problems += delete_all_repos(
            org,
            fail_fast=args.fail_fast,
            limit=args.delete_repos_limit,
            dry_run=args.dry_run
        )

    if args.delete_teams:
        problems += delete_all_teams(
            org,
            fail_fast=args.fail_fast,
            limit=args.delete_teams_limit,
            dry_run=args.dry_run
        )

    if problems:
        msg = "{n} errors removing repo(s)/team(s)".format(n=len(problems))
        raise codetools.DogpileError(problems, msg)

    info("Consider deleting your privileged auth token @ {path}".format(
        path=args.token_path))
def run():
    """Tag or untag repos owned by the candidate teams"""
    args = parse_args()
    codetools.setup_logging(args.debug)

    gh_org_name = args.org
    tags = args.tag

    # if email/user are not specified, try getting them from the gitconfig
    git_email = codetools.lookup_email(args)
    git_user = codetools.lookup_user(args)

    tagger = github.InputGitAuthor(
        git_user,
        git_email,
        codetools.current_timestamp()
    )
    debug(tagger)

    global g
    g = pygithub.login_github(token_path=args.token_path, token=args.token)

    org = g.get_organization(gh_org_name)
    info("tagging repos in org: {org}".format(org=org.login))

    tag_teams = get_candidate_teams(org, args.allow_team)
    target_repos = get_candidate_repos(tag_teams)

    problems = []

    # do not fail-fast on non-write operations
    problems += check_repos(
        target_repos,
        args.allow_team,
        args.deny_team,
        fail_fast=False,
    )

    # existing tags are always ignored (not an error) under --delete
    ignore_existing = True if args.delete else args.ignore_existing_tag

    # do not fail-fast on non-write operations
    present_tags, absent_tags, err = check_tags(
        target_repos,
        tags,
        ignore_existing=ignore_existing,
        fail_fast=False,
    )
    problems += err

    if problems:
        msg = "{n} repo(s) have errors".format(n=len(problems))
        raise codetools.DogpileError(problems, msg)

    if args.delete:
        untag_repos(present_tags, dry_run=args.dry_run)
    else:
        tag_repos(absent_tags, tagger=tagger, dry_run=args.dry_run)
def run():
    """Report the current GitHub API rate limit and its reset time"""
    args = parse_args()
    codetools.setup_logging(args.debug)

    global g
    g = pygithub.login_github(token_path=args.token_path, token=args.token)

    # Github.rate_limiting is a (remaining, limit) tuple;
    # rate_limiting_resettime is a unix timestamp
    info("github ratelimit: {rl}".format(rl=g.rate_limiting))
    reset = datetime.datetime.fromtimestamp(int(g.rate_limiting_resettime))
    info("github ratelimit reset: {time}".format(time=reset))
def run(): """List repos and teams""" args = parse_args() codetools.setup_logging(args.debug) global g g = pygithub.login_github(token_path=args.token_path, token=args.token) if not args.hide: args.hide = [] org = g.get_organization(args.organization) try: repos = list(org.get_repos()) except github.RateLimitExceededException: raise except github.GithubException as e: msg = 'error getting repos' raise pygithub.CaughtOrganizationError(org, e, msg) from None for r in repos: try: teamnames = [ t.name for t in r.get_teams() if t.name not in args.hide ] except github.RateLimitExceededException: raise except github.GithubException as e: msg = 'error getting teams' raise pygithub.CaughtRepositoryError(r, e, msg) from None maxt = args.maxt if (args.maxt is not None and args.maxt >= 0) else len(teamnames) if args.debug: print("MAXT=", maxt) if args.mint <= len(teamnames) <= maxt: print(r.name.ljust(40) + args.delimiter.join(teamnames))
def run(): """List repos and teams""" args = parse_args() codetools.setup_logging(args.debug) global g g = pygithub.login_github(token_path=args.token_path, token=args.token) if not args.hide: args.hide = [] org = g.get_organization(args.organization) try: repos = list(org.get_repos()) except github.RateLimitExceededException: raise except github.GithubException as e: msg = 'error getting repos' raise pygithub.CaughtOrganizationError(org, e, msg) from None for r in repos: try: teamnames = [t.name for t in r.get_teams() if t.name not in args.hide] except github.RateLimitExceededException: raise except github.GithubException as e: msg = 'error getting teams' raise pygithub.CaughtRepositoryError(r, e, msg) from None maxt = args.maxt if (args.maxt is not None and args.maxt >= 0) else len(teamnames) if args.debug: print("MAXT=", maxt) if args.mint <= len(teamnames) <= maxt: print(r.name.ljust(40) + args.delimiter.join(teamnames))
def run(): """Move the repos""" args = parse_args() codetools.setup_logging(args.debug) global g g = pygithub.login_github(token_path=args.token_path, token=args.token) org = g.get_organization(args.org) # only iterate over all teams once try: teams = list(org.get_teams()) except github.RateLimitExceededException: raise except github.GithubException as e: msg = 'error getting teams' raise pygithub.CaughtOrganizationError(org, e, msg) from None old_team = find_team(teams, args.oldteam) new_team = find_team(teams, args.newteam) move_me = args.repos debug(len(move_me), 'repos to be moved') added = [] removed = [] for name in move_me: try: r = org.get_repo(name) except github.RateLimitExceededException: raise except github.GithubException as e: msg = "error getting repo by name: {r}".format(r=name) raise pygithub.CaughtOrganizationError(org, e, msg) from None # Add team to the repo debug("Adding {repo} to '{team}' ...".format( repo=r.full_name, team=args.newteam )) if not args.dry_run: try: new_team.add_to_repos(r) added += r.full_name debug(' ok') except github.RateLimitExceededException: raise except github.GithubException as e: debug(' FAILED') if old_team.name in 'Owners': warn("Removing repo {repo} from team 'Owners' is not allowed" .format(repo=r.full_name)) debug("Removing {repo} from '{team}' ...".format( repo=r.full_name, team=args.oldteam )) if not args.dry_run: try: old_team.remove_from_repos(r) removed += r.full_name debug(' ok') except github.RateLimitExceededException: raise except github.GithubException as e: debug(' FAILED') info('Added:', added) info('Removed:', removed)
def run(): """Create the tag""" args = parse_args() codetools.setup_logging(args.debug) git_tag = args.tag # if email not specified, try getting it from the gitconfig git_email = codetools.lookup_email(args) # ditto for the name of the git user git_user = codetools.lookup_user(args) # The default eups tag is derived from the git tag, otherwise specified # with the --eups-tag option. The reason to currently do this is that for # weeklies and other internal builds, it's okay to eups publish the weekly # and git tag post-facto. However for official releases, we don't want to # publish until the git tag goes down, because we want to eups publish the # build that has the official versions in the eups ref. if not args.manifest_only: eups_tag = args.eups_tag if not eups_tag: # generate eups-style version eups_tag = eups.git_tag2eups_tag(git_tag) debug("using eups tag: {eups_tag}".format(eups_tag=eups_tag)) # sadly we need to "just" know this # XXX this can be parsed from the eups tag file post d_2018_05_08 manifest = args.manifest debug("using manifest: {manifest}".format(manifest=manifest)) if not args.manifest_only: # release from eups tag message_template = "Version {{git_tag}}"\ " release from {eups_tag}/{manifest}".format( eups_tag=eups_tag, manifest=manifest, ) else: # release from manifest only message_template = "Version {{git_tag}}"\ " release from manifest {manifest}".format( manifest=manifest, ) debug("using tag message: {msg}".format(msg=message_template)) tagger = github.InputGitAuthor( git_user, git_email, codetools.current_timestamp(), ) debug("using taggger: {tagger}".format(tagger=tagger)) global g g = pygithub.login_github(token_path=args.token_path, token=args.token) org = g.get_organization(args.org) info("tagging repos in org: {org}".format(org=org.login)) problems = [] manifest_products = versiondb.Manifest( manifest, base_url=args.versiondb_base_url).products if not args.manifest_only: # cross-reference eups tag version strings with manifest eups_products = eups.EupsTag( eups_tag, base_url=args.eupstag_base_url).products # do not fail-fast on non-write operations products, err = cross_reference_products( eups_products, manifest_products, ignore_manifest_versions=args.ignore_manifest_versions, fail_fast=False, ) problems += err else: # no eups tag; use manifest products without sanity check against eups # tag version strings products = manifest_products if args.limit: products = dict(itertools.islice(products.items(), args.limit)) # do not fail-fast on non-write operations products, err = get_repo_for_products( org=org, products=products, allow_teams=args.allow_team, ext_teams=args.external_team, deny_teams=args.deny_team, fail_fast=False, ) problems += err # do not fail-fast on non-write operations products_to_tag, err = check_product_tags( products, git_tag, tag_message_template=message_template, tagger=tagger, force_tag=args.force_tag, fail_fast=False, ignore_git_message=args.ignore_git_message, ignore_git_tagger=args.ignore_git_tagger, ) problems += err if args.verify: # in verify mode, it is an error if there are products that need to be # tagged. err = identify_products_missing_tags(products_to_tag) problems += err if problems: msg = "{n} pre-flight error(s)".format(n=len(problems)) raise codetools.DogpileError(problems, msg) tag_products( products_to_tag, fail_fast=args.fail_fast, dry_run=args.dry_run, )
def run():
    """Fork repos from one org into another"""
    args = parse_args()
    codetools.setup_logging(args.debug)

    global g
    g = pygithub.login_github(token_path=args.token_path, token=args.token)

    # protect destination org
    codetools.validate_org(args.dst_org)
    src_org = g.get_organization(args.src_org)
    dst_org = g.get_organization(args.dst_org)
    info("forking repos from: {org}".format(org=src_org.login))
    info("                to: {org}".format(org=dst_org.login))

    debug('looking for repos -- this can take a while for large orgs...')
    if args.team:
        debug('checking that selection team(s) exist')
        try:
            org_teams = list(src_org.get_teams())
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = 'error getting teams'
            raise pygithub.CaughtOrganizationError(src_org, e, msg) from None

        missing_teams = [
            n for n in args.team if n not in [t.name for t in org_teams]
        ]
        if missing_teams:
            error("{n} team(s) do not exist:".format(n=len(missing_teams)))
            [error("  '{t}'".format(t=n)) for n in missing_teams]
            return

        fork_teams = [t for t in org_teams if t.name in args.team]
        repos = pygithub.get_repos_by_team(fork_teams)
        debug('selecting repos by membership in team(s):')
        [debug("  '{t}'".format(t=t.name)) for t in fork_teams]
    else:
        # no team filter -- consider all repos in the source org
        repos = src_org.get_repos()

    src_repos = list(itertools.islice(repos, args.limit))
    repo_count = len(src_repos)
    if not repo_count:
        debug('nothing to do -- exiting')
        return

    debug("found {n} repos to be forked from org {src_org}:".format(
        n=repo_count,
        src_org=src_org.login
    ))
    [debug("  {r}".format(r=r.full_name)) for r in src_repos]

    if args.copy_teams:
        debug('checking source repo team membership...')
        # dict of repo and team objects, keyed by repo name
        src_rt = find_teams_by_repo(src_repos)

        # extract a non-duplicated list of team names from all repos being
        # forked as a dict, keyed by team name
        src_teams = find_used_teams(src_rt)

        debug('found {n} teams in use within org {o}:'.format(
            n=len(src_teams),
            o=src_org.login
        ))
        [debug("  '{t}'".format(t=t)) for t in src_teams.keys()]

        # check for conflicting teams in dst org before attempting to create
        # any forks so it's possible to bail out before any resources have
        # been created.
        debug('checking teams in destination org')
        conflicting_teams = pygithub.get_teams_by_name(
            dst_org,
            list(src_teams.keys())
        )
        if conflicting_teams:
            raise TeamError(
                "found {n} conflicting teams in {o}: {teams}".format(
                    n=len(conflicting_teams),
                    o=dst_org.login,
                    teams=[t.name for t in conflicting_teams]
                ))

    debug('there is no spoon...')
    problems = []

    pygithub.debug_ratelimit(g)
    dst_repos, skipped_repos, err = create_forks(
        dst_org,
        src_repos,
        fail_fast=args.fail_fast,
        dry_run=args.dry_run
    )
    if err:
        problems += err

    if args.copy_teams:
        # filter out repos which were skipped
        # dict of str(fork_repo.name): fork_repo
        dst_forks = dict((r.name, r) for r in dst_repos)
        bad_repos = dict((r.name, r) for r in skipped_repos)

        # dict of str(team.name): [repos] to be created
        dst_teams = {}
        for name, repos in src_teams.items():
            dst_teams[name] = [
                dst_forks[r.name] for r in repos
                if r.name not in bad_repos
            ]

        _, err = create_teams(
            dst_org,
            dst_teams,
            with_repos=True,
            fail_fast=args.fail_fast,
            dry_run=args.dry_run
        )
        if err:
            problems += err

    if problems:
        msg = "{n} errors forking repo(s)/team(s)".format(n=len(problems))
        raise codetools.DogpileError(problems, msg)
def run(): """Log in and store credentials""" args = parse_args() appname = sys.argv[0] hostname = platform.node() codetools.setup_logging(args.debug) password = '' if args.token_path is None and args.delete_role is True: cred_path = os.path.expanduser('~/.sq_github_token_delete') elif args.token_path is None and args.delete_role is False: cred_path = os.path.expanduser('~/.sq_github_token') else: cred_path = os.path.expandvars(os.path.expanduser(args.token_path)) if not os.path.isfile(cred_path): print(""" Type in your password to get an auth token from github It will be stored in {0} and used in subsequent occasions. """.format(cred_path)) while not password: password = getpass('Password for {0}: '.format(args.user)) note = textwrap.dedent("""\ {app} via bored^H^H^H^H^H terrified opossums[1] on {host} by {user} {creds} [1] https://youtu.be/ZtLrn2zPTxQ?t=1m10s """).format( app=appname, host=hostname, user=args.user, creds=cred_path ) note_url = 'https://www.youtube.com/watch?v=cFvijBpzD_Y' if args.delete_role: scopes = ['repo', 'user', 'delete_repo', 'admin:org'] else: scopes = ['repo', 'user'] global g g = github.Github(args.user, password) u = g.get_user() try: auth = u.create_authorization( scopes=scopes, note=note, note_url=note_url, ) except github.TwoFactorException: auth = u.create_authorization( scopes=scopes, note=note, note_url=note_url, # not a callback onetime_password=codetools.github_2fa_callback() ) g = github.Github(auth.token) with open(cred_path, 'w') as fdo: fdo.write(auth.token + '\n') fdo.write(str(auth.id)) print('Token written to {0}'.format(cred_path)) else: print("You already have an auth file: {0} ".format(cred_path)) print("Delete it if you want a new one and run again") print("Remember to also remove the corresponding token on Github")
#!/usr/bin/env python3

from codekit import codetools, eups
import codecs
import os
import pytest
import responses

codetools.setup_logging()


@pytest.fixture
def fixture_dir():
    d = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(d, 'data')


@pytest.fixture
def v15_0(fixture_dir):
    filename = os.path.join(fixture_dir, 'v15_0.list')
    with codecs.open(filename, 'r', encoding='utf8') as file:
        return file.read()


@pytest.fixture
def d_2018_05_08(fixture_dir):
    filename = os.path.join(fixture_dir, 'd_2018_05_08.list')
    with codecs.open(filename, 'r', encoding='utf8') as file:
        return file.read()
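# A minimal sketch of how the v15_0 fixture could be exercised, assuming
# eups.EupsTag only fetches its tag file over HTTP when .products is first
# accessed (as suggested by its use in the tagging scripts above).  The
# base_url value is a placeholder and the catch-all regex simply intercepts
# whatever URL the object builds; treat this as illustrative, not as the
# project's actual test.
import re  # noqa: E402


@responses.activate
def test_eups_tag_products_sketch(v15_0):
    responses.add(
        responses.GET,
        re.compile(r'.*'),  # match whatever URL EupsTag constructs
        body=v15_0,
        status=200,
    )

    t = eups.EupsTag('v15_0', base_url='https://example.org/eups/tags')
    # the parsed product listing should be non-empty for a real tag file
    assert t.products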
#!/usr/bin/env python3

from codekit import codetools, versiondb
import codecs
import os
import pytest
import responses

codetools.setup_logging()


@pytest.fixture
def fixture_dir():
    d = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(d, 'data')


@pytest.fixture
def b3504(fixture_dir):
    filename = os.path.join(fixture_dir, 'b3504.txt')
    with codecs.open(filename, 'r', encoding='utf8') as file:
        return file.read()


@responses.activate
def test_init():
    # should not make any http requests
    m = versiondb.Manifest('b1234')
    assert isinstance(m, versiondb.Manifest)
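# A minimal sketch of using the b3504 fixture, under the assumption that
# Manifest defers its HTTP request until .products is first accessed --
# consistent with test_init() above, which constructs a Manifest with no
# mocked responses registered.  The base_url value is a placeholder and the
# catch-all regex intercepts whatever URL Manifest constructs; illustrative
# only.
import re  # noqa: E402


@responses.activate
def test_manifest_products_sketch(b3504):
    responses.add(
        responses.GET,
        re.compile(r'.*'),  # match whatever URL Manifest constructs
        body=b3504,
        status=200,
    )

    m = versiondb.Manifest('b3504', base_url='https://example.org/manifests')
    # the parsed product listing should be non-empty for a real manifest
    assert m.products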
def run(): """Create the tag""" args = parse_args() codetools.setup_logging(args.debug) git_tag = args.tag # if email not specified, try getting it from the gitconfig git_email = codetools.lookup_email(args) # ditto for the name of the git user git_user = codetools.lookup_user(args) # The default eups tag is derived from the git tag, otherwise specified # with the --eups-tag option. The reason to currently do this is that for # weeklies and other internal builds, it's okay to eups publish the weekly # and git tag post-facto. However for official releases, we don't want to # publish until the git tag goes down, because we want to eups publish the # build that has the official versions in the eups ref. if not args.manifest_only: eups_tag = args.eups_tag if not eups_tag: # generate eups-style version eups_tag = eups.git_tag2eups_tag(git_tag) debug("using eups tag: {eups_tag}".format(eups_tag=eups_tag)) # sadly we need to "just" know this # XXX this can be parsed from the eups tag file post d_2018_05_08 manifest = args.manifest debug("using manifest: {manifest}".format(manifest=manifest)) if not args.manifest_only: # release from eups tag message_template = "Version {{git_tag}}"\ " release from {eups_tag}/{manifest}".format( eups_tag=eups_tag, manifest=manifest, ) else: # release from manifest only message_template = "Version {{git_tag}}"\ " release from manifest {manifest}".format( manifest=manifest, ) debug("using tag message: {msg}".format(msg=message_template)) tagger = github.InputGitAuthor( git_user, git_email, codetools.current_timestamp(), ) debug("using taggger: {tagger}".format(tagger=tagger)) global g g = pygithub.login_github(token_path=args.token_path, token=args.token) org = g.get_organization(args.org) info("tagging repos in org: {org}".format(org=org.login)) problems = [] manifest_products = versiondb.Manifest( manifest, base_url=args.versiondb_base_url).products if not args.manifest_only: # cross-reference eups tag version strings with manifest eups_products = eups.EupsTag(eups_tag, base_url=args.eupstag_base_url).products # do not fail-fast on non-write operations products, err = cross_reference_products( eups_products, manifest_products, ignore_manifest_versions=args.ignore_manifest_versions, fail_fast=False, ) problems += err else: # no eups tag; use manifest products without sanity check against eups # tag version strings products = manifest_products if args.limit: products = dict(itertools.islice(products.items(), args.limit)) # do not fail-fast on non-write operations products, err = get_repo_for_products( org=org, products=products, allow_teams=args.allow_team, ext_teams=args.external_team, deny_teams=args.deny_team, fail_fast=False, ) problems += err # do not fail-fast on non-write operations products_to_tag, err = check_product_tags( products, git_tag, tag_message_template=message_template, tagger=tagger, force_tag=args.force_tag, fail_fast=False, ignore_git_message=args.ignore_git_message, ignore_git_tagger=args.ignore_git_tagger, ) problems += err if args.verify: # in verify mode, it is an error if there are products that need to be # tagged. err = identify_products_missing_tags(products_to_tag) problems += err if problems: msg = "{n} pre-flight error(s)".format(n=len(problems)) raise codetools.DogpileError(problems, msg) tag_products( products_to_tag, fail_fast=args.fail_fast, dry_run=args.dry_run, )
def run(): """Move the repos""" args = parse_args() codetools.setup_logging(args.debug) global g g = pygithub.login_github(token_path=args.token_path, token=args.token) org = g.get_organization(args.org) # only iterate over all teams once try: teams = list(org.get_teams()) except github.RateLimitExceededException: raise except github.GithubException as e: msg = 'error getting teams' raise pygithub.CaughtOrganizationError(org, e, msg) from None old_team = find_team(teams, args.oldteam) new_team = find_team(teams, args.newteam) move_me = args.repos debug(len(move_me), 'repos to be moved') added = [] removed = [] for name in move_me: try: r = org.get_repo(name) except github.RateLimitExceededException: raise except github.GithubException as e: msg = "error getting repo by name: {r}".format(r=name) raise pygithub.CaughtOrganizationError(org, e, msg) from None # Add team to the repo debug("Adding {repo} to '{team}' ...".format(repo=r.full_name, team=args.newteam)) if not args.dry_run: try: new_team.add_to_repos(r) added += r.full_name debug(' ok') except github.RateLimitExceededException: raise except github.GithubException as e: debug(' FAILED') if old_team.name in 'Owners': warn("Removing repo {repo} from team 'Owners' is not allowed". format(repo=r.full_name)) debug("Removing {repo} from '{team}' ...".format(repo=r.full_name, team=args.oldteam)) if not args.dry_run: try: old_team.remove_from_repos(r) removed += r.full_name debug(' ok') except github.RateLimitExceededException: raise except github.GithubException as e: debug(' FAILED') info('Added:', added) info('Removed:', removed)