def find_teams_by_repo(src_repos):
    """Map each repo's full name to its repo object and team list.

    Parameters
    ----------
    src_repos: list(github.Repository.Repository)
        repos to inspect for team membership

    Returns
    -------
    dict keyed by repo full_name; each value is a dict with keys
    'repo' (the repo object) and 'teams' (its teams).

    Raises
    ------
    pygithub.CaughtRepositoryError
        Upon error from the github api while fetching a repo's teams
    """
    assert isinstance(src_repos, list), type(src_repos)

    # length of longest repo name, used to right-align the debug output.
    # default=0 avoids a ValueError when src_repos is empty (the original
    # max([...], key=len) blew up on an empty list).
    max_name_len = max((len(r.full_name) for r in src_repos), default=0)

    src_rt = {}
    for r in src_repos:
        try:
            teams = r.get_teams()
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = 'error getting teams'
            raise pygithub.CaughtRepositoryError(r, e, msg) from None

        team_names = [t.name for t in teams]
        debug(" {repo: >{w}} {teams}".format(
            repo=r.full_name,
            w=max_name_len,
            teams=team_names
        ))
        src_rt[r.full_name] = {'repo': r, 'teams': teams}

    return src_rt
def __fetch_manifest_file(self):
    """Download the manifest file and cache its raw text on the instance."""
    # construct url: <base_url>/<name>.txt
    tag_url = '/'.join((self.base_url, self.name + '.txt'))
    debug("fetching: {url}".format(url=tag_url))

    resp = requests.get(tag_url)
    # surface any HTTP error to the caller
    resp.raise_for_status()
    self.__text = resp.text
def __fetch_tag_file(self):
    """Download the tag list file and cache its raw text on the instance."""
    # construct url: <base_url>/<name>.list
    tag_url = '/'.join((self.base_url, self.name + '.list'))
    debug("fetching: {url}".format(url=tag_url))

    resp = requests.get(tag_url)
    # surface any HTTP error to the caller
    resp.raise_for_status()
    self.__text = resp.text
def run():
    """Tag (or, with --delete, untag) repos in a github org by team membership."""
    args = parse_args()
    codetools.setup_logging(args.debug)

    gh_org_name = args.org
    tags = args.tag

    # git author identity; the lookups fall back to gitconfig when not
    # supplied on the command line
    git_email = codetools.lookup_email(args)
    git_user = codetools.lookup_user(args)

    tagger = github.InputGitAuthor(
        git_user,
        git_email,
        codetools.current_timestamp()
    )
    debug(tagger)

    # global so a caller/exit-handler can inspect the client after run()
    global g
    g = pygithub.login_github(token_path=args.token_path, token=args.token)

    org = g.get_organization(gh_org_name)
    info("tagging repos in org: {org}".format(org=org.login))

    tag_teams = get_candidate_teams(org, args.allow_team)
    target_repos = get_candidate_repos(tag_teams)

    problems = []
    # do not fail-fast on non-write operations
    problems += check_repos(
        target_repos,
        args.allow_team,
        args.deny_team,
        fail_fast=False,
    )

    # existing tags are always ignored (not an error) under --delete
    ignore_existing = True if args.delete else args.ignore_existing_tag

    # do not fail-fast on non-write operations
    present_tags, absent_tags, err = check_tags(
        target_repos,
        tags,
        ignore_existing=ignore_existing,
        fail_fast=False,
    )
    problems += err

    # abort before any write operation if pre-flight checks found errors
    if problems:
        msg = "{n} repo(s) have errors".format(n=len(problems))
        raise codetools.DogpileError(problems, msg)

    if args.delete:
        untag_repos(present_tags, dry_run=args.dry_run)
    else:
        tag_repos(absent_tags, tagger=tagger, dry_run=args.dry_run)
def debug_ratelimit(g):
    """Log debug of github ratelimit information from last API call

    Parameters
    ----------
    g: github.MainClass.Github
        github client object whose rate_limiting status is reported
        (the original docstring mis-named this parameter ``org``)
    """
    assert isinstance(g, github.MainClass.Github), type(g)

    debug("github ratelimit: {rl}".format(rl=g.rate_limiting))
def main():
    """Entry point: run() with error accounting and ratelimit reporting.

    Exit status is 0 on success, or the number of accumulated errors
    (capped at 255) when run() raises DogpileError.
    """
    try:
        status = 0
        try:
            run()
        except codetools.DogpileError as e:
            error(e)
            n = len(e.errors)
            status = n if n < 256 else 255
        finally:
            # report the api ratelimit on the way out, even on failure
            if 'g' in globals():
                pygithub.debug_ratelimit(g)
        sys.exit(status)
    except SystemExit as e:
        debug("exit {status}".format(status=str(e)))
        raise e
def main():
    """Entry point wrapper.

    Exit status is 0 on success, or the number of accumulated errors
    (capped at 255) when run() raises DogpileError.  The github API
    ratelimit is reported on the way out.
    """
    try:
        try:
            run()
        except codetools.DogpileError as e:
            error(e)
            n = len(e.errors)
            # flatten the error count into an exit status; 255 is the max
            sys.exit(n if n < 256 else 255)
        else:
            sys.exit(0)
        finally:
            if 'g' in globals():
                # report ratelimit even when exiting via exception
                pygithub.debug_ratelimit(g)
    except SystemExit as e:
        debug("exit {status}".format(status=str(e)))
        raise e
def get_candidate_teams(org, target_teams):
    """Return the org teams whose names appear in target_teams.

    Raises RuntimeError if no matching team exists, and
    pygithub.CaughtOrganizationError on github api failure.
    """
    assert isinstance(org, github.Organization.Organization), type(org)

    try:
        all_teams = list(org.get_teams())
    except github.RateLimitExceededException:
        raise
    except github.GithubException as e:
        msg = 'error getting teams'
        raise pygithub.CaughtOrganizationError(org, e, msg) from None

    debug("looking for teams: {teams}".format(teams=target_teams))

    # filter with a plain loop rather than a comprehension
    tag_teams = []
    for team in all_teams:
        if team.name in target_teams:
            tag_teams.append(team)

    debug("found teams: {teams}".format(teams=tag_teams))

    if not tag_teams:
        raise RuntimeError('No teams found')

    return tag_teams
def check_tags(repos, tags, ignore_existing=False, fail_fast=False):
    """Check if tags already exist in repos.

    Parameters
    ----------
    repos: list(github.Repository.Repository)
        repos to inspect
    tags: list(str)
        tag names to look for
    ignore_existing: bool
        when True, an existing tag is recorded but not treated as an error
    fail_fast: bool
        raise on the first error instead of accumulating

    Returns
    -------
    (present_tags, absent_tags, problems): dicts keyed by repo full_name
    ({'repo', 'tags'} / {'repo', 'need_tags'}) plus a list of errors.
    """
    debug("looking for {n} tag(s):".format(n=len(tags)))
    # plain loops instead of side-effect list comprehensions
    for t in tags:
        debug(" {t}".format(t=t))
    debug("in {n} repo(s):".format(n=len(repos)))
    for r in repos:
        debug(" {r}".format(r=r.full_name))

    # present/missing tags by repo name
    present_tags = {}
    absent_tags = {}
    problems = []
    for r in repos:
        has_tags = find_tags_in_repo(r, tags)
        if has_tags:
            if not ignore_existing:
                yikes = GitTagExistsError(
                    "tag(s) {tag} already exists in repos {r}".format(
                        tag=list(has_tags.keys()),
                        r=r.full_name
                    ))
                if fail_fast:
                    raise yikes
                problems.append(yikes)
                error(yikes)

            present_tags[r.full_name] = {
                'repo': r,
                'tags': list(has_tags.values()),
            }

        missing_tags = [x for x in tags if x not in has_tags]
        if missing_tags:
            absent_tags[r.full_name] = {
                'repo': r,
                'need_tags': missing_tags,
            }

    debug(textwrap.dedent("""\
        found:
          {n_with:>4} repos with tag(s)
          {n_none:>4} repos with no tag(s)
          {errors:>4} repos with error(s)\
        """).format(
        n_with=len(present_tags),
        n_none=len(absent_tags),
        errors=len(problems),
    ))

    return present_tags, absent_tags, problems
def find_teams_by_repo(src_repos):
    """Return a dict of {repo full_name: {'repo': repo, 'teams': teams}}."""
    assert isinstance(src_repos, list), type(src_repos)

    # width of the longest repo name, for aligned debug output
    max_name_len = len(max([r.full_name for r in src_repos], key=len))

    repo_teams = {}
    for repo in src_repos:
        try:
            teams = repo.get_teams()
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            raise pygithub.CaughtRepositoryError(
                repo, e, 'error getting teams') from None

        names = [t.name for t in teams]
        debug(" {repo: >{w}} {teams}".format(
            repo=repo.full_name, w=max_name_len, teams=names))
        repo_teams[repo.full_name] = {'repo': repo, 'teams': teams}

    return repo_teams
def create_forks(dst_org, src_repos, fail_fast=False, dry_run=False):
    """Fork src_repos into dst_org.

    Parameters
    ----------
    dst_org: github.Organization.Organization
        org to create the forks under
    src_repos: list(github.Repository.Repository)
        repos to fork
    fail_fast: bool
        raise on first error instead of accumulating
    dry_run: bool
        log what would be done without calling the api

    Returns
    -------
    (dst_repos, skipped_repos, problems): created forks, repos skipped
    because they are empty, and accumulated errors.
    """
    assert isinstance(dst_org, github.Organization.Organization),\
        type(dst_org)
    assert isinstance(src_repos, list), type(src_repos)

    repo_count = len(src_repos)
    dst_repos = []
    skipped_repos = []
    problems = []
    with pbar.eta_bar(msg='forking', max_value=repo_count) as progress:
        repo_idx = 0
        for r in src_repos:
            progress.update(repo_idx)
            repo_idx += 1

            # XXX per
            # https://developer.github.com/v3/repos/forks/#create-a-fork
            # fork creation is async and pygithub doesn't appear to wait.
            # https://github.com/PyGithub/PyGithub/blob/c44469965e4ea368b78c4055a8afcfcf08314585/github/Organization.py#L321-L336
            # so its possible that this may fail in some strange way such as
            # not returning all repo data, but it hasn't yet been observed.

            # get current time before API call in case fork creation is slow.
            now = datetime.datetime.now()

            debug("forking {r}".format(r=r.full_name))
            if dry_run:
                debug(' (noop)')
                continue

            try:
                fork = dst_org.create_fork(r)
                dst_repos.append(fork)
                debug(" -> {r}".format(r=fork.full_name))
            except github.RateLimitExceededException:
                raise
            except github.GithubException as e:
                if 'Empty repositories cannot be forked.' in e.data['message']:
                    warn("{r} is empty and can not be forked".format(
                        r=r.full_name))
                    skipped_repos.append(r)
                    continue

                msg = "error forking repo {r}".format(r=r.full_name)
                yikes = pygithub.CaughtOrganizationError(dst_org, e, msg)
                if fail_fast:
                    raise yikes from None
                problems.append(yikes)
                error(yikes)
                # BUG FIX: without this continue, the created_at check below
                # read `fork` from a previous iteration (or an unbound name
                # on the first failure)
                continue

            if fork.created_at < now:
                warn("fork of {r} already exists\n created_at {ctime}".format(
                    r=fork.full_name, ctime=fork.created_at))

    return dst_repos, skipped_repos, problems
def create_tags(repo, tags, tagger, dry_run=False):
    """Create annotated tags (tag object + ref) at the repo's default branch head."""
    assert isinstance(repo, github.Repository.Repository), type(repo)

    # tag the head of the designated "default branch"
    # XXX this probably should be resolved via repos.yaml
    head = pygithub.get_default_ref(repo)
    debug(textwrap.dedent("""\
        tagging repo: {repo} @
          default ref: {db}
          type: {obj_type}
          sha: {obj_sha}\
        """).format(
        repo=repo.full_name,
        db=head.ref,
        obj_type=head.object.type,
        obj_sha=head.object.sha
    ))

    for name in tags:
        debug(" creating 'annotated tag' {t}".format(t=name))
        if dry_run:
            debug(' (noop)')
            continue

        # an annotated tag is a tag object plus a ref pointing at it
        annotated = repo.create_git_tag(
            name,
            "Version {t}".format(t=name),  # fmt similar to github-tag-release
            head.object.sha,
            head.object.type,
            tagger=tagger
        )
        debug(" created tag object {tag_obj}".format(tag_obj=annotated))

        tag_ref = repo.create_git_ref(
            "refs/tags/{t}".format(t=name), annotated.sha)
        debug(" created ref: {ref}".format(ref=tag_ref.ref))
def delete_all_repos(org, **kwargs):
    """Delete all repos in an org, up to an optional `limit` kwarg.

    Remaining kwargs are passed through to delete_repos().
    """
    assert isinstance(org, github.Organization.Organization), type(org)

    # islice(..., None) yields everything when no limit was given
    limit = kwargs.pop('limit', None)

    try:
        repos = list(itertools.islice(org.get_repos(), limit))
    except github.RateLimitExceededException:
        raise
    except github.GithubException as e:
        msg = 'error getting repos'
        raise pygithub.CaughtOrganizationError(org, e, msg) from None

    info("found {n} repos in {org}".format(n=len(repos), org=org.login))
    # plain loop instead of a side-effect list comprehension
    for r in repos:
        debug(" {r}".format(r=r.full_name))

    if repos:
        warn("Deleting all repos in {org}".format(org=org.login))
        # give the operator a chance to abort a destructive run
        pbar.wait_for_user_panic_once()

    return delete_repos(repos, **kwargs)
def delete_all_teams(org, **kwargs):
    """Delete all teams in an org, up to an optional `limit` kwarg.

    Remaining kwargs are passed through to delete_teams().
    """
    assert isinstance(org, github.Organization.Organization), type(org)

    # islice(..., None) yields everything when no limit was given
    limit = kwargs.pop('limit', None)

    try:
        teams = list(itertools.islice(org.get_teams(), limit))
    except github.RateLimitExceededException:
        raise
    except github.GithubException as e:
        msg = 'error getting teams'
        raise pygithub.CaughtOrganizationError(org, e, msg) from None

    info("found {n} teams in {org}".format(n=len(teams), org=org.login))
    # plain loop instead of a side-effect list comprehension
    for t in teams:
        debug(" '{t}'".format(t=t.name))

    if teams:
        warn("Deleting all teams in {org}".format(org=org.login))
        # give the operator a chance to abort a destructive run
        pbar.wait_for_user_panic_once()

    return delete_teams(teams, **kwargs)
def delete_all_repos(org, **kwargs):
    """Delete every repo in an org (bounded by an optional `limit` kwarg);
    all other kwargs are forwarded to delete_repos()."""
    assert isinstance(org, github.Organization.Organization), type(org)

    limit = kwargs.pop('limit', None)

    try:
        repos = list(itertools.islice(org.get_repos(), limit))
    except github.RateLimitExceededException:
        raise
    except github.GithubException as e:
        msg = 'error getting repos'
        raise pygithub.CaughtOrganizationError(org, e, msg) from None

    info("found {n} repos in {org}".format(n=len(repos), org=org.login))
    # loop rather than a list comprehension used purely for side effects
    for r in repos:
        debug(" {r}".format(r=r.full_name))

    if repos:
        warn("Deleting all repos in {org}".format(org=org.login))
        # pause so the operator can abort the destructive run
        pbar.wait_for_user_panic_once()

    return delete_repos(repos, **kwargs)
def delete_all_teams(org, **kwargs):
    """Delete every team in an org (bounded by an optional `limit` kwarg);
    all other kwargs are forwarded to delete_teams()."""
    assert isinstance(org, github.Organization.Organization), type(org)

    limit = kwargs.pop('limit', None)

    try:
        teams = list(itertools.islice(org.get_teams(), limit))
    except github.RateLimitExceededException:
        raise
    except github.GithubException as e:
        msg = 'error getting teams'
        raise pygithub.CaughtOrganizationError(org, e, msg) from None

    info("found {n} teams in {org}".format(n=len(teams), org=org.login))
    # loop rather than a list comprehension used purely for side effects
    for t in teams:
        debug(" '{t}'".format(t=t.name))

    if teams:
        warn("Deleting all teams in {org}".format(org=org.login))
        # pause so the operator can abort the destructive run
        pbar.wait_for_user_panic_once()

    return delete_teams(teams, **kwargs)
def get_teams_by_name(org, team_names):
    """Find team(s) in org by name(s).

    Parameters
    ----------
    org: github.Organization.Organization
        org to search for team(s)
    team_names: list(str)
        list of team names to search for
        (the original docstring mis-named this parameter ``teams``)

    Returns
    -------
    list of github.Team.Team objects

    Raises
    ------
    github.GithubException
        Upon error from github api
    """
    assert isinstance(org, github.Organization.Organization), type(org)

    try:
        org_teams = list(org.get_teams())
    except github.RateLimitExceededException:
        raise
    except github.GithubException as e:
        msg = 'error getting teams'
        raise CaughtOrganizationError(org, e, msg) from None

    found_teams = []
    for name in team_names:
        debug("looking for team: {o}/'{t}'".format(
            o=org.login,
            t=name
        ))

        # first team with a matching name, or None
        t = next((t for t in org_teams if t.name == name), None)
        if t:
            debug(' found')
            found_teams.append(t)
        else:
            debug(' not found')

    return found_teams
def delete_refs(repo, refs, dry_run=False):
    """Delete the given refs from a repo.

    Note that only the ref to a tag can be explicitly removed.  The tag
    object will live on until it's garbage collected.
    (Fixes docstring typos: "leave on" -> "live on", "gargabe" -> "garbage".)

    Parameters
    ----------
    repo: github.Repository.Repository
        repo to remove the refs from
    refs: list
        ref objects to delete
    dry_run: bool
        log what would be done without calling the api
    """
    assert isinstance(repo, github.Repository.Repository), type(repo)

    debug("removing {n} refs from {repo}".format(
        n=len(refs),
        repo=repo.full_name)
    )

    for r in refs:
        debug(" deleting {ref}".format(ref=r.ref))
        if dry_run:
            debug(' (noop)')
            continue
        r.delete()
def find_tags_in_repo(repo, tags):
    """Return a dict of {tag name: ref} for each of `tags` present in repo."""
    assert isinstance(repo, github.Repository.Repository), type(repo)

    debug(textwrap.dedent("""\
        looking in repo: {repo}
          for tag(s): {tags}\
        """).format(
        repo=repo.full_name,
        tags=tags,
    ))

    found_tags = {}
    for tag_name in tags:
        ref = pygithub.find_tag_by_name(repo, tag_name)
        # guard clause: no usable ref means the tag is absent
        if not (ref and ref.ref):
            debug(" not found: {tag}".format(tag=tag_name))
            continue
        debug(" found: {tag} ({ref})".format(tag=tag_name, ref=ref.ref))
        found_tags[tag_name_from_ref(ref)] = ref

    return found_tags
def create_forks(
    dst_org, src_repos,
    fail_fast=False,
    dry_run=False
):
    """Fork src_repos into dst_org.

    Parameters
    ----------
    dst_org: github.Organization.Organization
        org to create the forks under
    src_repos: list(github.Repository.Repository)
        repos to fork
    fail_fast: bool
        raise on first error instead of accumulating
    dry_run: bool
        log what would be done without calling the api

    Returns
    -------
    (dst_repos, skipped_repos, problems): created forks, repos skipped
    because they are empty, and accumulated errors.
    """
    assert isinstance(dst_org, github.Organization.Organization),\
        type(dst_org)
    assert isinstance(src_repos, list), type(src_repos)

    repo_count = len(src_repos)
    dst_repos = []
    skipped_repos = []
    problems = []
    with pbar.eta_bar(msg='forking', max_value=repo_count) as progress:
        repo_idx = 0
        for r in src_repos:
            progress.update(repo_idx)
            repo_idx += 1

            # XXX per
            # https://developer.github.com/v3/repos/forks/#create-a-fork
            # fork creation is async and pygithub doesn't appear to wait.
            # https://github.com/PyGithub/PyGithub/blob/c44469965e4ea368b78c4055a8afcfcf08314585/github/Organization.py#L321-L336
            # so its possible that this may fail in some strange way such as
            # not returning all repo data, but it hasn't yet been observed.

            # get current time before API call in case fork creation is slow.
            now = datetime.datetime.now()

            debug("forking {r}".format(r=r.full_name))
            if dry_run:
                debug(' (noop)')
                continue

            try:
                fork = dst_org.create_fork(r)
                dst_repos.append(fork)
                debug(" -> {r}".format(r=fork.full_name))
            except github.RateLimitExceededException:
                raise
            except github.GithubException as e:
                if 'Empty repositories cannot be forked.' in e.data['message']:
                    warn("{r} is empty and can not be forked".format(
                        r=r.full_name
                    ))
                    skipped_repos.append(r)
                    continue

                msg = "error forking repo {r}".format(r=r.full_name)
                yikes = pygithub.CaughtOrganizationError(dst_org, e, msg)
                if fail_fast:
                    raise yikes from None
                problems.append(yikes)
                error(yikes)

            # NOTE(review): when create_fork() raised and the error was
            # recorded above, control falls through to this check with
            # `fork` stale from a previous iteration (or unbound on the
            # first failure) — looks like a missing `continue`; confirm.
            if fork.created_at < now:
                warn("fork of {r} already exists\n created_at {ctime}".format(
                    r=fork.full_name,
                    ctime=fork.created_at
                ))

    return dst_repos, skipped_repos, problems
def run(): """Create the tag""" args = parse_args() codetools.setup_logging(args.debug) git_tag = args.tag # if email not specified, try getting it from the gitconfig git_email = codetools.lookup_email(args) # ditto for the name of the git user git_user = codetools.lookup_user(args) # The default eups tag is derived from the git tag, otherwise specified # with the --eups-tag option. The reason to currently do this is that for # weeklies and other internal builds, it's okay to eups publish the weekly # and git tag post-facto. However for official releases, we don't want to # publish until the git tag goes down, because we want to eups publish the # build that has the official versions in the eups ref. if not args.manifest_only: eups_tag = args.eups_tag if not eups_tag: # generate eups-style version eups_tag = eups.git_tag2eups_tag(git_tag) debug("using eups tag: {eups_tag}".format(eups_tag=eups_tag)) # sadly we need to "just" know this # XXX this can be parsed from the eups tag file post d_2018_05_08 manifest = args.manifest debug("using manifest: {manifest}".format(manifest=manifest)) if not args.manifest_only: # release from eups tag message_template = "Version {{git_tag}}"\ " release from {eups_tag}/{manifest}".format( eups_tag=eups_tag, manifest=manifest, ) else: # release from manifest only message_template = "Version {{git_tag}}"\ " release from manifest {manifest}".format( manifest=manifest, ) debug("using tag message: {msg}".format(msg=message_template)) tagger = github.InputGitAuthor( git_user, git_email, codetools.current_timestamp(), ) debug("using taggger: {tagger}".format(tagger=tagger)) global g g = pygithub.login_github(token_path=args.token_path, token=args.token) org = g.get_organization(args.org) info("tagging repos in org: {org}".format(org=org.login)) problems = [] manifest_products = versiondb.Manifest( manifest, base_url=args.versiondb_base_url).products if not args.manifest_only: # cross-reference eups tag version strings with manifest 
eups_products = eups.EupsTag(eups_tag, base_url=args.eupstag_base_url).products # do not fail-fast on non-write operations products, err = cross_reference_products( eups_products, manifest_products, ignore_manifest_versions=args.ignore_manifest_versions, fail_fast=False, ) problems += err else: # no eups tag; use manifest products without sanity check against eups # tag version strings products = manifest_products if args.limit: products = dict(itertools.islice(products.items(), args.limit)) # do not fail-fast on non-write operations products, err = get_repo_for_products( org=org, products=products, allow_teams=args.allow_team, ext_teams=args.external_team, deny_teams=args.deny_team, fail_fast=False, ) problems += err # do not fail-fast on non-write operations products_to_tag, err = check_product_tags( products, git_tag, tag_message_template=message_template, tagger=tagger, force_tag=args.force_tag, fail_fast=False, ignore_git_message=args.ignore_git_message, ignore_git_tagger=args.ignore_git_tagger, ) problems += err if args.verify: # in verify mode, it is an error if there are products that need to be # tagged. err = identify_products_missing_tags(products_to_tag) problems += err if problems: msg = "{n} pre-flight error(s)".format(n=len(problems)) raise codetools.DogpileError(problems, msg) tag_products( products_to_tag, fail_fast=args.fail_fast, dry_run=args.dry_run, )
def get_repo_for_products(org,
                          products,
                          allow_teams,
                          ext_teams,
                          deny_teams,
                          fail_fast=False):
    """Resolve the github repo for each product and vet its team membership.

    Parameters
    ----------
    org: github.Organization.Organization
        org in which to look up repos by product name
    products: dict
        product data keyed by name; each value must have an 'eups_version' key
    allow_teams: list(str)
        team names a repo must belong to
    ext_teams: list(str)
        team names that mark a repo as "external"
    deny_teams: list(str)
        team names a repo must not belong to
    fail_fast: bool
        raise on first error instead of accumulating

    Returns
    -------
    (resolved_products, problems): copy of each product dict with added
    'repo' and 'v' keys, plus a list of accumulated errors.
    """
    debug("allowed teams: {allow}".format(allow=allow_teams))
    debug("external teams: {ext}".format(ext=ext_teams))
    debug("denied teams: {deny}".format(deny=deny_teams))

    resolved_products = {}
    problems = []
    for name, data in products.items():
        debug("looking for git repo for: {name} [{ver}]".format(
            name=name,
            ver=data['eups_version']))

        try:
            repo = org.get_repo(name)
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = "error getting repo by name: {r}".format(r=name)
            yikes = pygithub.CaughtOrganizationError(org, e, msg)
            if fail_fast:
                raise yikes from None
            problems.append(yikes)
            error(yikes)
            continue

        debug(" found: {slug}".format(slug=repo.full_name))

        try:
            repo_team_names = [t.name for t in repo.get_teams()]
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = 'error getting teams'
            yikes = pygithub.CaughtRepositoryError(repo, e, msg)
            if fail_fast:
                raise yikes from None
            problems.append(yikes)
            error(yikes)
            continue

        debug(" teams: {teams}".format(teams=repo_team_names))

        # rejects the repo if membership violates allow/deny constraints
        try:
            pygithub.check_repo_teams(repo,
                                      allow_teams=allow_teams,
                                      deny_teams=deny_teams,
                                      team_names=repo_team_names)
        except pygithub.RepositoryTeamMembershipError as e:
            if fail_fast:
                raise
            problems.append(e)
            error(e)
            continue

        # 'v' records whether the repo belongs to any "external" team
        has_ext_team = any(x in repo_team_names for x in ext_teams)
        debug(" external repo: {v}".format(v=has_ext_team))

        resolved_products[name] = data.copy()
        resolved_products[name]['repo'] = repo
        resolved_products[name]['v'] = has_ext_team

    if problems:
        error("{n} product(s) have error(s)".format(n=len(problems)))

    return resolved_products, problems
def create_teams(org, teams,
                 with_repos=False,
                 ignore_existing=False,
                 fail_fast=False,
                 dry_run=False):
    """Create teams (optionally populated with member repos) in an org.

    Parameters
    ----------
    org: github.Organization.Organization
        org in which to create the teams
    teams: dict
        team name (str) -> list of repo objects the team should contain
    with_repos: bool
        when True, add each team's repos at/after creation
    ignore_existing: bool
        when True, an already-existing team is looked up instead of being
        treated as an error
    fail_fast: bool
        raise on first error instead of accumulating
    dry_run: bool
        log what would be done without calling the api

    Returns
    -------
    (dst_teams, problems): dict of team objects keyed by name, and a list
    of accumulated errors.
    """
    assert isinstance(org, github.Organization.Organization), type(org)
    assert isinstance(teams, dict), type(teams)

    # it takes fewer api calls to create team(s) with an explicit list of
    # members after all repos have been forked but this blows up if the team
    # already exists.
    debug("creating teams in {org}".format(org=org.login))

    # dict of dst org teams keyed by name (str) with team object as value
    dst_teams = {}
    problems = []
    # max number of repos passed to create_team() in a single api call
    batch_repos = 50
    for name, repos in teams.items():
        pygithub.debug_ratelimit(g)
        debug("creating team {o}/'{t}'".format(
            o=org.login,
            t=name
        ))
        if dry_run:
            debug(' (noop)')
            continue

        dst_t = None
        try:
            if with_repos:
                debug(" with {n} member repos:".format(n=len(repos)))
                [debug(" {r}".format(r=r.full_name)) for r in repos]

                leftover_repos = repos[batch_repos:]
                if leftover_repos:
                    debug(" creating team with first {b} of {n} repos".format(
                        b=batch_repos,
                        n=len(repos)
                    ))
                dst_t = org.create_team(name, repo_names=repos[:batch_repos])

                if leftover_repos:
                    # add any repos over the batch limit individually to team
                    for r in leftover_repos:
                        debug(" adding repo {r}".format(r=r.full_name))
                        dst_t.add_to_repos(r)
            else:
                dst_t = org.create_team(name)
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            # if the error is for any cause other than the team already
            # existing, puke.
            team_exists = False
            if ignore_existing and 'errors' in e.data:
                for oops in e.data['errors']:
                    msg = oops['message']
                    if 'Name has already been taken' in msg:
                        # find existing team
                        # NOTE(review): get_teams_by_name appears to take a
                        # list of names; passing the bare str `name` would
                        # iterate its characters — confirm the signature.
                        dst_t = pygithub.get_teams_by_name(org, name)[0]
                        team_exists = True
            if not (ignore_existing and team_exists):
                msg = "error creating team: {t}".format(t=name)
                yikes = pygithub.CaughtOrganizationError(org, e, msg)
                if fail_fast:
                    raise yikes from None
                problems.append(yikes)
                error(yikes)
                # abort creating the remaining teams
                break
        else:
            # no exception: record the newly created team.  NOTE(review): a
            # pre-existing team recovered in the handler above is NOT added
            # here (try/else is skipped after an exception) — confirm intent.
            dst_teams[dst_t.name] = dst_t

    return dst_teams, problems
def tag_products(
    products,
    fail_fast=False,
    dry_run=False,
):
    """Create (or force-update, when flagged) a git tag in each product's repo.

    Parameters
    ----------
    products: dict
        product data keyed by name; each value must carry 'repo',
        'target_tag', 'eups_version', 'v', and 'update_tag' keys
    fail_fast: bool
        raise on first error instead of accumulating
    dry_run: bool
        log what would be done without calling the api

    Raises
    ------
    codetools.DogpileError
        At the end of the run, if any tag operation failed.
    """
    problems = []
    for name, data in products.items():
        repo = data['repo']
        t_tag = data['target_tag']

        info(textwrap.dedent("""\
            tagging repo: {repo} @
              sha: {sha} as {gt} (eups version: {et})
              external repo: {v}
              replace existing tag: {update}\
            """).format(
            repo=repo.full_name,
            sha=t_tag.sha,
            gt=t_tag.name,
            et=data['eups_version'],
            v=data['v'],
            update=data['update_tag'],
        ))
        if dry_run:
            info(' (noop)')
            continue

        try:
            # an annotated tag: a tag object, then a ref pointing at it
            tag_obj = repo.create_git_tag(
                t_tag.name,
                t_tag.message,
                t_tag.sha,
                'commit',
                tagger=t_tag.tagger,
            )
            debug(" created tag object {tag_obj}".format(tag_obj=tag_obj))

            if data['update_tag']:
                # move the existing ref to the new tag object
                ref = pygithub.find_tag_by_name(
                    repo,
                    t_tag.name,
                    safe=False,
                )
                ref.edit(tag_obj.sha, force=True)
                debug(" updated existing ref: {ref}".format(ref=ref))
            else:
                ref = repo.create_git_ref(
                    "refs/tags/{t}".format(t=t_tag.name),
                    tag_obj.sha
                )
                debug(" created ref: {ref}".format(ref=ref))
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = "error creating tag: {t}".format(t=t_tag.name)
            yikes = pygithub.CaughtRepositoryError(repo, e, msg)
            if fail_fast:
                raise yikes from None
            problems.append(yikes)
            error(yikes)

    if problems:
        msg = "{n} tag failures".format(n=len(problems))
        raise codetools.DogpileError(problems, msg)
def run(): """Create the tag""" args = parse_args() codetools.setup_logging(args.debug) git_tag = args.tag # if email not specified, try getting it from the gitconfig git_email = codetools.lookup_email(args) # ditto for the name of the git user git_user = codetools.lookup_user(args) # The default eups tag is derived from the git tag, otherwise specified # with the --eups-tag option. The reason to currently do this is that for # weeklies and other internal builds, it's okay to eups publish the weekly # and git tag post-facto. However for official releases, we don't want to # publish until the git tag goes down, because we want to eups publish the # build that has the official versions in the eups ref. if not args.manifest_only: eups_tag = args.eups_tag if not eups_tag: # generate eups-style version eups_tag = eups.git_tag2eups_tag(git_tag) debug("using eups tag: {eups_tag}".format(eups_tag=eups_tag)) # sadly we need to "just" know this # XXX this can be parsed from the eups tag file post d_2018_05_08 manifest = args.manifest debug("using manifest: {manifest}".format(manifest=manifest)) if not args.manifest_only: # release from eups tag message_template = "Version {{git_tag}}"\ " release from {eups_tag}/{manifest}".format( eups_tag=eups_tag, manifest=manifest, ) else: # release from manifest only message_template = "Version {{git_tag}}"\ " release from manifest {manifest}".format( manifest=manifest, ) debug("using tag message: {msg}".format(msg=message_template)) tagger = github.InputGitAuthor( git_user, git_email, codetools.current_timestamp(), ) debug("using taggger: {tagger}".format(tagger=tagger)) global g g = pygithub.login_github(token_path=args.token_path, token=args.token) org = g.get_organization(args.org) info("tagging repos in org: {org}".format(org=org.login)) problems = [] manifest_products = versiondb.Manifest( manifest, base_url=args.versiondb_base_url).products if not args.manifest_only: # cross-reference eups tag version strings with manifest 
eups_products = eups.EupsTag( eups_tag, base_url=args.eupstag_base_url).products # do not fail-fast on non-write operations products, err = cross_reference_products( eups_products, manifest_products, ignore_manifest_versions=args.ignore_manifest_versions, fail_fast=False, ) problems += err else: # no eups tag; use manifest products without sanity check against eups # tag version strings products = manifest_products if args.limit: products = dict(itertools.islice(products.items(), args.limit)) # do not fail-fast on non-write operations products, err = get_repo_for_products( org=org, products=products, allow_teams=args.allow_team, ext_teams=args.external_team, deny_teams=args.deny_team, fail_fast=False, ) problems += err # do not fail-fast on non-write operations products_to_tag, err = check_product_tags( products, git_tag, tag_message_template=message_template, tagger=tagger, force_tag=args.force_tag, fail_fast=False, ignore_git_message=args.ignore_git_message, ignore_git_tagger=args.ignore_git_tagger, ) problems += err if args.verify: # in verify mode, it is an error if there are products that need to be # tagged. err = identify_products_missing_tags(products_to_tag) problems += err if problems: msg = "{n} pre-flight error(s)".format(n=len(problems)) raise codetools.DogpileError(problems, msg) tag_products( products_to_tag, fail_fast=args.fail_fast, dry_run=args.dry_run, )
def check_existing_git_tag(repo, t_tag, **kwargs):
    """Check for a pre-existing tag in the github repo.

    Parameters
    ----------
    repo : github.Repository.Repository
        repo to inspect for an existing tag
    t_tag: codekit.pygithub.TargetTag
        dict representing a target git tag

    Returns
    -------
    insync : `bool`
        True if tag exists and is in sync. False if tag does not exist.

    Raises
    ------
    GitTagExistsError
        If tag exists but is not in sync.
    pygithub.CaughtRepositoryError
        Upon error from the github api while fetching the tag object.
    """
    assert isinstance(repo, github.Repository.Repository), type(repo)
    assert isinstance(t_tag, codekit.pygithub.TargetTag), type(t_tag)

    debug("looking for existing tag: {tag} in repo: {repo}".format(
        repo=repo.full_name,
        tag=t_tag.name,
    ))

    # find ref/tag by name
    e_ref = pygithub.find_tag_by_name(repo, t_tag.name)
    if not e_ref:
        debug(" not found: {tag}".format(tag=t_tag.name))
        return False

    # find tag object pointed to by the ref
    try:
        e_tag = repo.get_git_tag(e_ref.object.sha)
    except github.RateLimitExceededException:
        raise
    except github.GithubException as e:
        # BUG FIX: the original formatted this message with `e_tag`, which
        # is unbound when get_git_tag() is the call that raised; use the
        # target name and the ref's sha instead.
        msg = "error getting tag: {tag} [{sha}]".format(
            tag=t_tag.name,
            sha=e_ref.object.sha,
        )
        raise pygithub.CaughtRepositoryError(repo, e, msg) from None

    debug(" found existing: {tag} [{sha}]".format(
        tag=e_tag.tag,
        sha=e_tag.sha,
    ))

    if cmp_existing_git_tag(t_tag, e_tag, **kwargs):
        return True

    yikes = GitTagExistsError(textwrap.dedent("""\
        tag: {tag} already exists in repo: {repo}
        with conflicting values:
          existing:
            sha: {e_sha}
            message: {e_message}
            tagger: {e_tagger}
          target:
            sha: {t_sha}
            message: {t_message}
            tagger: {t_tagger}\
        """).format(
        tag=t_tag.name,
        repo=repo.full_name,
        e_sha=e_tag.object.sha,
        e_message=e_tag.message,
        e_tagger=e_tag.tagger,
        t_sha=t_tag.sha,
        t_message=t_tag.message,
        t_tagger=t_tag.tagger,
    ))
    raise yikes
def get_repo_for_products(
    org,
    products,
    allow_teams,
    ext_teams,
    deny_teams,
    fail_fast=False
):
    """Resolve a github repo for each product and vet its team membership.

    Returns (resolved_products, problems): product dicts augmented with
    'repo' and 'v' (external-team flag) keys, plus accumulated errors.
    """
    debug("allowed teams: {allow}".format(allow=allow_teams))
    debug("external teams: {ext}".format(ext=ext_teams))
    debug("denied teams: {deny}".format(deny=deny_teams))

    resolved_products = {}
    problems = []

    def _record(err):
        # accumulate and report a non-fatal per-product error
        problems.append(err)
        error(err)

    for product, pdata in products.items():
        debug("looking for git repo for: {name} [{ver}]".format(
            name=product,
            ver=pdata['eups_version']
        ))

        try:
            repo = org.get_repo(product)
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = "error getting repo by name: {r}".format(r=product)
            yikes = pygithub.CaughtOrganizationError(org, e, msg)
            if fail_fast:
                raise yikes from None
            _record(yikes)
            continue

        debug(" found: {slug}".format(slug=repo.full_name))

        try:
            teams_on_repo = [t.name for t in repo.get_teams()]
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = 'error getting teams'
            yikes = pygithub.CaughtRepositoryError(repo, e, msg)
            if fail_fast:
                raise yikes from None
            _record(yikes)
            continue

        debug(" teams: {teams}".format(teams=teams_on_repo))

        try:
            pygithub.check_repo_teams(
                repo,
                allow_teams=allow_teams,
                deny_teams=deny_teams,
                team_names=teams_on_repo
            )
        except pygithub.RepositoryTeamMembershipError as e:
            if fail_fast:
                raise
            _record(e)
            continue

        is_external = any(x in teams_on_repo for x in ext_teams)
        debug(" external repo: {v}".format(v=is_external))

        resolved_products[product] = pdata.copy()
        resolved_products[product]['repo'] = repo
        resolved_products[product]['v'] = is_external

    if problems:
        error("{n} product(s) have error(s)".format(n=len(problems)))

    return resolved_products, problems
def run():
    """Fork repos (and optionally their teams) from one org into another."""
    args = parse_args()
    codetools.setup_logging(args.debug)

    global g
    g = pygithub.login_github(token_path=args.token_path, token=args.token)

    # protect destination org
    codetools.validate_org(args.dst_org)

    src_org = g.get_organization(args.src_org)
    dst_org = g.get_organization(args.dst_org)
    info("forking repos from: {org}".format(org=src_org.login))
    info(" to: {org}".format(org=dst_org.login))

    debug('looking for repos -- this can take a while for large orgs...')
    if args.team:
        debug('checking that selection team(s) exist')
        try:
            org_teams = list(src_org.get_teams())
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = 'error getting teams'
            raise pygithub.CaughtOrganizationError(src_org, e, msg) from None

        missing_teams = [
            n for n in args.team if n not in [t.name for t in org_teams]
        ]
        if missing_teams:
            error("{n} team(s) do not exist:".format(n=len(missing_teams)))
            for n in missing_teams:
                error(" '{t}'".format(t=n))
            return

        fork_teams = [t for t in org_teams if t.name in args.team]
        repos = pygithub.get_repos_by_team(fork_teams)
        debug('selecting repos by membership in team(s):')
        for t in fork_teams:
            debug(" '{t}'".format(t=t.name))
    else:
        # BUG FIX: this branch previously called
        # pygithub.get_repos_by_team(fork_teams), but fork_teams is only
        # bound in the `if args.team:` branch -> guaranteed NameError.
        # With no team selection, fork every repo in the source org.
        repos = src_org.get_repos()

    src_repos = list(itertools.islice(repos, args.limit))

    repo_count = len(src_repos)
    if not repo_count:
        debug('nothing to do -- exiting')
        return

    debug("found {n} repos to be forked from org {src_org}:".format(
        n=repo_count,
        src_org=src_org.login))
    for r in src_repos:
        debug(" {r}".format(r=r.full_name))

    if args.copy_teams:
        debug('checking source repo team membership...')
        # dict of repo and team objects, keyed by repo name
        src_rt = find_teams_by_repo(src_repos)
        # extract a non-duplicated list of team names from all repos being
        # forked as a dict, keyed by team name
        src_teams = find_used_teams(src_rt)

        debug('found {n} teams in use within org {o}:'.format(
            n=len(src_teams),
            o=src_org.login))
        for t in src_teams.keys():
            debug(" '{t}'".format(t=t))

        # check for conflicting teams in dst org before attempting to create
        # any forks so its possible to bail out before any resources have been
        # created.
        debug('checking teams in destination org')
        conflicting_teams = pygithub.get_teams_by_name(
            dst_org, list(src_teams.keys()))
        if conflicting_teams:
            raise TeamError(
                "found {n} conflicting teams in {o}: {teams}".format(
                    n=len(conflicting_teams),
                    o=dst_org.login,
                    teams=[t.name for t in conflicting_teams]))

    debug('there is no spoon...')
    problems = []

    pygithub.debug_ratelimit(g)
    dst_repos, skipped_repos, err = create_forks(
        dst_org, src_repos,
        fail_fast=args.fail_fast,
        dry_run=args.dry_run)
    if err:
        problems += err

    if args.copy_teams:
        # filter out repos which were skipped
        # dict of str(fork_repo.name): fork_repo
        dst_forks = dict((r.name, r) for r in dst_repos)
        bad_repos = dict((r.name, r) for r in skipped_repos)

        # dict of str(team.name): [repos] to be created
        dst_teams = {}
        for name, repos in src_teams.items():
            dst_teams[name] = [
                dst_forks[r.name] for r in repos
                if r.name not in bad_repos
            ]

        _, err = create_teams(
            dst_org, dst_teams,
            with_repos=True,
            fail_fast=args.fail_fast,
            dry_run=args.dry_run)
        if err:
            problems += err

    if problems:
        msg = "{n} errors forking repo(s)/teams(s)".format(n=len(problems))
        raise codetools.DogpileError(problems, msg)
def tag_products( products, fail_fast=False, dry_run=False, ): problems = [] for name, data in products.items(): repo = data['repo'] t_tag = data['target_tag'] info( textwrap.dedent("""\ tagging repo: {repo} @ sha: {sha} as {gt} (eups version: {et}) external repo: {v} replace existing tag: {update}\ """).format( repo=repo.full_name, sha=t_tag.sha, gt=t_tag.name, et=data['eups_version'], v=data['v'], update=data['update_tag'], )) if dry_run: info(' (noop)') continue try: tag_obj = repo.create_git_tag( t_tag.name, t_tag.message, t_tag.sha, 'commit', tagger=t_tag.tagger, ) debug(" created tag object {tag_obj}".format(tag_obj=tag_obj)) if data['update_tag']: ref = pygithub.find_tag_by_name( repo, t_tag.name, safe=False, ) ref.edit(tag_obj.sha, force=True) debug(" updated existing ref: {ref}".format(ref=ref)) else: ref = repo.create_git_ref("refs/tags/{t}".format(t=t_tag.name), tag_obj.sha) debug(" created ref: {ref}".format(ref=ref)) except github.RateLimitExceededException: raise except github.GithubException as e: msg = "error creating tag: {t}".format(t=t_tag.name) yikes = pygithub.CaughtRepositoryError(repo, e, msg) if fail_fast: raise yikes from None problems.append(yikes) error(yikes) if problems: msg = "{n} tag failures".format(n=len(problems)) raise codetools.DogpileError(problems, msg)
def run(): """Move the repos""" args = parse_args() codetools.setup_logging(args.debug) global g g = pygithub.login_github(token_path=args.token_path, token=args.token) org = g.get_organization(args.org) # only iterate over all teams once try: teams = list(org.get_teams()) except github.RateLimitExceededException: raise except github.GithubException as e: msg = 'error getting teams' raise pygithub.CaughtOrganizationError(org, e, msg) from None old_team = find_team(teams, args.oldteam) new_team = find_team(teams, args.newteam) move_me = args.repos debug(len(move_me), 'repos to be moved') added = [] removed = [] for name in move_me: try: r = org.get_repo(name) except github.RateLimitExceededException: raise except github.GithubException as e: msg = "error getting repo by name: {r}".format(r=name) raise pygithub.CaughtOrganizationError(org, e, msg) from None # Add team to the repo debug("Adding {repo} to '{team}' ...".format(repo=r.full_name, team=args.newteam)) if not args.dry_run: try: new_team.add_to_repos(r) added += r.full_name debug(' ok') except github.RateLimitExceededException: raise except github.GithubException as e: debug(' FAILED') if old_team.name in 'Owners': warn("Removing repo {repo} from team 'Owners' is not allowed". format(repo=r.full_name)) debug("Removing {repo} from '{team}' ...".format(repo=r.full_name, team=args.oldteam)) if not args.dry_run: try: old_team.remove_from_repos(r) removed += r.full_name debug(' ok') except github.RateLimitExceededException: raise except github.GithubException as e: debug(' FAILED') info('Added:', added) info('Removed:', removed)
def run():
    """Fork repos (and optionally their team memberships) from a source
    github org into a destination org.

    Raises
    ------
    TeamError
        If --copy-teams is requested and conflicting team names already
        exist in the destination org.
    codetools.DogpileError
        Aggregate of all per-repo/per-team failures.
    """
    args = parse_args()

    codetools.setup_logging(args.debug)

    global g
    g = pygithub.login_github(token_path=args.token_path, token=args.token)

    # protect destination org
    codetools.validate_org(args.dst_org)
    src_org = g.get_organization(args.src_org)
    dst_org = g.get_organization(args.dst_org)
    info("forking repos from: {org}".format(org=src_org.login))
    info(" to: {org}".format(org=dst_org.login))

    debug('looking for repos -- this can take a while for large orgs...')

    if args.team:
        debug('checking that selection team(s) exist')
        try:
            org_teams = list(src_org.get_teams())
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = 'error getting teams'
            raise pygithub.CaughtOrganizationError(src_org, e, msg) from None

        missing_teams = [n for n in args.team
                         if n not in [t.name for t in org_teams]]
        if missing_teams:
            error("{n} team(s) do not exist:".format(n=len(missing_teams)))
            for n in missing_teams:
                error(" '{t}'".format(t=n))
            return

        fork_teams = [t for t in org_teams if t.name in args.team]
        repos = pygithub.get_repos_by_team(fork_teams)
        debug('selecting repos by membership in team(s):')
        for t in fork_teams:
            debug(" '{t}'".format(t=t.name))
    else:
        # BUG FIX: the original referenced `fork_teams` here, but that name
        # is only bound in the `if args.team:` branch above -- this was a
        # guaranteed NameError. With no team filter, fork every repo in the
        # source org.
        repos = src_org.get_repos()

    src_repos = list(itertools.islice(repos, args.limit))

    repo_count = len(src_repos)
    if not repo_count:
        debug('nothing to do -- exiting')
        return

    debug("found {n} repos to be forked from org {src_org}:".format(
        n=repo_count,
        src_org=src_org.login
    ))
    for r in src_repos:
        debug(" {r}".format(r=r.full_name))

    if args.copy_teams:
        debug('checking source repo team membership...')
        # dict of repo and team objects, keyed by repo name
        src_rt = find_teams_by_repo(src_repos)
        # extract a non-duplicated list of team names from all repos being
        # forked as a dict, keyed by team name
        src_teams = find_used_teams(src_rt)
        debug('found {n} teams in use within org {o}:'.format(
            n=len(src_teams),
            o=src_org.login
        ))
        for t in src_teams.keys():
            debug(" '{t}'".format(t=t))

        # check for conflicting teams in dst org before attempting to create
        # any forks so its possible to bail out before any resources have
        # been created.
        debug('checking teams in destination org')
        conflicting_teams = pygithub.get_teams_by_name(
            dst_org,
            list(src_teams.keys())
        )
        if conflicting_teams:
            raise TeamError(
                "found {n} conflicting teams in {o}: {teams}".format(
                    n=len(conflicting_teams),
                    o=dst_org.login,
                    teams=[t.name for t in conflicting_teams]
                ))

    debug('there is no spoon...')
    problems = []
    pygithub.debug_ratelimit(g)
    dst_repos, skipped_repos, err = create_forks(
        dst_org, src_repos,
        fail_fast=args.fail_fast,
        dry_run=args.dry_run
    )
    if err:
        problems += err

    if args.copy_teams:
        # filter out repos which were skipped
        # dict of str(fork_repo.name): fork_repo
        dst_forks = dict((r.name, r) for r in dst_repos)
        bad_repos = dict((r.name, r) for r in skipped_repos)

        # dict of str(team.name): [repos] to be created
        dst_teams = {}
        for name, repos in src_teams.items():
            dst_teams[name] = [dst_forks[r.name] for r in repos
                               if r.name not in bad_repos]

        _, err = create_teams(
            dst_org, dst_teams,
            with_repos=True,
            fail_fast=args.fail_fast,
            dry_run=args.dry_run
        )
        if err:
            problems += err

    if problems:
        msg = "{n} errors forking repo(s)/teams(s)".format(
            n=len(problems))
        raise codetools.DogpileError(problems, msg)
def check_existing_git_tag(repo, t_tag, **kwargs): """ Check for a pre-existng tag in the github repo. Parameters ---------- repo : github.Repository.Repository repo to inspect for an existing tagsdf t_tag: codekit.pygithub.TargetTag dict repesenting a target git tag Returns ------- insync : `bool` True if tag exists and is in sync. False if tag does not exist. Raises ------ GitTagExistsError If tag exists but is not in sync. """ assert isinstance(repo, github.Repository.Repository), type(repo) assert isinstance(t_tag, codekit.pygithub.TargetTag), type(t_tag) debug("looking for existing tag: {tag} in repo: {repo}".format( repo=repo.full_name, tag=t_tag.name, )) # find ref/tag by name e_ref = pygithub.find_tag_by_name(repo, t_tag.name) if not e_ref: debug(" not found: {tag}".format(tag=t_tag.name)) return False # find tag object pointed to by the ref try: e_tag = repo.get_git_tag(e_ref.object.sha) except github.RateLimitExceededException: raise except github.GithubException as e: msg = "error getting tag: {tag} [{sha}]".format( tag=e_tag.tag, sha=e_tag.sha, ) raise pygithub.CaughtRepositoryError(repo, e, msg) from None debug(" found existing: {tag} [{sha}]".format( tag=e_tag.tag, sha=e_tag.sha, )) if cmp_existing_git_tag(t_tag, e_tag, **kwargs): return True yikes = GitTagExistsError( textwrap.dedent("""\ tag: {tag} already exists in repo: {repo} with conflicting values: existing: sha: {e_sha} message: {e_message} tagger: {e_tagger} target: sha: {t_sha} message: {t_message} tagger: {t_tagger}\ """).format( tag=t_tag.name, repo=repo.full_name, e_sha=e_tag.object.sha, e_message=e_tag.message, e_tagger=e_tag.tagger, t_sha=t_tag.sha, t_message=t_tag.message, t_tagger=t_tag.tagger, )) raise yikes
def run(): """Move the repos""" args = parse_args() codetools.setup_logging(args.debug) global g g = pygithub.login_github(token_path=args.token_path, token=args.token) org = g.get_organization(args.org) # only iterate over all teams once try: teams = list(org.get_teams()) except github.RateLimitExceededException: raise except github.GithubException as e: msg = 'error getting teams' raise pygithub.CaughtOrganizationError(org, e, msg) from None old_team = find_team(teams, args.oldteam) new_team = find_team(teams, args.newteam) move_me = args.repos debug(len(move_me), 'repos to be moved') added = [] removed = [] for name in move_me: try: r = org.get_repo(name) except github.RateLimitExceededException: raise except github.GithubException as e: msg = "error getting repo by name: {r}".format(r=name) raise pygithub.CaughtOrganizationError(org, e, msg) from None # Add team to the repo debug("Adding {repo} to '{team}' ...".format( repo=r.full_name, team=args.newteam )) if not args.dry_run: try: new_team.add_to_repos(r) added += r.full_name debug(' ok') except github.RateLimitExceededException: raise except github.GithubException as e: debug(' FAILED') if old_team.name in 'Owners': warn("Removing repo {repo} from team 'Owners' is not allowed" .format(repo=r.full_name)) debug("Removing {repo} from '{team}' ...".format( repo=r.full_name, team=args.oldteam )) if not args.dry_run: try: old_team.remove_from_repos(r) removed += r.full_name debug(' ok') except github.RateLimitExceededException: raise except github.GithubException as e: debug(' FAILED') info('Added:', added) info('Removed:', removed)
def create_teams( org, teams, with_repos=False, ignore_existing=False, fail_fast=False, dry_run=False ): assert isinstance(org, github.Organization.Organization), type(org) assert isinstance(teams, dict), type(teams) # it takes fewer api calls to create team(s) with an explicit list of # members after all repos have been forked but this blows up if the team # already exists. debug("creating teams in {org}".format(org=org.login)) # dict of dst org teams keyed by name (str) with team object as value dst_teams = {} problems = [] batch_repos = 50 for name, repos in teams.items(): pygithub.debug_ratelimit(g) debug("creating team {o}/'{t}'".format( o=org.login, t=name )) if dry_run: debug(' (noop)') continue dst_t = None try: if with_repos: debug(" with {n} member repos:".format(n=len(repos))) [debug(" {r}".format(r=r.full_name)) for r in repos] leftover_repos = repos[batch_repos:] if leftover_repos: debug(" creating team with first {b} of {n} repos" .format( b=batch_repos, n=len(repos) )) dst_t = org.create_team(name, repo_names=repos[:batch_repos]) if leftover_repos: # add any repos over the batch limit individually to team for r in leftover_repos: debug(" adding repo {r}".format(r=r.full_name)) dst_t.add_to_repos(r) else: dst_t = org.create_team(name) except github.RateLimitExceededException: raise except github.GithubException as e: # if the error is for any cause other than the team already # existing, puke. team_exists = False if ignore_existing and 'errors' in e.data: for oops in e.data['errors']: msg = oops['message'] if 'Name has already been taken' in msg: # find existing team dst_t = pygithub.get_teams_by_name(org, name)[0] team_exists = True if not (ignore_existing and team_exists): msg = "error creating team: {t}".format(t=name) yikes = pygithub.CaughtOrganizationError(org, e, msg) if fail_fast: raise yikes from None problems.append(yikes) error(yikes) break else: dst_teams[dst_t.name] = dst_t return dst_teams, problems