def delete_repos(repos, fail_fast=False, dry_run=False, delay=0):
    assert isinstance(repos, list), type(repos)

    problems = []
    for r in repos:
        assert isinstance(r, github.Repository.Repository), type(r)

        if delay:
            sleep(delay)

        try:
            info("deleting: {r}".format(r=r.full_name))
            if dry_run:
                info('  (noop)')
                continue
            r.delete()
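        # RateLimitExceededException is a subclass of GithubException, so it
        # is caught and re-raised explicitly here rather than being swallowed
        # by the broader handler below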
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = 'FAILED - does your token have delete_repo scope?'
            yikes = pygithub.CaughtRepositoryError(r, e, msg)
            if fail_fast:
                raise yikes from None
            problems.append(yikes)
            error(yikes)

    return problems
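A minimal driver for delete_repos might look like the sketch below; it assumes a client from pygithub.login_github() and caps the candidate list with itertools.islice. The token path and org name are placeholders, not values from this project.

import itertools

g = pygithub.login_github(token_path='~/.github_token')  # placeholder path
org = g.get_organization('example-org')                  # placeholder org
repos = list(itertools.islice(org.get_repos(), 5))

# preview what would be deleted, then delete for real with a 1s delay per repo
delete_repos(repos, dry_run=True)
problems = delete_repos(repos, fail_fast=False, delay=1)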
Example #2
def run():
    args = parse_args()

    codetools.setup_logging(args.debug)

    global g
    g = pygithub.login_github(token_path=args.token_path, token=args.token)
    codetools.validate_org(args.org)
    org = g.get_organization(args.org)

    # list of exceptions
    problems = []

    if args.delete_repos:
        problems += delete_all_repos(org,
                                     fail_fast=args.fail_fast,
                                     limit=args.delete_repos_limit,
                                     dry_run=args.dry_run)

    if args.delete_teams:
        problems += delete_all_teams(org,
                                     fail_fast=args.fail_fast,
                                     limit=args.delete_teams_limit,
                                     dry_run=args.dry_run)

    if problems:
        msg = "{n} errors removing repo(s)/teams(s)".format(n=len(problems))
        raise codetools.DogpileError(problems, msg)

    info("Consider deleting your privileged auth token @ {path}".format(
        path=args.token_path))
Example #4
def run():
    args = parse_args()

    codetools.setup_logging(args.debug)

    gh_org_name = args.org
    tags = args.tag

    git_email = codetools.lookup_email(args)
    git_user = codetools.lookup_user(args)

    tagger = github.InputGitAuthor(
        git_user,
        git_email,
        codetools.current_timestamp()
    )
    debug(tagger)

    global g
    g = pygithub.login_github(token_path=args.token_path, token=args.token)
    org = g.get_organization(gh_org_name)
    info("tagging repos in org: {org}".format(org=org.login))

    tag_teams = get_candidate_teams(org, args.allow_team)
    target_repos = get_candidate_repos(tag_teams)

    problems = []
    # do not fail-fast on non-write operations
    problems += check_repos(
        target_repos,
        args.allow_team,
        args.deny_team,
        fail_fast=False,
    )

    # existing tags are always ignored (not an error) under --delete
    ignore_existing = True if args.delete else args.ignore_existing_tag

    # do not fail-fast on non-write operations
    present_tags, absent_tags, err = check_tags(
        target_repos,
        tags,
        ignore_existing=ignore_existing,
        fail_fast=False,
    )
    problems += err

    if problems:
        msg = "{n} repo(s) have errors".format(n=len(problems))
        raise codetools.DogpileError(problems, msg)

    if args.delete:
        untag_repos(present_tags, dry_run=args.dry_run)
    else:
        tag_repos(absent_tags, tagger=tagger, dry_run=args.dry_run)
Example #5
def run():
    args = parse_args()

    codetools.setup_logging(args.debug)

    global g
    g = pygithub.login_github(token_path=args.token_path, token=args.token)
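    # Github.rate_limiting is a (remaining, limit) tuple;
    # rate_limiting_resettime is a unix timestamp for when the window resets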
    info("github ratelimit: {rl}".format(rl=g.rate_limiting))

    reset = datetime.datetime.fromtimestamp(int(g.rate_limiting_resettime))
    info("github ratelimit reset: {time}".format(time=reset))
Example #6
def delete_all_teams(org, **kwargs):
    assert isinstance(org, github.Organization.Organization), type(org)
    limit = kwargs.pop('limit', None)

    try:
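        # islice() with stop=None imposes no limit, so limit=None walks the
        # entire paginated team listing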
        teams = list(itertools.islice(org.get_teams(), limit))
    except github.RateLimitExceededException:
        raise
    except github.GithubException as e:
        msg = 'error getting teams'
        raise pygithub.CaughtOrganizationError(org, e, msg) from None

    info("found {n} teams in {org}".format(n=len(teams), org=org.login))
    [debug("  '{t}'".format(t=t.name)) for t in teams]

    if teams:
        warn("Deleting all teams in {org}".format(org=org.login))
        pbar.wait_for_user_panic_once()

    return delete_teams(teams, **kwargs)
Example #8
def delete_all_repos(org, **kwargs):
    assert isinstance(org, github.Organization.Organization), type(org)
    limit = kwargs.pop('limit', None)

    try:
        repos = list(itertools.islice(org.get_repos(), limit))
    except github.RateLimitExceededException:
        raise
    except github.GithubException as e:
        msg = 'error getting repos'
        raise pygithub.CaughtOrganizationError(org, e, msg) from None

    info("found {n} repos in {org}".format(n=len(repos), org=org.login))
    [debug("  {r}".format(r=r.full_name)) for r in repos]

    if repos:
        warn("Deleting all repos in {org}".format(org=org.login))
        pbar.wait_for_user_panic_once()

    return delete_repos(repos, **kwargs)
Example #10
def get_candidate_repos(teams):
    # flatten generator to list so it can be iterated over multiple times
    repos = list(pygithub.get_repos_by_team(teams))

    # find length of longest repo name to nicely format output
    names = [r.full_name for r in repos]
    max_name_len = len(max(names, key=len))

    team_names = [t.name for t in teams]
    info("found {n} repo(s) [selected by team(s)]:".format(n=len(repos)))
    for r in repos:
        # list only teams which were used to select the repo as a candidate
        # for tagging
        s_teams = [t.name for t in find_repo_teams(r)
                   if t.name in team_names]
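        # '{repo: >{w}}' right-aligns each repo name in a field as wide as the
        # longest name, so the team lists line up in a column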
        info("  {repo: >{w}} {teams}".format(
            w=max_name_len,
            repo=r.full_name,
            teams=s_teams)
        )

    return repos
Example #11
def delete_teams(teams, fail_fast=False, dry_run=False, delay=0):
    assert isinstance(teams, list), type(teams)

    problems = []
    for t in teams:
        if delay:
            sleep(delay)

        try:
            info("deleting team: '{t}'".format(t=t.name))
            if dry_run:
                info('  (noop)')
                continue
            t.delete()
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            yikes = pygithub.CaughtTeamError(t, e)
            if fail_fast:
                raise yikes from None
            problems.append(yikes)
            error(yikes)

    return problems
Example #14
def tag_repos(absent_tags, **kwargs):
    if not absent_tags:
        info('nothing to do')
        return

    info("tagging {n} repo(s) [tags]:".format(n=len(absent_tags)))

    max_name_len = len(max(absent_tags, key=len))
    for k in absent_tags:
        info("  {repo: >{w}} {tags}".format(
            w=max_name_len,
            repo=k,
            tags=absent_tags[k]['need_tags']
        ))

    for k in absent_tags:
        r = absent_tags[k]['repo']
        tags = absent_tags[k]['need_tags']
        create_tags(r, tags, **kwargs)
Example #15
def untag_repos(present_tags, **kwargs):
    if not present_tags:
        info('nothing to do')
        return

    warn('Deleting tag(s)')
    pbar.wait_for_user_panic_once()

    info("untagging {n} repo(s) [tags]:".format(n=len(present_tags)))

    max_name_len = len(max(present_tags, key=len))
    for k in present_tags:
        info("  {repo: >{w}} {tags}".format(
            w=max_name_len,
            repo=k,
            tags=[tag_name_from_ref(ref) for ref in present_tags[k]['tags']]
        ))

    for k in present_tags:
        r = present_tags[k]['repo']
        tags = present_tags[k]['tags']
        delete_refs(r, tags, **kwargs)
Example #16
def run():
    """Move the repos"""
    args = parse_args()

    codetools.setup_logging(args.debug)

    global g
    g = pygithub.login_github(token_path=args.token_path, token=args.token)
    org = g.get_organization(args.org)

    # only iterate over all teams once
    try:
        teams = list(org.get_teams())
    except github.RateLimitExceededException:
        raise
    except github.GithubException as e:
        msg = 'error getting teams'
        raise pygithub.CaughtOrganizationError(org, e, msg) from None

    old_team = find_team(teams, args.oldteam)
    new_team = find_team(teams, args.newteam)

    move_me = args.repos
    debug("{n} repos to be moved".format(n=len(move_me)))

    added = []
    removed = []
    for name in move_me:
        try:
            r = org.get_repo(name)
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = "error getting repo by name: {r}".format(r=name)
            raise pygithub.CaughtOrganizationError(org, e, msg) from None

        # Add team to the repo
        debug("Adding {repo} to '{team}' ...".format(
            repo=r.full_name,
            team=args.newteam
        ))

        if not args.dry_run:
            try:
                new_team.add_to_repos(r)
                added.append(r.full_name)
                debug('  ok')
            except github.RateLimitExceededException:
                raise
            except github.GithubException as e:
                debug("  FAILED: {e}".format(e=e))

        if old_team.name == 'Owners':
            warn("Removing repo {repo} from team 'Owners' is not allowed"
                 .format(repo=r.full_name))

        debug("Removing {repo} from '{team}' ...".format(
            repo=r.full_name,
            team=args.oldteam
        ))

        if not args.dry_run:
            try:
                old_team.remove_from_repos(r)
                removed.append(r.full_name)
                debug('  ok')
            except github.RateLimitExceededException:
                raise
            except github.GithubException as e:
                debug("  FAILED: {e}".format(e=e))

    info("Added: {a}".format(a=added))
    info("Removed: {r}".format(r=removed))
Example #18
def tag_products(
    products,
    fail_fast=False,
    dry_run=False,
):
    problems = []
    for name, data in products.items():
        repo = data['repo']
        t_tag = data['target_tag']

        info(
            textwrap.dedent("""\
            tagging repo: {repo} @
              sha: {sha} as {gt}
              (eups version: {et})
              external repo: {v}
              replace existing tag: {update}\
            """).format(
                repo=repo.full_name,
                sha=t_tag.sha,
                gt=t_tag.name,
                et=data['eups_version'],
                v=data['v'],
                update=data['update_tag'],
            ))

        if dry_run:
            info('  (noop)')
            continue

        try:
            tag_obj = repo.create_git_tag(
                t_tag.name,
                t_tag.message,
                t_tag.sha,
                'commit',
                tagger=t_tag.tagger,
            )
            debug("  created tag object {tag_obj}".format(tag_obj=tag_obj))

            if data['update_tag']:
                ref = pygithub.find_tag_by_name(
                    repo,
                    t_tag.name,
                    safe=False,
                )
                ref.edit(tag_obj.sha, force=True)
                debug("  updated existing ref: {ref}".format(ref=ref))
            else:
                ref = repo.create_git_ref("refs/tags/{t}".format(t=t_tag.name),
                                          tag_obj.sha)
                debug("  created ref: {ref}".format(ref=ref))
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = "error creating tag: {t}".format(t=t_tag.name)
            yikes = pygithub.CaughtRepositoryError(repo, e, msg)
            if fail_fast:
                raise yikes from None
            problems.append(yikes)
            error(yikes)

    if problems:
        msg = "{n} tag failures".format(n=len(problems))
        raise codetools.DogpileError(problems, msg)
Example #19
def run():
    """Create the tag"""
    args = parse_args()

    codetools.setup_logging(args.debug)

    git_tag = args.tag

    # if email not specified, try getting it from the gitconfig
    git_email = codetools.lookup_email(args)
    # ditto for the name of the git user
    git_user = codetools.lookup_user(args)

    # The default eups tag is derived from the git tag, otherwise specified
    # with the --eups-tag option. The reason to currently do this is that for
    # weeklies and other internal builds, it's okay to eups publish the weekly
    # and git tag post-facto. However for official releases, we don't want to
    # publish until the git tag goes down, because we want to eups publish the
    # build that has the official versions in the eups ref.
    if not args.manifest_only:
        eups_tag = args.eups_tag
        if not eups_tag:
            # generate eups-style version
            eups_tag = eups.git_tag2eups_tag(git_tag)
        debug("using eups tag: {eups_tag}".format(eups_tag=eups_tag))

    # sadly we need to "just" know this
    # XXX this can be parsed from the eups tag file post d_2018_05_08
    manifest = args.manifest
    debug("using manifest: {manifest}".format(manifest=manifest))

    if not args.manifest_only:
        # release from eups tag
        message_template = "Version {{git_tag}}"\
            " release from {eups_tag}/{manifest}".format(
                eups_tag=eups_tag,
                manifest=manifest,
            )
    else:
        # release from manifest only
        message_template = "Version {{git_tag}}"\
            " release from manifest {manifest}".format(
                manifest=manifest,
            )

    debug("using tag message: {msg}".format(msg=message_template))

    tagger = github.InputGitAuthor(
        git_user,
        git_email,
        codetools.current_timestamp(),
    )
    debug("using taggger: {tagger}".format(tagger=tagger))

    global g
    g = pygithub.login_github(token_path=args.token_path, token=args.token)
    org = g.get_organization(args.org)
    info("tagging repos in org: {org}".format(org=org.login))

    problems = []

    manifest_products = versiondb.Manifest(
        manifest,
        base_url=args.versiondb_base_url).products

    if not args.manifest_only:
        # cross-reference eups tag version strings with manifest
        eups_products = eups.EupsTag(
            eups_tag,
            base_url=args.eupstag_base_url).products

        # do not fail-fast on non-write operations
        products, err = cross_reference_products(
            eups_products,
            manifest_products,
            ignore_manifest_versions=args.ignore_manifest_versions,
            fail_fast=False,
        )
        problems += err
    else:
        # no eups tag; use manifest products without sanity check against eups
        # tag version strings
        products = manifest_products

    if args.limit:
        products = dict(itertools.islice(products.items(), args.limit))

    # do not fail-fast on non-write operations
    products, err = get_repo_for_products(
        org=org,
        products=products,
        allow_teams=args.allow_team,
        ext_teams=args.external_team,
        deny_teams=args.deny_team,
        fail_fast=False,
    )
    problems += err

    # do not fail-fast on non-write operations
    products_to_tag, err = check_product_tags(
        products,
        git_tag,
        tag_message_template=message_template,
        tagger=tagger,
        force_tag=args.force_tag,
        fail_fast=False,
        ignore_git_message=args.ignore_git_message,
        ignore_git_tagger=args.ignore_git_tagger,
    )
    problems += err

    if args.verify:
        # in verify mode, it is an error if there are products that need to be
        # tagged.
        err = identify_products_missing_tags(products_to_tag)
        problems += err

    if problems:
        msg = "{n} pre-flight error(s)".format(n=len(problems))
        raise codetools.DogpileError(problems, msg)

    tag_products(
        products_to_tag,
        fail_fast=args.fail_fast,
        dry_run=args.dry_run,
    )
Example #21
def run():
    args = parse_args()

    codetools.setup_logging(args.debug)

    global g
    g = pygithub.login_github(token_path=args.token_path, token=args.token)

    # protect destination org
    codetools.validate_org(args.dst_org)
    src_org = g.get_organization(args.src_org)
    dst_org = g.get_organization(args.dst_org)
    info("forking repos from: {org}".format(org=src_org.login))
    info("                to: {org}".format(org=dst_org.login))

    debug('looking for repos -- this can take a while for large orgs...')
    if args.team:
        debug('checking that selection team(s) exist')
        try:
            org_teams = list(src_org.get_teams())
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = 'error getting teams'
            raise pygithub.CaughtOrganizationError(src_org, e, msg) from None

        missing_teams = [n for n in args.team if n not in
                         [t.name for t in org_teams]]
        if missing_teams:
            error("{n} team(s) do not exist:".format(n=len(missing_teams)))
            [error("  '{t}'".format(t=n)) for n in missing_teams]
            return
        fork_teams = [t for t in org_teams if t.name in args.team]
        repos = pygithub.get_repos_by_team(fork_teams)
        debug('selecting repos by membership in team(s):')
        [debug("  '{t}'".format(t=t.name)) for t in fork_teams]
    else:
        # no team filter -- select every repo in the source org
        repos = src_org.get_repos()

    src_repos = list(itertools.islice(repos, args.limit))

    repo_count = len(src_repos)
    if not repo_count:
        debug('nothing to do -- exiting')
        return

    debug("found {n} repos to be forked from org {src_org}:".format(
        n=repo_count,
        src_org=src_org.login
    ))
    [debug("  {r}".format(r=r.full_name)) for r in src_repos]

    if args.copy_teams:
        debug('checking source repo team membership...')
        # dict of repo and team objects, keyed by repo name
        src_rt = find_teams_by_repo(src_repos)

        # extract a non-duplicated list of team names from all repos being
        # forked as a dict, keyed by team name
        src_teams = find_used_teams(src_rt)

        debug('found {n} teams in use within org {o}:'.format(
            n=len(src_teams),
            o=src_org.login
        ))
        [debug("  '{t}'".format(t=t)) for t in src_teams.keys()]

        # check for conflicting teams in dst org before attempting to create
        # any forks so it's possible to bail out before any resources have been
        # created.
        debug('checking teams in destination org')
        conflicting_teams = pygithub.get_teams_by_name(
            dst_org,
            list(src_teams.keys())
        )
        if conflicting_teams:
            raise TeamError(
                "found {n} conflicting teams in {o}: {teams}".format(
                    n=len(conflicting_teams),
                    o=dst_org.login,
                    teams=[t.name for t in conflicting_teams]
                ))

    debug('there is no spoon...')
    problems = []
    pygithub.debug_ratelimit(g)
    dst_repos, skipped_repos, err = create_forks(
        dst_org,
        src_repos,
        fail_fast=args.fail_fast,
        dry_run=args.dry_run
    )
    if err:
        problems += err

    if args.copy_teams:
        # filter out repos which were skipped
        # dict of str(fork_repo.name): fork_repo
        dst_forks = dict((r.name, r) for r in dst_repos)
        bad_repos = dict((r.name, r) for r in skipped_repos)
        # dict of str(team.name): [repos] to be created
        dst_teams = {}
        for name, repos in src_teams.items():
            dst_teams[name] = [dst_forks[r.name] for r in repos
                               if r.name not in bad_repos]

        _, err = create_teams(
            dst_org,
            dst_teams,
            with_repos=True,
            fail_fast=args.fail_fast,
            dry_run=args.dry_run
        )
        if err:
            problems += err

    if problems:
        msg = "{n} errors forking repo(s)/teams(s)".format(
            n=len(problems))
        raise codetools.DogpileError(problems, msg)