Example #1
def identify_products_missing_tags(products_to_tag):
    problems = []
    for name, data in products_to_tag.items():
        repo = data['repo']
        t_tag = data['target_tag']

        yikes = GitTagMissingError(
            textwrap.dedent("""\
            tag: {gt} missing from repo: {repo} @
              sha: {sha}
              (eups version: {et})
              external repo: {v}
              replace existing tag: {update}\
            """).format(
                repo=repo.full_name,
                sha=t_tag.sha,
                gt=t_tag.name,
                et=data['eups_version'],
                v=data['v'],
                update=data['update_tag'],
            ))
        problems.append(yikes)
        error(yikes)

    if problems:
        error("{n} product(s) have error(s)".format(n=len(problems)))

    return problems
def identify_products_missing_tags(products_to_tag):
    problems = []
    for name, data in products_to_tag.items():
        repo = data['repo']
        t_tag = data['target_tag']

        yikes = GitTagMissingError(textwrap.dedent("""\
            tag: {gt} missing from repo: {repo} @
              sha: {sha}
              (eups version: {et})
              external repo: {v}
              replace existing tag: {update}\
            """).format(
            repo=repo.full_name,
            sha=t_tag.sha,
            gt=t_tag.name,
            et=data['eups_version'],
            v=data['v'],
            update=data['update_tag'],
        ))
        problems.append(yikes)
        error(yikes)

    if problems:
        error("{n} product(s) have error(s)".format(n=len(problems)))

    return problems
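A minimal sketch of the input this function expects, based on the keys read inside the loop above. The token, repository name, sha, and version strings are placeholders, and TargetTag is assumed to be the namedtuple from codekit.pygithub used in the other examples.

import github
from codekit import pygithub

g = github.Github("<token>")                      # placeholder token
repo = g.get_repo("example-org/afw")              # placeholder repository
t_tag = pygithub.TargetTag(
    name='19.0.0',
    sha='0123456789abcdef0123456789abcdef01234567',   # placeholder sha
    message='Version 19.0.0',
    tagger=github.InputGitAuthor('Jane Doe', 'jdoe@example.org'),
)

products_to_tag = {
    'afw': {
        'repo': repo,              # github.Repository.Repository
        'target_tag': t_tag,
        'eups_version': '19.0.0',
        'v': False,                # True if the repo belongs to an external team
        'update_tag': False,       # True if an existing tag would be moved
    },
}

problems = identify_products_missing_tags(products_to_tag)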
def delete_repos(repos, fail_fast=False, dry_run=False, delay=0):
    assert isinstance(repos, list), type(repos)

    problems = []
    for r in repos:
        assert isinstance(r, github.Repository.Repository), type(r)

        if delay:
            sleep(delay)

        try:
            info("deleting: {r}".format(r=r.full_name))
            if dry_run:
                info('  (noop)')
                continue
            r.delete()
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = 'FAILED - does your token have delete_repo scope?'
            yikes = pygithub.CaughtRepositoryError(r, e, msg)
            if fail_fast:
                raise yikes from None
            problems.append(yikes)
            error(yikes)

    return problems
Example #4
def delete_repos(repos, fail_fast=False, dry_run=False, delay=0):
    assert isinstance(repos, list), type(repos)

    problems = []
    for r in repos:
        assert isinstance(r, github.Repository.Repository), type(r)

        if delay:
            sleep(delay)

        try:
            info("deleting: {r}".format(r=r.full_name))
            if dry_run:
                info('  (noop)')
                continue
            r.delete()
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = 'FAILED - does your token have delete_repo scope?'
            yikes = pygithub.CaughtRepositoryError(r, e, msg)
            if fail_fast:
                raise yikes from None
            problems.append(yikes)
            error(yikes)

    return problems
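A hypothetical dry-run invocation of delete_repos(); the token and organization name are placeholders, and dry_run=True means nothing is actually deleted.

import itertools
import github

g = github.Github("<token>")                      # placeholder token
org = g.get_organization("example-org")           # placeholder org
repos = list(itertools.islice(org.get_repos(), 3))

problems = delete_repos(repos, dry_run=True, delay=1)
if problems:
    print("{n} repo(s) could not be deleted".format(n=len(problems)))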
Example #5
def create_forks(dst_org, src_repos, fail_fast=False, dry_run=False):
    assert isinstance(dst_org, github.Organization.Organization),\
        type(dst_org)
    assert isinstance(src_repos, list), type(src_repos)

    repo_count = len(src_repos)

    dst_repos = []
    skipped_repos = []
    problems = []
    with pbar.eta_bar(msg='forking', max_value=repo_count) as progress:
        repo_idx = 0
        for r in src_repos:
            progress.update(repo_idx)
            repo_idx += 1

            # XXX per
            # https://developer.github.com/v3/repos/forks/#create-a-fork
            # fork creation is async and pygithub doesn't appear to wait.
            # https://github.com/PyGithub/PyGithub/blob/c44469965e4ea368b78c4055a8afcfcf08314585/github/Organization.py#L321-L336
            # so it's possible that this may fail in some strange way such as
            # not returning all repo data, but it hasn't yet been observed.

            # get current time before API call in case fork creation is slow.
            now = datetime.datetime.now()

            debug("forking {r}".format(r=r.full_name))
            if dry_run:
                debug('  (noop)')
                continue

            try:
                fork = dst_org.create_fork(r)
                dst_repos.append(fork)
                debug("  -> {r}".format(r=fork.full_name))
            except github.RateLimitExceededException:
                raise
            except github.GithubException as e:
                if 'Empty repositories cannot be forked.' in e.data['message']:
                    warn("{r} is empty and can not be forked".format(
                        r=r.full_name))
                    skipped_repos.append(r)
                    continue

                msg = "error forking repo {r}".format(r=r.full_name)
                yikes = pygithub.CaughtOrganizationError(dst_org, e, msg)
                if fail_fast:
                    raise yikes from None
                problems.append(yikes)
                error(yikes)
                # `fork` was never assigned for this repo; skip the
                # created_at check below
                continue

            if fork.created_at < now:
                warn("fork of {r} already exists\n  created_at {ctime}".format(
                    r=fork.full_name, ctime=fork.created_at))

    return dst_repos, skipped_repos, problems
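A hypothetical invocation of create_forks(); the org names and token are placeholders, and dry_run=True keeps the call side-effect free.

import itertools
import github

g = github.Github("<token>")                      # placeholder token
src_org = g.get_organization("example-src-org")   # placeholder orgs
dst_org = g.get_organization("example-dst-org")
src_repos = list(itertools.islice(src_org.get_repos(), 5))

dst_repos, skipped_repos, problems = create_forks(
    dst_org,
    src_repos,
    fail_fast=False,
    dry_run=True,
)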
Example #6
def check_tags(repos, tags, ignore_existing=False, fail_fast=False):
    """ check if tags already exist in repos"""

    debug("looking for {n} tag(s):".format(n=len(tags)))
    [debug("  {t}".format(t=t)) for t in tags]
    debug("in {n} repo(s):".format(n=len(repos)))
    [debug("  {r}".format(r=r.full_name)) for r in repos]

    # present/missing tags by repo name
    present_tags = {}
    absent_tags = {}

    problems = []
    for r in repos:
        has_tags = find_tags_in_repo(r, tags)
        if has_tags:
            if not ignore_existing:
                yikes = GitTagExistsError(
                    "tag(s) {tag} already exists in repos {r}".format(
                        tag=list(has_tags.keys()),
                        r=r.full_name
                    ))
                if fail_fast:
                    raise yikes
                problems.append(yikes)
                error(yikes)

            present_tags[r.full_name] = {
                'repo': r,
                'tags': list(has_tags.values()),
            }

        missing_tags = [x for x in tags if x not in has_tags]
        if missing_tags:
            absent_tags[r.full_name] = {
                'repo': r,
                'need_tags': missing_tags,
            }

    debug(textwrap.dedent("""\
        found:
          {n_with:>4} repos with tag(s)
          {n_none:>4} repos with no tag(s)
          {errors:>4} repos with error(s)\
        """).format(
        n_with=len(present_tags),
        n_none=len(absent_tags),
        errors=len(problems),
    ))

    return present_tags, absent_tags, problems
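A hypothetical call to check_tags(); repo and tag names are placeholders. With ignore_existing=True, tags that already exist are recorded rather than treated as errors.

import github

g = github.Github("<token>")                      # placeholder token
repos = [
    g.get_repo("example-org/repo-a"),             # placeholder repos
    g.get_repo("example-org/repo-b"),
]
tags = ['19.0.0', 'v19.0.0']                      # placeholder tag names

present, absent, problems = check_tags(repos, tags, ignore_existing=True)
for name, data in absent.items():
    print("{r} is missing tag(s): {t}".format(r=name, t=data['need_tags']))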
def main():
    try:
        try:
            run()
        except codetools.DogpileError as e:
            error(e)
            n = len(e.errors)
            sys.exit(n if n < 256 else 255)
        else:
            sys.exit(0)
        finally:
            if 'g' in globals():
                pygithub.debug_ratelimit(g)
    except SystemExit as e:
        debug("exit {status}".format(status=str(e)))
        raise e
Example #8
def main():
    try:
        try:
            run()
        except codetools.DogpileError as e:
            error(e)
            n = len(e.errors)
            sys.exit(n if n < 256 else 255)
        else:
            sys.exit(0)
        finally:
            if 'g' in globals():
                pygithub.debug_ratelimit(g)
    except SystemExit as e:
        debug("exit {status}".format(status=str(e)))
        raise e
Example #9
def check_repos(repos, allow_teams, deny_teams, fail_fast=False):
    problems = []
    for r in repos:
        try:
            pygithub.check_repo_teams(
                r,
                allow_teams=allow_teams,
                deny_teams=deny_teams,
                team_names=[t.name for t in find_repo_teams(r)],
            )
        except pygithub.RepositoryTeamMembershipError as e:
            if fail_fast:
                raise
            problems.append(e)
            error(e)

            continue

    return problems
def delete_teams(teams, fail_fast=False, dry_run=False, delay=0):
    assert isinstance(teams, list), type(teams)

    problems = []
    for t in teams:
        if delay:
            sleep(delay)

        try:
            info("deleting team: '{t}'".format(t=t.name))
            if dry_run:
                info('  (noop)')
                continue
            t.delete()
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            yikes = pygithub.CaughtTeamError(t, e)
            if fail_fast:
                raise yikes from None
            problems.append(yikes)
            error(yikes)

    return problems
Example #11
def delete_teams(teams, fail_fast=False, dry_run=False, delay=0):
    assert isinstance(teams, list), type(teams)

    problems = []
    for t in teams:
        if delay:
            sleep(delay)

        try:
            info("deleting team: '{t}'".format(t=t.name))
            if dry_run:
                info('  (noop)')
                continue
            t.delete()
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            yikes = pygithub.CaughtTeamError(t, e)
            if fail_fast:
                raise yikes from None
            problems.append(yikes)
            error(yikes)

    return problems
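A hypothetical dry-run call to delete_teams(); the org and team names are placeholders.

import github

g = github.Github("<token>")                      # placeholder token
org = g.get_organization("example-org")           # placeholder org
teams = [t for t in org.get_teams()
         if t.name in ('obsolete-team-a', 'obsolete-team-b')]

problems = delete_teams(teams, dry_run=True, delay=1)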
Example #12
def cross_reference_products(
    eups_products,
    manifest_products,
    ignore_manifest_versions=False,
    fail_fast=False,
):
    """
    Cross reference EupsTag and Manifest data and return a merged result

    Parameters
    ----------
    eups_products: dict
        eups tag product data, keyed by product name
    manifest_products: dict
        manifest product data, keyed by product name
    ignore_manifest_versions: bool
    fail_fast: bool

    Returns
    -------
    products: dict
    problems: list

    Raises
    ------
    RuntimeError
        Upon error if `fail_fast` is `True`.
    """
    products = {}

    problems = []
    for name, eups_data in eups_products.items():
        try:
            manifest_data = manifest_products[name]
        except KeyError:
            yikes = RuntimeError(
                textwrap.dedent("""\
                failed to find record in manifest for:
                  {product} {eups_version}\
                """).format(
                    product=name,
                    eups_version=eups_data['eups_version'],
                ))
            if fail_fast:
                raise yikes from None
            problems.append(yikes)
            error(yikes)
            # `manifest_data` is undefined for this product; move on
            continue

        if ignore_manifest_versions:
            # ignore the manifest eups_version string by simply setting it to
            # the eups tag value.  This ensures that the eups tag value will be
            # passed through.
            manifest_data = manifest_data.copy()
            manifest_data['eups_version'] = eups_data['eups_version']

        if eups_data['eups_version'] != manifest_data['eups_version']:
            yikes = RuntimeError(
                textwrap.dedent("""\
                eups version string mismatch:
                  eups tag: {product} {eups_eups_version}
                  manifest: {product} {manifest_eups_version}\
                """).format(
                    product=name,
                    eups_eups_version=eups_data['eups_version'],
                    manifest_eups_version=manifest_data['eups_version'],
                ))
            if fail_fast:
                raise yikes
            problems.append(yikes)
            error(yikes)

        products[name] = eups_data.copy()
        products[name].update(manifest_data)

    if problems:
        error("{n} product(s) have error(s)".format(n=len(problems)))

    return products, problems
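A sketch of the input shapes cross_reference_products() accepts: both mappings are keyed by product name and must carry an 'eups_version' entry; everything else is merged into the result via dict.update(). All values below are placeholders.

eups_products = {
    'afw': {'eups_version': '19.0.0', 'sha': 'deadbeef'},   # placeholder data
}
manifest_products = {
    'afw': {'eups_version': '19.0.0'},                       # placeholder data
}

products, problems = cross_reference_products(
    eups_products,
    manifest_products,
    ignore_manifest_versions=False,
    fail_fast=False,
)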
def tag_products(
    products,
    fail_fast=False,
    dry_run=False,
):
    problems = []
    for name, data in products.items():
        repo = data['repo']
        t_tag = data['target_tag']

        info(textwrap.dedent("""\
            tagging repo: {repo} @
              sha: {sha} as {gt}
              (eups version: {et})
              external repo: {v}
              replace existing tag: {update}\
            """).format(
            repo=repo.full_name,
            sha=t_tag.sha,
            gt=t_tag.name,
            et=data['eups_version'],
            v=data['v'],
            update=data['update_tag'],
        ))

        if dry_run:
            info('  (noop)')
            continue

        try:
            tag_obj = repo.create_git_tag(
                t_tag.name,
                t_tag.message,
                t_tag.sha,
                'commit',
                tagger=t_tag.tagger,
            )
            debug("  created tag object {tag_obj}".format(tag_obj=tag_obj))

            if data['update_tag']:
                ref = pygithub.find_tag_by_name(
                    repo,
                    t_tag.name,
                    safe=False,
                )
                ref.edit(tag_obj.sha, force=True)
                debug("  updated existing ref: {ref}".format(ref=ref))
            else:
                ref = repo.create_git_ref(
                    "refs/tags/{t}".format(t=t_tag.name),
                    tag_obj.sha
                )
                debug("  created ref: {ref}".format(ref=ref))
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = "error creating tag: {t}".format(t=t_tag.name)
            yikes = pygithub.CaughtRepositoryError(repo, e, msg)
            if fail_fast:
                raise yikes from None
            problems.append(yikes)
            error(yikes)

    if problems:
        msg = "{n} tag failures".format(n=len(problems))
        raise codetools.DogpileError(problems, msg)
Example #14
def get_repo_for_products(org,
                          products,
                          allow_teams,
                          ext_teams,
                          deny_teams,
                          fail_fast=False):
    debug("allowed teams: {allow}".format(allow=allow_teams))
    debug("external teams: {ext}".format(ext=ext_teams))
    debug("denied teams: {deny}".format(deny=deny_teams))

    resolved_products = {}

    problems = []
    for name, data in products.items():
        debug("looking for git repo for: {name} [{ver}]".format(
            name=name, ver=data['eups_version']))

        try:
            repo = org.get_repo(name)
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = "error getting repo by name: {r}".format(r=name)
            yikes = pygithub.CaughtOrganizationError(org, e, msg)
            if fail_fast:
                raise yikes from None
            problems.append(yikes)
            error(yikes)

            continue

        debug("  found: {slug}".format(slug=repo.full_name))

        try:
            repo_team_names = [t.name for t in repo.get_teams()]
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = 'error getting teams'
            yikes = pygithub.CaughtRepositoryError(repo, e, msg)
            if fail_fast:
                raise yikes from None
            problems.append(yikes)
            error(yikes)

            continue

        debug("  teams: {teams}".format(teams=repo_team_names))

        try:
            pygithub.check_repo_teams(repo,
                                      allow_teams=allow_teams,
                                      deny_teams=deny_teams,
                                      team_names=repo_team_names)
        except pygithub.RepositoryTeamMembershipError as e:
            if fail_fast:
                raise
            problems.append(e)
            error(e)

            continue

        has_ext_team = any(x in repo_team_names for x in ext_teams)
        debug("  external repo: {v}".format(v=has_ext_team))

        resolved_products[name] = data.copy()
        resolved_products[name]['repo'] = repo
        resolved_products[name]['v'] = has_ext_team

    if problems:
        error("{n} product(s) have error(s)".format(n=len(problems)))

    return resolved_products, problems
def check_product_tags(
    products,
    git_tag,
    tag_message_template,
    tagger,
    force_tag=False,
    fail_fast=False,
    ignore_git_message=False,
    ignore_git_tagger=False,
):
    assert isinstance(tagger, github.InputGitAuthor), type(tagger)

    checked_products = {}

    problems = []
    for name, data in products.items():
        repo = data['repo']
        tag_name = git_tag

        # prefix tag name with `v`?
        if data['v'] and re.match(r'\d', tag_name):
            tag_name = "v{git_tag}".format(git_tag=tag_name)

        # message can not be formatted until we've determined if the tag must
        # be prefixed.  The 'v' prefix appearing in the tag message is required
        # to match historical behavior and allow verification of past releases.
        message = tag_message_template.format(git_tag=tag_name)

        # "target tag"
        t_tag = codekit.pygithub.TargetTag(
            name=tag_name,
            sha=data['sha'],
            message=message,
            tagger=tagger,
        )

        # control whether to create a new tag or update an existing one
        update_tag = False

        try:
            # if the existing tag is in sync, do nothing
            if check_existing_git_tag(
                repo,
                t_tag,
                ignore_git_message=ignore_git_message,
                ignore_git_tagger=ignore_git_tagger,
            ):
                warn(textwrap.dedent("""\
                    No action for {repo}
                      existing tag: {tag} is already in sync\
                    """).format(
                    repo=repo.full_name,
                    tag=t_tag.name,
                ))

                continue
        except github.RateLimitExceededException:
            raise
        except GitTagExistsError as e:
            # if force_tag is set, and the tag already exists, set
            # update_tag and fall through. Otherwise, treat it as any other
            # exception.
            if force_tag:
                update_tag = True
                warn(textwrap.dedent("""\
                      existing tag: {tag} WILL BE MOVED\
                    """).format(
                    repo=repo.full_name,
                    tag=t_tag.name,
                ))
            elif fail_fast:
                raise
            else:
                problems.append(e)
                error(e)
                continue
        except github.GithubException as e:
            msg = "error checking for existance of tag: {t}".format(
                t=t_tag.name,
            )
            yikes = pygithub.CaughtRepositoryError(repo, e, msg)

            if fail_fast:
                raise yikes from None
            else:
                problems.append(yikes)
                error(yikes)
                continue

        checked_products[name] = data.copy()
        checked_products[name]['target_tag'] = t_tag
        checked_products[name]['update_tag'] = update_tag

    if problems:
        error("{n} product(s) have error(s)".format(n=len(problems)))

    return checked_products, problems
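A hypothetical call to check_product_tags(), assuming `products` is the merged dict produced by the cross-referencing and repo-resolution steps above; the tagger identity, tag, and message template are placeholders.

import github

tagger = github.InputGitAuthor(
    'Example Bot',                 # placeholder identity
    'bot@example.org',
    '2020-01-01T00:00:00Z',
)

checked_products, problems = check_product_tags(
    products,                      # dict keyed by product name (see above)
    git_tag='19.0.0',
    tag_message_template='Version {git_tag} release',
    tagger=tagger,
    force_tag=False,
    fail_fast=False,
)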
def get_repo_for_products(
    org,
    products,
    allow_teams,
    ext_teams,
    deny_teams,
    fail_fast=False
):
    debug("allowed teams: {allow}".format(allow=allow_teams))
    debug("external teams: {ext}".format(ext=ext_teams))
    debug("denied teams: {deny}".format(deny=deny_teams))

    resolved_products = {}

    problems = []
    for name, data in products.items():
        debug("looking for git repo for: {name} [{ver}]".format(
            name=name,
            ver=data['eups_version']
        ))

        try:
            repo = org.get_repo(name)
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = "error getting repo by name: {r}".format(r=name)
            yikes = pygithub.CaughtOrganizationError(org, e, msg)
            if fail_fast:
                raise yikes from None
            problems.append(yikes)
            error(yikes)

            continue

        debug("  found: {slug}".format(slug=repo.full_name))

        try:
            repo_team_names = [t.name for t in repo.get_teams()]
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = 'error getting teams'
            yikes = pygithub.CaughtRepositoryError(repo, e, msg)
            if fail_fast:
                raise yikes from None
            problems.append(yikes)
            error(yikes)

            continue

        debug("  teams: {teams}".format(teams=repo_team_names))

        try:
            pygithub.check_repo_teams(
                repo,
                allow_teams=allow_teams,
                deny_teams=deny_teams,
                team_names=repo_team_names
            )
        except pygithub.RepositoryTeamMembershipError as e:
            if fail_fast:
                raise
            problems.append(e)
            error(e)

            continue

        has_ext_team = any(x in repo_team_names for x in ext_teams)
        debug("  external repo: {v}".format(v=has_ext_team))

        resolved_products[name] = data.copy()
        resolved_products[name]['repo'] = repo
        resolved_products[name]['v'] = has_ext_team

    if problems:
        error("{n} product(s) have error(s)".format(n=len(problems)))

    return resolved_products, problems
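A hypothetical call to get_repo_for_products(); the org, product, and team names are placeholders.

import github

g = github.Github("<token>")                      # placeholder token
org = g.get_organization("example-org")           # placeholder org
products = {
    'afw': {'eups_version': '19.0.0', 'sha': 'deadbeef'},   # placeholder data
}

resolved_products, problems = get_repo_for_products(
    org,
    products,
    allow_teams=['Data Management'],              # placeholder team names
    ext_teams=['External'],
    deny_teams=['Legacy'],
)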
def run():
    args = parse_args()

    codetools.setup_logging(args.debug)

    global g
    g = pygithub.login_github(token_path=args.token_path, token=args.token)

    # protect destination org
    codetools.validate_org(args.dst_org)
    src_org = g.get_organization(args.src_org)
    dst_org = g.get_organization(args.dst_org)
    info("forking repos from: {org}".format(org=src_org.login))
    info("                to: {org}".format(org=dst_org.login))

    debug('looking for repos -- this can take a while for large orgs...')
    if args.team:
        debug('checking that selection team(s) exist')
        try:
            org_teams = list(src_org.get_teams())
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = 'error getting teams'
            raise pygithub.CaughtOrganizationError(src_org, e, msg) from None

        missing_teams = [n for n in args.team if n not in
                         [t.name for t in org_teams]]
        if missing_teams:
            error("{n} team(s) do not exist:".format(n=len(missing_teams)))
            [error("  '{t}'".format(t=n)) for n in missing_teams]
            return
        fork_teams = [t for t in org_teams if t.name in args.team]
        repos = pygithub.get_repos_by_team(fork_teams)
        debug('selecting repos by membership in team(s):')
        [debug("  '{t}'".format(t=t.name)) for t in fork_teams]
    else:
        # no team filter was given; `fork_teams` is not defined on this
        # branch, so fall back to all repos in the source org (assumed intent)
        repos = src_org.get_repos()

    src_repos = list(itertools.islice(repos, args.limit))

    repo_count = len(src_repos)
    if not repo_count:
        debug('nothing to do -- exiting')
        return

    debug("found {n} repos to be forked from org {src_org}:".format(
        n=repo_count,
        src_org=src_org.login
    ))
    [debug("  {r}".format(r=r.full_name)) for r in src_repos]

    if args.copy_teams:
        debug('checking source repo team membership...')
        # dict of repo and team objects, keyed by repo name
        src_rt = find_teams_by_repo(src_repos)

        # extract a non-duplicated list of team names from all repos being
        # forked as a dict, keyed by team name
        src_teams = find_used_teams(src_rt)

        debug('found {n} teams in use within org {o}:'.format(
            n=len(src_teams),
            o=src_org.login
        ))
        [debug("  '{t}'".format(t=t)) for t in src_teams.keys()]

        # check for conflicting teams in dst org before attempting to create
        # any forks so it's possible to bail out before any resources have been
        # created.
        debug('checking teams in destination org')
        conflicting_teams = pygithub.get_teams_by_name(
            dst_org,
            list(src_teams.keys())
        )
        if conflicting_teams:
            raise TeamError(
                "found {n} conflicting teams in {o}: {teams}".format(
                    n=len(conflicting_teams),
                    o=dst_org.login,
                    teams=[t.name for t in conflicting_teams]
                ))

    debug('there is no spoon...')
    problems = []
    pygithub.debug_ratelimit(g)
    dst_repos, skipped_repos, err = create_forks(
        dst_org,
        src_repos,
        fail_fast=args.fail_fast,
        dry_run=args.dry_run
    )
    if err:
        problems += err

    if args.copy_teams:
        # filter out repos which were skipped
        # dict of str(fork_repo.name): fork_repo
        dst_forks = dict((r.name, r) for r in dst_repos)
        bad_repos = dict((r.name, r) for r in skipped_repos)
        # dict of str(team.name): [repos] to be created
        dst_teams = {}
        for name, repos in src_teams.items():
            dst_teams[name] = [dst_forks[r.name] for r in repos
                               if r.name not in bad_repos]

        _, err = create_teams(
            dst_org,
            dst_teams,
            with_repos=True,
            fail_fast=args.fail_fast,
            dry_run=args.dry_run
        )
        if err:
            problems += err

    if problems:
        msg = "{n} errors forking repo(s)/teams(s)".format(
            n=len(problems))
        raise codetools.DogpileError(problems, msg)
def create_forks(
    dst_org,
    src_repos,
    fail_fast=False,
    dry_run=False
):
    assert isinstance(dst_org, github.Organization.Organization),\
        type(dst_org)
    assert isinstance(src_repos, list), type(src_repos)

    repo_count = len(src_repos)

    dst_repos = []
    skipped_repos = []
    problems = []
    with pbar.eta_bar(msg='forking', max_value=repo_count) as progress:
        repo_idx = 0
        for r in src_repos:
            progress.update(repo_idx)
            repo_idx += 1

            # XXX per
            # https://developer.github.com/v3/repos/forks/#create-a-fork
            # fork creation is async and pygithub doesn't appear to wait.
            # https://github.com/PyGithub/PyGithub/blob/c44469965e4ea368b78c4055a8afcfcf08314585/github/Organization.py#L321-L336
            # so it's possible that this may fail in some strange way such as
            # not returning all repo data, but it hasn't yet been observed.

            # get current time before API call in case fork creation is slow.
            now = datetime.datetime.now()

            debug("forking {r}".format(r=r.full_name))
            if dry_run:
                debug('  (noop)')
                continue

            try:
                fork = dst_org.create_fork(r)
                dst_repos.append(fork)
                debug("  -> {r}".format(r=fork.full_name))
            except github.RateLimitExceededException:
                raise
            except github.GithubException as e:
                if 'Empty repositories cannot be forked.' in e.data['message']:
                    warn("{r} is empty and can not be forked".format(
                        r=r.full_name
                    ))
                    skipped_repos.append(r)
                    continue

                msg = "error forking repo {r}".format(r=r.full_name)
                yikes = pygithub.CaughtOrganizationError(dst_org, e, msg)
                if fail_fast:
                    raise yikes from None
                problems.append(yikes)
                error(yikes)
                # `fork` was never assigned for this repo; skip the
                # created_at check below
                continue

            if fork.created_at < now:
                warn("fork of {r} already exists\n  created_at {ctime}".format(
                    r=fork.full_name,
                    ctime=fork.created_at
                ))

    return dst_repos, skipped_repos, problems
Example #19
def run():
    args = parse_args()

    codetools.setup_logging(args.debug)

    global g
    g = pygithub.login_github(token_path=args.token_path, token=args.token)

    # protect destination org
    codetools.validate_org(args.dst_org)
    src_org = g.get_organization(args.src_org)
    dst_org = g.get_organization(args.dst_org)
    info("forking repos from: {org}".format(org=src_org.login))
    info("                to: {org}".format(org=dst_org.login))

    debug('looking for repos -- this can take a while for large orgs...')
    if args.team:
        debug('checking that selection team(s) exist')
        try:
            org_teams = list(src_org.get_teams())
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = 'error getting teams'
            raise pygithub.CaughtOrganizationError(src_org, e, msg) from None

        missing_teams = [
            n for n in args.team if n not in [t.name for t in org_teams]
        ]
        if missing_teams:
            error("{n} team(s) do not exist:".format(n=len(missing_teams)))
            [error("  '{t}'".format(t=n)) for n in missing_teams]
            return
        fork_teams = [t for t in org_teams if t.name in args.team]
        repos = pygithub.get_repos_by_team(fork_teams)
        debug('selecting repos by membership in team(s):')
        [debug("  '{t}'".format(t=t.name)) for t in fork_teams]
    else:
        # no team filter was given; `fork_teams` is not defined on this
        # branch, so fall back to all repos in the source org (assumed intent)
        repos = src_org.get_repos()

    src_repos = list(itertools.islice(repos, args.limit))

    repo_count = len(src_repos)
    if not repo_count:
        debug('nothing to do -- exiting')
        return

    debug("found {n} repos to be forked from org {src_org}:".format(
        n=repo_count, src_org=src_org.login))
    [debug("  {r}".format(r=r.full_name)) for r in src_repos]

    if args.copy_teams:
        debug('checking source repo team membership...')
        # dict of repo and team objects, keyed by repo name
        src_rt = find_teams_by_repo(src_repos)

        # extract a non-duplicated list of team names from all repos being
        # forked as a dict, keyed by team name
        src_teams = find_used_teams(src_rt)

        debug('found {n} teams in use within org {o}:'.format(n=len(src_teams),
                                                              o=src_org.login))
        [debug("  '{t}'".format(t=t)) for t in src_teams.keys()]

        # check for conflicting teams in dst org before attempting to create
        # any forks so it's possible to bail out before any resources have been
        # created.
        debug('checking teams in destination org')
        conflicting_teams = pygithub.get_teams_by_name(dst_org,
                                                       list(src_teams.keys()))
        if conflicting_teams:
            raise TeamError(
                "found {n} conflicting teams in {o}: {teams}".format(
                    n=len(conflicting_teams),
                    o=dst_org.login,
                    teams=[t.name for t in conflicting_teams]))

    debug('there is no spoon...')
    problems = []
    pygithub.debug_ratelimit(g)
    dst_repos, skipped_repos, err = create_forks(dst_org,
                                                 src_repos,
                                                 fail_fast=args.fail_fast,
                                                 dry_run=args.dry_run)
    if err:
        problems += err

    if args.copy_teams:
        # filter out repos which were skipped
        # dict of str(fork_repo.name): fork_repo
        dst_forks = dict((r.name, r) for r in dst_repos)
        bad_repos = dict((r.name, r) for r in skipped_repos)
        # dict of str(team.name): [repos] to be created
        dst_teams = {}
        for name, repos in src_teams.items():
            dst_teams[name] = [
                dst_forks[r.name] for r in repos if r.name not in bad_repos
            ]

        _, err = create_teams(dst_org,
                              dst_teams,
                              with_repos=True,
                              fail_fast=args.fail_fast,
                              dry_run=args.dry_run)
        if err:
            problems += err

    if problems:
        msg = "{n} errors forking repo(s)/teams(s)".format(n=len(problems))
        raise codetools.DogpileError(problems, msg)
Example #20
def check_product_tags(
    products,
    git_tag,
    tag_message_template,
    tagger,
    force_tag=False,
    fail_fast=False,
    ignore_git_message=False,
    ignore_git_tagger=False,
):
    assert isinstance(tagger, github.InputGitAuthor), type(tagger)

    checked_products = {}

    problems = []
    for name, data in products.items():
        repo = data['repo']
        tag_name = git_tag

        # prefix tag name with `v`?
        if data['v'] and re.match(r'\d', tag_name):
            tag_name = "v{git_tag}".format(git_tag=tag_name)

        # message can not be formatted until we've determined if the tag must
        # be prefixed.  The 'v' prefix appearing in the tag message is required
        # to match historical behavior and allow verification of past releases.
        message = tag_message_template.format(git_tag=tag_name)

        # "target tag"
        t_tag = codekit.pygithub.TargetTag(
            name=tag_name,
            sha=data['sha'],
            message=message,
            tagger=tagger,
        )

        # control whether to create a new tag or update an existing one
        update_tag = False

        try:
            # if the existing tag is in sync, do nothing
            if check_existing_git_tag(
                    repo,
                    t_tag,
                    ignore_git_message=ignore_git_message,
                    ignore_git_tagger=ignore_git_tagger,
            ):
                warn(
                    textwrap.dedent("""\
                    No action for {repo}
                      existing tag: {tag} is already in sync\
                    """).format(
                        repo=repo.full_name,
                        tag=t_tag.name,
                    ))

                continue
        except github.RateLimitExceededException:
            raise
        except GitTagExistsError as e:
            # if force_tag is set, and the tag already exists, set
            # update_tag and fall through. Otherwise, treat it as any other
            # exception.
            if force_tag:
                update_tag = True
                warn(
                    textwrap.dedent("""\
                      existing tag: {tag} WILL BE MOVED\
                    """).format(
                        repo=repo.full_name,
                        tag=t_tag.name,
                    ))
            elif fail_fast:
                raise
            else:
                problems.append(e)
                error(e)
                continue
        except github.GithubException as e:
            msg = "error checking for existance of tag: {t}".format(
                t=t_tag.name, )
            yikes = pygithub.CaughtRepositoryError(repo, e, msg)

            if fail_fast:
                raise yikes from None
            else:
                problems.append(yikes)
                error(yikes)
                continue

        checked_products[name] = data.copy()
        checked_products[name]['target_tag'] = t_tag
        checked_products[name]['update_tag'] = update_tag

    if problems:
        error("{n} product(s) have error(s)".format(n=len(problems)))

    return checked_products, problems
Example #21
def create_teams(org,
                 teams,
                 with_repos=False,
                 ignore_existing=False,
                 fail_fast=False,
                 dry_run=False):
    assert isinstance(org, github.Organization.Organization), type(org)
    assert isinstance(teams, dict), type(teams)

    # it takes fewer api calls to create team(s) with an explicit list of
    # members after all repos have been forked but this blows up if the team
    # already exists.

    debug("creating teams in {org}".format(org=org.login))

    # dict of dst org teams keyed by name (str) with team object as value
    dst_teams = {}
    problems = []
    batch_repos = 50
    for name, repos in teams.items():
        pygithub.debug_ratelimit(g)
        debug("creating team {o}/'{t}'".format(o=org.login, t=name))

        if dry_run:
            debug('  (noop)')
            continue

        dst_t = None
        try:
            if with_repos:
                debug("  with {n} member repos:".format(n=len(repos)))
                [debug("    {r}".format(r=r.full_name)) for r in repos]

                leftover_repos = repos[batch_repos:]
                if leftover_repos:
                    debug("  creating team with first {b} of {n} repos".format(
                        b=batch_repos, n=len(repos)))
                dst_t = org.create_team(name, repo_names=repos[:batch_repos])
                if leftover_repos:
                    # add any repos over the batch limit individually to team
                    for r in leftover_repos:
                        debug("  adding repo {r}".format(r=r.full_name))
                        dst_t.add_to_repos(r)
            else:
                dst_t = org.create_team(name)
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            # if the error is for any cause other than the team already
            # existing, puke.
            team_exists = False
            if ignore_existing and 'errors' in e.data:
                for oops in e.data['errors']:
                    msg = oops['message']
                    if 'Name has already been taken' in msg:
                        # find existing team
                        dst_t = pygithub.get_teams_by_name(org, name)[0]
                        team_exists = True
            if not (ignore_existing and team_exists):
                msg = "error creating team: {t}".format(t=name)
                yikes = pygithub.CaughtOrganizationError(org, e, msg)
                if fail_fast:
                    raise yikes from None
                problems.append(yikes)
                error(yikes)
                break
        else:
            dst_teams[dst_t.name] = dst_t

    return dst_teams, problems
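A hypothetical dry-run call to create_teams(); org, team, and repo names are placeholders, and ignore_existing=True re-uses teams that already exist.

import github

g = github.Github("<token>")                      # placeholder token
org = g.get_organization("example-org")           # placeholder org
teams = {
    'team-a': [org.get_repo('repo-a'), org.get_repo('repo-b')],   # placeholders
    'team-b': [org.get_repo('repo-c')],
}

dst_teams, problems = create_teams(
    org,
    teams,
    with_repos=True,
    ignore_existing=True,
    dry_run=True,
)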
def cross_reference_products(
    eups_products,
    manifest_products,
    ignore_manifest_versions=False,
    fail_fast=False,
):
    """
    Cross reference EupsTag and Manifest data and return a merged result

    Parameters
    ----------
    eups_products: dict
        eups tag product data, keyed by product name
    manifest_products: dict
        manifest product data, keyed by product name
    ignore_manifest_versions: bool
    fail_fast: bool

    Returns
    -------
    products: dict
    problems: list

    Raises
    ------
    RuntimeError
        Upon error if `fail_fast` is `True`.
    """
    products = {}

    problems = []
    for name, eups_data in eups_products.items():
        try:
            manifest_data = manifest_products[name]
        except KeyError:
            yikes = RuntimeError(textwrap.dedent("""\
                failed to find record in manifest for:
                  {product} {eups_version}\
                """).format(
                product=name,
                eups_version=eups_data['eups_version'],
            ))
            if fail_fast:
                raise yikes from None
            problems.append(yikes)
            error(yikes)
            # `manifest_data` is undefined for this product; move on
            continue

        if ignore_manifest_versions:
            # ignore the manifest eups_version string by simply setting it to
            # the eups tag value.  This ensures that the eups tag value will be
            # passed through.
            manifest_data = manifest_data.copy()
            manifest_data['eups_version'] = eups_data['eups_version']

        if eups_data['eups_version'] != manifest_data['eups_version']:
            yikes = RuntimeError(textwrap.dedent("""\
                eups version string mismatch:
                  eups tag: {product} {eups_eups_version}
                  manifest: {product} {manifest_eups_version}\
                """).format(
                product=name,
                eups_eups_version=eups_data['eups_version'],
                manifest_eups_version=manifest_data['eups_version'],
            ))
            if fail_fast:
                raise yikes
            problems.append(yikes)
            error(yikes)

        products[name] = eups_data.copy()
        products[name].update(manifest_data)

    if problems:
        error("{n} product(s) have error(s)".format(n=len(problems)))

    return products, problems
def create_teams(
    org,
    teams,
    with_repos=False,
    ignore_existing=False,
    fail_fast=False,
    dry_run=False
):
    assert isinstance(org, github.Organization.Organization), type(org)
    assert isinstance(teams, dict), type(teams)

    # it takes fewer api calls to create team(s) with an explicit list of
    # members after all repos have been forked but this blows up if the team
    # already exists.

    debug("creating teams in {org}".format(org=org.login))

    # dict of dst org teams keyed by name (str) with team object as value
    dst_teams = {}
    problems = []
    batch_repos = 50
    for name, repos in teams.items():
        pygithub.debug_ratelimit(g)
        debug("creating team {o}/'{t}'".format(
            o=org.login,
            t=name
        ))

        if dry_run:
            debug('  (noop)')
            continue

        dst_t = None
        try:
            if with_repos:
                debug("  with {n} member repos:".format(n=len(repos)))
                [debug("    {r}".format(r=r.full_name)) for r in repos]

                leftover_repos = repos[batch_repos:]
                if leftover_repos:
                    debug("  creating team with first {b} of {n} repos"
                          .format(
                              b=batch_repos,
                              n=len(repos)
                          ))
                dst_t = org.create_team(name, repo_names=repos[:batch_repos])
                if leftover_repos:
                    # add any repos over the batch limit individually to team
                    for r in leftover_repos:
                        debug("  adding repo {r}".format(r=r.full_name))
                        dst_t.add_to_repos(r)
            else:
                dst_t = org.create_team(name)
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            # if the error is for any cause other than the team already
            # existing, puke.
            team_exists = False
            if ignore_existing and 'errors' in e.data:
                for oops in e.data['errors']:
                    msg = oops['message']
                    if 'Name has already been taken' in msg:
                        # find existing team
                        dst_t = pygithub.get_teams_by_name(org, name)[0]
                        team_exists = True
            if not (ignore_existing and team_exists):
                msg = "error creating team: {t}".format(t=name)
                yikes = pygithub.CaughtOrganizationError(org, e, msg)
                if fail_fast:
                    raise yikes from None
                problems.append(yikes)
                error(yikes)
                break
        else:
            dst_teams[dst_t.name] = dst_t

    return dst_teams, problems
Example #24
def tag_products(
    products,
    fail_fast=False,
    dry_run=False,
):
    problems = []
    for name, data in products.items():
        repo = data['repo']
        t_tag = data['target_tag']

        info(
            textwrap.dedent("""\
            tagging repo: {repo} @
              sha: {sha} as {gt}
              (eups version: {et})
              external repo: {v}
              replace existing tag: {update}\
            """).format(
                repo=repo.full_name,
                sha=t_tag.sha,
                gt=t_tag.name,
                et=data['eups_version'],
                v=data['v'],
                update=data['update_tag'],
            ))

        if dry_run:
            info('  (noop)')
            continue

        try:
            tag_obj = repo.create_git_tag(
                t_tag.name,
                t_tag.message,
                t_tag.sha,
                'commit',
                tagger=t_tag.tagger,
            )
            debug("  created tag object {tag_obj}".format(tag_obj=tag_obj))

            if data['update_tag']:
                ref = pygithub.find_tag_by_name(
                    repo,
                    t_tag.name,
                    safe=False,
                )
                ref.edit(tag_obj.sha, force=True)
                debug("  updated existing ref: {ref}".format(ref=ref))
            else:
                ref = repo.create_git_ref("refs/tags/{t}".format(t=t_tag.name),
                                          tag_obj.sha)
                debug("  created ref: {ref}".format(ref=ref))
        except github.RateLimitExceededException:
            raise
        except github.GithubException as e:
            msg = "error creating tag: {t}".format(t=t_tag.name)
            yikes = pygithub.CaughtRepositoryError(repo, e, msg)
            if fail_fast:
                raise yikes from None
            problems.append(yikes)
            error(yikes)

    if problems:
        msg = "{n} tag failures".format(n=len(problems))
        raise codetools.DogpileError(problems, msg)
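A hypothetical end-to-end sketch tying the checking and tagging steps together: run check_product_tags() first and only call tag_products() if no problems were found. `products` and `tagger` are the placeholder objects from the earlier sketches, and dry_run=True prevents any tags from being created.

checked_products, problems = check_product_tags(
    products,                              # placeholder dict (see earlier sketches)
    git_tag='19.0.0',
    tag_message_template='Version {git_tag} release',
    tagger=tagger,                         # placeholder github.InputGitAuthor
)

if not problems:
    tag_products(checked_products, fail_fast=False, dry_run=True)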