Example #1
# Imports inferred from the body below; `cli` is assumed to be the shared
# argparse helpers module from the all-repos package (all_repos.cli).
import argparse

from all_repos import cli


def add_fixer_args(parser: argparse.ArgumentParser) -> None:
    cli.add_common_args(parser)

    mutex = parser.add_mutually_exclusive_group()
    mutex.add_argument(
        '--dry-run', action='store_true',
        help='show what would happen but do not push.',
    )
    mutex.add_argument(
        '-i', '--interactive', action='store_true',
        help='interactively approve / deny fixes.',
    )
    cli.add_jobs_arg(mutex, default=1)

    parser.add_argument(
        '--limit', type=int, default=None,
        help='maximum number of repos to process (default: unlimited).',
    )
    parser.add_argument(
        '--author',
        help=(
            'override commit author.  '
            'This is passed directly to `git commit`.  '
            "An example: `--author='Herp Derp <*****@*****.**>'`."
        ),
    )
    parser.add_argument(
        '--repos', nargs='*',
        help=(
            'run against specific repositories instead.  This is especially '
            'useful with `xargs autofixer ... --repos`.  This can be used to '
            'specify repositories which are not managed by `all-repos`.'
        ),
    )
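
A minimal usage sketch (not part of the source): a hypothetical fixer entry
point that consumes the arguments registered above.  `fixer_main` and its
ellipsis body are illustrative placeholders.

def fixer_main(argv=None):
    parser = argparse.ArgumentParser(description='example fixer')
    add_fixer_args(parser)
    args = parser.parse_args(argv)
    # The namespace now exposes the options registered above: args.dry_run,
    # args.interactive, args.jobs, args.limit, args.author and args.repos,
    # plus whatever `cli.add_common_args` registers.
    ...
    return 0
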
Example #2
# Imports inferred from the body below; `cli`, `mapper` and `load_config`
# are assumed to come from the all-repos package.  The helpers
# `_get_current_state`, `_remove`, `_init` and `_fetch_reset` are
# module-level functions defined alongside this one (not shown here).
import argparse
import functools
import json
import os
from typing import Optional
from typing import Sequence

from all_repos import cli
from all_repos import mapper
from all_repos.config import load_config


def main(argv: Optional[Sequence[str]] = None) -> int:
    parser = argparse.ArgumentParser(
        description=(
            'Clone all the repositories into the `output_dir`.  If '
            'run again, this command will update existing repositories.'),
        usage='%(prog)s [options]',
    )
    cli.add_common_args(parser)
    cli.add_jobs_arg(parser)
    args = parser.parse_args(argv)

    config = load_config(args.config_filename)

    repos = config.list_repos(config.source_settings)
    repos_filtered = {
        k: v
        for k, v in sorted(repos.items())
        if config.include.search(k) and not config.exclude.search(k)
    }

    # If previous `repos.json` / `repos_filtered.json` files exist,
    # remove them.
    for path in (config.repos_path, config.repos_filtered_path):
        if os.path.exists(path):
            os.remove(path)

    current_repos = set(_get_current_state(config.output_dir).items())
    filtered_repos = set(repos_filtered.items())

    # Remove repositories that are no longer in the filtered set
    for path, _ in current_repos - filtered_repos:
        _remove(config.output_dir, path)

    # Initialise newly added repositories
    for path, remote in filtered_repos - current_repos:
        _init(config.output_dir, path, remote)

    fn = functools.partial(_fetch_reset, all_branches=config.all_branches)
    todo = [os.path.join(config.output_dir, p) for p in repos_filtered]
    with mapper.thread_mapper(args.jobs) as do_map:
        mapper.exhaust(do_map(fn, todo))

    # write the repo metadata files last, once cloning / updating is done
    os.makedirs(config.output_dir, exist_ok=True)
    with open(config.repos_path, 'w') as f:
        f.write(json.dumps(repos))
    with open(config.repos_filtered_path, 'w') as f:
        f.write(json.dumps(repos_filtered))
    return 0
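
The reconciliation above hinges on two set differences over (path, remote)
pairs.  A small, self-contained illustration with made-up sample data (the
repository names and remotes are hypothetical):

current = {'org/old': 'git@host:org/old', 'org/keep': 'git@host:org/keep'}
desired = {'org/keep': 'git@host:org/keep', 'org/new': 'git@host:org/new'}

# Pairs on disk that are no longer configured -> removed via _remove()
to_remove = set(current.items()) - set(desired.items())
assert to_remove == {('org/old', 'git@host:org/old')}

# Configured pairs not yet on disk -> initialised via _init()
to_init = set(desired.items()) - set(current.items())
assert to_init == {('org/new', 'git@host:org/new')}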