def main(argv: Optional[Sequence[str]] = None) -> int:
    """Clone (or update) every configured repository under ``output_dir``.

    Reconciles the on-disk checkout state with the filtered repo listing
    from the source, fetches each repo concurrently, and finally records
    the listing in ``repos.json`` / ``repos_filtered.json``.

    Returns 0 on success (errors from helpers propagate as exceptions).
    """
    parser = argparse.ArgumentParser(
        description=(
            'Clone all the repositories into the `output_dir`. If '
            'run again, this command will update existing repositories.'
        ),
        usage='%(prog)s [options]',
    )
    cli.add_common_args(parser)
    cli.add_jobs_arg(parser)
    args = parser.parse_args(argv)

    config = load_config(args.config_filename)
    listing = config.list_repos(config.source_settings)
    # Keep only repos matching `include` and not matching `exclude`,
    # in sorted-key order.
    wanted = {
        name: remote
        for name, remote in sorted(listing.items())
        if config.include.search(name) and not config.exclude.search(name)
    }

    # Drop any stale state files from a previous run before mutating
    # the checkouts, so a crash mid-run never leaves listing files that
    # disagree with what is on disk.
    for state_file in (config.repos_path, config.repos_filtered_path):
        if os.path.exists(state_file):
            os.remove(state_file)

    on_disk = set(_get_current_state(config.output_dir).items())
    target = set(wanted.items())

    # Delete checkouts that are no longer in the filtered listing.
    for name, _ in on_disk - target:
        _remove(config.output_dir, name)
    # Initialize checkouts that are new to the filtered listing.
    for name, remote in target - on_disk:
        _init(config.output_dir, name, remote)

    # Fetch/reset every wanted repo concurrently.
    fetch = functools.partial(_fetch_reset, all_branches=config.all_branches)
    repo_dirs = [os.path.join(config.output_dir, name) for name in wanted]
    with mapper.thread_mapper(args.jobs) as run:
        mapper.exhaust(run(fetch, repo_dirs))

    # Write the listing files last: their presence implies the clones
    # above completed.
    os.makedirs(config.output_dir, exist_ok=True)
    with open(config.repos_path, 'w') as f:
        f.write(json.dumps(listing))
    with open(config.repos_filtered_path, 'w') as f:
        f.write(json.dumps(wanted))
    return 0
import pytest

from all_repos import mapper


def square(n):
    """Trivial pure function used as the mapped callable in tests."""
    return n * n


@pytest.mark.parametrize(
    'ctx',
    (
        mapper.process_mapper(1),
        mapper.process_mapper(2),
        mapper.thread_mapper(1),
        mapper.thread_mapper(2),
    ),
)
def test_mappers(ctx):
    """Each mapper context yields results in input order for any job count."""
    with ctx as do_map:
        assert tuple(do_map(square, (3, 4, 5))) == (9, 16, 25)


def test_exhaust():
    """`exhaust` must fully consume the iterable it is given.

    The original test never called `mapper.exhaust` at all -- it only
    advanced the generator once by hand.  Use a side-effect list to
    observe that every element was pulled.
    """
    consumed = []

    def gen():
        for i in range(3):
            consumed.append(i)
            yield i

    mapper.exhaust(gen())
    assert consumed == [0, 1, 2]