def test_load_config_too_permissive(file_config):
    """Loading a world-writable config file must exit with an error."""
    file_config.cfg.chmod(0o777)
    with pytest.raises(SystemExit) as exc_info:
        load_config(file_config.cfg)
    msg, = exc_info.value.args
    expected = (
        f'{file_config.cfg} has too-permissive permissions, Expected 0o600, '
        f'got 0o777'
    )
    assert msg == expected
def test_fix_failing_check_no_changes(file_config_files, capfd):
    """A failing check_fix must leave every repository untouched."""
    target_repos = (
        str(file_config_files.output_dir.join('repo1')),
        str(file_config_files.output_dir.join('repo2')),
    )
    autofix_lib.fix(
        target_repos,
        apply_fix=lower_case_f,
        check_fix=failing_check_fix,
        config=load_config(file_config_files.cfg),
        commit=autofix_lib.Commit('message!', 'test-branch', None),
        autofix_settings=autofix_lib.AutofixSettings(
            jobs=1, color=False, limit=None, dry_run=False, interactive=False,
        ),
    )
    stdout, stderr = capfd.readouterr()
    assert 'nope!' in stderr
    assert stdout.count('Errored') == 2
    # An error while checking should not allow the changes
    assert file_config_files.dir1.join('f').read() == 'OHAI\n'
    assert file_config_files.dir2.join('f').read() == 'OHELLO\n'
def main(argv: Sequence[str] | None = None) -> int:
    """CLI entry point: run a distributed `git grep` across all clones."""
    parser = argparse.ArgumentParser(
        description='Similar to a distributed `git grep ...`.',
        usage='%(prog)s [options] [GIT_GREP_OPTIONS]',
        add_help=False,
    )
    # Handle --help like normal, pass -h through to git grep
    parser.add_argument(
        '--help', action='help', help='show this help message and exit',
    )
    cli.add_common_args(parser)
    cli.add_repos_with_matches_arg(parser)
    cli.add_output_paths_arg(parser)
    args, rest = parser.parse_known_args(argv)

    config = load_config(args.config_filename)
    if args.repos_with_matches:
        return repos_matching_cli(config, rest)
    return grep_cli(
        config, rest, output_paths=args.output_paths, use_color=args.color,
    )
def main(argv: Optional[Sequence[str]] = None) -> int:
    """Emit shell code enabling git-clone tab completion for all repos."""
    parser = argparse.ArgumentParser(
        description=(
            'Add git-clone tab completion for all-repos repositories.\n\n'
            'Add to .bash_profile:\n'
            ' `eval "$(all-repos-complete -C ~/.../all-repos.json --bash)"`'),
        formatter_class=argparse.RawDescriptionHelpFormatter,
        usage='%(prog)s [options] {--bash}',
    )
    cli.add_common_args(parser)
    mutex = parser.add_mutually_exclusive_group(required=True)
    mutex.add_argument('--bash', action='store_true')
    mutex.add_argument('--zsh', action='store_true')
    args = parser.parse_args(argv)

    config = load_config(args.config_filename)
    print(f'__all_repos__repos_json={config.repos_filtered_path}')
    # Exactly one of --bash/--zsh is guaranteed by the mutex group.
    if args.bash:
        script = BASH
    elif args.zsh:
        script = ZSH
    else:
        raise NotImplementedError()
    print(script)
    return 0
def test_find_repos_skips_already_migrated(file_config_files):
    """A repo whose setup.py is already a bare `setup()` is not reported."""
    write_file_commit(
        file_config_files.dir1,
        'setup.py',
        'from setuptools import setup\nsetup()\n',
    )
    clone.main(('--config-filename', str(file_config_files.cfg)))
    config = load_config(str(file_config_files.cfg))
    assert find_repos(config) == set()
def test_fix_dry_run_no_change(file_config_files, capfd):
    """Dry-run prints the would-be diff but leaves files on disk intact."""
    target_repos = (
        str(file_config_files.output_dir.join('repo1')),
        str(file_config_files.output_dir.join('repo2')),
    )
    autofix_lib.fix(
        target_repos,
        apply_fix=lower_case_f,
        config=load_config(file_config_files.cfg),
        commit=autofix_lib.Commit('message!', 'test-branch', None),
        autofix_settings=autofix_lib.AutofixSettings(
            jobs=1, color=False, limit=None, dry_run=True, interactive=False,
        ),
    )
    stdout, stderr = capfd.readouterr()
    assert stderr == ''
    assert 'Errored' not in stdout
    # Showed the diff of what would have happened
    assert '-OHAI\n+ohai\n' in stdout
    assert '-OHELLO\n+ohello\n' in stdout
    # Didn't actually perform any changes
    assert file_config_files.dir1.join('f').read() == 'OHAI\n'
    assert file_config_files.dir2.join('f').read() == 'OHELLO\n'
def from_cli(args, *, find_repos, msg, branch_name):
    """Build the (repos, config, commit, settings) tuple from parsed args."""
    config = load_config(args.config_filename)
    repos = filter_repos(config, args.repos, find_repos)
    commit = Commit.from_cli(args, msg=msg, branch_name=branch_name)
    settings = AutofixSettings.from_cli(args)
    return repos, config, commit, settings
def test_find_repos_finds_a_repo(file_config_files):
    """A repo whose setup() passes name/version metadata is discovered."""
    write_file_commit(
        file_config_files.dir1,
        'setup.py',
        'from setuptools import setup\nsetup(name="pkg", version="1")\n',
    )
    clone.main(('--config-filename', str(file_config_files.cfg)))
    found = find_repos(load_config(str(file_config_files.cfg)))
    assert found == {str(file_config_files.output_dir.join('repo1'))}
def test_autofix_makes_commits(file_config_files, capfd):
    """A successful fix commits on the new branch with the given author."""
    target_repos = (
        str(file_config_files.output_dir.join('repo1')),
        str(file_config_files.output_dir.join('repo2')),
    )
    autofix_lib.fix(
        target_repos,
        apply_fix=lower_case_f,
        config=load_config(file_config_files.cfg),
        commit=autofix_lib.Commit('message!', 'test-branch', 'A B <[email protected]>'),
        autofix_settings=autofix_lib.AutofixSettings(
            jobs=1, color=False, limit=None, dry_run=False, interactive=False,
        ),
    )
    stdout, stderr = capfd.readouterr()
    assert stderr == ''
    assert 'Errored' not in stdout
    assert file_config_files.dir1.join('f').read() == 'ohai\n'
    assert file_config_files.dir2.join('f').read() == 'ohello\n'

    # The branch name should be what we specified
    last_commit_msg = subprocess.check_output((
        'git', '-C', file_config_files.dir1, 'log',
        '--format=%s', '--first-parent', '-1',
    )).strip().decode()
    potential_msgs = testing.git.merge_msgs('all-repos_autofix_test-branch')
    assert last_commit_msg in potential_msgs

    # We should see a commit from the autofix change we made
    commit = subprocess.check_output((
        'git', '-C', file_config_files.dir1, 'log',
        '--patch', '--grep', 'message!', '--format=%an %ae\n%B',
    )).decode()
    assert commit.startswith(
        'A B [email protected]\n'
        'message!\n'
        '\n'
        'Committed via https://github.com/asottile/all-repos\n',
    )
    assert commit.endswith('-OHAI\n+ohai\n')
def main(argv=None):
    """Print the name of every cloned repository, one per line."""
    parser = argparse.ArgumentParser(
        description='List all cloned repository names.',
        usage='all-repos-list-repos [options]',
    )
    cli.add_common_args(parser)
    args = parser.parse_args(argv)

    config = load_config(args.config_filename)
    for repo_name in config.get_cloned_repos():
        print(repo_name)
def test_grep(file_config_files):
    """grep() maps each repo to its matching-line output, skipping misses."""
    config = load_config(file_config_files.cfg)
    repo1 = file_config_files.output_dir.join('repo1')
    repo2 = file_config_files.output_dir.join('repo2')

    matches = grep(config, ['^OH'])
    assert matches == {repo1: b'f:OHAI\n', repo2: b'f:OHELLO\n'}

    matches = grep(config, ['^OHAI'])
    assert matches == {repo1: b'f:OHAI\n'}

    matches = grep(config, ['nope'])
    assert matches == {}
def test_repos_matching(file_config_files):
    """repos_matching() returns the set of repo paths with any hit."""
    config = load_config(file_config_files.cfg)
    repo1 = file_config_files.output_dir.join('repo1')
    repo2 = file_config_files.output_dir.join('repo2')

    assert repos_matching(config, ['^OH']) == {repo1, repo2}
    assert repos_matching(config, ['^OHAI']) == {repo1}
    assert repos_matching(config, ['nope']) == set()
def test_find_repos(file_config_files):
    """Only repos whose pre-commit config is not yet migrated are found."""
    # A migrated configuration
    write_file_commit(
        file_config_files.dir1, '.pre-commit-config.yaml', 'repos: []\n',
    )
    # A non-migrated configuration
    write_file_commit(
        file_config_files.dir2, '.pre-commit-config.yaml', '[]\n',
    )
    clone.main(('--config-filename', str(file_config_files.cfg)))
    found = find_repos(load_config(str(file_config_files.cfg)))
    assert found == {str(file_config_files.output_dir.join('repo2'))}
def test_find_repos_finds_a_repo(file_config_files):
    """A repo pinning the deprecated autopep8-wrapper hook is discovered."""
    # NOTE(review): the YAML indentation below was reconstructed from a
    # whitespace-mangled source — confirm against the original fixture.
    contents = '''\
-   repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v1.4.0-1
    hooks:
    -   id: autopep8-wrapper
'''
    git.write_file_commit(
        file_config_files.dir1, '.pre-commit-config.yaml', contents,
    )
    clone.main(('--config-filename', str(file_config_files.cfg)))
    found = find_repos(load_config(str(file_config_files.cfg)))
    assert found == {str(file_config_files.output_dir.join('repo1'))}
def from_cli(
        args: Any,
        *,
        find_repos: Callable[[Config], Iterable[str]],
        msg: str,
        branch_name: str,
) -> Tuple[Iterable[str], Config, Commit, AutofixSettings]:
    """Assemble the (repos, config, commit, settings) tuple from CLI args."""
    config = load_config(args.config_filename)
    repos = filter_repos(config, args.repos, find_repos)
    commit = Commit(msg=msg, branch_name=branch_name, author=args.author)
    return repos, config, commit, AutofixSettings.from_cli(args)
def main(argv=None):
    """CLI entry point: run a distributed `git grep` across all clones."""
    parser = argparse.ArgumentParser(
        description='Similar to a distributed `git grep ...`.',
        usage='%(prog)s [options] [GIT_GREP_OPTIONS]',
    )
    cli.add_common_args(parser)
    cli.add_repos_with_matches_arg(parser)
    args, rest = parser.parse_known_args(argv)

    config = load_config(args.config_filename)
    if args.repos_with_matches:
        return repos_matching_cli(config, rest)
    return grep_cli(config, rest, use_color=args.color)
def test_find_repos_does_not_find_migrated_repo(file_config_files):
    """A repo already pointing at the gitlab flake8 mirror is skipped."""
    # NOTE(review): the YAML indentation below was reconstructed from a
    # whitespace-mangled source — confirm against the original fixture.
    contents = '''\
-   repo: https://gitlab.com/pycqa/flake8
    rev: 3.7.0
    hooks:
    -   id: flake8
'''
    git.write_file_commit(
        file_config_files.dir1, '.pre-commit-config.yaml', contents,
    )
    clone.main(('--config-filename', str(file_config_files.cfg)))
    found = find_repos(load_config(str(file_config_files.cfg)))
    assert found == set()
def main(argv: Optional[Sequence[str]] = None) -> int:
    """Clone every configured repository into `output_dir`; update existing.

    The on-disk state is reconciled against the filtered repo listing:
    repos that disappeared are removed, new ones are initialized, and all
    remaining clones are fetched/reset in parallel.  Returns 0.
    """
    parser = argparse.ArgumentParser(
        description=(
            'Clone all the repositories into the `output_dir`. If '
            'run again, this command will update existing repositories.'),
        usage='%(prog)s [options]',
    )
    cli.add_common_args(parser)
    cli.add_jobs_arg(parser)
    args = parser.parse_args(argv)

    config = load_config(args.config_filename)
    # Ask the configured source for all repos, then apply the include /
    # exclude regexes to decide what should exist on disk.
    repos = config.list_repos(config.source_settings)
    repos_filtered = {
        k: v for k, v in sorted(repos.items())
        if config.include.search(k) and not config.exclude.search(k)
    }
    # If the previous `repos.json` / `repos_filtered.json` files exist
    # remove them.  They are rewritten at the very end, so their absence
    # marks an in-progress (possibly interrupted) clone run.
    for path in (config.repos_path, config.repos_filtered_path):
        if os.path.exists(path):
            os.remove(path)

    current_repos = set(_get_current_state(config.output_dir).items())
    filtered_repos = set(repos_filtered.items())

    # Remove old no longer cloned repositories
    for path, _ in current_repos - filtered_repos:
        _remove(config.output_dir, path)

    # Initialize clones that are configured but not yet present on disk.
    for path, remote in filtered_repos - current_repos:
        _init(config.output_dir, path, remote)

    # Fetch/reset every configured clone (new and pre-existing) using a
    # thread pool sized by --jobs.
    fn = functools.partial(_fetch_reset, all_branches=config.all_branches)
    todo = [os.path.join(config.output_dir, p) for p in repos_filtered]
    with mapper.thread_mapper(args.jobs) as do_map:
        mapper.exhaust(do_map(fn, todo))

    # write these last
    os.makedirs(config.output_dir, exist_ok=True)
    with open(config.repos_path, 'w') as f:
        f.write(json.dumps(repos))
    with open(config.repos_filtered_path, 'w') as f:
        f.write(json.dumps(repos_filtered))
    return 0
def main(argv: Optional[Sequence[str]] = None) -> int:
    """List cloned repositories, optionally as paths under output_dir."""
    parser = argparse.ArgumentParser(
        description='List all cloned repository names.',
        usage='all-repos-list-repos [options]',
    )
    cli.add_common_args(parser)
    cli.add_output_paths_arg(parser)
    args = parser.parse_args(argv)

    config = load_config(args.config_filename)
    for repo_name in config.get_cloned_repos():
        if args.output_paths:
            repo_name = os.path.join(config.output_dir, repo_name)
        print(repo_name)
    return 0
def main(argv=None):
    """CLI entry point for searching filenames across all cloned repos."""
    parser = argparse.ArgumentParser(
        description=(
            'Similar to a distributed `git ls-files | grep -P PATTERN`.'),
        usage='%(prog)s [options] PATTERN',
    )
    cli.add_common_args(parser)
    cli.add_repos_with_matches_arg(parser)
    parser.add_argument('pattern', help='the python regex to match.')
    args = parser.parse_args(argv)

    config = load_config(args.config_filename)
    if args.repos_with_matches:
        return find_files_repos_cli(config, args.pattern, use_color=args.color)
    return find_files_cli(config, args.pattern, use_color=args.color)
def main(argv: Optional[Sequence[str]] = None) -> int:
    """Run tox4 (env py38) in every cloned repository that has a tox.ini.

    Prints a progress line per repository and, at the end, summary lists
    of repos without a tox config, successful runs, and failures.
    Always returns 0.
    """
    parser = argparse.ArgumentParser(
        description='Run tox4 on all cloned repositories.',
        usage='python tox.py -C configfile',
    )
    cli.add_common_args(parser)
    cli.add_output_paths_arg(parser)
    args = parser.parse_args(argv)

    config = load_config(args.config_filename)
    results = {
        "notox": [],
        "successful": [],
        "problems": [],
    }
    # These repositories send buildout/setuptools into an endless loop.
    skipped = frozenset((
        "zopefoundation/zopetoolkit",
        "plone/plone.memoize",
    ))
    number_of_repos = len(config.get_cloned_repos())
    for i, repo in enumerate(list(config.get_cloned_repos())):
        if repo in skipped:
            continue
        path_repo = os.path.join(config.output_dir, repo)
        path_tox = os.path.join(path_repo, "tox.ini")
        if os.path.exists(path_tox):
            print(f"about to run tox for {repo}, {i+1} of {number_of_repos}")
            # Bug fix: `-e` and its value must be separate argv elements;
            # the original passed "-e py38" as ONE token, handing tox a
            # malformed environment name instead of `py38`.
            run = subprocess.run(
                ["tox4", "-e", "py38", "-c", path_tox],
                stdout=subprocess.DEVNULL,
            )
            if run.returncode == 0:
                print(f"tox4 run successful for {repo}")
                results["successful"].append(repo)
            else:
                print(f"tox4 run failed for {repo}")
                results["problems"].append(repo)
        else:
            print(f"{repo} does not contain a tox configuration. Boo!")
            results["notox"].append(repo)
    print(f'notox: {len(results["notox"])}')
    print(f'{results["notox"]}')
    print(f'successful: {len(results["successful"])}')
    print(f'{results["successful"]}')
    print(f'problems: {len(results["problems"])}')
    print(f'{results["problems"]}')
    return 0
def test_fix_non_default_branch(file_config_non_default):
    """The fix applies even when the clone's default branch is custom."""
    clone.main(('--config-filename', str(file_config_non_default.cfg)))
    autofix_lib.fix(
        (str(file_config_non_default.output_dir.join('repo1')),),
        apply_fix=lower_case_f,
        config=load_config(file_config_non_default.cfg),
        commit=autofix_lib.Commit('message!', 'test-branch', 'A B <[email protected]>'),
        autofix_settings=autofix_lib.AutofixSettings(
            jobs=1, color=False, limit=None, dry_run=False, interactive=False,
        ),
    )
    assert file_config_non_default.dir1.join('f').read() == 'ohai\n'
def test_fix_interactive(file_config_files, capfd, mock_input):
    """Interactive mode applies the fix only to repos answered 'y'."""
    mock_input.set_side_effect('y', 'n')
    target_repos = (
        str(file_config_files.output_dir.join('repo1')),
        str(file_config_files.output_dir.join('repo2')),
    )
    autofix_lib.fix(
        target_repos,
        apply_fix=lower_case_f,
        config=load_config(file_config_files.cfg),
        commit=autofix_lib.Commit('message!', 'test-branch', None),
        autofix_settings=autofix_lib.AutofixSettings(
            jobs=1, color=False, limit=None, dry_run=False, interactive=True,
        ),
    )
    # repo1 accepted ('y') -> lower-cased; repo2 declined ('n') -> unchanged
    assert file_config_files.dir1.join('f').read() == 'ohai\n'
    assert file_config_files.dir2.join('f').read() == 'OHELLO\n'
def test_noop_does_not_commit(file_config_files, capfd):
    """An apply_fix that changes nothing must not create any commits."""
    revs_before = (
        testing.git.revparse(file_config_files.dir1),
        testing.git.revparse(file_config_files.dir2),
    )
    autofix_lib.fix(
        (
            str(file_config_files.output_dir.join('repo1')),
            str(file_config_files.output_dir.join('repo2')),
        ),
        apply_fix=lambda: None,
        config=load_config(file_config_files.cfg),
        commit=autofix_lib.Commit('message!', 'test-branch', None),
        autofix_settings=autofix_lib.AutofixSettings(
            jobs=1, color=False, limit=None, dry_run=False, interactive=False,
        ),
    )
    revs_after = (
        testing.git.revparse(file_config_files.dir1),
        testing.git.revparse(file_config_files.dir2),
    )
    assert revs_before == revs_after
def test_fix_with_limit(file_config_files, capfd):
    """With limit=1 only the first repository's diff is produced."""
    autofix_lib.fix(
        (
            str(file_config_files.output_dir.join('repo1')),
            str(file_config_files.output_dir.join('repo2')),
        ),
        apply_fix=lower_case_f,
        config=load_config(file_config_files.cfg),
        commit=autofix_lib.Commit('message!', 'test-branch', None),
        autofix_settings=autofix_lib.AutofixSettings(
            jobs=1, color=False, limit=1, dry_run=True, interactive=False,
        ),
    )
    stdout, stderr = capfd.readouterr()
    assert stderr == ''
    assert 'Errored' not in stdout
    # Should still see the diff from the first repository
    assert '-OHAI\n+ohai\n' in stdout
    assert '-OHELLO\n+ohello\n' not in stdout
def test_find_repos_finds_a_repo(file_config_files):
    """A repo containing an azure-pipelines config is discovered."""
    write_file_commit(file_config_files.dir1, 'azure-pipelines.yml', SAMPLE)
    clone.main(('--config-filename', str(file_config_files.cfg)))
    cfg = load_config(str(file_config_files.cfg))
    found = azure_pipelines_autoupdate.find_repos(cfg)
    assert found == {str(file_config_files.output_dir.join('repo1'))}
def test_get_cloned_repos(file_config):
    """After cloning, get_cloned_repos() yields both repository names."""
    clone.main(('--config-filename', str(file_config.cfg)))
    config = load_config(file_config.cfg)
    assert set(config.get_cloned_repos()) == {'repo1', 'repo2'}
def test_load_config(file_config):
    """The json_file source's list_repos is wired into the loaded config."""
    config = load_config(file_config.cfg)
    assert config.list_repos is all_repos.source.json_file.list_repos
def test_find_repos_finds_a_repo(file_config_files):
    """A repo with a legacy top-level-list pre-commit config is found."""
    write_file_commit(file_config_files.dir1, '.pre-commit-config.yaml', '[]')
    clone.main(('--config-filename', str(file_config_files.cfg)))
    found = find_repos(load_config(str(file_config_files.cfg)))
    assert found == {str(file_config_files.output_dir.join('repo1'))}
def test_find_repos_none(file_config_files):
    """With no matching files present, find_repos reports nothing."""
    config = load_config(str(file_config_files.cfg))
    assert find_repos(config) == set()