def test_find_repos_skips_already_migrated(file_config_files):
    """A repo whose setup.py already uses a bare ``setup()`` is not reported."""
    write_file_commit(
        file_config_files.dir1,
        'setup.py',
        'from setuptools import setup\nsetup()\n',
    )
    clone.main(('--config-filename', str(file_config_files.cfg)))
    config = load_config(str(file_config_files.cfg))
    assert find_repos(config) == set()
def test_clones_all_branches_true(file_config):
    """Enabling ``all_branches`` in the config makes a reclone fetch every branch."""
    src = str(file_config.dir1)
    # create a second branch upstream, then return to master
    subprocess.check_call(('git', '-C', src, 'checkout', 'master', '-b', 'b2'))
    subprocess.check_call(('git', '-C', src, 'checkout', 'master'))

    def _remote_branches():
        repo = str(file_config.output_dir.join('repo1'))
        raw = subprocess.check_output(('git', '-C', repo, 'branch', '--remote'))
        return raw.decode()

    assert not main(('--config-file', str(file_config.cfg)))
    # initially we should not see multiple branches
    assert _remote_branches() == ' origin/master\n'

    # set that we want to clone all branches
    cfg_contents = json.loads(file_config.cfg.read())
    cfg_contents['all_branches'] = True
    file_config.cfg.write(json.dumps(cfg_contents))

    assert not main(('--config-file', str(file_config.cfg)))
    assert _remote_branches() == ' origin/b2\n origin/master\n'
def test_find_repos_finds_a_repo(file_config_files):
    """A setup.py passing keyword metadata to ``setup()`` is reported."""
    write_file_commit(
        file_config_files.dir1,
        'setup.py',
        'from setuptools import setup\nsetup(name="pkg", version="1")\n',
    )
    clone.main(('--config-filename', str(file_config_files.cfg)))
    expected = {str(file_config_files.output_dir.join('repo1'))}
    assert find_repos(load_config(str(file_config_files.cfg))) == expected
def test_it_removes_directories(file_config):
    """A repo dropped from the source json is deleted on the next clone run."""
    assert not main(('--config-file', str(file_config.cfg)))
    # rewrite the listing so repo1 is no longer present, then reclone
    file_config.repos_json.write(json.dumps({'repo2': str(file_config.dir2)}))
    assert not main(('--config-file', str(file_config.cfg)))
    assert not file_config.output_dir.join('repo1').exists()
def test_main(file_config_files):
    """The sed expression is applied across every cloned repository."""
    clone.main(('--config-filename', str(file_config_files.cfg)))
    argv = ('--config-filename', str(file_config_files.cfg), 's/HAI/BAI/g', '*')
    assert not main(argv)
    # repo1 contained OHAI and is rewritten; repo2's OHELLO does not match
    assert file_config_files.dir1.join('f').read() == 'OBAI\n'
    assert file_config_files.dir2.join('f').read() == 'OHELLO\n'
def test_main_custom_file_pattern(file_config_files):
    """A custom file pattern limits the substitution to matching files only."""
    write_file_commit(file_config_files.dir1, 'g', 'OHAI\n')
    clone.main(('--config-filename', str(file_config_files.cfg)))
    argv = ('--config-filename', str(file_config_files.cfg), 's/AI/IE/g', 'g')
    assert not main(argv)
    # only `g` matched the pattern; `f` in both repos is untouched
    assert file_config_files.dir1.join('f').read() == 'OHAI\n'
    assert file_config_files.dir1.join('g').read() == 'OHIE\n'
    assert file_config_files.dir2.join('f').read() == 'OHELLO\n'
def test_it_updates(file_config):
    """Recloning after a new upstream commit updates the local checkout."""
    assert not main(('--config-file', str(file_config.cfg)))
    # advance the upstream repo by one (empty) commit
    subprocess.check_call((
        'git', '-C', file_config.dir1,
        'commit', '--allow-empty', '-m', 'foo',
    ))
    updated_rev = revparse(file_config.dir1)
    assert updated_rev != file_config.rev1
    # a second run should fast-forward the clone to the new revision
    assert not main(('--config-file', str(file_config.cfg)))
    assert revparse(file_config.output_dir.join('repo1')) == updated_rev
def test_find_repos(file_config_files):
    """Only repos whose pre-commit config still needs migrating are returned."""
    # dir1: already in the migrated (top-level map) format
    write_file_commit(
        file_config_files.dir1, '.pre-commit-config.yaml', 'repos: []\n',
    )
    # dir2: legacy top-level list format, still needs migration
    write_file_commit(
        file_config_files.dir2, '.pre-commit-config.yaml', '[]\n',
    )
    clone.main(('--config-filename', str(file_config_files.cfg)))
    found = find_repos(load_config(str(file_config_files.cfg)))
    assert found == {str(file_config_files.output_dir.join('repo2'))}
def test_it_removes_empty_directories(file_config):
    """Removing a nested repo also prunes its now-empty parent directory."""
    file_config.repos_json.write(
        json.dumps({'dir1/repo2': str(file_config.dir2)}),
    )
    assert not main(('--config-file', str(file_config.cfg)))
    assert file_config.output_dir.join('dir1/repo2').isdir()

    # now drop the nested repo and reclone
    file_config.repos_json.write(
        json.dumps({'repo1': str(file_config.dir1)}),
    )
    assert not main(('--config-file', str(file_config.cfg)))
    assert not file_config.output_dir.join('dir1/repo2').exists()
    assert not file_config.output_dir.join('dir1').exists()
def test_find_repos_finds_a_repo(file_config_files):
    """A config pinning the deprecated autopep8-wrapper hook is reported."""
    # NOTE(review): the yaml indentation below is reconstructed — the original
    # literal's whitespace was mangled in this copy; verify against upstream.
    contents = '''\
-   repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v1.4.0-1
    hooks:
    -   id: autopep8-wrapper
'''
    git.write_file_commit(
        file_config_files.dir1, '.pre-commit-config.yaml', contents,
    )
    clone.main(('--config-filename', str(file_config_files.cfg)))
    found = find_repos(load_config(str(file_config_files.cfg)))
    assert found == {str(file_config_files.output_dir.join('repo1'))}
def test_find_repos_does_not_find_migrated_repo(file_config_files):
    """A config already pointing at the migrated flake8 location is skipped."""
    # NOTE(review): the yaml indentation below is reconstructed — the original
    # literal's whitespace was mangled in this copy; verify against upstream.
    contents = '''\
-   repo: https://gitlab.com/pycqa/flake8
    rev: 3.7.0
    hooks:
    -   id: flake8
'''
    git.write_file_commit(
        file_config_files.dir1, '.pre-commit-config.yaml', contents,
    )
    clone.main(('--config-filename', str(file_config_files.cfg)))
    found = find_repos(load_config(str(file_config_files.cfg)))
    assert found == set()
def test_no_crash_repo_without_branch(file_config):
    """Cloning a repository with no commits (hence no branch) must not crash."""
    # replace dir1 with a freshly-initialized, commitless repository
    file_config.dir1.remove()
    subprocess.check_call(('git', 'init', file_config.dir1))
    # should not crash
    assert not main(('--config-filename', str(file_config.cfg)))
def test_it_continues_on_unclonable_repositories(file_config, capsys):
    """An unclonable repo is reported on stdout but does not fail the run."""
    file_config.repos_json.write(
        json.dumps({'dir1/repo2': '/does/not/exist'}),
    )
    assert not main(('--config-file', str(file_config.cfg)))
    out, _ = capsys.readouterr()
    assert 'Error fetching ' in out
def test_it_can_clone_non_master_default_branch(file_config_non_default):
    """The checkout tracks the remote's default branch even when not master."""
    assert not main(('--config-file', str(file_config_non_default.cfg)))
    repo = file_config_non_default.output_dir.join('repo1')
    branches = subprocess.check_output(
        ('git', '-C', repo, 'branch'),
    ).strip().decode()
    assert branches == '* m2'
def test_fix_non_default_branch(file_config_non_default):
    """autofix_lib.fix works on a repo whose default branch is not master."""
    clone.main(('--config-filename', str(file_config_non_default.cfg)))
    commit = autofix_lib.Commit(
        'message!', 'test-branch', 'A B <[email protected]>',
    )
    settings = autofix_lib.AutofixSettings(
        jobs=1, color=False, limit=None, dry_run=False, interactive=False,
    )
    autofix_lib.fix(
        (str(file_config_non_default.output_dir.join('repo1')),),
        apply_fix=lower_case_f,
        config=load_config(file_config_non_default.cfg),
        commit=commit,
        autofix_settings=settings,
    )
    # the fix lower-cased the file contents and was pushed back upstream
    assert file_config_non_default.dir1.join('f').read() == 'ohai\n'
def test_it_sorts_filtered_repos(file_config):
    """repos_filtered.json is written in sorted order regardless of input order."""
    # deliberately write the repo mapping out of order
    # TODO: in python3.8+ this can use `reversed(contents.items())`
    contents = json.loads(file_config.repos_json.read())
    reordered = {key: contents[key] for key in reversed(tuple(contents))}
    file_config.repos_json.write(json.dumps(reordered))

    assert not main(('--config-file', str(file_config.cfg)))

    filtered = json.loads(
        file_config.output_dir.join('repos_filtered.json').read(),
    )
    assert sorted(filtered) == list(filtered)
def test_it_clones(file_config):
    """A clone run materializes each repo and writes both repos json files."""
    assert not main(('--config-file', str(file_config.cfg)))
    assert file_config.output_dir.isdir()

    expected = {'repo1': str(file_config.dir1), 'repo2': str(file_config.dir2)}
    written = json.loads(file_config.output_dir.join('repos.json').read())
    assert written == expected
    filtered = json.loads(
        file_config.output_dir.join('repos_filtered.json').read(),
    )
    assert filtered == expected

    # each checkout exists and is at the expected revision
    for name, rev in (('repo1', file_config.rev1), ('repo2', file_config.rev2)):
        checkout = file_config.output_dir.join(name)
        assert checkout.isdir()
        assert revparse(checkout) == rev
def test_main(file_config_files):
    r"""End-to-end: the fixer migrates pre-commit's cache directory.

    ``$HOME/.pre-commit`` (travis) becomes ``$HOME/.cache/pre-commit`` and
    ``%USERPROFILE%\.pre-commit`` (appveyor) becomes
    ``%USERPROFILE%\.cache\pre-commit``; every other line is left untouched.
    """
    travis_src = (
        'language: python\n'
        'matrix:\n'
        '  include:\n'
        '    - env: TOXENV=py36\n'
        '      python: 3.6\n'
        'install: pip install coveralls tox\n'
        'script: tox\n'
        'after_success: coveralls\n'
        'cache:\n'
        '  directories:\n'
        '    - $HOME/.cache/pip\n'
        '    - $HOME/.pre-commit\n'
    )
    # BUG FIX: these appveyor lines were previously *raw* strings ending in a
    # literal ``\n`` escape sequence, so the fixture was written (and asserted)
    # as one long single line rather than the multi-line yaml document the
    # test intends.  A raw triple-quoted string keeps the backslash content
    # intact while using real newlines.
    appveyor_src = r'''environment:
  matrix:
    - TOXENV: py36
install:
  - "SET PATH=C:\\Python36;C:\\Python36\\Scripts;%PATH%"
  - pip install tox
build: false
test_script: tox
cache:
  - '%LOCALAPPDATA%\pip\cache'
  - '%USERPROFILE%\.pre-commit'
'''
    write_file_commit(file_config_files.dir1, '.travis.yml', travis_src)
    write_file_commit(file_config_files.dir2, 'appveyor.yml', appveyor_src)
    clone.main(('--config-filename', str(file_config_files.cfg)))

    assert not main(('--config-filename', str(file_config_files.cfg)))

    assert file_config_files.dir1.join('.travis.yml').read() == (
        'language: python\n'
        'matrix:\n'
        '  include:\n'
        '    - env: TOXENV=py36\n'
        '      python: 3.6\n'
        'install: pip install coveralls tox\n'
        'script: tox\n'
        'after_success: coveralls\n'
        'cache:\n'
        '  directories:\n'
        '    - $HOME/.cache/pip\n'
        '    - $HOME/.cache/pre-commit\n'
    )
    assert file_config_files.dir2.join('appveyor.yml').read() == r'''environment:
  matrix:
    - TOXENV: py36
install:
  - "SET PATH=C:\\Python36;C:\\Python36\\Scripts;%PATH%"
  - pip install tox
build: false
test_script: tox
cache:
  - '%LOCALAPPDATA%\pip\cache'
  - '%USERPROFILE%\.cache\pre-commit'
'''
def test_find_repos_finds_a_repo(file_config_files):
    """A repo with a legacy (top-level list) pre-commit config is reported."""
    write_file_commit(file_config_files.dir1, '.pre-commit-config.yaml', '[]')
    clone.main(('--config-filename', str(file_config_files.cfg)))
    found = find_repos(load_config(str(file_config_files.cfg)))
    assert found == {str(file_config_files.output_dir.join('repo1'))}
def test_it_does_not_crash_with_no_repos(file_config):
    """An include pattern that matches nothing still exits successfully."""
    cfg_contents = json.loads(file_config.cfg.read())
    cfg_contents['include'] = '^$'  # matches only the empty string
    file_config.cfg.write(json.dumps(cfg_contents))
    assert not main(('--config-file', str(file_config.cfg)))
def test_get_cloned_repos(file_config):
    """get_cloned_repos yields the name of every cloned repository."""
    clone.main(('--config-filename', str(file_config.cfg)))
    config = load_config(file_config.cfg)
    assert set(config.get_cloned_repos()) == {'repo1', 'repo2'}
def generated(tmpdir_factory):
    """Build a five-repo corpus, clone it, and generate a depends database.

    Returns the path to the generated database file.
    """
    root = tmpdir_factory.mktemp('generated')
    repodir = root.join('repos')

    with _git_dir(repodir.join('1')) as r1:
        r1.join('setup.py').write(
            'from setuptools import setup\n'
            'setup(name="pkg1", install_requires=["pkg2", "six"])\n',
        )
    with _git_dir(repodir.join('2')) as r2:
        r2.join('setup.py').write(
            'from setuptools import setup\n'
            'setup(name="pkg2")\n',
        )
        r2.join('requirements-dev.txt').write('pytest\npre-commit\n')
    # intentional error
    with _git_dir(repodir.join('3')) as r3:
        r3.join('setup.py').write('from setuptools import setup; setup()')
    # intentionally empty
    with _git_dir(repodir.join('4')) as r4:
        r4.join('f').ensure()
    # also provides a pkg2, but in javascript
    with _git_dir(repodir.join('5')) as r5:
        r5.join('package.json').write('{"name": "pkg2"}')

    repos_json = repodir.join('repos.json')
    repos_json.write(
        json.dumps({
            'r1': str(r1),
            'r2': str(r2),
            'r3': str(r3),
            'r4': str(r4),
            'r5': str(r5),
        }),
    )

    all_repos = root.join('all_repos').ensure_dir()
    all_repos_cfg = all_repos.join('all-repos.json')
    all_repos_cfg.write(
        json.dumps({
            'output_dir': 'output',
            'source': 'all_repos.source.json_file',
            'source_settings': {'filename': str(repos_json)},
            'push': 'all_repos.push.merge_to_master',
            'push_settings': {},
        }),
    )
    # tighten permissions before handing the config to the loader
    all_repos_cfg.chmod(0o600)
    assert not clone.main(('--config-filename', str(all_repos_cfg)))

    depends_dir = root.join('all_repos_depends').ensure_dir()
    depends_cfg = depends_dir.join('all-repos-depends.json')
    depends_cfg.write(
        json.dumps({
            'all_repos_config': str(all_repos_cfg),
            'get_packages': [
                'all_repos_depends.packages.setup_py',
                'all_repos_depends.packages.package_json',
            ],
            'get_depends': [
                'all_repos_depends.depends.setup_py',
                'all_repos_depends.depends.requirements_tools',
            ],
        }),
    )
    database_path = depends_dir.join('database.db')
    assert not generate.main((
        '--config-filename', str(depends_cfg),
        '--database', str(database_path),
    ))
    return str(database_path)
def file_config_files(file_config):
    """Extend ``file_config`` with committed file content and a clone."""
    for directory, name, contents in (
        (file_config.dir1, 'f', 'OHAI\n'),
        (file_config.dir2, 'f', 'OHELLO\n'),
        (file_config.dir2, 'f2', ''),
    ):
        write_file_commit(directory, name, contents)
    clone.main(('--config-filename', str(file_config.cfg)))
    return file_config
def test_find_repos_finds_a_repo(file_config_files):
    """A repo containing an azure-pipelines.yml is reported by find_repos."""
    write_file_commit(file_config_files.dir1, 'azure-pipelines.yml', SAMPLE)
    clone.main(('--config-filename', str(file_config_files.cfg)))
    cfg = load_config(str(file_config_files.cfg))
    found = azure_pipelines_autoupdate.find_repos(cfg)
    assert found == {str(file_config_files.output_dir.join('repo1'))}