def validate_config(filenames: list[str]) -> int:
    ret = 0

    for filename in filenames:
        try:
            clientlib.load_config(filename)
        except clientlib.InvalidConfigError as e:
            print(e)
            ret = 1

    return ret
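
A minimal sketch (not part of the scraped examples) of wiring validate_config above into a small command line; the positional argument name and the default config path are assumptions.

import argparse


def main() -> int:
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'filenames', nargs='*',
        default=['.pre-commit-config.yaml'],  # assumed default path
    )
    args = parser.parse_args()
    # returns 1 if any of the given configs failed to load
    return validate_config(args.filenames)


if __name__ == '__main__':
    raise SystemExit(main())
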
Example 2
def check_useless_excludes(config_file: str) -> int:
    config = load_config(config_file)
    filenames = git.get_all_files()
    classifier = Classifier.from_config(
        filenames,
        config["files"],
        config["exclude"],
    )
    retv = 0

    exclude = config["exclude"]
    if not exclude_matches_any(filenames, "", exclude):
        print(
            f"The global exclude pattern {exclude!r} does not match any files",
        )
        retv = 1

    for repo in config["repos"]:
        for hook in repo["hooks"]:
            # Not actually a manifest dict, but this more accurately reflects
            # the defaults applied during runtime
            hook = apply_defaults(hook, MANIFEST_HOOK_DICT)
            names = classifier.filenames
            types, exclude_types = hook["types"], hook["exclude_types"]
            names = classifier.by_types(names, types, exclude_types)
            include, exclude = hook["files"], hook["exclude"]
            if not exclude_matches_any(names, include, exclude):
                print(
                    f'The exclude pattern {exclude!r} for {hook["id"]} does '
                    f'not match any files',
                )
                retv = 1

    return retv
Example 3
 def repositories(self):
     """Returns a tuple of the configured repositories."""
     config = load_config(self.config_file_path)
     repositories = tuple(Repository.create(x, self.store) for x in config)
     for repository in repositories:
         repository.require_installed()
     return repositories
Example 4
def test_gc_unused_local_repo_with_env(store, in_git_dir, cap_out):
    config = {
        'repo': 'local',
        'hooks': [{
            'id': 'flake8',
            'name': 'flake8',
            'entry': 'flake8',
            # a `language: python` local hook will create an environment
            'types': ['python'],
            'language': 'python',
        }],
    }
    write_config('.', config)
    store.mark_config_used(C.CONFIG_FILE)

    # this causes the repositories to be created
    all_hooks(load_config(C.CONFIG_FILE), store)

    assert _config_count(store) == 1
    assert _repo_count(store) == 1
    assert not gc(store)
    assert _config_count(store) == 1
    assert _repo_count(store) == 1
    assert cap_out.get().splitlines()[-1] == '0 repo(s) removed.'

    _remove_config_assert_cleared(store, cap_out)
Example 5
def check_useless_excludes(config_file):
    config = load_config(config_file)
    files = git.get_all_files()
    retv = 0

    exclude = config['exclude']
    if not exclude_matches_any(files, '', exclude):
        print(
            'The global exclude pattern {!r} does not match any files'
            .format(exclude),
        )
        retv = 1

    for repo in config['repos']:
        for hook in repo['hooks']:
            # Not actually a manifest dict, but this more accurately reflects
            # the defaults applied during runtime
            hook = apply_defaults(hook, MANIFEST_HOOK_DICT)
            include, exclude = hook['files'], hook['exclude']
            if not exclude_matches_any(files, include, exclude):
                print(
                    'The exclude pattern {!r} for {} does not match any files'
                    .format(exclude, hook['id']),
                )
                retv = 1

    return retv
Example 6
def check_useless_excludes(config_file):
    config = load_config(config_file)
    classifier = Classifier(git.get_all_files())
    retv = 0

    exclude = config['exclude']
    if not exclude_matches_any(classifier.filenames, '', exclude):
        print(
            'The global exclude pattern {!r} does not match any files'
            .format(exclude),
        )
        retv = 1

    for repo in config['repos']:
        for hook in repo['hooks']:
            # Not actually a manifest dict, but this more accurately reflects
            # the defaults applied during runtime
            hook = apply_defaults(hook, MANIFEST_HOOK_DICT)
            names = classifier.filenames
            types, exclude_types = hook['types'], hook['exclude_types']
            names = classifier.by_types(names, types, exclude_types)
            include, exclude = hook['files'], hook['exclude']
            if not exclude_matches_any(names, include, exclude):
                print(
                    'The exclude pattern {!r} for {} does not match any files'
                    .format(exclude, hook['id']),
                )
                retv = 1

    return retv
Example 8
def migrate_config(config_file: str, quiet: bool = False) -> int:
    # ensure that the configuration is a valid pre-commit configuration
    load_config(config_file)

    with open(config_file) as f:
        orig_contents = contents = f.read()

    contents = _migrate_map(contents)
    contents = _migrate_sha_to_rev(contents)

    if contents != orig_contents:
        with open(config_file, "w") as f:
            f.write(contents)

        print("Configuration has been migrated.")
    elif not quiet:
        print("Configuration is already migrated.")
    return 0
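
Hedged usage note: the autoupdate examples further down call migrate_config(config_file, quiet=True) before reading the config, so a caller can normalize an old-style file the same way; the path below is an assumption.

config_file = '.pre-commit-config.yaml'  # assumed path; the tests use C.CONFIG_FILE
migrate_config(config_file, quiet=True)  # quiet suppresses the "already migrated" message
config = load_config(config_file)        # load the (possibly rewritten) config
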
Example 9
def test_autoupdate_local_hooks(tempdir_factory, store):
    git_path = git_dir(tempdir_factory)
    config = config_with_local_hooks()
    path = add_config_to_repo(git_path, config)
    runner = Runner(path, C.CONFIG_FILE)
    assert autoupdate(runner, store, tags_only=False) == 0
    new_config_written = load_config(runner.config_file_path)
    assert len(new_config_written['repos']) == 1
    assert new_config_written['repos'][0] == config
Example 10
def test_autoupdate_local_hooks(tempdir_factory):
    git_path = git_dir(tempdir_factory)
    config = config_with_local_hooks()
    path = add_config_to_repo(git_path, config)
    runner = Runner(path, C.CONFIG_FILE)
    assert autoupdate(runner, tags_only=False) == 0
    new_config_written = load_config(runner.config_file_path)
    assert len(new_config_written['repos']) == 1
    assert new_config_written['repos'][0] == config
Example 11
def _hook_types(cfg_filename: str, hook_types: list[str] | None) -> list[str]:
    if hook_types is not None:
        return hook_types
    else:
        try:
            cfg = load_config(cfg_filename)
        except InvalidConfigError:
            return ['pre-commit']
        else:
            return cfg['default_install_hook_types']
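
A hypothetical call site (not from the source) illustrating the fallback order in _hook_types: an explicit list wins, otherwise the config's default_install_hook_types is used, with ['pre-commit'] as the last resort for an invalid config.

# explicit hook types from the command line are returned unchanged
assert _hook_types('.pre-commit-config.yaml', ['pre-push']) == ['pre-push']

# with None the result comes from default_install_hook_types in the config,
# or falls back to ['pre-commit'] when the config cannot be parsed
hook_types = _hook_types('.pre-commit-config.yaml', None)
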
Example 12
def check_all_hooks_match_files(config_file: str) -> int:
    classifier = Classifier(git.get_all_files())
    retv = 0

    for hook in all_hooks(load_config(config_file), Store()):
        if hook.always_run or hook.language == 'fail':
            continue
        elif not classifier.filenames_for_hook(hook):
            print(f'{hook.id} does not apply to this repository')
            retv = 1

    return retv
Example 13
def check_all_hooks_match_files(config_file):
    classifier = Classifier(git.get_all_files())
    retv = 0

    for hook in all_hooks(load_config(config_file), Store()):
        if hook.always_run or hook.language == 'fail':
            continue
        elif not classifier.filenames_for_hook(hook):
            print('{} does not apply to this repository'.format(hook.id))
            retv = 1

    return retv
Example 14
def run(
    config_file: str,
    store: Store,
    args: argparse.Namespace,
    environ: EnvironT = os.environ,
) -> int:
    stash = not args.all_files and not args.files

    # Check if we have unresolved merge conflict files and fail fast.
    if _has_unmerged_paths():
        logger.error('Unmerged files.  Resolve before committing.')
        return 1
    if bool(args.source) != bool(args.origin):
        logger.error('Specify both --origin and --source.')
        return 1
    if stash and _has_unstaged_config(config_file):
        logger.error(
            f'Your pre-commit configuration is unstaged.\n'
            f'`git add {config_file}` to fix this.',
        )
        return 1

    # Expose origin / source as environment variables for hooks to consume
    if args.origin and args.source:
        environ['PRE_COMMIT_ORIGIN'] = args.origin
        environ['PRE_COMMIT_SOURCE'] = args.source

    if args.remote_name and args.remote_url:
        environ['PRE_COMMIT_REMOTE_NAME'] = args.remote_name
        environ['PRE_COMMIT_REMOTE_URL'] = args.remote_url

    with contextlib.ExitStack() as exit_stack:
        if stash:
            exit_stack.enter_context(staged_files_only(store.directory))

        config = load_config(config_file)
        hooks = [
            hook for hook in all_hooks(config, store)
            if not args.hook or hook.id == args.hook or hook.alias == args.hook
            if args.hook_stage in hook.stages
        ]

        if args.hook and not hooks:
            output.write_line(
                f'No hook with id `{args.hook}` in stage `{args.hook_stage}`',
            )
            return 1

        install_hook_envs(hooks, store)

        return _run_hooks(config, hooks, args, environ)

    # https://github.com/python/mypy/issues/7726
    raise AssertionError('unreachable')
Example 15
def autoupdate(
        config_file: str,
        store: Store,
        tags_only: bool,
        freeze: bool,
        repos: Sequence[str] = (),
) -> int:
    """Auto-update the pre-commit config to the latest versions of repos."""
    migrate_config(config_file, quiet=True)
    retv = 0
    rev_infos: List[Optional[RevInfo]] = []
    changed = False

    config = load_config(config_file)
    for repo_config in config['repos']:
        if repo_config['repo'] in {LOCAL, META}:
            continue

        info = RevInfo.from_config(repo_config)
        if repos and info.repo not in repos:
            rev_infos.append(None)
            continue

        output.write(f'Updating {info.repo} ... ')
        new_info = info.update(tags_only=tags_only, freeze=freeze)
        try:
            _check_hooks_still_exist_at_rev(repo_config, new_info, store)
        except RepositoryCannotBeUpdatedError as error:
            output.write_line(error.args[0])
            rev_infos.append(None)
            retv = 1
            continue

        if new_info.rev != info.rev:
            changed = True
            if new_info.frozen:
                updated_to = f'{new_info.frozen} (frozen)'
            else:
                updated_to = new_info.rev
            msg = f'updating {info.rev} -> {updated_to}.'
            output.write_line(msg)
            rev_infos.append(new_info)
        else:
            output.write_line('already up to date.')
            rev_infos.append(None)

    if changed:
        _write_new_config(config_file, rev_infos)

    return retv
Example 16
def test_autoupdate_local_hooks_with_out_of_date_repo(
        out_of_date_repo, in_tmpdir, mock_out_store_directory,
):
    stale_config = make_config_from_repo(
        out_of_date_repo.path, sha=out_of_date_repo.original_sha, check=False,
    )
    local_config = config_with_local_hooks()
    config = {'repos': [local_config, stale_config]}
    write_config('.', config)
    runner = Runner('.', C.CONFIG_FILE)
    assert autoupdate(runner, tags_only=False) == 0
    new_config_written = load_config(runner.config_file_path)
    assert len(new_config_written['repos']) == 2
    assert new_config_written['repos'][0] == local_config
Example 17
def find_hook(args: argparse.Namespace, store: Store) -> Hook:
    config = load_config(args.config)
    hooks = [
        hook for hook in all_hooks(config, store)
        if not args.hook or hook.id == args.hook or hook.alias == args.hook
        if args.hook_stage in hook.stages
    ]

    if not hooks:
        raise ValueError(
            f"No hook with id `{args.hook}` in stage `{args.hook_stage}`")

    install_hook_envs(hooks, store)
    return hooks[0]
Example 19
def check_all_hooks_match_files(config_file):
    files = git.get_all_files()
    retv = 0

    for hook in all_hooks(load_config(config_file), Store()):
        if hook.always_run or hook.language == 'fail':
            continue
        include, exclude = hook.files, hook.exclude
        filtered = _filter_by_include_exclude(files, include, exclude)
        types, exclude_types = hook.types, hook.exclude_types
        filtered = _filter_by_types(filtered, types, exclude_types)
        if not filtered:
            print('{} does not apply to this repository'.format(hook.id))
            retv = 1

    return retv
Example 20
def run(config_file, store, args, environ=os.environ):
    no_stash = args.all_files or bool(args.files)

    # Check if we have unresolved merge conflict files and fail fast.
    if _has_unmerged_paths():
        logger.error('Unmerged files.  Resolve before committing.')
        return 1
    if bool(args.source) != bool(args.origin):
        logger.error('Specify both --origin and --source.')
        return 1
    if _has_unstaged_config(config_file) and not no_stash:
        logger.error(
            'Your pre-commit configuration is unstaged.\n'
            '`git add {}` to fix this.'.format(config_file),
        )
        return 1

    # Expose origin / source as environment variables for hooks to consume
    if args.origin and args.source:
        environ['PRE_COMMIT_ORIGIN'] = args.origin
        environ['PRE_COMMIT_SOURCE'] = args.source

    if no_stash:
        ctx = noop_context()
    else:
        ctx = staged_files_only(store.directory)

    with ctx:
        config = load_config(config_file)
        hooks = [
            hook
            for hook in all_hooks(config, store)
            if not args.hook or hook.id == args.hook or hook.alias == args.hook
            if args.hook_stage in hook.stages
        ]

        if args.hook and not hooks:
            output.write_line(
                'No hook with id `{}` in stage `{}`'.format(
                    args.hook, args.hook_stage,
                ),
            )
            return 1

        install_hook_envs(hooks, store)

        return _run_hooks(config, hooks, args, environ)
Example 21
def check_all_hooks_match_files(config_file):
    files = git.get_all_files()
    retv = 0

    for repo in repositories(load_config(config_file), Store()):
        for hook_id, hook in repo.hooks:
            if hook['always_run'] or hook['language'] == 'fail':
                continue
            include, exclude = hook['files'], hook['exclude']
            filtered = _filter_by_include_exclude(files, include, exclude)
            types, exclude_types = hook['types'], hook['exclude_types']
            filtered = _filter_by_types(filtered, types, exclude_types)
            if not filtered:
                print('{} does not apply to this repository'.format(hook_id))
                retv = 1

    return retv
Example 22
def check_all_hooks_match_files(config_file: str) -> int:
    config = load_config(config_file)
    classifier = Classifier.from_config(
        git.get_all_files(),
        config["files"],
        config["exclude"],
    )
    retv = 0

    for hook in all_hooks(config, Store()):
        if hook.always_run or hook.language == "fail":
            continue
        elif not classifier.filenames_for_hook(hook):
            print(f"{hook.id} does not apply to this repository")
            retv = 1

    return retv
Example 23
def autoupdate(config_file, store, tags_only, repos=()):
    """Auto-update the pre-commit config to the latest versions of repos."""
    migrate_config(config_file, quiet=True)
    retv = 0
    output_repos = []
    changed = False

    input_config = load_config(config_file)

    for repo_config in input_config['repos']:
        if (
            repo_config['repo'] in {LOCAL, META} or
            # Skip updating any repo_configs that aren't for the specified repo
            repos and repo_config['repo'] not in repos
        ):
            output_repos.append(repo_config)
            continue
        output.write('Updating {}...'.format(repo_config['repo']))
        try:
            new_repo_config = _update_repo(repo_config, store, tags_only)
        except RepositoryCannotBeUpdatedError as error:
            output.write_line(error.args[0])
            output_repos.append(repo_config)
            retv = 1
            continue

        if new_repo_config['rev'] != repo_config['rev']:
            changed = True
            output.write_line(
                'updating {} -> {}.'.format(
                    repo_config['rev'], new_repo_config['rev'],
                ),
            )
            output_repos.append(new_repo_config)
        else:
            output.write_line('already up to date.')
            output_repos.append(repo_config)

    if changed:
        output_config = input_config.copy()
        output_config['repos'] = output_repos
        _write_new_config_file(config_file, output_config)

    return retv
Example 24
def autoupdate(config_file, store, tags_only, freeze, repos=()):
    """Auto-update the pre-commit config to the latest versions of repos."""
    migrate_config(config_file, quiet=True)
    retv = 0
    rev_infos = []
    changed = False

    config = load_config(config_file)
    for repo_config in config['repos']:
        if repo_config['repo'] in {LOCAL, META}:
            continue

        info = RevInfo.from_config(repo_config)
        if repos and info.repo not in repos:
            rev_infos.append(None)
            continue

        output.write('Updating {} ... '.format(info.repo))
        new_info = info.update(tags_only=tags_only, freeze=freeze)
        try:
            _check_hooks_still_exist_at_rev(repo_config, new_info, store)
        except RepositoryCannotBeUpdatedError as error:
            output.write_line(error.args[0])
            rev_infos.append(None)
            retv = 1
            continue

        if new_info.rev != info.rev:
            changed = True
            if new_info.frozen:
                updated_to = '{} (frozen)'.format(new_info.frozen)
            else:
                updated_to = new_info.rev
            msg = 'updating {} -> {}.'.format(info.rev, updated_to)
            output.write_line(msg)
            rev_infos.append(new_info)
        else:
            output.write_line('already up to date.')
            rev_infos.append(None)

    if changed:
        _write_new_config(config_file, rev_infos)

    return retv
Example 26
def run(config_file, store, args, environ=os.environ):
    no_stash = args.all_files or bool(args.files)

    # Check if we have unresolved merge conflict files and fail fast.
    if _has_unmerged_paths():
        logger.error('Unmerged files.  Resolve before committing.')
        return 1
    if bool(args.source) != bool(args.origin):
        logger.error('Specify both --origin and --source.')
        return 1
    if _has_unstaged_config(config_file) and not no_stash:
        logger.error(
            'Your pre-commit configuration is unstaged.\n'
            '`git add {}` to fix this.'.format(config_file),
        )
        return 1

    # Expose origin / source as environment variables for hooks to consume
    if args.origin and args.source:
        environ['PRE_COMMIT_ORIGIN'] = args.origin
        environ['PRE_COMMIT_SOURCE'] = args.source

    if no_stash:
        ctx = noop_context()
    else:
        ctx = staged_files_only(store.directory)

    with ctx:
        config = load_config(config_file)
        hooks = [
            hook
            for hook in all_hooks(config, store)
            if not args.hook or hook.id == args.hook or hook.alias == args.hook
            if args.hook_stage in hook.stages
        ]

        if args.hook and not hooks:
            output.write_line('No hook with id `{}`'.format(args.hook))
            return 1

        install_hook_envs(hooks, store)

        return _run_hooks(config, hooks, args, environ)
Example 27
def autoupdate(runner, tags_only):
    """Auto-update the pre-commit config to the latest versions of repos."""
    retv = 0
    output_configs = []
    changed = False

    input_configs = load_config(
        runner.config_file_path,
        load_strategy=ordered_load,
    )

    for repo_config in input_configs:
        if is_local_repo(repo_config):
            output_configs.append(repo_config)
            continue
        output.write('Updating {}...'.format(repo_config['repo']))
        try:
            new_repo_config = _update_repo(repo_config, runner, tags_only)
        except RepositoryCannotBeUpdatedError as error:
            output.write_line(error.args[0])
            output_configs.append(repo_config)
            retv = 1
            continue

        if new_repo_config['sha'] != repo_config['sha']:
            changed = True
            output.write_line('updating {} -> {}.'.format(
                repo_config['sha'],
                new_repo_config['sha'],
            ))
            output_configs.append(new_repo_config)
        else:
            output.write_line('already up to date.')
            output_configs.append(repo_config)

    if changed:
        with open(runner.config_file_path, 'w') as config_file:
            config_file.write(
                ordered_dump(remove_defaults(output_configs, CONFIG_SCHEMA),
                             **C.YAML_DUMP_KWARGS))

    return retv
Example 28
def autoupdate(runner, tags_only):
    """Auto-update the pre-commit config to the latest versions of repos."""
    retv = 0
    output_configs = []
    changed = False

    input_configs = load_config(
        runner.config_file_path,
        load_strategy=ordered_load,
    )

    for repo_config in input_configs:
        if is_local_repo(repo_config):
            output_configs.append(repo_config)
            continue
        output.write('Updating {}...'.format(repo_config['repo']))
        try:
            new_repo_config = _update_repo(repo_config, runner, tags_only)
        except RepositoryCannotBeUpdatedError as error:
            output.write_line(error.args[0])
            output_configs.append(repo_config)
            retv = 1
            continue

        if new_repo_config['sha'] != repo_config['sha']:
            changed = True
            output.write_line('updating {} -> {}.'.format(
                repo_config['sha'], new_repo_config['sha'],
            ))
            output_configs.append(new_repo_config)
        else:
            output.write_line('already up to date.')
            output_configs.append(repo_config)

    if changed:
        with open(runner.config_file_path, 'w') as config_file:
            config_file.write(ordered_dump(
                remove_defaults(output_configs, CONFIG_SCHEMA),
                **C.YAML_DUMP_KWARGS
            ))

    return retv
Example 29
def autoupdate(runner, tags_only, repo=None):
    """Auto-update the pre-commit config to the latest versions of repos."""
    migrate_config(runner, quiet=True)
    retv = 0
    output_repos = []
    changed = False

    input_config = load_config(runner.config_file_path)

    for repo_config in input_config['repos']:
        if (is_local_repo(repo_config) or is_meta_repo(repo_config) or
                # Skip updating any repo_configs that aren't for the specified repo
                repo and repo != repo_config['repo']):
            output_repos.append(repo_config)
            continue
        output.write('Updating {}...'.format(repo_config['repo']))
        try:
            new_repo_config = _update_repo(repo_config, runner, tags_only)
        except RepositoryCannotBeUpdatedError as error:
            output.write_line(error.args[0])
            output_repos.append(repo_config)
            retv = 1
            continue

        if new_repo_config['sha'] != repo_config['sha']:
            changed = True
            output.write_line('updating {} -> {}.'.format(
                repo_config['sha'],
                new_repo_config['sha'],
            ))
            output_repos.append(new_repo_config)
        else:
            output.write_line('already up to date.')
            output_repos.append(repo_config)

    if changed:
        output_config = input_config.copy()
        output_config['repos'] = output_repos
        _write_new_config_file(runner.config_file_path, output_config)

    return retv
Example 30
def test_gc_config_with_missing_hook(
        tempdir_factory, store, in_git_dir, cap_out,
):
    path = make_repo(tempdir_factory, 'script_hooks_repo')
    write_config('.', make_config_from_repo(path))
    store.mark_config_used(C.CONFIG_FILE)
    # to trigger a clone
    all_hooks(load_config(C.CONFIG_FILE), store)

    with modify_config() as config:
        # add a hook which does not exist, make sure we don't crash
        config['repos'][0]['hooks'].append({'id': 'does-not-exist'})

    assert _config_count(store) == 1
    assert _repo_count(store) == 1
    assert not gc(store)
    assert _config_count(store) == 1
    assert _repo_count(store) == 1
    assert cap_out.get().splitlines()[-1] == '0 repo(s) removed.'

    _remove_config_assert_cleared(store, cap_out)
Example 32
def check_useless_excludes(config_file: str) -> int:
    config = load_config(config_file)
    filenames = git.get_all_files()
    classifier = Classifier.from_config(
        filenames,
        config['files'],
        config['exclude'],
    )
    retv = 0

    exclude = config['exclude']
    if not exclude_matches_any(filenames, '', exclude):
        print(
            f'The global exclude pattern {exclude!r} does not match any files',
        )
        retv = 1

    for repo in config['repos']:
        for hook in repo['hooks']:
            # the default of manifest hooks is `types: [file]` but we may
            # be configuring a symlink hook while there's a broken symlink
            hook.setdefault('types', [])
            # Not actually a manifest dict, but this more accurately reflects
            # the defaults applied during runtime
            hook = apply_defaults(hook, MANIFEST_HOOK_DICT)
            names = classifier.filenames
            types = hook['types']
            types_or = hook['types_or']
            exclude_types = hook['exclude_types']
            names = classifier.by_types(names, types, types_or, exclude_types)
            include, exclude = hook['files'], hook['exclude']
            if not exclude_matches_any(names, include, exclude):
                print(
                    f'The exclude pattern {exclude!r} for {hook["id"]} does '
                    f'not match any files',
                )
                retv = 1

    return retv
Example 33
def autoupdate(runner, tags_only):
    """Auto-update the pre-commit config to the latest versions of repos."""
    migrate_config(runner, quiet=True)
    retv = 0
    output_repos = []
    changed = False

    input_config = load_config(runner.config_file_path)

    for repo_config in input_config['repos']:
        if is_local_repo(repo_config):
            output_repos.append(repo_config)
            continue
        output.write('Updating {}...'.format(repo_config['repo']))
        try:
            new_repo_config = _update_repo(repo_config, runner, tags_only)
        except RepositoryCannotBeUpdatedError as error:
            output.write_line(error.args[0])
            output_repos.append(repo_config)
            retv = 1
            continue

        if new_repo_config['sha'] != repo_config['sha']:
            changed = True
            output.write_line('updating {} -> {}.'.format(
                repo_config['sha'], new_repo_config['sha'],
            ))
            output_repos.append(new_repo_config)
        else:
            output.write_line('already up to date.')
            output_repos.append(repo_config)

    if changed:
        output_config = input_config.copy()
        output_config['repos'] = output_repos
        _write_new_config_file(runner.config_file_path, output_config)

    return retv
Example 34
def test_gc_unused_local_repo_with_env(store, in_git_dir, cap_out):
    config = {
        'repo': 'local',
        'hooks': [{
            'id': 'flake8', 'name': 'flake8', 'entry': 'flake8',
            # a `language: python` local hook will create an environment
            'types': ['python'], 'language': 'python',
        }],
    }
    write_config('.', config)
    store.mark_config_used(C.CONFIG_FILE)

    # this causes the repositories to be created
    all_hooks(load_config(C.CONFIG_FILE), store)

    assert _config_count(store) == 1
    assert _repo_count(store) == 1
    assert not gc(store)
    assert _config_count(store) == 1
    assert _repo_count(store) == 1
    assert cap_out.get().splitlines()[-1] == '0 repo(s) removed.'

    _remove_config_assert_cleared(store, cap_out)
Example 35
def _gc_repos(store):
    configs = store.select_all_configs()
    repos = store.select_all_repos()

    # delete config paths which do not exist
    dead_configs = [p for p in configs if not os.path.exists(p)]
    live_configs = [p for p in configs if os.path.exists(p)]

    all_repos = {(repo, ref): path for repo, ref, path in repos}
    unused_repos = set(all_repos)
    for config_path in live_configs:
        try:
            config = load_config(config_path)
        except InvalidConfigError:
            dead_configs.append(config_path)
            continue
        else:
            for repo in config['repos']:
                _mark_used_repos(store, all_repos, unused_repos, repo)

    store.delete_configs(dead_configs)
    for db_repo_name, ref in unused_repos:
        store.delete_repo(db_repo_name, ref, all_repos[(db_repo_name, ref)])
    return len(unused_repos)
Example 36
def _gc_repos(store: Store) -> int:
    configs = store.select_all_configs()
    repos = store.select_all_repos()

    # delete config paths which do not exist
    dead_configs = [p for p in configs if not os.path.exists(p)]
    live_configs = [p for p in configs if os.path.exists(p)]

    all_repos = {(repo, ref): path for repo, ref, path in repos}
    unused_repos = set(all_repos)
    for config_path in live_configs:
        try:
            config = load_config(config_path)
        except InvalidConfigError:
            dead_configs.append(config_path)
            continue
        else:
            for repo in config['repos']:
                _mark_used_repos(store, all_repos, unused_repos, repo)

    store.delete_configs(dead_configs)
    for db_repo_name, ref in unused_repos:
        store.delete_repo(db_repo_name, ref, all_repos[(db_repo_name, ref)])
    return len(unused_repos)
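
A minimal sketch of a gc entry point built on _gc_repos; the real command may acquire a store lock first, but the 'N repo(s) removed.' message matches the assertions in the gc tests above.

def gc(store: Store) -> int:
    # drop stale configs and unreferenced cached repos, then report the count
    repos_removed = _gc_repos(store)
    output.write_line(f'{repos_removed} repo(s) removed.')
    return 0
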
Example 37
def run(
    config_file: str,
    store: Store,
    args: argparse.Namespace,
    environ: MutableMapping[str, str] = os.environ,
) -> int:
    stash = not args.all_files and not args.files

    # Check if we have unresolved merge conflict files and fail fast.
    if _has_unmerged_paths():
        logger.error('Unmerged files.  Resolve before committing.')
        return 1
    if bool(args.from_ref) != bool(args.to_ref):
        logger.error('Specify both --from-ref and --to-ref.')
        return 1
    if stash and _has_unstaged_config(config_file):
        logger.error(
            f'Your pre-commit configuration is unstaged.\n'
            f'`git add {config_file}` to fix this.',
        )
        return 1
    if (args.hook_stage in {'prepare-commit-msg', 'commit-msg'}
            and not args.commit_msg_filename):
        logger.error(
            f'`--commit-msg-filename` is required for '
            f'`--hook-stage {args.hook_stage}`',
        )
        return 1
    # prevent recursive post-checkout hooks (#1418)
    if (args.hook_stage == 'post-checkout'
            and environ.get('_PRE_COMMIT_SKIP_POST_CHECKOUT')):
        return 0

    # Expose from-ref / to-ref as environment variables for hooks to consume
    if args.from_ref and args.to_ref:
        # legacy names
        environ['PRE_COMMIT_ORIGIN'] = args.from_ref
        environ['PRE_COMMIT_SOURCE'] = args.to_ref
        # new names
        environ['PRE_COMMIT_FROM_REF'] = args.from_ref
        environ['PRE_COMMIT_TO_REF'] = args.to_ref

    if args.remote_name and args.remote_url:
        environ['PRE_COMMIT_REMOTE_NAME'] = args.remote_name
        environ['PRE_COMMIT_REMOTE_URL'] = args.remote_url

    if args.checkout_type:
        environ['PRE_COMMIT_CHECKOUT_TYPE'] = args.checkout_type

    # Set pre_commit flag
    environ['PRE_COMMIT'] = '1'

    with contextlib.ExitStack() as exit_stack:
        if stash:
            exit_stack.enter_context(staged_files_only(store.directory))

        config = load_config(config_file)
        hooks = [
            hook for hook in all_hooks(config, store)
            if not args.hook or hook.id == args.hook or hook.alias == args.hook
            if args.hook_stage in hook.stages
        ]

        if args.hook and not hooks:
            output.write_line(
                f'No hook with id `{args.hook}` in stage `{args.hook_stage}`',
            )
            return 1

        install_hook_envs(hooks, store)

        return _run_hooks(config, hooks, args, environ)

    # https://github.com/python/mypy/issues/7726
    raise AssertionError('unreachable')
Example 38
def install_hooks(config_file, store):
    install_hook_envs(all_hooks(load_config(config_file), store), store)
Example 40
 def config(self):
     return load_config(self.config_file_path)
Example 41
def install_hooks(config_file: str, store: Store) -> int:
    install_hook_envs(all_hooks(load_config(config_file), store), store)
    return 0