def enforce_present_absent(self, *partial_names: str) -> Iterator[Fuss]:
    """Enforce files that should be present or absent.

    :param partial_names: Names of the files to enforce configs for.
    :return: Fuss generator.
    """
    if not self.project:
        return

    # Walk both sections of the style: files that must exist and files that must not.
    for should_exist, key in ((True, "present"), (False, "absent")):
        logger.debug(f"Enforce {key} files")
        file_mapping = self.project.nitpick_files_section.get(key, {})

        for filename in filter_names(file_mapping, *partial_names):
            file_path: Path = self.project.root / filename
            # Nothing to report when the file is already in the desired state.
            if file_path.exists() == should_exist:
                continue

            custom_message = file_mapping[filename]
            extra = f": {custom_message}" if custom_message else ""
            violation = ProjectViolations.MISSING_FILE if should_exist else ProjectViolations.FILE_SHOULD_BE_DELETED
            yield Reporter(FileInfo.create(self.project, filename)).make_fuss(violation, extra=extra)
def merge_styles(self, offline: bool) -> Iterator[Fuss]:
    """Merge one or multiple style files."""
    config = self.read_configuration()

    # pylint: disable=import-outside-toplevel
    from nitpick.style import StyleManager

    style = StyleManager(self, offline, config.cache)
    # A configured file (if any) becomes the base URI for resolving relative style references.
    base_uri = config.file.expanduser().resolve().as_uri() if config.file else None
    initial_errors = list(style.find_initial_styles(peekable(always_iterable(config.styles)), base_uri))
    if initial_errors:
        raise QuitComplainingError(initial_errors)

    self.style_dict = style.merge_toml_dict()

    from nitpick.flake8 import NitpickFlake8Extension

    minimum_version = search_json(self.style_dict, NITPICK_MINIMUM_VERSION_JMEX, None)
    logger.debug(f"Minimum version: {minimum_version}")
    current_version = NitpickFlake8Extension.version
    if minimum_version and version_to_tuple(current_version) < version_to_tuple(minimum_version):
        yield Reporter().make_fuss(
            ProjectViolations.MINIMUM_VERSION,
            project=PROJECT_NAME,
            expected=minimum_version,
            actual=current_version,
        )

    self.nitpick_section = self.style_dict.get("nitpick", {})
    self.nitpick_files_section = self.nitpick_section.get("files", {})
def read_configuration(self) -> Configuration:
    """Search for a configuration file and validate it against a Marshmallow schema."""
    chosen_file: Path | None = None
    # Scan every candidate: the first existing one wins, later ones are only warned about.
    for candidate_name in CONFIG_FILES:
        candidate: Path = self.root / candidate_name
        if not candidate.exists():
            continue
        if chosen_file:
            logger.warning(f"Config file: ignoring existing {candidate}")
        else:
            logger.info(f"Config file: reading from {candidate}")
            chosen_file = candidate

    if not chosen_file:
        logger.warning("Config file: none found")
        return Configuration(None, [], "")

    toml_doc = TomlDoc(path=chosen_file)
    config_dict = search_json(toml_doc.as_object, TOOL_NITPICK_JMEX, {})

    validation_errors = ToolNitpickSectionSchema().validate(config_dict)
    if not validation_errors:
        return Configuration(chosen_file, config_dict.get("style", []), config_dict.get("cache", ""))

    # pylint: disable=import-outside-toplevel
    from nitpick.plugins.info import FileInfo

    raise QuitComplainingError(
        Reporter(FileInfo(self, PYPROJECT_TOML)).make_fuss(
            StyleViolations.INVALID_DATA_TOOL_NITPICK,
            flatten_marshmallow_errors(validation_errors),
            section=TOOL_NITPICK_KEY,
        )
    )
def common_fix_or_check(context, verbose: int, files, check_only: bool) -> None:
    """Common CLI code for both "fix" and "check" commands."""
    if verbose:
        chosen_level = logging.INFO if verbose == 1 else logging.DEBUG
        # https://loguru.readthedocs.io/en/stable/resources/recipes.html#changing-the-level-of-an-existing-handler
        # https://github.com/Delgan/loguru/issues/138#issuecomment-525594566
        logger.remove()
        logger.add(sys.stderr, level=logging.getLevelName(chosen_level))

    logger.enable(PROJECT_NAME)

    nit = get_nitpick(context)
    try:
        for fuss in nit.run(*files, autofix=not check_only):
            nit.echo(fuss.pretty)
    except QuitComplainingError as err:
        # Print every collected violation before bailing out with a distinct exit code.
        for fuss in err.violations:
            click.echo(fuss.pretty)
        raise Exit(2) from err

    click.secho(Reporter.get_counts())
    if Reporter.manual or Reporter.fixed:
        raise Exit(1)
def run(self, *partial_names: str, autofix=False) -> Iterator[Fuss]:
    """Run Nitpick.

    :param partial_names: Names of the files to enforce configs for.
    :param autofix: Flag to modify files, if the plugin supports it (default: False).
    :return: Fuss generator.
    """
    # Counters are class-level state on Reporter; clear them before each run.
    Reporter.reset()

    try:
        yield from chain(
            self.project.merge_styles(self.offline),
            self.enforce_present_absent(*partial_names),
            self.enforce_style(*partial_names, autofix=autofix),
        )
    except QuitComplainingError as err:
        # A fatal error still surfaces its collected violations to the caller.
        yield from err.violations
def __init__(self, info: FileInfo, expected_config: JsonDict, autofix=False) -> None:
    """Initialize the plugin for a single file.

    :param info: File information (project, path, tags).
    :param expected_config: Expected configuration for the file, taken from the style file.
    :param autofix: Flag to modify files, if the plugin supports it (default: False).
    """
    self.info = info
    self.filename = info.path_from_root
    self.reporter = Reporter(info, self.violation_base_code)

    self.file_path: Path = self.info.project.root / self.filename

    # Configuration for this file as a TOML dict, taken from the style file.
    self.expected_config: JsonDict = expected_config or {}

    # Autofix is only honoured when the plugin declares its files as fixable.
    self.autofix = self.fixable and autofix
    # Dirty flag to avoid changing files without need
    self.dirty: bool = False

    self._merge_special_configs()
def confirm_project_root(dir_: PathOrStr | None = None) -> Path:
    """Confirm this is the root dir of the project (the one that has one of the ``ROOT_FILES``)."""
    candidate_dir = Path(dir_ or Path.cwd()).resolve()

    matches = glob_files(candidate_dir, ROOT_FILES)
    logger.debug(f"Root files found: {matches}")
    if matches:
        # Any match will do: the parent of the first root file is the project root.
        return next(iter(matches)).parent

    logger.error(f"No root files found on directory {candidate_dir}")
    raise QuitComplainingError(Reporter().make_fuss(ProjectViolations.NO_ROOT_DIR))
def _read_toml(self, file_contents: str, display_name: str) -> JsonDict:
    """Parse a TOML string, quitting with a violation report when it cannot be parsed."""
    toml = TomlDoc(string=file_contents)
    try:
        return toml.as_object
    # TODO: refactor: replace by this error when using tomlkit only in the future:
    # except TOMLKitError as err:
    except TomlDecodeError as err:
        # If the TOML itself could not be parsed, we can't go on
        raise QuitComplainingError(
            Reporter(FileInfo(self.project, display_name)).make_fuss(
                StyleViolations.INVALID_TOML, exception=pretty_exception(err)
            )
        ) from err
def _include_style(self, style_url: furl) -> Iterator[Fuss]:
    """Fetch, validate and merge a single style file, recursing into its sub-styles."""
    # Each style URL is processed at most once per run.
    if style_url.url in self._already_included:
        return
    self._already_included.add(style_url.url)

    file_contents = self._style_fetcher_manager.fetch(style_url)
    if file_contents is None:
        return

    # generate a 'human readable' version of the URL; a relative path for local files
    # and the URL otherwise.
    display_name = style_url.url
    if style_url.scheme == "file":
        local_path = furl_path_to_python_path(style_url.path)
        with suppress(ValueError):
            local_path = local_path.relative_to(self.project.root)
        display_name = str(local_path)

    read_toml_dict = self._read_toml(file_contents, display_name)

    # normalize sub-style URIs, before merging
    sub_styles = []
    for ref in always_iterable(search_json(read_toml_dict, NITPICK_STYLES_INCLUDE_JMEX, [])):
        sub_styles.append(self._style_fetcher_manager.normalize_url(ref, style_url))
    if sub_styles:
        read_toml_dict.setdefault("nitpick", {}).setdefault("styles", {})["include"] = [
            str(url) for url in sub_styles
        ]

    toml_dict, validation_errors = self._config_validator.validate(read_toml_dict)
    if validation_errors:
        yield Reporter(FileInfo(self.project, display_name)).make_fuss(
            StyleViolations.INVALID_CONFIG, flatten_marshmallow_errors(validation_errors)
        )

    dpath.util.merge(self._merged_styles, flatten(toml_dict, custom_reducer(SEPARATOR_FLATTEN)))

    yield from self.include_multiple_styles(sub_styles)
def find_main_python_file(root_dir: Path) -> Path:
    """Find the main Python file in the root dir, the one that will be used to report Flake8 warnings.

    The search order is:

    1. Python files that belong to the root dir of the project (e.g.: ``setup.py``, ``autoapp.py``).
    2. ``manage.py``: they can be on the root or on a subdir (Django projects).
    3. Any other ``*.py`` Python file on the root dir and subdir.
       This avoid long recursions when there is a ``node_modules`` subdir for instance.
    """
    candidates = itertools.chain(
        (root_dir / root_file for root_file in ROOT_PYTHON_FILES),  # 1.
        root_dir.glob(f"*/{MANAGE_PY}"),  # 2.
        root_dir.glob("*.py"),  # 3.
        root_dir.glob("*/*.py"),
    )
    for candidate in candidates:
        if candidate.exists():
            logger.info(f"Found the file {candidate}")
            return Path(candidate)

    raise QuitComplainingError(Reporter().make_fuss(ProjectViolations.NO_PYTHON_FILE, root=str(root_dir)))
def test_reporter():
    """Test error reporter."""
    reporter = Reporter()
    reporter.reset()
    assert (reporter.manual, reporter.fixed) == (0, 0)
    assert reporter.get_counts() == "No violations found. ✨ 🍰 ✨"

    # A plain increment counts as a manual change.
    reporter.increment()
    assert (reporter.manual, reporter.fixed) == (1, 0)
    assert reporter.get_counts() == "Violations: ❌ 1 to change manually."

    # increment(True) counts as an automatic fix.
    reporter.increment(True)
    assert (reporter.manual, reporter.fixed) == (1, 1)
    assert reporter.get_counts() == "Violations: ✅ 1 fixed, ❌ 1 to change manually."

    # reset() zeroes both counters.
    reporter.reset()
    assert (reporter.manual, reporter.fixed) == (0, 0)

    reporter.increment(True)
    assert (reporter.manual, reporter.fixed) == (0, 1)
    assert reporter.get_counts() == "Violations: ✅ 1 fixed."
class NitpickPlugin(metaclass=abc.ABCMeta):  # pylint: disable=too-many-instance-attributes
    """Base class for Nitpick plugins.

    :param info: File information (project, path, tags).
    :param expected_config: Expected configuration for the file
    :param autofix: Flag to modify files, if the plugin supports it (default: False).
    """

    __str__, __unicode__ = autotext("{self.info.path_from_root} ({self.__class__.__name__})")

    filename = ""  # TODO: refactor: remove filename attribute after fixing dynamic/fixed schema loading
    violation_base_code: int = 0

    #: Can this plugin modify its files directly? Are the files fixable?
    fixable: bool = False

    #: Nested validation field for this file, to be applied in runtime when the validation schema is rebuilt.
    #: Useful when you have a strict configuration for a file type (e.g. :py:class:`nitpick.plugins.json.JsonPlugin`).
    validation_schema: Schema | None = None

    #: Which ``identify`` tags this :py:class:`nitpick.plugins.base.NitpickPlugin` child recognises.
    identify_tags: set[str] = set()

    skip_empty_suggestion = False

    def __init__(self, info: FileInfo, expected_config: JsonDict, autofix=False) -> None:
        self.info = info
        self.filename = info.path_from_root
        self.reporter = Reporter(info, self.violation_base_code)

        self.file_path: Path = self.info.project.root / self.filename

        # Configuration for this file as a TOML dict, taken from the style file.
        self.expected_config: JsonDict = expected_config or {}

        # Autofix is only honoured when the plugin declares its files as fixable.
        self.autofix = self.fixable and autofix
        # Dirty flag to avoid changing files without need
        self.dirty: bool = False

        self._merge_special_configs()

    def _merge_special_configs(self):
        """Merge the predefined plugin config with the style dict to create the special config."""
        spc = self.predefined_special_config()
        temp_dict = spc.list_keys.from_plugin.copy()  # pylint: disable=no-member

        # The user can override the default list keys (if any) by setting them on the style file.
        # pylint: disable=assigning-non-slot,no-member
        spc.list_keys.from_style = self.expected_config.pop(DUNDER_LIST_KEYS, None) or {}
        temp_dict.update(flatten_quotes(spc.list_keys.from_style))

        flat_config = flatten_quotes(self.expected_config)
        for key_with_pattern, parent_child_keys in temp_dict.items():
            for expanded_key in fnmatch.filter(flat_config.keys(), key_with_pattern):
                spc.list_keys.value[expanded_key] = parent_child_keys

        self.special_config = spc

    def predefined_special_config(self) -> SpecialConfig:  # pylint: disable=no-self-use
        """Create a predefined special configuration for this plugin. Each plugin can override this method."""
        return SpecialConfig()

    @mypy_property
    @lru_cache()
    def nitpick_file_dict(self) -> JsonDict:
        """Nitpick configuration for this file as a TOML dict, taken from the style file."""
        return search_json(self.info.project.nitpick_section, f'files."{self.filename}"', {})

    def entry_point(self) -> Iterator[Fuss]:
        """Entry point of the Nitpick plugin."""
        self.post_init()

        should_exist: bool = bool(self.info.project.nitpick_files_section.get(self.filename, True))
        if self.file_path.exists() and not should_exist:
            logger.info(f"{self}: File {self.filename} exists when it should not")
            # Only display this message if the style is valid.
            yield self.reporter.make_fuss(SharedViolations.DELETE_FILE)
            return

        has_config_dict = bool(self.expected_config or self.nitpick_file_dict)
        if not has_config_dict:
            return

        yield from self._enforce_file_configuration()

    def _enforce_file_configuration(self):
        """Enforce rules on an existing file or suggest contents for a missing one, then write if dirty."""
        file_exists = self.file_path.exists()
        if file_exists:
            logger.info(f"{self}: Enforcing rules")
            yield from self.enforce_rules()
        else:
            yield from self._suggest_when_file_not_found()

        if self.autofix and self.dirty:
            fuss = self.write_file(file_exists)  # pylint: disable=assignment-from-none
            if fuss:
                yield fuss

    def post_init(self):
        """Hook for plugin initialization after the instance was created.

        The name mimics ``__post_init__()`` on dataclasses, without the magic double underscores:
        `Post-init processing <https://docs.python.org/3/library/dataclasses.html#post-init-processing>`_
        """

    def _suggest_when_file_not_found(self):
        """Yield a violation suggesting the initial contents for a file that doesn't exist yet."""
        suggestion = self.initial_contents
        if not suggestion and self.skip_empty_suggestion:
            return
        logger.info(f"{self}: Suggest initial contents for {self.filename}")

        if suggestion:
            yield self.reporter.make_fuss(SharedViolations.CREATE_FILE_WITH_SUGGESTION, suggestion, fixed=self.autofix)
        else:
            yield self.reporter.make_fuss(SharedViolations.CREATE_FILE)

    def write_file(self, file_exists: bool) -> Fuss | None:  # pylint: disable=unused-argument,no-self-use
        """Hook to write the new file when autofix mode is on. Should be used by inherited classes."""
        return None

    @abc.abstractmethod
    def enforce_rules(self) -> Iterator[Fuss]:
        """Enforce rules for this file. It must be overridden by inherited classes if needed."""

    @property
    @abc.abstractmethod
    def initial_contents(self) -> str:
        """Suggested initial content when the file doesn't exist."""

    def write_initial_contents(self, doc_class: type[BaseDoc], expected_dict: dict | None = None) -> str:
        """Helper to write initial contents based on a format.

        :param doc_class: Document class used to serialize the dict into a formatted string.
        :param expected_dict: Dict to write; falls back to the plugin's expected config when empty/None.
        :return: The formatted contents (also written to disk when autofix is on).
        """
        if not expected_dict:
            expected_dict = self.expected_config

        formatted_str = doc_class(obj=expected_dict).reformatted
        if self.autofix:
            self.file_path.parent.mkdir(exist_ok=True, parents=True)
            self.file_path.write_text(formatted_str)
        return formatted_str