def get_gh_actions_matrix(self) -> Dict[str, Tuple[str, bool]]:
    """
    Determines the matrix of Python versions used in GitHub Actions.

    .. versionadded:: $VERSION
    """

    config = self.templates.globals

    python_versions = config["python_versions"]
    tox_py_versions = config["tox_py_versions"]
    third_party_version_matrix = config["third_party_version_matrix"]

    # TODO: support multi-library matrices
    if third_party_version_matrix:
        third_party_library = list(third_party_version_matrix.keys())[0]
        third_party_versions = DelimitedList(third_party_version_matrix[third_party_library])
        matrix_testenv_string = f"-{third_party_library}{{{third_party_versions:,}}}"
    else:
        matrix_testenv_string = ''

    output: Dict[str, Tuple[str, bool]] = {}

    for (py_version, metadata), gh_py_version, tox_py_version in zip(
            python_versions.items(),
            set_gh_actions_versions(python_versions),
            tox_py_versions,
            ):
        output[str(gh_py_version)] = (
            f"{tox_py_version}{matrix_testenv_string},build",
            metadata["experimental"],
            )

    return output

def envlists(self):
    """
    ``[envlists]``.
    """

    tox_envs: List[str] = []

    if self["third_party_version_matrix"]:
        cov_envlist: List[str] = []

        for third_party_library in self["third_party_version_matrix"]:
            third_party_versions = DelimitedList(self["third_party_version_matrix"][third_party_library])
            matrix_testenv_string = f"-{third_party_library}{{{third_party_versions:,}}}"
            tox_envs.extend(v + matrix_testenv_string for v in self["tox_py_versions"])

            if not cov_envlist:
                cov_envlist = [
                    f"py{self['python_deploy_version'].replace('.', '')}-{third_party_library}{third_party_versions[0]}",
                    "coverage",
                    ]
    else:
        tox_envs = self["tox_py_versions"]
        cov_envlist = [f"py{self['python_deploy_version']}".replace('.', ''), "coverage"]

    self._ini["envlists"]["test"] = tox_envs
    self._ini["envlists"]["qa"] = ["mypy", "lint"]

    if self["enable_tests"]:
        self._ini["envlists"]["cov"] = cov_envlist

def parse_help_args(obj: MarkdownHelpMixin, ctx: click.Context, args: List[str]) -> List[str]:
    """
    Helper function for markdown help classes to parse the given arguments and modify the context as necessary.

    .. versionadded:: 1.1.0

    :param obj:
    :param ctx:
    :param args:
    """  # noqa: D400

    # This is necessary to parse any --colour/--no-colour commands before generating the help,
    # to ensure the option is honoured.

    if not args and obj.no_args_is_help and not ctx.resilient_parsing:
        click.echo(ctx.get_help(), color=ctx.color)
        ctx.exit()

    parser = obj.make_parser(ctx)
    opts, args, param_order = parser.parse_args(args=args)

    obj._colour = opts.get("colour", ctx.color)

    for param in iter_params_for_processing(param_order, obj.get_params(ctx)):
        value, args = param.handle_parse_result(ctx, opts, args)

    if args and not ctx.allow_extra_args and not ctx.resilient_parsing:
        args_string = DelimitedList(map(make_str, args))
        ctx.fail(f"Got unexpected extra {_argument(len(args))} ({args_string: })")

    ctx.args = args
    return args

def use_package(package: str, config: Config, *args: str, **kwargs: str) -> None:
    r"""
    Configure LaTeX to use the given package.

    The ``\usepackage`` entry is added to the
    :py:obj:`sphinx.config.Config.latex_elements` ``["preamble"]`` attribute.

    :param package:
    :param config:
    :param \*args:
    :param \*\*kwargs:
    """

    options: DelimitedList[str] = DelimitedList()
    options.extend(args)
    # Format keyword options as "key=value" pairs.
    options.extend(f"{k}={v}" for k, v in kwargs.items())

    use_string = rf"\usepackage[{options:,}]{{{package}}}"

    if not hasattr(config, "latex_elements") or not config.latex_elements:  # pragma: no cover
        config.latex_elements = {}  # type: ignore

    latex_preamble = config.latex_elements.get("preamble", '')

    if use_string not in latex_preamble:
        config.latex_elements["preamble"] = f"{latex_preamble}\n{use_string}"

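# A minimal, hypothetical sketch of the option formatting used above.
# "geometry", "final" and "margin=2cm" are made-up values, not part of use_package().
from domdf_python_tools.stringlist import DelimitedList

options: DelimitedList[str] = DelimitedList(["final", "margin=2cm"])
print(rf"\usepackage[{options:,}]{{geometry}}")
# -> \usepackage[final,margin=2cm]{geometry}
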
def __init__(self, repo_path: pathlib.Path, templates: Environment):
    self.repo_path = repo_path
    self.templates = templates

    self.actions = templates.get_template("github_ci.yml")
    self.workflows_dir = PathPlus(repo_path / ".github" / "workflows")
    self.workflows_dir.maybe_make(parents=True)

    code_file_filter: DelimitedList[str] = DelimitedList()

    if self.templates.globals["enable_docs"]:
        code_file_filter.append(f"{templates.globals['docs_dir']}/**")
    else:
        code_file_filter.append("doc-source/**")

    code_file_filter.extend([
        "CONTRIBUTING.rst",
        ".imgbotconfig",
        ".pre-commit-config.yaml",
        ".pylintrc",
        ".readthedocs.yml",
        ])
    # ".bumpversion.cfg",
    # ".style.yapf",
    # "stubs.txt",

    self._code_file_filter = f"!({code_file_filter:|})"

def __repr__(self) -> str:
    """
    Return a string representation of the :class:`~.HeaderMapping`.

    .. versionadded:: 0.4.1
    """

    items = DelimitedList([f"{k!r}: {v!r}" for k, v in self.items()])
    as_dict = f"{{{items:, }}}"

    return f"<{self.__class__.__name__}({as_dict})>"

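# Sketch of the join used in __repr__ above, with an assumed single header item:
from domdf_python_tools.stringlist import DelimitedList

items = DelimitedList([f"{k!r}: {v!r}" for k, v in {"Content-Type": "text/html"}.items()])
print(f"{{{items:, }}}")
# -> {'Content-Type': 'text/html'}
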
def visit_collapse_node(translator: HTMLTranslator, node: CollapseNode):
    """
    Visit a :class:`~.CollapseNode`.

    :param translator:
    :param node: The node being visited.
    """

    tag_parts = DelimitedList(["details"])

    if node.get("names", None):
        names = DelimitedList(node["names"])
        tag_parts.append(f'name="{names: }"')

    if node.get("classes", None):
        classes = DelimitedList(node["classes"])
        tag_parts.append(f'class="{classes: }"')

    translator.body.append(f"<{tag_parts: }>\n<summary>{node.label}</summary>")
    translator.context.append("</details>")

def create_docs_install_block(
        repo_name: str,
        username: str,
        conda: bool = True,
        pypi: bool = True,
        pypi_name: Optional[str] = None,
        conda_channels: Optional[Sequence[str]] = None,
        ) -> str:
    """
    Create the installation instructions for insertion into the documentation.

    :param repo_name: The name of the GitHub repository.
    :param username: The username of the GitHub account that owns the repository.
        (Not used; ensures API compatibility with :func:`~.create_readme_install_block`)
    :param conda: Whether to show Anaconda installation instructions.
    :param pypi: Whether to show PyPI installation instructions.
    :param pypi_name: The name of the project on PyPI. Defaults to the value of ``repo_name`` if unset.
    :param conda_channels: List of required Conda channels.

    :return: The installation block created from the above settings.
    """

    if not conda_channels and conda:
        raise ValueError("Please supply a list of 'conda_channels' if Conda builds are supported")

    if not pypi_name:
        pypi_name = repo_name

    conda_channels = DelimitedList(conda_channels or [])

    block = StringList([".. start installation", '', f".. installation:: {pypi_name}"])

    with block.with_indent_size(1):
        if pypi:
            block.append(":pypi:")

        block.append(":github:")

        if conda:
            block.append(":anaconda:")
            block.append(f":conda-channels: {conda_channels:, }")

    block.blankline()
    block.append(".. end installation")

    return str(block)

def visit_ImportFrom(self, node: ast.ImportFrom) -> None:  # noqa: D102
    if node.level != 0:
        # relative import
        return

    if node.module != "collections":
        return

    new_imports = []

    collections_abc_imports: DelimitedList[str] = DelimitedList()
    collections_imports: DelimitedList[str] = DelimitedList()

    name: ast.alias
    for name in node.names:
        if name.name in collections.abc.__all__:  # type: ignore
            collections_abc_imports.append(name.name)
        else:
            collections_imports.append(name.name)

    text_range = self.tokens.get_text_range(node)

    if collections_abc_imports:
        new_imports.append(f"from collections.abc import {collections_abc_imports:, }")
    if collections_imports:
        new_imports.append(f"from collections import {collections_imports:, }")

    indent = re.split("[A-Za-z]", self.source.split('\n')[node.lineno - 1])[0]

    rewritten_imports = [new_imports[0]]
    rewritten_imports.extend(indent + imp for imp in new_imports[1:])

    self.record_replacement(text_range, '\n'.join(rewritten_imports))

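# Illustration of the rewritten import lines, assuming a source line
# "from collections import OrderedDict, Sequence" was visited:
from domdf_python_tools.stringlist import DelimitedList

collections_abc_imports = DelimitedList(["Sequence"])
collections_imports = DelimitedList(["OrderedDict"])
print(f"from collections.abc import {collections_abc_imports:, }")
print(f"from collections import {collections_imports:, }")
# -> from collections.abc import Sequence
# -> from collections import OrderedDict
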
def get_third_party_version_matrix(self) -> Tuple[str, DelimitedList, str]:
    """
    Returns information about the matrix of third party versions.

    The returned object is a three-element tuple, comprising:

    * The name of the third party library.
    * A list of version strings.
    * The testenv suffix, e.g. ``-attrs{19.3,20.1}``.
    """

    third_party_library = list(self["third_party_version_matrix"].keys())[0]
    third_party_versions = DelimitedList(self["third_party_version_matrix"][third_party_library])
    matrix_testenv_string = f"-{third_party_library}{{{third_party_versions:,}}}"

    return third_party_library, third_party_versions, matrix_testenv_string

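# Standalone sketch of the testenv suffix from the docstring example;
# the attrs versions are assumed for illustration.
from domdf_python_tools.stringlist import DelimitedList

third_party_versions = DelimitedList(["19.3", "20.1"])
print(f"-attrs{{{third_party_versions:,}}}")
# -> -attrs{19.3,20.1}
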
def dump_list(self, v):
    values = DelimitedList(str(self.dump_value(u)) for u in v)
    single_line = f"[{values:, }]"

    if len(single_line) <= self.max_width:
        return single_line

    retval = StringList(['['])

    with retval.with_indent(" ", 1):
        for u in v:
            retval.append(f"{str(self.dump_value(u))},")

    retval.append(']')

    return str(retval)

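# The single-line branch above boils down to this join
# (the values are assumed to have been passed through dump_value() already):
from domdf_python_tools.stringlist import DelimitedList

values = DelimitedList(["1", "2", "3"])
print(f"[{values:, }]")
# -> [1, 2, 3]
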
def format_signature(obj: Union[type, FunctionType]) -> StringList:
    """
    Format the signature of the given object, for insertion into the highlight panel.

    :param obj:

    :return: A list of reStructuredText lines.
    """

    with monkeypatcher():
        obj.__annotations__ = get_type_hints(obj)

    signature: inspect.Signature = inspect.signature(obj)

    buf = StringList(".. parsed-literal::")
    buf.blankline()
    buf.indent_type = " "
    buf.indent_size = 1

    if signature.return_annotation is not inspect.Signature.empty and not isinstance(obj, type):
        return_annotation = f") -> {format_annotation(signature.return_annotation)}"
    else:
        return_annotation = ')'

    total_length = len(obj.__name__) + len(return_annotation)

    arguments_buf: DelimitedList[str] = DelimitedList()

    param: inspect.Parameter
    for param in signature.parameters.values():
        arguments_buf.append(f"{format_parameter(param)}")
        total_length += len(arguments_buf[-1])

    if total_length <= 60:
        signature_buf = StringList(''.join([f"{obj.__name__}(", f"{arguments_buf:, }", return_annotation]))
    else:
        signature_buf = StringList([f"{obj.__name__}("])
        signature_buf.indent_type = " "

        with signature_buf.with_indent_size(1):
            signature_buf.extend([f"{arguments_buf:,\n}" + ',', return_annotation])

    buf.extend(signature_buf)

    return buf

def version_callback(ctx, param, value):  # noqa: D103
    # 3rd party
    import repo_helper

    # this package
    from repo_helper_rtd import __version__

    if not value or ctx.resilient_parsing:
        return

    parts = DelimitedList([f"repo_helper_rtd version {__version__}"])

    if value > 1:
        parts.append(f"repo_helper {repo_helper.__version__}")

    click.echo(f"{parts:, }", color=ctx.color)
    ctx.exit()

def test_delimitedlist():
    data = DelimitedList(['a', 'b', 'c', 'd', 'e'])

    assert data.__format__(", ") == "a, b, c, d, e"
    assert data.__format__("; ") == "a; b; c; d; e"
    assert data.__format__(';') == "a;b;c;d;e"
    assert data.__format__('\n') == "a\nb\nc\nd\ne"

    assert f"{data:, }" == "a, b, c, d, e"
    assert f"{data:; }" == "a; b; c; d; e"
    assert f"{data:;}" == "a;b;c;d;e"
    assert f"{data:\n}" == "a\nb\nc\nd\ne"

def version_callback(ctx: click.Context, param: click.Option, value: int):  # noqa: D103
    # 3rd party
    import repo_helper

    # this package
    from repo_helper_github import __version__

    if not value or ctx.resilient_parsing:  # pragma: no cover
        return

    parts = DelimitedList([f"repo_helper_github version {__version__}"])

    if value > 1:
        parts.append(f"repo_helper {repo_helper.__version__}")

    click.echo(f"{parts:, }", color=ctx.color)
    ctx.exit()

def _format_envvar(param) -> Iterator[str]:
    """
    Format the envvars of a `click.Option` or `click.Argument`.
    """

    yield f'.. envvar:: {param.envvar}'
    yield " :noindex:"
    yield ''

    # TODO: Provide a role to link to the option that accepts slashes
    # Or replace the option directive entirely?
    # That would allow linking to the option of a specific command.

    if isinstance(param, click.Argument):
        yield f" Provides a default for :option:`{param.human_readable_name}`"
    else:
        param_ref = DelimitedList(param.opts)
        yield f" Provides a default for :option:`{param_ref: / } <{param_ref[0]}>`"

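# Example of the " / "-delimited option reference built above, with assumed option strings:
from domdf_python_tools.stringlist import DelimitedList

param_ref = DelimitedList(["--colour", "--no-colour"])
print(f":option:`{param_ref: / } <{param_ref[0]}>`")
# -> :option:`--colour / --no-colour <--colour>`
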
def dump_inline_table(self, section) -> str:
    """
    Preserve an inline table in its compact syntax instead of expanding into sections.

    .. seealso:: https://github.com/toml-lang/toml/blob/master/toml.md#user-content-inline-table
    """

    # See also: https://github.com/uiri/toml/pull/336/

    if isinstance(section, Mapping):
        val_list: DelimitedList[str] = DelimitedList([])

        for k, v in section.items():
            val_list.append(f"{k} = {self.dump_inline_table(v).rstrip()}")

        return f"{{ {val_list:, } }}"
    else:
        return str(self.dump_value(section))

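# Sketch of the inline-table join, assuming the values are already dumped TOML strings:
from domdf_python_tools.stringlist import DelimitedList

val_list = DelimitedList(['name = "attrs"', 'version = "20.1"'])
print(f"{{ {val_list:, } }}")
# -> { name = "attrs", version = "20.1" }
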
def format(self, line_offset: int = 0) -> str:  # noqa: A003  # pylint: disable=redefined-builtin
    """
    Formats the :class:`~.Generic`.

    :param line_offset:
    """

    if line_offset + len(repr(self)) > 110:
        # Line too long as is
        elements: DelimitedList[str] = DelimitedList()

        for element in self.elements:
            if isinstance(element, Generic):
                elements.append(textwrap.indent(element.format(line_offset + 4), '\t'))
            else:
                elements.append(textwrap.indent(str(element), '\t'))

        return f"{self.name}[\n{elements:,\n}\n ]"
    else:
        return repr(self)

def visit_Attribute(self, node: ast.Attribute) -> None:  # noqa: D102
    parts: DelimitedList[str] = DelimitedList()

    value: typing.Union[ast.Name, ast.expr] = node.value

    while True:
        if isinstance(value, ast.Name):
            parts.append(value.id)
            break
        elif isinstance(value, ast.Attribute):
            parts.append(value.value.id)  # type: ignore
            value = value.attr  # type: ignore
        elif isinstance(value, str):
            parts.append(value)
            break
        else:
            raise NotImplementedError(f"Unsupported value type {type(value)}")

    self.structure.append(f"{parts:.}.{node.attr}")

def version_callback(ctx: click.Context, param: click.Option, value: int):  # noqa: D103
    if not value or ctx.resilient_parsing:
        return

    # 3rd party
    import virtualenv  # type: ignore
    from domdf_python_tools.stringlist import DelimitedList

    # this package
    import pyproject_devenv

    parts = DelimitedList([f"pyproject-devenv version {pyproject_devenv.__version__}"])

    if value > 1:
        parts.append(f"virtualenv {virtualenv.__version__}")

    click.echo(f"{parts:, }", color=ctx.color)
    ctx.exit()

def tox(self):
    """
    ``[tox]``.
    """

    tox_envs: List[str] = []

    if self["third_party_version_matrix"]:
        for third_party_library in self["third_party_version_matrix"]:
            third_party_versions = DelimitedList(self["third_party_version_matrix"][third_party_library])
            matrix_testenv_string = f"-{third_party_library}{{{third_party_versions:,}}}"
            tox_envs.extend(v + matrix_testenv_string for v in self["tox_py_versions"])
    else:
        tox_envs = self["tox_py_versions"]

    self._ini["tox"]["envlist"] = [*tox_envs, "mypy", "build"]
    self._ini["tox"]["skip_missing_interpreters"] = True
    self._ini["tox"]["isolated_build"] = True

    tox_requires = {"pip>=20.3.3", "tox-pip-version>=0.0.7", *self["tox_requirements"]}

    if self["pypi_name"] != "tox-envlist":
        tox_requires.add("tox-envlist>=0.2.1")

    self._ini["tox"]["requires"] = indent_join(sorted(tox_requires))

def make(self) -> StringList:
    """
    Constructs the contents of the shields block.
    """

    buf = StringList()
    sections = {}
    substitutions = {}

    repo_name = self.repo_name
    username = self.username
    pypi_name = self.pypi_name

    if self.unique_name:
        buf.append(f".. start shields {self.unique_name.lstrip('_')}")
    else:
        buf.append(".. start shields")

    buf.blankline(ensure_single=True)

    buf.extend([".. list-table::", "\t:stub-columns: 1", "\t:widths: 10 90"])

    buf.blankline(ensure_single=True)

    sections["Activity"] = ["commits-latest", "commits-since", "maintained"]
    substitutions["commits-since"] = self.make_activity_shield(repo_name, username, self.version)
    substitutions["commits-latest"] = self.make_last_commit_shield(repo_name, username)
    substitutions["maintained"] = self.make_maintained_shield()

    sections["Other"] = ["license", "language", "requires"]
    substitutions["requires"] = self.make_requires_shield(repo_name, username)
    substitutions["license"] = self.make_license_shield(repo_name, username)
    substitutions["language"] = self.make_language_shield(repo_name, username)

    sections["QA"] = ["codefactor", "actions_flake8", "actions_mypy"]
    substitutions["codefactor"] = self.make_codefactor_shield(repo_name, username)
    substitutions["actions_flake8"] = self.make_actions_shield(repo_name, username, "Flake8", "Flake8 Status")
    substitutions["actions_mypy"] = self.make_actions_shield(repo_name, username, "mypy", "mypy status")

    if self.docs:
        sections["Docs"] = ["docs", "docs_check"]
        substitutions["docs"] = self.make_rtfd_shield(repo_name, self.docs_url)
        substitutions["docs_check"] = self.make_docs_check_shield(repo_name, username)

    sections["Tests"] = []

    if "Linux" in self.platforms:
        sections["Tests"].append("actions_linux")
        substitutions["actions_linux"] = self.make_actions_shield(
            repo_name,
            username,
            "Linux",
            "Linux Test Status",
            )

    if "Windows" in self.platforms:
        sections["Tests"].append("actions_windows")
        substitutions["actions_windows"] = self.make_actions_shield(
            repo_name,
            username,
            "Windows",
            "Windows Test Status",
            )

    if "macOS" in self.platforms:
        sections["Tests"].append("actions_macos")
        substitutions["actions_macos"] = self.make_actions_shield(
            repo_name,
            username,
            "macOS",
            "macOS Test Status",
            )

    if self.tests:
        sections["Tests"].append("coveralls")
        substitutions["coveralls"] = self.make_coveralls_shield(repo_name, username)

    if self.on_pypi:
        sections["PyPI"] = ["pypi-version", "supported-versions", "supported-implementations", "wheel"]
        substitutions["pypi-version"] = self.make_pypi_version_shield(pypi_name)
        substitutions["supported-versions"] = self.make_python_versions_shield(pypi_name)
        substitutions["supported-implementations"] = self.make_python_implementations_shield(pypi_name)
        substitutions["wheel"] = self.make_wheel_shield(pypi_name)

        sections["Activity"].append("pypi-downloads")
        substitutions["pypi-downloads"] = self.make_pypi_downloads_shield(pypi_name)

    if self.conda:
        sections["Anaconda"] = ["conda-version", "conda-platform"]
        substitutions["conda-version"] = self.make_conda_version_shield(pypi_name, self.primary_conda_channel)
        substitutions["conda-platform"] = self.make_conda_platform_shield(pypi_name, self.primary_conda_channel)

    if self.docker_shields:
        docker_name = self.docker_name
        sections["Docker"] = ["docker_build", "docker_automated", "docker_size"]
        substitutions["docker_build"] = self.make_docker_build_status_shield(docker_name, username)
        substitutions["docker_automated"] = self.make_docker_automated_build_shield(docker_name, username)
        substitutions["docker_size"] = self.make_docker_size_shield(docker_name, username)

    for section in self.sections:
        if section not in sections or not sections[section]:
            continue

        images = DelimitedList([f"|{name}{self.unique_name}|" for name in sections[section]])
        buf.extend([f"\t* - {section}", f"\t  - {images: }"])

    for sub_name in self.substitutions:
        if sub_name not in substitutions:
            continue

        buf.blankline(ensure_single=True)
        buf.append(f".. |{sub_name}{self.unique_name}| {substitutions[sub_name][3:]}")

    buf.blankline(ensure_single=True)
    buf.append(".. end shields")
    # buf.blankline(ensure_single=True)

    return buf

def _get_additional_requirements(self) -> Iterator[str]:
    if self.templates.globals["travis_additional_requirements"]:
        additional_requirements = DelimitedList(self.templates.globals["travis_additional_requirements"])
        yield f"python -m pip install --upgrade {additional_requirements: }"

def __init__(self, name: str, elements: typing.Sequence[typing.Union[str, "Generic", "List"]]):
    self.name = str(name)
    self.elements: DelimitedList[typing.Union[str, Generic, List]] = DelimitedList(elements)

def __init__(self, elements: typing.Sequence[typing.Union[str, Generic, "List"]]):
    self.elements = DelimitedList(elements)

def format_annotation(annotation, fully_qualified: bool = False) -> str:
    """
    Format a type annotation.

    :param annotation:
    :param fully_qualified: Whether the fully qualified name should be shown (e.g. ``typing.List``)
        or only the object name (e.g. ``List``).

    :rtype:

    .. versionchanged:: 2.13.0

        Added support for :py:obj:`True` and :py:obj:`False`
    """

    prefix = '' if fully_qualified else '~'

    # Special cases
    if annotation is None or annotation is type(None):  # noqa: E721
        return ":py:obj:`None`"
    elif isinstance(annotation, bool):
        return f":py:obj:`{annotation}`"
    elif annotation is Ellipsis:
        return "..."
    elif annotation is itertools.cycle:
        return f":func:`{prefix}itertools.cycle`"
    elif annotation is types.GetSetDescriptorType:  # noqa: E721
        return f":py:data:`{prefix}types.GetSetDescriptorType`"
    elif annotation is types.MemberDescriptorType:  # noqa: E721
        return f":py:data:`{prefix}types.MemberDescriptorType`"
    elif annotation is types.MappingProxyType:  # noqa: E721
        return f":py:class:`{prefix}types.MappingProxyType`"
    elif annotation is types.ModuleType:  # noqa: E721
        return f":py:class:`{prefix}types.ModuleType`"
    elif annotation is types.FunctionType:  # noqa: E721
        return f":py:data:`{prefix}types.FunctionType`"
    elif annotation is types.BuiltinFunctionType:  # noqa: E721
        return f":py:data:`{prefix}types.BuiltinFunctionType`"
    elif annotation is types.MethodType:  # noqa: E721
        return f":py:data:`{prefix}types.MethodType`"
    elif annotation is MethodDescriptorType:
        return f":py:data:`{prefix}types.MethodDescriptorType`"
    elif annotation is ClassMethodDescriptorType:
        return f":py:data:`{prefix}types.ClassMethodDescriptorType`"
    elif annotation is MethodWrapperType:
        return f":py:data:`{prefix}types.MethodWrapperType`"
    elif annotation is WrapperDescriptorType:
        return f":py:data:`{prefix}types.WrapperDescriptorType`"
    elif isinstance(annotation, ForwardRef):
        # Unresolved forward ref
        return f":py:obj:`{prefix}.{annotation.__forward_arg__}`"
    elif annotation is type(re.compile('')):  # noqa: E721
        return f":py:class:`{prefix}typing.Pattern`"
    elif annotation is TemporaryDirectory:
        return f":py:obj:`{prefix}tempfile.TemporaryDirectory`"
    elif sys.version_info >= (3, 10):  # pragma: no cover (<py310)
        if annotation is types.UnionType:  # noqa: E721
            return f":py:data:`{prefix}types.UnionType`"

    try:
        module = get_annotation_module(annotation)
        class_name = get_annotation_class_name(annotation, module)

        # Special case for typing.NewType being a class in 3.10
        # Fixed upstream in 1.13.0
        if sys.version_info >= (3, 10) and isinstance(annotation, NewType):  # pragma: no cover (<py310)
            module, class_name = "typing", "NewType"

        args = get_annotation_args(annotation, module, class_name)
    except ValueError:
        return f":py:obj:`~.{annotation}`"

    if module == "_io":
        module = "io"
    elif module == "_ast":
        module = "ast"
    # Redirect all typing_extensions types to the stdlib typing module
    elif module == "typing_extensions":
        module = "typing"

    full_name = (f"{module}.{class_name}") if module != "builtins" else class_name
    prefix = '' if fully_qualified or full_name == class_name else '~'
    role = "data" if class_name in pydata_annotations else "class"
    args_format = "\\[{}]"
    formatted_args = ''

    # Type variables are also handled specially
    with suppress(TypeError):
        if isinstance(annotation, TypeVar) and annotation is not AnyStr:
            if sys.version_info < (3, 7):  # pragma: no cover (py37+)
                typevar_name = annotation.__name__
            else:  # pragma: no cover (<py37)
                typevar_name = (annotation.__module__ + '.' + annotation.__name__)

            return f":py:data:`{repr(annotation)} <{typevar_name}>`"

    # Some types require special handling
    if full_name == "typing.NewType":
        args_format = f"\\(:py:data:`~{annotation.__name__}`, {{}})"
        role = "class" if sys.version_info > (3, 10) else "func"
    elif full_name in {"typing.Union", "types.UnionType"} and len(args) == 2 and type(None) in args:
        full_name = "typing.Optional"
    elif full_name == "types.UnionType":
        full_name = "typing.Union"
        role = "data"
    elif full_name == "typing.Callable" and args and args[0] is not ...:
        formatted_args = "\\[\\[" + ", ".join(format_annotation(arg) for arg in args[:-1]) + ']'
        formatted_args += ", " + format_annotation(args[-1]) + ']'
    elif full_name == "typing.Literal":
        # TODO: Enums?
        formatted_arg_list: DelimitedList[str] = DelimitedList()

        for arg in args:
            if isinstance(arg, bool):
                formatted_arg_list.append(format_annotation(arg))
            else:
                formatted_arg_list.append(code_repr(arg))

        formatted_args = f"\\[{formatted_arg_list:, }]"

    if full_name == "typing.Optional":
        args = tuple(x for x in args if x is not type(None))  # noqa: E721

    # TODO: unions with one or more forward refs

    if args and not formatted_args:
        formatted_args = args_format.format(", ".join(format_annotation(arg, fully_qualified) for arg in args))

    return f":py:{role}:`{prefix}{full_name}`{formatted_args}"

def _sortedset(iterable: Iterable[str]) -> DelimitedList:
    return DelimitedList(sorted(set(iterable)))

def process_file(filename: PathLike) -> bool:
    """
    Augment Flake8 noqa comments with PyLint comments in the given file.

    :param filename:

    :return: :py:obj:`True` if the file contents were changed. :py:obj:`False` otherwise.
    """

    file = PathPlus(filename)
    contents = file.read_lines()
    original_contents = contents[:]

    for idx, line in enumerate(contents):
        noqa = find_noqa(line)

        if noqa is None:
            continue

        if noqa.groupdict()["codes"] is None:
            continue

        # Line has one or more noqa codes
        flake8_codes = DelimitedList(filter(bool, re.split("[,; ]", noqa.groupdict()["codes"])))

        line_before_comment = line[:noqa.span()[0]].rstrip()
        line_after_comments = line[noqa.span()[1]:]

        # Search for pylint: disable= after the noqa comment
        disabled = find_pylint_disable(line[noqa.span()[1]:])
        disabled_checks = set()

        if disabled:
            line_after_comments = line[noqa.span()[1]:][disabled.span()[1]:]

            checks = disabled.groupdict()["checks"]
            if checks:
                disabled_checks = set(re.split("[,; ]", checks))

        for code in flake8_codes:
            disabled_checks.add(code_mapping.get(code, ''))

        disabled_checks = set(filter(bool, map(str.strip, disabled_checks)))

        if line_before_comment:
            buf = [line_before_comment, f" # noqa: {flake8_codes:,}"]
        else:
            buf = [f"# noqa: {flake8_codes:,}"]

        if disabled_checks:
            buf.extend([
                " # pylint: disable=",
                f"{DelimitedList(sorted(disabled_checks)):,}",
                ])

        buf.extend([
            ' ',
            line_after_comments.lstrip(),
            ])

        contents[idx] = ''.join(buf).rstrip()

    changed = contents != original_contents

    if changed:
        file.write_lines(contents, trailing_whitespace=True)

    return changed

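# Rough end-to-end sketch of the comment rewriting, with an assumed flake8 code
# and an assumed pylint counterpart (in process_file() the counterpart comes from code_mapping):
from domdf_python_tools.stringlist import DelimitedList

line_before_comment = "x = some_very_long_call()"
flake8_codes = DelimitedList(["E501"])
disabled_checks = {"line-too-long"}

buf = [line_before_comment, f" # noqa: {flake8_codes:,}"]
buf.extend([" # pylint: disable=", f"{DelimitedList(sorted(disabled_checks)):,}"])
print(''.join(buf))
# -> x = some_very_long_call() # noqa: E501 # pylint: disable=line-too-long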