Ejemplo n.º 1
0
def make_dependabotv2(repo_path: pathlib.Path, templates: Environment) -> List[str]:
	"""
	Add configuration for ``dependabot`` to the desired repo.

	https://dependabot.com/

	:param repo_path: Path to the repository root.
	:param templates:

	.. versionadded:: 2020.12.11
	"""

	dependabot_file = PathPlus(repo_path / ".github" / "dependabot.yml")
	dependabot_file.parent.maybe_make()

	# Dependabot v2 configuration: a single weekly "pip" update job,
	# reviewed by the configured assignee.
	config = {
			"version": 2,
			"updates": [{
					"package-ecosystem": "pip",
					"directory": '/',
					"schedule": {"interval": "weekly"},
					"reviewers": [templates.globals["assignee"]],
					}],
			}

	dependabot_file.write_lines([
			f"# {templates.globals['managed_message']}",
			"---",
			_round_trip_dump(config),
			])

	return [dependabot_file.relative_to(repo_path).as_posix()]
Ejemplo n.º 2
0
    def process_file(self, filename):  # noqa: D102
        self.statements = []

        # Run the checker over the file; it populates self.statements.
        with fix_import_path([filename]):
            self._check_files(self.get_ast,
                              self._iterate_file_descrs([filename]))

        path = PathPlus(filename)
        lines = path.read_lines()

        for node in self.statements:
            if node.tolineno != node.lineno:
                # Multi-line statements are not handled.
                warnings.warn("Currently unable to convert this statement")
                continue

            expr_source = node.value.as_string()
            start_col = node.col_offset
            line_idx = node.lineno - 1

            # Wrap the expression in a print() call, preserving everything
            # before and after it on the same line.
            original = lines[line_idx]
            prefix = original[:start_col]
            suffix = original[start_col + len(expr_source):]
            lines[line_idx] = f"{prefix}print({expr_source}){suffix}"

        if lines[-1]:
            # ensure there's a newline at the end
            lines.append('')

        path.write_lines(lines)
Ejemplo n.º 3
0
def make_dependabot(repo_path: pathlib.Path, templates: Environment) -> List[str]:
	"""
	Add configuration for ``dependabot`` to the desired repo.

	https://dependabot.com/

	:param repo_path: Path to the repository root.
	:param templates:

	.. deprecated:: 2020.12.11
	"""

	dependabot_file = PathPlus(repo_path / ".dependabot" / "config.yml")
	dependabot_file.parent.maybe_make()

	# Legacy (v1) dependabot configuration: weekly Python updates,
	# with the configured assignee as default reviewer.
	config = {
			"version": 1,
			"update_configs": [{
					"package_manager": "python",
					"directory": '/',
					"update_schedule": "weekly",
					"default_reviewers": [templates.globals["assignee"]],
					}],
			}

	dependabot_file.write_lines([
			f"# {templates.globals['managed_message']}",
			"---",
			_round_trip_dump(config),
			])

	return [dependabot_file.relative_to(repo_path).as_posix()]
Ejemplo n.º 4
0
def requirement(requirement: str, file: Optional[str] = None) -> int:
    """
	Add a requirement.

	:param requirement: The requirement specifier to add.
	:param file: The requirements file to add it to.
		Defaults to ``requirements.txt`` in the repository root.

	:return: ``0`` on success (suitable for use as an exit code).
	"""

    # 3rd party
    from consolekit.utils import abort
    from domdf_python_tools.paths import PathPlus, traverse_to_file
    from domdf_python_tools.stringlist import StringList
    from packaging.requirements import InvalidRequirement
    from packaging.specifiers import SpecifierSet
    from shippinglabel import normalize_keep_dot
    from shippinglabel.requirements import ComparableRequirement, combine_requirements, read_requirements

    repo_dir: PathPlus = traverse_to_file(PathPlus.cwd(), "repo_helper.yml",
                                          "git_helper.yml")

    if file is None:
        requirements_file = repo_dir / "requirements.txt"

        if not requirements_file.is_file():
            # Bug fix: this branch previously reported f"'{file}' not found.",
            # which rendered as "'None' not found." when no file was given.
            raise abort("'requirements.txt' not found.")

    else:
        requirements_file = PathPlus(file)

        if not requirements_file.is_file():
            # Bug fix: this branch previously reported the hard-coded
            # "'requirements.txt' not found." instead of the requested file.
            raise abort(f"'{file}' not found.")

    try:
        req = ComparableRequirement(requirement)
    except InvalidRequirement as e:
        raise BadRequirement(requirement, e)

    # Verify the project exists on PyPI and canonicalise its name.
    response = (PYPI_API / req.name / "json/").get()
    if response.status_code != 200:
        raise click.BadParameter(f"No such project {req.name}")
    else:
        req.name = normalize(response.json()["info"]["name"])
        if not req.specifier:
            # No version constraint given: default to >= the latest release.
            req.specifier = SpecifierSet(
                f">={response.json()['info']['version']}")

        click.echo(f"Adding requirement '{req}'")

    requirements, comments, invalid_lines = read_requirements(
        req_file=requirements_file,
        include_invalid=True,
        normalize_func=normalize_keep_dot,
    )

    requirements.add(req)

    # Comments and unparseable lines are preserved at the top of the file,
    # followed by the merged, sorted requirements.
    buf = StringList([*comments, *invalid_lines])
    buf.extend(str(req) for req in sorted(combine_requirements(requirements)))
    requirements_file.write_lines(buf)

    return 0
Ejemplo n.º 5
0
	def write_out(self):
		"""
		Write out to the ``.ini`` file.
		"""

		output_file = PathPlus(self.base_path / self.filename)

		# Invoke the builder method for each managed section; ':', '.' and '-'
		# in the section name map to '_' in the method name.
		for section_name in self.managed_sections:
			method_name = re.sub("[:.-]", '_', section_name)
			getattr(self, method_name)()

		self.merge_existing(output_file)
		self._output.append(str(self._ini))
		output_file.write_lines(self._output)
Ejemplo n.º 6
0
def make_docs_contributing(repo_path: pathlib.Path, templates: Environment) -> List[str]:
	"""
	Add CONTRIBUTING.rst to the documentation directory of the repo.

	:param repo_path: Path to the repository root.
	:param templates:
	"""

	file = PathPlus(repo_path / templates.globals["docs_dir"] / "contributing.rst")
	file.parent.maybe_make(parents=True)

	contributing = templates.get_template("CONTRIBUTING.rst")
	rendered = contributing.render(bash_block=sphinx_bash_block)

	if templates.globals["standalone_contrib_guide"]:
		file.write_clean(rendered)
	else:
		# Replace the template's own title (first three lines) with an
		# "Overview" heading so it nests under the docs' existing structure.
		body_lines = rendered.splitlines()[3:]
		file.write_lines([
				"Overview",
				"---------",
				*body_lines,
				])

	return [file.relative_to(repo_path).as_posix()]
Ejemplo n.º 7
0
def make_rtfd(repo_path: pathlib.Path, templates: Environment) -> List[str]:
    """
	Add configuration for ``ReadTheDocs``.

	https://readthedocs.org/

	See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

	:param repo_path: Path to the repository root.
	:param templates:
	"""

    file = PathPlus(repo_path / ".readthedocs.yml")
    docs_dir = PathPlus(repo_path / templates.globals["docs_dir"])

    # One "requirements" install entry per requirements file.
    install_config: List[Dict] = [
        {"requirements": requirements_file} for requirements_file in (
            "requirements.txt",
            f"{templates.globals['docs_dir']}/requirements.txt",
            *templates.globals["additional_requirements_files"],
        )
    ]

    # The project itself is installed either via an extra-deps file, or as
    # a pip install of '.' (with the tox extras when configured).
    if (docs_dir / "rtd-extra-deps.txt").is_file():
        install_config.append(
            {"requirements": f"{templates.globals['docs_dir']}/rtd-extra-deps.txt"})
    elif templates.globals["tox_testenv_extras"]:
        install_config.append({
            "method": "pip",
            "path": '.',
            "extra_requirements": [templates.globals["tox_testenv_extras"]],
        })
    else:
        install_config.append({"method": "pip", "path": '.'})

    # Formats: Optionally build your docs in additional formats such as PDF and ePub
    config = {
        "version": 2,
        "sphinx": {
            "builder": "html",
            "configuration": f"{templates.globals['docs_dir']}/conf.py",
        },
        "formats": ["pdf", "htmlzip"],
        "python": {"version": 3.8, "install": install_config},
    }

    # TODO: support user customisation of search rankings
    # https://docs.readthedocs.io/en/stable/config-file/v2.html#search-ranking

    dumper = yaml.YAML()
    dumper.indent(mapping=2, sequence=3, offset=1)

    yaml_buf = yaml.StringIO()
    dumper.dump(config, yaml_buf)

    file.write_lines([
        f"# {templates.globals['managed_message']}",
        "# Read the Docs configuration file", "---",
        yaml_buf.getvalue()
    ])

    return [file.relative_to(repo_path).as_posix()]
Ejemplo n.º 8
0
def make_pre_commit(repo_path: pathlib.Path,
                    templates: Environment) -> List[str]:
    """
	Add configuration for ``pre-commit``.

	https://github.com/pre-commit/pre-commit

	# See https://pre-commit.com for more information
	# See https://pre-commit.com/hooks.html for more hooks

	:param repo_path: Path to the repository root.
	:param templates:
	"""

    docs_dir = templates.globals["docs_dir"]
    import_name = templates.globals["import_name"]
    stubs_package = templates.globals["stubs_package"]

    # Files excluded from source-code-only hooks (docs conf, packaging scripts).
    non_source_files = [
        posixpath.join(docs_dir, "conf"), "__pkginfo__", "setup"
    ]

    # domdfcoding/pre-commit-hooks: requirements sorting, docstring placement
    # (skipping non-source files and the tests directory), bind-requirements.
    domdfcoding_hooks = Repo(
        repo=make_github_url("domdfcoding", "pre-commit-hooks"),
        rev="v0.3.0",
        hooks=[
            {
                "id": "requirements-txt-sorter",
                "args": ["--allow-git"]
            },
            {
                "id":
                "check-docstring-first",
                "exclude":
                fr"^({'|'.join(non_source_files)}|{templates.globals['tests_dir']}/.*)\.py$"
            },
            "bind-requirements",
        ])

    # Only applies to the package's own modules (the "-stubs" variant for
    # stub-only packages).
    flake8_dunder_all = Repo(
        repo=make_github_url("domdfcoding", "flake8-dunder-all"),
        rev="v0.1.8",
        hooks=[{
            "id":
            "ensure-dunder-all",
            "files":
            fr"^{import_name}{'-stubs' if stubs_package else ''}/.*\.py$"
        }])

    snippet_fmt = Repo(
        repo=make_github_url("python-formate", "snippet-fmt"),
        rev="v0.1.4",
        hooks=["snippet-fmt"],
    )

    # Exclusions combine the configured yapf excludes with the non-source files.
    formate_excludes = fr"^({'|'.join([*templates.globals['yapf_exclude'], *non_source_files])})\.(_)?py$"

    formate = Repo(
        repo=make_github_url("python-formate", "formate"),
        rev="v0.4.9",
        hooks=[{
            "id": "formate",
            "exclude": formate_excludes
        }],
    )

    # dep_checker runs against the import package directory ('.' -> '/'),
    # optionally rooted at source_dir.
    dep_checker_args = [templates.globals["import_name"].replace('.', '/')]

    if templates.globals["source_dir"]:
        dep_checker_args.extend(
            ["--work-dir", templates.globals["source_dir"]])

    dep_checker = Repo(repo=make_github_url("domdfcoding", "dep_checker"),
                       rev="v0.6.2",
                       hooks=[{
                           "id": "dep_checker",
                           "args": dep_checker_args
                       }])

    pre_commit_file = PathPlus(repo_path / ".pre-commit-config.yaml")

    if not pre_commit_file.is_file():
        pre_commit_file.touch()

    dumper = ruamel.yaml.YAML()
    dumper.indent(mapping=2, sequence=3, offset=1)

    output = StringList([
        f"# {templates.globals['managed_message']}",
        "---",
        '',
        f"exclude: {templates.globals['pre_commit_exclude']}",
        '',
        "repos:",
    ])

    # Matches the three leading spaces at the start of a dumped block; no
    # re.MULTILINE, so only the first line is rewritten into a " - " list item.
    indent_re = re.compile("^ {3}")

    # Hooks this function owns; anything else found in the existing file is
    # treated as user-added and preserved (see below).
    managed_hooks = [
        pyproject_parser,
        pre_commit_hooks,
        domdfcoding_hooks,
        flake8_dunder_all,
        flake2lint,
        pygrep_hooks,
        pyupgrade,
        lucas_c_hooks,
        snippet_fmt,
        formate,
    ]

    if not templates.globals["stubs_package"]:
        managed_hooks.append(dep_checker)

    managed_hooks_urls = [str(hook.repo) for hook in managed_hooks]

    # Marker separating managed hooks from user-added ones in the YAML file.
    custom_hooks_comment = "# Custom hooks can be added below this comment"

    # Dump each managed hook, indent it three spaces, and turn the first
    # line into a YAML sequence item.
    for hook in managed_hooks:
        buf = StringIO()
        dumper.dump(hook.to_dict(), buf)
        output.append(indent_re.sub(" - ", indent(buf.getvalue(), "   ")))
        output.blankline(ensure_single=True)
    output.append(custom_hooks_comment)
    output.blankline(ensure_single=True)

    raw_yaml = pre_commit_file.read_text()

    # Re-emit any hooks the user added below the marker, except those that
    # duplicate a managed repo. "local" repos are kept as raw dicts.
    if custom_hooks_comment in raw_yaml:
        custom_hooks_yaml = pre_commit_file.read_text().split(
            custom_hooks_comment)[1]

        custom_hooks = []
        local_hooks = []

        for repo in yaml_safe_loader.load(custom_hooks_yaml) or []:
            if repo["repo"] == "local":
                local_hooks.append(repo)

            elif repo["repo"] not in managed_hooks_urls:
                custom_hooks.append(Repo(**repo))

        for hook in custom_hooks:
            buf = StringIO()
            dumper.dump(hook.to_dict(), buf)
            output.append(indent_re.sub(" - ", indent(buf.getvalue(), "   ")))
            output.blankline(ensure_single=True)

        for hook in local_hooks:
            buf = StringIO()
            dumper.dump(hook, buf)
            output.append(indent_re.sub(" - ", indent(buf.getvalue(), "   ")))
            output.blankline(ensure_single=True)

    pre_commit_file.write_lines(output)

    return [pre_commit_file.name]
Ejemplo n.º 9
0
def process_file(filename: PathLike) -> bool:
    """
	Augment Flake8 noqa comments with PyLint comments in the given file.

	:param filename:

	:return: :py:obj:`True` if the file contents were changed. :py:obj:`False` otherwise.
	"""

    file = PathPlus(filename)
    contents = file.read_lines()
    original_contents = contents[:]  # snapshot for change detection

    for idx, line in enumerate(contents):
        noqa = find_noqa(line)

        if noqa is None:
            # No noqa comment on this line.
            continue

        if noqa.groupdict()["codes"] is None:
            # Bare noqa with no specific codes; nothing to map to pylint.
            continue

        # Line has one or more noqa codes
        flake8_codes = DelimitedList(
            filter(bool, re.split("[,; ]",
                                  noqa.groupdict()["codes"])))

        line_before_comment = line[:noqa.span()[0]].rstrip()
        line_after_comments = line[noqa.span()[1]:]

        # Search for pylint: disable= after the noqa comment
        disabled = find_pylint_disable(line[noqa.span()[1]:])
        disabled_checks = set()

        if disabled:
            # Keep whatever text follows the existing pylint comment, and
            # start from its already-disabled checks.
            line_after_comments = line[noqa.span()[1]:][disabled.span()[1]:]
            checks = disabled.groupdict()["checks"]

            if checks:
                disabled_checks = set(re.split("[,; ]", checks))

        # Add the pylint equivalent of each flake8 code ('' when unmapped).
        for code in flake8_codes:
            disabled_checks.add(code_mapping.get(code, ''))

        # Drop empty entries introduced by unmapped codes.
        disabled_checks = set(filter(bool, map(str.strip, disabled_checks)))

        # Rebuild the line: code, noqa comment, optional pylint comment, tail.
        # NOTE(review): the ',' format spec on DelimitedList presumably joins
        # the codes with commas — confirm against domdf_python_tools.
        if line_before_comment:
            buf = [line_before_comment, f"  # noqa: {flake8_codes:,}"]
        else:
            buf = [f"# noqa: {flake8_codes:,}"]

        if disabled_checks:
            buf.extend([
                "  # pylint: disable=",
                f"{DelimitedList(sorted(disabled_checks)):,}",
            ])

        buf.extend([
            "  ",
            line_after_comments.lstrip(),
        ])

        contents[idx] = ''.join(buf).rstrip()

    changed = contents != original_contents

    if changed:
        file.write_lines(contents, trailing_whitespace=True)

    return changed
Ejemplo n.º 10
0
def ensure_bumpversion(repo_path: pathlib.Path,
                       templates: Environment) -> List[str]:
    """
	Add configuration for ``bumpversion`` to the desired repo.

	https://pypi.org/project/bumpversion/

	:param repo_path: Path to the repository root.
	:param templates:
	"""

    bumpversion_file = PathPlus(repo_path / ".bumpversion.cfg")

    # Create a minimal config file if none exists yet.
    if not bumpversion_file.is_file():
        bumpversion_file.write_lines([
            "[bumpversion]",
            f"current_version = {templates.globals['version']}",
            "commit = True",
            "tag = True",
        ])

    bv = ConfigUpdater()
    bv.read(str(bumpversion_file))

    # Sections which should no longer be present.
    old_sections = [
        "bumpversion:file:git_helper.yml", "bumpversion:file:__pkginfo__.py"
    ]

    # Bug fix: the f-string previously contained a literal placeholder
    # instead of {filename}, so every filename collapsed into one bogus
    # section name and no real file sections were ever created.
    required_sections = {
        f"bumpversion:file:{filename}"
        for filename in get_bumpversion_filenames(templates)
    }

    if not templates.globals["enable_docs"]:
        old_sections.append(
            f"bumpversion:file:{templates.globals['docs_dir']}/index.rst")

    if not templates.globals["enable_conda"]:
        old_sections.append("bumpversion:file:.github/workflows/conda_ci.yml")

    if templates.globals["use_whey"]:
        old_sections.append("bumpversion:file:setup.cfg")

    # Remove obsolete sections, and never re-add a section just removed.
    for section in old_sections:
        if section in bv.sections():
            bv.remove_section(section)
        if section in required_sections:
            required_sections.remove(section)

    for section in sorted(required_sections):
        if section not in bv.sections():
            bv.add_section(section)

    # The __init__ file gets search/replace targeting an annotated version
    # string (': str = "..."'), set only if not already customised.
    init_filename = get_init_filename(templates)
    if init_filename is not None:
        init_section = bv[f"bumpversion:file:{init_filename}"]
        if "search" not in init_section:
            init_section["search"] = ': str = "{current_version}"'
            init_section["replace"] = ': str = "{new_version}"'

    # setup.cfg: set the default search/replace, also replacing the legacy
    # (incorrect) "name = ..." search string if present.
    if "bumpversion:file:setup.cfg" in bv.sections():
        setup_cfg_section = bv["bumpversion:file:setup.cfg"]
        if ("search" not in setup_cfg_section or
            ("search" in setup_cfg_section and
             setup_cfg_section["search"].value == "name = {current_version}")):
            setup_cfg_section["search"] = "version = {current_version}"
            setup_cfg_section["replace"] = "version = {new_version}"

    if "bumpversion:file:pyproject.toml" in bv.sections():
        pp_toml_section = bv["bumpversion:file:pyproject.toml"]
        if "search" not in pp_toml_section:
            pp_toml_section["search"] = 'version = "{current_version}"'
            pp_toml_section["replace"] = 'version = "{new_version}"'

    # Always resynchronise the core settings with the template globals.
    bv["bumpversion"]["current_version"] = templates.globals["version"]
    bv["bumpversion"]["commit"] = "True"
    bv["bumpversion"]["tag"] = "True"

    bumpversion_file.write_clean(str(bv))

    return [bumpversion_file.name]