Esempio n. 1
0
def concatenate_json(*files: PathLike, outfile: Optional[PathLike] = None) -> SampleList:
	r"""
	Merge the contents of several JSON files and return the combined
	:class:`Sample` objects as a single :class:`SampleList`.

	:param \*files: The JSON files to merge.
	:param outfile: Where to write the merged JSON. If :py:obj:`None` no file will be saved.
	"""  # noqa: D400

	combined = SampleList()

	for path in files:
		# TODO: drop the ignore once https://github.com/python/mypy/issues/5018 is fixed
		loaded = PathPlus(path).load_json(json_library=sdjson)  # type: ignore

		for entry in loaded:
			combined.append(Sample(**entry))

	if outfile is not None:
		# TODO: drop the ignore once https://github.com/python/mypy/issues/5018 is fixed
		PathPlus(outfile).dump_json(combined, json_library=sdjson, indent=2)  # type: ignore

	return combined
def iter_submodules(module: str) -> Iterator[str]:
    """
    Returns an iterator over the names of the submodules and subpackages of the given module.

    .. versionadded:: 2.6.0

    :param module:
    """

    spec: Optional[importlib.machinery.ModuleSpec] = importlib.util.find_spec(module)

    # No importable module, or a namespace package with no concrete origin.
    if spec is None or spec.origin is None:
        return

    yield module

    # Only regular packages (a directory with __init__.py) have submodules to walk.
    if spec.submodule_search_locations is None:
        return
    if PathPlus(spec.origin).name != "__init__.py":
        return

    for search_location in spec.submodule_search_locations:
        for entry in sort_paths(*PathPlus(search_location).iterdir()):
            if entry.name == "__init__.py":
                continue
            if entry.suffix == ".py":
                yield f"{module}.{entry.stem}"
            elif entry.name == "__pycache__":
                continue
            elif entry.is_dir():
                yield from sorted(iter_submodules(f"{module}.{entry.name}"))
Esempio n. 3
0
def test_cwd():
    # PathPlus.cwd() must agree with os.getcwd() in value, type and absoluteness.
    via_classmethod = PathPlus.cwd()
    via_os = PathPlus(os.getcwd())
    assert via_classmethod == via_os
    assertEqualNormCase(str(via_classmethod), str(via_os))
    assert type(via_classmethod) is type(via_os)
    assert via_classmethod.is_absolute()
Esempio n. 4
0
def enable_docs(
    repo_path: pathlib.Path,
    templates: Environment,
    init_repo_templates: Environment,
) -> List[str]:
    """
    Set up the documentation directory, render ``index.rst`` and write a stub
    API page for the project's import name.

    :param repo_path: Path to the repository root.
    :param templates: Template environment providing global settings.
    :param init_repo_templates: Template environment providing the initial doc pages.

    :return: Paths (relative, posix-style) of the files written.
    """

    docs_dir_name = templates.globals["docs_dir"]
    import_name = templates.globals["import_name"]
    modname = templates.globals["modname"]

    docs_dir = PathPlus(repo_path / docs_dir_name)
    docs_dir.maybe_make()
    (docs_dir / "api").maybe_make()

    for filename in {"index.rst"}:
        rendered = init_repo_templates.get_template(filename).render()
        (docs_dir / filename).write_clean(rendered)

    # Build the stub automodule page for the package.
    underline: str = '=' * (len(import_name) + 1)
    api_buf = StringList()
    api_buf.append(underline)
    api_buf.append(import_name)
    api_buf.append(underline)
    api_buf.blankline(ensure_single=True)
    api_buf.append(f".. automodule:: {import_name}")
    api_buf.blankline(ensure_single=True)

    api_file = (docs_dir / "api" / modname).with_suffix(".rst")
    api_file.write_lines(api_buf)

    return [
        posixpath.join(docs_dir_name, "api", f"{modname}.rst"),
        posixpath.join(docs_dir_name, "index.rst"),
    ]
Esempio n. 5
0
def process_multiple_notebooks(
		notebooks: Iterable[PathLike],
		outdir: PathLike,
		overwrite: bool = False,
		) -> int:
	"""
	Convert several Jupyter notebooks into Python scripts.

	:param notebooks: An iterable of notebook filenames to process
	:param outdir: The directory to store the Python output in.
	:param overwrite: Whether to overwrite existing files.

	:return: ``0`` on success, ``1`` if any notebook was missing.
	"""

	exit_code = 0
	output_dir = PathPlus(outdir)

	for nb in map(PathPlus, notebooks):
		target = output_dir / f"{nb.stem}.py"

		if target.is_file() and not overwrite:
			print(f"Info: Skipping existing file {target}")
		elif not nb.is_file():
			print(f"{nb} not found")
			exit_code |= 1
		else:
			print(f"Converting {nb} to {target}")
			convert_notebook(nb, target)

	return exit_code
Esempio n. 6
0
    def __init__(self, repo_path: pathlib.Path, templates: Environment):
        """
        Prepare the GitHub Actions workflows directory and the path filter
        used to decide which changed files trigger the code workflows.

        :param repo_path: Path to the repository root.
        :param templates: Template environment providing ``github_ci.yml`` and global settings.
        """

        self.repo_path = repo_path
        self.templates = templates

        self.actions = templates.get_template("github_ci.yml")

        self.workflows_dir = PathPlus(repo_path / ".github" / "workflows")
        self.workflows_dir.maybe_make(parents=True)

        # Paths which should NOT trigger the code workflows.
        code_file_filter: DelimitedList[str] = DelimitedList()

        if self.templates.globals["enable_docs"]:
            code_file_filter.append(f"{templates.globals['docs_dir']}/**")
        else:
            # Fixed: was an f-string with no placeholders (flake8 F541).
            code_file_filter.append("doc-source/**")

        code_file_filter.extend([
            "CONTRIBUTING.rst",
            ".imgbotconfig",
            ".pre-commit-config.yaml",
            ".pylintrc",
            ".readthedocs.yml",
        ])
        # ".bumpversion.cfg",
        # ".style.yapf",
        # "stubs.txt",

        # Negated, pipe-delimited glob group, e.g. "!(a|b|c)".
        self._code_file_filter = f"!({code_file_filter:|})"
Esempio n. 7
0
def convert_notebook(
		nb_file: PathLike,
		outfile: PathLike,
		):
	"""
	Convert a notebook to a Python file.

	:param nb_file: Filename of the Jupyter Notebook to convert.
	:param outfile: The filename to store the Python output as.
	"""

	source_file = PathPlus(nb_file)
	target_file = PathPlus(outfile)
	target_file.parent.maybe_make()

	# Only the script body is needed; the exporter also returns resources.
	script = py_exporter.from_file(str(source_file))[0]
	target_file.write_clean(script)

	# Reformat with the bundled isort / yapf configurations.
	with importlib_resources.path("notebook2script", "isort.cfg") as isort_config, \
			importlib_resources.path("notebook2script", "style.yapf") as yapf_style:
		reformat_file(target_file, yapf_style=str(yapf_style), isort_config_file=str(isort_config))

	linter.process_file(target_file)

	# Strip any encoding pragma left in the generated script.
	with open(target_file, "r+b") as fp:
		fix_encoding_pragma(fp, remove=True, expected_pragma=b"# coding: utf-8")
Esempio n. 8
0
def requirements_from_flit(
    package_root: pathlib.Path,
    options: Dict,
    env: sphinx.environment.BuildEnvironment,
    extra: str,
) -> List[str]:
    """
    Load requirements from the ``[tool.flit.metadata.requires-extra]`` section of
    a ``pyproject.toml`` file in the root of the repository.

    :param package_root: The path to the package root.
    :param options:
    :param env:
    :param extra: The name of the "extra" that the requirements are for.

    :return: List of requirements.
    """  # noqa D400

    pyproject_file = PathPlus(env.srcdir).parent / "pyproject.toml"

    if not pyproject_file.is_file():
        raise FileNotFoundError(f"Cannot find pyproject.toml in '{pyproject_file.parent}'")

    flit_extras = parse_pyproject_extras(
        pyproject_file,
        flavour="flit",
        normalize_func=normalize_keep_dot,
    )

    if extra not in flit_extras:
        raise ValueError(f"'{extra}' not found in '[tool.flit.metadata.requires-extra]'")

    # Deduplicate/merge, sort, and stringify the requirement objects.
    return [str(req) for req in sorted(combine_requirements(flit_extras[extra]))]
Esempio n. 9
0
def main(
    project: "PathLike" = '.',
    outfile: str = "conda/meta.yaml",
    artifact_type: "Literal['sdist', 'wheel']" = "sdist",
    show_traceback: bool = False,
):
    """
    Make a conda recipe for the given project.
    """

    # 3rd party
    from domdf_python_tools.paths import PathPlus
    from pyproject_parser.cli import ConfigTracebackHandler

    # this package
    from mkrecipe import MaryBerry

    with handle_tracebacks(show_traceback, ConfigTracebackHandler):
        recipe_file = PathPlus(outfile)
        recipe_file.parent.maybe_make(parents=True)

        if artifact_type == "sdist":
            recipe = MaryBerry(project).make()
        elif artifact_type == "wheel":
            recipe = MaryBerry(project).make_for_wheel()
        else:  # pragma: no cover
            # Click should handle this case for us
            raise click.BadOptionUsage("type", f"Unknown value for '--type': {artifact_type}")

        recipe_file.write_clean(recipe)
        click.echo(f"Recipe written to {recipe_file.as_posix()!r}")
Esempio n. 10
0
def make_dependabotv2(repo_path: pathlib.Path, templates: Environment) -> List[str]:
	"""
	Add configuration for ``dependabot`` to the desired repo.

	https://dependabot.com/

	:param repo_path: Path to the repository root.
	:param templates:

	.. versionadded:: 2020.12.11
	"""

	output_file = PathPlus(repo_path / ".github" / "dependabot.yml")
	output_file.parent.maybe_make()

	# Dependabot v2 configuration: weekly pip updates reviewed by the assignee.
	config = {
			"version": 2,
			"updates": [{
					"package-ecosystem": "pip",
					"directory": '/',
					"schedule": {"interval": "weekly"},
					"reviewers": [templates.globals["assignee"]],
					}],
			}

	output_file.write_lines([
			f"# {templates.globals['managed_message']}",
			"---",
			_round_trip_dump(config),
			])

	return [output_file.relative_to(repo_path).as_posix()]
def replace_emoji(app: Sphinx, exception: Optional[Exception] = None):
    """
    Strip emoji and substitute problem characters in the LaTeX build output.

    :param app: The Sphinx application.
    :param exception: Any exception which occurred and caused Sphinx to abort.
    """

    if exception:
        return

    if app.builder.name.lower() != "latex":
        return

    tex_file = PathPlus(app.builder.outdir) / f"{app.builder.titles[0][1]}.tex"
    content = tex_file.read_text()

    # Applied in order; the first entry must run before the single-char strips.
    substitutions = [
        # Documentation summary emoji
        (" 🐍 🛠️", ''),
        ('🐍', ''),
        ('🛠', ''),
        ('️', ''),  # Variation Selector-16
        ('≈', r" $\approx$ "),  # coming in sphinx-toolbox 2.12
        ('μ', r"\textmu "),  # fixed in sphinx-toolbox 2.12
        (r"\textmum", r"\textmu m"),  # fixed in sphinx-toolbox 2.12
        ('\u205f', r"\medspace "),  # medium mathematical space
        # in words.py
        (r'A\sphinxhyphen{}Ω', r"A\sphinxhyphen{}\textOmega"),
        (r'α\sphinxhyphen{}ϖ', r"\textalpha\sphinxhyphen{}\textomega"),
    ]

    for old, new in substitutions:
        content = content.replace(old, new)

    tex_file.write_clean(content)
Esempio n. 12
0
    def run(self, filename: PathLike):
        """
        Parse configuration from the given file.

        :param filename: The filename of the YAML configuration file.
        """

        config_file = PathPlus(filename)

        if not config_file.is_file():
            raise FileNotFoundError(str(config_file))

        # Validate the file against a JSON schema built from the configuration variables.
        with tempfile.TemporaryDirectory() as tmpdir:
            schema = make_schema(*self.config_vars)
            schema["additionalProperties"] = self.allow_unknown_keys
            schema_file = PathPlus(tmpdir) / "schema.json"
            schema_file.dump_json(schema)
            validate_files(schema_file, config_file)

        with config_file.open() as fp:
            raw_config_vars: Mapping[str, Any] = YAML(typ="safe", pure=True).load(fp)

        parsed_config_vars: MutableMapping[str, Any] = {}

        for var in self.config_vars:
            # Prefer a visit_<name> hook on the subclass; fall back to the var's own getter.
            visitor = getattr(self, f"visit_{var.__name__}", var.get)
            parsed_config_vars[var.__name__] = visitor(raw_config_vars)

        return self.custom_parsing(raw_config_vars, parsed_config_vars, config_file)
Esempio n. 13
0
def the_app(app: Sphinx) -> Sphinx:
    """
    Populate the directory above the Sphinx source dir with sample packaging
    metadata files (extras and requirements), then return the app unchanged.

    Presumably a test fixture — the written contents are exact strings that
    downstream assertions depend on.
    """
    # Treat the parent of the Sphinx srcdir as the repository root.
    fake_repo_root = PathPlus(
        app.env.srcdir).parent  # type: ignore[union-attr]

    # Legacy-style extras declared in __pkginfo__.py.
    PathPlus(fake_repo_root / "__pkginfo__.py").write_lines([
        "extras_require = {",
        "\t\t'extra_b': [",
        '\t\t\t\t"flask >=1.1.2",',
        '\t\t\t\t"click < 7.1.2",',
        '\t\t\t\t"sphinx ==3.0.3",',
        "\t\t\t\t]",
        "\t\t}",
    ])

    # Extras declared both under flit metadata and PEP 621 optional-dependencies.
    PathPlus(fake_repo_root / "pyproject.toml").write_lines([
        "[tool.flit.metadata]",
        'author = "Joe Bloggs"',
        'module = "FooBar"',
        '',
        "[tool.flit.metadata.requires-extra]",
        "test = [",
        '\t"pytest >=2.7.3",',
        '\t"pytest-cov",',
        ']',
        'doc = ["sphinx"]',
        '',
        "[project.optional-dependencies]",
        "test = [",
        '\t"pytest >=2.7.3",',
        '\t"pytest-cov",',
        ']',
        'doc = ["sphinx"]',
    ])

    # Extras declared in setup.cfg.
    (fake_repo_root / "setup.cfg").write_lines([
        "[options.extras_require]",
        "extra_c = faker; pytest; tox",
    ])

    # A nested requirements.txt inside a subpackage, including comment lines.
    subpackage = fake_repo_root / "dummy_package" / "subpackage"
    if not subpackage.is_dir():
        subpackage.mkdir(parents=True)

    (subpackage / "requirements.txt").write_lines([
        "# a comment",
        "numpy>=1.18.4",
        "scipy==1.4.1",
        "# Old scipy version",
        "# scipy==1.3.0",
        "pandas>=0.25.0, !=1.0.0",
    ])

    # A requirements file containing only comments (i.e. no real requirements).
    (fake_repo_root / "dummy_package" / "empty_requirements.txt").write_lines([
        "# a comment",
        "# numpy>=1.18.4",
        "# scipy==1.4.1",
        "# pandas>=0.25.0, !=1.0.0",
    ])

    return app
Esempio n. 14
0
def test_mkdir_concurrent_parent_creation(BASE):
    """
    mkdir(parents=True, exist_ok=False) must tolerate another process creating
    intermediate parents, raising FileExistsError only when the *final*
    directory already exists.
    """
    # Each bit of pattern_num decides whether a given mkdir call is "raced".
    for pattern_num in range(32):
        p = PathPlus(BASE, "dirCPC%d" % pattern_num)
        assert not (p.exists())

        def my_mkdir(path, mode=0o777):
            path = str(path)
            # Emulate another process that would create the directory
            # just before we try to create it ourselves.  We do it
            # in all possible pattern combinations, assuming that this
            # function is called at most 5 times (dirCPC/dir1/dir2,
            # dirCPC/dir1, dirCPC, dirCPC/dir1, dirCPC/dir1/dir2).
            if pattern.pop():
                os.mkdir(path, mode)  # From another process.
                concurrently_created.add(path)
            os.mkdir(path, mode)  # Our real call.

        # NOTE: `pattern` is read by the closure above via late binding.
        pattern = [bool(pattern_num & (1 << n)) for n in range(5)]
        concurrently_created: Set = set()
        p12 = p / "dir1" / "dir2"
        try:
            with mock.patch("pathlib._normal_accessor.mkdir", my_mkdir):
                p12.mkdir(parents=True, exist_ok=False)
        except FileExistsError:
            # Acceptable only if the leaf itself was "concurrently" created.
            assert (str(p12) in concurrently_created)
        else:
            assert (str(p12) not in concurrently_created)
        assert (p.exists())
Esempio n. 15
0
    def process_file(self, filename):  # noqa: D102
        # Collected by the checker callbacks during _check_files: statements
        # whose values should be wrapped in print() calls.
        self.statements = []

        with fix_import_path([filename]):
            self._check_files(self.get_ast,
                              self._iterate_file_descrs([filename]))

        filename = PathPlus(filename)
        file_lines = filename.read_lines()

        for node in self.statements:

            # Multi-line statements are not handled; tolineno == lineno
            # means the statement fits on a single line.
            if node.tolineno != node.lineno:
                warnings.warn("Currently unable to convert this statement")

            else:
                value = node.value.as_string()
                col = node.col_offset
                lineno = node.lineno - 1  # astroid line numbers are 1-based

                # Splice print(<value>) over the original expression, keeping
                # whatever precedes/follows it on the same line.
                line = file_lines[lineno]
                line_pre_statement = line[:col]
                line_post_statement = line[col + len(value):]
                # print(f"{line_pre_statement}print({value}){line_post_statement}")
                file_lines[
                    lineno] = f"{line_pre_statement}print({value}){line_post_statement}"

        if file_lines[-1]:
            # ensure there's a newline at the end
            file_lines.append('')

        # print("\n".join(file_lines))
        filename.write_lines(file_lines)
Esempio n. 16
0
def copy_assets(app: Sphinx, exception: Optional[Exception] = None) -> None:
	"""
	Copy asset files to the output.

	:param app: The Sphinx application.
	:param exception: Any exception which occurred and caused Sphinx to abort.
	"""

	if exception:  # pragma: no cover
		return

	# CSS overrides for the sphinx-tabs containers.
	css_rules = StringList([
			".docutils.container {",
			"    padding-left: 0 !important;",
			"    padding-right: 0 !important;",
			'}',
			'',
			"div.ui.top.attached.tabular.menu.sphinx-menu.docutils.container {",
			"    margin-left: 0 !important;",
			"    margin-right: 0 !important;",
			'}',
			])

	target_dir = PathPlus(app.builder.outdir) / "_static" / "css"
	target_dir.maybe_make(parents=True)
	(target_dir / "tabs_customise.css").write_lines(css_rules)
Esempio n. 17
0
def make_dependabot(repo_path: pathlib.Path, templates: Environment) -> List[str]:
	"""
	Add configuration for ``dependabot`` to the desired repo.

	https://dependabot.com/

	:param repo_path: Path to the repository root.
	:param templates:

	.. deprecated:: 2020.12.11
	"""

	output_file = PathPlus(repo_path / ".dependabot" / "config.yml")
	output_file.parent.maybe_make()

	# Dependabot v1 configuration: weekly pip updates reviewed by the assignee.
	config = {
			"version": 1,
			"update_configs": [{
					"package_manager": "python",
					"directory": '/',
					"update_schedule": "weekly",
					"default_reviewers": [templates.globals["assignee"]],
					}],
			}

	output_file.write_lines([
			f"# {templates.globals['managed_message']}",
			"---",
			_round_trip_dump(config),
			])

	return [output_file.relative_to(repo_path).as_posix()]
Esempio n. 18
0
def test_iterchildren_match(
        advanced_data_regression: AdvancedDataRegressionFixture,
        absolute: bool):
    # The repository root is two levels above this test file.
    repo_path = PathPlus(__file__).parent.parent
    with in_directory(repo_path.parent):

        assert repo_path.is_dir()

        if not absolute:
            repo_path = repo_path.relative_to(repo_path.parent)

        # A stale build directory would pollute the regression data.
        if (repo_path / "build").is_dir():
            shutil.rmtree(repo_path / "build")

        children = list(repo_path.iterchildren(match="**/*.py"))
        assert children

        child_paths = sorted(
            p.relative_to(repo_path).as_posix() for p in children)

        # NOTE(review): child_paths holds posix-style relative paths, so these
        # bare names only match top-level entries, and only ones already
        # matching "**/*.py" — confirm these exclusions actually take effect.
        for exclude_filename in {
                ".coverage", "pathtype_demo.py", "dist", "htmlcov", "conda",
                ".idea", "mutdef.py"
        }:
            if exclude_filename in child_paths:
                child_paths.remove(exclude_filename)

        advanced_data_regression.check(child_paths,
                                       basename="test_iterchildren_match")
Esempio n. 19
0
def make_recipe(out_dir: str = "./conda/"):
    """
    Make a Conda ``meta.yaml`` recipe.
    """

    # stdlib
    import warnings

    # 3rd party
    from consolekit.terminal_colours import Fore, resolve_color_default
    from domdf_python_tools.paths import PathPlus, traverse_to_file

    # this package
    from repo_helper import conda

    warnings.warn(
        "'repo-helper make-recipe' is deprecated. "
        "Please use 'mkrecipe' instead: https://mkrecipe.readthedocs.io/",
        DeprecationWarning,
    )

    # Walk up from the CWD to find the project root (marked by repo_helper.yml).
    project_dir = traverse_to_file(PathPlus.cwd(), "repo_helper.yml")

    output_file = PathPlus(out_dir).resolve() / "meta.yaml"
    output_file.parent.maybe_make()

    conda.make_recipe(project_dir, output_file)

    click.echo(
        Fore.GREEN(f"Wrote recipe to {output_file!s}"),
        color=resolve_color_default(),
    )
Esempio n. 20
0
def copy_assets(app: Sphinx, exception: Optional[Exception] = None) -> None:
    """
    Copy asset files to the output.

    :param app: The Sphinx application.
    :param exception: Any exception which occurred and caused Sphinx to abort.
    """

    if exception:  # pragma: no cover
        return

    # One card-header background rule per colour in the map.
    style = {
        f"div.sphinx-highlights div.highlight-{colour} div.card-header": {
            "background-color": hex_code
        }
        for colour, hex_code in _colour_map.items()
    }

    target_dir = PathPlus(app.builder.outdir) / "_static" / "css"
    target_dir.maybe_make(parents=True)

    dict2css.dump(style, target_dir / "sphinx_highlights.css")
Esempio n. 21
0
def requirements_from_setup_cfg(
    package_root: pathlib.Path,
    options: Dict,
    env: sphinx.environment.BuildEnvironment,
    extra: str,
) -> List[str]:
    """
	Load requirements from a ``setup.cfg`` file in the root of the repository.

	:param package_root: The path to the package root.
	:param options:
	:param env:
	:param extra: The name of the "extra" that the requirements are for.

	:return: List of requirements.

	:raises ValueError: If the extra, or the ``[options.extras_require]`` section, is not found.
	"""

    setup_cfg_file = PathPlus(env.srcdir).parent / "setup.cfg"
    assert setup_cfg_file.is_file()

    setup_cfg = read_configuration(setup_cfg_file)

    # Guard clauses instead of nested if/else.
    if "options" not in setup_cfg or "extras_require" not in setup_cfg["options"]:
        # Fixed: the original message was missing the closing quote around 'setup.cfg'.
        raise ValueError("'options.extras_require' section not found in 'setup.cfg'")

    extras_require = setup_cfg["options"]["extras_require"]

    if extra not in extras_require:
        raise ValueError(f"'{extra}' not found in '[options.extras_require]'")

    return extras_require[extra]
Esempio n. 22
0
def test_rename(BASE, tmp_pathplus: PathPlus):
    """Path.replace must move the file, and on Python 3.9+ return the target path."""
    P = PathPlus(BASE)
    p = P / "fileA"
    size = p.stat().st_size
    # Renaming to another path.
    q = P / "dirA" / "fileAA"

    if sys.version_info < (3, 9):  # pragma: no cover (>=py39)
        p.replace(q)

    else:  # pragma: no cover (<py39)
        # Newer versions return the new path from replace().
        renamed_p = p.replace(q)
        assert (renamed_p == q)

    # Content moved intact; the source no longer exists.
    assert (q.stat().st_size == size)
    with pytest.raises(FileNotFoundError):
        p.stat()

    # Renaming to a str of a relative path.
    r = tmp_pathplus / "fileAAA"

    if sys.version_info < (3, 9):  # pragma: no cover (>=py39)
        q.replace(r)

    else:  # pragma: no cover (<py39)
        renamed_q = q.replace(r)
        assert (renamed_q == PathPlus(r))

    assert (os.stat(r).st_size == size)
    with pytest.raises(FileNotFoundError):
        q.stat()
Esempio n. 23
0
    def __init__(self, app_name: str):
        """
        Create (if necessary) and remember the per-application cache directory.

        :param app_name: Name of the application; used to derive the cache directory name.
        """
        self.app_name: str = str(app_name)
        cache_location = platformdirs.user_cache_dir(f"{self.app_name}_cache")
        self.cache_dir = PathPlus(cache_location)
        self.cache_dir.maybe_make(parents=True)

        # Mapping of function names to their caches
        self.caches: Dict[str, Dict[str, Any]] = {}
Esempio n. 24
0
 def __init__(self, filename: PathLike, yapf_style: str,
              isort_config: Config):
     """
     Record the formatting configuration and read in the file to be formatted.

     :param filename: The file to reformat.
     :param yapf_style: The yapf style to format with.
     :param isort_config: The isort configuration object.
     """
     self.file_to_format = PathPlus(filename)
     # Posix-style string form of the path, for APIs that want a str.
     self.filename = self.file_to_format.as_posix()
     self.yapf_style = yapf_style
     self.isort_config = isort_config
     # Original source, kept so the reformatted output can be compared against it.
     self._unformatted_source = self.file_to_format.read_text()
     # Populated once reformatting has been performed.
     self._reformatted_source: Optional[str] = None
Esempio n. 25
0
def test_iterchildren(advanced_data_regression: AdvancedDataRegressionFixture):
    # The repository root is two levels above this test file.
    repo_root = PathPlus(__file__).parent.parent
    assert repo_root.is_dir()

    found = list((repo_root / "domdf_python_tools").iterchildren())
    assert found

    relative = sorted(child.relative_to(repo_root).as_posix() for child in found)
    advanced_data_regression.check(relative)
Esempio n. 26
0
def rewrite_readme(repo_path: pathlib.Path,
                   templates: Environment) -> List[str]:
    """
	Update blocks in the ``README.rst`` file.

	:param repo_path: Path to the repository root.
	:param templates:
	"""

    # TODO: link to documentation below installation

    g = templates.globals
    readme_file = PathPlus(repo_path / "README.rst")

    shields_block = ShieldsBlock(
        username=g["username"],
        repo_name=g["repo_name"],
        version=g["version"],
        conda=g["enable_conda"],
        tests=g["enable_tests"] and not g["stubs_package"],
        docs=g["enable_docs"],
        pypi_name=g["pypi_name"],
        docker_shields=g["docker_shields"],
        docker_name=g["docker_name"],
        platforms=g["platforms"],
        pre_commit=g["enable_pre_commit"],
        on_pypi=g["on_pypi"],
        docs_url=g["docs_url"],
        primary_conda_channel=g["primary_conda_channel"],
    ).make()

    # Pick the installation instructions appropriate for PyPI availability.
    if g["on_pypi"]:
        install_block = create_readme_install_block(
            g["modname"],
            g["username"],
            g["enable_conda"],
            g["on_pypi"],
            g["pypi_name"],
            g["conda_channels"],
        )
    else:
        install_block = get_readme_installation_block_no_pypi_template().render(
            modname=g["modname"],
            username=g["username"],
            repo_name=g["repo_name"],
        )

    # Substitute the managed regions of the README in place.
    content = readme_file.read_text(encoding="UTF-8")
    content = shields_regex.sub(str(shields_block), content)
    content = installation_regex.sub(install_block + '\n', content)
    content = short_desc_regex.sub(create_short_desc_block(g["short_desc"]), content)

    readme_file.write_clean(content)

    return [readme_file.name]
Esempio n. 27
0
def replace_unknown_unicode(app: Sphinx,
                            exception: Optional[Exception] = None):
    r"""
	Replaces certain unknown unicode characters in the Sphinx LaTeX output with the best equivalents.

	.. only:: html

		The mapping is as follows:

		* ♠ -- \spadesuit
		* ♥ -- \heartsuit
		* ♦ -- \diamondsuit
		* ♣ -- \clubsuit
		* Zero width space -- \hspace{0pt}
		* μ -- \textmu
		* ≡ -- \equiv (new in version 2.11.0)
		* ≈ -- \approx (new in version 2.12.0)
		* ≥ -- \geq (new in version 2.13.0)
		* ≤ -- \leq (new in version 2.13.0)

	This function can be hooked into the :event:`build-finished` event as follows:

	.. code-block:: python

		app.connect("build-finished", replace_unknown_unicode)

	.. versionadded:: 2.9.0

	:param app: The Sphinx application.
	:param exception: Any exception which occurred and caused Sphinx to abort.
	"""

    if exception:  # pragma: no cover
        return

    if app.builder is None or app.builder.name.lower() != "latex":
        return

    builder = cast(LaTeXBuilder, app.builder)
    tex_file = PathPlus(builder.outdir) / f"{builder.titles[0][1].lower()}.tex"

    # Each replacement is independent of the others.
    replacements = {
        '♠': r' $\spadesuit$ ',
        '♥': r' $\heartsuit$ ',
        '♦': r' $\diamondsuit$ ',
        '♣': r' $\clubsuit$ ',
        '\u200b': r'\hspace{0pt}',  # Zero width space
        'μ': r"\textmu{}",
        '≡': r" $\equiv$ ",
        '≈': r" $\approx$ ",
        '≥': r" $\geq$ ",
        '≤': r" $\leq$ ",
    }

    content = tex_file.read_text()
    for char, latex in replacements.items():
        content = content.replace(char, latex)

    tex_file.write_clean(content)
Esempio n. 28
0
def test_mkdir_with_unknown_drive():
    # Find a drive letter that does not exist on this machine.
    missing_drive = None
    for letter in "ZYXWVUTSRQPONMLKJIHGFEDCBA":
        candidate = PathPlus(letter + ":\\")
        if not candidate.is_dir():
            missing_drive = candidate
            break

    if missing_drive is None:
        pytest.skip("cannot find a drive that doesn't exist")

    with pytest.raises(OSError):
        (missing_drive / "child" / "path").mkdir(parents=True)
Esempio n. 29
0
	def __init__(self, raw_results_dir: PathLike, json_results_dir: PathLike, csv_results_dir: PathLike):
		# The raw results directory is only read, so it is not created here.
		self.raw_results_dir = PathPlus(raw_results_dir)

		# The JSON and CSV output directories are created if missing.
		for attr_name, directory in (
				("json_results_dir", json_results_dir),
				("csv_results_dir", csv_results_dir),
				):
			resolved = PathPlus(directory)
			resolved.maybe_make(parents=True)
			setattr(self, attr_name, resolved)
Esempio n. 30
0
def test_is_mount(BASE):
    base = PathPlus(BASE)
    root = PathPlus('/')  # TODO: Work out Windows.

    # Ordinary files, directories and missing paths are never mount points.
    for name in ("fileA", "dirA", "non-existing"):
        assert not (base / name).is_mount()
    assert not (base / "fileA" / "bah").is_mount()

    # The filesystem root is a mount point.
    assert root.is_mount()

    if can_symlink():
        assert not (base / "linkA").is_mount()