Example #1
def visit_asset_node(translator: HTMLTranslator, node: AssetNode):
	"""
	Visit an :class:`~.AssetNode`.

	:param translator:
	:param node: The node being visited.
	"""

	if not hasattr(translator, "_asset_node_seen_files"):
		# Files that have already been seen
		translator._asset_node_seen_files = []  # type: ignore

	assets_out_dir = PathPlus(translator.builder.outdir) / "_assets"
	assets_out_dir.maybe_make(parents=True)

	source_file = PathPlus(translator.builder.confdir) / node["source_file"]

	if source_file not in translator._asset_node_seen_files and source_file.is_file():  # type: ignore
		# Avoid unnecessary copies of potentially large files.
		translator._asset_node_seen_files.append(source_file)  # type: ignore
		shutil.copy2(source_file, assets_out_dir)
	elif not source_file.is_file():
		stderr_writer(Fore.RED(f"{translator.builder.current_docname}: Asset file '{source_file}' not found."))
		translator.context.append('')
		return

	# Create the HTML
	current_uri = (pathlib.PurePosixPath('/') / translator.builder.current_docname).parent
	refuri = posixpath.relpath(f"/_assets/{node['refuri']}", str(current_uri))
	translator.body.append(f'<a class="reference external" href="{refuri}">')
	translator.context.append("</a>")
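
A visitor like this only takes effect once the node and its handlers are registered with Sphinx. A minimal sketch of that registration, assuming ``AssetNode`` comes from the same extension and using a matching depart handler (not shown in the example above):

from sphinx.application import Sphinx

def depart_asset_node(translator: HTMLTranslator, node: AssetNode) -> None:
    # Close the <a> tag (or append the empty string) pushed onto the context by visit_asset_node.
    translator.body.append(translator.context.pop())

def setup(app: Sphinx) -> None:
    # Register the node class with its HTML visit/depart handlers.
    app.add_node(AssetNode, html=(visit_asset_node, depart_asset_node))
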
Example #2
def copy_assets(app: Sphinx, exception: Optional[Exception] = None) -> None:
    """
	Copy asset files to the output.

	:param app: The Sphinx application.
	:param exception: Any exception which occurred and caused Sphinx to abort.
	"""

    if exception:  # pragma: no cover
        return

    style = {}

    for colour, hex_ in _colour_map.items():
        style[
            f"div.sphinx-highlights div.highlight-{colour} div.card-header"] = {
                "background-color": hex_
            }

    # if app.config.html_theme in {"domdf_sphinx_theme", "sphinx_rtd_theme"}:
    # 	header_colour = app.config.html_theme_options.get("style_nav_header_background", "#2980B9")
    #
    # 	style.blankline()
    # 	style.extend([
    # 			"div.sphinx-highlights div.card-header {",
    # 			f"    background-color: {header_colour}",
    # 			'}',
    # 			])

    css_dir = PathPlus(app.builder.outdir) / "_static" / "css"
    css_dir.maybe_make(parents=True)

    dict2css.dump(style, css_dir / "sphinx_highlights.css")
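
Handlers like this are normally connected to Sphinx's ``build-finished`` event from the extension's ``setup()`` function; a minimal sketch:

from sphinx.application import Sphinx

def setup(app: Sphinx) -> dict:
    # copy_assets runs once the build has finished (it returns early if the build aborted).
    app.connect("build-finished", copy_assets)
    return {"parallel_read_safe": True}
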
Example #3
def copy_assets(app: Sphinx, exception: Optional[Exception] = None) -> None:
	"""
	Copy asset files to the output.

	:param app: The Sphinx application.
	:param exception: Any exception which occurred and caused Sphinx to abort.
	"""

	if exception:  # pragma: no cover
		return

	style = StringList([
			".docutils.container {",
			"    padding-left: 0 !important;",
			"    padding-right: 0 !important;",
			'}',
			'',
			# "div.sphinx-tabs.docutils.container {",
			# "    padding-left: 0 !important;",
			# "    padding-right: 0 !important;",
			# "}",
			# '',
			"div.ui.top.attached.tabular.menu.sphinx-menu.docutils.container {",
			# "    padding-left: 0 !important;",
			# "    padding-right: 0 !important;",
			"    margin-left: 0 !important;",
			"    margin-right: 0 !important;",
			'}',
			])

	css_dir = PathPlus(app.builder.outdir) / "_static" / "css"
	css_dir.maybe_make(parents=True)
	css_file = css_dir / "tabs_customise.css"
	css_file.write_lines(style)
Example #4
def enable_docs(
    repo_path: pathlib.Path,
    templates: Environment,
    init_repo_templates: Environment,
) -> List[str]:
    """
    Enable documentation for the desired repo.

    :param repo_path: Path to the repository root.
    :param templates:
    :param init_repo_templates:
    """

    docs_dir = PathPlus(repo_path / templates.globals["docs_dir"])
    docs_dir.maybe_make()
    (docs_dir / "api").maybe_make()

    for filename in {"index.rst"}:
        template = init_repo_templates.get_template(filename)
        (docs_dir / filename).write_clean(template.render())

    api_buf = StringList()
    header_line: str = '=' * (len(templates.globals["import_name"]) + 1)
    api_buf.append(header_line)
    api_buf.append(templates.globals["import_name"])
    api_buf.append(header_line)
    api_buf.blankline(ensure_single=True)
    api_buf.append(f".. automodule:: {templates.globals['import_name']}")
    api_buf.blankline(ensure_single=True)

    (docs_dir / "api" /
     templates.globals["modname"]).with_suffix(".rst").write_lines(api_buf)

    return [
        posixpath.join(templates.globals["docs_dir"], "api",
                       f"{templates.globals['modname']}.rst"),
        posixpath.join(templates.globals["docs_dir"], "index.rst"),
    ]
Example #5
def copy_asset_files(app: Sphinx, exception: Optional[Exception] = None):
    """
	Copy additional stylesheets into the HTML build directory.

	:param app: The Sphinx application.
	:param exception: Any exception which occurred and caused Sphinx to abort.

	.. versionadded:: 1.2.0
	"""

    if exception:  # pragma: no cover
        return

    if app.builder.format.lower() != "html":
        return

    style = {
        "span.regex_literal": {
            "color": "dimgrey"
        },
        "span.regex_at": {
            "color": "orangered"
        },
        "span.regex_repeat_brace": {
            "color": "orangered"
        },
        "span.regex_branch": {
            "color": "orangered"
        },
        "span.regex_subpattern": {
            "color": "dodgerblue"
        },
        "span.regex_in": {
            "color": "darkorange"
        },
        "span.regex_category": {
            "color": "darkseagreen"
        },
        "span.regex_repeat": {
            "color": "orangered"
        },
        "span.regex_any": {
            "color": "orangered"
        },
        "code.regex": {
            "font-size": "80%"
        },
        "span.regex": {
            "font-weight": "bold"
        },
    }

    static_dir = PathPlus(app.outdir) / "_static"
    static_dir.maybe_make(parents=True)
    (static_dir / "regex.css").write_clean(dict2css.dumps(style, minify=True))
Example #6
class ResultParser:
	"""
	Given a directory of CSV results exported from MassHunter, parse them to CSV and JSON.

	:param raw_results_dir: The directory in which the raw exports from MassHunter are stored.
	:param json_results_dir: The directory to store the output json files in.
	:param csv_results_dir: The directory to store the output csv files in.
	"""

	def __init__(self, raw_results_dir: PathLike, json_results_dir: PathLike, csv_results_dir: PathLike):

		self.raw_results_dir = PathPlus(raw_results_dir)

		self.json_results_dir = PathPlus(json_results_dir)
		self.json_results_dir.maybe_make(parents=True)

		self.csv_results_dir = PathPlus(csv_results_dir)
		self.csv_results_dir.maybe_make(parents=True)

	def parse_for_directory(self, directory: PathLike):
		"""
		Convert the "CSV Results.csv" file in the given directory to CSV and JSON.

		:param directory:
		"""

		(self.json_results_dir / directory).maybe_make()
		(self.csv_results_dir / directory).maybe_make()

		infile = self.raw_results_dir / directory / "CSV Results.csv"
		csv_outfile = self.csv_results_dir / directory / "CSV Results Parsed.csv"
		json_outfile = self.json_results_dir / directory / "results.json"
		print(f"{infile} -> {csv_outfile}")
		print(f"{' ' * len(str(infile))} -> {json_outfile}")

		parse_masshunter_csv(infile, csv_outfile, json_outfile)

	def parse_directory_list(self, directory_list: Iterable[PathLike]):
		"""
		Runs :meth:`~.ResultParser.parse_for_directory` for each directory in ``directory_list``.

		:param directory_list: A list of directories to process.
		"""

		for directory in directory_list:
			print(f"Processing directory {directory}")
			self.parse_for_directory(directory)
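
A usage sketch for the parser above; the directory paths and sub-directory names are purely illustrative:

parser = ResultParser(
    raw_results_dir="raw_results",
    json_results_dir="json_results",
    csv_results_dir="csv_results",
)

# Each entry must contain a "CSV Results.csv" file under raw_results/<directory>.
parser.parse_directory_list(["2021-05-04", "2021-05-05"])
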
Example #7
def copy_asset_files(app: Sphinx, exception: Optional[Exception] = None):
    """
	Copy additional stylesheets into the HTML build directory.

	:param app: The Sphinx application.
	:param exception: Any exception which occurred and caused Sphinx to abort.
	"""

    if exception:  # pragma: no cover
        return

    if app.builder is None or app.builder.format.lower() != "html":  # pragma: no cover
        return

    static_dir = PathPlus(app.outdir) / "_static"
    static_dir.maybe_make(parents=True)
    dict2css.dump(_css.regex_styles, static_dir / "regex.css", minify=True)
Example #8
def BASE(tmp_pathplus: PathPlus):
    top_dir = tmp_pathplus
    tmp_pathplus = top_dir / "a/b/c/d"
    tmp_pathplus.maybe_make(parents=True)

    join = lambda *x: os.path.join(tmp_pathplus, *x)

    if os.name == "nt":
        # Workaround for http://bugs.python.org/issue13772.
        def dirlink(src, dest):
            os.symlink(src, dest, target_is_directory=True)
    else:

        def dirlink(src, dest):
            os.symlink(src, dest)

    os.mkdir(join("dirA"))
    os.mkdir(join("dirB"))
    os.mkdir(join("dirC"))
    os.mkdir(join("dirC", "dirD"))
    os.mkdir(join("dirE"))
    with open(join("fileA"), "wb") as f:
        f.write(b"this is file A\n")
    with open(join("dirB", "fileB"), "wb") as f:
        f.write(b"this is file B\n")
    with open(join("dirC", "fileC"), "wb") as f:
        f.write(b"this is file C\n")
    with open(join("dirC", "dirD", "fileD"), "wb") as f:
        f.write(b"this is file D\n")
    os.chmod(join("dirE"), 0)

    if not PYPY and sys.platform != "win32":
        # Relative symlinks.
        os.symlink("fileA", join("linkA"))
        os.symlink("non-existing", join("brokenLink"))
        dirlink("dirB", join("linkB"))
        dirlink(os.path.join("..", "dirB"), join("dirA", "linkC"))
        # This one goes upwards, creating a loop.
        dirlink(os.path.join("..", "dirB"), join("dirB", "linkD"))

    yield tmp_pathplus

    os.chmod(join("dirE"), 0o777)
    shutil.rmtree(top_dir)
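
Generator functions of this shape are usually exposed as pytest fixtures, so the directory tree is built before each test and removed afterwards. A sketch, assuming the ``tmp_pathplus`` fixture from the author's testing helpers is available, with the fixture and test names purely illustrative:

import pytest

# Register the generator above as a fixture called "base".
base = pytest.fixture(name="base")(BASE)

def test_tree(base: PathPlus):
    # "base" is the a/b/c/d directory populated by BASE.
    assert (base / "dirA").is_dir()
    assert (base / "fileA").read_bytes() == b"this is file A\n"
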
Example #9
def make_conda_actions_ci(repo_path: pathlib.Path,
                          templates: Environment) -> List[str]:
    """
	Add configuration for testing conda packages on `GitHub Actions` to the desired repo.

	:param repo_path: Path to the repository root.
	:param templates:
	"""

    workflows_dir = PathPlus(repo_path / ".github" / "workflows")
    conda_ci_file = workflows_dir / "conda_ci.yml"

    if templates.globals["enable_conda"]:
        actions = templates.get_template("github_conda_ci.yml")
        workflows_dir.maybe_make(parents=True)

        def no_pypy_versions(versions):
            """
			Returns the subset of ``versions`` which does not end with ``-dev``.

			:param versions:
			"""

            return [
                v for v in no_dev_versions(versions)
                if "pypy" not in v.lower()
            ]

        pip_dependencies = ["whey-conda"]

        pyproject_file = PathPlus(repo_path / "pyproject.toml")
        if pyproject_file.is_file():
            data: DefaultDict[str, Any] = DefaultDict(dom_toml.load(pyproject_file))
            pip_dependencies.extend(data["build-system"]["requires"])

        conda_ci_file.write_clean(
            actions.render(no_dev_versions=no_pypy_versions,
                           pip_dependencies=pip_dependencies))

    else:
        conda_ci_file.unlink(missing_ok=True)

    return [conda_ci_file.relative_to(repo_path).as_posix()]
Example #10
def make_issue_templates(repo_path: pathlib.Path, templates: Environment) -> List[str]:
	"""
	Add issue templates for GitHub to the desired repo.

	:param repo_path: Path to the repository root.
	:param templates:
	"""

	managed_files = []

	issue_template_dir = PathPlus(repo_path / ".github" / "ISSUE_TEMPLATE")
	issue_template_dir.maybe_make(parents=True)

	for filename in ["bug_report.md", "feature_request.md"]:
		filepath = issue_template_dir / filename
		filepath.write_clean(templates.get_template(filename).render())
		managed_files.append(filepath.relative_to(repo_path).as_posix())

	return managed_files
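
The ``PathPlus.maybe_make`` call that recurs throughout these examples creates a directory only if it does not already exist; with plain ``pathlib`` the closest equivalent is roughly:

import pathlib

issue_template_dir = pathlib.Path(".github") / "ISSUE_TEMPLATE"
# Comparable to PathPlus.maybe_make(parents=True): create any missing parents
# and don't fail if the directory is already there.
issue_template_dir.mkdir(parents=True, exist_ok=True)
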
Example #11
class HTTPCache:
	"""
	Cache HTTP requests for up to 28 days and limit the rate of requests to no more than 5/second.

	:param app_name: The name of the app. This dictates the name of the cache directory.
	:param expires_after: The maximum time to cache responses for.
	"""

	app_name: str  #: The name of the app. This dictates the name of the cache directory.
	cache_dir: PathPlus  #: The location of the cache directory on disk.
	caches: Dict[str, Dict[str, Any]]  #: Mapping of function names to their caches.

	def __init__(self, app_name: str, expires_after: datetime.timedelta = datetime.timedelta(days=28)):
		self.app_name: str = str(app_name)
		self.cache_dir = PathPlus(platformdirs.user_cache_dir(self.app_name))
		self.cache_dir.maybe_make(parents=True)

		self.session: requests.Session = CacheControl(
				sess=requests.Session(),
				cache=FileCache(self.cache_dir),
				heuristic=ExpiresAfter(
						days=expires_after.days,
						seconds=expires_after.seconds,
						microseconds=expires_after.microseconds,
						),
				adapter_class=RateLimitAdapter
				)

	def clear(self) -> bool:
		"""
		Clear the cache.

		:returns: True to indicate success. False otherwise.
		"""

		try:
			shutil.rmtree(self.cache_dir)
			return True

		except Exception as e:  # pragma: no cover
			warnings.warn(f"Could not remove cache. The error was: {e}")
			return False
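
A usage sketch for the cache above; the application name and URL are placeholders:

import datetime

cache = HTTPCache("my_app", expires_after=datetime.timedelta(days=7))

# Requests made through the shared session are cached on disk and rate-limited.
response = cache.session.get("https://example.com/api")
print(response.status_code)

cache.clear()  # returns True if the cache directory was removed successfully
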
Example #12
def copy_asset_files(app: Sphinx, exception: Optional[Exception] = None) -> None:
	"""
	Copy asset files to the output.

	:param app: The Sphinx application.
	:param exception: Any exception which occurred and caused Sphinx to abort.

	.. versionchanged:: 2.7.0

		Renamed from ``copy_assets``.
		The old name is deprecated and will be removed in 3.0.0.
	"""

	if exception:  # pragma: no cover
		return

	if app.builder is None or app.builder.format.lower() != "html":  # pragma: no cover
		return

	# style = StringList([
	# 		".docutils.container {",
	# 		"    padding-left: 0 !important;",
	# 		"    padding-right: 0 !important;",
	# 		'}',
	# 		'',
	# 		# "div.sphinx-tabs.docutils.container {",
	# 		# "    padding-left: 0 !important;",
	# 		# "    padding-right: 0 !important;",
	# 		# "}",
	# 		# '',
	# 		"div.ui.top.attached.tabular.menu.sphinx-menu.docutils.container {",
	# 		# "    padding-left: 0 !important;",
	# 		# "    padding-right: 0 !important;",
	# 		"    margin-left: 0 !important;",
	# 		"    margin-right: 0 !important;",
	# 		'}',
	# 		])

	css_static_dir = PathPlus(app.builder.outdir) / "_static" / "css"
	css_static_dir.maybe_make(parents=True)
	dict2css.dump(_css.tweaks_sphinx_panels_tabs_styles, css_static_dir / "tabs_customise.css")
Example #13
def copy_asset_files(app: Sphinx, exception: Optional[Exception] = None):
	"""
	Copy additional stylesheets into the HTML build directory.

	:param app: The Sphinx application.
	:param exception: Any exception which occurred and caused Sphinx to abort.
	"""

	if exception:  # pragma: no cover
		return

	if app.builder is None or app.builder.format.lower() != "html":  # pragma: no cover
		return

	extensions_selector = ", ".join([
			"p.sphinx-toolbox-extensions",
			"div.sphinx-toolbox-extensions.highlight-python",
			"div.sphinx-toolbox-extensions.highlight-python div.highlight",
			])

	rest_example_style = {
			"padding-left": "5px",
			"border-style": "dotted",
			"border-width": "1px",
			"border-color": "darkgray",
			}

	style: MutableMapping[str, dict2css.Style] = {
			"p.source-link": {"margin-bottom": 0},
			"p.source-link + hr.docutils": {"margin-top": "10px"},
			extensions_selector: {"margin-bottom": "10px"},
			"div.rest-example.docutils.container": rest_example_style,
			**installation_styles,
			**shields_styles,
			**regex_styles,
			}

	css_static_dir = PathPlus(app.outdir) / "_static" / "css"
	css_static_dir.maybe_make(parents=True)
	dict2css.dump(style, css_static_dir / "sphinx-toolbox.css")
Example #14
def copy_asset_files(app: Sphinx, exception: Optional[Exception] = None):
	"""
	Copy additional stylesheets into the HTML build directory.

	.. versionadded:: 1.2.0

	:param app: The Sphinx application.
	:param exception: Any exception which occurred and caused Sphinx to abort.
	"""

	if exception:  # pragma: no cover
		return

	if app.builder is None or app.builder.format.lower() != "html":  # pragma: no cover
		return

	static_dir = PathPlus(app.outdir) / "_static"
	static_dir.maybe_make(parents=True)
	dict2css.dump(_css.installation_styles, static_dir / "sphinx_toolbox_installation.css", minify=True)

	(static_dir / "sphinx_toolbox_installation.js").write_lines([
			"// Based on https://github.com/executablebooks/sphinx-tabs/blob/master/sphinx_tabs/static/tabs.js",
			"// Copyright (c) 2017 djungelorm",
			"// MIT Licensed",
			'',
			"function deselectTabset(target) {",
			"  const parent = target.parentNode;",
			"  const grandparent = parent.parentNode;",
			'',
			'  if (parent.parentNode.parentNode.getAttribute("id").startsWith("installation")) {',
			'',
			"    // Hide all tabs in current tablist, but not nested",
			"    Array.from(parent.children).forEach(t => {",
			'      if (t.getAttribute("name") !== target.getAttribute("name")) {',
			'        t.setAttribute("aria-selected", "false");',
			"      }",
			"    });",
			'',
			"    // Hide all associated panels",
			"    Array.from(grandparent.children).slice(1).forEach(p => {  // Skip tablist",
			'      if (p.getAttribute("name") !== target.getAttribute("name")) {',
			'        p.setAttribute("hidden", "false")',
			"      }",
			"    });",
			"  }",
			'',
			"  else {",
			"    // Hide all tabs in current tablist, but not nested",
			"    Array.from(parent.children).forEach(t => {",
			'      t.setAttribute("aria-selected", "false");',
			"    });",
			'',
			"    // Hide all associated panels",
			"    Array.from(grandparent.children).slice(1).forEach(p => {  // Skip tablist",
			'      p.setAttribute("hidden", "true")',
			"    });",
			"  }",
			'',
			'}',
			'',
			"// Compatibility with sphinx-tabs 2.1.0 and later",
			"function deselectTabList(tab) {deselectTabset(tab)}",
			'',
			])
Example #15
def copy_asset_files(app: Sphinx, exception: Optional[Exception] = None):
    """
	Copy additional stylesheets into the HTML build directory.

	.. versionadded:: 2.6.0

	:param app: The Sphinx application.
	:param exception: Any exception which occurred and caused Sphinx to abort.
	"""

    if exception:  # pragma: no cover
        return

    if app.builder is None or app.builder.format.lower() != "html":  # pragma: no cover
        return

    prompt_style: dict2css.Style = {
        "user-select": None,
        "font-size": "13px",
        "font-family":
        '"SFMono-Regular", Menlo, Consolas, Monaco, Liberation Mono, Lucida Console, monospace',
        "border": None,
        "padding": "11px 0 0",
        "margin": "0 5px 0 0",
        "box-shadow": None,
        "wrap-option": None,
        "white-space": "nowrap",
    }

    container_style: dict2css.Style = {
        "padding-top": "5px",
        "display": "flex",
        "align-items": "stretch",
        "margin": 0,
    }

    code_style_string = "div.code-cell.container div.code-cell-code, div.output-cell.container div.output-cell-code"
    code_style: dict2css.Style = {
        "width": "100%",
        "padding-top": 0,
        "margin-top": 0,
    }

    style: MutableMapping[str, dict2css.Style] = {
        "div.code-cell.container div.prompt": {
            "color": "#307FC1"
        },
        "div.output-cell.container div.prompt": {
            "color": "#BF5B3D"
        },
        "div.code-cell.container div.prompt, div.output-cell.container div.prompt":
        prompt_style,
        "div.code-cell.container, div.output-cell.container":
        container_style,
        code_style_string:
        code_style,
    }

    static_dir = PathPlus(app.outdir) / "_static"
    static_dir.maybe_make(parents=True)
    dict2css.dump(style, static_dir / "sphinx-toolbox-code.css")
Example #16
def make_module(
    name: str,
    module: ModuleType,
    attr_list: Iterable[str] = (),
    first_party_imports: Iterable[str] = (),
    converter=Converter()
) -> bool:
    """
	Create type stubs for a module.

	:param name: The name of the module.
	:param module: The module object.
	:param attr_list: A list of attributes to create stubs for.
	:param first_party_imports: A list of first-party imports to include at the top of the file.
	"""

    buf = StringList()
    path = name.split('.')

    stubs_dir = PathPlus(f"{path[0]}-stubs")
    stubs_dir.maybe_make()
    (stubs_dir / '/'.join(x for x in path[1:-1])).maybe_make(parents=True)
    stub_file = stubs_dir / '/'.join(x for x in path[1:-1]) / f"{path[-1]}.pyi"

    import_name = name.replace(".__init__", '')

    for imp in (*make_imports(name), *first_party_imports):
        imp = re.sub(
            fr"import {import_name}\.([A-Za-z_]+)\.([A-Za-z_]+)\.([A-Za-z_]+)",
            r"from .\1.\2 import \3", imp)
        imp = re.sub(fr"import {import_name}\.([A-Za-z_]+)\.([A-Za-z_]+)",
                     r"from .\1 import \2", imp)
        imp = re.sub(fr"import {import_name}\.([A-Za-z_]+)",
                     r"from . import \1", imp)
        imp = re.sub(fr"import {import_name}$", '', imp)
        buf.append(imp)

    if import_name != "System.ComponentModel":
        if import_name == "System":
            buf.append("from .ComponentModel import MarshalByValueComponent")
        else:
            buf.append(
                "from System.ComponentModel import MarshalByValueComponent")

    for attr_name in dedup(attr_list):
        stub_code = walk_attrs(module, attr_name, converter=converter)
        stub_code = stub_code.replace(f": {import_name}.", ": ")
        stub_code = stub_code.replace(f" -> {import_name}.", " -> ")
        stub_code = stub_code.replace(f"[{import_name}.", '[')
        stub_code = stub_code.replace(
            "System.Collections.Generic.IDictionary[System.String,System.String]",
            "Any")

        buf.blankline(ensure_single=True)
        buf.blankline()

        buf.append(stub_code)

    sorted_code = isort.code(str(buf), config=isort_config)
    sans_unneeded_imports = fix_code(
        sorted_code,
        additional_imports=None,
        expand_star_imports=False,
        remove_all_unused_imports=False,
        remove_duplicate_keys=False,
        remove_unused_variables=False,
        ignore_init_module_imports=False,
    )

    stub_file.write_text(sans_unneeded_imports)

    return True
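
A hypothetical invocation, assuming the target .NET namespace has already been made importable (for example via pythonnet); the module and attribute names below are illustrative only:

import System.IO  # importable once pythonnet has loaded the assembly; an assumption here

# Writes stubs for the listed attributes to System-stubs/IO.pyi.
make_module("System.IO", System.IO, attr_list=["Path", "File", "Directory"])
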
Example #17
class ActionsManager:
    """
	Responsible for creating, updating and removing GitHub Actions workflows.

	:param repo_path: Path to the repository root.
	:param templates:

	.. versionadded:: 2020.12.18
	"""
    def __init__(self, repo_path: pathlib.Path, templates: Environment):
        self.repo_path = repo_path
        self.templates = templates

        self.actions = templates.get_template("github_ci.yml")

        self.workflows_dir = PathPlus(repo_path / ".github" / "workflows")
        self.workflows_dir.maybe_make(parents=True)

        code_file_filter: DelimitedList[str] = DelimitedList()

        if self.templates.globals["enable_docs"]:
            code_file_filter.append(f"{templates.globals['docs_dir']}/**")
        else:
            code_file_filter.append("doc-source/**")

        code_file_filter.extend([
            "CONTRIBUTING.rst",
            ".imgbotconfig",
            ".pre-commit-config.yaml",
            ".pylintrc",
            ".readthedocs.yml",
        ])
        # ".bumpversion.cfg",
        # ".style.yapf",
        # "stubs.txt",

        self._code_file_filter = f"!({code_file_filter:|})"

    def get_gh_actions_python_versions(self) -> Dict[str, str]:
        """
		Prepares the mapping of Python versions to tox testenvs for use with GitHub Actions.

		.. versionadded:: 2020.12.21
		"""

        gh_action_matrix = self.get_gh_actions_matrix()
        version_mapping: Dict[str, str] = {}

        for py_version, (testenvs, experimental) in gh_action_matrix.items():
            version_mapping[str(py_version)] = testenvs

        return version_mapping

    def get_gh_actions_matrix(self) -> Dict[str, Tuple[str, bool]]:
        """
		Determines the matrix of Python versions used in GitHub Actions.

		.. versionadded:: $VERSION
		"""

        config = self.templates.globals

        python_versions = config["python_versions"]
        tox_py_versions = config["tox_py_versions"]
        third_party_version_matrix = config["third_party_version_matrix"]

        # TODO: support multi-library matrices
        if third_party_version_matrix:
            third_party_library = list(third_party_version_matrix.keys())[0]
            third_party_versions = DelimitedList(
                third_party_version_matrix[third_party_library])
            matrix_testenv_string = f"-{third_party_library}{{{third_party_versions:,}}}"
        else:
            matrix_testenv_string = ''

        output: Dict[str, Tuple[str, bool]] = {}

        for (py_version, metadata), gh_py_version, tox_py_version in zip(
                python_versions.items(),
                set_gh_actions_versions(python_versions),
                tox_py_versions,
        ):
            output[str(gh_py_version)] = (
                f"{tox_py_version}{matrix_testenv_string},build",
                metadata["experimental"],
            )

        return output

    @staticmethod
    def _is_experimental(version: str):
        if version in {"pypy-3.7", "pypy-3.8"}:
            return True

        try:
            return Version(version).is_prerelease
        except InvalidVersion:
            return False

    def make_windows(self) -> PathPlus:
        """
		Create, update or remove the Windows action, as appropriate.
		"""

        platform_name = "Windows"
        ci_file = self.workflows_dir / "python_ci.yml"

        if platform_name in self.templates.globals["platforms"]:
            ci_file.write_clean(
                self.actions.render(
                    no_dev_versions=no_dev_versions,
                    ci_platform=platform_ci_names[platform_name],
                    ci_name=platform_name,
                    python_versions=set_gh_actions_versions(
                        self.get_windows_ci_versions()),
                    dependency_lines=self.get_windows_ci_requirements(),
                    gh_actions_versions=self.get_gh_actions_matrix(),
                    code_file_filter=self._code_file_filter,
                    is_experimental=self._is_experimental))
        elif ci_file.is_file():
            ci_file.unlink()

        return ci_file

    def make_macos(self) -> PathPlus:
        """
		Create, update or remove the macOS action, as appropriate.
		"""

        platform_name = "macOS"
        ci_file = self.workflows_dir / f"python_ci_{platform_name.lower()}.yml"

        if platform_name in self.templates.globals["platforms"]:
            ci_file.write_clean(
                self.actions.render(
                    no_dev_versions=no_dev_versions,
                    ci_platform=platform_ci_names[platform_name],
                    ci_name=platform_name,
                    python_versions=set_gh_actions_versions(
                        self.get_macos_ci_versions()),
                    dependency_lines=self.get_macos_ci_requirements(),
                    gh_actions_versions=self.get_gh_actions_matrix(),
                    code_file_filter=self._code_file_filter,
                    is_experimental=self._is_experimental))
        elif ci_file.is_file():
            ci_file.unlink()

        return ci_file

    def make_linux(self) -> PathPlus:
        """
		Create, update or remove the Linux action, as appropriate.
		"""

        platform_name = "Linux"
        ci_file = self.workflows_dir / f"python_ci_{platform_name.lower()}.yml"

        if platform_name in self.templates.globals["platforms"]:

            conda_pip_dependencies = ["mkrecipe"]

            pyproject_file = PathPlus(self.repo_path / "pyproject.toml")
            if pyproject_file.is_file():
                data: DefaultDict[str, Any] = DefaultDict(
                    dom_toml.load(pyproject_file))
                conda_pip_dependencies.extend(data["build-system"]["requires"])

            ci_file.write_clean(
                self.actions.render(
                    no_dev_versions=no_dev_versions,
                    python_versions=set_gh_actions_versions(
                        self.get_linux_ci_versions()),
                    ci_platform=platform_ci_names[platform_name],
                    ci_name=platform_name,
                    dependency_lines=self.get_linux_ci_requirements(),
                    gh_actions_versions=self.get_gh_actions_matrix(),
                    code_file_filter=self._code_file_filter,
                    run_on_tags="    tags:\n      - '*'",
                    is_experimental=self._is_experimental,
                    conda_pip_dependencies=conda_pip_dependencies,
                ))
        elif ci_file.is_file():
            ci_file.unlink()

        return ci_file

    def make_rustpython(self) -> PathPlus:
        """
		Create, update or remove the RustPython action, as appropriate.
		"""

        platform_name = "Linux"

        template = self.templates.get_template("github_ci_rustpython.yml")
        ci_file = self.workflows_dir / f"rustpython_ci_{platform_name.lower()}.yml"

        if platform_name in self.templates.globals[
                "platforms"] and "rustpython" in self.get_linux_ci_versions():
            ci_file.write_clean(
                template.render(
                    ci_platform=platform_ci_names[platform_name],
                    dependency_lines=self.get_linux_ci_requirements(),
                    code_file_filter=self._code_file_filter,
                ))
        elif ci_file.is_file():
            ci_file.unlink()

        return ci_file

    def make_mypy(self) -> PathPlus:
        """
		Create, update or remove the mypy action, as appropriate.

		.. versionadded:: 2020.1.27
		"""

        ci_file = self.workflows_dir / "mypy.yml"
        template = self.templates.get_template(ci_file.name)
        # TODO: handle case where Linux is not a supported platform

        platforms = set(self.templates.globals["platforms"])
        if "macOS" in platforms:
            platforms.remove("macOS")

        platforms = set(
            filter(None, (platform_ci_names.get(p, None) for p in platforms)))

        dependency_lines = self.get_linux_mypy_requirements()
        linux_platform = platform_ci_names["Linux"]

        if dependency_lines == self.standard_python_install_lines:
            dependencies_block = StringList([
                "- name: Install dependencies 🔧",
                "  run: |",
            ])
            with dependencies_block.with_indent("  ", 2):
                dependencies_block.extend(self.standard_python_install_lines)
        else:
            dependencies_block = StringList([
                "- name: Install dependencies (Linux) 🔧",
                f"  if: ${{{{ matrix.os == '{linux_platform}' && steps.changes.outputs.code == 'true' }}}}",
                "  run: |",
            ])
            with dependencies_block.with_indent("  ", 2):
                dependencies_block.extend(dependency_lines)

            if self.templates.globals["platforms"] != ["Linux"]:
                dependencies_block.blankline(ensure_single=True)
                dependencies_block.extend([
                    "- name: Install dependencies (Win/mac) 🔧",
                    f"  if: ${{{{ matrix.os != '{linux_platform}' && steps.changes.outputs.code == 'true' }}}}",
                    "  run: |",
                ])
                with dependencies_block.with_indent("  ", 2):
                    dependencies_block.extend(
                        self.standard_python_install_lines)

        ci_file.write_clean(
            template.render(
                platforms=sorted(platforms),
                linux_platform=platform_ci_names["Linux"],
                dependencies_block=indent(str(dependencies_block), "      "),
                code_file_filter=self._code_file_filter,
            ))

        return ci_file

    def make_flake8(self) -> PathPlus:
        """
		Create, update or remove the flake8 action, as appropriate.

		.. versionadded:: 2021.8.11
		"""

        ci_file = self.workflows_dir / "flake8.yml"
        template = self.templates.get_template(ci_file.name)
        # TODO: handle case where Linux is not a supported platform

        ci_file.write_clean(
            template.render(code_file_filter=self._code_file_filter))

        return ci_file

    def get_windows_ci_versions(self) -> List[str]:
        """
		Returns the Python versions to run tests for on Windows.
		"""

        py_versions: List[str] = list(
            self.templates.globals["python_versions"])

        if not self.templates.globals["pure_python"] and "3.8" in py_versions:
            py_versions.remove(
                "3.8"
            )  # FIXME: Python 3.8 tests fail on Windows for native wheels.

        return py_versions

    def get_linux_ci_versions(self) -> List[str]:
        """
		Returns the Python versions to run tests for on Linux.
		"""

        return self.templates.globals["python_versions"]

    def get_macos_ci_versions(self) -> List[str]:
        """
		Returns the Python versions to run tests for on macOS.
		"""

        py_versions: List[str] = list(
            self.templates.globals["python_versions"])

        if "pypy36" in py_versions:
            py_versions.remove(
                "pypy36")  # PyPy 3.6 requires patching on Big Sur

        return py_versions

    standard_python_install_lines = [
        "python -VV",
        "python -m site",
        "python -m pip install --upgrade pip setuptools wheel",
        "python -m pip install --upgrade tox virtualenv",
    ]

    def _get_additional_requirements(self) -> Iterator[str]:
        if self.templates.globals["travis_additional_requirements"]:
            additional_requirements = DelimitedList(
                self.templates.globals["travis_additional_requirements"])
            yield f"python -m pip install --upgrade {additional_requirements: }"

    def get_windows_ci_requirements(self) -> List[str]:
        """
		Returns the Python requirements to run tests for on Windows.
		"""

        dependency_lines = StringList(
            self.templates.globals["github_ci_requirements"]["Windows"]["pre"])
        dependency_lines.extend(self.standard_python_install_lines)

        dependency_lines.extend(self._get_additional_requirements())
        dependency_lines.extend(
            self.templates.globals["github_ci_requirements"]["Windows"]
            ["post"])

        return dependency_lines

    def get_linux_ci_requirements(self) -> List[str]:
        """
		Returns the Python requirements to run tests for on Linux.
		"""

        dependency_lines = StringList(
            self.templates.globals["github_ci_requirements"]["Linux"]["pre"])
        dependency_lines.extend(self.standard_python_install_lines)

        if self.templates.globals["enable_tests"]:
            dependency_lines.append(
                "python -m pip install --upgrade coverage_pyver_pragma")

        dependency_lines.extend(self._get_additional_requirements())
        dependency_lines.extend(
            self.templates.globals["github_ci_requirements"]["Linux"]["post"])

        return dependency_lines

    def get_linux_mypy_requirements(self) -> List[str]:
        """
		Returns the Python requirements to run tests for on Linux.
		"""

        dependency_lines = StringList(
            self.templates.globals["github_ci_requirements"]["Linux"]["pre"])
        dependency_lines.extend(self.standard_python_install_lines)
        dependency_lines.extend(
            self.templates.globals["github_ci_requirements"]["Linux"]["post"])

        return dependency_lines

    def get_macos_ci_requirements(self) -> List[str]:
        """
		Returns the Python requirements to run tests for on macOS.
		"""

        dependency_lines = StringList(
            self.templates.globals["github_ci_requirements"]["macOS"]["pre"])
        dependency_lines.extend(self.standard_python_install_lines)

        dependency_lines.extend(self._get_additional_requirements())
        dependency_lines.extend(
            self.templates.globals["github_ci_requirements"]["macOS"]["post"])

        return dependency_lines
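
A sketch of how the manager above might be driven; ``repo_path`` and the jinja2 ``Environment`` are placeholders, and the template globals it reads (``platforms``, ``python_versions``, ``github_ci_requirements`` and so on) must already be populated:

manager = ActionsManager(repo_path, templates)

# Each make_* method writes (or removes) the corresponding workflow file
# under .github/workflows and returns its path.
for make in (manager.make_linux, manager.make_windows, manager.make_macos, manager.make_mypy):
    print(make())
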
Example #18
class Cache:
    """
	Cache function arguments to an in-memory dictionary and a JSON file.

	:param app_name: The name of the app. This dictates the name of the cache directory.
	"""

    app_name: str  #: The name of the app. This dictates the name of the cache directory.
    cache_dir: PathPlus  #: The location of the cache directory on disk.
    caches: Dict[str, Dict[str, Any]]  #: Mapping of function names to their caches.

    def __init__(self, app_name: str):
        self.app_name: str = str(app_name)
        self.cache_dir = PathPlus(
            platformdirs.user_cache_dir(f"{self.app_name}_cache"))
        self.cache_dir.maybe_make(parents=True)

        # Mapping of function names to their caches
        self.caches: Dict[str, Dict[str, Any]] = {}

    def clear(self, func: Optional[Callable] = None) -> bool:
        """
		Clear the cache.

		:param func: Optional function to clear the cache for.
			By default, the whole cache is cleared.
		:no-default func:

		:returns: True to indicate success. False otherwise.
		"""

        try:
            if func is None:
                shutil.rmtree(self.cache_dir)
                self.cache_dir.maybe_make()
                for function in self.caches:
                    self.caches[function] = {}
            else:
                function_name = func.__name__
                cache_file = self.cache_dir / f"{function_name}.json"
                if cache_file.is_file():
                    cache_file.unlink()

                if function_name in self.caches:
                    del self.caches[function_name]
                self.caches[function_name] = {}

            return True

        except Exception as e:  # pragma: no cover
            warnings.warn(f"Could not remove cache. The error was: {e}")
            return False

    def load_cache(self, func: Callable) -> Dict[str, Any]:
        """
		Loads the cache for the given function.

		:param func:
		"""

        cache_file: PathPlus = self.cache_dir / f"{func.__name__}.json"

        if cache_file.is_file():
            cache = json.loads(cache_file.read_text())
        else:
            cache = {}

        self.caches[func.__name__] = cache
        return cache

    def __call__(self, func: Callable):
        """
		Decorator to cache the return values of a function based on its inputs.

		:param func:
		"""

        function_name = func.__name__
        posargs: Iterable[str] = inspect.getfullargspec(func).args
        cache_file: PathPlus = self.cache_dir / f"{function_name}.json"
        self.load_cache(func)

        @wraps(func)
        def wrapper(*args, **kwargs: Any):
            kwargs = posargs2kwargs(args, posargs, kwargs)
            key: str = json.dumps(kwargs)
            response: Any

            cache = self.caches[function_name]
            if key in cache:
                # Return cached response
                response = json.loads(cache[key])
            else:
                response = func(**kwargs)

                if response is not None:
                    # Don't cache None values.
                    cache[key] = json.dumps(response)

            cache_file.write_text(json.dumps(cache))

            return response

        return wrapper
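
A usage sketch for the decorator; the app name and the decorated function are illustrative:

cache = Cache("my_app")

@cache
def lookup(name: str) -> dict:
    # Expensive work would go here; the JSON-serialisable result is cached per set of arguments.
    return {"name": name}

lookup("spam")  # computed, stored in memory and written to lookup.json
lookup("spam")  # served from the cache
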
Example #19
		"cache_dir",
		"resolve_url",
		"cache_dir_for_url",
		"download_objects_inv",
		"find_url",
		]

__author__: str = "Dominic Davis-Foster"
__copyright__: str = "2021 Dominic Davis-Foster"
__license__: str = "MIT License"
__version__: str = "0.2.1"
__email__: str = "*****@*****.**"

#: Directory in which cached files are stored.
cache_dir = PathPlus(appdirs.user_cache_dir("searchdocs"))
cache_dir.maybe_make(parents=True)


def resolve_url(url: Union[str, RequestsURL]) -> RequestsURL:
	"""
	Resolve any redirects in the given URL.

	:param url:
	"""

	return RequestsURL(RequestsURL(url).head(allow_redirects=True).url)


@functools.lru_cache()
def cache_dir_for_url(url: Union[str, URL]) -> PathPlus:
	"""