Esempio n. 1
0
    tmp_pathplus: PathPlus,
    advanced_file_regression: AdvancedFileRegressionFixture,
    advanced_data_regression: AdvancedDataRegressionFixture,
    modules: Iterable[str],
    show: bool,
) -> None:

    checker = ImportChecker(modules, show=show)

    advanced_data_regression.check(dict(checker.check_modules()))
    advanced_file_regression.check(checker.format_statistics())


@pytest.mark.parametrize("version", [
    pytest.param(
        3.6, marks=only_version(3.6, reason="Output differs on Python 3.6")),
    pytest.param(
        3.7, marks=only_version(3.7, reason="Output differs on Python 3.7")),
    pytest.param(
        3.8, marks=only_version(3.8, reason="Output differs on Python 3.8")),
    pytest.param(
        3.9, marks=only_version(3.9, reason="Output differs on Python 3.9")),
    pytest.param("3.10",
                 marks=only_version("3.10",
                                    reason="Output differs on Python 3.10")),
    pytest.param("3.11",
                 marks=only_version("3.11",
                                    reason="Output differs on Python 3.11")),
])
@pytest.mark.parametrize("show", [True, False])
def test_importchecker_errors_show(
Esempio n. 2
0
# stdlib
import os

# 3rd party
import pytest
from coincidence.selectors import only_version

# Ask pytest to load the "coincidence" plugin for every test collected here
# (standard pytest_plugins hook; tuple form allows listing more plugins later).
pytest_plugins = ("coincidence", )


# Parametrized fixture: each param carries an ``only_version`` mark, so for any
# given interpreter exactly one of them runs and the rest are skipped.
@pytest.fixture(
    params=[
        pytest.param(label, marks=only_version(selector, "Output differs on each version."))
        for label, selector in [
            ("3.7", 3.7),
            ("3.8", 3.8),
            ("3.9", 3.9),
            # NOTE: the 3.10 selector is a string — the float literal 3.10 == 3.1.
            ("3.10", "3.10"),
        ]
    ]
)
def version(request):
    """Return the "major.minor" version string for the current parametrization."""
    return request.param


@pytest.fixture(params=[
    pytest.param(
        "forward",
        marks=pytest.mark.skipif(
            os.sep == '\\',
            reason=r"Output differs on platforms where os.sep == '\\'")),
Esempio n. 3
0
        'username: "******"',
        "license: 'LGPLv3+'",
        "short_desc: 'Update multiple configuration files, build scripts etc. from a single location.'",
    ])

    with in_directory(tmp_repo):
        runner = CliRunner()
        result: Result = runner.invoke(show.changelog, catch_exceptions=False)

    assert result.exit_code == 0
    result.check_stdout(advanced_file_regression)


# Reusable parametrize mark: one param per supported interpreter. The
# only_version / min_version marks skip every param except the one matching
# the Python actually running the tests.
version_specific = pytest.mark.parametrize(
    "py_version",
    [
        pytest.param(label, marks=only_version(flt, reason=f"Output differs on Python {label}"))
        for label, flt in [("3.6", 3.6), ("3.7", 3.7), ("3.8", 3.8), ("3.9", 3.9)]
    ] + [
        # String selector for 3.10 (float literal 3.10 == 3.1); 3.11+ is open-ended.
        pytest.param("3.10", marks=only_version("3.10", "Output differs on Python 3.10")),
        pytest.param("3.11+", marks=min_version("3.11", "Output differs on Python 3.11+")),
    ],
)

show_directories = [
    PathPlus(__file__).parent.parent.parent,
    PathPlus(__file__).parent.parent,
                                   only_linux, only_macos, only_pypy,
                                   only_version, only_windows)


@min_version((3, 4), reason="Failure")
def test_min_version():
    """Empty body: merely being collected (not skipped) on >= 3.4 is the check."""


@max_version("4.10", reason="Failure")
def test_max_version():
    """Empty body: merely being collected (not skipped) on <= 4.10 is the check."""


@pytest.mark.parametrize("py_version", [
    pytest.param((3, 4), marks=only_version(3.4, "Success")),
    pytest.param((3, 5), marks=only_version(3.5, "Success")),
    pytest.param((3, 6), marks=only_version(3.6, "Success")),
    pytest.param((3, 8), marks=only_version(3.8, "Success")),
    pytest.param((3, 9), marks=only_version(3.9, "Success")),
    # Must be the string "3.10": the float literal 3.10 == 3.1, which would
    # make only_version select Python 3.1 instead of 3.10.
    pytest.param((3, 10), marks=only_version("3.10", "Success")),
])
def test_only_version(py_version: Tuple[int, int]):
    """If ``only_version`` let this param run, the interpreter must match it."""
    if sys.version_info[:2] != py_version:
        assert False  # noqa: PT015


@pytest.mark.parametrize(
    "py_version",
    parametrized_versions((3, 4), (3, 5), (3, 6), (3, 8), (3, 9), (3, 10),
                          reasons="Success"),
Esempio n. 5
0
 param("github.html", False, idx=0),
 param("latex.html", False, idx=0),
 param("collapse.html", False, idx=0),
 param("footnote_symbols.html", False, idx=0),
 param(
     "instancevar.html",
     True,
     marks=pytest.mark.skipif(
         condition=sys.version_info < (3, 7),
         reason="Output differs on Python 3.6",
     ),
     idx=0,
 ),
 pytest.param("generic_bases.html",
              True,
              marks=only_version(3.6,
                                 reason="Output differs on Python 3.6"),
              id="generic_bases_36"),
 pytest.param("generic_bases.html",
              True,
              marks=min_version(3.7,
                                reason="Output differs on Python 3.8+"),
              id="generic_bases"),
 pytest.param(
     "autonamedtuple_pep563.html",
     True,
     marks=min_version(
         3.7, reason="Output differs on Python 3.6, and not as relevant."),
     id="autonamedtuple_pep563"),
 pytest.param("genericalias.html",
              True,
              marks=min_version(3.7, reason="Output differs on Python 3.6"),
Esempio n. 6
0
# 3rd party
import pytest
from coincidence.regressions import AdvancedFileRegressionFixture
from coincidence.selectors import min_version, not_pypy, only_version, only_windows
from consolekit.testing import CliRunner, Result
from domdf_python_tools.paths import PathPlus, in_directory

# this package
from repo_helper.cli.commands import show

# Module-level mark (pytest applies it to every test in this file);
# only_windows restricts the whole module to Windows, per the reason string.
pytestmark = only_windows("Requirements differ on Windows")

# Reusable parametrize mark: one param per supported interpreter; the
# only_version / min_version marks skip all but the Python actually running,
# so each decorated test executes exactly once per run.
version_specific = pytest.mark.parametrize(
		"py_version",
		[
				pytest.param(v, marks=only_version(f, reason=f"Output differs on Python {v}"))
				for v, f in [("3.6", 3.6), ("3.7", 3.7), ("3.8", 3.8), ("3.9", 3.9)]
		] + [
				# Open-ended bucket for 3.10 and newer.
				pytest.param("3.10+", marks=min_version("3.10", "Output differs on Python 3.10+")),
				]
		)

# Two ancestor directories of this test file (three and two levels up);
# ``parents[n]`` is the indexed form of chaining ``.parent`` n+1 times.
show_directories = [
		PathPlus(__file__).parents[2],
		PathPlus(__file__).parents[1],
		]


@not_pypy("Output differs on PyPy.")
class TestShowRequirements:
Esempio n. 7
0
def parametrized_versions(
		*versions: Union[str, float, Tuple[int, ...]],
		reasons: Union[str, Iterable[Optional[str]]] = (),
		) -> List[ParameterSet]:
	r"""
	Return a list of parametrized version numbers.

	**Examples:**

	.. code-block:: python

		@pytest.mark.parametrize(
			"version",
			parametrized_versions(
				3.6,
				3.7,
				3.8,
				reasons="Output differs on each version.",
				),
			)
		def test_something(version: str):
			pass


	.. code-block:: python

		@pytest.fixture(
			params=parametrized_versions(
				3.6,
				3.7,
				3.8,
				reasons="Output differs on each version.",
				),
			)
		def version(request):
			return request.param

		def test_something(version: str):
			pass

	.. versionadded:: 0.4.0

	:param \*versions: The Python versions to parametrize.
	:param reasons: The reasons to use when skipping versions.
		Either a string value to use for all versions,
		or a list of values which correspond to ``*versions``.
	"""

	version_list = list(versions)
	params: List[ParameterSet] = []

	# A single string reason applies to every version; otherwise pad the
	# given iterable with None entries so it lines up with ``versions``.
	if isinstance(reasons, str):
		reasons = [reasons] * len(version_list)
	else:
		reasons = extend_with_none(reasons, len(version_list))

	for version, reason in zip(version_list, reasons):
		# Normalise str / float / tuple input into a common version object.
		version_ = _make_version(version)

		# Param value is the "major.minor" string; the only_version mark
		# skips the param on every other interpreter.
		the_param = pytest.param(
				f"{version_.major}.{version_.minor}",
				marks=only_version(version_, reason=reason),
				)

		params.append(the_param)

	return params