def test_fixtures_with_errors(
    pytester: Pytester,
    fixture: str,
    style: str,
    errors: int,
    passes: int,
    failures: int,
):
    """Fixtures that raise during setup or teardown are reported correctly.

    ``style`` selects the use-fixtures style (e.g. ``:once``/``:each``) and
    ``fixture`` names the failing fixture defined in the generated file; the
    expected outcome counts come from the parametrization.
    """
    code = f"""
    (ns test-fixtures-with-errors
      (:require [basilisp.test :refer [deftest is use-fixtures]]))

    (defn error-during-setup []
      (throw (ex-info "Setup error" {{}}))
      (yield))

    (defn error-during-teardown []
      (yield)
      (throw (ex-info "Teardown error" {{}})))

    (use-fixtures {style} {fixture})

    (deftest passing-test (is true))
    (deftest failing-test (is false))
    """
    pytester.makefile(".lpy", test_fixtures_with_errors=code)
    run = pytester.runpytest()
    run.assert_outcomes(passed=passes, failed=failures, errors=errors)
def test_issue2369_collect_module_fileext(self, pytester: Pytester) -> None:
    """Ensure we can collect files with weird file extensions as Python
    modules (#2369).

    Installs a small meta-path finder so that ``*.narf`` files containing
    Python source can be imported, then collects one through a
    ``pytest_collect_file`` hook.
    """
    # NOTE: the original conftest used the `imp` module and the legacy
    # `find_module`/`load_module` finder protocol, both removed in
    # Python 3.12, plus the deprecated py.path-based `path.ext` hook API.
    # Use importlib's spec-based protocol and the pathlib hook instead.
    pytester.makeconftest(
        """
        import os
        import sys
        import importlib.util
        from importlib.machinery import SourceFileLoader

        from _pytest.python import Module

        class Finder:
            def find_spec(self, name, path=None, target=None):
                filename = name + ".narf"
                if os.path.exists(filename):
                    # Explicit loader: spec_from_file_location cannot infer
                    # a loader for an unknown ".narf" suffix.
                    return importlib.util.spec_from_file_location(
                        name, filename, loader=SourceFileLoader(name, filename)
                    )
                return None

        sys.meta_path.append(Finder())

        def pytest_collect_file(file_path, parent):
            if file_path.suffix == ".narf":
                return Module.from_parent(path=file_path, parent=parent)
        """
    )
    pytester.makefile(
        ".narf",
        """\
        def test_something():
            assert 1 + 1 == 2""",
    )
    # Use runpytest_subprocess, since we're futzing with sys.meta_path.
    result = pytester.runpytest_subprocess()
    result.stdout.fnmatch_lines(["*1 passed*"])
def test_multiple_patterns(self, pytester: Pytester):
    """Test support for multiple --doctest-glob arguments (#1255)."""
    # Three candidate files; which ones collect depends on the globs given.
    pytester.maketxtfile(
        xdoc="""
        >>> 1
        1
        """
    )
    pytester.makefile(
        ".foo",
        test="""
        >>> 1
        1
        """,
    )
    pytester.maketxtfile(
        test_normal="""
        >>> 1
        1
        """
    )
    assert {entry.name for entry in pytester.path.iterdir()} == {
        "xdoc.txt",
        "test.foo",
        "test_normal.txt",
    }
    # With explicit globs, both custom patterns collect (2 doctests).
    result = pytester.runpytest("--doctest-glob=xdoc*.txt", "--doctest-glob=*.foo")
    result.stdout.fnmatch_lines(["*test.foo *", "*xdoc.txt *", "*2 passed*"])
    # Without globs, only the default test*.txt pattern applies.
    result = pytester.runpytest()
    result.stdout.fnmatch_lines(["*test_normal.txt *", "*1 passed*"])
def test_image_or_build_env_not_set(request, pytester: Pytester, docker_client: DockerClient):
    """image_or_build falls back to building when the env var override is unset."""
    # A fake build.
    dockerfile_lines = [
        "FROM alpine:3.13 AS builder",
        "LABEL test_image_or_build_env_not_set=yes",
    ]
    pytester.makefile("", Dockerfile="\n".join(dockerfile_lines))
    conftest_lines = [
        "from pytest_docker_tools import image_or_build",
        "memcache_image = image_or_build('ENVIRON_KEY', path='.')",
    ]
    pytester.makeconftest("\n".join(conftest_lines))
    inner_test_lines = [
        "def test_session_1(memcache_image):",
        "    assert 'test_image_or_build_env_not_set' in memcache_image.labels",
    ]
    pytester.makepyfile(test_reusable_container="\n".join(inner_test_lines))
    # NOTE(review): patch.dict with {} adds nothing and does not clear the
    # environment — presumably ENVIRON_KEY is simply never set in CI; confirm
    # whether clear=True was intended.
    with mock.patch.dict(os.environ, {}):
        result = pytester.runpytest()
        result.assert_outcomes(passed=1, errors=0)
def test_clean_up(pytester: Pytester) -> None:
    """Test that the plugin cleans up after itself."""
    # This is tough to test behaviorally because the cleanup really runs
    # last.  So the test makes several implementation assumptions:
    # - Cleanup is done in pytest_unconfigure().
    # - Not a hookwrapper.
    # That lets us add a hookwrapper ourselves to snapshot sys.path on both
    # sides of the plugin's cleanup.
    pytester.makefile(".ini", pytest="[pytest]\npythonpath=I_SHALL_BE_REMOVED\n")
    pytester.makepyfile(test_foo="""def test_foo(): pass""")

    snapshots: List[Optional[List[str]]] = [None, None]

    class Plugin:
        @pytest.hookimpl(hookwrapper=True, tryfirst=True)
        def pytest_unconfigure(self) -> Generator[None, None, None]:
            snapshots[0] = sys.path.copy()
            yield
            snapshots[1] = sys.path.copy()

    result = pytester.runpytest_inprocess(plugins=[Plugin()])
    assert result.ret == 0
    before, after = snapshots
    assert before is not None
    assert after is not None
    # The configured entry was present before cleanup and gone afterwards.
    assert any("I_SHALL_BE_REMOVED" in entry for entry in before)
    assert all("I_SHALL_BE_REMOVED" not in entry for entry in after)
def run_result(self, pytester: Pytester) -> "Iterator[RunResult]":
    """Run the canned basilisp test-runner suite and yield its RunResult.

    This is a generator (fixture-style): it yields the pytest run result,
    then on teardown removes the ``test-testrunner`` namespace so later
    runs start from a clean slate.  The original annotated the return as
    ``RunResult``, but a yielding function returns a generator; the fixed
    annotation is a string so it needs no typing import at runtime.
    """
    code = """
    (ns test-testrunner
      (:require [basilisp.test :refer [deftest is are testing]]))

    (deftest assertion-test
      (testing "is assertions"
        (is true)
        (is false)
        (is (= "string" "string"))
        (is (thrown? basilisp.lang.exception/ExceptionInfo
              (throw (ex-info "Exception" {}))))
        (is (thrown? basilisp.lang.exception/ExceptionInfo
              (throw (python/Exception))))
        (is (throw (ex-info "Uncaught exception" {}))))

      (testing "are assertions"
        (are [exp actual] (= exp actual)
          1 1
          :hi :hi
          "true" false
          4.6 4.6)))

    (deftest passing-test
      (is true))

    (deftest error-test
      (throw (ex-info "This test will count as an error." {})))
    """
    pytester.makefile(".lpy", test_testrunner=code)
    yield pytester.runpytest()
    # Teardown: drop the namespace created by importing the .lpy file.
    runtime.Namespace.remove(sym.symbol("test-testrunner"))
def test_module_not_found(pytester: Pytester) -> None:
    """Without the pythonpath setting, the module should not be found."""
    pytester.makefile(".ini", pytest="[pytest]\n")
    result = pytester.runpytest("test_foo.py")
    # Collection aborts: the `foo` import inside test_foo.py cannot resolve.
    assert result.ret == pytest.ExitCode.INTERRUPTED
    result.assert_outcomes(errors=1)
    result.stdout.fnmatch_lines(
        ["E ModuleNotFoundError: No module named 'foo'"]
    )
def test_pytester_makefile_dot_prefixes_extension_with_warning(
    pytester: Pytester,
) -> None:
    """makefile() rejects an extension missing its leading dot."""
    expected_message = (
        "pytester.makefile expects a file extension, try .foo.bar instead of foo.bar"
    )
    with pytest.raises(ValueError, match=expected_message):
        pytester.makefile("foo.bar", "")
def test_conftest_existing_junitxml(pytester: Pytester) -> None:
    """`-h` still shows conftest-added options when junit.xml already exists."""
    tests_dir = pytester.mkdir("tests")
    conftest_source = textwrap.dedent(
        """\
        def pytest_addoption(parser):
            parser.addoption("--xyz", action="store_true")
        """
    )
    tests_dir.joinpath("conftest.py").write_text(conftest_source)
    pytester.makefile(ext=".xml", junit="")  # Writes junit.xml
    result = pytester.runpytest("-h", "--junitxml", "junit.xml")
    result.stdout.fnmatch_lines(["*--xyz*"])
def test_tag_stages(request, pytester: Pytester, docker_client: DockerClient):
    """A multi-stage build tags both the final image and the named stage.

    Builds a two-stage Dockerfile with `stages={'builder': ...}` and checks
    that both tags exist afterwards and point at distinct images.
    """
    builder_tag = "localhost/pytest-docker-tools/buildtest:builder"
    latest_tag = "localhost/pytest-docker-tools/buildtest:latest"

    def _cleanup():
        # Remove whichever tags were created; keep going if one is absent.
        # (The original `return`ed on the first ImageNotFound, which could
        # leave the other tag behind.)
        for tag in (builder_tag, latest_tag):
            try:
                docker_client.images.remove(tag)
            except ImageNotFound:
                continue

    request.addfinalizer(_cleanup)

    # Precondition: neither tag exists yet.  The original wrapped the whole
    # loop in a single pytest.raises, which only guaranteed that *some*
    # lookup raised; assert each tag individually.
    for tag in (builder_tag, latest_tag):
        with pytest.raises(ImageNotFound):
            docker_client.images.get(tag)

    # A fake multi stage build.
    pytester.makefile(
        "",
        Dockerfile="\n".join((
            "FROM alpine:3.13 AS builder",
            "RUN touch /hello-intermediate-step",
            "RUN touch /hello",
            "FROM alpine:3.13",
            "COPY --from=builder /hello /hello",
        )),
    )
    pytester.makeconftest("\n".join((
        "from pytest_docker_tools import build",
        "myimage = build(",
        "    path='.',",
        f"    tag='{latest_tag}',",
        f"    stages={{'builder': '{builder_tag}'}},",
        ")",
    )))
    pytester.makepyfile(test_reusable_network="\n".join((
        "def test_session_1(myimage):",
        f"    assert '{latest_tag}' in myimage.tags",
    )))
    result = pytester.runpytest()
    result.assert_outcomes(passed=1)

    # Both tags must now resolve, to two distinct images.
    latest = docker_client.images.get(latest_tag)
    assert latest is not None
    builder = docker_client.images.get(builder_tag)
    assert builder is not None
    assert latest.id != builder.id
def test_fixtures(pytester: Pytester):
    """:once fixtures run once per module, :each per test; cleanups balance out."""
    code = """
    (ns test-fixtures
      (:require [basilisp.test :refer [deftest is use-fixtures]]))

    (def once-no-cleanup (volatile! 0))
    (def once-cleanup (volatile! 0))
    (def each-no-cleanup (volatile! 0))
    (def each-cleanup (volatile! 0))

    ;; return here rather than yielding
    (defn once-fixture-no-cleanup []
      (vswap! once-no-cleanup inc))

    (defn once-fixture-w-cleanup []
      (vswap! once-cleanup inc)
      (yield)
      (vswap! once-cleanup dec))

    ;; yield here rather than returning, even w/o cleanup step
    (defn each-fixture-no-cleanup []
      (vswap! each-no-cleanup inc)
      (yield))

    (defn each-fixture-w-cleanup []
      (vswap! each-cleanup inc)
      (yield)
      (vswap! each-cleanup dec))

    (use-fixtures :once once-fixture-no-cleanup once-fixture-w-cleanup)
    (use-fixtures :each each-fixture-no-cleanup each-fixture-w-cleanup)

    (deftest passing-test (is true))
    (deftest failing-test (is false))
    """
    pytester.makefile(".lpy", test_fixtures=code)
    result = pytester.runpytest()
    result.assert_outcomes(passed=1, failed=1)

    def get_volatile(vname: str):
        # Dereference the named volatile from the generated test namespace.
        var = runtime.Var.find_safe(sym.symbol(vname, ns="test-fixtures"))
        return var.value.deref()

    # :once fixtures ran exactly once; :each fixtures ran for both tests.
    assert 1 == get_volatile("once-no-cleanup")
    assert 0 == get_volatile("once-cleanup")
    assert 2 == get_volatile("each-no-cleanup")
    assert 0 == get_volatile("each-cleanup")
def test_marker_without_description(pytester: Pytester) -> None:
    """--strict-markers tolerates a marker registered without a description."""
    pytester.makefile(
        ".cfg",
        setup="""
        [tool:pytest]
        markers=slow
        """,
    )
    pytester.makeconftest(
        """
        import pytest
        pytest.mark.xfail('FAIL')
        """
    )
    # Relocate the conftest into a subdirectory so it is loaded during
    # collection rather than at startup.
    subdir = pytester.mkdir("ft1_dummy")
    pytester.path.joinpath("conftest.py").replace(subdir.joinpath("conftest.py"))
    rec = pytester.runpytest("--strict-markers")
    rec.assert_outcomes()
def test_not_collectable_arguments(self, pytester: Pytester) -> None:
    """Passing a non-collectable file (.pyc) on the command line is a usage error."""
    good = pytester.makepyfile("")
    bad = pytester.makefile(".pyc", "123")
    result = pytester.runpytest(good, bad)
    assert result.ret == ExitCode.USAGE_ERROR
    result.stderr.fnmatch_lines(
        [
            f"ERROR: not found: {bad}",
            f"(no name {str(bad)!r} in any of [[][]])",
            "",
        ]
    )
def test_not_collectable_arguments(self, pytester: Pytester) -> None:
    """Passing a non-collectable file (.pyc) on the command line is a usage error."""
    pyfile = pytester.makepyfile("")
    pycfile = pytester.makefile(".pyc", "123")
    result = pytester.runpytest(pyfile, pycfile)
    assert result.ret == ExitCode.USAGE_ERROR
    result.stderr.fnmatch_lines([f"ERROR: found no collectors for {pycfile}", ""])
def dummy_yaml_custom_test(pytester: Pytester):
    """Writes a conftest file that collects and executes a dummy yaml test.

    Taken from the docs, but stripped down to the bare minimum, useful for
    tests which needs custom items collected.
    """
    conftest_source = """
    import pytest

    def pytest_collect_file(parent, file_path):
        if file_path.suffix == ".yaml" and file_path.name.startswith("test"):
            return YamlFile.from_parent(path=file_path, parent=parent)

    class YamlFile(pytest.File):
        def collect(self):
            yield YamlItem.from_parent(name=self.path.name, parent=self)

    class YamlItem(pytest.Item):
        def runtest(self):
            pass
    """
    pytester.makeconftest(conftest_source)
    # An empty yaml file whose name matches the collection predicate above.
    pytester.makefile(".yaml", test1="")
def test_clean_up_pythonpath(pytester: Pytester) -> None:
    """Test that the srcpaths plugin cleans up after itself."""
    pytester.makefile(".ini", pytest="[pytest]\npythonpath=I_SHALL_BE_REMOVED\n")
    pytester.makepyfile(test_foo="""def test_foo(): pass""")

    captured: List[Optional[List[str]]] = [None, None]

    class Plugin:
        # Wrap pytest_unconfigure so we can snapshot sys.path on either
        # side of the plugin's own cleanup.
        @pytest.hookimpl(hookwrapper=True, tryfirst=True)
        def pytest_unconfigure(self) -> Generator[None, None, None]:
            captured[0] = sys.path.copy()
            yield
            captured[1] = sys.path.copy()

    result = pytester.runpytest_inprocess(plugins=[Plugin()])
    assert result.ret == 0
    before, after = captured
    assert before is not None
    assert after is not None
    assert any("I_SHALL_BE_REMOVED" in entry for entry in before)
    assert not any("I_SHALL_BE_REMOVED" in entry for entry in after)
def test_doctest_id(self, pytester: Pytester) -> None:
    """A failing doctest can be selected by node id and fails the same way twice."""
    pytester.makefile(
        ".txt",
        """
        >>> x=3
        >>> x
        4
        """,
    )
    testid = "test_doctest_id.txt::test_doctest_id.txt"
    expected_lines = [
        "*= FAILURES =*",
        "*_ ?doctest? test_doctest_id.txt _*",
        "FAILED test_doctest_id.txt::test_doctest_id.txt",
        "*= 1 failed in*",
    ]
    # Run twice: re-running must still handle the failure as
    # doctest.DocTestFailure, which was not the case before when
    # re-importing doctest, but not creating a new RUNNER_CLASS.
    for _ in range(2):
        result = pytester.runpytest(testid, "-rf", "--tb=short")
        result.stdout.fnmatch_lines(expected_lines)
def test_two_dirs(pytester: Pytester, file_structure) -> None:
    """With two dirs on pythonpath, both test modules resolve their imports."""
    pytester.makefile(".ini", pytest="[pytest]\npythonpath=sub sub2\n")
    outcome = pytester.runpytest("test_foo.py", "test_bar.py")
    assert outcome.ret == 0
    outcome.assert_outcomes(passed=2)
def test_one_dir(pytester: Pytester, file_structure) -> None:
    """A single srcpaths entry makes test_foo's import resolvable."""
    pytester.makefile(".ini", pytest="[pytest]\nsrcpaths=sub\n")
    outcome = pytester.runpytest("test_foo.py")
    assert outcome.ret == 0
    outcome.assert_outcomes(passed=1)