Example No. 1
    def test_xfail_imperative_in_setup_function(self,
                                                pytester: Pytester) -> None:
        p = pytester.makepyfile("""
            import pytest
            def setup_function(function):
                pytest.xfail("hello")

            def test_this():
                assert 0
        """)
        result = pytester.runpytest(p)
        result.stdout.fnmatch_lines(["*1 xfailed*"])
        result = pytester.runpytest(p, "-rx")
        result.stdout.fnmatch_lines(["*XFAIL*test_this*", "*reason:*hello*"])
        result = pytester.runpytest(p, "--runxfail")
        result.stdout.fnmatch_lines("""
            *def test_this*
            *1 fail*
        """)
Example No. 2
    def test_merging_markers_deep(self, pytester: Pytester) -> None:
        # issue 199 - propagate markers into nested classes
        p = pytester.makepyfile(
            """
            import pytest
            class TestA(object):
                pytestmark = pytest.mark.a
                def test_b(self):
                    assert True
                class TestC(object):
                    # this one didn't get marked
                    def test_d(self):
                        assert True
            """
        )
        items, rec = pytester.inline_genitems(p)
        for item in items:
            print(item, item.keywords)
            assert [x for x in item.iter_markers() if x.name == "a"]
Example No. 3
def test_setup(pytester: Pytester) -> None:
    testpath = pytester.makepyfile("""
        import unittest
        class MyTestCase(unittest.TestCase):
            def setUp(self):
                self.foo = 1
            def setup_method(self, method):
                self.foo2 = 1
            def test_both(self):
                self.assertEqual(1, self.foo)
                assert self.foo2 == 1
            def teardown_method(self, method):
                assert 0, "42"

    """)
    reprec = pytester.inline_run("-s", testpath)
    assert reprec.matchreport("test_both", when="call").passed
    rep = reprec.matchreport("test_both", when="teardown")
    assert rep.failed and "42" in str(rep.longrepr)
Example No. 4
def test_setup_class(pytester: Pytester) -> None:
    testpath = pytester.makepyfile("""
        import unittest
        import pytest
        class MyTestCase(unittest.TestCase):
            x = 0
            def setup_class(cls):
                cls.x += 1
            def test_func1(self):
                assert self.x == 1
            def test_func2(self):
                assert self.x == 1
            def teardown_class(cls):
                cls.x -= 1
        def test_teareddown():
            assert MyTestCase.x == 0
    """)
    reprec = pytester.inline_run(testpath)
    reprec.assertoutcome(passed=3)
Example No. 5
    def test_select_simple(self, pytester: Pytester) -> None:
        file_test = pytester.makepyfile("""
            def test_one():
                assert 0
            class TestClass(object):
                def test_method_one(self):
                    assert 42 == 43
        """)

        def check(keyword, name):
            reprec = pytester.inline_run("-s", "-k", keyword, file_test)
            passed, skipped, failed = reprec.listoutcomes()
            assert len(failed) == 1
            assert failed[0].nodeid.split("::")[-1] == name
            assert len(reprec.getcalls("pytest_deselected")) == 1

        for keyword in ["test_one", "est_on"]:
            check(keyword, "test_one")
        check("TestClass and test", "test_method_one")
Example No. 6
def test_xfail_handling(pytester: Pytester, monkeypatch: MonkeyPatch) -> None:
    """Ensure normal xfail is ignored, and strict xfail interrupts the session in sw mode

    (#5547)
    """
    monkeypatch.setattr("sys.dont_write_bytecode", True)

    contents = """
        import pytest
        def test_a(): pass

        @pytest.mark.xfail(strict={strict})
        def test_b(): assert {assert_value}

        def test_c(): pass
        def test_d(): pass
    """
    pytester.makepyfile(contents.format(assert_value="0", strict="False"))
    result = pytester.runpytest("--sw", "-v")
    result.stdout.fnmatch_lines(
        [
            "*::test_a PASSED *",
            "*::test_b XFAIL *",
            "*::test_c PASSED *",
            "*::test_d PASSED *",
            "* 3 passed, 1 xfailed in *",
        ]
    )

    pytester.makepyfile(contents.format(assert_value="1", strict="True"))
    result = pytester.runpytest("--sw", "-v")
    result.stdout.fnmatch_lines(
        [
            "*::test_a PASSED *",
            "*::test_b FAILED *",
            "* Interrupted*",
            "* 1 failed, 1 passed in *",
        ]
    )

    pytester.makepyfile(contents.format(assert_value="0", strict="True"))
    result = pytester.runpytest("--sw", "-v")
    result.stdout.fnmatch_lines(
        [
            "*::test_b XFAIL *",
            "*::test_c PASSED *",
            "*::test_d PASSED *",
            "* 2 passed, 1 deselected, 1 xfailed in *",
        ]
    )
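
A note on the strict flag exercised above: with strict=True, an unexpectedly passing test is reported as a real failure rather than XPASS, which is what lets --sw interrupt the session at test_b in the second run. A minimal standalone sketch of that semantics:

import pytest

# With strict=True, this test passes its body, so the xfail mark turns
# the outcome into FAILED at report time instead of XPASS.
@pytest.mark.xfail(strict=True)
def test_unexpected_pass():
    assert 1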
Example No. 7
def test_pdb_suspends_fixture_capturing(pytester: Pytester,
                                        fixture: str) -> None:
    """Using "-s" with pytest should suspend/resume fixture capturing."""
    p1 = pytester.makepyfile("""
        def test_inner({fixture}):
            import sys

            print("out_inner_before")
            sys.stderr.write("err_inner_before\\n")

            __import__("pdb").set_trace()

            print("out_inner_after")
            sys.stderr.write("err_inner_after\\n")

            out, err = {fixture}.readouterr()
            assert out == "out_inner_before\\nout_inner_after\\n"
            assert err == "err_inner_before\\nerr_inner_after\\n"
        """.format(fixture=fixture))

    child = pytester.spawn_pytest(str(p1) + " -s")

    child.expect("Pdb")
    before = child.before.decode("utf8")
    assert ("> PDB set_trace (IO-capturing turned off for fixture %s) >" %
            (fixture) in before)

    # Test that capturing is really suspended.
    child.sendline("p 40 + 2")
    child.expect("Pdb")
    assert "\r\n42\r\n" in child.before.decode("utf8")

    child.sendline("c")
    rest = child.read().decode("utf8")
    assert "out_inner" not in rest
    assert "err_inner" not in rest

    TestPDB.flush(child)
    assert child.exitstatus == 0
    assert "= 1 passed in" in rest
    assert "> PDB continue (IO-capturing resumed for fixture %s) >" % (
        fixture) in rest
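
As excerpted, test_pdb_suspends_fixture_capturing receives a `fixture` argument whose source is not shown. In context it is presumably parametrized over pytest's capture fixtures, along these lines (the decorator below is an assumption, not part of the snippet):

import pytest

# Hypothetical parametrization inferred from the `fixture` argument;
# capsys and capfd are the capture fixtures pytest provides.
@pytest.mark.parametrize("fixture", ["capsys", "capfd"])
def test_pdb_suspends_fixture_capturing(pytester, fixture: str) -> None:
    ...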
Example No. 8
def test_teardown(pytester: Pytester) -> None:
    testpath = pytester.makepyfile(
        """
        import unittest
        class MyTestCase(unittest.TestCase):
            values = []
            def test_one(self):
                pass
            def tearDown(self):
                self.values.append(None)
        class Second(unittest.TestCase):
            def test_check(self):
                self.assertEqual(MyTestCase.values, [None])
    """
    )
    reprec = pytester.inline_run(testpath)
    passed, skipped, failed = reprec.countoutcomes()
    assert failed == 0, failed
    assert passed == 2
    assert passed + skipped + failed == 2
Example No. 9
    def test_pdb_set_trace_kwargs(self, pytester: Pytester) -> None:
        p1 = pytester.makepyfile("""
            import pytest
            def test_1():
                i = 0
                print("hello17")
                pytest.set_trace(header="== my_header ==")
                x = 3
                assert 0
        """)
        child = pytester.spawn_pytest(str(p1))
        child.expect("== my_header ==")
        assert "PDB set_trace" not in child.before.decode()
        child.expect("Pdb")
        child.sendline("c")
        rest = child.read().decode("utf-8")
        assert "1 failed" in rest
        assert "def test_1" in rest
        assert "hello17" in rest  # out is captured
        self.flush(child)
Example No. 10
    def test_mark_decorator_subclass_does_not_propagate_to_base(
        self, pytester: Pytester
    ) -> None:
        p = pytester.makepyfile(
            """
            import pytest

            @pytest.mark.a
            class Base(object): pass

            @pytest.mark.b
            class Test1(Base):
                def test_foo(self): pass

            class Test2(Base):
                def test_bar(self): pass
        """
        )
        items, rec = pytester.inline_genitems(p)
        self.assert_markers(items, test_foo=("a", "b"), test_bar=("a",))
Example No. 11
def test_parametrize_iterator(pytester: Pytester) -> None:
    """`parametrize` should work with generators (#5354)."""
    py_file = pytester.makepyfile(
        """\
        import pytest

        def gen():
            yield 1
            yield 2
            yield 3

        @pytest.mark.parametrize('a', gen())
        def test(a):
            assert a >= 1
        """
    )
    result = pytester.runpytest(py_file)
    assert result.ret == 0
    # should not skip any tests
    result.stdout.fnmatch_lines(["*3 passed*"])
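
Because parametrize materializes its argument during collection, the generator above is consumed exactly once and behaves the same as an explicit list; a minimal sketch for comparison:

import pytest

# Equivalent explicit form: collection produces the same three test
# items as the gen() version above.
@pytest.mark.parametrize("a", [1, 2, 3])
def test(a):
    assert a >= 1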
Example No. 12
    def test_non_ignored_whitespace(self, pytester: Pytester):
        pytester.makeini(
            """
            [pytest]
            doctest_optionflags = ELLIPSIS
            """
        )
        p = pytester.makepyfile(
            """
            class MyClass(object):
                '''
                >>> a = "foo    "
                >>> print(a)
                foo
                '''
                pass
            """
        )
        reprec = pytester.inline_run(p, "--doctest-modules")
        reprec.assertoutcome(failed=1, passed=0)
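
The failure above comes from the trailing spaces assigned to `a`: the ELLIPSIS flag does not relax whitespace matching. A hedged counterpart, not part of the original suite, assuming NORMALIZE_WHITESPACE behaves as documented (runs of whitespace compare equal):

    def test_whitespace_normalized_variant(self, pytester: Pytester):
        # Hypothetical variant of the test above: with NORMALIZE_WHITESPACE,
        # "foo    " and "foo" compare equal, so the doctest passes.
        pytester.makeini(
            """
            [pytest]
            doctest_optionflags = NORMALIZE_WHITESPACE
            """
        )
        p = pytester.makepyfile(
            '''
            class MyClass(object):
                """
                >>> a = "foo    "
                >>> print(a)
                foo
                """
                pass
            '''
        )
        reprec = pytester.inline_run(p, "--doctest-modules")
        reprec.assertoutcome(failed=0, passed=1)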
Example No. 13
    def test_doctestmodule_two_tests_one_fail(self, pytester: Pytester):
        p = pytester.makepyfile(
            """
            class MyClass(object):
                def bad_meth(self):
                    '''
                    >>> magic = 42
                    >>> magic
                    0
                    '''
                def nice_meth(self):
                    '''
                    >>> magic = 42
                    >>> magic - 42
                    0
                    '''
            """
        )
        reprec = pytester.inline_run(p, "--doctest-modules")
        reprec.assertoutcome(failed=1, passed=1)
Example No. 14
def test_nose_setup_func_failure(pytester: Pytester) -> None:
    p = pytester.makepyfile("""
        from nose.tools import with_setup

        values = []
        my_setup = lambda x: 1
        my_teardown = lambda x: 2

        @with_setup(my_setup, my_teardown)
        def test_hello():
            print(values)
            assert values == [1]

        def test_world():
            print(values)
            assert values == [1,2]

    """)
    result = pytester.runpytest(p, "-p", "nose")
    result.stdout.fnmatch_lines(["*TypeError: <lambda>()*"])
Example No. 15
def test_conftest_global_import(pytester: Pytester) -> None:
    pytester.makeconftest("x=3")
    p = pytester.makepyfile("""
        import py, pytest
        from _pytest.config import PytestPluginManager
        conf = PytestPluginManager()
        mod = conf._importconftest(py.path.local("conftest.py"), importmode="prepend")
        assert mod.x == 3
        import conftest
        assert conftest is mod, (conftest, mod)
        subconf = py.path.local().ensure("sub", "conftest.py")
        subconf.write("y=4")
        mod2 = conf._importconftest(subconf, importmode="prepend")
        assert mod != mod2
        assert mod2.y == 4
        import conftest
        assert conftest is mod2, (conftest, mod)
    """)
    res = pytester.runpython(p)
    assert res.ret == 0
Example No. 16
def test_pytester_outcomes_with_multiple_errors(pytester: Pytester) -> None:
    p1 = pytester.makepyfile(
        """
        import pytest

        @pytest.fixture
        def bad_fixture():
            raise Exception("bad")

        def test_error1(bad_fixture):
            pass

        def test_error2(bad_fixture):
            pass
    """
    )
    result = pytester.runpytest(str(p1))
    result.assert_outcomes(errors=2)

    assert result.parseoutcomes() == {"errors": 2}
Example No. 17
def test_setUpModule(pytester: Pytester) -> None:
    testpath = pytester.makepyfile(
        """
        values = []

        def setUpModule():
            values.append(1)

        def tearDownModule():
            del values[0]

        def test_hello():
            assert values == [1]

        def test_world():
            assert values == [1]
        """
    )
    result = pytester.runpytest(testpath)
    result.stdout.fnmatch_lines(["*2 passed*"])
Example No. 18
    def test_getsourcelines_error_issue553(
        self, pytester: Pytester, monkeypatch
    ) -> None:
        monkeypatch.setattr("inspect.getsourcelines", None)
        p = pytester.makepyfile(
            """
            def raise_error(obj):
                raise OSError('source code not available')

            import inspect
            inspect.getsourcelines = raise_error

            def test_foo(invalid_fixture):
                pass
        """
        )
        res = pytester.runpytest(p)
        res.stdout.fnmatch_lines(
            ["*source code not available*", "E*fixture 'invalid_fixture' not found"]
        )
Example No. 19
def test_spawn_uses_tmphome(pytester: Pytester) -> None:
    tmphome = str(pytester.path)
    assert os.environ.get("HOME") == tmphome

    pytester._monkeypatch.setenv("CUSTOMENV", "42")

    p1 = pytester.makepyfile(
        """
        import os

        def test():
            assert os.environ["HOME"] == {tmphome!r}
            assert os.environ["CUSTOMENV"] == "42"
        """.format(
            tmphome=tmphome
        )
    )
    child = pytester.spawn_pytest(str(p1))
    out = child.read()
    assert child.wait() == 0, out.decode("utf8")
Example No. 20
def test_dynamic_fixture_request(pytester: Pytester) -> None:
    p = pytester.makepyfile("""
        import pytest
        @pytest.fixture()
        def dynamically_requested_fixture():
            pass
        @pytest.fixture()
        def dependent_fixture(request):
            request.getfixturevalue('dynamically_requested_fixture')
        def test_dyn(dependent_fixture):
            pass
    """)

    result = pytester.runpytest("--setup-only", p)
    assert result.ret == 0

    result.stdout.fnmatch_lines([
        "*SETUP    F dynamically_requested_fixture",
        "*TEARDOWN F dynamically_requested_fixture",
    ])
Example No. 21
    def test_skipif_markeval_namespace_ValueError(self,
                                                  pytester: Pytester) -> None:
        pytester.makeconftest("""
            import pytest

            def pytest_markeval_namespace():
                return True
            """)
        p = pytester.makepyfile("""
            import pytest

            @pytest.mark.skipif("color == 'green'")
            def test_1():
                assert True
        """)
        res = pytester.runpytest(p)
        assert res.ret == 1
        res.stdout.fnmatch_lines([
            "*ValueError: pytest_markeval_namespace() needs to return a dict, got True*"
        ])
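
For contrast with the failing conftest above, a sketch of a well-formed hook (the `color` value is illustrative): the hook must return a dict, whose keys become names usable inside condition strings such as "color == 'green'".

# conftest.py -- hedged sketch of the expected return type:
def pytest_markeval_namespace(config):
    # Returning a dict makes `color` resolvable in the skipif condition.
    return {"color": "red"}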
Example No. 22
def test_do_cleanups_on_success(pytester: Pytester) -> None:
    testpath = pytester.makepyfile("""
        import unittest
        class MyTestCase(unittest.TestCase):
            values = []
            def setUp(self):
                def cleanup():
                    self.values.append(1)
                self.addCleanup(cleanup)
            def test_one(self):
                pass
            def test_two(self):
                pass
        def test_cleanup_called_the_right_number_of_times():
            assert MyTestCase.values == [1, 1]
    """)
    reprec = pytester.inline_run(testpath)
    passed, skipped, failed = reprec.countoutcomes()
    assert failed == 0
    assert passed == 3
Example No. 23
def test_unittest_expected_failure_for_failing_test_is_xfail(
        pytester: Pytester, runner) -> None:
    script = pytester.makepyfile("""
        import unittest
        class MyTestCase(unittest.TestCase):
            @unittest.expectedFailure
            def test_failing_test_is_xfail(self):
                assert False
        if __name__ == '__main__':
            unittest.main()
    """)
    if runner == "pytest":
        result = pytester.runpytest("-rxX")
        result.stdout.fnmatch_lines(
            ["*XFAIL*MyTestCase*test_failing_test_is_xfail*", "*1 xfailed*"])
    else:
        result = pytester.runpython(script)
        result.stderr.fnmatch_lines(
            ["*1 test in*", "*OK*(expected failures=1)*"])
    assert result.ret == 0
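
The `runner` argument is supplied from outside the excerpt; given the two branches of the if/else, it is presumably parametrized along these lines (an assumption, not shown in the snippet):

import pytest

# Hypothetical decorator inferred from the "pytest"/"unittest" branches.
@pytest.mark.parametrize("runner", ["pytest", "unittest"])
def test_unittest_expected_failure_for_failing_test_is_xfail(
        pytester, runner) -> None:
    ...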
Example No. 24
def test_do_class_cleanups_on_setupclass_failure(pytester: Pytester) -> None:
    testpath = pytester.makepyfile("""
        import unittest
        class MyTestCase(unittest.TestCase):
            values = []
            @classmethod
            def setUpClass(cls):
                def cleanup():
                    cls.values.append(1)
                cls.addClassCleanup(cleanup)
                assert False
            def test_one(self):
                pass
        def test_cleanup_called_exactly_once():
            assert MyTestCase.values == [1]
    """)
    reprec = pytester.inline_run(testpath)
    passed, skipped, failed = reprec.countoutcomes()
    assert failed == 1
    assert passed == 1
Example No. 25
    def test_lastfailed_with_known_failures_not_being_selected(
            self, pytester: Pytester) -> None:
        pytester.makepyfile(
            **{
                "pkg1/test_1.py": """def test_1(): assert 0""",
                "pkg1/test_2.py": """def test_2(): pass""",
            })
        result = pytester.runpytest()
        result.stdout.fnmatch_lines(
            ["collected 2 items", "* 1 failed, 1 passed in *"])

        Path("pkg1/test_1.py").unlink()
        result = pytester.runpytest("--lf")
        result.stdout.fnmatch_lines([
            "collected 1 item",
            "run-last-failure: 1 known failures not in selected tests",
            "* 1 passed in *",
        ])

        # Recreate file with known failure.
        pytester.makepyfile(**{"pkg1/test_1.py": """def test_1(): assert 0"""})
        result = pytester.runpytest("--lf")
        result.stdout.fnmatch_lines([
            "collected 1 item",
            "run-last-failure: rerun previous 1 failure (skipped 1 file)",
            "* 1 failed in *",
        ])

        # Remove/rename test: collects the file again.
        pytester.makepyfile(
            **{"pkg1/test_1.py": """def test_renamed(): assert 0"""})
        result = pytester.runpytest("--lf", "-rf")
        result.stdout.fnmatch_lines([
            "collected 2 items",
            "run-last-failure: 1 known failures not in selected tests",
            "pkg1/test_1.py F *",
            "pkg1/test_2.py . *",
            "FAILED pkg1/test_1.py::test_renamed - assert 0",
            "* 1 failed, 1 passed in *",
        ])

        result = pytester.runpytest("--lf", "--co")
        result.stdout.fnmatch_lines([
            "collected 1 item",
            "run-last-failure: rerun previous 1 failure (skipped 1 file)",
            "",
            "<Module pkg1/test_1.py>",
            "  <Function test_renamed>",
        ])
Example No. 26
    def test_lastfailed_creates_cache_when_needed(self,
                                                  pytester: Pytester) -> None:
        # Issue #1342
        pytester.makepyfile(test_empty="")
        pytester.runpytest("-q", "--lf")
        assert not os.path.exists(".pytest_cache/v/cache/lastfailed")

        pytester.makepyfile(
            test_successful="def test_success():\n    assert True")
        pytester.runpytest("-q", "--lf")
        assert not os.path.exists(".pytest_cache/v/cache/lastfailed")

        pytester.makepyfile(test_errored="def test_error():\n    assert False")
        pytester.runpytest("-q", "--lf")
        assert os.path.exists(".pytest_cache/v/cache/lastfailed")
Example No. 27
def test_show_fixtures_with_autouse(pytester: Pytester, mode) -> None:
    p = pytester.makepyfile('''
        import pytest
        @pytest.fixture
        def arg_function():
            """function scoped fixture"""
        @pytest.fixture(scope='session', autouse=True)
        def arg_session():
            """session scoped fixture"""
        def test_arg1(arg_function):
            pass
    ''')

    result = pytester.runpytest(mode, p)
    assert result.ret == 0

    result.stdout.fnmatch_lines([
        "SETUP    S arg_session*",
        "*SETUP    F arg_function*",
        "*test_arg1 (fixtures used: arg_function, arg_session)*",
    ])
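
`mode` is likewise supplied from outside the excerpt; the SETUP/TEARDOWN lines being matched suggest the setup-reporting flags, presumably something like:

import pytest

# Hypothetical parametrization inferred from the matched output; the
# original decorator is not included in this snippet.
@pytest.mark.parametrize("mode", ["--setup-only", "--setup-show"])
def test_show_fixtures_with_autouse(pytester, mode) -> None:
    ...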
Example No. 28
    def test_xfail_not_run_xfail_reporting(self, pytester: Pytester) -> None:
        p = pytester.makepyfile(test_one="""
            import pytest
            @pytest.mark.xfail(run=False, reason="noway")
            def test_this():
                assert 0
            @pytest.mark.xfail("True", run=False)
            def test_this_true():
                assert 0
            @pytest.mark.xfail("False", run=False, reason="huh")
            def test_this_false():
                assert 1
        """)
        result = pytester.runpytest(p, "-rx")
        result.stdout.fnmatch_lines([
            "*test_one*test_this*",
            "*NOTRUN*noway",
            "*test_one*test_this_true*",
            "*NOTRUN*condition:*True*",
            "*1 passed*",
        ])
Example No. 29
def test_teardown_issue1649(pytester: Pytester) -> None:
    """
    Are TestCase objects cleaned up? Often unittest TestCase objects set
    attributes that are large and expensive during setUp.

    The TestCase will not be cleaned up if the test fails, because it
    would then exist in the stackframe.
    """
    testpath = pytester.makepyfile("""
        import unittest
        class TestCaseObjectsShouldBeCleanedUp(unittest.TestCase):
            def setUp(self):
                self.an_expensive_object = 1
            def test_demo(self):
                pass

    """)
    pytester.inline_run("-s", testpath)
    gc.collect()
    for obj in gc.get_objects():
        assert type(obj).__name__ != "TestCaseObjectsShouldBeCleanedUp"
Example No. 30
    def test_namespace_import_doesnt_confuse_import_hook(
            self, pytester: Pytester) -> None:
        """Ref #383.

        Python 3.3's namespace package messed with our import hooks.
        Importing a module that didn't exist, even if the ImportError was
        gracefully handled, would make our test crash.
        """
        pytester.mkdir("not_a_package")
        p = pytester.makepyfile("""
            try:
                from not_a_package import doesnt_exist
            except ImportError:
                # We handle the import error gracefully here
                pass

            def test_whatever():
                pass
        """)
        res = pytester.runpytest(p.name)
        assert res.ret == 0