Beispiel #1
0
def test_pytester_outcomes_with_multiple_errors(pytester: Pytester) -> None:
    """Both tests that request the raising fixture are reported as errors."""
    source = """
        import pytest

        @pytest.fixture
        def bad_fixture():
            raise Exception("bad")

        def test_error1(bad_fixture):
            pass

        def test_error2(bad_fixture):
            pass
    """
    test_file = pytester.makepyfile(source)
    run = pytester.runpytest(str(test_file))
    # assert_outcomes and parseoutcomes must agree: two errors, nothing else.
    run.assert_outcomes(errors=2)
    assert run.parseoutcomes() == {"errors": 2}
Beispiel #2
0
def test_marker_without_description(pytester: Pytester) -> None:
    """A marker registered without a description still satisfies --strict-markers."""
    # Register the marker via setup.cfg rather than an ini file.
    pytester.makefile(
        ".cfg",
        setup="""
        [tool:pytest]
        markers=slow
    """,
    )
    pytester.makeconftest(
        """
        import pytest
        pytest.mark.xfail('FAIL')
    """
    )
    # Move the conftest into a subdirectory so it is only picked up there.
    sub = pytester.mkdir("ft1_dummy")
    pytester.path.joinpath("conftest.py").replace(sub.joinpath("conftest.py"))
    run = pytester.runpytest("--strict-markers")
    run.assert_outcomes()
Beispiel #3
0
def test_image_or_build_env_set(request, pytester: Pytester,
                                docker_client: DockerClient):
    """image_or_build resolves to the image named by the environment variable."""
    conftest_lines = (
        "from pytest_docker_tools import image_or_build",
        "memcache_image = image_or_build('ENVIRON_KEY')",
    )
    pytester.makeconftest("\n".join(conftest_lines))

    test_lines = (
        "def test_session_1(memcache_image):",
        "    assert 'memcached:latest' in memcache_image.tags",
    )
    pytester.makepyfile(test_reusable_container="\n".join(test_lines))

    docker_client.images.pull(repository="memcached:latest")

    # Point the fixture at the pulled image via the environment.
    with mock.patch.dict(os.environ, {"ENVIRON_KEY": "memcached:latest"}):
        result = pytester.runpytest()

    result.assert_outcomes(passed=1, errors=0)
Beispiel #4
0
def test_issue1073_conftest_special_objects(pytester: Pytester) -> None:
    """Collection must not trigger __getattr__ on conftest module globals (#1073)."""
    conftest_source = """\
        class DontTouchMe(object):
            def __getattr__(self, x):
                raise Exception('cant touch me')

        x = DontTouchMe()
        """
    pytester.makeconftest(conftest_source)
    pytester.makepyfile(
        """\
        def test_some():
            pass
        """
    )
    run = pytester.runpytest()
    # The session must complete cleanly despite the booby-trapped global.
    assert run.ret == 0
Beispiel #5
0
 def test_multiple_items_per_collector_byid(self, pytester: Pytester) -> None:
     """An item produced by a custom collector can be selected via `<file>::<name>`."""
     conftest = pytester.makeconftest(
         """
         import pytest
         class MyItem(pytest.Item):
             def runtest(self):
                 pass
         class MyCollector(pytest.File):
             def collect(self):
                 return [MyItem.from_parent(name="xyz", parent=self)]
         def pytest_collect_file(fspath, parent):
             if fspath.name.startswith("conftest"):
                 return MyCollector.from_parent(path=fspath, parent=parent)
     """
     )
     run = pytester.runpytest("::".join((conftest.name, "xyz")))
     assert run.ret == 0
     run.stdout.fnmatch_lines(["*1 pass*"])
Beispiel #6
0
 def test_valid_setup_py(self, pytester: Pytester):
     """A well-formed setup.py must be skipped when running with --doctest-modules."""
     script = pytester.makepyfile(
         setup="""
         if __name__ == '__main__':
             from setuptools import setup, find_packages
             setup(name='sample',
                   version='0.0',
                   description='description',
                   packages=find_packages()
             )
     """
     )
     run = pytester.runpytest(script, "--doctest-modules")
     # Nothing collected: setup.py is recognized and ignored.
     run.stdout.fnmatch_lines(["*collected 0 items*"])
Beispiel #7
0
def test_conftest_found_with_double_dash(pytester: Pytester) -> None:
    """Options from a sub-directory conftest appear in -h for a test id under it."""
    subdir = pytester.mkdir("sub")
    conftest_src = textwrap.dedent(
        """\
        def pytest_addoption(parser):
            parser.addoption("--hello-world", action="store_true")
        """
    )
    subdir.joinpath("conftest.py").write_text(conftest_src)
    test_path = subdir.joinpath("test_hello.py")
    test_path.write_text("def test_hello(): pass")
    run = pytester.runpytest("%s::test_hello" % test_path, "-h")
    run.stdout.fnmatch_lines(
        """
        *--hello-world*
    """
    )
Beispiel #8
0
def test_is_generator_async_gen_syntax(pytester: Pytester) -> None:
    """Async generator functions must not be classified as (sync) generators."""
    source = """
        from _pytest.compat import is_generator
        def test_is_generator_py36():
            async def foo():
                yield
                await foo()

            async def bar():
                yield

            assert not is_generator(foo)
            assert not is_generator(bar)
    """
    pytester.makepyfile(source)
    run = pytester.runpytest()
    run.stdout.fnmatch_lines(["*1 passed*"])
Beispiel #9
0
def test_help(pytester: Pytester) -> None:
    """Smoke-test `pytest --help`: key options and the trailing hint lines appear."""
    result = pytester.runpytest("--help")
    assert result.ret == 0
    # Multi-line fnmatch pattern: each non-blank line must match, in order.
    result.stdout.fnmatch_lines(
        """
          -m MARKEXPR           only run tests matching given mark expression.
                                For example: -m 'mark1 and not mark2'.
        reporting:
          --durations=N *
          -V, --version         display pytest version and information about plugins.
                                When given twice, also display information about
                                plugins.
        *setup.cfg*
        *minversion*
        *to see*markers*pytest --markers*
        *to see*fixtures*pytest --fixtures*
    """
    )
Beispiel #10
0
def test_option_precedence_mark(pytester: Pytester) -> None:
    """Filters defined by marks should always take precedence (#3946)."""
    pytester.makeini(
        """
        [pytest]
        filterwarnings = ignore
    """
    )
    pytester.makepyfile(
        """
        import pytest, warnings
        @pytest.mark.filterwarnings('error')
        def test():
            warnings.warn(UserWarning('hello'))
    """
    )
    # `-W ignore` on the command line must not override the mark-level 'error'.
    run = pytester.runpytest("-W", "ignore")
    run.stdout.fnmatch_lines(["* 1 failed in*"])
Beispiel #11
0
    def test_idfn_marker(self, pytester: Pytester) -> None:
        """An ids callable returning None falls back to the default parametrize id."""
        source = """
            import pytest

            def idfn(param):
                if param == 0:
                    return 'spam'
                elif param == 1:
                    return 'ham'
                else:
                    return None

            @pytest.mark.parametrize('a,b', [(0, 2), (1, 2)], ids=idfn)
            def test_params(a, b):
                pass
        """
        pytester.makepyfile(source)
        run = pytester.runpytest("--collect-only")
        # idfn names 'a'; 'b' keeps the default value-based id ("2").
        run.stdout.fnmatch_lines(["*spam-2*", "*ham-2*"])
Beispiel #12
0
    def test_getsourcelines_error_issue553(self, pytester: Pytester,
                                           monkeypatch) -> None:
        """Errors from inspect.getsourcelines must not mask the real failure (#553)."""
        monkeypatch.setattr("inspect.getsourcelines", None)
        source = """
            def raise_error(obj):
                raise OSError('source code not available')

            import inspect
            inspect.getsourcelines = raise_error

            def test_foo(invalid_fixture):
                pass
        """
        script = pytester.makepyfile(source)
        run = pytester.runpytest(script)
        # Both the source-lookup error and the missing-fixture error are shown.
        run.stdout.fnmatch_lines([
            "*source code not available*",
            "E*fixture 'invalid_fixture' not found",
        ])
Beispiel #13
0
def test_option_precedence_cmdline_over_ini(pytester: Pytester,
                                            ignore_on_cmdline) -> None:
    """Filters defined in the command-line should take precedence over filters in ini files (#3946)."""
    pytester.makeini("""
        [pytest]
        filterwarnings = error::UserWarning
    """)
    pytester.makepyfile("""
        import warnings
        def test():
            warnings.warn(UserWarning('hello'))
    """)
    extra_args = ("-W", "ignore") if ignore_on_cmdline else ()
    run = pytester.runpytest(*extra_args)
    # With -W ignore the ini 'error' filter is overridden and the test passes.
    expected = "* 1 passed in*" if ignore_on_cmdline else "* 1 failed in*"
    run.stdout.fnmatch_lines([expected])
Beispiel #14
0
def test_parametrized_with_kwargs(pytester: Pytester) -> None:
    """Test collect parametrized func with wrong number of args."""
    source = """
        import pytest

        @pytest.fixture(params=[1,2])
        def a(request):
            return request.param

        @pytest.mark.parametrize(argnames='b', argvalues=[1, 2])
        def test_func(a, b):
            pass
    """
    script = pytester.makepyfile(source)
    run = pytester.runpytest(script)
    # parametrize with keyword arguments collects and runs cleanly.
    assert run.ret == 0
Beispiel #15
0
def test_mktemp(pytester: Pytester, basename: str, is_ok: bool) -> None:
    """tmpdir_factory.mktemp accepts plain names and rejects path-escaping ones."""
    basetemp = pytester.mkdir("mytemp")
    script = pytester.makepyfile(
        """
        def test_abs_path(tmpdir_factory):
            tmpdir_factory.mktemp('{}', numbered=False)
        """.format(
            basename
        )
    )

    run = pytester.runpytest(script, "--basetemp=%s" % basetemp)
    if is_ok:
        assert run.ret == 0
        assert basetemp.joinpath(basename).exists()
    else:
        # Invalid basenames raise ValueError inside the test session.
        assert run.ret == 1
        run.stdout.fnmatch_lines("*ValueError*")
Beispiel #16
0
def test_nose_style_setup_teardown(pytester: Pytester) -> None:
    """nose-style setup_module/teardown_module hooks run via the nose plugin."""
    source = """
        values = []

        def setup_module():
            values.append(1)

        def teardown_module():
            del values[0]

        def test_hello():
            assert values == [1]

        def test_world():
            assert values == [1]
        """
    pytester.makepyfile(source)
    run = pytester.runpytest("-p", "nose")
    run.stdout.fnmatch_lines(["*2 passed*"])
Beispiel #17
0
def test_parametrize_iterator(pytester: Pytester) -> None:
    """`parametrize` should work with generators (#5354)."""
    source = """\
        import pytest

        def gen():
            yield 1
            yield 2
            yield 3

        @pytest.mark.parametrize('a', gen())
        def test(a):
            assert a >= 1
        """
    script = pytester.makepyfile(source)
    run = pytester.runpytest(script)
    assert run.ret == 0
    # all three generated parameters must be collected and pass
    run.stdout.fnmatch_lines(["*3 passed*"])
Beispiel #18
0
def test_testcase_handles_init_exceptions(pytester: Pytester) -> None:
    """
    Regression test to make sure exceptions in the __init__ method are bubbled up correctly.
    See https://github.com/pytest-dev/pytest/issues/3788
    """
    source = """
        from unittest import TestCase
        import pytest
        class MyTestCase(TestCase):
            def __init__(self, *args, **kwargs):
                raise Exception("should raise this exception")
            def test_hello(self):
                pass
    """
    pytester.makepyfile(source)
    run = pytester.runpytest()
    assert "should raise this exception" in run.stdout.str()
    # The failure must not additionally surface as a teardown error.
    run.stdout.no_fnmatch_line(
        "*ERROR at teardown of MyTestCase.test_hello*")
Beispiel #19
0
def test_hook_proxy(pytester: Pytester) -> None:
    """Session's gethookproxy() would cache conftests incorrectly (#2016).
    It was decided to remove the cache altogether.
    """
    tree = {
        "root/demo-0/test_foo1.py": "def test1(): pass",
        "root/demo-a/test_foo2.py": "def test1(): pass",
        "root/demo-a/conftest.py": """\
            def pytest_ignore_collect(path, config):
                return True
            """,
        "root/demo-b/test_foo3.py": "def test1(): pass",
        "root/demo-c/test_foo4.py": "def test1(): pass",
    }
    pytester.makepyfile(**tree)
    run = pytester.runpytest()
    # demo-a is excluded by its own conftest; the other three files run.
    run.stdout.fnmatch_lines(
        ["*test_foo1.py*", "*test_foo3.py*", "*test_foo4.py*", "*3 passed*"])
Beispiel #20
0
def test_runTest_method(pytester: Pytester) -> None:
    """unittest `runTest` is collected only when a class has no other test_* methods."""
    pytester.makepyfile("""
        import unittest
        class MyTestCaseWithRunTest(unittest.TestCase):
            def runTest(self):
                self.assertEqual('foo', 'foo')
        class MyTestCaseWithoutRunTest(unittest.TestCase):
            def runTest(self):
                self.assertEqual('foo', 'foo')
            def test_something(self):
                pass
        """)
    result = pytester.runpytest("-v")
    # First class: runTest is collected; second class: test_something wins, runTest is skipped.
    result.stdout.fnmatch_lines("""
        *MyTestCaseWithRunTest::runTest*
        *MyTestCaseWithoutRunTest::test_something*
        *2 passed*
    """)
Beispiel #21
0
def test_setUpModule(pytester: Pytester) -> None:
    """unittest-style setUpModule/tearDownModule hooks run around the module's tests."""
    source = """
        values = []

        def setUpModule():
            values.append(1)

        def tearDownModule():
            del values[0]

        def test_hello():
            assert values == [1]

        def test_world():
            assert values == [1]
        """
    script = pytester.makepyfile(source)
    run = pytester.runpytest(script)
    run.stdout.fnmatch_lines(["*2 passed*"])
Beispiel #22
0
    def test_fail_with_msg_is_deprecated(self, pytester: Pytester) -> None:
        """pytest.fail(msg=...) emits a PytestRemovedIn8Warning pointing at reason=."""
        script = pytester.makepyfile(
            """
            import pytest

            def test_failing_msg():
                pytest.fail(msg="failedmsg")
            """
        )
        run = pytester.runpytest(script)
        expected = [
            "*PytestRemovedIn8Warning: pytest.fail(msg=...) is now deprecated, "
            "use pytest.fail(reason=...) instead",
            '*pytest.fail(msg="failedmsg")',
        ]
        run.stdout.fnmatch_lines(expected)
        # The deliberate failure plus the deprecation warning, nothing more.
        run.assert_outcomes(failed=1, warnings=1)
Beispiel #23
0
def test_method_and_teardown_failing_reporting(pytester: Pytester) -> None:
    """A failing test plus a failing tearDown yields 1 failed and 1 error."""
    pytester.makepyfile("""
        import unittest
        class TC(unittest.TestCase):
            def tearDown(self):
                assert 0, "down1"
            def test_method(self):
                assert False, "down2"
    """)
    run = pytester.runpytest("-s")
    assert run.ret == 1
    expected = [
        "*tearDown*",
        "*assert 0*",
        "*test_method*",
        "*assert False*",
        "*1 failed*1 error*",
    ]
    run.stdout.fnmatch_lines(expected)
Beispiel #24
0
    def test_unicode_doctest(self, pytester: Pytester):
        """
        Test case for issue 2434: DecodeError on Python 2 when doctest contains non-ascii
        characters.
        """
        # The doctest mixes non-ascii text ("Byé") with a failing example (1 / 0)
        # so the unexpected-exception report itself must handle unicode.
        p = pytester.maketxtfile(test_unicode_doctest="""
            .. doctest::

                >>> print("Hi\\n\\nByé")
                Hi
                ...
                Byé
                >>> 1 / 0  # Byé
                1
        """)
        result = pytester.runpytest(p)
        result.stdout.fnmatch_lines(
            ["*UNEXPECTED EXCEPTION: ZeroDivisionError*", "*1 failed*"])
Beispiel #25
0
 def test_doctest_linedata_missing(self, pytester: Pytester):
     """A failing doctest inside a property still reports file and line number."""
     pytester.path.joinpath("hello.py").write_text(
         textwrap.dedent(
             """\
             class Fun(object):
                 @property
                 def test(self):
                     '''
                     >>> a = 1
                     >>> 1/0
                     '''
             """
         )
     )
     result = pytester.runpytest("--doctest-modules")
     # "006" is the reported source line of the failing example inside hello.py.
     result.stdout.fnmatch_lines(
         ["*hello*", "006*>>> 1/0*", "*UNEXPECTED*ZeroDivision*", "*1 failed*"]
     )
Beispiel #26
0
def test_fixtures_in_conftest(pytester: Pytester) -> None:
    """--fixtures-per-test lists each test's fixtures, including transitive
    dependencies, with their conftest.py source line and first docstring line."""
    pytester.makeconftest(
        '''
        import pytest
        @pytest.fixture
        def arg1():
            """arg1 docstring"""
        @pytest.fixture
        def arg2():
            """arg2 docstring"""
        @pytest.fixture
        def arg3(arg1, arg2):
            """arg3
            docstring
            """
    '''
    )
    p = pytester.makepyfile(
        """
        def test_arg2(arg2):
            pass
        def test_arg3(arg3):
            pass
    """
    )
    result = pytester.runpytest("--fixtures-per-test", p)
    assert result.ret == 0

    # test_arg3 pulls in arg1 and arg2 transitively via arg3's parameters.
    result.stdout.fnmatch_lines(
        [
            "*fixtures used by test_arg2*",
            "*(test_fixtures_in_conftest.py:2)*",
            "arg2 -- conftest.py:6",
            "    arg2 docstring",
            "*fixtures used by test_arg3*",
            "*(test_fixtures_in_conftest.py:4)*",
            "arg1 -- conftest.py:3",
            "    arg1 docstring",
            "arg2 -- conftest.py:6",
            "    arg2 docstring",
            "arg3 -- conftest.py:9",
            "    arg3",
        ]
    )
Beispiel #27
0
def test_show_multi_test_fixture_setup_and_teardown_correctly_simple(
    pytester: Pytester, ) -> None:
    """--setup-plan must show SETUP/TEARDOWN once for a class-scoped fixture
    shared by two tests, not once per test (#2049).

    This was purely a display bug in --setup-plan; actual test execution used
    the correct fixture lifetimes, and the related --setup-show/--setup-only
    were unaffected.
    """
    pytester.makepyfile("""
        import pytest
        @pytest.fixture(scope = 'class')
        def fix():
            return object()
        class TestClass:
            def test_one(self, fix):
                assert False
            def test_two(self, fix):
                assert False
    """)

    result = pytester.runpytest("--setup-plan")
    assert result.ret == 0

    out_lines = result.stdout.lines
    # Before the fix this guards against, a SETUP/TEARDOWN pair was printed
    # for each test, so each count would have been 2 instead of 1.
    assert sum("SETUP    C fix" in line for line in out_lines) == 1
    assert sum("TEARDOWN C fix" in line for line in out_lines) == 1
Beispiel #28
0
def test_fixture_dependency(pytester: Pytester) -> None:
    """A test module's own `bar` fixture wins over a parent conftest's `bar`.

    The conftest `bar` depends on a `foo` fixture that asserts if instantiated,
    so a pass proves the shadowed conftest fixtures are never set up.
    """
    # Fix: the original bound `ct1` twice and discarded the first value
    # (makeconftest's return); the dead rebinding is removed here.
    pytester.makeconftest("")
    # NOTE(review): makepyfile("__init__.py") creates a default-named test module
    # whose *content* is the string "__init__.py"; write_text("") then empties it.
    # The net effect is an empty test_fixture_dependency.py, likely intended to
    # be a package __init__.py -- behavior kept as-is, flagged for follow-up.
    stub = pytester.makepyfile("__init__.py")
    stub.write_text("")
    sub = pytester.mkdir("sub")
    sub.joinpath("__init__.py").write_text("")
    sub.joinpath("conftest.py").write_text(
        textwrap.dedent(
            """\
            import pytest

            @pytest.fixture
            def not_needed():
                assert False, "Should not be called!"

            @pytest.fixture
            def foo():
                assert False, "Should not be called!"

            @pytest.fixture
            def bar(foo):
                return 'bar'
            """
        )
    )
    subsub = sub.joinpath("subsub")
    subsub.mkdir()
    subsub.joinpath("__init__.py").write_text("")
    subsub.joinpath("test_bar.py").write_text(
        textwrap.dedent(
            """\
            import pytest

            @pytest.fixture
            def bar():
                return 'sub bar'

            def test_event_fixture(bar):
                assert bar == 'sub bar'
            """
        )
    )
    result = pytester.runpytest("sub")
    result.stdout.fnmatch_lines(["*1 passed*"])
    def test_lastfailed_creates_cache_when_needed(self, pytester: Pytester) -> None:
        """The lastfailed cache file is created only once a failure exists (#1342)."""
        cache_file = ".pytest_cache/v/cache/lastfailed"

        # No tests at all: no cache file.
        pytester.makepyfile(test_empty="")
        pytester.runpytest("-q", "--lf")
        assert not os.path.exists(cache_file)

        # Only passing tests: still no cache file.
        pytester.makepyfile(test_successful="def test_success():\n    assert True")
        pytester.runpytest("-q", "--lf")
        assert not os.path.exists(cache_file)

        # A failing test finally materializes the cache file.
        pytester.makepyfile(test_errored="def test_error():\n    assert False")
        pytester.runpytest("-q", "--lf")
        assert os.path.exists(cache_file)
Beispiel #30
0
def test_errors_in_xfail_skip_expressions(pytester: Pytester) -> None:
    """String conditions in skipif/xfail that fail to evaluate are reported as
    collection-time errors, while unaffected tests still run."""
    pytester.makepyfile("""
        import pytest
        @pytest.mark.skipif("asd")
        def test_nameerror():
            pass
        @pytest.mark.xfail("syntax error")
        def test_syntax():
            pass

        def test_func():
            pass
    """)
    result = pytester.runpytest()
    # The caret column of the SyntaxError marker differs by interpreter/version;
    # trim the leading spaces accordingly.
    markline = "                ^"
    pypy_version_info = getattr(sys, "pypy_version_info", None)
    if pypy_version_info is not None and pypy_version_info < (6, ):
        markline = markline[5:]
    elif sys.version_info >= (3, 8) or hasattr(sys, "pypy_version_info"):
        markline = markline[4:]

    if sys.version_info[:2] >= (3, 10):
        # Python 3.10+ adds exception-chaining output before the skipif report.
        expected = [
            "*ERROR*test_nameerror*",
            "*asd*",
            "",
            "During handling of the above exception, another exception occurred:",
        ]
    else:
        expected = [
            "*ERROR*test_nameerror*",
        ]

    expected += [
        "*evaluating*skipif*condition*",
        "*asd*",
        "*ERROR*test_syntax*",
        "*evaluating*xfail*condition*",
        "    syntax error",
        markline,
        "SyntaxError: invalid syntax",
        "*1 pass*2 errors*",
    ]
    result.stdout.fnmatch_lines(expected)