Ejemplo n.º 1
0
 def test_marked_skipif_with_boolean_without_reason(self, testdir) -> None:
     """A bare boolean skipif condition without reason= must fail evaluation."""
     test_item = testdir.getitem("""
         import pytest
         @pytest.mark.skipif(False)
         def test_func():
             pass
     """)
     with pytest.raises(pytest.fail.Exception) as exc:
         evaluate_skip_marks(test_item)
     message = exc.value.msg
     assert message is not None
     assert (
         """Error evaluating 'skipif': you need to specify reason=STRING when using booleans as conditions."""
         in message
     )
Ejemplo n.º 2
0
    def test_marked_skipif_with_invalid_boolean(self, testdir) -> None:
        """A condition whose __bool__ raises must surface a helpful failure."""
        test_item = testdir.getitem("""
            import pytest

            class InvalidBool:
                def __bool__(self):
                    raise TypeError("INVALID")

            @pytest.mark.skipif(InvalidBool(), reason="xxx")
            def test_func():
                pass
        """)
        with pytest.raises(pytest.fail.Exception) as exc:
            evaluate_skip_marks(test_item)
        message = exc.value.msg
        assert message is not None
        assert "Error evaluating 'skipif' condition as a boolean" in message
        assert "INVALID" in message
Ejemplo n.º 3
0
 def test_marked_one_arg_with_reason(self, pytester: Pytester) -> None:
     """An explicit reason= takes precedence over the condition text."""
     test_item = pytester.getitem("""
         import pytest
         @pytest.mark.skipif("hasattr(os, 'sep')", attr=2, reason="hello world")
         def test_func():
             pass
     """)
     result = evaluate_skip_marks(test_item)
     assert result
     assert result.reason == "hello world"
Ejemplo n.º 4
0
 def test_marked_one_arg(self, pytester: Pytester) -> None:
     """Without reason=, the skip reason is derived from the condition string."""
     test_item = pytester.getitem("""
         import pytest
         @pytest.mark.skipif("hasattr(os, 'sep')")
         def test_func():
             pass
     """)
     result = evaluate_skip_marks(test_item)
     assert result
     assert result.reason == "condition: hasattr(os, 'sep')"
Ejemplo n.º 5
0
 def test_marked_skipif_no_args(self, pytester: Pytester) -> None:
     """A bare @pytest.mark.skipif (no arguments) skips with an empty reason."""
     test_item = pytester.getitem("""
         import pytest
         @pytest.mark.skipif
         def test_func():
             pass
     """)
     result = evaluate_skip_marks(test_item)
     assert result
     assert result.reason == ""
Ejemplo n.º 6
0
 def test_marked_one_arg_twice2(self, testdir):
     """With two stacked skipif marks, the mark whose condition holds supplies the reason."""
     test_item = testdir.getitem("""
         import pytest
         @pytest.mark.skipif("hasattr(os, 'murks')")
         @pytest.mark.skipif("not hasattr(os, 'murks')")
         def test_func():
             pass
     """)
     result = evaluate_skip_marks(test_item)
     assert result
     assert result.reason == "condition: not hasattr(os, 'murks')"
Ejemplo n.º 7
0
 def test_skipif_class(self, pytester: Pytester) -> None:
     """A class-level pytestmark skipif string is evaluated with config in scope."""
     [only_item] = pytester.getitems("""
         import pytest
         class TestClass(object):
             pytestmark = pytest.mark.skipif("config._hackxyz")
             def test_func(self):
                 pass
     """)
     only_item.config._hackxyz = 3  # type: ignore[attr-defined]
     result = evaluate_skip_marks(only_item)
     assert result
     assert result.reason == "condition: config._hackxyz"
Ejemplo n.º 8
0
 def test_skipif_class(self, testdir):
     """A class-level pytestmark skipif string is evaluated with config in scope."""
     [only_item] = testdir.getitems("""
         import pytest
         class TestClass(object):
             pytestmark = pytest.mark.skipif("config._hackxyz")
             def test_func(self):
                 pass
     """)
     only_item.config._hackxyz = 3
     result = evaluate_skip_marks(only_item)
     assert result
     assert result.reason == "condition: config._hackxyz"
Ejemplo n.º 9
0
 def test_marked_one_arg_twice(self, pytester: Pytester) -> None:
     """Whichever order the two marks are stacked, the true condition supplies the reason."""
     lines = [
         """@pytest.mark.skipif("not hasattr(os, 'murks')")""",
         """@pytest.mark.skipif(condition="hasattr(os, 'murks')")""",
     ]
     # Try both stacking orders of the same pair of marks.
     for first, second in ((lines[0], lines[1]), (lines[1], lines[0])):
         test_item = pytester.getitem("""
             import pytest
             %s
             %s
             def test_func():
                 pass
         """ % (first, second))
         result = evaluate_skip_marks(test_item)
         assert result
         assert result.reason == "condition: not hasattr(os, 'murks')"
Ejemplo n.º 10
0
 def test_no_marker(self, pytester: Pytester) -> None:
     """An unmarked test item produces no skip result."""
     plain_item = pytester.getitem("def test_func(): pass")
     assert not evaluate_skip_marks(plain_item)
Ejemplo n.º 11
0
 def test_no_marker(self, testdir):
     """An unmarked test item produces no skip result."""
     plain_item = testdir.getitem("def test_func(): pass")
     assert not evaluate_skip_marks(plain_item)
Ejemplo n.º 12
0
def pytest_runtestloop(session):
    """Replace pytest's default run loop with cooperative asyncio scheduling.

    Items marked ``asyncio_cooperative`` are turned into asyncio tasks and run
    concurrently (bounded by a max-task window with per-test timeouts); all
    other items are run afterwards through the normal pytest protocol.
    This function is a generator (it yields once), so it is presumably
    registered as a hookwrapper — confirm at the ``hookimpl`` decorator site.
    """
    # Refuse to run alongside pytest-asyncio, whose event-loop management
    # conflicts with this plugin's own scheduling.
    if session.config.pluginmanager.is_registered("asyncio"):
        raise Exception(
            "pytest-asyncio-cooperative is NOT compatible with pytest-asyncio\n"
            "Uninstall pytest-asyncio or pass this option to pytest: `-p no:asyncio`\n"
        )

    # pytest-cooperative needs to hijack the runtestloop from pytest.
    # To prevent the default pytest runtestloop from running tests we make it think we
    # were only collecting tests. Slightly a hack, but it is needed for other plugins
    # which use the pytest_runtestloop hook.
    previous_collectonly = session.config.option.collectonly
    session.config.option.collectonly = True
    yield  # let the default runtestloop (and other hook impls) run in collect-only mode
    session.config.option.collectonly = previous_collectonly

    # Cache for fixtures wrapped by this plugin; presumably populated by the
    # fixture machinery elsewhere in the plugin — TODO confirm.
    session.wrapped_fixtures = {}

    # Fresh tasks created for flakey tests that failed and should be re-run.
    flakes_to_retry = []

    # Collect our coroutines
    regular_items = []   # items to run synchronously via the normal protocol
    item_by_coro = {}    # maps coroutine/task -> originating pytest item
    tasks = []           # coroutines (later wrapped in asyncio.Tasks) to schedule
    for item in session.items:
        markers = {m.name: m for m in item.own_markers}

        if "skip" in markers or "skipif" in markers:
            # Best to hand off to the core pytest logic to handle this so reporting works
            if isinstance(evaluate_skip_marks(item), Skip):
                regular_items.append(item)
                continue

        # Coerce into a task
        if "asyncio_cooperative" in markers:
            try:
                task = item_to_task(item)
            except NotCoroutine:
                # Marked async but not actually a coroutine: report the failure
                # immediately through the standard protocol and move on.
                item.runtest = functools.partial(not_coroutine_failure, item.name)
                item.ihook.pytest_runtest_protocol(item=item, nextitem=None)
                continue

            item._flakey = "flakey" in markers
            item_by_coro[task] = item
            tasks.append(task)
        else:
            regular_items.append(item)

    def get_coro(task):
        # Task.get_coro() was added in Python 3.8; fall back to the private
        # attribute on older interpreters.
        if sys_version_info >= (3, 8):
            return task.get_coro()
        else:
            return task._coro

    async def run_tests(tasks, max_tasks: int):
        """Run up to max_tasks tests concurrently, refilling from the rest as they finish."""
        sidelined_tasks = tasks[max_tasks:]
        tasks = tasks[:max_tasks]

        task_timeout = int(session.config.getoption("--asyncio-task-timeout"))

        completed = []
        while tasks:

            # Schedule all the coroutines
            for i in range(len(tasks)):
                if asyncio.iscoroutine(tasks[i]):
                    tasks[i] = asyncio.create_task(tasks[i])

            # Mark when the task was started
            earliest_enqueue_time = time.time()
            for task in tasks:
                if isinstance(task, asyncio.Task):
                    item = item_by_coro[get_coro(task)]
                else:
                    item = item_by_coro[task]
                if not hasattr(item, "enqueue_time"):
                    item.enqueue_time = time.time()
                earliest_enqueue_time = min(item.enqueue_time, earliest_enqueue_time)

            # NOTE(review): this is (elapsed - timeout), which is negative until
            # the oldest task has exceeded task_timeout; asyncio.wait treats a
            # negative timeout as "return immediately", making this loop poll
            # busily in the meantime. Looks like (timeout - elapsed) may have
            # been intended — confirm before changing.
            time_to_wait = (time.time() - earliest_enqueue_time) - task_timeout
            done, pending = await asyncio.wait(
                tasks, return_when=asyncio.FIRST_COMPLETED, timeout=min(30, int(time_to_wait))
            )

            # Cancel tasks that have taken too long
            tasks = []
            for task in pending:
                now = time.time()
                item = item_by_coro[get_coro(task)]
                if task_timeout < now - item.enqueue_time:
                    if sys_version_info >= (3, 9):
                        # Task.cancel(msg=...) is only available on 3.9+.
                        msg = "Test took too long ({:.2f} s)".format(
                            now - item.enqueue_time
                        )
                        task.cancel(msg=msg)
                    else:
                        task.cancel()
                # Keep the (possibly just-cancelled) task in the wait set so its
                # completion is observed on a later iteration.
                tasks.append(task)

            for result in done:
                item = item_by_coro[get_coro(result)]

                # Flakey tests will be run again if they failed
                # TODO: add retry count
                if item._flakey:
                    try:
                        result.result()
                    # NOTE(review): bare except also swallows CancelledError /
                    # KeyboardInterrupt — consider narrowing to Exception.
                    except:
                        item._flakey = None
                        new_task = item_to_task(item)
                        flakes_to_retry.append(new_task)
                        item_by_coro[new_task] = item
                        continue

                # Re-raise the task's outcome (if any) when pytest calls runtest.
                # The lambda's late binding of `result` is safe because the hook
                # below consumes it before the loop variable advances.
                item.runtest = lambda: result.result()

                item.ihook.pytest_runtest_protocol(item=item, nextitem=None)

                # Hack: See rewrite comment below
                # pytest_runtest_protocol will disable the rewritten assertions
                # so we re-enable them here
                activate_assert_rewrite(item)

                completed.append(result)

            # Refill the window one task per wave from the sidelined backlog.
            if sidelined_tasks:
                if len(tasks) < max_tasks:
                    tasks.append(sidelined_tasks.pop(0))

        return completed

    # Do assert rewrite
    # Hack: pytest's implementation sets up assert rewriting as a shared
    # resource. This causes a race condition between async tests. Therefore we
    # need to activate the assert rewriting here
    if tasks:
        item = item_by_coro[tasks[0]]
        activate_assert_rewrite(item)

    # Run the tests using cooperative multitasking
    if not previous_collectonly:
        _run_test_loop(tasks, session, run_tests)

        # Run failed flakey tests
        if flakes_to_retry:
            _run_test_loop(flakes_to_retry, session, run_tests)

        # Run synchronous tests
        session.items = regular_items
        for i, item in enumerate(session.items):
            nextitem = session.items[i + 1] if i + 1 < len(session.items) else None
            item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
            if session.shouldfail:
                raise session.Failed(session.shouldfail)
            if session.shouldstop:
                raise session.Interrupted(session.shouldstop)

        # Returning True tells pytest the run loop was handled here.
        return True