def test_rerun_passes_after_temporary_setup_failure(testdir):
    """A setup-phase failure that clears on retry ends as passed + 1 rerun."""
    testdir.makepyfile('def test_pass(): pass')
    testdir.makeconftest("""
        def pytest_runtest_setup(item):
            {0}""".format(temporary_failure()))
    run = testdir.runpytest('--reruns', '1', '-r', 'R')
    assert_outcomes(run, passed=1, rerun=1)
def test_xdist_after_temporary_setup_failure_with_junit(testdir):
    """Under xdist, a setup failure that outlasts the rerun budget is reported
    as a single error in the junit XML artifact."""
    artifact_path = testdir.tmpdir.strpath + '/artifact.xml'
    testdir.makepyfile('def test_pass(): pass')
    testdir.makeconftest("""
        def pytest_runtest_setup(item):
            {0}""".format(temporary_failure(2)))
    run = testdir.runpytest(
        '--reruns', '1',
        '-r', 'R',
        '--junitxml', artifact_path,
        '-n', '2',
        '--dist', 'loadfile',
    )
    assert_outcomes(run, passed=0, error=1, rerun=1)

    with open(artifact_path) as artifact:
        report = xmltodict.parse(artifact.read())
    # Newer junitxml wraps everything in a <testsuites> root element.
    if report.get('testsuites'):
        report = report['testsuites']
    suite = report['testsuite']
    assert suite['@errors'] == '1'
    assert suite['@failures'] == '0'
    assert suite['@tests'] == '1'
    assert suite['testcase']
    assert suite['testcase']['error']
def test_xdist_all_tests_failed_with_stats(testdir):
    """When every flaky test keeps failing under xdist, the stats artifact
    records both as 'failed' with nothing resolved by reruns."""
    artifact_path = testdir.tmpdir.strpath + '/artifact.json'
    make_simple_pytest_suite(testdir, total_failures=2, expected_reruns=1,
                             has_failure=True)
    result = testdir.runpytest(
        '--reruns', '1',
        '-r', 'R',
        '-n', '2',
        '--reruns-artifact-path', artifact_path,
    )
    assert_outcomes(result, passed=2, rerun=2, failed=2)
    with open(artifact_path) as artifact:
        artifact_data = json.load(artifact)
    assert artifact_data['total_reruns'] == 2
    assert artifact_data['total_failed'] == 2
    assert artifact_data['total_resolved_by_reruns'] == 0
    assert artifact_data['rerun_tests'][0]['status'] == 'failed'
    assert artifact_data['rerun_tests'][1]['status'] == 'failed'
    # Worker scheduling makes entry order nondeterministic, so compare as
    # sets (set literals replace the former set([...]) calls).
    assert {
        artifact_data['rerun_tests'][0]['nodeid'],
        artifact_data['rerun_tests'][1]['nodeid'],
    } == {
        'test_xdist_all_tests_failed_with_stats.py::test_test_failing_1',
        'test_xdist_all_tests_failed_with_stats.py::test_test_failing_0',
    }
def test_xdist_all_tests_failed_with_junit(testdir):
    """Persistent failures under xdist show up as exactly two junit failures
    among the four recorded testcases."""
    artifact_path = testdir.tmpdir.strpath + '/artifact.xml'
    make_simple_pytest_suite(testdir, total_failures=2, expected_reruns=1,
                             has_failure=True)
    run = testdir.runpytest(
        '--reruns', '1',
        '-r', 'R',
        '-n', '2',
        '--junitxml', artifact_path,
    )
    assert_outcomes(run, passed=2, rerun=2, failed=2)

    with open(artifact_path) as artifact:
        report = xmltodict.parse(artifact.read())
    if report.get('testsuites'):
        report = report['testsuites']
    suite = report['testsuite']
    assert suite['@errors'] == '0'
    assert suite['@failures'] == '2'
    assert suite['@tests'] == '4'
    assert len(suite['testcase']) == 4
    failed_cases = [case for case in suite['testcase'] if 'failure' in case]
    assert len(failed_cases) == 2
def test_xdist_after_temporary_setup_failure_with_stats(testdir):
    """Under xdist, a setup failure that exhausts its single rerun is recorded
    in the stats artifact as one failed rerun of test_pass."""
    artifact_path = testdir.tmpdir.strpath + '/artifact.json'
    testdir.makepyfile('def test_pass(): pass')
    testdir.makeconftest("""
        def pytest_runtest_setup(item):
            {0}""".format(temporary_failure(2)))
    result = testdir.runpytest(
        '--reruns', '1',
        '-r', 'R',
        '--reruns-artifact-path', artifact_path,
        '-n', '2',
        '--dist', 'loadfile',
    )
    assert_outcomes(result, passed=0, error=1, rerun=1)
    with open(artifact_path) as artifact:
        artifact_data = json.load(artifact)
    # A leftover debug print(artifact_data) was removed here.
    assert artifact_data['total_reruns'] == 1
    assert artifact_data['total_failed'] == 1
    assert artifact_data['total_resolved_by_reruns'] == 0
    assert artifact_data['rerun_tests'][0]['status'] == 'failed'
    assert artifact_data['rerun_tests'][0][
        'nodeid'] == 'test_xdist_after_temporary_setup_failure_with_stats.py::test_pass'
def test_rerun_fails_after_consistent_setup_failure(testdir):
    """A setup hook that always raises exhausts its rerun and ends as an error."""
    testdir.makepyfile('def test_pass(): pass')
    testdir.makeconftest("""
        def pytest_runtest_setup(item):
            raise Exception('Setup failure')""")
    run = testdir.runpytest('--reruns', '1')
    assert_outcomes(run, passed=0, error=1, rerun=1)
def test_reruns_if_flaky_mark_is_called_with_positional_argument(testdir):
    """@pytest.mark.flaky(2) with a positional rerun count retries twice."""
    testdir.makepyfile("""
        import pytest

        @pytest.mark.flaky(2)
        def test_pass():
            {0}""".format(temporary_failure(2)))
    run = testdir.runpytest('-r', 'R')
    assert_outcomes(run, passed=1, rerun=2)
def test_rerun_with_resultslog(testdir):
    """Reruns work alongside --result-log output without interfering."""
    testdir.makepyfile("""
        def test_fail():
            assert False""")
    run = testdir.runpytest('--reruns', '2', '--result-log', './pytest.log')
    assert_outcomes(run, passed=0, failed=1, rerun=2)
def test_rerun_passes_after_temporary_test_failure_with_flaky_mark(testdir):
    """@pytest.mark.flaky(reruns=2) retries a temporarily failing test to green."""
    testdir.makepyfile("""
        import pytest

        @pytest.mark.flaky(reruns=2)
        def test_pass():
            {0}""".format(temporary_failure(2)))
    run = testdir.runpytest('-r', 'R')
    assert_outcomes(run, passed=1, rerun=2)
def test_no_rerun_on_xfail_call(testdir):
    """An imperative pytest.xfail() inside the test body must not be rerun."""
    reason = str(random.random())
    testdir.makepyfile("""
        import pytest

        def test_xfail():
            pytest.xfail('{0}')
        """.format(reason))
    run = testdir.runpytest('--reruns', '1')
    assert_outcomes(run, passed=0, xfailed=1)
def test_max_reruns_reached(testdir):
    """When failures exceed --max-tests-rerun, no rerun happens at all."""
    artifact_path = testdir.tmpdir.strpath + '/artifact.json'
    make_simple_pytest_suite(testdir, total_failures=2, expected_reruns=1,
                             has_failure=True)
    run = testdir.runpytest(
        '--reruns', '1',
        '-r', 'R',
        '--reruns-artifact-path', artifact_path,
        '--max-tests-rerun', '1',
    )
    assert_outcomes(run, passed=2, rerun=0, failed=2)
def test_no_rerun_on_xpass(testdir):
    """An unexpectedly passing @pytest.mark.xfail test must not be rerun."""
    # The original built a random `reason` and called .format(reason) on a
    # template containing no placeholder — both were dead code and are removed.
    testdir.makepyfile("""
        import pytest

        @pytest.mark.xfail()
        def test_xpass():
            pass
        """)
    result = testdir.runpytest('--reruns', '1')
    assert_outcomes(result, passed=0, xpassed=1)
def test_no_rerun_on_skipif_mark(testdir):
    """A skipped test (skipif mark) consumes no reruns."""
    reason = str(random.random())
    testdir.makepyfile("""
        import pytest

        @pytest.mark.skipif(reason='{0}')
        def test_skip():
            pass
        """.format(reason))
    run = testdir.runpytest('--reruns', '1')
    assert_outcomes(run, passed=0, skipped=1)
def test_rerun_on_class_setup_error_with_reruns(testdir):
    """A failing setup_class errors the test and is retried once."""
    testdir.makepyfile("""
        class TestFoo(object):
            @classmethod
            def setup_class(cls):
                assert False

            def test_pass():
                pass""")
    run = testdir.runpytest('--reruns', '1')
    assert_outcomes(run, passed=0, error=1, rerun=1)
def test_reruns_with_delay(testdir, delay_time):
    """--reruns-delay sleeps between attempts; negative delays clamp to 0.

    Uses mock.patch instead of assigning `time.sleep = mock.MagicMock()`,
    which leaked the mock into every later test in the session.
    """
    testdir.makepyfile("""
        def test_fail():
            assert False""")
    with mock.patch('time.sleep') as fake_sleep:
        result = testdir.runpytest('--reruns', '3',
                                   '--reruns-delay', str(delay_time))
    # The plugin clamps negative delays to zero before sleeping.
    if delay_time < 0:
        delay_time = 0
    fake_sleep.assert_called_with(delay_time)
    assert_outcomes(result, passed=0, failed=1, rerun=3)
def test_reruns_with_delay_marker(testdir, delay_time):
    """@pytest.mark.flaky(reruns_delay=...) sleeps between attempts; negative
    delays clamp to 0.

    Uses mock.patch instead of assigning `time.sleep = mock.MagicMock()`,
    which leaked the mock into every later test in the session.
    """
    testdir.makepyfile("""
        import pytest

        @pytest.mark.flaky(reruns=2, reruns_delay={})
        def test_fail_two():
            assert False""".format(delay_time))
    with mock.patch('time.sleep') as fake_sleep:
        result = testdir.runpytest()
    # The plugin clamps negative delays to zero before sleeping.
    if delay_time < 0:
        delay_time = 0
    fake_sleep.assert_called_with(delay_time)
    assert_outcomes(result, passed=0, failed=1, rerun=2)
def test_reruns_stats_max_reruns_reached(testdir):
    """When --max-tests-rerun suppresses reruns, the stats artifact is empty."""
    artifact_path = testdir.tmpdir.strpath + '/artifact.json'
    make_simple_pytest_suite(testdir, total_failures=2, expected_reruns=1,
                             has_failure=True)
    run = testdir.runpytest(
        '--reruns', '1',
        '-r', 'R',
        '--reruns-artifact-path', artifact_path,
        '--max-tests-rerun', '1',
    )
    assert_outcomes(run, passed=2, rerun=0, failed=2)
    with open(artifact_path) as artifact:
        stats = json.load(artifact)
    assert stats == {
        'total_reruns': 0,
        'total_failed': 0,
        'total_resolved_by_reruns': 0,
        'rerun_tests': [],
    }
def test_reruns_junit_max_reruns_reached(testdir):
    """With reruns suppressed by --max-tests-rerun, junit records the two
    original failures among the four testcases."""
    artifact_path = testdir.tmpdir.strpath + '/artifact.xml'
    make_simple_pytest_suite(testdir, total_failures=2, expected_reruns=1,
                             has_failure=True)
    run = testdir.runpytest(
        '--reruns', '1',
        '-r', 'R',
        '--junitxml', artifact_path,
        '--max-tests-rerun', '1',
    )
    assert_outcomes(run, passed=2, rerun=0, failed=2)

    with open(artifact_path) as artifact:
        report = xmltodict.parse(artifact.read())
    if report.get('testsuites'):
        report = report['testsuites']
    suite = report['testsuite']
    assert suite['@errors'] == '0'
    assert suite['@failures'] == '2'
    assert suite['@tests'] == '4'
    assert len(suite['testcase']) == 4
    assert suite['testcase'][2]['failure']
    assert suite['testcase'][3]['failure']
def test_reruns_stats_all_tests_passed(testdir):
    """A fully green suite produces an empty reruns stats artifact."""
    artifact_path = testdir.tmpdir.strpath + '/artifact.json'
    make_simple_pytest_suite(testdir, expected_reruns=0, has_failure=False)
    run = testdir.runpytest(
        '--reruns', '1',
        '-r', 'R',
        '--reruns-artifact-path', artifact_path,
    )
    assert_outcomes(run, passed=3, rerun=0)
    with open(artifact_path) as artifact:
        stats = json.load(artifact)
    assert stats == {
        'total_reruns': 0,
        'total_failed': 0,
        'total_resolved_by_reruns': 0,
        'rerun_tests': [],
    }
def test_reruns_junit_all_tests_passed(testdir):
    """A fully green suite yields a junit report with no errors or failures."""
    artifact_path = testdir.tmpdir.strpath + '/artifact.xml'
    make_simple_pytest_suite(testdir, expected_reruns=0, has_failure=False)
    run = testdir.runpytest(
        '--reruns', '1',
        '-r', 'R',
        '--junitxml', artifact_path,
    )
    assert_outcomes(run, passed=3, rerun=0)

    with open(artifact_path) as artifact:
        report = xmltodict.parse(artifact.read())
    if report.get('testsuites'):
        report = report['testsuites']
    suite = report['testsuite']
    assert suite['@errors'] == '0'
    assert suite['@failures'] == '0'
    assert suite['@tests'] == '3'
    assert len(suite['testcase']) == 3
def test_xdist_worker_rerun_stats(testdir):
    """--xdist-worker-reruns-artifact writes a per-worker stats file holding
    both flaky tests, resolved by their reruns."""
    artifact_path = testdir.tmpdir.strpath + '/artifact.json'
    make_simple_pytest_suite(testdir, total_failures=2, expected_reruns=1,
                             has_failure=False)
    result = testdir.runpytest(
        '--reruns', '1',
        '-r', 'R',
        '-n', '2',
        '--dist', 'loadfile',
        '--reruns-artifact-path', artifact_path,
        '--xdist-worker-reruns-artifact',
    )
    assert_outcomes(result, passed=4, rerun=2)
    # With --dist loadfile both flaky tests run on one worker, but which
    # gateway id that worker gets is nondeterministic.
    if os.path.isfile(testdir.tmpdir.strpath + '/gw0_artifact.json'):
        xdist_artifact_path = testdir.tmpdir.strpath + '/gw0_artifact.json'
    else:
        xdist_artifact_path = testdir.tmpdir.strpath + '/gw1_artifact.json'
    with open(xdist_artifact_path) as artifact:
        artifact_data = json.load(artifact)
    assert artifact_data['total_reruns'] == 2
    assert artifact_data['total_failed'] == 2
    assert artifact_data['total_resolved_by_reruns'] == 2
    assert artifact_data['rerun_tests'][0]['status'] == 'flake'
    assert artifact_data['rerun_tests'][1]['status'] == 'flake'
    # Entry order is not guaranteed; compare as sets (set literals replace
    # the former set([...]) calls).
    assert {
        artifact_data['rerun_tests'][0]['nodeid'],
        artifact_data['rerun_tests'][1]['nodeid'],
    } == {
        'test_xdist_worker_rerun_stats.py::test_test_failing_1',
        'test_xdist_worker_rerun_stats.py::test_test_failing_0',
    }
def test_reruns_stats_2_tests_failed(testdir):
    """Two persistently failing tests are both recorded as 'failed' in the
    stats artifact, with nothing resolved by reruns."""
    artifact_path = testdir.tmpdir.strpath + '/artifact.json'
    make_simple_pytest_suite(testdir, total_failures=2, expected_reruns=1,
                             has_failure=True)
    run = testdir.runpytest(
        '--reruns', '1',
        '-r', 'R',
        '--reruns-artifact-path', artifact_path,
    )
    assert_outcomes(run, passed=2, rerun=2, failed=2)
    with open(artifact_path) as artifact:
        stats = json.load(artifact)
    assert stats['total_reruns'] == 2
    assert stats['total_failed'] == 2
    assert stats['total_resolved_by_reruns'] == 0
    assert len(stats['rerun_tests']) == 2
    assert stats['rerun_tests'][0]['status'] == 'failed'
    assert stats['rerun_tests'][1]['status'] == 'failed'
def test_reruns_stats_after_temporary_setup_failure(testdir):
    """A setup failure that survives its one rerun lands in the stats artifact
    with both the original and the rerun traces attached."""
    artifact_path = testdir.tmpdir.strpath + '/artifact.json'
    testdir.makepyfile('def test_pass(): pass')
    testdir.makeconftest("""
        def pytest_runtest_setup(item):
            {0}""".format(temporary_failure(2)))
    run = testdir.runpytest(
        '--reruns', '1',
        '-r', 'R',
        '--reruns-artifact-path', artifact_path,
    )
    assert_outcomes(run, passed=0, error=1, rerun=1)
    with open(artifact_path) as artifact:
        stats = json.load(artifact)
    assert stats['total_reruns'] == 1
    assert stats['total_failed'] == 1
    assert stats['total_resolved_by_reruns'] == 0
    assert len(stats['rerun_tests']) == 1
    rerun_entry = stats['rerun_tests'][0]
    assert 'test_pass' in rerun_entry['nodeid']
    assert 'rerun_trace' in rerun_entry
    assert 'original_trace' in rerun_entry
def test_reruns_stats_all_tests_resolved(testdir):
    """A single flake resolved by its rerun is recorded with full per-phase
    capture (caplog/capstderr/capstdout/text_repr) for both attempts.

    NOTE(review): the expected `text_repr` below was reconstructed from a
    whitespace-mangled source; interior space runs may need re-verification
    against an actual plugin run.
    """
    artifact_path = testdir.tmpdir.strpath + '/artifact.json'
    make_simple_pytest_suite(testdir, expected_reruns=1, has_failure=False)
    run = testdir.runpytest(
        '--reruns', '1',
        '-r', 'R',
        '--reruns-artifact-path', artifact_path,
    )
    assert_outcomes(run, passed=3, rerun=1)
    with open(artifact_path) as artifact:
        stats = json.load(artifact)
    expected = {
        'total_reruns': 1,
        'total_failed': 1,
        'total_resolved_by_reruns': 1,
        'rerun_tests': [
            {
                'nodeid': 'test_reruns_stats_all_tests_resolved.py::test_test_failing_0',
                'status': 'flake',
                'rerun_trace': {
                    'teardown': {
                        'caplog': '',
                        'capstderr': '',
                        'capstdout': 'session_fixture_2 setup\nsession_fixture_1 setup\nsession_fixture_2 setup\nsession_fixture_2 teardown\nsession_fixture_1 teardown\n',
                        'text_repr': ''
                    },
                    'setup': {
                        'caplog': '',
                        'capstderr': '',
                        'capstdout': 'session_fixture_2 setup\nsession_fixture_1 setup\nsession_fixture_2 setup\n',
                        'text_repr': ''
                    },
                    'call': {
                        'caplog': '',
                        'capstderr': '',
                        'capstdout': 'session_fixture_2 setup\nsession_fixture_1 setup\nsession_fixture_2 setup\n',
                        'text_repr': ''
                    }
                },
                'original_trace': {
                    'call': {
                        'caplog': '',
                        'capstderr': '',
                        'capstdout': 'session_fixture_2 setup\n',
                        'text_repr': 'session_fixture_2 = None\n\n def test_test_failing_0(session_fixture_2):\n global number_0\n number_0 += 1\n> assert number_0 == 1 + 1\nE assert 1 == (1 + 1)\n\ntest_reruns_stats_all_tests_resolved.py:60: AssertionError',
                    },
                    'setup': {
                        'caplog': '',
                        'capstderr': '',
                        'capstdout': 'session_fixture_2 setup\n',
                        'text_repr': ''
                    },
                    'teardown': {
                        'caplog': '',
                        'capstderr': '',
                        'capstdout': 'session_fixture_2 setup\n',
                        'text_repr': ''
                    }
                }
            },
        ]
    }
    assert stats == expected
def test_rerun_fails_after_consistent_test_failure(testdir):
    """A test that always fails is retried once and still reported failed."""
    testdir.makepyfile('def test_fail(): assert False')
    run = testdir.runpytest('--reruns', '1')
    assert_outcomes(run, passed=0, failed=1, rerun=1)
def test_no_rerun_on_pass(testdir):
    """A passing test triggers no rerun (assert_outcomes defaults apply)."""
    testdir.makepyfile('def test_pass(): pass')
    run = testdir.runpytest('--reruns', '1')
    assert_outcomes(run)