Example #1
def test_verbose(testdir):
    testdir.makepyfile("""
        def test_pass():
            {0}""".format(temporary_failure()))
    result = testdir.runpytest('--reruns', '1', '-v')
    result.stdout.fnmatch_lines_random(['test_*:* RERUN*'])
    assert '1 rerun' in result.stdout.str()
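These examples call a temporary_failure() helper that is defined elsewhere in the plugin's test suite: it returns a code snippet that raises an exception on the first run(s) and then succeeds, so a rerun can pass. A minimal sketch of such a helper, assuming the attempt counter is persisted in a file next to the generated test (the real helper may differ in detail):

def temporary_failure(count=1):
    # Illustrative sketch only: the returned snippet raises on the first
    # `count` executions and passes afterwards. Attempts are tracked on disk
    # so the state survives across reruns (and across xdist workers). The
    # continuation lines are indented to line up with the {0} placeholder in
    # the examples above, which testdir.makepyfile() later dedents.
    return """import os
            _attempts_path = os.path.join(os.path.dirname(__file__), "attempts.txt")
            _attempts = int(open(_attempts_path).read()) if os.path.exists(_attempts_path) else 0
            open(_attempts_path, "w").write(str(_attempts + 1))
            if _attempts < {0}:
                raise Exception("temporary failure, attempt {{0}}".format(_attempts + 1))""".format(count)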
Example #2
def test_no_extra_test_summary_for_reruns_by_default(testdir):
    testdir.makepyfile("""
        def test_pass():
            {0}""".format(temporary_failure()))
    result = testdir.runpytest('--reruns', '1')
    assert 'RERUN' not in result.stdout.str()
    assert '1 rerun' in result.stdout.str()
Example #3
def test_extra_test_summary_for_reruns(testdir):
    testdir.makepyfile("""
        def test_pass():
            {0}""".format(temporary_failure()))
    result = testdir.runpytest('--reruns', '1', '-r', 'R')
    result.stdout.fnmatch_lines_random(['RERUN test_*:*'])
    assert '1 rerun' in result.stdout.str()
Example #4
def test_rerun_passes_after_temporary_setup_failure(testdir):
    testdir.makepyfile('def test_pass(): pass')
    testdir.makeconftest("""
        def pytest_runtest_setup(item):
            {0}""".format(temporary_failure()))
    result = testdir.runpytest('--reruns', '1', '-r', 'R')
    assert_outcomes(result, passed=1, rerun=1)
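assert_outcomes() is another helper from the plugin's test suite; it compares pytest's reported outcome counts against expected values. A hedged sketch, assuming it is built on result.parseoutcomes() (argument names and defaults here are illustrative):

def assert_outcomes(result, passed=1, skipped=0, failed=0, error=0, rerun=0):
    # parseoutcomes() turns the terminal summary line ("1 passed, 1 rerun in ...")
    # into a dict such as {'passed': 1, 'rerun': 1}.
    outcomes = result.parseoutcomes()
    assert outcomes.get('passed', 0) == passed
    assert outcomes.get('skipped', 0) == skipped
    assert outcomes.get('failed', 0) == failed
    assert outcomes.get('error', 0) == error
    assert outcomes.get('rerun', 0) == rerun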
Example #5
def test_xdist_after_temporary_setup_failure_with_junit(testdir):
    """Setup keeps failing through the single allowed rerun, so the xdist run
    ends with one error and the JUnit XML records it as an error, not a
    failure."""
    artifact_path = testdir.tmpdir.strpath + '/artifact.xml'
    testdir.makepyfile('def test_pass(): pass')
    testdir.makeconftest("""
        def pytest_runtest_setup(item):
            {0}""".format(temporary_failure(2)))
    result = testdir.runpytest(
        '--reruns',
        '1',
        '-r',
        'R',
        '--junitxml',
        artifact_path,
        '-n',
        '2',
        '--dist',
        'loadfile',
    )
    assert_outcomes(result, passed=0, error=1, rerun=1)
    with open(artifact_path) as artifact:
        artifact_data = xmltodict.parse(artifact.read())
        # newer pytest versions wrap the <testsuite> element in a <testsuites> root
        if artifact_data.get('testsuites'):
            artifact_data = artifact_data['testsuites']
        assert artifact_data['testsuite']['@errors'] == '1'
        assert artifact_data['testsuite']['@failures'] == '0'
        assert artifact_data['testsuite']['@tests'] == '1'
        assert artifact_data['testsuite']['testcase']
        assert artifact_data['testsuite']['testcase']['error']
Example #6
def test_xdist_after_temporary_setup_failure_with_stats(testdir):
    """Same failure scenario under xdist, verified through the JSON artifact
    written by --reruns-artifact-path: the rerun does not resolve the failure,
    so it is counted as failed rather than resolved."""
    artifact_path = testdir.tmpdir.strpath + '/artifact.json'
    testdir.makepyfile('def test_pass(): pass')
    testdir.makeconftest("""
        def pytest_runtest_setup(item):
            {0}""".format(temporary_failure(2)))
    result = testdir.runpytest(
        '--reruns',
        '1',
        '-r',
        'R',
        '--reruns-artifact-path',
        artifact_path,
        '-n',
        '2',
        '--dist',
        'loadfile',
    )
    assert_outcomes(result, passed=0, error=1, rerun=1)
    with open(artifact_path) as artifact:
        artifact_data = json.load(artifact)
        print(artifact_data)
        assert artifact_data['total_reruns'] == 1
        assert artifact_data['total_failed'] == 1
        assert artifact_data['total_resolved_by_reruns'] == 0
        assert artifact_data['rerun_tests'][0]['status'] == 'failed'
        expected_nodeid = (
            'test_xdist_after_temporary_setup_failure_with_stats.py::test_pass'
        )
        assert artifact_data['rerun_tests'][0]['nodeid'] == expected_nodeid
Example #7
def test_reruns_if_flaky_mark_is_called_with_positional_argument(testdir):
    testdir.makepyfile("""
        import pytest
        @pytest.mark.flaky(2)
        def test_pass():
            {0}""".format(temporary_failure(2)))
    result = testdir.runpytest('-r', 'R')
    assert_outcomes(result, passed=1, rerun=2)
Example #8
def test_rerun_passes_after_temporary_test_failure_with_flaky_mark(testdir):
    testdir.makepyfile("""
        import pytest
        @pytest.mark.flaky(reruns=2)
        def test_pass():
            {0}""".format(temporary_failure(2)))
    result = testdir.runpytest('-r', 'R')
    assert_outcomes(result, passed=1, rerun=2)
Example #9
def test_reruns_stats_after_temporary_setup_failure(testdir):
    artifact_path = testdir.tmpdir.strpath + '/artifact.json'
    testdir.makepyfile('def test_pass(): pass')
    testdir.makeconftest("""
        def pytest_runtest_setup(item):
            {0}""".format(temporary_failure(2)))
    result = testdir.runpytest(
        '--reruns',
        '1',
        '-r',
        'R',
        '--reruns-artifact-path',
        artifact_path,
    )
    assert_outcomes(result, passed=0, error=1, rerun=1)
    with open(artifact_path) as artifact:
        artifact_data = json.load(artifact)
        assert artifact_data['total_reruns'] == 1
        assert artifact_data['total_failed'] == 1
        assert artifact_data['total_resolved_by_reruns'] == 0
        assert len(artifact_data['rerun_tests']) == 1
        assert 'test_pass' in artifact_data['rerun_tests'][0]['nodeid']
        assert 'rerun_trace' in artifact_data['rerun_tests'][0]
        assert 'original_trace' in artifact_data['rerun_tests'][0]
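Based on the assertions in Examples #6 and #9, the JSON artifact written by --reruns-artifact-path has roughly the shape below; the values are illustrative and the trace fields are assumed to hold traceback text.

expected_artifact_shape = {
    'total_reruns': 1,
    'total_failed': 1,
    'total_resolved_by_reruns': 0,
    'rerun_tests': [
        {
            'status': 'failed',
            'nodeid': 'test_module.py::test_pass',
            'original_trace': '...',  # traceback of the initial failure (assumed)
            'rerun_trace': '...',     # traceback of the failed rerun (assumed)
        },
    ],
}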