Example #1
def test_xdist_all_tests_failed_with_stats(testdir):
    artifact_path = testdir.tmpdir.strpath + '/artifact.json'
    make_simple_pytest_suite(testdir,
                             total_failures=2,
                             expected_reruns=1,
                             has_failure=True)
    result = testdir.runpytest(
        '--reruns',
        '1',
        '-r',
        'R',
        '-n',
        '2',
        '--reruns-artifact-path',
        artifact_path,
    )
    assert_outcomes(result, passed=2, rerun=2, failed=2)
    with open(artifact_path) as artifact:
        artifact_data = json.load(artifact)
        assert artifact_data['total_reruns'] == 2
        assert artifact_data['total_failed'] == 2
        assert artifact_data['total_resolved_by_reruns'] == 0
        assert artifact_data['rerun_tests'][0]['status'] == 'failed'
        assert artifact_data['rerun_tests'][1]['status'] == 'failed'
        assert {
            artifact_data['rerun_tests'][0]['nodeid'],
            artifact_data['rerun_tests'][1]['nodeid']
        } == {
            'test_xdist_all_tests_failed_with_stats.py::test_test_failing_0',
            'test_xdist_all_tests_failed_with_stats.py::test_test_failing_1'
        }
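These excerpts assume a handful of shared imports and two helpers, assert_outcomes and make_simple_pytest_suite, that the listing does not show. A minimal sketch of what they might look like (a hypothetical reconstruction under stated assumptions, not the project's actual conftest):

import json
import os

import xmltodict  # third-party dependency used by the junitxml examples

pytest_plugins = 'pytester'  # in conftest.py; provides the testdir fixture


def assert_outcomes(result, passed=0, rerun=0, failed=0):
    # Compare the run's outcome counters, including the 'rerun' outcome
    # added by the rerun plugin.
    outcomes = result.parseoutcomes()
    assert outcomes.get('passed', 0) == passed
    assert outcomes.get('rerun', 0) == rerun
    assert outcomes.get('failed', 0) == failed


def make_simple_pytest_suite(testdir, total_failures=1, expected_reruns=1,
                             has_failure=True):
    # Hypothetical sketch: two stable tests plus total_failures flaky ones
    # (session fixtures omitted for brevity).  A flaky test fails its first
    # expected_reruns runs; with has_failure=True it keeps failing on the
    # rerun as well.
    threshold = expected_reruns + 1 if has_failure else expected_reruns
    failing = '\n'.join(
        'number_{i} = 0\n'
        'def test_test_failing_{i}():\n'
        '    global number_{i}\n'
        '    number_{i} += 1\n'
        '    assert number_{i} == 1 + {n}\n'.format(i=i, n=threshold)
        for i in range(total_failures))
    testdir.makepyfile(
        'def test_stable_1():\n'
        '    assert True\n\n'
        'def test_stable_2():\n'
        '    assert True\n\n' + failing)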
Example #2
def test_xdist_all_tests_failed_with_junit(testdir):
    artifact_path = testdir.tmpdir.strpath + '/artifact.xml'
    make_simple_pytest_suite(testdir,
                             total_failures=2,
                             expected_reruns=1,
                             has_failure=True)
    result = testdir.runpytest(
        '--reruns',
        '1',
        '-r',
        'R',
        '-n',
        '2',
        '--junitxml',
        artifact_path,
    )
    assert_outcomes(result, passed=2, rerun=2, failed=2)
    with open(artifact_path) as artifact:
        artifact_data = xmltodict.parse(artifact.read())
        if artifact_data.get('testsuites'):
            artifact_data = artifact_data['testsuites']
        assert artifact_data['testsuite']['@errors'] == '0'
        assert artifact_data['testsuite']['@failures'] == '2'
        assert artifact_data['testsuite']['@tests'] == '4'
        assert len(artifact_data['testsuite']['testcase']) == 4
        assert len([
            t for t in artifact_data['testsuite']['testcase'] if 'failure' in t
        ]) == 2
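A note on the junitxml assertions: xmltodict maps XML attributes to '@'-prefixed dictionary keys, and newer pytest versions wrap the report in a root testsuites element, which is why the example unwraps it before asserting. For instance:

import xmltodict

doc = xmltodict.parse('<testsuite errors="0" failures="2" tests="4"/>')
assert doc['testsuite']['@errors'] == '0'  # attributes become '@'-keys
assert doc['testsuite']['@tests'] == '4'   # attribute values stay strings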
Example #3
def test_max_reruns_reached(testdir):
    artifact_path = testdir.tmpdir.strpath + '/artifact.json'
    make_simple_pytest_suite(testdir,
                             total_failures=2,
                             expected_reruns=1,
                             has_failure=True)
    result = testdir.runpytest('--reruns', '1', '-r', 'R',
                               '--reruns-artifact-path', artifact_path,
                               '--max-tests-rerun', '1')
    assert_outcomes(result, passed=2, rerun=0, failed=2)
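The assertion above pins down the semantics of --max-tests-rerun: when more tests fail than the cap allows, the plugin skips rerunning entirely (rerun=0) rather than rerunning a subset. A hypothetical sketch of the guard this behavior implies, not the plugin's actual code:

def should_rerun(failed_tests, max_tests_rerun):
    # Rerun only when the failure count fits under the cap; otherwise
    # skip reruns altogether, as the rerun=0 outcome above shows.
    return max_tests_rerun is None or len(failed_tests) <= max_tests_rerun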
Example #4
def test_reruns_stats_max_reruns_reached(testdir):
    artifact_path = testdir.tmpdir.strpath + '/artifact.json'
    make_simple_pytest_suite(testdir,
                             total_failures=2,
                             expected_reruns=1,
                             has_failure=True)
    result = testdir.runpytest('--reruns', '1', '-r', 'R',
                               '--reruns-artifact-path', artifact_path,
                               '--max-tests-rerun', '1')
    assert_outcomes(result, passed=2, rerun=0, failed=2)
    with open(artifact_path) as artifact:
        artifact_data = json.load(artifact)
        assert artifact_data == {
            'total_reruns': 0,
            'total_failed': 0,
            'total_resolved_by_reruns': 0,
            'rerun_tests': []
        }
Example #5
def test_reruns_junit_max_reruns_reached(testdir):
    artifact_path = testdir.tmpdir.strpath + '/artifact.xml'
    make_simple_pytest_suite(testdir,
                             total_failures=2,
                             expected_reruns=1,
                             has_failure=True)
    result = testdir.runpytest('--reruns', '1', '-r', 'R', '--junitxml',
                               artifact_path, '--max-tests-rerun', '1')
    assert_outcomes(result, passed=2, rerun=0, failed=2)
    with open(artifact_path) as artifact:
        artifact_data = xmltodict.parse(artifact.read())
        if artifact_data.get('testsuites'):
            artifact_data = artifact_data['testsuites']
        assert artifact_data['testsuite']['@errors'] == '0'
        assert artifact_data['testsuite']['@failures'] == '2'
        assert artifact_data['testsuite']['@tests'] == '4'
        assert len(artifact_data['testsuite']['testcase']) == 4
        assert artifact_data['testsuite']['testcase'][2]['failure']
        assert artifact_data['testsuite']['testcase'][3]['failure']
Example #6
def test_reruns_stats_all_tests_passed(testdir):
    artifact_path = testdir.tmpdir.strpath + '/artifact.json'
    make_simple_pytest_suite(testdir, expected_reruns=0, has_failure=False)
    result = testdir.runpytest(
        '--reruns',
        '1',
        '-r',
        'R',
        '--reruns-artifact-path',
        artifact_path,
    )
    assert_outcomes(result, passed=3, rerun=0)
    with open(artifact_path) as artifact:
        artifact_data = json.load(artifact)
        assert artifact_data == {
            'total_reruns': 0,
            'total_failed': 0,
            'total_resolved_by_reruns': 0,
            'rerun_tests': []
        }
Example #7
def test_reruns_junit_all_tests_passed(testdir):
    artifact_path = testdir.tmpdir.strpath + '/artifact.xml'
    make_simple_pytest_suite(testdir, expected_reruns=0, has_failure=False)
    result = testdir.runpytest(
        '--reruns',
        '1',
        '-r',
        'R',
        '--junitxml',
        artifact_path,
    )
    assert_outcomes(result, passed=3, rerun=0)
    with open(artifact_path) as artifact:
        artifact_data = xmltodict.parse(artifact.read())
        if artifact_data.get('testsuites'):
            artifact_data = artifact_data['testsuites']
        assert artifact_data['testsuite']['@errors'] == '0'
        assert artifact_data['testsuite']['@failures'] == '0'
        assert artifact_data['testsuite']['@tests'] == '3'
        assert len(artifact_data['testsuite']['testcase']) == 3
Example #8
def test_xdist_worker_rerun_stats(testdir):
    artifact_path = testdir.tmpdir.strpath + '/artifact.json'
    make_simple_pytest_suite(testdir,
                             total_failures=2,
                             expected_reruns=1,
                             has_failure=False)
    result = testdir.runpytest(
        '--reruns',
        '1',
        '-r',
        'R',
        '-n',
        '2',
        '--dist',
        'loadfile',
        '--reruns-artifact-path',
        artifact_path,
        '--xdist-worker-reruns-artifact',
    )
    assert_outcomes(result, passed=4, rerun=2)
    if os.path.isfile(testdir.tmpdir.strpath + '/gw0_artifact.json'):
        xdist_artifact_path = testdir.tmpdir.strpath + '/gw0_artifact.json'
    else:
        xdist_artifact_path = testdir.tmpdir.strpath + '/gw1_artifact.json'

    with open(xdist_artifact_path) as artifact:
        artifact_data = json.load(artifact)
        assert artifact_data['total_reruns'] == 2
        assert artifact_data['total_failed'] == 2
        assert artifact_data['total_resolved_by_reruns'] == 2
        assert artifact_data['rerun_tests'][0]['status'] == 'flake'
        assert artifact_data['rerun_tests'][1]['status'] == 'flake'
        assert {
            artifact_data['rerun_tests'][0]['nodeid'],
            artifact_data['rerun_tests'][1]['nodeid']
        } == {
            'test_xdist_worker_rerun_stats.py::test_test_failing_0',
            'test_xdist_worker_rerun_stats.py::test_test_failing_1'
        }
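With --xdist-worker-reruns-artifact, each xdist worker apparently writes its own artifact prefixed with its worker id (gw0, gw1, ...), which is why the test probes both candidate paths. A hypothetical helper illustrating that naming scheme:

import os

def worker_artifact_path(artifact_path, worker_id):
    # e.g. /tmp/.../artifact.json -> /tmp/.../gw0_artifact.json
    directory, name = os.path.split(artifact_path)
    return os.path.join(directory, '{}_{}'.format(worker_id, name))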
Example #9
def test_reruns_stats_2_tests_failed(testdir):
    artifact_path = testdir.tmpdir.strpath + '/artifact.json'
    make_simple_pytest_suite(testdir,
                             total_failures=2,
                             expected_reruns=1,
                             has_failure=True)
    result = testdir.runpytest(
        '--reruns',
        '1',
        '-r',
        'R',
        '--reruns-artifact-path',
        artifact_path,
    )
    assert_outcomes(result, passed=2, rerun=2, failed=2)
    with open(artifact_path) as artifact:
        artifact_data = json.load(artifact)
        assert artifact_data['total_reruns'] == 2
        assert artifact_data['total_failed'] == 2
        assert artifact_data['total_resolved_by_reruns'] == 0
        assert len(artifact_data['rerun_tests']) == 2
        assert artifact_data['rerun_tests'][0]['status'] == 'failed'
        assert artifact_data['rerun_tests'][1]['status'] == 'failed'
Example #10
def test_reruns_stats_all_tests_resolved(testdir):
    artifact_path = testdir.tmpdir.strpath + '/artifact.json'
    make_simple_pytest_suite(testdir, expected_reruns=1, has_failure=False)
    result = testdir.runpytest(
        '--reruns',
        '1',
        '-r',
        'R',
        '--reruns-artifact-path',
        artifact_path,
    )
    assert_outcomes(result, passed=3, rerun=1)
    with open(artifact_path) as artifact:
        artifact_data = json.load(artifact)
        assert artifact_data == {
            'total_reruns': 1,
            'total_failed': 1,
            'total_resolved_by_reruns': 1,
            'rerun_tests': [{
                'nodeid': 'test_reruns_stats_all_tests_resolved.py::test_test_failing_0',
                'status': 'flake',
                'rerun_trace': {
                    'setup': {
                        'caplog': '',
                        'capstderr': '',
                        'capstdout': 'session_fixture_2 setup\nsession_fixture_1 setup\nsession_fixture_2 setup\n',
                        'text_repr': ''
                    },
                    'call': {
                        'caplog': '',
                        'capstderr': '',
                        'capstdout': 'session_fixture_2 setup\nsession_fixture_1 setup\nsession_fixture_2 setup\n',
                        'text_repr': ''
                    },
                    'teardown': {
                        'caplog': '',
                        'capstderr': '',
                        'capstdout': 'session_fixture_2 setup\nsession_fixture_1 setup\nsession_fixture_2 setup\nsession_fixture_2 teardown\nsession_fixture_1 teardown\n',
                        'text_repr': ''
                    }
                },
                'original_trace': {
                    'setup': {
                        'caplog': '',
                        'capstderr': '',
                        'capstdout': 'session_fixture_2 setup\n',
                        'text_repr': ''
                    },
                    'call': {
                        'caplog': '',
                        'capstderr': '',
                        'capstdout': 'session_fixture_2 setup\n',
                        'text_repr': 'session_fixture_2 = None\n\n    def test_test_failing_0(session_fixture_2):\n        global number_0\n        number_0 += 1\n>       assert number_0 == 1 + 1\nE       assert 1 == (1 + 1)\n\ntest_reruns_stats_all_tests_resolved.py:60: AssertionError'
                    },
                    'teardown': {
                        'caplog': '',
                        'capstderr': '',
                        'capstdout': 'session_fixture_2 setup\n',
                        'text_repr': ''
                    }
                }
            }]
        }
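As the artifact above shows, each rerun_tests entry records one capture block per test phase for both the original run and the rerun; status is 'flake' when the rerun passed and 'failed' (as in Examples #1 and #9) when it did not. A small validator sketch of that shape:

def check_rerun_entry(entry):
    # Sanity-check one rerun_tests entry against the shape shown above.
    assert entry['status'] in ('flake', 'failed')
    for trace_key in ('original_trace', 'rerun_trace'):
        for phase in ('setup', 'call', 'teardown'):
            capture = entry[trace_key][phase]
            assert set(capture) == {'caplog', 'capstderr',
                                    'capstdout', 'text_repr'}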