Example #1
0
def test_doctests(venv):
    """Running doctests.py directly reports start/finish for the factorial doctest."""
    name = '__main__.factorial'
    output = run_directly(venv, 'doctests.py')
    assert_service_messages(output, [
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
def test_params_2(venv):
    """Parametrized tests with URL-like params are reported under escaped names."""
    output = run(venv, 'params_test_2.py')

    names = [
        'tests.guinea-pigs.pytest.params_test_2.test(None-https://facebook_com/)',
        "tests.guinea-pigs.pytest.params_test_2.test(None-https://facebook_com/share_php?http://foo_com/)",
    ]

    expected = [ServiceMessage('testCount', {'count': "2"})]
    for n in names:
        expected.append(ServiceMessage('testStarted', {'name': n}))
        expected.append(ServiceMessage('testFinished', {'name': n}))
    assert_service_messages(output, expected)
def test_num_diff(venv):
    """A failed numeric assertion reports expected/actual values for diff display."""
    output = run(venv, "../diff_assert_error_nums.py")
    name = 'tests.guinea-pigs.diff_assert_error_nums.FooTest.test_test'
    failure = ServiceMessage('testFailed', {
        'name': name,
        "expected": "123",
        "actual": "456",
    })
    assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name}),
        failure,
        ServiceMessage('testFinished', {'name': name}),
    ])
Example #4
0
def test_hierarchy(venv):
    """A test nested in namespace packages keeps its full dotted name."""
    output = run(venv, 'namespace')
    name = 'tests.guinea-pigs.pytest.namespace.pig_test.TestSmoke.test_smoke'
    assert_service_messages(output, [
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
def test_subtest_mixed_failure(venv):
    """Subtests with mixed outcomes: one block per subtest, overall test failure.

    Verifies the failed-subtests summary, the assertion text, and the compared
    values all appear in the testFailed details.
    """
    # unittest subtests require the unittest2 backport on Python < 3.4
    if sys.version_info < (3, 4):
        venv = virtual_environments.prepare_virtualenv(
            list(venv.packages) + ["unittest2"])

    output = run_directly(venv, 'subtest_mixed_failure.py')
    name = '__main__.TestXXX.testSubtestFailure'

    def opened(block, result):
        # blockOpened carries the subtest id as 'name' and the test as flowId
        return ServiceMessage('blockOpened', {
            'name': block, 'flowId': name, 'subTestResult': result})

    def closed(block):
        return ServiceMessage('blockClosed', {'name': block, 'flowId': name})

    ms = assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        opened('(i=0)', 'Success'),
        closed('(i=0)'),
        opened("(i=|'abc_xxx|')", 'Failure'),
        ServiceMessage('testStdErr', {'name': name, 'flowId': name}),
        closed("(i=|'abc_xxx|')"),
        ServiceMessage('testFailed', {
            'message': 'Failure', 'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
    details = match(ms, ServiceMessage('testFailed', {'name': name})).params['details']
    assert details.find("Failed subtests list: (i=|'abc_xxx|')|n|n") >= 0
    assert details.find("AssertionError") > 0
    assert details.find("6 == 1") > 0
def test_subtest_skip(venv):
    """A skipped subtest opens a Skip block and logs the reason to testStdOut."""
    # unittest subtests require the unittest2 backport on Python < 3.4
    if sys.version_info < (3, 4):
        venv = virtual_environments.prepare_virtualenv(
            list(venv.packages) + ["unittest2"])

    output = run_directly(venv, 'subtest_skip.py')
    name = '__main__.TestXXX.testSubtestSkip'

    def opened(block, result):
        # blockOpened carries the subtest id as 'name' and the test as flowId
        return ServiceMessage('blockOpened', {
            'name': block, 'flowId': name, 'subTestResult': result})

    def closed(block):
        return ServiceMessage('blockClosed', {'name': block, 'flowId': name})

    assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        opened('(i=2)', 'Skip'),
        ServiceMessage('testStdOut', {
            'name': name,
            'flowId': name,
            'out': 'SubTest skipped: skip reason|n'
        }),
        closed('(i=2)'),
        opened('(i=0)', 'Success'),
        closed('(i=0)'),
        opened('(i=1)', 'Success'),
        closed('(i=1)'),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
Example #7
0
def test_setup_package_error(venv):
    """An error in a package setup fixture is reported as a synthetic failed test."""
    output = run(venv, 'setup_package_error')
    name = 'namespace2.setup'
    ms = assert_service_messages(output, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFailed', {'name': name, 'flowId': name,
                                      'message': 'error in setup context'}),
        ServiceMessage('testFinished', {'name': name}),
    ])
    details = match(ms, ServiceMessage('testFailed', {'name': name})).params['details']
    # details must start with the traceback and mention the assertion type
    assert details.find("Traceback") == 0
    assert details.find("AssertionError") > 0
Example #8
0
def test_teardown_function_error(venv):
    """An error in a function-level teardown marks the test itself as failed."""
    output = run(venv, 'teardown_function_error')
    name = 'testa.test'
    ms = assert_service_messages(output, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFailed', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name}),
    ])
    details = match(ms, ServiceMessage('testFailed', {'name': name})).params['details']
    # details must start with the traceback and mention the assertion type
    assert details.find("Traceback") == 0
    assert details.find("AssertionError") > 0
def test_docstrings(venv):
    """A test function's docstring is appended to the reported test name."""
    output = run(venv, 'docstrings')
    name = 'testa.test_func (My cool test_name)'
    assert_service_messages(output, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
def test_fail_with_msg(venv):
    """The user-supplied failure message ends up in the testFailed details."""
    output = run(venv, 'nose-guinea-pig.py', 'GuineaPig', 'test_fail_with_msg')
    name = 'nose-guinea-pig.GuineaPig.test_fail_with_msg'
    ms = assert_service_messages(output, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFailed', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name}),
    ])
    failed = match(ms, ServiceMessage('testFailed', {'name': name}))
    assert failed.params['details'].find("Bitte keine Werbung") > 0
def test_discovery_errors(venv):
    """An unimportable test module is surfaced as a failed test with ImportError details."""
    output = run_directly(venv, 'discovery_errors.py')

    ms = assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {}),
        ServiceMessage('testFailed', {'message': 'Error'}),
        ServiceMessage('testFinished', {}),
    ])

    failed = match(ms, ServiceMessage('testFailed', {}))
    assert failed.params['details'].index("ImportError") > 0
def test_teardown_package_error(venv):
    """A package-teardown error yields an extra failed 'teardown' test after the real one."""
    output = run(venv, 'teardown_package_error')
    name = 'namespace2.teardown'
    ms = assert_service_messages(output, [
        ServiceMessage('testStarted', {'name': 'namespace2.testa.test_mycode'}),
        ServiceMessage('testFinished', {'name': 'namespace2.testa.test_mycode'}),
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFailed', {'name': name, 'flowId': name,
                                      'message': 'error in teardown context'}),
        ServiceMessage('testFinished', {'name': name}),
    ])
    details = ms[3].params['details']
    # details must start with the traceback and mention the assertion type
    assert details.find("Traceback") == 0
    assert details.find("AssertionError") > 0
def test_fail(venv):
    """self.fail(...) in a unittest case is reported as a Failure with source context."""
    output = run_directly(venv, 'fail_test.py')
    name = '__main__.TestXXX.runTest'
    ms = assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testFailed', {'name': name, 'message': 'Failure',
                                      'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])

    failed = match(ms, ServiceMessage('testFailed', {'name': name}))
    assert failed.params['details'].index('fail("Grr")') > 0
def test_doctests(venv):
    """With --with-doctest, doctest functions are reported under their dotted path."""
    output = run(venv, 'doctests', options="--with-doctest")
    name = 'doctests.namespace1.d.multiply'
    assert_service_messages(output, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
def test_docstring(venv):
    """A unittest method's docstring is appended to the reported test name."""
    output = run_directly(venv, 'docstring.py')
    name = '__main__.TestXXX.runTest (A test_)'
    assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
def test_fail(venv):
    """A plain failing test carries the traceback and compared values in details."""
    output = run(venv, 'nose-guinea-pig.py', 'GuineaPig', 'test_fail')
    name = 'nose-guinea-pig.GuineaPig.test_fail'
    ms = assert_service_messages(output, [
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFailed', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name}),
    ])

    details = ms[1].params['details']
    assert details.find("Traceback") == 0
    assert details.find("2 * 2 == 5") > 0
def test_teardown_class_error(venv):
    """A class-teardown error is reported as an extra failed 'teardown' test."""
    output = run(venv, 'teardown_class_error')
    name = 'testa.TestXXX.teardown'
    ms = assert_service_messages(output, [
        ServiceMessage('testStarted', {'name': 'testa.TestXXX.runTest'}),
        ServiceMessage('testFinished', {'name': 'testa.TestXXX.runTest'}),
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFailed', {'name': name, 'flowId': name,
                                      'message': 'error in teardown context'}),
        ServiceMessage('testFinished', {'name': name}),
    ])
    details = ms[3].params['details']
    assert details.find("Traceback") == 0
    assert details.find("RRR") > 0
Example #18
0
def test_params(venv):
    """Parametrized pytest ids are escaped; the failing case carries a diff message
    and the passing first case carries metainfo with the raw pytest id."""
    output = run(venv, 'params_test.py')

    prefix = 'tests.guinea-pigs.pytest.params_test.test_eval'
    test1_name = prefix + '(3+5-8)'
    test2_name = prefix + "(|'1_5|' + |'2|'-1_52)"
    test3_name = prefix + '(6*9-42)'

    fail_message = (fix_slashes('tests/guinea-pigs/pytest/params_test.py') +
                    ':3 (test_eval|[6*9-42|])|n54 != 42|n')

    assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "3"}),
        ServiceMessage('testStarted', {'name': test1_name,
                                       'metainfo': 'test_eval|[3+5-8|]'}),
        ServiceMessage('testFinished', {'name': test1_name}),
        ServiceMessage('testStarted', {'name': test2_name}),
        ServiceMessage('testFinished', {'name': test2_name}),
        ServiceMessage('testStarted', {'name': test3_name}),
        ServiceMessage('testFailed', {'name': test3_name,
                                      'message': fail_message}),
        ServiceMessage('testFinished', {'name': test3_name}),
    ])
def test_setup_class_error(venv):
    """A failing setUpClass reports the class-level fixture as a failed test."""
    output = run_directly(venv, 'setup_class_error.py')
    name = '__main__.TestXXX.setUpClass'
    ms = assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testFailed', {'name': name, 'message': 'Failure',
                                      'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])

    failed = match(ms, ServiceMessage('testFailed', {'name': name}))
    assert failed.params['details'].index("RRR") > 0
Example #20
0
def test_skip(venv):
    """A skipped test is reported as testIgnored carrying the (unicode) skip reason."""
    if "pytest==2.7" in venv.packages:
        pytest.skip("Diff is broken for ancient pytest")

    output = run(venv, 'skip_test.py')
    name = 'tests.guinea-pigs.pytest.skip_test.test_function'
    assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testIgnored', {'message': u'Skipped: skip reason причина',
                                       'flowId': name}),
        ServiceMessage('testFinished', {'name': name}),
    ])
Example #21
0
def test_module_error(venv):
    """A module raising at collection time is reported as a failed synthetic test."""
    output = run(venv, 'module_error_test.py')
    name = 'tests.guinea-pigs.pytest.module_error_test.top_level_collect'
    ms = assert_service_messages(output, [
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFailed', {}),
        ServiceMessage('testFinished', {'name': name}),
    ])
    details = ms[1].params["details"]
    assert details.find("raise Exception") > 0
    assert details.find("module oops") > 0
def test_teardown_error(venv):
    """An exception raised in tearDown marks the test as failed with message 'Error'."""
    output = run_directly(venv, 'teardown_error.py')
    name = '__main__.TestXXX.runTest'
    ms = assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testFailed', {'name': name, 'message': 'Error',
                                      'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])

    details = match(ms, ServiceMessage('testFailed', {'name': name})).params['details']
    assert details.index("RRR") > 0
    assert details.index("tearDown") > 0
Example #23
0
def test_changes_name(venv):
    """A test that mutates its own id: testStarted and testFinished use different names."""
    output = run_directly(venv, 'test_changes_name.py')
    started_name = "__main__.Foo.test_aa (1)"
    finished_name = "__main__.Foo.test_aa (11)"
    assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': started_name,
                                       'flowId': started_name}),
        ServiceMessage('testFinished', {'name': finished_name,
                                        'flowId': finished_name}),
    ])
def test_fail_big_output(venv):
    """Huge stdout is split into 50000-char chunks plus a leftover chunk (1 MiB cap)."""
    output = run(venv, 'nose-guinea-pig.py', 'GuineaPig', 'test_fail_big_output')
    test_name = 'nose-guinea-pig.GuineaPig.test_fail_big_output'

    chunk = 'x' * 50000
    # whatever remains of the 1 MiB output after twenty full chunks
    tail = 'x' * (1024 * 1024 - 50000 * 20)

    expected = [ServiceMessage('testStarted', {})]
    expected += [ServiceMessage('testStdOut', {'out': chunk, 'flowId': test_name})
                 for _ in range(20)]
    expected.append(ServiceMessage('testStdOut', {'out': tail, 'flowId': test_name}))
    expected.append(ServiceMessage('testFailed', {'name': test_name,
                                                  'flowId': test_name}))
    expected.append(ServiceMessage('testFinished', {}))

    assert_service_messages(output, expected)
def test_pass_output(venv):
    """A passing test's stdout is captured as testStdOut (passthrough disabled)."""
    output = run(venv, 'nose-guinea-pig.py', 'GuineaPig', 'test_pass')
    name = 'nose-guinea-pig.GuineaPig.test_pass'
    assert_service_messages(output, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': name,
                                       'captureStandardOutput': 'false'}),
        ServiceMessage('testStdOut', {'out': 'Output from test_pass|n',
                                      'flowId': name}),
        ServiceMessage('testFinished', {'name': name}),
    ])
Example #26
0
def test_nose_parameterized(venv):
    """nose-parameterized test ids (incl. URL-like arguments) are escaped correctly."""
    venv_with_params = virtual_environments.prepare_virtualenv(
        venv.packages + ["nose-parameterized"])

    output = run(venv_with_params, 'nose_parameterized')
    names = [
        "test.test(|'1_1|', |'https://facebook_com/share_php?http://foo_com/|')",
        'test.test(None, 3)',
    ]
    # NOTE(review): _test_count is given the base venv, not venv_with_params —
    # presumably the expected count format depends only on the base package set;
    # confirm against _test_count's implementation.
    expected = [_test_count(venv, 2)]
    for n in names:
        expected.append(ServiceMessage('testStarted', {'name': n, 'flowId': n}))
        expected.append(ServiceMessage('testFinished', {'name': n, 'flowId': n}))
    assert_service_messages(output, expected)
def test_hierarchy(venv):
    """Nested packages yield the full dotted test name; stdout passthrough is on."""
    output = run(venv, 'hierarchy')
    name = 'namespace1.namespace2.testmyzz.test'
    assert_service_messages(output, [
        ServiceMessage('testStarted', {'name': name,
                                       'captureStandardOutput': 'true',
                                       'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
Example #28
0
def test_flask_test_incomplete(venv):
    """A Flask-Testing case with unimplemented parts fails with NotImplementedError."""
    venv_with_flask = virtual_environments.prepare_virtualenv(
        venv.packages + ["Flask-Testing==0.8.1"])

    output = run(venv_with_flask, 'flask_testing_incomplete')
    name = 'test_foo.TestIncompleteFoo.test_add'
    ms = assert_service_messages(output, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testFailed', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
    failed = match(ms, ServiceMessage('testFailed', {'name': name}))
    assert failed.params['details'].find("nNotImplementedError") > 0
def test_diff(venv):
    """Diff-producing assertions in SCRIPT emit the expected message sequence."""
    output = run_directly(venv, SCRIPT)
    expected = [ServiceMessage('testCount', {'count': "3"})]
    expected += expected_messages("__main__.FooTest")
    assert_service_messages(output, expected)
Example #30
0
def test_rerun(venv):
    """pytest --last-failed reruns only the previously failed test."""
    run(venv, 'test_rerun.py')  # first run seeds the last-failed cache
    output = run(venv, 'test_rerun.py', options='--last-failed')
    name = "tests.guinea-pigs.pytest.test_rerun.TestPyTest.testTwo"
    assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testFailed', {'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])