def test_teardown_class_error(venv):
    """A failing tearDownClass is reported as its own failed pseudo-test."""
    output = run_directly(venv, 'teardown_class_error.py')

    ok_test = '__main__.TestXXX.test_ok'
    teardown_name = '__main__.TestXXX.tearDownClass'
    expected = [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': ok_test}),
        ServiceMessage('testFinished', {'name': ok_test}),
        ServiceMessage('testStarted', {'name': teardown_name}),
        ServiceMessage('testFailed', {'name': teardown_name, 'message': 'Failure'}),
        ServiceMessage('testFinished', {'name': teardown_name}),
    ]
    ms = assert_service_messages(output, expected)

    # The failure details must carry the "RRR" marker raised by the guinea pig.
    failed_ms = match(ms, ServiceMessage('testFailed', {'name': teardown_name}))
    assert failed_ms.params['details'].index("RRR") > 0
def test_twisted_trial(venv):
    """Run twisted ``trial`` with the teamcity reporter and check its output.

    Builds a dedicated virtualenv containing twisted (plus pypiwin32 on
    Windows), locates the ``trial`` launcher inside that virtualenv and
    asserts the service messages produced for one failing and one passing
    test case of the ``twisted_trial`` guinea pig.
    """
    # BUG FIX: the original `list(*venv.packages)` unpacked the packages
    # sequence into list()'s arguments, which raises TypeError whenever
    # more than one package is present. `list(venv.packages)` copies the
    # sequence, matching every other test in this file.
    packages = list(venv.packages)
    packages.append("twisted==15.2.1")
    if os.name == 'nt':
        if sys.version_info < (2, 7):
            pytest.skip("pypiwin32 is available since Python 2.7")
        packages.append("pypiwin32==219")
    venv_with_twisted = virtual_environments.prepare_virtualenv(packages)

    env = virtual_environments.get_clean_system_environment()
    env['PYTHONPATH'] = os.path.join(os.getcwd(), "tests", "guinea-pigs",
                                     "unittest")

    # The trial launcher name differs by platform/installation layout:
    # a plain script, a .py file (run via the venv's python), or an .exe.
    trial_file = os.path.join(venv_with_twisted.bin, 'trial')
    trial_exe_file = os.path.join(venv_with_twisted.bin, 'trial.exe')
    trial_py_file = os.path.join(venv_with_twisted.bin, 'trial.py')

    if os.path.exists(trial_file):
        command = trial_file
    elif os.path.exists(trial_py_file):
        command = os.path.join(venv_with_twisted.bin,
                               'python') + " " + trial_py_file
    elif os.path.exists(trial_exe_file):
        command = trial_exe_file
    else:
        raise Exception("twisted trial is not found at " + trial_py_file +
                        " or " + trial_file + " or " + trial_exe_file)

    command += " --reporter=teamcity twisted_trial"
    print("RUN: " + command)
    # Start the process and wait for its output.
    proc = subprocess.Popen(command,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            env=env,
                            shell=True)
    output = "".join([x.decode() for x in proc.stdout.readlines()])
    proc.wait()

    # '#' would make service messages visible to an outer TeamCity parser.
    print("OUTPUT:" + output.replace("#", "*"))

    test1 = "twisted_trial.test_case.CalculationTestCase.test_fail (some desc)"
    test2 = "twisted_trial.test_case.CalculationTestCase.test_ok"

    ms = assert_service_messages(output, [
        ServiceMessage('testStarted', {'name': test1}),
        ServiceMessage('testFailed', {'name': test1}),
        ServiceMessage('testFinished', {'name': test1}),
        ServiceMessage('testStarted', {'name': test2}),
        ServiceMessage('testFinished', {'name': test2}),
    ])
    failed_ms = match(ms, ServiceMessage('testFailed', {'name': test1}))
    assert failed_ms.params['details'].index("5 != 4") > 0
def test_generators(venv):
    """Generator-produced tests are reported one message pair per case.

    The ``|'`` sequences are TeamCity service-message escaping of quote
    characters embedded in the parameter list.
    """
    output = run(venv, 'generators')
    assert_service_messages(
        output,
        [
            ServiceMessage('testStarted', {'name': 'testa.test_evens(0, 0, |\'_|\')'}),
            ServiceMessage('testFinished', {'name': 'testa.test_evens(0, 0, |\'_|\')'}),
            ServiceMessage('testStarted', {'name': "testa.test_evens(1, 3, |'_|')"}),
            ServiceMessage('testFinished', {'name': "testa.test_evens(1, 3, |'_|')"}),
            ServiceMessage('testStarted', {'name': "testa.test_evens(2, 6, |'_|')"}),
            ServiceMessage('testFinished', {'name': "testa.test_evens(2, 6, |'_|')"}),
        ])
def test_fail_fast(venv):
    """The run stops after the first failure: three tests are counted
    but only two are reported."""
    output = run_directly(venv, 'fail_fast.py')

    first_test = '__main__.FooTest.test_1_test'
    failing_test = '__main__.FooTest.test_2_test'
    assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "3"}),
        ServiceMessage('testStarted', {'name': first_test}),
        ServiceMessage('testFinished', {'name': first_test}),
        ServiceMessage('testStarted', {'name': failing_test}),
        ServiceMessage('testFailed', {'name': failing_test}),
        ServiceMessage('testFinished', {'name': failing_test}),
    ])
Exemple #5
0
def test_params(venv):
    """Parametrized pytest tests keep the parameter id in the test name.

    ``|]`` and ``|'`` are TeamCity service-message escapes for ``]`` and
    the quote character inside the pytest parameter ids.
    """
    output = run(venv, 'params_test.py')

    test1_name = 'tests.guinea-pigs.pytest.params_test.test_eval[3+5-8|]'
    test2_name = "tests.guinea-pigs.pytest.params_test.test_eval[|'1_5|' + |'2|'-1_52|]"
    test3_name = 'tests.guinea-pigs.pytest.params_test.test_eval[6*9-42|]'

    assert_service_messages(output, [
        ServiceMessage('testStarted', {'name': test1_name}),
        ServiceMessage('testFinished', {'name': test1_name}),
        ServiceMessage('testStarted', {'name': test2_name}),
        ServiceMessage('testFinished', {'name': test2_name}),
        ServiceMessage('testStarted', {'name': test3_name}),
        ServiceMessage('testFailed', {}),
        ServiceMessage('testFinished', {'name': test3_name}),
    ])
def test_teardown_module_error(venv):
    """A failing tearDownModule is reported as its own failed test entry,
    carrying an explicit flowId equal to the synthetic test name."""
    output = run_directly(venv, 'teardown_module_error.py')
    teardown_test_name = '__main__.tearDownModule'
    ms = assert_service_messages(
        output,
        [
            ServiceMessage('testCount', {'count': "1"}),
            ServiceMessage('testStarted', {'name': '__main__.TestXXX.test_ok'}),
            ServiceMessage('testFinished', {'name': '__main__.TestXXX.test_ok'}),
            ServiceMessage('testStarted', {'name': teardown_test_name, 'flowId': teardown_test_name}),
            ServiceMessage('testFailed', {'name': teardown_test_name, 'message': 'Failure', 'flowId': teardown_test_name}),
            ServiceMessage('testFinished', {'name': teardown_test_name, 'flowId': teardown_test_name}),
        ])

    # The failure details must include the assertion raised by the guinea pig.
    failed_ms = match(ms, ServiceMessage('testFailed', {'name': teardown_test_name}))
    assert failed_ms.params['details'].index("assert 1 == 0") > 0
Exemple #7
0
def test_chunked_output(venv):
    """A 1 MiB stream on each of stdout/stderr is emitted as twenty
    50000-character chunks followed by one remainder chunk."""
    output = run(venv, 'chunked_output_test.py')

    chunk = 'x' * 50000
    remainder = 'x' * (1024 * 1024 - 50000 * 20)

    expected = [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {}),
    ]
    # Same chunking pattern for stdout first, then stderr.
    for stream_message in ('testStdOut', 'testStdErr'):
        expected.extend([ServiceMessage(stream_message, {'out': chunk})] * 20)
        expected.append(ServiceMessage(stream_message, {'out': remainder}))
    expected.append(ServiceMessage('testFinished', {}))

    assert_service_messages(output, expected)
Exemple #8
0
def test_unittest_error(venv):
    """Both an error (raised exception) and an assertion failure in
    unittest cases map to testFailed messages."""
    output = run(venv, 'unittest_error_test.py')
    ms = assert_service_messages(
        output,
        [
            ServiceMessage('testCount', {'count': "2"}),
            ServiceMessage('testStarted', {'name': 'tests.guinea-pigs.pytest.unittest_error_test.TestErrorFail.test_error'}),
            ServiceMessage('testFailed', {}),
            ServiceMessage('testFinished', {'name': 'tests.guinea-pigs.pytest.unittest_error_test.TestErrorFail.test_error'}),
            ServiceMessage('testStarted', {'name': 'tests.guinea-pigs.pytest.unittest_error_test.TestErrorFail.test_fail'}),
            ServiceMessage('testFailed', {}),
            ServiceMessage('testFinished', {'name': 'tests.guinea-pigs.pytest.unittest_error_test.TestErrorFail.test_fail'}),
        ])
    # ms[2] and ms[5] are the two testFailed messages in the sequence above.
    assert ms[2].params["details"].find("raise Exception") > 0
    assert ms[2].params["details"].find("oops") > 0
    assert ms[5].params["details"].find("AssertionError") > 0
Exemple #9
0
def test_runtime_error(venv):
    """Plain (non-unittest) pytest functions that raise or fail an assert
    are reported as failed tests with details in the testFailed message."""
    output = run(venv, 'runtime_error_test.py')
    ms = assert_service_messages(
        output,
        [
            ServiceMessage('testCount', {'count': "2"}),
            ServiceMessage('testStarted', {'name': 'tests.guinea-pigs.pytest.runtime_error_test.test_exception'}),
            ServiceMessage('testFailed', {'flowId': 'tests.guinea-pigs.pytest.runtime_error_test.test_exception'}),
            ServiceMessage('testFinished', {'name': 'tests.guinea-pigs.pytest.runtime_error_test.test_exception'}),
            ServiceMessage('testStarted', {'name': 'tests.guinea-pigs.pytest.runtime_error_test.test_error'}),
            ServiceMessage('testFailed', {}),
            ServiceMessage('testFinished', {'name': 'tests.guinea-pigs.pytest.runtime_error_test.test_error'}),
        ])
    # ms[2] and ms[5] are the two testFailed messages in the sequence above.
    assert ms[2].params["details"].find("raise Exception") > 0
    assert ms[2].params["details"].find("oops") > 0
    assert ms[5].params["details"].find("assert 0 != 0") > 0
Exemple #10
0
def test_params(venv):
    """Parametrized test names in the parenthesized form, with the failing
    case also carrying a location message.

    NOTE(review): another ``test_params`` exists earlier in this collage;
    in a single module the later definition would shadow the earlier one.
    """
    output = run(venv, 'params_test.py')

    test1_name = 'tests.guinea-pigs.pytest.params_test.test_eval(3+5-8)'
    test2_name = "tests.guinea-pigs.pytest.params_test.test_eval(|'1_5|' + |'2|'-1_52)"
    test3_name = 'tests.guinea-pigs.pytest.params_test.test_eval(6*9-42)'

    assert_service_messages(
        output,
        [
            ServiceMessage('testCount', {'count': "3"}),
            ServiceMessage('testStarted', {'name': test1_name}),
            ServiceMessage('testFinished', {'name': test1_name}),
            ServiceMessage('testStarted', {'name': test2_name}),
            ServiceMessage('testFinished', {'name': test2_name}),
            ServiceMessage('testStarted', {'name': test3_name}),
            ServiceMessage('testFailed', {'name': test3_name,
                                          'message': fix_slashes('tests/guinea-pigs/pytest/params_test.py') + ':3 (test_eval|[6*9-42|])'}),
            ServiceMessage('testFinished', {'name': test3_name}),
        ])
Exemple #11
0
def test_rus(venv):
    """Feature, scenario and step names from a Russian-language feature
    file are reported verbatim as unicode in the service messages."""
    output = run(venv,
                 arguments="RusFeature.feature",
                 options="--lang ru",
                 lang_dir="rus")
    assert_service_messages(output, [
        ServiceMessage('testSuiteStarted', {'name': u'Моя фича'}),
        ServiceMessage('testSuiteStarted', {'name': u'Мой сценарий'}),
        ServiceMessage('testStarted', {'name': u'Дано Я говорю по-русски'}),
        ServiceMessage('testFinished', {'name': u'Дано Я говорю по-русски'}),
        ServiceMessage('testSuiteFinished', {'name': u'Мой сценарий'}),
        ServiceMessage('testSuiteFinished', {'name': u'Моя фича'}),
    ])
def test_buffer_output(venv):
    """With unittest output buffering, captured stdout/stderr is attached
    to the test as testStdOut/testStdErr messages and is not mirrored to
    the raw output. ``|n`` is the TeamCity escape for a newline."""
    output = run_directly(venv, 'buffer_output.py')
    test_name = '__main__.SpamTest.test_test'
    assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {
            'name': test_name,
            'flowId': test_name
        }),
        ServiceMessage('testStdOut', {
            'out': "stdout_test1|n",
            'flowId': test_name
        }),
        ServiceMessage('testStdOut', {
            'out': "stdout_test2|n",
            'flowId': test_name
        }),
        ServiceMessage('testStdErr', {
            'out': "stderr_test1",
            'flowId': test_name
        }),
        ServiceMessage('testFailed', {
            'name': test_name,
            'flowId': test_name
        }),
        ServiceMessage('testStdErr', {
            'out': "stderr_test2",
            'flowId': test_name
        }),
        ServiceMessage('testFinished', {
            'name': test_name,
            'flowId': test_name
        }),
    ])

    # Check no stdout_test or stderr_test in the output (not in service messages)
    # it checks self._mirrorOutput = False
    output = output.replace("out='stdout_test",
                            "").replace("out='stderr_test", "")
    assert output.find("stdout_test") < 0
    assert output.find("stderr_test") < 0
def test_subtest_ok(venv):
    """Successful subTests are reported as blockOpened/blockClosed pairs
    within the parent test's flow."""
    # subTest needs Python >= 3.4; otherwise use the unittest2 backport.
    if sys.version_info < (3, 4):
        venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"])

    output = run_directly(venv, 'subtest_ok.py')
    test_name = '__main__.TestXXX.testSubtestSuccess'
    assert_service_messages(
        output,
        [
            ServiceMessage('testCount', {'count': "1"}),
            ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
            ServiceMessage('blockOpened', {'name': '(i=0)', 'flowId': test_name, 'subTestResult': 'Success'}),
            ServiceMessage('blockClosed', {'name': '(i=0)', 'flowId': test_name}),
            ServiceMessage('blockOpened', {'name': '(i=1)', 'flowId': test_name, 'subTestResult': 'Success'}),
            ServiceMessage('blockClosed', {'name': '(i=1)', 'flowId': test_name}),
            ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
        ])
Exemple #14
0
def test_fail_output(venv):
    """A failing nose test still reports the stdout lines it printed,
    attached to the test's flow before the testFailed message."""
    output = run(venv, 'nose-guinea-pig.py', 'GuineaPig', 'test_fail_output')
    test_name = 'nose-guinea-pig.GuineaPig.test_fail_output'
    assert_service_messages(
        output,
        [
            _test_count(venv, 1),
            ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
            ServiceMessage('testStdOut', {'name': test_name, 'out': 'Output line 1|n', 'flowId': test_name}),
            ServiceMessage('testStdOut', {'name': test_name, 'out': 'Output line 2|n', 'flowId': test_name}),
            ServiceMessage('testStdOut', {'name': test_name, 'out': 'Output line 3|n', 'flowId': test_name}),
            ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name}),
            ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
        ])
def test_setup_class_skip(venv):
    """Skipping from setUpClass reports one ignored setUpClass pseudo-test
    per class — including the subclass that inherits the skip."""
    # setUpClass needs Python >= 2.7; otherwise use the unittest2 backport.
    if sys.version_info < (2, 7):
        venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"])

    output = run_directly(venv, 'setup_class_skip.py')
    test1_name = '__main__.TestSimple.setUpClass'
    test2_name = '__main__.TestSubSimple.setUpClass'
    assert_service_messages(
        output,
        [
            ServiceMessage('testCount', {'count': "7"}),
            ServiceMessage('testStarted', {'name': test1_name, 'flowId': test1_name}),
            ServiceMessage('testIgnored', {'name': test1_name, 'flowId': test1_name, 'message': "Skipped: Skip whole Case"}),
            ServiceMessage('testFinished', {'name': test1_name, 'flowId': test1_name}),
            ServiceMessage('testStarted', {'name': test2_name, 'flowId': test2_name}),
            ServiceMessage('testIgnored', {'name': test2_name, 'flowId': test2_name, 'message': "Skipped: Skip whole Case"}),
            ServiceMessage('testFinished', {'name': test2_name, 'flowId': test2_name}),
        ])
def test_smoke(venv):
    """Each style violation is reported as a failed test named by its
    code and location, inside a per-file "pep8:" suite."""
    output = run(venv, options="--teamcity")

    file_name = "tests/guinea-pigs/flake8/smoke.py"
    test1_name = "E302: " + file_name + ":3:1"
    test2_name = "W391: " + file_name + ":7:1"

    assert_service_messages(
        output,
        [
            ServiceMessage('testSuiteStarted', {'name': "pep8: " + file_name}),

            ServiceMessage('testStarted', {'name': test1_name}),
            ServiceMessage('testFailed', {'name': test1_name, 'message': "E302: expected 2 blank lines, found 1"}),
            ServiceMessage('testFinished', {'name': test1_name}),

            ServiceMessage('testStarted', {'name': test2_name}),
            ServiceMessage('testFailed', {'name': test2_name, 'message': "W391: blank line at end of file"}),
            ServiceMessage('testFinished', {'name': test2_name}),

            ServiceMessage('testSuiteFinished', {'name': "pep8: " + file_name}),
        ])
def test_skip(venv):
    """A skipped test is reported as testIgnored, preserving the
    (non-ASCII) skip reason in the message."""
    # skip decorators need Python >= 2.7; otherwise use the unittest2 backport.
    if sys.version_info < (2, 7):
        venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2==0.5.1"])

    output = run_directly(venv, 'skip_test.py')
    test_name = '__main__.TestSkip.test_skip_me'
    assert_service_messages(
        output,
        [
            ServiceMessage('testCount', {'count': "2"}),
            ServiceMessage('testStarted', {'name': '__main__.TestSkip.test_ok'}),
            ServiceMessage('testFinished', {'name': '__main__.TestSkip.test_ok'}),
            ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
            ServiceMessage('testIgnored', {'name': test_name, 'message': u'Skipped: testing skipping øпричина', 'flowId': test_name}),
            ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
        ])
Exemple #18
0
def test_teardown_error(venv):
    """A pytest fixture teardown error is reported as a separate
    ``test_error_teardown`` pseudo-test with a location message."""
    output = run(venv, 'teardown_error_test.py')
    teardown_test_id = 'tests.guinea-pigs.pytest.teardown_error_test.test_error_teardown'
    ms = assert_service_messages(
        output,
        [
            ServiceMessage('testCount', {'count': "1"}),
            ServiceMessage('testStarted', {'name': 'tests.guinea-pigs.pytest.teardown_error_test.test_error'}),
            ServiceMessage('testFinished', {'name': 'tests.guinea-pigs.pytest.teardown_error_test.test_error'}),
            ServiceMessage('testStarted', {'name': teardown_test_id, 'flowId': teardown_test_id}),
            ServiceMessage('testFailed', {'flowId': teardown_test_id,
                                          'message': fix_slashes('tests/guinea-pigs/pytest/teardown_error_test.py') + ':13 (test_error)'}),
            ServiceMessage('testFinished', {'name': teardown_test_id, 'flowId': teardown_test_id}),
        ])
    # ms[4] is the testFailed message for the teardown pseudo-test.
    assert ms[4].params["details"].find("raise Exception") > 0
    assert ms[4].params["details"].find("teardown oops") > 0
Exemple #19
0
def test_teardown_class_error(venv):
    """A failing nose class-level teardown is reported as a synthetic
    ``teardown`` test whose details start with a traceback."""
    output = run(venv, 'teardown_class_error')
    test_name = 'testa.TestXXX.teardown'
    ms = assert_service_messages(
        output,
        [
            _test_count(venv, 1),
            ServiceMessage('testStarted', {'name': 'testa.TestXXX.runTest'}),
            ServiceMessage('testFinished', {'name': 'testa.TestXXX.runTest'}),
            ServiceMessage('testStarted', {'name': test_name}),
            ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name, 'message': 'error in teardown context'}),
            ServiceMessage('testFinished', {'name': test_name}),
        ])
    failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name}))
    # Details begin with the traceback and contain the guinea pig's marker.
    assert failed_ms.params['details'].find("Traceback") == 0
    assert failed_ms.params['details'].find("RRR") > 0
Exemple #20
0
def test_teardown_package_error(venv):
    """A failing nose package-level teardown is reported as a synthetic
    ``namespace2.teardown`` test whose details start with a traceback."""
    output = run(venv, 'teardown_package_error')
    test_name = 'namespace2.teardown'
    ms = assert_service_messages(
        output,
        [
            _test_count(venv, 1),
            ServiceMessage('testStarted', {'name': 'namespace2.testa.test_mycode'}),
            ServiceMessage('testFinished', {'name': 'namespace2.testa.test_mycode'}),
            ServiceMessage('testStarted', {'name': test_name}),
            ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name, 'message': 'error in teardown context'}),
            ServiceMessage('testFinished', {'name': test_name}),
        ])
    failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name}))
    # Details begin with the traceback and contain the failing assertion.
    assert failed_ms.params['details'].find("Traceback") == 0
    assert failed_ms.params['details'].find("AssertionError") > 0
def test_subtest_nested(venv):
    """Nested subTests are reported as a flat sequence of blockOpened /
    blockClosed pairs (inner subtest first), not as nested blocks."""
    # subTest needs Python >= 3.4; otherwise use the unittest2 backport.
    if sys.version_info < (3, 4):
        venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"])

    output = run_directly(venv, 'subtest_nested.py')
    test_name = '__main__.TestXXX.testNested'

    # Nested blocks support requires strict notifications about starting and stopping subtests
    # which is not yet supported, see https://mail.python.org/pipermail/python-dev/2016-June/145402.html
    assert_service_messages(
        output,
        [
            ServiceMessage('testCount', {'count': "1"}),
            ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
            ServiceMessage('blockOpened', {'name': '(i=2)', 'flowId': test_name, 'subTestResult': 'Success'}),
            ServiceMessage('blockClosed', {'name': '(i=2)', 'flowId': test_name}),
            ServiceMessage('blockOpened', {'name': '(i=1)', 'flowId': test_name, 'subTestResult': 'Success'}),
            ServiceMessage('blockClosed', {'name': '(i=1)', 'flowId': test_name}),
            ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
        ])
Exemple #22
0
    def test_pytest_pylint(venv):
        """pytest-pylint findings appear as one failed "Pylint" pseudo-test
        alongside the regular passing test."""
        venv_with_pylint = virtual_environments.prepare_virtualenv(
            venv.packages + ("pytest-pylint", ))

        output = run(venv_with_pylint, 'pylint_test.py', options="--pylint")
        pylint_test_name = "tests.guinea-pigs.pytest.pylint_test.Pylint"
        test_name = "tests.guinea-pigs.pytest.pylint_test.test_ok"
        ms = assert_service_messages(output, [
            ServiceMessage('testCount', {'count': "2"}),
            ServiceMessage('testStarted', {'name': pylint_test_name}),
            ServiceMessage('testFailed', {'name': pylint_test_name}),
            ServiceMessage('testFinished', {'name': pylint_test_name}),
            ServiceMessage('testStarted', {'name': test_name}),
            ServiceMessage('testFinished', {'name': test_name}),
        ])

        # ms[2] is the testFailed message for the Pylint pseudo-test.
        assert ms[2].params["details"].find("Unused import sys") > 0
def test_smoke_flake8_v3(venv_flake8_v3):
    """flake8 v3 reporting: one failed test per violation, with the full
    violation text embedded in the test name (no surrounding suite)."""
    output = run(venv_flake8_v3, options="")

    file_name = "./smoke.py"
    test1_name = "pep8: " + file_name + ": E302 expected 2 blank lines, found 1"
    test2_name = "pep8: " + file_name + ": W391 blank line at end of file"

    assert_service_messages(
        output,
        [
            ServiceMessage('testStarted', {'name': test1_name}),
            ServiceMessage('testFailed', {'name': test1_name, 'message': "E302 expected 2 blank lines, found 1"}),
            ServiceMessage('testFinished', {'name': test1_name}),

            ServiceMessage('testStarted', {'name': test2_name}),
            ServiceMessage('testFailed', {'name': test2_name, 'message': "W391 blank line at end of file"}),
            ServiceMessage('testFinished', {'name': test2_name}),
        ])
Exemple #24
0
    def test_pytest_pep8(venv):
        """pytest-pep8 findings appear as one failed "PEP8" pseudo-test
        alongside the regular passing test."""
        venv_with_pep8 = virtual_environments.prepare_virtualenv(
            venv.packages + ("pytest-pep8", ))

        output = run(venv_with_pep8, 'pep8_test.py', options="--pep8")
        pep8_test_name = "tests.guinea-pigs.pytest.pep8_test.PEP8"
        test_name = "tests.guinea-pigs.pytest.pep8_test.test_ok"
        ms = assert_service_messages(output, [
            ServiceMessage('testCount', {'count': "2"}),
            ServiceMessage('testStarted', {'name': pep8_test_name}),
            ServiceMessage('testFailed', {'name': pep8_test_name}),
            ServiceMessage('testFinished', {'name': pep8_test_name}),
            ServiceMessage('testStarted', {'name': test_name}),
            ServiceMessage('testFinished', {'name': test_name}),
        ])

        # ms[2] is the testFailed message for the PEP8 pseudo-test.
        assert ms[2].params["details"].find(
            "E302 expected 2 blank lines, found 1") > 0
Exemple #25
0
    def test_coverage(venv, coverage_version, pytest_cov_version):
        """pytest-cov runs emit buildStatisticValue messages with the
        covered / total / uncovered line counts after the test messages."""
        # coverage >= 4.0 dropped Python 3.2; only 3.7.1 still supports it.
        if coverage_version != "==3.7.1" and (3, 1) < sys.version_info < (3, 3):
            pytest.skip("coverage >= 4.0 dropped support for Python 3.2")

        venv_with_coverage = virtual_environments.prepare_virtualenv(
            venv.packages + (
                "coverage" + coverage_version,
                "pytest-cov" + pytest_cov_version))

        output = run(venv_with_coverage, 'coverage_test', options="--cov coverage_test")
        test_name = "tests.guinea-pigs.pytest.coverage_test.coverage_test.test_covered_func"
        assert_service_messages(
            output,
            [
                ServiceMessage('testCount', {'count': "1"}),
                ServiceMessage('testStarted', {'name': test_name}),
                ServiceMessage('testFinished', {'name': test_name}),
                ServiceMessage('buildStatisticValue', {'key': 'CodeCoverageAbsLCovered', 'value': '9'}),
                ServiceMessage('buildStatisticValue', {'key': 'CodeCoverageAbsLTotal', 'value': '13'}),
                ServiceMessage('buildStatisticValue', {'key': 'CodeCoverageAbsLUncovered', 'value': '4'}),
            ])
Exemple #26
0
def test_smoke_flake8_v2(venv_flake8_v2):
    """flake8 v2 reporting: violation names include line:column, and the
    --teamcity flag is passed explicitly (no TEAMCITY_VERSION env)."""
    output = run(venv_flake8_v2, options="--teamcity", set_tc_version=False)

    file_name = "./smoke.py"
    test1_name = "pep8: " + file_name + ":3:1: E302 expected 2 blank lines, found 1"
    test2_name = "pep8: " + file_name + ":7:1: W391 blank line at end of file"

    assert_service_messages(output, [
        ServiceMessage('testStarted', {'name': test1_name}),
        ServiceMessage('testFailed', {
            'name': test1_name,
            'message': "E302 expected 2 blank lines, found 1"
        }),
        ServiceMessage('testFinished', {'name': test1_name}),
        ServiceMessage('testStarted', {'name': test2_name}),
        ServiceMessage('testFailed', {
            'name': test2_name,
            'message': "W391 blank line at end of file"
        }),
        ServiceMessage('testFinished', {'name': test2_name}),
    ])
Exemple #27
0
def test_buffer_output(venv):
    """Nose buffered-output run: captured stdout lines (including a final
    line with no newline) are attached to the failing test's flow."""
    output = run(venv, 'buffer_output')
    test_name = 'test_buffer_output.SpamTest.test_test'
    assert_service_messages(output, [_test_count(venv, 1)] + [
        ServiceMessage('testStarted', {
            'name': test_name,
            'flowId': test_name
        }),
        ServiceMessage('testStdOut', {
            'out': "stdout_line1|n",
            'flowId': test_name
        }),
        ServiceMessage('testStdOut', {
            'out': "stdout_line2|n",
            'flowId': test_name
        }),
        ServiceMessage('testStdOut', {
            'out': "stdout_line3_nonewline",
            'flowId': test_name
        }),
        ServiceMessage('testFailed', {
            'name': test_name,
            'flowId': test_name
        }),
        ServiceMessage('testFinished', {
            'name': test_name,
            'flowId': test_name
        }),
    ])

    # Check no stdout_test or stderr_test in the output (not in service messages)
    # it checks self._mirrorOutput = False
    # NOTE(review): this guinea pig prints "stdout_line*", not "stdout_test*",
    # so the scrub below looks copied from the unittest variant — confirm
    # whether it still guards anything here.
    output = output.replace("out='stdout_test",
                            "").replace("out='stderr_test", "")
    assert output.find("stdout_test") < 0
    assert output.find("stderr_test") < 0

    # assert logcapture plugin works
    assert output.find("begin captured logging") > 0
    assert output.find("log info message") >= 0
def test_diff(venv):
    """Diff-producing failure: delegate the expected message sequence to
    the module-level expected_messages() helper."""
    output = run_directly(venv, SCRIPT)
    expected = [ServiceMessage('testCount', {'count': "1"})]
    expected += expected_messages("__main__.FooTest.test_test")
    assert_service_messages(output, expected)
Exemple #29
0
def test_sample(venv):
    """A pylint run over sample.py emits an inspectionType message per
    message id, an inspection message per finding (severity mapped from
    pylint's category), and a final PyLintScore build statistic.
    ``|'`` is the TeamCity service-message escape for a quote."""
    filename = 'tests/guinea-pigs/pylint/sample.py'
    output = run(venv, filename)
    assert_service_messages(output, [
        ServiceMessage(
            'inspectionType',
            dict(category='warning',
                 description=
                 'Used when a warning note as FIXME or XXX is detected.',
                 id='W0511',
                 name='fixme')),
        ServiceMessage(
            'inspection',
            dict(SEVERITY='WARNING',
                 file='tests/guinea-pigs/pylint/sample.py',
                 line='10',
                 message='TODO gets also picked up by PyLint (W0511)',
                 typeId='W0511')),
        ServiceMessage(
            'inspectionType',
            dict(category='refactor',
                 description=
                 'Used when a function or method takes too many arguments.',
                 name='too-many-arguments',
                 id='R0913')),
        ServiceMessage(
            'inspection',
            dict(SEVERITY='WEAK WARNING',
                 file='tests/guinea-pigs/pylint/sample.py',
                 line='4',
                 message='Too many arguments (6/5)')),
        ServiceMessage(
            'inspectionType',
            dict(
                category='convention',
                description=
                'Used when more than on statement are found on the same line.',
                id='C0321',
                name='multiple-statements')),
        ServiceMessage(
            'inspection',
            dict(SEVERITY='INFO',
                 file='tests/guinea-pigs/pylint/sample.py',
                 line='6',
                 message='More than one statement on a single line',
                 typeId='C0321')),
        ServiceMessage(
            'inspectionType',
            dict(category='error',
                 description='Used when an undefined variable is accessed.',
                 id='E0602',
                 name='undefined-variable')),
        ServiceMessage(
            'inspection',
            dict(SEVERITY='ERROR',
                 file='tests/guinea-pigs/pylint/sample.py',
                 line='7',
                 message='Undefined variable |\'eight|\'',
                 typeId='E0602')),
        ServiceMessage(
            'inspection',
            dict(SEVERITY='ERROR',
                 file='tests/guinea-pigs/pylint/sample.py',
                 line='8',
                 message='Undefined variable |\'nine|\'',
                 typeId='E0602')),
        ServiceMessage(
            'inspectionType',
            dict(category='warning',
                 description='Used when a variable is defined but not used.',
                 name='unused-variable',
                 id='W0612')),
        ServiceMessage(
            'inspection',
            dict(SEVERITY='WARNING',
                 file='tests/guinea-pigs/pylint/sample.py',
                 line='7',
                 message='Unused variable |\'seven|\'',
                 typeId='W0612')),
        ServiceMessage('buildStatisticValue',
                       dict(key='PyLintScore', value='-18.0')),
    ])
Exemple #30
0
def test_output(venv):
    """Output produced during setup/teardown is wrapped in dedicated
    blockOpened/blockClosed blocks; the test itself is started with
    captureStandardOutput disabled and its output attached via
    testStdOut/testStdErr. ``|n`` is the TeamCity newline escape."""
    output = run(venv, 'output_test.py')

    test_name = 'tests.guinea-pigs.pytest.output_test.test_out'

    assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage(
            'testStarted', {
                'name': test_name,
                'flowId': test_name,
                'captureStandardOutput': 'false'
            }),
        ServiceMessage('blockOpened', {
            'name': 'test setup',
            'flowId': test_name
        }),
        ServiceMessage('testStdOut', {
            'name': test_name,
            'flowId': test_name,
            'out': 'setup stdout|n'
        }),
        ServiceMessage('testStdErr', {
            'name': test_name,
            'flowId': test_name,
            'out': 'setup stderr|n'
        }),
        ServiceMessage('blockClosed', {'name': 'test setup'}),
        ServiceMessage('testStdOut', {
            'name': test_name,
            'flowId': test_name,
            'out': 'test stdout|n'
        }),
        ServiceMessage('testStdErr', {
            'name': test_name,
            'flowId': test_name,
            'out': 'test stderr|n'
        }),
        ServiceMessage('testFinished', {
            'name': test_name,
            'flowId': test_name
        }),
        ServiceMessage('blockOpened', {
            'name': 'test teardown',
            'flowId': test_name
        }),
        ServiceMessage('testStdOut', {
            'name': test_name,
            'flowId': test_name,
            'out': 'teardown stdout|n'
        }),
        ServiceMessage('testStdErr', {
            'name': test_name,
            'flowId': test_name,
            'out': 'teardown stderr|n'
        }),
        ServiceMessage('blockClosed', {'name': 'test teardown'}),
    ])