def test_diff(venv):
    """Running the diff script directly reports a count of 3 plus the expected diff messages."""
    out = run_directly(venv, SCRIPT)
    expected = [ServiceMessage('testCount', {'count': "3"})]
    expected += expected_messages("__main__.FooTest")
    assert_service_messages(out, expected)
def test_params(venv):
    """Parameterized tests get escaped per-parameter names; the failing case carries a location message."""
    out = run(venv, "params_test.py")
    name1 = "tests.guinea-pigs.pytest.params_test.test_eval|[3+5-8|]"
    name2 = "tests.guinea-pigs.pytest.params_test.test_eval|[|'1_5|' + |'2|'-1_52|]"
    name3 = "tests.guinea-pigs.pytest.params_test.test_eval|[6*9-42|]"
    failure_message = fix_slashes("tests/guinea-pigs/pytest/params_test.py") + ":3 (test_eval|[6*9-42|])"
    assert_service_messages(out, [
        ServiceMessage("testStarted", {"name": name1}),
        ServiceMessage("testFinished", {"name": name1}),
        ServiceMessage("testStarted", {"name": name2}),
        ServiceMessage("testFinished", {"name": name2}),
        ServiceMessage("testStarted", {"name": name3}),
        ServiceMessage("testFailed", {"name": name3, "message": failure_message}),
        ServiceMessage("testFinished", {"name": name3}),
    ])
def test_diff(venv):
    """The diff script run via the runner reports a test count plus the expected diff messages."""
    out = run(venv, SCRIPT)
    expected = [_test_count(venv, 3)]
    expected += expected_messages('diff_assert.FooTest')
    assert_service_messages(out, expected)
def test_discovery(venv):
    """Unittest discovery emits started/finished messages for the discovered test."""
    out = run_directly(venv, 'discovery.py')
    name = 'testsimple.TestTeamcityMessages.runTest'
    assert_service_messages(out, [
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFinished', {'name': name}),
    ])
def test_pass(venv):
    """A passing nose test produces a started/finished message pair."""
    out = run(venv, 'nose-guinea-pig.py', 'GuineaPig', 'test_pass')
    name = 'nose-guinea-pig.GuineaPig.test_pass'
    assert_service_messages(out, [
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFinished', {'name': name}),
    ])
def test_hierarchy(venv):
    """Namespaced pytest packages yield fully-qualified dotted test names with a flowId."""
    out = run(venv, 'namespace')
    name = 'tests.guinea-pigs.pytest.namespace.pig_test.TestSmoke.test_smoke'
    assert_service_messages(out, [
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
def test_docstrings(venv):
    """A test's docstring is appended to the reported test name in parentheses."""
    out = run(venv, 'docstrings')
    name = 'testa.test_func (My cool test_name)'
    assert_service_messages(out, [
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
def test_nested_suits(venv):
    """Nested unittest suites still report a single flat started/finished pair."""
    out = run_directly(venv, 'nested_suits.py')
    name = '__main__.TestXXX.runTest'
    assert_service_messages(out, [
        ServiceMessage('testStarted', {'name': name, 'captureStandardOutput': 'true', 'flowId': name}),
        ServiceMessage('testFinished', {'name': name}),
    ])
def test_hierarchy(venv):
    """Nested nose packages produce dotted hierarchical names with stdout captured."""
    out = run(venv, 'hierarchy')
    name = 'namespace1.namespace2.testmyzz.test'
    assert_service_messages(out, [
        ServiceMessage('testStarted', {'name': name, 'captureStandardOutput': 'true', 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
def test_docstring(venv):
    """A unittest docstring is appended to the reported name when run directly."""
    out = run_directly(venv, 'docstring.py')
    name = '__main__.TestXXX.runTest (A test_)'
    assert_service_messages(out, [
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
def test_class_with_method(venv):
    """A failing class method reports count, metainfo-tagged start, failure and finish."""
    out = run(venv, 'class_with_method.py')
    assert_service_messages(out, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {"metainfo": "test_method"}),
        ServiceMessage('testFailed', {}),
        ServiceMessage('testFinished', {}),
    ])
def test_doctests(venv):
    """Doctests run directly are reported as a regular test under __main__."""
    out = run_directly(venv, 'doctests.py')
    name = '__main__.factorial'
    assert_service_messages(out, [
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
def test_doctests(venv):
    """Nose's --with-doctest plugin reports doctests with fully-qualified names."""
    out = run(venv, 'doctests', options="--with-doctest")
    name = 'doctests.namespace1.d.multiply'
    assert_service_messages(out, [
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
def test_changes_name(venv):
    """A test that mutates its own name mid-run is reported under both names."""
    out = run_directly(venv, 'test_changes_name.py')
    started_name = "__main__.Foo.test_aa (1)"
    finished_name = "__main__.Foo.test_aa (11)"
    assert_service_messages(out, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': started_name, 'flowId': started_name}),
        ServiceMessage('testFinished', {'name': finished_name, 'flowId': finished_name}),
    ])
def test_skip(venv):
    """A pytest skip emits testIgnored with the skip reason between start/finish."""
    out = run(venv, 'skip_test.py')
    name = 'tests.guinea-pigs.pytest.skip_test.test_function'
    assert_service_messages(out, [
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testIgnored', {'message': 'Skipped: skip reason', 'flowId': name}),
        ServiceMessage('testFinished', {'name': name}),
    ])
def test_skip(venv):
    """A nose skip emits testIgnored carrying the SKIPPED reason."""
    out = run(venv, 'skiptest')
    name = 'testa.test_func'
    assert_service_messages(out, [
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testIgnored', {'name': name, 'message': 'SKIPPED: my skip', 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
def test_pass_no_capture(venv):
    """With --nocapture, test stdout stays in the raw output and capture is flagged on."""
    out = run(venv, 'nose-guinea-pig.py', 'GuineaPig', 'test_pass', options="--nocapture")
    # The raw output must still contain the print from the test body.
    assert out.find("Output from test_pass") > 0
    name = 'nose-guinea-pig.GuineaPig.test_pass'
    assert_service_messages(out, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': name, 'captureStandardOutput': 'true'}),
        ServiceMessage('testFinished', {'name': name}),
    ])
def test_diff_top_level_assert_error(venv):
    """A top-level assert failure carries actual/expected values in testFailed."""
    out = run(venv, "../diff_toplevel_assert_error.py")
    name = "tests.guinea-pigs.diff_toplevel_assert_error.test_test"
    assert_service_messages(out, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFailed', {'name': name, "actual": "spam", "expected": "eggs"}),
        ServiceMessage('testFinished', {'name': name}),
    ])
def test_diff(venv):
    """Diff messages are produced for modern pytest; ancient pytest 2.7 is skipped."""
    if "pytest==2.7" in venv.packages:
        pytest.skip("Diff is broken for ancient pytest")
    out = run(venv, SCRIPT)
    expected = [ServiceMessage('testCount', {'count': "3"})]
    expected += expected_messages("tests.guinea-pigs.diff_assert.FooTest")
    assert_service_messages(out, expected)
def test_hierarchy(venv):
    """Namespaced pytest packages yield dotted hierarchical names with a flowId."""
    out = run(venv, "namespace")
    name = "tests.guinea-pigs.pytest.namespace.pig_test.TestSmoke.test_smoke"
    assert_service_messages(out, [
        ServiceMessage("testStarted", {"name": name, "flowId": name}),
        ServiceMessage("testFinished", {"name": name, "flowId": name}),
    ])
def test_deprecated(venv):
    """A nose DeprecatedTest is reported as ignored with a 'Deprecated' message."""
    out = run(venv, 'deprecatedtest')
    name = 'testa.test_func'
    assert_service_messages(out, [
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testIgnored', {'name': name, 'message': 'Deprecated', 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
def test_custom_test_items(venv):
    """Custom pytest collectors (yml lines) each report a started/finished pair."""
    out = run(venv, 'custom')
    line1 = 'tests.guinea-pigs.pytest.custom.test_simple_yml.line1'
    line2 = 'tests.guinea-pigs.pytest.custom.test_simple_yml.line2'
    assert_service_messages(out, [
        ServiceMessage('testStarted', {'name': line1}),
        ServiceMessage('testFinished', {'name': line1}),
        ServiceMessage('testStarted', {'name': line2}),
        ServiceMessage('testFinished', {'name': line2}),
    ])
def test_long_diff(venv):
    """A very long diff still yields a plain testFailed (no actual/expected attrs asserted)."""
    out = run(venv, "../diff_assert_long.py")
    name = 'diff_assert_long.FooTest.test_test'
    assert_service_messages(out, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFailed', {'name': name}),
        ServiceMessage('testFinished', {'name': name}),
    ])
def test_num_diff(venv):
    """Numeric assert failures report stringified actual/expected values."""
    out = run(venv, "../diff_assert_error_nums.py")
    name = 'tests.guinea-pigs.diff_assert_error_nums.FooTest.test_test'
    assert_service_messages(out, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFailed', {'name': name, "actual": "123", "expected": "456"}),
        ServiceMessage('testFinished', {'name': name}),
    ])
def test_fail_output(venv):
    """Captured stdout of a failing test is forwarded as a testStdOut message."""
    out = run(venv, 'nose-guinea-pig.py', 'GuineaPig', 'test_fail_output')
    name = 'nose-guinea-pig.GuineaPig.test_fail_output'
    assert_service_messages(out, [
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testStdOut', {'name': name, 'out': 'Output line 1|nOutput line 2|nOutput line 3|n', 'flowId': name}),
        ServiceMessage('testFailed', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
def test_swap_diff_assert_error(venv):
    """With swapdiff=true in the ini, expected/actual are swapped in testFailed."""
    with make_ini('[pytest]\nswapdiff=true'):
        out = run(venv, "../diff_assert_error.py")
    name = "tests.guinea-pigs.diff_assert_error.FooTest.test_test"
    assert_service_messages(out, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFailed', {'name': name, "expected": "spam", "actual": "eggs"}),
        ServiceMessage('testFinished', {'name': name}),
    ])
def test_skip(venv):
    """A pytest skip yields testIgnored with the skip reason between start/finish."""
    out = run(venv, "skip_test.py")
    name = "tests.guinea-pigs.pytest.skip_test.test_function"
    assert_service_messages(out, [
        ServiceMessage("testStarted", {"name": name}),
        ServiceMessage("testIgnored", {"message": "Skipped: skip reason", "flowId": name}),
        ServiceMessage("testFinished", {"name": name}),
    ])
def test_pass_output(venv):
    """With capture enabled (default), passing-test stdout arrives as testStdOut."""
    out = run(venv, 'nose-guinea-pig.py', 'GuineaPig', 'test_pass')
    name = 'nose-guinea-pig.GuineaPig.test_pass'
    assert_service_messages(out, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': name, 'captureStandardOutput': 'false'}),
        ServiceMessage('testStdOut', {'out': 'Output from test_pass|n', 'flowId': name}),
        ServiceMessage('testFinished', {'name': name}),
    ])
def test_subtest_ok(venv):
    """Successful subtests are reported as per-iteration testStdOut lines."""
    out = run_directly(venv, 'subtest_ok.py')
    name = '__main__.TestXXX.testSubtestSuccess'
    assert_service_messages(out, [
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testStdOut', {'out': name + ' (i=0): ok|n', 'name': name, 'flowId': name}),
        ServiceMessage('testStdOut', {'out': name + ' (i=1): ok|n', 'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
def test_custom_test_items(venv):
    """Custom pytest collectors (yml lines) each report a started/finished pair."""
    out = run(venv, "custom")
    line1 = "tests.guinea-pigs.pytest.custom.test_simple_yml.line1"
    line2 = "tests.guinea-pigs.pytest.custom.test_simple_yml.line2"
    assert_service_messages(out, [
        ServiceMessage("testStarted", {"name": line1}),
        ServiceMessage("testFinished", {"name": line1}),
        ServiceMessage("testStarted", {"name": line2}),
        ServiceMessage("testFinished", {"name": line2}),
    ])
def test_teardown_class_error(venv):
    """A failing class teardown is reported as a synthetic 'teardown' test with a traceback."""
    out = run(venv, 'teardown_class_error')
    name = 'testa.TestXXX.teardown'
    ms = assert_service_messages(out, [
        ServiceMessage('testStarted', {'name': 'testa.TestXXX.runTest'}),
        ServiceMessage('testFinished', {'name': 'testa.TestXXX.runTest'}),
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFailed', {'name': name, 'flowId': name, 'message': 'error in teardown context'}),
        ServiceMessage('testFinished', {'name': name}),
    ])
    # ms[3] is the testFailed message; its details hold the traceback text.
    details = ms[3].params['details']
    assert details.find("Traceback") == 0
    assert details.find("RRR") > 0
def test_teardown_package_error(venv):
    """A failing package teardown is reported as a synthetic 'teardown' test with a traceback."""
    out = run(venv, 'teardown_package_error')
    name = 'namespace2.teardown'
    ms = assert_service_messages(out, [
        ServiceMessage('testStarted', {'name': 'namespace2.testa.test_mycode'}),
        ServiceMessage('testFinished', {'name': 'namespace2.testa.test_mycode'}),
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFailed', {'name': name, 'flowId': name, 'message': 'error in teardown context'}),
        ServiceMessage('testFinished', {'name': name}),
    ])
    # ms[3] is the testFailed message; its details hold the traceback text.
    details = ms[3].params['details']
    assert details.find("Traceback") == 0
    assert details.find("AssertionError") > 0
def test_fail(venv):
    """A failing nose test carries a traceback mentioning the failed expression."""
    out = run(venv, 'nose-guinea-pig.py', 'GuineaPig', 'test_fail')
    name = 'nose-guinea-pig.GuineaPig.test_fail'
    ms = assert_service_messages(out, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFailed', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name}),
    ])
    failed = match(ms, ServiceMessage('testFailed', {'name': name}))
    details = failed.params['details']
    assert details.find("Traceback") == 0
    assert details.find("2 * 2 == 5") > 0
def test_fail_output(venv):
    """Captured stdout of a failing test is forwarded as a testStdOut message."""
    out = run(venv, 'nose-guinea-pig.py', 'GuineaPig', 'test_fail_output')
    name = 'nose-guinea-pig.GuineaPig.test_fail_output'
    assert_service_messages(out, [
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testStdOut', {'name': name, 'out': 'Output line 1|nOutput line 2|nOutput line 3|n', 'flowId': name}),
        ServiceMessage('testFailed', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
def test_output_no_capture(venv):
    """With -s, output bypasses capture: it stays raw and capture is flagged on."""
    out = run(venv, 'output_test.py', options="-s")
    name = 'tests.guinea-pigs.pytest.output_test.test_out'
    assert_service_messages(out, [
        ServiceMessage('testStarted', {'name': name, 'flowId': name, 'captureStandardOutput': 'true'}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
    # Every phase's prints must appear verbatim in the raw output.
    for expected_line in ("setup stderr", "setup stdout",
                          "test stderr", "test stdout",
                          "teardown stderr", "teardown stdout"):
        assert expected_line in out
def test_coverage(venv):
    """Nose's coverage plugin writes an XML report containing per-line hit data.

    Installs coverage into a fresh virtualenv, runs the 'coverage' guinea pig
    with an XML report target, checks the service messages, then inspects the
    generated XML for a covered line.
    """
    venv_with_coverage = virtual_environments.prepare_virtualenv(
        venv.packages + ["coverage==3.7.1"])
    coverage_file = os.path.join(virtual_environments.get_vroot(), "coverage-temp.xml")
    output = run(
        venv_with_coverage, 'coverage',
        options="--with-coverage --cover-erase --cover-tests --cover-xml --cover-xml-file=\"" + coverage_file + "\"")
    assert_service_messages(output, [
        ServiceMessage('testStarted', {'name': 'testa.test_mycode'}),
        ServiceMessage('testFinished', {'name': 'testa.test_mycode'}),
    ])
    # Use a context manager: the original open/close pair leaked the handle
    # if the service-message assertion above (or the read) raised.
    with open(coverage_file, "rb") as f:
        content = str(f.read())
    assert content.find('<line hits="1" number="2"/>') > 0
def test_xfail(venv):
    """xpass is reported as a failure; xfail is reported as ignored with its reason."""
    out = run(venv, 'xfail_test.py')
    xpass_name = 'tests.guinea-pigs.pytest.xfail_test.test_unexpectedly_passing'
    xfail_name = 'tests.guinea-pigs.pytest.xfail_test.test_expected_to_fail'
    ms = assert_service_messages(out, [
        ServiceMessage('testCount', {'count': "2"}),
        ServiceMessage('testStarted', {'name': xpass_name}),
        ServiceMessage('testFailed', {}),
        ServiceMessage('testFinished', {'name': xpass_name}),
        ServiceMessage('testStarted', {'name': xfail_name}),
        ServiceMessage('testIgnored', {}),
        ServiceMessage('testFinished', {'name': xfail_name}),
    ])
    # ms[5] is the testIgnored message; it must carry the xfail reason.
    assert ms[5].params["message"].find("xfail reason") > 0
def test_skip(venv):
    """unittest skip decorators report testIgnored; pre-2.7 needs unittest2."""
    if sys.version_info < (2, 7):
        # Skip support only exists via the unittest2 backport on older Pythons.
        venv = virtual_environments.prepare_virtualenv(
            list(venv.packages) + ["unittest2==0.5.1"])
    out = run_directly(venv, 'skip_test.py')
    name = '__main__.TestSkip.test_skip_me'
    assert_service_messages(out, [
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testIgnored', {'name': name, 'message': 'Skipped: testing skipping', 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
def test_fail_with_msg(venv):
    """A failure message provided to fail() shows up in the testFailed details."""
    out = run(venv, 'nose-guinea-pig.py', 'GuineaPig', 'test_fail_with_msg')
    name = 'nose-guinea-pig.GuineaPig.test_fail_with_msg'
    ms = assert_service_messages(out, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFailed', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name}),
    ])
    failed = match(ms, ServiceMessage('testFailed', {'name': name}))
    assert failed.params['details'].find("Bitte keine Werbung") > 0
def test_output(venv):
    """Setup/test/teardown output is routed into blocks and testStdOut/StdErr messages."""
    out = run(venv, 'output_test.py')
    name = 'tests.guinea-pigs.pytest.output_test.test_out'
    assert_service_messages(out, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name, 'flowId': name, 'captureStandardOutput': 'false'}),
        # Setup phase output is wrapped in a 'test setup' block.
        ServiceMessage('blockOpened', {'name': 'test setup', 'flowId': name}),
        ServiceMessage('testStdOut', {'name': name, 'flowId': name, 'out': 'setup stdout|n'}),
        ServiceMessage('testStdErr', {'name': name, 'flowId': name, 'out': 'setup stderr|n'}),
        ServiceMessage('blockClosed', {'name': 'test setup'}),
        # Test body output arrives between the block and testFinished.
        ServiceMessage('testStdOut', {'name': name, 'flowId': name, 'out': 'test stdout|n'}),
        ServiceMessage('testStdErr', {'name': name, 'flowId': name, 'out': 'test stderr|n'}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
        # Teardown output is reported after testFinished in its own block.
        ServiceMessage('blockOpened', {'name': 'test teardown', 'flowId': name}),
        ServiceMessage('testStdOut', {'name': name, 'flowId': name, 'out': 'teardown stdout|n'}),
        ServiceMessage('testStdErr', {'name': name, 'flowId': name, 'out': 'teardown stderr|n'}),
        ServiceMessage('blockClosed', {'name': 'test teardown'}),
    ])
def test_issue_98(venv):
    """A custom nose TestLoader (issue 98) still reports skip messages correctly."""
    env = virtual_environments.get_clean_system_environment()
    env['TEAMCITY_VERSION'] = "0.0.0"

    # Launch the custom loader script as a subprocess and collect its output.
    command = os.path.join(venv.bin, 'python') + " " + os.path.join(
        'tests', 'guinea-pigs', 'nose', 'issue_98', 'custom_test_loader.py')
    print("RUN: " + command)
    proc = subprocess.Popen(command, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, env=env, shell=True)
    output = "".join([x.decode() for x in proc.stdout.readlines()])
    proc.wait()
    print("OUTPUT:" + output.replace("#", "*"))

    name = 'simple_tests.SimpleTests.test_two'
    assert_service_messages(
        output,
        [
            ServiceMessage('testStarted', {'name': name, 'flowId': name}),
            ServiceMessage('testIgnored', {'name': name, 'message': 'Skipped: Skipping', 'flowId': name}),
            ServiceMessage('testFinished', {'name': name, 'flowId': name}),
        ],
        actual_messages_predicate=lambda ms: ms.name != "testCount")
def test_params(venv):
    """Parameterized tests use parenthesized names; the failing case has location + diff."""
    out = run(venv, 'params_test.py')
    name1 = 'tests.guinea-pigs.pytest.params_test.test_eval(3+5-8)'
    name2 = "tests.guinea-pigs.pytest.params_test.test_eval(|'1_5|' + |'2|'-1_52)"
    name3 = 'tests.guinea-pigs.pytest.params_test.test_eval(6*9-42)'
    failure_message = fix_slashes('tests/guinea-pigs/pytest/params_test.py') + ':3 (test_eval|[6*9-42|])|n42 != 54|n'
    assert_service_messages(out, [
        ServiceMessage('testCount', {'count': "3"}),
        ServiceMessage('testStarted', {'name': name1}),
        ServiceMessage('testFinished', {'name': name1}),
        ServiceMessage('testStarted', {'name': name2}),
        ServiceMessage('testFinished', {'name': name2}),
        ServiceMessage('testStarted', {'name': name3}),
        ServiceMessage('testFailed', {'name': name3, 'message': failure_message}),
        ServiceMessage('testFinished', {'name': name3}),
    ])
def test_teardown_class_error(venv):
    """A failing tearDownClass is reported as its own failed pseudo-test."""
    out = run_directly(venv, 'teardown_class_error.py')
    teardown_name = '__main__.TestXXX.tearDownClass'
    ms = assert_service_messages(out, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': '__main__.TestXXX.test_ok'}),
        ServiceMessage('testFinished', {'name': '__main__.TestXXX.test_ok'}),
        ServiceMessage('testStarted', {'name': teardown_name}),
        ServiceMessage('testFailed', {'name': teardown_name, 'message': 'Failure'}),
        ServiceMessage('testFinished', {'name': teardown_name}),
    ])
    failed = match(ms, ServiceMessage('testFailed', {'name': teardown_name}))
    assert failed.params['details'].index("RRR") > 0
def test_flask_test_incomplete(venv):
    """A Flask-Testing case missing create_app fails with NotImplementedError in details."""
    venv_with_flask = virtual_environments.prepare_virtualenv(venv.packages + ["Flask-Testing==0.8.1"])
    out = run(venv_with_flask, 'flask_testing_incomplete')
    name = 'test_foo.TestIncompleteFoo.test_add'
    ms = assert_service_messages(out, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testFailed', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
    failed = match(ms, ServiceMessage('testFailed', {'name': name}))
    assert failed.params['details'].find("nNotImplementedError") > 0
def test_buffer_output(venv):
    """Buffered stdout lines are forwarded as testStdOut; raw output stays clean."""
    out = run(venv, 'buffer_output')
    name = 'test_buffer_output.SpamTest.test_test'
    assert_service_messages(out, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testStdOut', {'out': "stdout_line1|n", 'flowId': name}),
        ServiceMessage('testStdOut', {'out': "stdout_line2|n", 'flowId': name}),
        ServiceMessage('testStdOut', {'out': "stdout_line3_nonewline", 'flowId': name}),
        ServiceMessage('testFailed', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
    # Verify stdout_test/stderr_test appear only inside service messages
    # (i.e. self._mirrorOutput was False): strip the message occurrences
    # first, then check nothing remains in the raw output.
    remaining = out.replace("out='stdout_test", "").replace("out='stderr_test", "")
    assert remaining.find("stdout_test") < 0
    assert remaining.find("stderr_test") < 0
    # The logcapture plugin must still be active and record the log line.
    assert out.find("begin captured logging") > 0
    assert out.find("log info message") >= 0
def test_setup_error(venv):
    """An error raised in setUp fails the test with the setUp traceback in details."""
    out = run_directly(venv, 'setup_error.py')
    name = '__main__.TestXXX.runTest'
    ms = assert_service_messages(out, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testFailed', {'name': name, 'message': 'Error', 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
    failed = match(ms, ServiceMessage('testFailed', {'name': name}))
    details = failed.params['details']
    assert details.index("RRR") > 0
    assert details.index("setUp") > 0
def test_fail_big_output(venv):
    """Huge (1 MiB) test output is chunked into 50000-char testStdOut messages."""
    out = run(venv, 'nose-guinea-pig.py', 'GuineaPig', 'test_fail_big_output')
    name = 'nose-guinea-pig.GuineaPig.test_fail_big_output'
    chunk = 'x' * 50000
    # 20 full chunks, then whatever remains of the 1 MiB payload.
    tail = 'x' * (1024 * 1024 - 50000 * 20)
    expected = [ServiceMessage('testStarted', {})]
    expected += [ServiceMessage('testStdOut', {'out': chunk, 'flowId': name})] * 20
    expected.append(ServiceMessage('testStdOut', {'out': tail, 'flowId': name}))
    expected.append(ServiceMessage('testFailed', {'name': name, 'flowId': name}))
    expected.append(ServiceMessage('testFinished', {}))
    assert_service_messages(out, expected)
def test_module_error(venv):
    """A module-level collection error is reported as a failed pseudo-test with the traceback."""
    out = run(venv, 'module_error_test.py')
    name = 'tests.guinea-pigs.pytest.module_error_test.top_level_collect'
    ms = assert_service_messages(out, [
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFailed', {}),
        ServiceMessage('testFinished', {'name': name}),
    ])
    # ms[1] is the testFailed message.
    details = ms[1].params["details"]
    assert details.find("raise Exception") > 0
    assert details.find("module oops") > 0
def test_coverage(venv):
    """pytest-cov runs emit buildStatisticValue messages with line-coverage totals."""
    venv_with_coverage = virtual_environments.prepare_virtualenv(
        venv.packages + ["pytest-cov==1.8.1"])
    out = run(venv_with_coverage, 'coverage_test', options="--cov coverage_test")
    name = "tests.guinea-pigs.pytest.coverage_test.coverage_test.test_covered_func"
    assert_service_messages(out, [
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFinished', {'name': name}),
        ServiceMessage('buildStatisticValue', {'key': 'CodeCoverageLinesCovered', 'value': '9'}),
        ServiceMessage('buildStatisticValue', {'key': 'CodeCoverageLinesTotal', 'value': '13'}),
        ServiceMessage('buildStatisticValue', {'key': 'CodeCoverageLinesUncovered', 'value': '4'}),
    ])
def test_teardown_function_error(venv):
    """A failing function teardown fails the test itself with a traceback in details."""
    out = run(venv, 'teardown_function_error')
    name = 'testa.test'
    ms = assert_service_messages(out, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFailed', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name}),
    ])
    failed = match(ms, ServiceMessage('testFailed', {'name': name}))
    details = failed.params['details']
    assert details.find("Traceback") == 0
    assert details.find("AssertionError") > 0
def test_subtest_ok(venv):
    """Successful subtests are reported as blockOpened/blockClosed pairs with results."""
    if sys.version_info < (3, 4):
        # subTest only exists natively from 3.4; use the unittest2 backport before that.
        venv = virtual_environments.prepare_virtualenv(
            list(venv.packages) + ["unittest2"])
    out = run_directly(venv, 'subtest_ok.py')
    name = '__main__.TestXXX.testSubtestSuccess'
    assert_service_messages(out, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('blockOpened', {'name': '(i=0)', 'flowId': name, 'subTestResult': 'Success'}),
        ServiceMessage('blockClosed', {'name': '(i=0)', 'flowId': name}),
        ServiceMessage('blockOpened', {'name': '(i=1)', 'flowId': name, 'subTestResult': 'Success'}),
        ServiceMessage('blockClosed', {'name': '(i=1)', 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
def test_twisted_trial(venv):
    """Twisted trial's --reporter=teamcity emits service messages for pass and fail.

    Builds a virtualenv with twisted (plus pypiwin32 on Windows), locates the
    trial entry point, runs it against the guinea-pig suite in a subprocess and
    checks the resulting service messages.
    """
    # BUG FIX: was list(*venv.packages), which unpacks the packages as
    # positional arguments to list() and raises TypeError whenever there is
    # not exactly one package. We want a copy of the sequence.
    packages = list(venv.packages)
    packages.append("twisted==15.2.1")
    if os.name == 'nt':
        if sys.version_info < (2, 7):
            pytest.skip("pypiwin32 is available since Python 2.7")
        packages.append("pypiwin32==219")
    venv_with_twisted = virtual_environments.prepare_virtualenv(packages)

    env = virtual_environments.get_clean_system_environment()
    env['PYTHONPATH'] = os.path.join(os.getcwd(), "tests", "guinea-pigs", "unittest")

    # trial may be installed as a script, a .py file, or an .exe depending on platform.
    trial_file = os.path.join(venv_with_twisted.bin, 'trial')
    trial_exe_file = os.path.join(venv_with_twisted.bin, 'trial.exe')
    trial_py_file = os.path.join(venv_with_twisted.bin, 'trial.py')
    if os.path.exists(trial_file):
        command = trial_file
    elif os.path.exists(trial_py_file):
        command = os.path.join(venv_with_twisted.bin, 'python') + " " + trial_py_file
    elif os.path.exists(trial_exe_file):
        command = trial_exe_file
    else:
        raise Exception("twisted trial is not found at " + trial_py_file + " or " + trial_file + " or " + trial_exe_file)
    command += " --reporter=teamcity twisted_trial"

    # Start the process and wait for its output.
    print("RUN: " + command)
    proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env, shell=True)
    output = "".join([x.decode() for x in proc.stdout.readlines()])
    proc.wait()
    print("OUTPUT:" + output.replace("#", "*"))

    test1 = "twisted_trial.test_case.CalculationTestCase.test_fail (some desc)"
    test2 = "twisted_trial.test_case.CalculationTestCase.test_ok"
    ms = assert_service_messages(output, [
        ServiceMessage('testStarted', {'name': test1}),
        ServiceMessage('testFailed', {'name': test1}),
        ServiceMessage('testFinished', {'name': test1}),
        ServiceMessage('testStarted', {'name': test2}),
        ServiceMessage('testFinished', {'name': test2}),
    ])
    failed_ms = match(ms, ServiceMessage('testFailed', {'name': test1}))
    assert failed_ms.params['details'].index("5 != 4") > 0
def test_unittest_error(venv):
    """Both an error and a plain failure in a unittest class are reported with details."""
    out = run(venv, 'unittest_error_test.py')
    error_name = 'tests.guinea-pigs.pytest.unittest_error_test.TestErrorFail.test_error'
    fail_name = 'tests.guinea-pigs.pytest.unittest_error_test.TestErrorFail.test_fail'
    ms = assert_service_messages(out, [
        ServiceMessage('testCount', {'count': "2"}),
        ServiceMessage('testStarted', {'name': error_name}),
        ServiceMessage('testFailed', {}),
        ServiceMessage('testFinished', {'name': error_name}),
        ServiceMessage('testStarted', {'name': fail_name}),
        ServiceMessage('testFailed', {}),
        ServiceMessage('testFinished', {'name': fail_name}),
    ])
    # ms[2] / ms[5] are the two testFailed messages.
    assert ms[2].params["details"].find("raise Exception") > 0
    assert ms[2].params["details"].find("oops") > 0
    assert ms[5].params["details"].find("AssertionError") > 0
def test_runtime_error(venv):
    """A raised exception and an assert failure both produce testFailed with details."""
    out = run(venv, 'runtime_error_test.py')
    exc_name = 'tests.guinea-pigs.pytest.runtime_error_test.test_exception'
    err_name = 'tests.guinea-pigs.pytest.runtime_error_test.test_error'
    ms = assert_service_messages(out, [
        ServiceMessage('testCount', {'count': "2"}),
        ServiceMessage('testStarted', {'name': exc_name}),
        ServiceMessage('testFailed', {'flowId': exc_name}),
        ServiceMessage('testFinished', {'name': exc_name}),
        ServiceMessage('testStarted', {'name': err_name}),
        ServiceMessage('testFailed', {}),
        ServiceMessage('testFinished', {'name': err_name}),
    ])
    # ms[2] / ms[5] are the two testFailed messages.
    assert ms[2].params["details"].find("raise Exception") > 0
    assert ms[2].params["details"].find("oops") > 0
    assert ms[5].params["details"].find("assert 0 != 0") > 0
def test_teardown_error(venv):
    """A failing pytest teardown is reported as a separate synthetic failed test."""
    out = run(venv, 'teardown_error_test.py')
    passed_name = 'tests.guinea-pigs.pytest.teardown_error_test.test_error'
    teardown_name = 'tests.guinea-pigs.pytest.teardown_error_test.test_error_teardown'
    ms = assert_service_messages(out, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': passed_name}),
        ServiceMessage('testFinished', {'name': passed_name}),
        ServiceMessage('testStarted', {'name': teardown_name, 'flowId': teardown_name}),
        ServiceMessage('testFailed', {'flowId': teardown_name, 'message': fix_slashes('tests/guinea-pigs/pytest/teardown_error_test.py') + ':13 (test_error)'}),
        ServiceMessage('testFinished', {'name': teardown_name, 'flowId': teardown_name}),
    ])
    # ms[4] is the teardown testFailed message.
    details = ms[4].params["details"]
    assert details.find("raise Exception") > 0
    assert details.find("teardown oops") > 0
def test_teardown_module_error(venv):
    """A failing tearDownModule is reported as its own failed pseudo-test."""
    out = run_directly(venv, 'teardown_module_error.py')
    teardown_name = '__main__.tearDownModule'
    ms = assert_service_messages(out, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': '__main__.TestXXX.test_ok'}),
        ServiceMessage('testFinished', {'name': '__main__.TestXXX.test_ok'}),
        ServiceMessage('testStarted', {'name': teardown_name, 'flowId': teardown_name}),
        ServiceMessage('testFailed', {'name': teardown_name, 'message': 'Failure', 'flowId': teardown_name}),
        ServiceMessage('testFinished', {'name': teardown_name, 'flowId': teardown_name}),
    ])
    failed = match(ms, ServiceMessage('testFailed', {'name': teardown_name}))
    assert failed.params['details'].index("assert 1 == 0") > 0
def test_pytest_pep8(venv):
    """pytest-pep8 violations show up as a failed PEP8 pseudo-test with the error code."""
    venv_with_pep8 = virtual_environments.prepare_virtualenv(
        venv.packages + ("pytest-pep8", ))
    out = run(venv_with_pep8, 'pep8_test.py', options="--pep8")
    pep8_name = "tests.guinea-pigs.pytest.pep8_test.PEP8"
    ok_name = "tests.guinea-pigs.pytest.pep8_test.test_ok"
    ms = assert_service_messages(out, [
        ServiceMessage('testStarted', {'name': pep8_name}),
        ServiceMessage('testFailed', {'name': pep8_name}),
        ServiceMessage('testFinished', {'name': pep8_name}),
        ServiceMessage('testStarted', {'name': ok_name}),
        ServiceMessage('testFinished', {'name': ok_name}),
    ])
    # ms[1] is the PEP8 testFailed message.
    assert ms[1].params["details"].find(
        "E302 expected 2 blank lines, found 1") > 0
def test_pytest_pylint(venv):
    """pytest-pylint violations show up as a failed Pylint pseudo-test with the warning text."""
    venv_with_pylint = virtual_environments.prepare_virtualenv(
        venv.packages + ("pytest-pylint", ))
    out = run(venv_with_pylint, 'pylint_test.py', options="--pylint")
    pylint_name = "tests.guinea-pigs.pytest.pylint_test.Pylint"
    ok_name = "tests.guinea-pigs.pytest.pylint_test.test_ok"
    ms = assert_service_messages(out, [
        ServiceMessage('testCount', {'count': "2"}),
        ServiceMessage('testStarted', {'name': pylint_name}),
        ServiceMessage('testFailed', {'name': pylint_name}),
        ServiceMessage('testFinished', {'name': pylint_name}),
        ServiceMessage('testStarted', {'name': ok_name}),
        ServiceMessage('testFinished', {'name': ok_name}),
    ])
    # ms[2] is the Pylint testFailed message.
    assert ms[2].params["details"].find("Unused import sys") > 0
def test_setup_package_error(venv):
    """A failing package setup is reported as a synthetic 'setup' test with a traceback."""
    out = run(venv, 'setup_package_error')
    name = 'namespace2.setup'
    ms = assert_service_messages(out, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFailed', {'name': name, 'flowId': name, 'message': 'error in setup context'}),
        ServiceMessage('testFinished', {'name': name}),
    ])
    failed = match(ms, ServiceMessage('testFailed', {'name': name}))
    details = failed.params['details']
    assert details.find("Traceback") == 0
    assert details.find("AssertionError") > 0
def test_expected_failure(venv):
    """An expectedFailure test is reported as ignored with the failure text in the message."""
    if sys.version_info < (2, 7):
        # expectedFailure needs the unittest2 backport before Python 2.7.
        venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"])
    out = run_directly(venv, 'expected_failure.py')
    name = '__main__.TestSkip.test_expected_failure'
    ms = assert_service_messages(out, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testIgnored', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
    ignored = match(ms, ServiceMessage('testIgnored', {'name': name}))
    message = ignored.params['message']
    assert message.find("Expected failure") == 0
    assert message.find("this should happen unfortunately") > 0