def test_teardown_module_error(venv):
    """A failing tearDownModule must surface as a synthetic failed test."""
    output = run_directly(venv, 'teardown_module_error.py')
    teardown_name = '__main__.tearDownModule'
    messages = assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': '__main__.TestXXX.test_ok'}),
        ServiceMessage('testFinished', {'name': '__main__.TestXXX.test_ok'}),
        ServiceMessage('testStarted', {'name': teardown_name, 'flowId': teardown_name}),
        ServiceMessage('testFailed', {'name': teardown_name, 'message': 'Failure', 'flowId': teardown_name}),
        ServiceMessage('testFinished', {'name': teardown_name, 'flowId': teardown_name}),
    ])
    failure = match(messages, ServiceMessage('testFailed', {'name': teardown_name}))
    # The failed assertion's source text must appear somewhere in the details.
    assert failure.params['details'].index("assert 1 == 0") > 0
def test_expected_failure(venv):
    """An @expectedFailure test must be reported as ignored, not failed."""
    if sys.version_info < (2, 7):
        # Pre-2.7 stdlib unittest lacks expectedFailure; pull in unittest2.
        venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"])
    output = run_directly(venv, 'expected_failure.py')
    test_name = '__main__.TestSkip.test_expected_failure'
    messages = assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
        ServiceMessage('testIgnored', {'name': test_name, 'flowId': test_name}),
        ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
    ])
    ignored = match(messages, ServiceMessage('testIgnored', {'name': test_name}))
    # Message starts with the marker text and carries the original reason.
    assert ignored.params['message'].find("Expected failure") == 0
    assert ignored.params['message'].find("this should happen unfortunately") > 0
def test_subtest_failure(venv):
    """Subtests (unittest.subTest) map to blockOpened/blockClosed pairs;
    a failing subtest fails the enclosing test with a summary message."""
    if sys.version_info < (3, 4):
        # subTest appeared in Python 3.4; use the unittest2 backport before that.
        venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"])
    output = run_directly(venv, 'subtest_failure.py')
    test_name = '__main__.TestXXX.testSubtestFailure'
    messages = assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
        ServiceMessage('blockOpened', {'name': '(i=0)', 'flowId': test_name, 'subTestResult': 'Success'}),
        ServiceMessage('blockClosed', {'name': '(i=0)', 'flowId': test_name}),
        ServiceMessage('blockOpened', {'name': "(i=|'abc_xxx|')", 'flowId': test_name, 'subTestResult': 'Failure'}),
        ServiceMessage('testStdErr', {'name': test_name, 'flowId': test_name}),
        ServiceMessage('blockClosed', {'name': "(i=|'abc_xxx|')", 'flowId': test_name}),
        ServiceMessage('testFailed', {'details': "Failed subtests list: (i=|'abc_xxx|')",
                                      'message': 'One or more subtests failed',
                                      'name': test_name, 'flowId': test_name}),
        ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
    ])
    stderr_msg = match(messages, ServiceMessage('testStdErr', {'name': test_name}))
    # The traceback of the failed subtest is streamed to test stderr.
    for needle in ("SubTest failure", "AssertionError", "assert 1 == 0"):
        assert stderr_msg.params['out'].find(needle) >= 0
def test_twisted_trial(venv):
    """Run twisted's trial runner with --reporter=teamcity and verify the
    service messages for passing, failing and erroring test cases.

    Fix: the original ``packages = list(*venv.packages)`` unpacked the
    package sequence as positional arguments to ``list()``, which raises
    ``TypeError`` whenever it holds more (or fewer) than one element.
    The intent — a shallow copy, as written correctly elsewhere in this
    file — is ``list(venv.packages)``.
    """
    packages = list(venv.packages)
    packages.append("twisted")
    if os.name == 'nt':
        if sys.version_info < (2, 7):
            pytest.skip("pypiwin32 is available since Python 2.7")
        elif sys.version_info[:2] == (3, 4):
            # Last pypiwin32 build published for Python 3.4.
            packages.append("pypiwin32==219")
        else:
            packages.append("pypiwin32")
    venv_with_twisted = virtual_environments.prepare_virtualenv(packages)
    env = virtual_environments.get_clean_system_environment()
    env['PYTHONPATH'] = os.path.join(os.getcwd(), "tests", "guinea-pigs", "unittest")

    # Locate the trial entry point: plain script, .py script (run via the
    # venv's python), or .exe wrapper, in that order of preference.
    trial_file = os.path.join(venv_with_twisted.bin, 'trial')
    trial_exe_file = os.path.join(venv_with_twisted.bin, 'trial.exe')
    trial_py_file = os.path.join(venv_with_twisted.bin, 'trial.py')
    if os.path.exists(trial_file):
        command = trial_file
    elif os.path.exists(trial_py_file):
        command = os.path.join(venv_with_twisted.bin, 'python') + " " + trial_py_file
    elif os.path.exists(trial_exe_file):
        command = trial_exe_file
    else:
        raise Exception("twisted trial is not found at " + trial_py_file +
                        " or " + trial_file + " or " + trial_exe_file)
    command += " --reporter=teamcity twisted_trial"
    print("RUN: " + command)
    # Start the process and wait for its output.
    proc = subprocess.Popen(command, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, env=env, shell=True)
    output = "".join([x.decode() for x in proc.stdout.readlines()])
    proc.wait()
    # '#' is replaced so the raw service messages stay visible in CI logs.
    print("OUTPUT:" + output.replace("#", "*"))

    test1 = "twisted_trial.test_case.CalculationTestCase.test_fail (some desc)"
    test2 = "twisted_trial.test_case.CalculationTestCase.test_ok"
    test3 = "twisted_trial.test_exception.TestFailure.testBadCode"
    ms = assert_service_messages(output, [
        ServiceMessage('testStarted', {'name': test1}),
        ServiceMessage('testFailed', {'name': test1}),
        ServiceMessage('testFinished', {'name': test1}),
        ServiceMessage('testStarted', {'name': test2}),
        ServiceMessage('testFinished', {'name': test2}),
        ServiceMessage('testStarted', {'name': test3}),
        # trial reports the bad-code case twice (error + failure).
        ServiceMessage('testFailed', {'name': test3}),
        ServiceMessage('testFailed', {'name': test3}),
        ServiceMessage('testFinished', {'name': test3}),
    ])
    failed_ms = match(ms, ServiceMessage('testFailed', {'name': test1}))
    assert failed_ms.params['details'].index("5 != 4") > 0
def test_discovery_errors(venv):
    """A module that fails to import during discovery becomes one failed test."""
    output = run_directly(venv, 'discovery_errors.py')
    messages = assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {}),
        ServiceMessage('testFailed', {'message': 'Error'}),
        ServiceMessage('testFinished', {}),
    ])
    failure = match(messages, ServiceMessage('testFailed', {}))
    # The import traceback must be carried in the failure details.
    assert failure.params['details'].index("ImportError") > 0
def test_twisted_trial(venv):
    """Run a pinned twisted (15.2.1) trial with --reporter=teamcity and
    verify the emitted service messages.

    Fix: ``packages = list(*venv.packages)`` star-unpacked the package
    sequence into ``list()``, raising ``TypeError`` for any length other
    than one; the intended shallow copy is ``list(venv.packages)``.
    """
    packages = list(venv.packages)
    packages.append("twisted==15.2.1")
    if os.name == 'nt':
        if sys.version_info < (2, 7):
            pytest.skip("pypiwin32 is available since Python 2.7")
        packages.append("pypiwin32==219")
    venv_with_twisted = virtual_environments.prepare_virtualenv(packages)
    env = virtual_environments.get_clean_system_environment()
    env['PYTHONPATH'] = os.path.join(os.getcwd(), "tests", "guinea-pigs", "unittest")

    # Locate the trial entry point: plain script, .py script (run via the
    # venv's python), or .exe wrapper, in that order of preference.
    trial_file = os.path.join(venv_with_twisted.bin, 'trial')
    trial_exe_file = os.path.join(venv_with_twisted.bin, 'trial.exe')
    trial_py_file = os.path.join(venv_with_twisted.bin, 'trial.py')
    if os.path.exists(trial_file):
        command = trial_file
    elif os.path.exists(trial_py_file):
        command = os.path.join(venv_with_twisted.bin, 'python') + " " + trial_py_file
    elif os.path.exists(trial_exe_file):
        command = trial_exe_file
    else:
        raise Exception("twisted trial is not found at " + trial_py_file +
                        " or " + trial_file + " or " + trial_exe_file)
    command += " --reporter=teamcity twisted_trial"
    print("RUN: " + command)
    # Start the process and wait for its output.
    proc = subprocess.Popen(command, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, env=env, shell=True)
    output = "".join([x.decode() for x in proc.stdout.readlines()])
    proc.wait()
    # '#' is replaced so the raw service messages stay visible in CI logs.
    print("OUTPUT:" + output.replace("#", "*"))

    test1 = "twisted_trial.test_case.CalculationTestCase.test_fail (some desc)"
    test2 = "twisted_trial.test_case.CalculationTestCase.test_ok"
    ms = assert_service_messages(output, [
        ServiceMessage('testStarted', {'name': test1}),
        ServiceMessage('testFailed', {'name': test1}),
        ServiceMessage('testFinished', {'name': test1}),
        ServiceMessage('testStarted', {'name': test2}),
        ServiceMessage('testFinished', {'name': test2}),
    ])
    failed_ms = match(ms, ServiceMessage('testFailed', {'name': test1}))
    assert failed_ms.params['details'].index("5 != 4") > 0
def test_fail_with_msg(venv):
    """A failure raised with a custom message keeps that message in details."""
    output = run(venv, 'nose-guinea-pig.py', 'GuineaPig', 'test_fail_with_msg')
    test_name = 'nose-guinea-pig.GuineaPig.test_fail_with_msg'
    messages = assert_service_messages(output, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': test_name}),
        ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name}),
        ServiceMessage('testFinished', {'name': test_name}),
    ])
    failure = match(messages, ServiceMessage('testFailed', {'name': test_name}))
    assert failure.params['details'].find("Bitte keine Werbung") > 0
def test_fail(venv):
    """A plain assertion failure produces a traceback-first details field."""
    output = run(venv, 'nose-guinea-pig.py', 'GuineaPig', 'test_fail')
    test_name = 'nose-guinea-pig.GuineaPig.test_fail'
    messages = assert_service_messages(output, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': test_name}),
        ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name}),
        ServiceMessage('testFinished', {'name': test_name}),
    ])
    failure = match(messages, ServiceMessage('testFailed', {'name': test_name}))
    # Details begin with the traceback and include the failing expression.
    assert failure.params['details'].find("Traceback") == 0
    assert failure.params['details'].find("2 * 2 == 5") > 0
def test_setup_package_error(venv):
    """A failing package-level setup is reported as a failed 'setup' test."""
    output = run(venv, 'setup_package_error')
    test_name = 'namespace2.setup'
    messages = assert_service_messages(output, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': test_name}),
        ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name,
                                      'message': 'error in setup context'}),
        ServiceMessage('testFinished', {'name': test_name}),
    ])
    failure = match(messages, ServiceMessage('testFailed', {'name': test_name}))
    assert failure.params['details'].find("Traceback") == 0
    assert failure.params['details'].find("AssertionError") > 0
def test_fail(venv):
    """self.fail(...) in a unittest test produces a 'Failure' testFailed."""
    output = run_directly(venv, 'fail_test.py')
    test_name = '__main__.TestXXX.runTest'
    messages = assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
        ServiceMessage('testFailed', {'name': test_name, 'message': 'Failure', 'flowId': test_name}),
        ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
    ])
    failure = match(messages, ServiceMessage('testFailed', {'name': test_name}))
    # The calling line of the failure must be quoted in the details.
    assert failure.params['details'].index('fail("Grr")') > 0
def test_setup_class_error(venv):
    """A failing setUpClass surfaces as a synthetic failed test of its own."""
    output = run_directly(venv, 'setup_class_error.py')
    test_name = '__main__.TestXXX.setUpClass'
    messages = assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
        ServiceMessage('testFailed', {'name': test_name, 'message': 'Failure', 'flowId': test_name}),
        ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
    ])
    failure = match(messages, ServiceMessage('testFailed', {'name': test_name}))
    assert failure.params['details'].index("RRR") > 0
def test_teardown_function_error(venv):
    """An error in a function-level teardown fails the owning test."""
    output = run(venv, 'teardown_function_error')
    test_name = 'testa.test'
    messages = assert_service_messages(output, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': test_name}),
        ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name}),
        ServiceMessage('testFinished', {'name': test_name}),
    ])
    failure = match(messages, ServiceMessage('testFailed', {'name': test_name}))
    assert failure.params['details'].find("Traceback") == 0
    assert failure.params['details'].find("AssertionError") > 0
def test_discovery_errors(venv):
    """An import error during discovery is reported as a single failed test."""
    output = run_directly(venv, 'discovery_errors.py')
    messages = assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {}),
        ServiceMessage('testFailed', {'message': 'Error'}),
        ServiceMessage('testFinished', {}),
    ])
    failure = match(messages, ServiceMessage('testFailed', {}))
    assert failure.params['details'].index("ImportError") > 0
def test_teardown_class_error(venv):
    """A failing tearDownClass is reported as its own failed test after
    the class's regular tests have finished."""
    output = run_directly(venv, 'teardown_class_error.py')
    teardown_name = '__main__.TestXXX.tearDownClass'
    messages = assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': '__main__.TestXXX.test_ok'}),
        ServiceMessage('testFinished', {'name': '__main__.TestXXX.test_ok'}),
        ServiceMessage('testStarted', {'name': teardown_name}),
        ServiceMessage('testFailed', {'name': teardown_name, 'message': 'Failure'}),
        ServiceMessage('testFinished', {'name': teardown_name}),
    ])
    failure = match(messages, ServiceMessage('testFailed', {'name': teardown_name}))
    assert failure.params['details'].index("RRR") > 0
def test_teardown_error(venv):
    """An exception in tearDown fails the test itself with message 'Error'."""
    output = run_directly(venv, 'teardown_error.py')
    test_name = '__main__.TestXXX.runTest'
    messages = assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
        ServiceMessage('testFailed', {'name': test_name, 'message': 'Error', 'flowId': test_name}),
        ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
    ])
    failure = match(messages, ServiceMessage('testFailed', {'name': test_name}))
    # Details must mention both the error text and where it happened.
    assert failure.params['details'].index("RRR") > 0
    assert failure.params['details'].index("tearDown") > 0
def test_flask_test_incomplete(venv):
    """Flask-Testing 0.8.1: an incomplete test case fails with NotImplementedError."""
    venv_with_flask = virtual_environments.prepare_virtualenv(
        venv.packages + ["Flask-Testing==0.8.1"])
    output = run(venv_with_flask, 'flask_testing_incomplete')
    test_name = 'test_foo.TestIncompleteFoo.test_add'
    messages = assert_service_messages(output, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
        ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name}),
        ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
    ])
    failure = match(messages, ServiceMessage('testFailed', {'name': test_name}))
    # NOTE(review): the leading 'n' presumably matches the teamcity '|n'
    # newline escape preceding "NotImplementedError" — confirm intent.
    assert failure.params['details'].find("nNotImplementedError") > 0
def test_flask_test_incomplete(venv):
    """flask_testing 0.6.2: an incomplete test case fails with NotImplementedError."""
    venv_with_flask = virtual_environments.prepare_virtualenv(
        venv.packages + ["flask_testing==0.6.2"])
    output = run(venv_with_flask, 'flask_testing_incomplete')
    test_name = 'test_foo.TestIncompleteFoo.test_add'
    messages = assert_service_messages(output, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
        ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name}),
        ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
    ])
    failure = match(messages, ServiceMessage('testFailed', {'name': test_name}))
    # NOTE(review): the leading 'n' presumably matches the teamcity '|n'
    # newline escape preceding "NotImplementedError" — confirm intent.
    assert failure.params['details'].find("nNotImplementedError") > 0
def test_teardown_class_error(venv):
    """nose: a failing class-level teardown is its own failed test after
    the class's regular test has finished."""
    output = run(venv, 'teardown_class_error')
    teardown_name = 'testa.TestXXX.teardown'
    messages = assert_service_messages(output, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': 'testa.TestXXX.runTest'}),
        ServiceMessage('testFinished', {'name': 'testa.TestXXX.runTest'}),
        ServiceMessage('testStarted', {'name': teardown_name}),
        ServiceMessage('testFailed', {'name': teardown_name, 'flowId': teardown_name,
                                      'message': 'error in teardown context'}),
        ServiceMessage('testFinished', {'name': teardown_name}),
    ])
    failure = match(messages, ServiceMessage('testFailed', {'name': teardown_name}))
    assert failure.params['details'].find("Traceback") == 0
    assert failure.params['details'].find("RRR") > 0
def test_teardown_module_error(venv):
    """A failing tearDownModule shows up as a failed synthetic test
    emitted after the module's real tests finish."""
    output = run_directly(venv, 'teardown_module_error.py')
    teardown_name = '__main__.tearDownModule'
    messages = assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': '__main__.TestXXX.test_ok'}),
        ServiceMessage('testFinished', {'name': '__main__.TestXXX.test_ok'}),
        ServiceMessage('testStarted', {'name': teardown_name, 'flowId': teardown_name}),
        ServiceMessage('testFailed', {'name': teardown_name, 'message': 'Failure', 'flowId': teardown_name}),
        ServiceMessage('testFinished', {'name': teardown_name, 'flowId': teardown_name}),
    ])
    failure = match(messages, ServiceMessage('testFailed', {'name': teardown_name}))
    assert failure.params['details'].index("assert 1 == 0") > 0
def test_expected_failure(venv):
    """An @expectedFailure test is reported as ignored with the reason text."""
    if sys.version_info < (2, 7):
        # expectedFailure needs unittest2 on Python < 2.7.
        venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"])
    output = run_directly(venv, 'expected_failure.py')
    test_name = '__main__.TestSkip.test_expected_failure'
    messages = assert_service_messages(output, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
        ServiceMessage('testIgnored', {'name': test_name, 'flowId': test_name}),
        ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
    ])
    ignored = match(messages, ServiceMessage('testIgnored', {'name': test_name}))
    assert ignored.params['message'].find("Expected failure") == 0
    assert ignored.params['message'].find("this should happen unfortunately") > 0