def test_nose_parameterized(venv):
    """Parameterized nose tests are reported with their argument values in the test name."""
    params_venv = virtual_environments.prepare_virtualenv(
        venv.packages + ["nose-parameterized"])
    captured = run(params_venv, 'nose_parameterized')
    names = [
        "test.test(|'1_1|', |'https://facebook_com/share_php?http://foo_com/|')",
        'test.test(None, 3)',
    ]
    expected = [_test_count(venv, 2)]
    for name in names:
        expected.append(ServiceMessage('testStarted', {'name': name, 'flowId': name}))
        expected.append(ServiceMessage('testFinished', {'name': name, 'flowId': name}))
    assert_service_messages(captured, expected)
def test_coverage(venv, coverage_version, pytest_cov_version):
    """Coverage statistics are reported for pinned coverage/pytest-cov combinations."""
    if coverage_version != "==3.7.1" and (3, 1) < sys.version_info < (3, 3):
        pytest.skip("coverage >= 4.0 dropped support for Python 3.2")
    extra = ("coverage" + coverage_version, "pytest-cov" + pytest_cov_version)
    cov_venv = virtual_environments.prepare_virtualenv(venv.packages + extra)
    captured = run(cov_venv, 'coverage_test', options="--cov coverage_test")
    name = "tests.guinea-pigs.pytest.coverage_test.coverage_test.test_covered_func"
    expected = [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFinished', {'name': name}),
    ]
    for key, value in (('CodeCoverageAbsLCovered', '9'),
                       ('CodeCoverageAbsLTotal', '13'),
                       ('CodeCoverageAbsLUncovered', '4')):
        expected.append(ServiceMessage('buildStatisticValue', {'key': key, 'value': value}))
    assert_service_messages(captured, expected)
def test_subtest_failure(venv):
    """A failing subtest opens a Failure block and marks the whole test as failed."""
    if sys.version_info < (3, 4):
        venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"])
    captured = run_directly(venv, 'subtest_failure.py')
    name = '__main__.TestXXX.testSubtestFailure'
    messages = assert_service_messages(captured, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('blockOpened', {'name': '(i=0)', 'flowId': name, 'subTestResult': 'Success'}),
        ServiceMessage('blockClosed', {'name': '(i=0)', 'flowId': name}),
        ServiceMessage('blockOpened', {'name': "(i=|'abc_xxx|')", 'flowId': name, 'subTestResult': 'Failure'}),
        ServiceMessage('testStdErr', {'name': name, 'flowId': name}),
        ServiceMessage('blockClosed', {'name': "(i=|'abc_xxx|')", 'flowId': name}),
        ServiceMessage('testFailed', {'details': "Failed subtests list: (i=|'abc_xxx|')",
                                      'message': 'One or more subtests failed',
                                      'name': name,
                                      'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
    stderr_ms = match(messages, ServiceMessage('testStdErr', {'name': name}))
    for fragment in ("SubTest failure", "AssertionError", "assert 1 == 0"):
        assert stderr_ms.params['out'].find(fragment) >= 0
def test_coverage(venv, coverage_version, pytest_cov_version):
    """Coverage build statistics are emitted for the covered guinea-pig test."""
    cov_packages = ("coverage" + coverage_version, "pytest-cov" + pytest_cov_version)
    cov_venv = virtual_environments.prepare_virtualenv(venv.packages + cov_packages)
    captured = run(cov_venv, 'coverage_test', options="--cov coverage_test")
    name = "tests.guinea-pigs.pytest.coverage_test.coverage_test.test_covered_func"
    stats = [('CodeCoverageAbsLCovered', '9'),
             ('CodeCoverageAbsLTotal', '13'),
             ('CodeCoverageAbsLUncovered', '4')]
    expected = [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFinished', {'name': name}),
    ] + [ServiceMessage('buildStatisticValue', {'key': k, 'value': v}) for k, v in stats]
    assert_service_messages(captured, expected)
def test_pytest_flake8(venv):
    """flake8 violations are reported both as FLAKE8 pseudo-tests and per-violation tests."""
    flake8_venv = virtual_environments.prepare_virtualenv(venv.packages + ("pytest-flake8",))
    file_names = ['./flake8_test1.py', './flake8_test2.py']
    captured = run(flake8_venv, file_names, options="--flake8")
    file_paths = [os.path.realpath(os.path.join('tests', 'guinea-pigs', 'pytest', fn))
                  for fn in file_names]
    expected = [ServiceMessage('testCount', {'count': "4"})]
    for path in file_paths:
        base, _ = os.path.splitext(os.path.basename(path))
        flake8_name = "tests.guinea-pigs.pytest.{}.FLAKE8".format(base)
        ok_name = "tests.guinea-pigs.pytest.{}.test_ok".format(base)
        expected += [
            ServiceMessage('testStarted', {'name': flake8_name}),
            ServiceMessage('testFailed', {'name': flake8_name}),
            ServiceMessage('testFinished', {'name': flake8_name}),
            ServiceMessage('testStarted', {'name': ok_name}),
            ServiceMessage('testFinished', {'name': ok_name}),
        ]
    test_message = "F401 |'sys|' imported but unused"
    for path in file_paths:
        violation_name = "pep8: {}: {}".format(path.replace("\\", "/"), test_message)
        expected += [
            ServiceMessage('testStarted', {'name': violation_name}),
            ServiceMessage('testFailed', {'name': violation_name, 'message': test_message}),
            ServiceMessage('testFinished', {'name': violation_name}),
        ]
    ms = assert_service_messages(captured, expected)
    assert ms[2].params["details"].find(test_message.replace('|', '|||')) > 0
def test_skip(venv):
    """Skipped unittest tests emit testIgnored carrying the (unicode) skip reason."""
    if sys.version_info < (2, 7):
        venv = virtual_environments.prepare_virtualenv(
            list(venv.packages) + ["unittest2==0.5.1"])
    captured = run_directly(venv, 'skip_test.py')
    ok_name = '__main__.TestSkip.test_ok'
    skip_name = '__main__.TestSkip.test_skip_me'
    assert_service_messages(captured, [
        ServiceMessage('testCount', {'count': "2"}),
        ServiceMessage('testStarted', {'name': ok_name}),
        ServiceMessage('testFinished', {'name': ok_name}),
        ServiceMessage('testStarted', {'name': skip_name, 'flowId': skip_name}),
        ServiceMessage('testIgnored', {'name': skip_name,
                                       'message': u'Skipped: testing skipping причина',
                                       'flowId': skip_name}),
        ServiceMessage('testFinished', {'name': skip_name, 'flowId': skip_name}),
    ])
def test_expected_failure(venv):
    """An @expectedFailure test is reported as ignored, with the failure details in the message."""
    if sys.version_info < (2, 7):
        venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"])
    captured = run_directly(venv, 'expected_failure.py')
    name = '__main__.TestSkip.test_expected_failure'
    messages = assert_service_messages(captured, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testIgnored', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
    ignored_ms = match(messages, ServiceMessage('testIgnored', {'name': name}))
    # Message starts with the marker and embeds the original failure text.
    assert ignored_ms.params['message'].find("Expected failure") == 0
    assert ignored_ms.params['message'].find("this should happen unfortunately") > 0
def test_unexpected_success(venv):
    """A test that unexpectedly passes despite @expectedFailure is reported as failed."""
    if sys.version_info < (2, 7):
        venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"])
    captured = run_directly(venv, 'unexpected_success.py')
    name = '__main__.TestSkip.test_unexpected_success'
    details = "Test should not succeed since it|'s marked with @unittest.expectedFailure"
    assert_service_messages(captured, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testFailed', {'name': name, 'details': details, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
def venv(request):
    """Virtual environment fixture for Django 1.11.17; skips unsupported interpreters."""
    py = sys.version_info
    if py < (2, 7) or (3, 0) <= py < (3, 4):
        pytest.skip("Django 1.11.17 requires Python 2.7 or 3.4+")
    if py >= (3, 10):
        pytest.skip("Django 1.11.17 requires Python < 3.10")
    return virtual_environments.prepare_virtualenv([request.param])
def test_twisted_trial(venv):
    """Run twisted trial with the teamcity reporter and verify reported messages.

    Fix: ``list(*venv.packages)`` unpacked the package collection into separate
    positional arguments of ``list()``, which raises TypeError whenever there is
    more than one package; ``list(venv.packages)`` is the intended copy.
    """
    packages = list(venv.packages)
    packages.append("twisted")
    if os.name == 'nt':
        # Windows needs pypiwin32; pin the last release that supported Python 3.4.
        if sys.version_info < (2, 7):
            pytest.skip("pypiwin32 is available since Python 2.7")
        elif sys.version_info[:2] == (3, 4):
            packages.append("pypiwin32==219")
        else:
            packages.append("pypiwin32")
    venv_with_twisted = virtual_environments.prepare_virtualenv(packages)
    env = virtual_environments.get_clean_system_environment()
    env['PYTHONPATH'] = os.path.join(os.getcwd(), "tests", "guinea-pigs", "unittest")

    # Locate the trial entry point; its layout differs across OSes and installs.
    trial_file = os.path.join(venv_with_twisted.bin, 'trial')
    trial_exe_file = os.path.join(venv_with_twisted.bin, 'trial.exe')
    trial_py_file = os.path.join(venv_with_twisted.bin, 'trial.py')
    if os.path.exists(trial_file):
        command = trial_file
    elif os.path.exists(trial_py_file):
        command = os.path.join(venv_with_twisted.bin, 'python') + " " + trial_py_file
    elif os.path.exists(trial_exe_file):
        command = trial_exe_file
    else:
        raise Exception("twisted trial is not found at " + trial_py_file + " or " + trial_file + " or " + trial_exe_file)
    command += " --reporter=teamcity twisted_trial"

    # Start the process and wait for its output.
    print("RUN: " + command)
    proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env, shell=True)
    output = "".join([x.decode() for x in proc.stdout.readlines()])
    proc.wait()
    print("OUTPUT:" + output.replace("#", "*"))

    test1 = "twisted_trial.test_case.CalculationTestCase.test_fail (some desc)"
    test2 = "twisted_trial.test_case.CalculationTestCase.test_ok"
    test3 = "twisted_trial.test_exception.TestFailure.testBadCode"
    ms = assert_service_messages(output, [
        ServiceMessage('testStarted', {'name': test1}),
        ServiceMessage('testFailed', {'name': test1}),
        ServiceMessage('testFinished', {'name': test1}),
        ServiceMessage('testStarted', {'name': test2}),
        ServiceMessage('testFinished', {'name': test2}),
        ServiceMessage('testStarted', {'name': test3}),
        # testBadCode is reported failed twice by trial — presumably error + failure; TODO confirm
        ServiceMessage('testFailed', {'name': test3}),
        ServiceMessage('testFailed', {'name': test3}),
        ServiceMessage('testFinished', {'name': test3}),
    ])
    failed_ms = match(ms, ServiceMessage('testFailed', {'name': test1}))
    assert failed_ms.params['details'].index("5 != 4") > 0
def venv(request):
    """Virtual environment fixture with PyLint of the minimal and maximal supported
    version for a given python version.

    * Python 2.7 is supported up to PyLint 1.9.
    * Python 3.4+ is supported through to the latest (2.1)
    """
    py = sys.version_info
    if py < (2, 7) or (3, ) <= py < (3, 4):
        pytest.skip("PyLint integration requires Python 2.7 or 3.4+")
    return virtual_environments.prepare_virtualenv(request.param)
def test_flask_test_ok(venv):
    """A passing Flask-Testing test produces matching started/finished messages."""
    flask_venv = virtual_environments.prepare_virtualenv(
        venv.packages + ["Flask-Testing==0.8.1"])
    captured = run(flask_venv, 'flask_testing_ok')
    name = 'test_foo.TestFoo.test_add'
    assert_service_messages(captured, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
def test_twisted_trial(venv):
    """Run twisted trial (pinned 15.2.1) with the teamcity reporter and check messages.

    Fix: ``list(*venv.packages)`` unpacked the package collection into separate
    positional arguments of ``list()``, which raises TypeError whenever there is
    more than one package; ``list(venv.packages)`` is the intended copy.
    """
    packages = list(venv.packages)
    packages.append("twisted==15.2.1")
    if os.name == 'nt':
        # Windows needs pypiwin32 for twisted.
        if sys.version_info < (2, 7):
            pytest.skip("pypiwin32 is available since Python 2.7")
        packages.append("pypiwin32==219")
    venv_with_twisted = virtual_environments.prepare_virtualenv(packages)
    env = virtual_environments.get_clean_system_environment()
    env['PYTHONPATH'] = os.path.join(os.getcwd(), "tests", "guinea-pigs", "unittest")

    # Locate the trial entry point; its layout differs across OSes and installs.
    trial_file = os.path.join(venv_with_twisted.bin, 'trial')
    trial_exe_file = os.path.join(venv_with_twisted.bin, 'trial.exe')
    trial_py_file = os.path.join(venv_with_twisted.bin, 'trial.py')
    if os.path.exists(trial_file):
        command = trial_file
    elif os.path.exists(trial_py_file):
        command = os.path.join(venv_with_twisted.bin, 'python') + " " + trial_py_file
    elif os.path.exists(trial_exe_file):
        command = trial_exe_file
    else:
        raise Exception("twisted trial is not found at " + trial_py_file + " or " + trial_file + " or " + trial_exe_file)
    command += " --reporter=teamcity twisted_trial"

    # Start the process and wait for its output.
    print("RUN: " + command)
    proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env, shell=True)
    output = "".join([x.decode() for x in proc.stdout.readlines()])
    proc.wait()
    print("OUTPUT:" + output.replace("#", "*"))

    test1 = "twisted_trial.test_case.CalculationTestCase.test_fail (some desc)"
    test2 = "twisted_trial.test_case.CalculationTestCase.test_ok"
    ms = assert_service_messages(output, [
        ServiceMessage('testStarted', {'name': test1}),
        ServiceMessage('testFailed', {'name': test1}),
        ServiceMessage('testFinished', {'name': test1}),
        ServiceMessage('testStarted', {'name': test2}),
        ServiceMessage('testFinished', {'name': test2}),
    ])
    failed_ms = match(ms, ServiceMessage('testFailed', {'name': test1}))
    assert failed_ms.params['details'].index("5 != 4") > 0
def test_flask_test_ok(venv):
    """A passing flask_testing test produces matching started/finished messages."""
    flask_venv = virtual_environments.prepare_virtualenv(
        venv.packages + ["flask_testing==0.6.2"])
    captured = run(flask_venv, 'flask_testing_ok')
    name = 'test_foo.TestFoo.test_add'
    expected = [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ]
    assert_service_messages(captured, expected)
def test_skip(venv):
    """A skipped test is reported as testIgnored carrying its skip message."""
    if sys.version_info < (2, 7):
        venv = virtual_environments.prepare_virtualenv(
            list(venv.packages) + ["unittest2==0.5.1"])
    captured = run_directly(venv, 'skip_test.py')
    name = '__main__.TestSkip.test_skip_me'
    assert_service_messages(captured, [
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testIgnored', {'name': name,
                                       'message': 'Skipped: testing skipping',
                                       'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
def test_subtest_skip(venv):
    """A skipped subtest yields a Skip block plus a testStdOut carrying the reason."""
    if sys.version_info < (3, 4):
        venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"])
    captured = run_directly(venv, 'subtest_skip.py')
    name = '__main__.TestXXX.testSubtestSkip'
    expected = [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('blockOpened', {'name': '(i=2)', 'flowId': name, 'subTestResult': 'Skip'}),
        ServiceMessage('testStdOut', {'name': name, 'flowId': name,
                                      'out': 'SubTest skipped: skip reason|n'}),
        ServiceMessage('blockClosed', {'name': '(i=2)', 'flowId': name}),
    ]
    for block in ('(i=0)', '(i=1)'):
        expected.append(ServiceMessage('blockOpened', {'name': block, 'flowId': name,
                                                       'subTestResult': 'Success'}))
        expected.append(ServiceMessage('blockClosed', {'name': block, 'flowId': name}))
    expected.append(ServiceMessage('testFinished', {'name': name, 'flowId': name}))
    assert_service_messages(captured, expected)
def test_coverage(venv):
    """pytest-cov 1.8.1 run reports coverage build statistics for the covered test."""
    cov_venv = virtual_environments.prepare_virtualenv(venv.packages + ("pytest-cov==1.8.1",))
    captured = run(cov_venv, 'coverage_test', options="--cov coverage_test")
    name = "tests.guinea-pigs.pytest.coverage_test.coverage_test.test_covered_func"
    expected = [
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFinished', {'name': name}),
    ]
    for key, value in (('CodeCoverageAbsLCovered', '9'),
                       ('CodeCoverageAbsLTotal', '13'),
                       ('CodeCoverageAbsLUncovered', '4')):
        expected.append(ServiceMessage('buildStatisticValue', {'key': key, 'value': value}))
    assert_service_messages(captured, expected)
def venv(request):
    """Virtual environment fixture parameterized over Django versions.

    Skips interpreter/Django combinations that are not supported.
    """
    if sys.version_info < (2, 6):
        pytest.skip("Django (all versions) requires Python 2.6+")
    if sys.version_info < (2, 7) and request.param != "django==1.6":
        pytest.skip("Django 1.7+ requires Python 2.7+")
    if sys.version_info >= (3, 5) and request.param in ("django==1.6", "django==1.7"):
        pytest.skip("Django supports Python 3.5+ since 1.8.6")
    if request.param == "django":
        # Unpinned Django means 1.9+.
        major = sys.version_info[0]
        if major == 2 and sys.version_info < (2, 7):
            pytest.skip("Django 1.9+ requires Python 2.7+")
        if major == 3 and sys.version_info < (3, 4):
            pytest.skip("Django 1.9+ requires Python 3.4+")
    return virtual_environments.prepare_virtualenv([request.param])
def test_nose_parameterized(venv):
    """nose-parameterized test names include escaped argument values."""
    params_venv = virtual_environments.prepare_virtualenv(venv.packages + ["nose-parameterized"])
    captured = run(params_venv, 'nose_parameterized')
    name1 = "test.test(|'1_1|', |'https://facebook_com/share_php?http://foo_com/|')"
    name2 = 'test.test(None, 3)'
    expected = [_test_count(venv, 2)]
    for name in (name1, name2):
        expected += [
            ServiceMessage('testStarted', {'name': name, 'flowId': name}),
            ServiceMessage('testFinished', {'name': name, 'flowId': name}),
        ]
    assert_service_messages(captured, expected)
def test_flask_test_incomplete(venv):
    """An incomplete Flask-Testing test fails with NotImplementedError in the details."""
    flask_venv = virtual_environments.prepare_virtualenv(
        venv.packages + ["Flask-Testing==0.8.1"])
    captured = run(flask_venv, 'flask_testing_incomplete')
    name = 'test_foo.TestIncompleteFoo.test_add'
    messages = assert_service_messages(captured, [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testFailed', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
    failed_ms = match(messages, ServiceMessage('testFailed', {'name': name}))
    assert failed_ms.params['details'].find("nNotImplementedError") > 0
def test_flask_test_incomplete(venv):
    """An incomplete flask_testing test fails with NotImplementedError in the details."""
    flask_venv = virtual_environments.prepare_virtualenv(
        venv.packages + ["flask_testing==0.6.2"])
    captured = run(flask_venv, 'flask_testing_incomplete')
    name = 'test_foo.TestIncompleteFoo.test_add'
    expected = [
        _test_count(venv, 1),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testFailed', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ]
    messages = assert_service_messages(captured, expected)
    failed_ms = match(messages, ServiceMessage('testFailed', {'name': name}))
    assert failed_ms.params['details'].find("nNotImplementedError") > 0
def test_unexpected_success(venv):
    """Unexpected success of an @expectedFailure test is reported as a failure."""
    if sys.version_info < (2, 7):
        venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"])
    captured = run_directly(venv, 'unexpected_success.py')
    name = '__main__.TestSkip.test_unexpected_success'
    failure_details = "Test should not succeed since it|'s marked with @unittest.expectedFailure"
    assert_service_messages(captured, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testFailed', {'name': name,
                                      'details': failure_details,
                                      'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ])
def test_nose_parameterized(venv):
    """nose-parameterized tests under pytest get indexed test names."""
    params_venv = virtual_environments.prepare_virtualenv(
        venv.packages + ("nose-parameterized", ))
    captured = run(params_venv, 'nose_parameterized_test.py')
    names = [
        'tests.guinea-pigs.pytest.nose_parameterized_test.test(0)',
        "tests.guinea-pigs.pytest.nose_parameterized_test.test(1)",
    ]
    expected = [ServiceMessage('testCount', {'count': "2"})]
    for name in names:
        expected.append(ServiceMessage('testStarted', {'name': name}))
        expected.append(ServiceMessage('testFinished', {'name': name}))
    assert_service_messages(captured, expected)
def test_subtest_ok(venv):
    """Two successful subtests produce paired blockOpened/blockClosed messages."""
    if sys.version_info < (3, 4):
        venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"])
    captured = run_directly(venv, 'subtest_ok.py')
    name = '__main__.TestXXX.testSubtestSuccess'
    expected = [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
    ]
    for block in ('(i=0)', '(i=1)'):
        expected.append(ServiceMessage('blockOpened', {'name': block, 'flowId': name,
                                                       'subTestResult': 'Success'}))
        expected.append(ServiceMessage('blockClosed', {'name': block, 'flowId': name}))
    expected.append(ServiceMessage('testFinished', {'name': name, 'flowId': name}))
    assert_service_messages(captured, expected)
def test_expected_failure(venv):
    """@expectedFailure tests are reported ignored; the message embeds the failure text."""
    if sys.version_info < (2, 7):
        venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"])
    captured = run_directly(venv, 'expected_failure.py')
    name = '__main__.TestSkip.test_expected_failure'
    expected = [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('testIgnored', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'name': name, 'flowId': name}),
    ]
    messages = assert_service_messages(captured, expected)
    ignored_ms = match(messages, ServiceMessage('testIgnored', {'name': name}))
    assert ignored_ms.params['message'].find("Expected failure") == 0
    assert ignored_ms.params['message'].find("this should happen unfortunately") > 0
def test_pytest_pep8(venv):
    """pytest-pep8 violations appear as a failed PEP8 pseudo-test."""
    pep8_venv = virtual_environments.prepare_virtualenv(venv.packages + ("pytest-pep8", ))
    captured = run(pep8_venv, 'pep8_test.py', options="--pep8")
    pep8_name = "tests.guinea-pigs.pytest.pep8_test.PEP8"
    ok_name = "tests.guinea-pigs.pytest.pep8_test.test_ok"
    messages = assert_service_messages(captured, [
        ServiceMessage('testStarted', {'name': pep8_name}),
        ServiceMessage('testFailed', {'name': pep8_name}),
        ServiceMessage('testFinished', {'name': pep8_name}),
        ServiceMessage('testStarted', {'name': ok_name}),
        ServiceMessage('testFinished', {'name': ok_name}),
    ])
    assert messages[1].params["details"].find("E302 expected 2 blank lines, found 1") > 0
def test_pytest_pylint(venv):
    """pytest-pylint violations are reported as a failed Pylint pseudo-test."""
    pylint_venv = virtual_environments.prepare_virtualenv(venv.packages + ("pytest-pylint", ))
    captured = run(pylint_venv, 'pylint_test.py', options="--pylint")
    lint_name = "tests.guinea-pigs.pytest.pylint_test.Pylint"
    ok_name = "tests.guinea-pigs.pytest.pylint_test.test_ok"
    messages = assert_service_messages(captured, [
        ServiceMessage('testCount', {'count': "2"}),
        ServiceMessage('testStarted', {'name': lint_name}),
        ServiceMessage('testFailed', {'name': lint_name}),
        ServiceMessage('testFinished', {'name': lint_name}),
        ServiceMessage('testStarted', {'name': ok_name}),
        ServiceMessage('testFinished', {'name': ok_name}),
    ])
    assert messages[2].params["details"].find("Unused import sys") > 0
def test_setup_class_skip(venv):
    """Skipping inside setUpClass reports both classes' setUpClass as ignored."""
    if sys.version_info < (2, 7):
        venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"])
    captured = run_directly(venv, 'setup_class_skip.py')
    expected = [ServiceMessage('testCount', {'count': "7"})]
    for name in ('__main__.TestSimple.setUpClass', '__main__.TestSubSimple.setUpClass'):
        expected += [
            ServiceMessage('testStarted', {'name': name, 'flowId': name}),
            ServiceMessage('testIgnored', {'name': name, 'flowId': name,
                                           'message': "Skipped: Skip whole Case"}),
            ServiceMessage('testFinished', {'name': name, 'flowId': name}),
        ]
    assert_service_messages(captured, expected)
def test_coverage(venv):
    """nose coverage run emits test messages and writes an XML coverage report.

    Fix: the report file was opened without a ``with`` block, so the handle
    leaked if ``read()`` or the final assert raised; a context manager
    guarantees it is closed.
    """
    venv_with_coverage = virtual_environments.prepare_virtualenv(
        venv.packages + ["coverage==3.7.1"])
    coverage_file = os.path.join(virtual_environments.get_vroot(), "coverage-temp.xml")
    output = run(venv_with_coverage, 'coverage',
                 options="--with-coverage --cover-erase --cover-tests --cover-xml --cover-xml-file=\"" + coverage_file + "\"")
    assert_service_messages(output, [
        ServiceMessage('testStarted', {'name': 'testa.test_mycode'}),
        ServiceMessage('testFinished', {'name': 'testa.test_mycode'}),
    ])
    # The XML report should mark line 2 of the covered module as hit once.
    with open(coverage_file, "rb") as f:
        content = str(f.read())
    assert content.find('<line hits="1" number="2"/>') > 0
def test_pytest_pylint(venv):
    """pytest-pylint (<0.14.0) violations appear as a failed Pylint pseudo-test."""
    pylint_venv = virtual_environments.prepare_virtualenv(
        venv.packages + ("pytest-pylint<0.14.0",))
    captured = run(pylint_venv, 'pylint_test.py', options="--pylint")
    lint_name = "tests.guinea-pigs.pytest.pylint_test.Pylint"
    ok_name = "tests.guinea-pigs.pytest.pylint_test.test_ok"
    expected = [
        ServiceMessage('testCount', {'count': "2"}),
        ServiceMessage('testStarted', {'name': lint_name}),
        ServiceMessage('testFailed', {'name': lint_name}),
        ServiceMessage('testFinished', {'name': lint_name}),
        ServiceMessage('testStarted', {'name': ok_name}),
        ServiceMessage('testFinished', {'name': ok_name}),
    ]
    messages = assert_service_messages(captured, expected)
    assert messages[2].params["details"].find("Unused import sys") > 0
def test_pytest_pep8(venv):
    """pytest-pep8 violations are surfaced in a failed PEP8 pseudo-test's details."""
    pep8_venv = virtual_environments.prepare_virtualenv(venv.packages + ("pytest-pep8",))
    captured = run(pep8_venv, "pep8_test.py", options="--pep8")
    pep8_name = "tests.guinea-pigs.pytest.pep8_test.PEP8"
    ok_name = "tests.guinea-pigs.pytest.pep8_test.test_ok"
    expected = [
        ServiceMessage("testStarted", {"name": pep8_name}),
        ServiceMessage("testFailed", {"name": pep8_name}),
        ServiceMessage("testFinished", {"name": pep8_name}),
        ServiceMessage("testStarted", {"name": ok_name}),
        ServiceMessage("testFinished", {"name": ok_name}),
    ]
    messages = assert_service_messages(captured, expected)
    assert messages[1].params["details"].find("E302 expected 2 blank lines, found 1") > 0
def test_subtest_named(venv):
    """Named subtests include the test description (escaped) in the reported name."""
    if sys.version_info < (3, 4):
        venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"])
    captured = run_directly(venv, 'subtest_named.py')
    test_id = '__main__.NumbersTest.test_even'
    name = test_id + " (Test that numbers between 0 and 5 are all even_)"
    assert_service_messages(captured, [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
        ServiceMessage('blockOpened', {'name': '(i=0)', 'flowId': name, 'subTestResult': 'Success'}),
        ServiceMessage('blockClosed', {'name': '(i=0)', 'flowId': name}),
        ServiceMessage('blockOpened', {'name': '(i=1)', 'flowId': name, 'subTestResult': 'Failure'}),
        ServiceMessage('testStdErr', {'name': name, 'flowId': name}),
        ServiceMessage('blockClosed', {'name': '(i=1)', 'flowId': name}),
        ServiceMessage('testFailed', {'name': name, 'flowId': name}),
        ServiceMessage('testFinished', {'flowId': name}),
    ])
def test_subtest_nested(venv):
    """Nested subtests are reported as sibling blocks, not nested ones."""
    if sys.version_info < (3, 4):
        venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"])
    captured = run_directly(venv, 'subtest_nested.py')
    name = '__main__.TestXXX.testNested'
    # Nested blocks support requires strict notifications about starting and stopping subtests
    # which is not yet supported, see https://mail.python.org/pipermail/python-dev/2016-June/145402.html
    expected = [
        ServiceMessage('testCount', {'count': "1"}),
        ServiceMessage('testStarted', {'name': name, 'flowId': name}),
    ]
    for block in ('(i=2)', '(i=1)'):
        expected.append(ServiceMessage('blockOpened', {'name': block, 'flowId': name,
                                                       'subTestResult': 'Success'}))
        expected.append(ServiceMessage('blockClosed', {'name': block, 'flowId': name}))
    expected.append(ServiceMessage('testFinished', {'name': name, 'flowId': name}))
    assert_service_messages(captured, expected)
def test_coverage(venv, coverage_version, pytest_cov_version):
    """Coverage statistics for pinned coverage/pytest-cov combinations (no testCount)."""
    if coverage_version != "==3.7.1" and (3, 1) < sys.version_info < (3, 3):
        pytest.skip("coverage >= 4.0 dropped support for Python 3.2")
    cov_venv = virtual_environments.prepare_virtualenv(
        venv.packages + ("coverage" + coverage_version, "pytest-cov" + pytest_cov_version))
    captured = run(cov_venv, 'coverage_test', options="--cov coverage_test")
    name = "tests.guinea-pigs.pytest.coverage_test.coverage_test.test_covered_func"
    expected = [
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFinished', {'name': name}),
    ]
    for key, value in (('CodeCoverageAbsLCovered', '9'),
                       ('CodeCoverageAbsLTotal', '13'),
                       ('CodeCoverageAbsLUncovered', '4')):
        expected.append(ServiceMessage('buildStatisticValue', {'key': key, 'value': value}))
    assert_service_messages(captured, expected)
def test_pytest_pep8(venv):
    """pytest-pep8 report check; only meaningful for pytest 4.x/5.x environments."""
    if 'pytest>=4,<5' not in venv.packages and 'pytest>=5,<6' not in venv.packages:
        pytest.skip("pytest-pep8 not working for pytest>=6")
    pep8_venv = virtual_environments.prepare_virtualenv(venv.packages + ("pytest-pep8", ))
    captured = run(pep8_venv, 'pep8_test.py', options="--pep8")
    pep8_name = "tests.guinea-pigs.pytest.pep8_test.PEP8"
    ok_name = "tests.guinea-pigs.pytest.pep8_test.test_ok"
    messages = assert_service_messages(captured, [
        ServiceMessage('testCount', {'count': "2"}),
        ServiceMessage('testStarted', {'name': pep8_name}),
        ServiceMessage('testFailed', {'name': pep8_name}),
        ServiceMessage('testFinished', {'name': pep8_name}),
        ServiceMessage('testStarted', {'name': ok_name}),
        ServiceMessage('testFinished', {'name': ok_name}),
    ])
    assert messages[2].params["details"].find("E302 expected 2 blank lines, found 1") > 0
def test_pytest_pylint(venv):
    """pytest-pylint reports stderr (missing config) and a failed Pylint pseudo-test."""
    pylint_venv = virtual_environments.prepare_virtualenv(venv.packages + ("pytest-pylint",))
    captured = run(pylint_venv, "pylint_test.py", options="--pylint")
    lint_name = "tests.guinea-pigs.pytest.pylint_test.Pylint"
    ok_name = "tests.guinea-pigs.pytest.pylint_test.test_ok"
    messages = assert_service_messages(captured, [
        ServiceMessage("testStarted", {"name": lint_name}),
        ServiceMessage("testStdErr", {"name": lint_name}),
        ServiceMessage("testFailed", {"name": lint_name}),
        ServiceMessage("testFinished", {"name": lint_name}),
        ServiceMessage("testStarted", {"name": ok_name}),
        ServiceMessage("testFinished", {"name": ok_name}),
    ])
    assert messages[1].params["out"].find("No config file found") >= 0
    assert messages[2].params["details"].find("Unused import sys") > 0
def test_coverage(venv, coverage_version, pytest_cov_version):
    """Coverage build statistics for pinned coverage/pytest-cov versions."""
    if coverage_version != "==3.7.1" and (3, 1) < sys.version_info < (3, 3):
        pytest.skip("coverage >= 4.0 dropped support for Python 3.2")
    extra = ("coverage" + coverage_version, "pytest-cov" + pytest_cov_version)
    cov_venv = virtual_environments.prepare_virtualenv(venv.packages + extra)
    captured = run(cov_venv, "coverage_test", options="--cov coverage_test")
    name = "tests.guinea-pigs.pytest.coverage_test.coverage_test.test_covered_func"
    stats = [("CodeCoverageAbsLCovered", "9"),
             ("CodeCoverageAbsLTotal", "13"),
             ("CodeCoverageAbsLUncovered", "4")]
    expected = [
        ServiceMessage("testStarted", {"name": name}),
        ServiceMessage("testFinished", {"name": name}),
    ] + [ServiceMessage("buildStatisticValue", {"key": k, "value": v}) for k, v in stats]
    assert_service_messages(captured, expected)
def test_pytest_flake8(venv):
    """flake8 violations are reported as FLAKE8 pseudo-tests plus per-violation tests."""
    flake8_venv = virtual_environments.prepare_virtualenv(venv.packages + ("pytest-flake8", ))
    file_names = ['./flake8_test1.py', './flake8_test2.py']
    captured = run(flake8_venv, file_names, options="--flake8")
    file_paths = [
        os.path.realpath(os.path.join('tests', 'guinea-pigs', 'pytest', fn))
        for fn in file_names
    ]
    expected = [ServiceMessage('testCount', {'count': "4"})]
    for path in file_paths:
        base = os.path.splitext(os.path.basename(path))[0]
        flake8_name = "tests.guinea-pigs.pytest.{}.FLAKE8".format(base)
        ok_name = "tests.guinea-pigs.pytest.{}.test_ok".format(base)
        expected.extend([
            ServiceMessage('testStarted', {'name': flake8_name}),
            ServiceMessage('testFailed', {'name': flake8_name}),
            ServiceMessage('testFinished', {'name': flake8_name}),
            ServiceMessage('testStarted', {'name': ok_name}),
            ServiceMessage('testFinished', {'name': ok_name}),
        ])
    test_message = "F401 |'sys|' imported but unused"
    for path in file_paths:
        violation_name = "pep8: {}: {}".format(path.replace("\\", "/"), test_message)
        expected.extend([
            ServiceMessage('testStarted', {'name': violation_name}),
            ServiceMessage('testFailed', {'name': violation_name, 'message': test_message}),
            ServiceMessage('testFinished', {'name': violation_name}),
        ])
    messages = assert_service_messages(captured, expected)
    assert messages[2].params["details"].find(test_message.replace('|', '|||')) > 0
def test_coverage(venv):
    """pytest-cov 1.8.1 run reports CodeCoverageLines* build statistics."""
    cov_venv = virtual_environments.prepare_virtualenv(venv.packages + ["pytest-cov==1.8.1"])
    captured = run(cov_venv, 'coverage_test', options="--cov coverage_test")
    name = "tests.guinea-pigs.pytest.coverage_test.coverage_test.test_covered_func"
    expected = [
        ServiceMessage('testStarted', {'name': name}),
        ServiceMessage('testFinished', {'name': name}),
    ]
    for key, value in (('CodeCoverageLinesCovered', '9'),
                       ('CodeCoverageLinesTotal', '13'),
                       ('CodeCoverageLinesUncovered', '4')):
        expected.append(ServiceMessage('buildStatisticValue', {'key': key, 'value': value}))
    assert_service_messages(captured, expected)
def venv(request):
    """Prepare a plain virtual environment for unittest runs; no extra packages.

    :rtype : virtual_environments.VirtualEnvDescription
    """
    return virtual_environments.prepare_virtualenv()
def venv_flake8_v3(request):
    """Virtual environment fixture built from the requested flake8 3.x package."""
    return virtual_environments.prepare_virtualenv([request.param])
def venv(request):
    """Virtual environment fixture for Django >2.0; skips interpreters below 3.5."""
    if sys.version_info < (3, 5):
        pytest.skip("Django >2.0 requires Python 3.5+")
    return virtual_environments.prepare_virtualenv([request.param])
def venv(request):
    """Prepare a virtual environment for nose with the requested package.

    :rtype : virtual_environments.VirtualEnvDescription
    """
    return virtual_environments.prepare_virtualenv([request.param])
def venv(request):
    """Virtual environment fixture for Django 1.11.17 (Python 2.7 or 3.4+ only)."""
    py = sys.version_info
    if py < (2, 7) or (3, 0) <= py < (3, 4):
        pytest.skip("Django 1.11.17 requires Python 2.7 or 3.4+")
    return virtual_environments.prepare_virtualenv([request.param])
def venv(request):
    """Virtual environment fixture; Django 1.7+ needs at least Python 2.7."""
    if sys.version_info < (2, 7) and request.param != "1.6":
        pytest.skip("Django 1.7+ requires Python 2.7+")
    return virtual_environments.prepare_virtualenv([request.param])
def venv(request):
    """Virtual environment fixture built from the requested package set."""
    return virtual_environments.prepare_virtualenv(request.param)