def test_env_vars_set(testdir,
                      undecorated_test_function,
                      testsuite_attribs_exp,
                      simple_test_config):
    """Verify that pytest accepts our fixture with all relevant environment
    variables set.
    """

    # Setup
    testdir.makepyfile(undecorated_test_function.format(test_name='test_pass'))

    for env in ASC_TEST_ENV_VARS:
        os.environ[env] = env

    args = ["--pytest-zigzag-config", simple_test_config]
    junit_xml = run_and_parse(testdir, 0, args)[0]

    # Test
    assert is_sub_dict(testsuite_attribs_exp, junit_xml.testsuite_attribs)

    expected = {env: env for env in ASC_TEST_ENV_VARS}
    props = junit_xml.testsuite_props

    # Surface any key mismatches first so failures are easier to diagnose.
    missing_from_props = [key for key in expected if key not in props]
    unexpected_in_props = [key for key in props if key not in expected]

    assert not missing_from_props
    assert not unexpected_in_props
    assert junit_xml.testsuite_props == expected


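# NOTE: 'run_and_parse', 'is_sub_dict', and 'merge_dicts' used throughout this
# module are imported from the project's shared test helpers (not shown here).
# The function below is only an illustrative sketch of the behavior these
# tests assume 'is_sub_dict' to have; it is NOT the actual helper.
def _is_sub_dict_example(small, big):
    """Return True if every key/value pair in 'small' also appears in 'big'."""
    return all(key in big and big[key] == value
               for key, value in small.items())

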
def test_improperly_decorated_class_without_steps(
        testdir, improperly_decorated_test_method, simple_test_config):
    """Verify that test methods in a class that is NOT marked with
    'test_case_with_steps' are not treated as test steps and report their own
    'test_id' and 'jira' marks.
    """

    # Expect
    test_name_exp = 'test_method'
    test_id_exp = 'test_case_class_id'
    jira_id_exp = 'ASC-123'

    # Setup
    testdir.makepyfile(
        improperly_decorated_test_method.format(test_name=test_name_exp,
                                                test_id=test_id_exp,
                                                jira_id=jira_id_exp))

    args = ["--pytest-zigzag-config", simple_test_config]
    junit_xml = run_and_parse(testdir, 0, args)[0]

    # Test
    assert junit_xml.get_testcase_properties(
        test_name_exp)['test_step'] == 'false'
    assert junit_xml.get_testcase_properties(
        test_name_exp)['test_id'] == test_id_exp
    assert junit_xml.get_testcase_properties(
        test_name_exp)['jira'] == jira_id_exp


def test_custom_config_value(testdir,
                             single_decorated_test_function,
                             tmpdir_factory):
    """Ensure that a value set in the config will end up as a property in xml."""

    # Expect
    mark_type_exp = 'test_id'
    test_id_exp = '123e4567-e89b-12d3-a456-426655440000'
    test_name_exp = 'test_uuid'

    # Setup
    testdir.makepyfile(
        single_decorated_test_function.format(mark_type=mark_type_exp,
                                              mark_arg=test_id_exp,
                                              test_name=test_name_exp))

    config_path = tmpdir_factory.mktemp('data').join('config.json').strpath

    config = \
        """
        {
          "pytest_zigzag_env_vars": {
            "BUILD_URL": "foo",
            "BUILD_NUMBER": null
          }
        }
        """  # noqa

    with open(config_path, 'w') as f:
        f.write(config)

    args = ["--pytest-zigzag-config", config_path]
    junit_xml = run_and_parse(testdir, 0, args)[0]

    # Test
    assert junit_xml.testsuite_props['BUILD_URL'] == 'foo'


def test_multiple_jira_references_mk8s(testdir, mocker, mk8s_test_config):
    """Verify that 'get_xsd' returns an XSD stream when a testcase is
    decorated with a Jira mark with multiple arguments for the 'mk8s' CI
    environment.
    """

    # Mock
    zz = mocker.MagicMock()
    xmlpf = XmlParsingFacade(zz)

    # Setup
    testdir.makepyfile("""
    import pytest
    @pytest.mark.jira('ASC-123', 'ASC-124')
    @pytest.mark.test_id('123e4567-e89b-12d3-a456-426655440000')
    def test_xsd():
        pass
    """)

    args = ["--pytest-zigzag-config", mk8s_test_config]
    xml_doc = run_and_parse(testdir, 0, args)[0].xml_doc
    # noinspection PyProtectedMember
    xmlschema = etree.XMLSchema(etree.parse(xmlpf._get_xsd()))

    # Test
    xmlschema.assertValid(xml_doc)


def test_required_parameters_are_required(testdir,
                                          single_decorated_test_function,
                                          tmpdir_factory):
    """Test that a config missing required params will fail."""

    # Expect
    mark_type_exp = 'test_id'
    test_id_exp = '123e4567-e89b-12d3-a456-426655440000'
    test_name_exp = 'test_uuid'

    # Setup
    testdir.makepyfile(
        single_decorated_test_function.format(mark_type=mark_type_exp,
                                              mark_arg=test_id_exp,
                                              test_name=test_name_exp))

    config_path = tmpdir_factory.mktemp('data').join('config.json').strpath

    config = \
        """
        {
          "not_pytest_zigzag_env_vars": {
            "BUILD_NUMBER": null
          }
        }
        """  # noqa

    with open(config_path, 'w') as f:
        f.write(config)

    args = ["--pytest-zigzag-config", config_path]
    result = run_and_parse(testdir, 1, args)

    # Test
    assert "does not comply with schema:" in result[1].stderr.lines[0]
    assert "'pytest_zigzag_env_vars' is a required property" \
        in result[1].stderr.lines[0]


def test_multiple_marks_with_multiple_arguments(testdir):
    """Verify that multiple property elements are present when a test is
    decorated with multiple marks, each containing multiple arguments.
    """

    # Expect
    jira_ids_exp = {
        'jira_id0': 'ASC-123',
        'jira_id1': 'ASC-124',
        'jira_id2': 'ASC-125',
        'jira_id3': 'ASC-126'
    }
    test_name_exp = 'test_jira'

    # Setup
    testdir.makepyfile("""
    import pytest
    @pytest.mark.jira('{jira_id2}', '{jira_id3}')
    @pytest.mark.jira('{jira_id0}', '{jira_id1}')
    def {test_name}():
        pass
    """.format(test_name=test_name_exp, **jira_ids_exp))

    junit_xml = run_and_parse(testdir)

    # Test (Note: tox and py.test disagree about the ordering of marks, so we
    # sort the values before comparing.)
    assert sorted(junit_xml.get_testcase_property(
        test_name_exp, 'jira')) == sorted(jira_ids_exp.values())


def test_class_with_steps(testdir,
                          properly_decorated_test_class_with_steps,
                          simple_test_config):
    """Verify that decorating a pytest class with the 'test_case_with_steps'
    mark will declare all test functions as steps which inherit the parent
    class's marks.
    """

    # Expect
    test_steps = {
        'test_step_one': 'test_step_one',
        'test_step_two': 'test_step_two',
        'test_step_three': 'test_step_three'
    }

    tc_props_exps = {
        'test_name': 'TestCaseWithSteps',
        'test_id': 'test_case_class_id',
        'jira_id': 'ASC-123'
    }

    # Setup
    testdir.makepyfile(
        properly_decorated_test_class_with_steps.format(
            **merge_dicts(test_steps, tc_props_exps)))

    args = ["--pytest-zigzag-config", simple_test_config]
    junit_xml = run_and_parse(testdir, 0, args)[0]

    # Test
    for test_step in test_steps.values():
        assert junit_xml.get_testcase_properties(
            test_step)['test_step'] == 'true'
        assert junit_xml.get_testcase_properties(
            test_step)['test_id'] == tc_props_exps['test_id']
        assert junit_xml.get_testcase_properties(
            test_step)['jira'] == tc_props_exps['jira_id']


def test_missing_required_marks(testdir, undecorated_test_function):
    """Verify that XSD will enforce the presence of 'test_id' and 'jira_id'
    properties for test cases.
    """

    # Setup
    testdir.makepyfile(
        undecorated_test_function.format(test_name='test_typo_global'))

    xml_doc = run_and_parse(testdir).xml_doc
    xmlschema = etree.XMLSchema(etree.parse(get_xsd()))

    # Test
    assert xmlschema.validate(xml_doc) is False


def test_no_env_vars_set(testdir, undecorated_test_function,
                         testsuite_attribs_exp):
    """Verify that pytest accepts our fixture without setting any environment
    variables.
    """

    # Setup
    testdir.makepyfile(undecorated_test_function.format(test_name='test_pass'))

    junit_xml = run_and_parse(testdir)

    # Test
    assert is_sub_dict(testsuite_attribs_exp, junit_xml.testsuite_attribs)

    for env_var in ASC_ENV_VARS:
        assert junit_xml.testsuite_props[env_var] == 'Unknown'


def test_missing_uuid_mark(testdir, single_decorated_test_function):
    """Verify that XSD will enforce the presence of 'test_id' property for
    test cases.
    """

    # Setup
    testdir.makepyfile(
        single_decorated_test_function.format(test_name='test_missing_uuid',
                                              mark_type='jira',
                                              mark_arg='ASC-123'))

    xml_doc = run_and_parse(testdir).xml_doc
    xmlschema = etree.XMLSchema(etree.parse(get_xsd()))

    # Test
    assert xmlschema.validate(xml_doc) is False


def test_no_env_vars_set(testdir,
                         undecorated_test_function,
                         testsuite_attribs_exp,
                         simple_test_config):
    """Verify that pytest accepts our fixture without setting any environment
    variables.
    """

    # Setup
    testdir.makepyfile(undecorated_test_function.format(test_name='test_pass'))

    args = ["--pytest-zigzag-config", simple_test_config]
    junit_xml = run_and_parse(testdir, 0, args)[0]

    # Test
    assert is_sub_dict(testsuite_attribs_exp, junit_xml.testsuite_attribs)

    for env_var in ASC_TEST_ENV_VARS:
        assert junit_xml.testsuite_props[env_var] in ('None', '[]')


def test_end_time(testdir, sleepy_test_function):
    """Verify that 'end_time' property element is present."""

    # Expect
    test_name_exp = 'test_i_can_has_end_time'

    # Setup
    testdir.makepyfile(
        sleepy_test_function.format(test_name=test_name_exp, seconds='1'))

    junit_xml = run_and_parse(testdir)

    # Test
    assert 'end_time' in junit_xml.get_testcase_properties(
        test_name_exp).keys()


def test_happy_path_asc(testdir, properly_decorated_test_function):
    """Verify that 'get_xsd' returns an XSD stream that can be used to
    validate JUnitXML.
    """

    # Setup
    testdir.makepyfile(
        properly_decorated_test_function.format(
            test_name='test_happy_path',
            test_id='123e4567-e89b-12d3-a456-426655440000',
            jira_id='ASC-123'))

    xml_doc = run_and_parse(testdir).xml_doc
    xmlschema = etree.XMLSchema(etree.parse(get_xsd()))

    # Test
    xmlschema.assertValid(xml_doc)


def test_end_time(testdir, sleepy_test_function, simple_test_config):
    """Verify that 'end_time' property element is present."""

    # Expect
    test_name_exp = 'test_i_can_has_end_time'

    # Setup
    testdir.makepyfile(
        sleepy_test_function.format(test_name=test_name_exp, seconds='1'))

    args = ["--pytest-zigzag-config", simple_test_config]
    junit_xml = run_and_parse(testdir, 0, args)[0]

    # Test
    assert 'end_time' in junit_xml.get_testcase_properties(
        test_name_exp).keys()


def test_env_vars_set(testdir, undecorated_test_function,
                      testsuite_attribs_exp):
    """Verify that pytest accepts our fixture with all relevant environment
    variables set.
    """

    # Setup
    testdir.makepyfile(undecorated_test_function.format(test_name='test_pass'))

    for env in ASC_ENV_VARS:
        os.environ[env] = env

    junit_xml = run_and_parse(testdir)

    # Test
    assert is_sub_dict(testsuite_attribs_exp, junit_xml.testsuite_attribs)

    expected = {env: env for env in ASC_ENV_VARS}
    # This is not supplied by the environment.
    expected['ci-environment'] = 'asc'

    assert junit_xml.testsuite_props == expected


def test_default_config(testdir, single_decorated_test_function):
    """Test that a default config is present if none is configured."""

    # Expect
    mark_type_exp = 'test_id'
    test_id_exp = '123e4567-e89b-12d3-a456-426655440000'
    test_name_exp = 'test_uuid'

    # Setup
    testdir.makepyfile(
        single_decorated_test_function.format(mark_type=mark_type_exp,
                                              mark_arg=test_id_exp,
                                              test_name=test_name_exp))

    args = []  # no config is provided
    junit_xml = run_and_parse(testdir, 0, args)[0]

    # Test
    # If there was no config we would not have a 'BUILD_NUMBER' property.
    assert 'BUILD_NUMBER' in junit_xml.testsuite_props


def test_jira_mark_present(testdir, single_decorated_test_function):
    """Verify that 'jira' property element is present when a test is
    decorated with a Jira mark.
    """

    # Expect
    mark_type_exp = 'jira'
    jira_id_exp = 'ASC-123'
    test_name_exp = 'test_jira'

    # Setup
    testdir.makepyfile(
        single_decorated_test_function.format(mark_type=mark_type_exp,
                                              mark_arg=jira_id_exp,
                                              test_name=test_name_exp))

    junit_xml = run_and_parse(testdir)

    # Test
    assert junit_xml.get_testcase_properties(
        test_name_exp)[mark_type_exp] == jira_id_exp


def test_missing_marks(testdir, undecorated_test_function):
    """Verify that 'test_id' and 'jira' property elements are absent when a
    test is NOT decorated with required marks.
    """

    # Expect
    test_name_exp = 'test_no_marks'

    # Setup
    testdir.makepyfile(
        undecorated_test_function.format(test_name=test_name_exp))

    junit_xml = run_and_parse(testdir)

    # Test
    assert 'test_id' not in junit_xml.get_testcase_properties(
        test_name_exp).keys()
    assert 'jira' not in junit_xml.get_testcase_properties(
        test_name_exp).keys()


def test_typo_property(testdir, properly_decorated_test_function):
    """Verify that XSD will enforce that only certain property names are
    allowed for the testcase.
    """

    # Setup
    testdir.makepyfile(
        properly_decorated_test_function.format(
            test_name='test_typo_mark',
            test_id='123e4567-e89b-12d3-a456-426655440000',
            jira_id='ASC-123'))

    xml_doc = run_and_parse(testdir).xml_doc

    # Rename an existing property element of the testcase to an invalid name.
    xml_doc.find(
        './testcase/properties/property').attrib['name'] = 'wrong_test_id'

    xmlschema = etree.XMLSchema(etree.parse(get_xsd()))

    # Test
    assert xmlschema.validate(xml_doc) is False


def test_multiple_jira_references(testdir):
    """Verify that 'get_xsd' returns an XSD stream when a testcase is
    decorated with a Jira mark with multiple arguments.
    """

    # Setup
    testdir.makepyfile("""
    import pytest
    @pytest.mark.jira('ASC-123', 'ASC-124')
    @pytest.mark.test_id('123e4567-e89b-12d3-a456-426655440000')
    def test_xsd():
        pass
    """)

    xml_doc = run_and_parse(testdir).xml_doc
    xmlschema = etree.XMLSchema(etree.parse(get_xsd()))

    # Test
    xmlschema.assertValid(xml_doc)


def test_uuid_mark_present(testdir, single_decorated_test_function):
    """Verify that 'test_id' property element is present when a test is
    decorated with a UUID mark.
    """

    # Expect
    mark_type_exp = 'test_id'
    test_id_exp = '123e4567-e89b-12d3-a456-426655440000'
    test_name_exp = 'test_uuid'

    # Setup
    testdir.makepyfile(
        single_decorated_test_function.format(mark_type=mark_type_exp,
                                              mark_arg=test_id_exp,
                                              test_name=test_name_exp))

    junit_xml = run_and_parse(testdir)

    # Test
    assert junit_xml.get_testcase_properties(
        test_name_exp)[mark_type_exp] == test_id_exp


def test_custom_config(testdir, single_decorated_test_function,
                       simple_test_config):
    """Ensure that we can use a known good JSON document."""

    # Expect
    mark_type_exp = 'test_id'
    test_id_exp = '123e4567-e89b-12d3-a456-426655440000'
    test_name_exp = 'test_uuid'

    # Setup
    testdir.makepyfile(
        single_decorated_test_function.format(mark_type=mark_type_exp,
                                              mark_arg=test_id_exp,
                                              test_name=test_name_exp))

    args = ["--pytest-zigzag-config", simple_test_config]
    junit_xml = run_and_parse(testdir, 0, args)[0]

    # Test
    assert junit_xml.testsuite_props


def test_class_with_setup_and_teardown_steps(
        testdir, properly_decorated_test_class_with_step_failure,
        simple_test_config):
    """Verify that steps with 'setup' or 'teardown' in the name will always
    be run regardless of whether previous steps failed.
    """

    # Expect
    test_steps = {
        'test_step_one': 'test_setup',
        'test_step_two': 'test_fail',
        'test_step_three': 'test_skip',
        'test_step_four': 'test_teardown'
    }

    tc_props_exps = {
        'test_name': 'TestCaseWithSteps',
        'test_id': 'test_case_class_id',
        'jira_id': 'ASC-123'
    }

    ts_attribs_exps = {
        'tests': '4',
        'errors': '0',
        'skips': '1',
        'failures': '1'
    }

    # Setup
    testdir.makepyfile(
        properly_decorated_test_class_with_step_failure.format(
            **merge_dicts(test_steps, tc_props_exps)))

    args = ["--pytest-zigzag-config", simple_test_config]
    junit_xml = run_and_parse(testdir, 1, args)[0]

    # Test
    assert is_sub_dict(ts_attribs_exps, junit_xml.testsuite_attribs)

    for test_step in test_steps.values():
        assert junit_xml.get_testcase_properties(
            test_step)['test_step'] == 'true'
        assert junit_xml.get_testcase_properties(
            test_step)['test_id'] == tc_props_exps['test_id']
        assert junit_xml.get_testcase_properties(
            test_step)['jira'] == tc_props_exps['jira_id']


def test_class_with_failed_step(
        testdir, properly_decorated_test_class_with_step_failure,
        simple_test_config):
    """Verify that steps that follow a failing step will automatically skip."""

    # Expect
    test_steps = {
        'test_step_one': 'test_step_one',
        'test_step_two': 'test_step_fail',
        'test_step_three': 'test_step_skip',
        'test_step_four': 'test_step_skip_again'
    }

    tc_props_exps = {
        'test_name': 'TestCaseWithSteps',
        'test_id': 'test_case_class_id',
        'jira_id': 'ASC-123'
    }

    ts_attribs_exps = {
        'tests': '4',
        'errors': '0',
        'skips': '2',
        'failures': '1'
    }

    # Setup
    testdir.makepyfile(
        properly_decorated_test_class_with_step_failure.format(
            **merge_dicts(test_steps, tc_props_exps)))

    args = ["--pytest-zigzag-config", simple_test_config]
    junit_xml = run_and_parse(testdir, 1, args)[0]

    # Test
    assert is_sub_dict(ts_attribs_exps, junit_xml.testsuite_attribs)

    for test_step in test_steps.values():
        assert junit_xml.get_testcase_properties(
            test_step)['test_step'] == 'true'
        assert junit_xml.get_testcase_properties(
            test_step)['test_id'] == tc_props_exps['test_id']
        assert junit_xml.get_testcase_properties(
            test_step)['jira'] == tc_props_exps['jira_id']


def test_accurate_test_time(testdir, sleepy_test_function):
    """Verify that '*_time' property elements are accurate."""

    # Expect
    test_name_exp = 'test_i_can_has_a_duration'
    sleep_seconds_exp = 2

    # Setup
    testdir.makepyfile(
        sleepy_test_function.format(test_name=test_name_exp,
                                    seconds=str(sleep_seconds_exp)))

    junit_xml = run_and_parse(testdir)

    # Test
    start = date_parser.parse(
        str(junit_xml.get_testcase_property(test_name_exp, 'start_time')[0]))
    end = date_parser.parse(
        str(junit_xml.get_testcase_property(test_name_exp, 'end_time')[0]))

    assert (end - start).seconds == sleep_seconds_exp


def test_multiple_marks(testdir):
    """Verify that multiple property elements are present when a test is
    decorated with multiple marks.
    """

    # Expect
    test_ids_exp = {'test_id0': 'first', 'test_id1': 'second'}
    test_name_exp = 'test_uuid'

    # Setup
    testdir.makepyfile("""
    import pytest
    @pytest.mark.test_id('{test_id1}')
    @pytest.mark.test_id('{test_id0}')
    def {test_name}():
        pass
    """.format(test_name=test_name_exp, **test_ids_exp))

    junit_xml = run_and_parse(testdir)

    # Test (Note: tox and py.test disagree about the ordering of marks, so we
    # sort the values before comparing.)
    assert sorted(junit_xml.get_testcase_property(
        test_name_exp, 'test_id')) == sorted(test_ids_exp.values())


def test_multiple_test_cases_with_marks_present(testdir, simple_test_config):
    """Verify that 'test_id' and 'jira' property elements are present when
    multiple tests are decorated with required marks.
    """

    # Expect
    test_info = [{
        'test_name': 'test_mark1',
        'test_id': 'first',
        'jira_id': '1st'
    }, {
        'test_name': 'test_mark2',
        'test_id': 'second',
        'jira_id': '2nd'
    }]

    # Setup
    test_py_file = \
        """
        import pytest
        @pytest.mark.test_id('{test_id}')
        @pytest.mark.jira('{jira_id}')
        def {test_name}():
            pass
        """

    testdir.makepyfile(test_py_file.format(**test_info[0]),
                       test_py_file.format(**test_info[1]))

    args = ["--pytest-zigzag-config", simple_test_config]
    junit_xml = run_and_parse(testdir, 0, args)[0]

    # Test
    for info in test_info:
        assert junit_xml.get_testcase_property(info['test_name'],
                                               'test_id') == [info['test_id']]
        assert junit_xml.get_testcase_property(info['test_name'],
                                               'jira') == [info['jira_id']]


def test_class_with_steps_and_repeated_marks(
        testdir, improperly_decorated_test_class_with_steps,
        simple_test_config):
    """Verify that decorating a pytest class with the 'test_case_with_steps'
    mark, along with marking methods of said class with 'test_id' and 'jira'
    marks, will result in test cases being marked with the PARENT class's
    'test_id' and 'jira' marks.

    Basically, we're validating that marks on test class methods are ignored
    when the class itself is decorated with the 'test_case_with_steps' mark.
    """

    # Expect
    test_steps = {
        'test_step_one': 'test_step_one',
        'test_step_two': 'test_step_two',
        'test_step_three': 'test_step_three'
    }

    tc_props_exps = {
        'test_name': 'TestCaseWithSteps',
        'test_id': 'test_case_class_id',
        'jira_id': 'ASC-123'
    }

    # Setup
    testdir.makepyfile(
        improperly_decorated_test_class_with_steps.format(
            **merge_dicts(test_steps, tc_props_exps)))

    args = ["--pytest-zigzag-config", simple_test_config]
    junit_xml = run_and_parse(testdir, 0, args)[0]

    # Test
    for test_step in test_steps.values():
        assert junit_xml.get_testcase_properties(
            test_step)['test_step'] == 'true'
        assert junit_xml.get_testcase_properties(
            test_step)['test_id'] == tc_props_exps['test_id']
        assert junit_xml.get_testcase_properties(
            test_step)['jira'] == tc_props_exps['jira_id']


def test_failure_in_setup_fixture(testdir, failure_in_test_setup,
                                  simple_test_config):
    """Verify that we still get start and end times if a test fails in its
    setup fixture.
    """

    # Expect
    test_name_exp = 'test_oops_i_failed_my_setup'

    # Setup
    testdir.makepyfile(failure_in_test_setup.format(test_name=test_name_exp))

    args = ["--pytest-zigzag-config", simple_test_config]
    junit_xml = run_and_parse(testdir, 1, args)[0]

    # Test
    try:
        date_parser.parse(
            str(junit_xml.get_testcase_property(test_name_exp,
                                                'start_time')[0]))
        date_parser.parse(
            str(junit_xml.get_testcase_property(test_name_exp,
                                                'end_time')[0]))
    except IndexError:
        raise AssertionError('Could not find start_time and end_time')


def test_happy_path_mk8s(testdir, properly_decorated_test_function, mocker,
                         mk8s_test_config):
    """Verify that 'get_xsd' returns an XSD stream that can be used to
    validate JUnitXML when configured with mk8s.
    """

    # Mock
    zz = mocker.MagicMock()
    xmlpf = XmlParsingFacade(zz)

    # Setup
    testdir.makepyfile(
        properly_decorated_test_function.format(
            test_name='test_happy_path',
            test_id='123e4567-e89b-12d3-a456-426655440000',
            jira_id='ASC-123'))

    args = ["--pytest-zigzag-config", mk8s_test_config]
    xml_doc = run_and_parse(testdir, 0, args)[0].xml_doc
    # noinspection PyProtectedMember
    xmlschema = etree.XMLSchema(etree.parse(xmlpf._get_xsd()))

    # Test
    xmlschema.assertValid(xml_doc)