Example #1
def test_json_exporter(runpath):
    """
    JSON Exporter should generate a json report at the given `json_path`.
    """
    json_path = os.path.join(runpath, "report.json")

    plan = TestplanMock("plan",
                        exporters=JSONExporter(json_path=json_path),
                        runpath=runpath)
    multitest_1 = multitest.MultiTest(name="Primary", suites=[Alpha()])
    multitest_2 = multitest.MultiTest(name="Secondary", suites=[Beta()])
    plan.add(multitest_1)
    plan.add(multitest_2)
    plan.run()

    assert os.path.exists(json_path)
    assert os.stat(json_path).st_size > 0

    # Load the JSON file to validate it contains valid JSON.
    with open(json_path) as json_file:
        report = json.load(json_file)

    # Check that the expected text file is attached correctly.
    attachments_dir = os.path.join(os.path.dirname(json_path), "_attachments")
    assert os.path.isdir(attachments_dir)
    assert len(report["attachments"]) == 1

    dst_path = list(report["attachments"].keys())[0]
    attachment_filepath = os.path.join(attachments_dir, dst_path)
    assert os.path.isfile(attachment_filepath)

    with open(attachment_filepath) as f:
        attachment_file_contents = f.read()
    assert attachment_file_contents == "testplan\n" * 100
Example #2
def test_multitest_runpath():
    """Test setting of runpath."""
    class Parent(object):
        def __init__(self, runpath):
            self.runpath = runpath

    global_runpath = os.path.join("", "var", "tmp", "global_level")
    local_runpath = os.path.join("", "var", "tmp", "local_runpath")

    par = Parent(global_runpath)

    # No runpath specified - take default runpath
    mtest = multitest.MultiTest(name="Mtest",
                                suites=[],
                                **MTEST_DEFAULT_PARAMS)
    assert mtest.runpath is None
    assert mtest._runpath is None
    mtest.run()
    assert mtest.runpath == path.default_runpath(mtest)
    assert mtest._runpath == path.default_runpath(mtest)

    # runpath in local cfg - take local cfg
    mtest = multitest.MultiTest(name="Mtest",
                                suites=[],
                                runpath=local_runpath,
                                **MTEST_DEFAULT_PARAMS)
    assert mtest.runpath is None
    assert mtest._runpath is None
    mtest.run()
    assert mtest.runpath == local_runpath
    assert mtest._runpath == local_runpath

    # runpath in global cfg - take parent's runpath and append uid
    mtest = multitest.MultiTest(name="Mtest",
                                suites=[],
                                **MTEST_DEFAULT_PARAMS)
    mtest.parent = par
    assert mtest.runpath is None
    assert mtest._runpath is None
    mtest.run()
    assert mtest.runpath == os.path.join(global_runpath, "mtest")
    assert mtest._runpath == os.path.join(global_runpath, "mtest")

    # runpath in global cfg and local - take local cfg
    mtest = multitest.MultiTest(name="Mtest",
                                suites=[],
                                runpath=local_runpath,
                                **MTEST_DEFAULT_PARAMS)
    mtest.parent = par
    assert mtest.runpath is None
    assert mtest._runpath is None
    mtest.run()
    assert mtest.runpath == local_runpath
    assert mtest._runpath == local_runpath
Example #3
def test_multitest_runpath():
    """Test setting of runpath."""
    # No runpath specified
    mtest = multitest.MultiTest(name="Mtest",
                                suites=[],
                                **MTEST_DEFAULT_PARAMS)
    assert mtest.runpath is None
    assert mtest._runpath is None
    mtest.run()
    assert mtest.runpath == path.default_runpath(mtest)
    assert mtest._runpath == path.default_runpath(mtest)

    # runpath in local cfg
    custom = os.path.join("", "var", "tmp", "custom")
    mtest = multitest.MultiTest(name="Mtest",
                                suites=[],
                                runpath=custom,
                                **MTEST_DEFAULT_PARAMS)
    assert mtest.runpath is None
    assert mtest._runpath is None
    mtest.run()
    assert mtest.runpath == custom
    assert mtest._runpath == custom

    # runpath in global cfg
    global_runpath = os.path.join("", "var", "tmp", "global_level")
    par = base.MultiTestConfig(name="Mtest", suites=[], runpath=global_runpath)
    mtest = multitest.MultiTest(name="Mtest",
                                suites=[],
                                **MTEST_DEFAULT_PARAMS)
    mtest.cfg.parent = par
    assert mtest.runpath is None
    assert mtest._runpath is None
    mtest.run()
    assert mtest.runpath == global_runpath
    assert mtest._runpath == global_runpath

    # runpath in global cfg and local
    global_runpath = os.path.join("", "var", "tmp", "global_level")
    local_runpath = os.path.join("", "var", "tmp", "local_runpath")
    par = base.MultiTestConfig(name="Mtest", suites=[], runpath=global_runpath)
    mtest = multitest.MultiTest(name="Mtest",
                                suites=[],
                                runpath=local_runpath,
                                **MTEST_DEFAULT_PARAMS)
    mtest.cfg.parent = par
    assert mtest.runpath is None
    assert mtest._runpath is None
    mtest.run()
    assert mtest.runpath == local_runpath
    assert mtest._runpath == local_runpath
Example #4
def test_run_testcases_iter():
    """Test running tests iteratively."""
    mtest = multitest.MultiTest(
        name="MTest",
        suites=[Suite()],
        thread_pool_size=3,
        **MTEST_DEFAULT_PARAMS
    )
    mtest.dry_run()

    results = list(mtest.run_testcases_iter())
    assert len(results) == 8

    attributes, parent_uids = results[0]
    assert parent_uids == ["MTest", "Suite", "case"]
    assert attributes["runtime_status"] == report.RuntimeStatus.RUNNING

    testcase_report, parent_uids = results[1]
    assert parent_uids == ["MTest", "Suite"]
    assert testcase_report.runtime_status == report.RuntimeStatus.FINISHED
    _check_testcase_report(testcase_report)

    for i, (attributes, parent_uids) in enumerate(results[2::2]):
        assert parent_uids == [
            "MTest",
            "Suite",
            "parametrized",
            "parametrized__val_{}".format(i + 1),
        ]
        assert attributes["runtime_status"] == report.RuntimeStatus.RUNNING

    for i, (testcase_report, parent_uids) in enumerate(results[3::2]):
        assert parent_uids == ["MTest", "Suite", "parametrized"]
        assert testcase_report.runtime_status == report.RuntimeStatus.FINISHED
        _check_param_testcase_report(testcase_report, i)
Example #5
def _factory(pattern=None):
    """Build a plan for `suites`, filtered by `pattern` if given."""
    plan = testplan.TestplanMock(
        name="Logging TestPlan",
        test_filter=Pattern(pattern) if pattern else Filter(),
    )
    plan.add(multitest.MultiTest(name="Logging Test", suites=suites))
    return plan
Example #6
def plan(tmpdir):
    """Yield an interactive testplan."""
    plan = testplan.TestplanMock(
        name=six.ensure_str("InteractiveAPITest"),
        interactive_port=0,
        interactive_block=False,
        exporters=[XMLExporter(xml_dir=tmpdir / "xml_exporter")],
    )

    logfile = tmpdir / "attached_log.txt"
    logfile.write_text(
        "This text will be written into the attached file.", encoding="utf8",
    )

    plan.add(
        multitest.MultiTest(
            name=six.ensure_str("ExampleMTest"),
            suites=[ExampleSuite(str(logfile))],
        )
    )
    plan.run()
    timing.wait(
        lambda: plan.interactive.http_handler_info is not None,
        300,
        raise_on_timeout=True,
    )
    yield plan
    plan.abort()
Example #7
def plan(tmpdir):
    """Yield an interactive testplan."""

    with mock.patch("testplan.runnable.interactive.reloader.ModuleReloader"
                    ) as MockReloader:
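        # Stub out the module reloader; module reloading is not exercised by these tests.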
        MockReloader.return_value = None

        plan = testplan.TestplanMock(
            name="InteractiveAPITest",
            interactive_port=0,
            interactive_block=False,
            exporters=[XMLExporter(xml_dir=str(tmpdir / "xml_exporter"))],
        )

        logfile = tmpdir / "attached_log.txt"
        logfile.write_text(
            "This text will be written into the attached file.",
            encoding="utf-8",
        )

        plan.add(
            multitest.MultiTest(
                name="ExampleMTest",
                suites=[ExampleSuite(str(logfile))],
            ))
        plan.run()
        timing.wait(
            lambda: plan.interactive.http_handler_info is not None,
            300,
            raise_on_timeout=True,
        )
        yield plan
        plan.abort()
Example #8
def test_run_all_tests():
    """Test running all tests."""
    mtest = multitest.MultiTest(name="MTest",
                                suites=[Suite()],
                                **MTEST_DEFAULT_PARAMS)
    mtest_report = mtest.run_tests()
    assert mtest_report.passed
    assert mtest_report.name == "MTest"
    assert mtest_report.category == report.ReportCategories.MULTITEST
    assert len(mtest_report.entries) == 1  # One suite.

    suite_report = mtest_report.entries[0]
    assert suite_report.passed
    assert suite_report.name == "Suite"
    assert suite_report.category == report.ReportCategories.TESTSUITE
    assert len(suite_report.entries) == 2  # Two testcases.

    testcase_report = suite_report.entries[0]
    _check_testcase_report(testcase_report)

    param_report = suite_report.entries[1]
    assert param_report.passed
    assert param_report.name == "parametrized"
    assert param_report.category == report.ReportCategories.PARAMETRIZATION
    assert len(param_report.entries) == 3  # Three parametrized testcases.

    for i, testcase_report in enumerate(param_report.entries):
        _check_param_testcase_report(testcase_report, i)
Example #9
def test_run_tests_parallel():
    """Test running tests in parallel via an execution group."""
    # Since we have at most three testcases in any one execution group,
    # use three threads in the thread pool to save on resources.
    mtest = multitest.MultiTest(name="MTest",
                                suites=[ParallelSuite()],
                                thread_pool_size=3,
                                **MTEST_DEFAULT_PARAMS)
    mtest_report = mtest.run_tests()
    assert mtest_report.passed
    assert mtest_report.name == "MTest"
    assert mtest_report.category == report.ReportCategories.MULTITEST
    assert len(mtest_report.entries) == 1  # One suite.

    suite_report = mtest_report.entries[0]
    assert suite_report.passed
    assert suite_report.name == "ParallelSuite"
    assert suite_report.category == report.ReportCategories.TESTSUITE
    assert len(suite_report.entries) == 4  # Four testcases.

    for i in range(3):
        case_name = "case{}".format(i + 1)
        _check_parallel_testcase(suite_report[case_name], i)

    _check_parallel_param(suite_report["parametrized"])
Example #10
def irunner():
    """Set up an irunner instance for testing."""
    target = runnable.TestRunner(name="TestRunner")

    local_runner = runners.LocalRunner()
    test_uids = ["test_1", "test_2", "test_3"]
    test_objs = [
        multitest.MultiTest(
            name=uid,
            suites=[Suite()],
            test_filter=filtering.Filter(),
            test_sorter=ordering.NoopSorter(),
            stdout_style=defaults.STDOUT_STYLE,
            environment=[driver.Driver(name="mock_driver")],
        ) for uid in test_uids
    ]

    for test in test_objs:
        local_runner.add(test, test.uid())

    target.resources.add(local_runner)

    with mock.patch("cheroot.wsgi.Server"):
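        # The WSGI server class is patched so no real HTTP server is started.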
        irunner = base.TestRunnerIHandler(target)
        irunner.setup()

        yield irunner

        irunner.teardown()
Example #11
def make_serialization_mtest():
    """
    Callable target to make a MultiTest containing the SerializationSuite
    defined above.
    """
    return multitest.MultiTest(name='SerializationMTest',
                               suites=[SerializationSuite()])
Example #12
def test_http_exporter(http_server):
    """
    HTTP Exporter should send a json report to the given `http_url`.
    """
    http_url = 'http://localhost:{}'.format(http_server.server_port)

    plan = Testplan(name='plan',
                    parse_cmdline=False,
                    exporters=HTTPExporter(http_url=http_url))
    multitest_1 = multitest.MultiTest(name='Primary', suites=[Alpha()])
    multitest_2 = multitest.MultiTest(name='Secondary', suites=[Beta()])
    plan.add(multitest_1)
    plan.add(multitest_2)
    plan.run()

    assert len(PostHandler.post_data) == 1
    PostHandler.post_data.pop()
Example #13
def test_http_exporter(runpath, http_server):
    """
    HTTP Exporter should send a json report to the given `http_url`.
    """
    http_url = "http://localhost:{}".format(http_server.server_port)

    plan = TestplanMock(
        "plan", exporters=HTTPExporter(http_url=http_url), runpath=runpath
    )
    multitest_1 = multitest.MultiTest(name="Primary", suites=[Alpha()])
    multitest_2 = multitest.MultiTest(name="Secondary", suites=[Beta()])
    plan.add(multitest_1)
    plan.add(multitest_2)
    plan.run()

    assert len(PostHandler.post_data) == 1
    PostHandler.post_data.pop()
Example #14
def attachment_plan(tmpdir):
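    """Build a plan whose suite is given a generated text attachment."""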
    attachment_path = str(tmpdir.join("attachment.txt"))
    with open(attachment_path, "w") as f:
        f.write("testplan\n" * 100)

    plan = testplan.TestplanMock(name="AttachmentPlan")
    plan.add(
        multitest.MultiTest(name="AttachmentTest",
                            suites=[Suite1(attachment_path)]))
    return plan
Example #15
def test_dry_run():
    """Test the "dry_run" method which generates an empty report skeleton."""
    mtest = multitest.MultiTest(name="MTest",
                                suites=[Suite()],
                                **MTEST_DEFAULT_PARAMS)
    result = mtest.dry_run()
    report_skeleton = result.report

    # Comparing the serialized reports makes it much easier to spot any
    # inconsistencies.
    assert report_skeleton.serialize() == EXPECTED_REPORT_SKELETON.serialize()
Example #16
def test_dry_run():
    """Test the "dry_run" method which generates an empty report skeleton."""
    mtest = multitest.MultiTest(name="MTest",
                                description="Basic multitest.",
                                suites=[Suite()],
                                **MTEST_DEFAULT_PARAMS)
    result = mtest.dry_run()
    report_skeleton = result.report

    # Comparing the reports to spot any inconsistencies.
    testing.check_report(expected=EXPECTED_REPORT_SKELETON,
                         actual=report_skeleton)
Example #17
def same_attachments_plan(tmpdir):
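    """Build a plan in which two suites share the same attachment file."""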
    attachment_path = str(tmpdir.join("attachment.txt"))
    with open(attachment_path, "w") as f:
        f.write("testplan\n" * 100)

    plan = testplan.Testplan(name="AttachmentPlan", parse_cmdline=False)
    plan.add(
        multitest.MultiTest(
            name="AttachmentTest",
            suites=[Suite1(attachment_path),
                    Suite2(attachment_path)]))
    return plan
Example #18
def test_implicit_exporter_initialization(http_server):
    """
    An implicit export should be performed if `http_url` is available
    via cmdline args but no exporters were declared programmatically.
    """
    http_url = 'http://localhost:{}'.format(http_server.server_port)

    with argv_overridden('--http', http_url):
        plan = Testplan(name='plan')
        multitest_1 = multitest.MultiTest(name='Primary', suites=[Alpha()])
        plan.add(multitest_1)
        plan.run()

    assert len(PostHandler.post_data) == 1
    PostHandler.post_data.pop()
Example #19
def test_implicit_exporter_initialization(runpath, http_server):
    """
    An implicit export should be performed if `http_url` is available
    via cmdline args but no exporters were declared programmatically.
    """
    http_url = "http://localhost:{}".format(http_server.server_port)

    with argv_overridden("--http", http_url):
        plan = TestplanMock("plan", parse_cmdline=True, runpath=runpath)
        multitest_1 = multitest.MultiTest(name="Primary", suites=[Alpha()])
        plan.add(multitest_1)
        plan.run()

    assert len(PostHandler.post_data) == 1
    PostHandler.post_data.pop()
Example #20
def test_run_testcases_iter():
    """Test running tests iteratively."""
    mtest = multitest.MultiTest(name="MTest",
                                suites=[Suite()],
                                thread_pool_size=3,
                                **MTEST_DEFAULT_PARAMS)

    results = list(mtest.run_testcases_iter())
    assert len(results) == 4

    testcase_report, parent_uids = results[0]
    assert parent_uids == ["MTest", "Suite"]
    _check_testcase_report(testcase_report)

    for i, (testcase_report, parent_uids) in enumerate(results[1:]):
        assert parent_uids == ["MTest", "Suite", "parametrized"]
        _check_param_testcase_report(testcase_report, i)
Example #21
def test_test_refresh(mock_reload_env):
    """Test that tests are correctly refreshed after a module is reloaded."""
    reload_obj, mock_reload, mock_stat = mock_reload_env

    # Modify mod_a again. This time we specify a MultiTest to refresh
    # suites for.
    mock_reload.reset_mock()
    test = multitest.MultiTest(name="MTest", suites=[Suite()])
    test.cfg.suites[0].__module__ = "mod_a"
    mock_stat.modified_files = {MOCK_MODULES["mod_a"]}

    set_testsuite_testcases = (
        "testplan.testing.multitest.suite.set_testsuite_testcases")
    with mock.patch(set_testsuite_testcases) as mock_set_testcases:
        reload_obj.reload(tests=[test])
        mock_reload.assert_called_once_with(MOCK_SYSMODULES["mod_a"])
        mock_set_testcases.assert_called_once_with(test.cfg.suites[0])
Example #22
def test_implicit_exporter_initialization(tmpdir):
    """
    An implicit XMLExporter should be generated if `xml_dir` is available
    via cmdline args but no exporters were declared programmatically.
    """
    xml_dir = tmpdir.mkdir("xml")

    with argv_overridden("--xml", xml_dir.strpath):
        plan = Testplan(name="plan")
        multitest_1 = multitest.MultiTest(name="Primary", suites=[Alpha()])
        plan.add(multitest_1)
        plan.run()

    xml_path = xml_dir.join("primary.xml").strpath

    assert os.path.exists(xml_path)
    assert os.stat(xml_path).st_size > 0
Example #23
def multi_attachments_plan(tmpdir):
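    """Build a plan in which two suites use different attachment files."""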
    attachment_paths = [
        str(tmpdir.join("attachment{}.txt".format(i))) for i in range(2)
    ]

    # Write different content to each file to ensure they get a unique hash.
    for i, attachment_path in enumerate(attachment_paths):
        with open(attachment_path, "w") as f:
            f.write("testplan{}\n".format(i) * 100)

    plan = testplan.Testplan(name="AttachmentPlan", parse_cmdline=False)
    plan.add(
        multitest.MultiTest(
            name="AttachmentTest",
            suites=[Suite1(attachment_paths[0]),
                    Suite2(attachment_paths[1])]))
    return plan
Example #24
def multi_attachments_plan(tmpdir):
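    """Build a plan whose suite gets two same-named attachment files."""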

    attachment_paths = [
        str(tmpdir.mkdir(f"{i}").join("attachment.txt")) for i in range(2)
    ]

    # Write different content to each file to ensure they get a unique hash.
    for i, attachment_path in enumerate(attachment_paths):
        with open(attachment_path, "w") as f:
            f.write(f"testplan{i}\n")

    plan = testplan.TestplanMock(name="AttachmentPlan")
    plan.add(
        multitest.MultiTest(
            name="AttachmentTest",
            suites=[Suite1(attachment_paths)],
        ))
    return plan
Example #25
def plan():
    """Yield an interactive testplan."""
    plan = testplan.Testplan(
        name=six.ensure_str("InteractiveAPITest"),
        interactive_port=0,
        interactive_block=False,
        parse_cmdline=False,
    )
    plan.add(
        multitest.MultiTest(name=six.ensure_str("ExampleMTest"),
                            suites=[ExampleSuite()]))
    plan.run()
    timing.wait(
        lambda: plan.interactive.http_handler_info is not None,
        300,
        raise_on_timeout=True,
    )
    yield plan
    plan.abort()
Example #26
def test_implicit_exporter_initialization(runpath):
    """
    An implicit JSONExporter should be generated if `json_path` is available
    via cmdline args but no exporters were declared programmatically.
    """
    json_path = os.path.join(runpath, "report.json")

    with argv_overridden("--json", json_path):
        plan = TestplanMock(name="plan", parse_cmdline=True)
        multitest_1 = multitest.MultiTest(name="Primary", suites=[Alpha()])
        plan.add(multitest_1)
        plan.run()

    assert os.path.exists(json_path)
    assert os.stat(json_path).st_size > 0

    # Load the JSON file to validate it contains valid JSON.
    with open(json_path) as json_file:
        json.load(json_file)
Example #27
def test_implicit_exporter_initialization(tmpdir):
    """
    An implicit JSONExporter should be generated if `json_path` is available
    via cmdline args but no exporters were declared programmatically.
    """
    json_path = tmpdir.mkdir('reports').join('report.json').strpath

    with argv_overridden('--json', json_path):
        plan = Testplan(name='plan')
        multitest_1 = multitest.MultiTest(name='Primary', suites=[Alpha()])
        plan.add(multitest_1)
        plan.run()

    assert os.path.exists(json_path)
    assert os.stat(json_path).st_size > 0

    # Load the JSON file to validate it contains valid JSON.
    with open(json_path) as json_file:
        json.load(json_file)
Example #28
def test_run_all_tagged_tests(tags, num_of_suite_entries):
    """Test running all tests whose testcases are selected by tags."""
    target = runnable.TestRunner(name="TestRunner")

    local_runner = LocalRunner()
    test_uids = ["test_1", "test_2", "test_3"]
    test_objs = [
        multitest.MultiTest(
            name=uid,
            suites=[TaggedSuite()],
            test_filter=filtering.Tags({"simple": set(tags)}),
            test_sorter=ordering.NoopSorter(),
            stdout_style=defaults.STDOUT_STYLE,
            environment=[driver.Driver(name="mock_driver")],
        )
        for uid in test_uids
    ]

    for test in test_objs:
        local_runner.add(test, test.uid())

    target.resources.add(local_runner)

    with mock.patch("cheroot.wsgi.Server"), mock.patch(
        "testplan.runnable.interactive.reloader.ModuleReloader"
    ) as MockReloader:
        MockReloader.return_value = None

        irunner = base.TestRunnerIHandler(target)
        irunner.setup()

        irunner.run_all_tests(await_results=True)
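        # Each of the three MultiTests should pass, each containing one suite
        # whose testcase count depends on the tag filter.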
        assert irunner.report.passed
        assert len(irunner.report.entries) == 3
        for test_report in irunner.report:
            assert test_report.passed
            assert len(test_report.entries) == 1
            assert len(test_report.entries[0].entries) == num_of_suite_entries
            assert len(test_report.entries[0].entries[-1].entries) == 3

        irunner.teardown()
Example #29
def test_xml_exporter(tmpdir):
    """
    XMLExporter should create a JUnit compatible xml file for each
    multitest in the plan.
    """
    xml_dir = tmpdir.mkdir("xml")

    plan = Testplan(
        name="plan",
        parse_cmdline=False,
        exporters=XMLExporter(xml_dir=xml_dir.strpath),
    )
    multitest_1 = multitest.MultiTest(name="Primary", suites=[Alpha()])
    multitest_2 = multitest.MultiTest(name="Secondary", suites=[Beta()])
    plan.add(multitest_1)
    plan.add(multitest_2)
    plan.run()

    xml_primary = xml_dir.join("primary.xml").strpath
    xml_secondary = xml_dir.join("secondary.xml").strpath
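
    # The XC trees below describe the expected JUnit XML for each report:
    # "Primary" passes both testcases, "Secondary" records one failure and one error.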

    xml_primary_comparison = XC(
        tag="testsuites",
        children=[
            XC(
                tag="testsuite",
                tests="2",
                errors="0",
                name="Alpha",
                package="Primary:Alpha",
                hostname=re.compile(".+"),
                failures="0",
                id="0",
                children=[
                    XC(
                        tag="testcase",
                        classname="Primary:Alpha:test_comparison",
                        name="test_comparison",
                        time=re.compile(r"\d+\.?\d*"),
                    ),
                    XC(
                        tag="testcase",
                        classname="Primary:Alpha:test_membership",
                        name="test_membership",
                        time=re.compile(r"\d+\.?\d*"),
                    ),
                ],
            )
        ],
    )

    xml_secondary_comparison = XC(
        tag="testsuites",
        children=[
            XC(
                tag="testsuite",
                tests="2",
                errors="1",
                name="Beta",
                package="Secondary:Beta",
                hostname=re.compile(".+"),
                failures="1",
                id="0",
                children=[
                    XC(
                        tag="testcase",
                        classname="Secondary:Beta:test_failure",
                        name="test_failure",
                        time=re.compile(r"\d+\.?\d*"),
                        children=[
                            XC(
                                tag="failure",
                                message="failing assertion",
                                type="assertion",
                            ),
                            XC(
                                tag="failure",
                                message="Equal",
                                type="assertion",
                            ),
                        ],
                    ),
                    XC(
                        tag="testcase",
                        classname="Secondary:Beta:test_error",
                        name="test_error",
                        time=re.compile(r"\d+\.?\d*"),
                        children=[
                            XC(
                                tag="error",
                                message=re.compile(
                                    r"Traceback(.|\s)+Exception:\sfoo"),
                            )
                        ],
                    ),
                ],
            )
        ],
    )

    test_ctx = (
        (xml_primary, xml_primary_comparison),
        (xml_secondary, xml_secondary_comparison),
    )

    for file_path, xml_comparison in test_ctx:
        with open(file_path) as xml_file:
            xml_comparison.compare(xml_file.read())
Example #30
def main(plan):
    """Define a Testplan with a single MultiTest."""
    plan.add(multitest.MultiTest(name="TestAttachments", suites=[TestSuite()]))