from collections import Counter, defaultdict
import tempfile
import os

from tests import BZTestCase, setup_test_logging
from tests.mocks import EngineEmul
from bzt.modules.reporting import JUnitXMLReporter
from bzt.utils import BetterDict
from bzt.modules.passfail import PassFailStatus, DataCriteria
from bzt.modules.aggregator import DataPoint, KPISet
from bzt.six import etree

setup_test_logging()


class TestJUnitXML(BZTestCase):
    """Tests for the JUnitXML reporting module."""

    def test_prepare_filename_in_settings(self):
        """An explicit 'filename' in the reporter's settings must be honoured.

        Builds a reporter against an emulated engine and merges a config that
        points the report at a temp path inside the engine's artifacts dir.
        NOTE(review): this chunk appears truncated — the full test presumably
        goes on to call prepare()/post_process and assert on the file; only
        the visible setup statements are reproduced here.
        """
        # test path parameter from config
        obj = JUnitXMLReporter()
        obj.engine = EngineEmul()
        obj.parameters = BetterDict()
        # mktemp gives a path only (no file created); the reporter is expected
        # to create the XML itself under the engine's artifacts dir
        path_from_config = tempfile.mktemp(suffix='.xml',
                                           prefix='junit-xml-path-in-settings',
                                           dir=obj.engine.artifacts_dir)
        obj.parameters.merge({
            "filename": path_from_config,
            "data-source": "sample-labels"
        })
import shutil
import sys
from math import ceil

import yaml

from bzt.modules.aggregator import ConsolidatingAggregator, DataPoint, KPISet
from tests import setup_test_logging, BZTestCase, __dir__
from bzt.engine import Provisioning
from bzt.modules.jmeter import JMeterExecutor, JMX, JTLErrorsReader, JTLReader, JMeterJTLLoaderExecutor, \
    JMeter
from tests.mocks import EngineEmul, ResultChecker, RecordingHandler
from bzt.utils import BetterDict
from bzt.six import etree

setup_test_logging()


class TestJMeterExecutor(BZTestCase):
    """Tests for the JMeter executor module."""

    def test_jmx(self):
        """prepare() must succeed for a scenario that points at a plain JMX script."""
        obj = JMeterExecutor()
        obj.engine = EngineEmul()
        obj.execution = BetterDict()
        obj.execution.merge({"scenario": {"script": "tests/jmx/dummy.jmx"}})
        obj.prepare()

    def test_jmx_2tg(self):
        """Executor setup with a provisioning mode set on the engine config.

        NOTE(review): this chunk appears truncated at the source boundary —
        the full test presumably merges a two-thread-group scenario and calls
        prepare(); only the visible setup statements are reproduced here.
        """
        obj = JMeterExecutor()
        obj.engine = EngineEmul()
        obj.engine.config[Provisioning.PROV] = 'test'
        obj.execution = BetterDict()