def test_distributed_th_hostnames(self):
    """Distributed mode must prefix ThreadGroup names of a plain JMX script
    with ${__machineName()}, but must NOT prefix requests-built scenarios."""
    prefix = r"${__machineName()}"  # shared expectation for both halves

    # Half 1: plain JMX script -> names get the machine-name prefix.
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.execution.merge({"scenario": {"script": "tests/jmx/http.jmx"}})
    obj.distributed_servers = ["127.0.0.1", "127.0.0.1"]
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:  # close handle (was leaked)
        xml_tree = etree.fromstring(fds.read())
    for thread_group in xml_tree.findall(".//ThreadGroup"):
        self.assertTrue(thread_group.attrib["testname"].startswith(prefix))

    # Half 2: requests-generated scenario -> no prefix expected.
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    with open(__dir__() + "/../../tests/json/get-post.json") as fds:
        obj.engine.config = json.loads(fds.read())
    obj.execution = obj.engine.config['execution']
    obj.settings.merge(obj.engine.config.get("modules").get("jmeter"))
    obj.distributed_servers = ["127.0.0.1", "127.0.0.1"]
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    for thread_group in xml_tree.findall(".//ThreadGroup"):
        self.assertFalse(thread_group.attrib["testname"].startswith(prefix))
def test_iterations_loop_bug(self):
    """Explicit 'iterations' must be written into the LoopController; with no
    'iterations' the script's own loop count stays and is not disabled."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config[Provisioning.PROV] = 'test'
    obj.execution = BetterDict()
    obj.execution.merge({"iterations": 10, "scenario": {"script": __dir__() + "/../jmx/http.jmx"}})
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:  # close handle (was leaked)
        modified_xml_tree = etree.fromstring(fds.read())
    tg = modified_xml_tree.find(".//ThreadGroup")
    loop_ctrl = tg.find(".//elementProp[@name='ThreadGroup.main_controller']")
    tg_loops = loop_ctrl.find(".//stringProp[@name='LoopController.loops']")
    tg_forever = loop_ctrl.find(".//boolProp[@name='LoopController.continue_forever']")
    self.assertEqual(tg_loops.text, "10")
    self.assertEqual(tg_forever.text, "false")

    # Same script without 'iterations': script default of 1 loop is kept.
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config[Provisioning.PROV] = 'test'
    obj.execution = BetterDict()
    obj.execution.merge({"scenario": {"script": __dir__() + "/../jmx/http.jmx"}})
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:
        modified_xml_tree = etree.fromstring(fds.read())
    tg = modified_xml_tree.find(".//ThreadGroup")
    loop_ctrl = tg.find(".//elementProp[@name='ThreadGroup.main_controller']")
    tg_loops = loop_ctrl.find("*[@name='LoopController.loops']")
    tg_forever = loop_ctrl.find(".//boolProp[@name='LoopController.continue_forever']")
    self.assertEqual(tg_loops.text, "1")  # default value, not disabled
    self.assertEqual(tg_forever.text, "false")
def test_add_shaper_constant(self):
    """Constant 'throughput' + 'hold-for' must produce exactly one
    VariableThroughputTimer with a (100 rps, 100 rps, 60 s) profile step.

    NOTE(review): a later method in this module reuses this name; if both
    live in the same class, only the last definition is collected.
    """
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    obj.engine.config.merge({
        'execution': {
            'concurrency': 200,
            'throughput': 100,
            'hold-for': '1m',
            'scenario': {'script': __dir__() + '/../jmx/http.jmx'}
        }
    })
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:  # close handle (was leaked)
        xml_tree = etree.fromstring(fds.read())
    timer_ = ".//kg.apc.jmeter.timers.VariableThroughputTimer"
    timer_ += "[@testclass='kg.apc.jmeter.timers.VariableThroughputTimer']"
    shaper_elements = xml_tree.findall(timer_)
    self.assertEqual(1, len(shaper_elements))
    shaper_coll_element = shaper_elements[0].find(".//collectionProp[@name='load_profile']")
    # stringProps '49'/'1567'/'53' are the shaper's start-rps/end-rps/duration slots
    self.assertEqual("100", shaper_coll_element.find(".//stringProp[@name='49']").text)
    self.assertEqual("100", shaper_coll_element.find(".//stringProp[@name='1567']").text)
    self.assertEqual("60", shaper_coll_element.find(".//stringProp[@name='53']").text)
def test_user_def_vars_override(self):
    """Scenario 'variables' must merge into a single Arguments (UDV) element
    rather than adding a second one to the script.

    NOTE(review): a later method in this module reuses this name; if both
    live in the same class, only the last definition is collected.
    """
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        'execution': {
            'concurrency': 200,
            'throughput': 100,
            'hold-for': '1m',
            'scenario': {
                'variables': {
                    'my_var': 'http://demo.blazemeter.com/api/user',
                    'myvar2': 'val2'
                },
                'properties': {'log_level.jmeter': 'DEBUG'},
                'script': __dir__() + '/../jmx/http.jmx'
            }
        }
    })
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:  # close handle (was leaked)
        xml_tree = etree.fromstring(fds.read())
    udv_elements = xml_tree.findall(".//Arguments[@testclass='Arguments']")
    self.assertEqual(1, len(udv_elements))
def test_duration_loops_bug(self):
    """Duration-limited load ('hold-for' without 'iterations') must set the
    LoopController to -1 loops instead of looping forever."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config[Provisioning.PROV] = 'test'
    obj.execution = BetterDict()
    obj.execution.merge({
        "concurrency": 10,
        "ramp-up": 15,
        "hold-for": "2m",
        "scenario": {"script": __dir__() + "/../jmx/http.jmx"}
    })
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:  # close handle (was leaked)
        modified_xml_tree = etree.fromstring(fds.read())
    tg = modified_xml_tree.find(".//ThreadGroup")
    loop_ctrl = tg.find(".//elementProp[@name='ThreadGroup.main_controller']")
    tg_loops = loop_ctrl.find(".//intProp[@name='LoopController.loops']")
    tg_forever = loop_ctrl.find(".//boolProp[@name='LoopController.continue_forever']")
    self.assertEqual(tg_loops.text, "-1")
    self.assertEqual(tg_forever.text, "false")
def test_convert_tgroups_load_modifications(self):
    """Stepping/Ultimate thread groups must be converted to plain ThreadGroups
    while keeping the original concurrency and applying 'iterations'."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config[Provisioning.PROV] = 'test'
    obj.execution = BetterDict()
    obj.execution.merge({
        "iterations": 20,
        "ramp-up": 10,
        "hold-for": "2m",
        "scenario": {"script": __dir__() + "/../jmx/SteppingThreadGroup.jmx"}
    })
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:  # close handle (was leaked)
        modified_xml_tree = etree.fromstring(fds.read())
    # plugin thread groups must be gone after conversion
    self.assertIsNone(modified_xml_tree.find(".//kg.apc.jmeter.threads.SteppingThreadGroup"))
    self.assertIsNone(modified_xml_tree.find(".//kg.apc.jmeter.threads.UltimateThreadGroup"))
    converted_st_tg = modified_xml_tree.find(".//ThreadGroup[@testname='stepping tg']")
    loop_ctrl = converted_st_tg.find(".//elementProp[@name='ThreadGroup.main_controller']")
    tg_loops = loop_ctrl.find(".//*[@name='LoopController.loops']")
    tg_forever = loop_ctrl.find(".//boolProp[@name='LoopController.continue_forever']")
    self.assertEqual(tg_loops.text, "20")
    self.assertEqual(tg_forever.text, "false")
    st_tg_concurrency = converted_st_tg.find(".//stringProp[@name='ThreadGroup.num_threads']")
    self.assertEqual(st_tg_concurrency.text, "123")
def test_2(self):
    """Drive the console reporter through its full lifecycle with a single
    JMeter executor feeding ten aggregated datapoints to a dummy screen."""
    obj = ConsoleStatusReporter()
    obj.engine = EngineEmul()
    obj.engine.provisioning = Local()
    obj.engine.config[Provisioning.PROV] = ''
    jmeter = JMeterExecutor()
    jmeter.engine = obj.engine
    jmeter.start_time = time.time()
    jmeter.execution[ScenarioExecutor.HOLD_FOR] = 10
    obj.engine.provisioning.executors = [jmeter]
    obj.settings["disable"] = False
    obj.settings['dummy_cols'] = 160
    obj.settings['dummy_rows'] = 40
    obj.prepare()
    obj.startup()
    for _ in range(10):
        obj.aggregated_second(self.__get_datapoint(0))
        obj.check()
    self.assertTrue(obj.screen.started)
    obj.check()
    obj.shutdown()
    obj.post_process()
def test_css_jquery_extractor(self):
    """'extract-css-jquery' config must generate HtmlExtractor elements with
    correct defaults (short form) and explicit values (full form)."""
    obj = JMeterExecutor()
    handler = RecordingHandler()
    obj.log.addHandler(handler)
    obj.engine = EngineEmul()
    with open(__dir__() + "/../json/get-post.json") as fds:  # close handle (was leaked)
        obj.engine.config = json.loads(fds.read())
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    target_jmx = os.path.join(obj.engine.artifacts_dir, "requests.jmx")
    with open(target_jmx, "rb") as fds:
        modified_xml_tree = etree.fromstring(fds.read())
    jq_css_extractors = modified_xml_tree.findall(".//HtmlExtractor")
    self.assertEqual(2, len(jq_css_extractors))
    # short form: defaults are filled in (no attribute, match 0, NOT_FOUND)
    simplified_extractor = modified_xml_tree.find(".//HtmlExtractor[@testname='Get name1']")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.refname']").text, "name1")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.expr']").text,
                     "input[name~=my_input]")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.attribute']").text, None)
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.match_number']").text, "0")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.default']").text, "NOT_FOUND")
    # full form: all explicitly configured values are carried through
    full_form_extractor = modified_xml_tree.find(".//HtmlExtractor[@testname='Get name2']")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.refname']").text, "name2")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.expr']").text,
                     "input[name=JMeter]")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.attribute']").text, "value")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.match_number']").text, "1")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.default']").text, "NV_JMETER")
    obj.log.removeHandler(handler)
def test_dns_cache_mgr_script(self):
    """A script-based scenario must not get a DNSCacheManager injected, and
    system.properties must carry user props but no inetaddr TTL override."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    obj.engine.config.merge({'execution': {'ramp-up': 10,
                                           'throughput': 2,
                                           'hold-for': 20,
                                           'concurrency': 5,
                                           'scenario': {'think-time': '0.75s',
                                                        'script': 'tests/jmx/http.jmx'}},
                             'modules': {'jmeter': {'system-properties': {'any_prop': 'true'},
                                                    'properties': {'log_level.jmeter': 'WARN',
                                                                   'log_level.jmeter.threads': 'DEBUG',
                                                                   'my-hostname': 'www.pre-test.com'}}}})
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.settings.merge(obj.engine.config.get("modules").get("jmeter"))
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:  # close handle (was leaked)
        xml_tree = etree.fromstring(fds.read())
    dns_managers = xml_tree.findall(".//DNSCacheManager")
    self.assertEqual(len(dns_managers), 0)  # 0 dns_managers for scripts
    with open(os.path.join(obj.engine.artifacts_dir, "system.properties")) as fds:
        sys_prop = fds.read()
    self.assertTrue("any_prop=true" in sys_prop)
    self.assertFalse("sun.net.inetaddr.ttl=0" in sys_prop)
def test_step_shaper(self):
    """Stepped throughput must yield one VariableThroughputTimer whose steps
    split the target RPS evenly; the last step also spans the hold time."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    with open("tests/yaml/stepping_ramp_up.yml") as fds:  # close handle (was leaked)
        # NOTE(review): yaml.load without an explicit Loader is deprecated;
        # these are trusted test fixtures, but yaml.safe_load would be safer.
        obj.engine.config.merge(yaml.load(fds.read()))
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.execution['throughput'] = 100
    obj.prepare()
    load = obj.get_load()
    with open(obj.modified_jmx, "rb") as fds:
        modified_xml_tree = etree.fromstring(fds.read())
    timer = modified_xml_tree.findall(".//kg.apc.jmeter.timers.VariableThroughputTimer")
    self.assertEqual(len(timer), 1)
    # NOTE(review): 'load_profile' is a collectionProp *name* attribute in
    # the JMX, not an element tag, so this findall may match nothing and
    # leave the loop vacuous -- confirm and consider
    # ".//collectionProp[@name='load_profile']" instead.
    for num, step_collection in enumerate(timer[0].findall(".//load_profile")):
        # Original compared lxml Element objects to numbers/strings, which
        # can never be equal; compare the text payloads instead.
        step_start_rps = step_collection.find(".//stringProp[@name='49']").text
        step_stop_rps = step_collection.find(".//stringProp[@name='1567']").text
        expected_rps = str(int(round(float(load.throughput) / load.steps)))
        self.assertTrue(step_start_rps == step_stop_rps == expected_rps)
        step_duration = step_collection.find(".//stringProp[@name='53']").text
        if num + 1 == load.steps:
            # last step also covers the hold period -- TODO confirm the
            # exact string formatting the shaper writes
            self.assertEqual(step_duration, str(load.hold + load.ramp_up / load.steps))
        else:
            self.assertEqual(step_duration, str(load.ramp_up / load.steps))
def test_add_shaper_constant(self):
    """Constant throughput YAML config must produce one shaper timer with a
    single (100 rps, 100 rps, 60 s) load-profile entry.

    NOTE(review): this name duplicates an earlier method in this module; if
    both live in the same class, this later definition shadows the earlier.
    """
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    with open("tests/yaml/throughput_constant.yml") as fds:  # close handle (was leaked)
        obj.engine.config.merge(yaml.load(fds.read()))
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    shaper_elements = xml_tree.findall(
        ".//kg.apc.jmeter.timers.VariableThroughputTimer"
        "[@testclass='kg.apc.jmeter.timers.VariableThroughputTimer']")
    self.assertEqual(1, len(shaper_elements))
    shaper_coll_element = shaper_elements[0].find(".//collectionProp[@name='load_profile']")
    self.assertEqual("100", shaper_coll_element.find(".//stringProp[@name='49']").text)
    self.assertEqual("100", shaper_coll_element.find(".//stringProp[@name='1567']").text)
    self.assertEqual("60", shaper_coll_element.find(".//stringProp[@name='53']").text)
def test_stepping_tg_ramp_proportion(self):
    """
    Tested with concurrency proportions: overridden concurrency/steps must be
    distributed across converted SteppingThreadGroups proportionally to each
    original ThreadGroup's thread count.
    :return:
    """
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    obj.engine.config.merge({'execution': {'steps': 5, 'concurrency': 170,
                                           'scenario': {'script': 'tests/jmx/stepping_ramp_up.jmx'},
                                           'ramp-up': '1m',
                                           'distributed': ['127.0.0.1'],
                                           'hold-for': '2m'}})
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.execution['concurrency'] = 100  # from 170 to 100
    obj.execution['steps'] = 4  # from 5 to 4
    obj.prepare()
    load = obj.get_load()
    with open(obj.original_jmx, "rb") as fds:  # close handle (was leaked)
        orig_xml_tree = etree.fromstring(fds.read())
    with open(obj.modified_jmx, "rb") as fds:
        modified_xml_tree = etree.fromstring(fds.read())
    mod_stepping_tgs = modified_xml_tree.findall(".//kg.apc.jmeter.threads.SteppingThreadGroup")
    orig_tgs = orig_xml_tree.findall(".//ThreadGroup")
    self.assertEqual(len(mod_stepping_tgs), len(orig_tgs))
    orig_summ_cnc = sum([int(x.find(".//stringProp[@name='ThreadGroup.num_threads']").text)
                         for x in orig_tgs])
    for orig_th, step_th in zip(orig_tgs, mod_stepping_tgs):
        orig_num_threads = int(orig_th.find(".//stringProp[@name='ThreadGroup.num_threads']").text)
        mod_num_threads = int(step_th.find(".//stringProp[@name='ThreadGroup.num_threads']").text)
        # each group keeps its share of the new total concurrency
        self.assertEqual(round(orig_num_threads * (float(load.concurrency) / orig_summ_cnc)),
                         mod_num_threads)
        self.assertEqual(step_th.find(".//stringProp[@name='Start users period']").text,
                         str(int(load.ramp_up / load.steps)))
        self.assertEqual(step_th.find(".//stringProp[@name='Start users count']").text,
                         str(int(ceil(float(load.concurrency) / orig_summ_cnc * orig_num_threads / load.steps))))
def test_stepping_tg_ramp_no_proportion(self):
    """
    Tested without concurrency proportions: each converted stepping group
    keeps its original thread count and splits the ramp-up across steps.
    :return:
    """
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    with open("tests/yaml/stepping_ramp_up.yml") as fds:  # close handle (was leaked)
        obj.engine.config.merge(yaml.load(fds.read()))
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    load = obj.get_load()
    with open(obj.original_jmx, "rb") as fds:
        orig_xml_tree = etree.fromstring(fds.read())
    with open(obj.modified_jmx, "rb") as fds:
        modified_xml_tree = etree.fromstring(fds.read())
    mod_stepping_tgs = modified_xml_tree.findall(".//kg.apc.jmeter.threads.SteppingThreadGroup")
    orig_tgs = orig_xml_tree.findall(".//ThreadGroup")
    self.assertEqual(len(mod_stepping_tgs), len(orig_tgs))
    for orig_th, step_th in zip(orig_tgs, mod_stepping_tgs):
        orig_num_threads = int(orig_th.find(".//stringProp[@name='ThreadGroup.num_threads']").text)
        mod_num_threads = int(step_th.find(".//stringProp[@name='ThreadGroup.num_threads']").text)
        self.assertEqual(orig_num_threads, mod_num_threads)  # unchanged concurrency
        self.assertEqual(step_th.find(".//stringProp[@name='Start users period']").text,
                         str(int(load.ramp_up / load.steps)))
        self.assertEqual(step_th.find(".//stringProp[@name='Start users count']").text,
                         str(int(orig_num_threads / load.steps)))
def test_empty_requests(self):
    """prepare() must fail when the scenario ends up with no requests.

    See https://groups.google.com/forum/#!topic/codename-taurus/iaT6O2UhfBE
    """
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    execution_cfg = {
        'ramp-up': '10s',
        'requests': ['http://blazedemo.com/',
                     'http://blazedemo.com/vacation.html'],
        'hold-for': '30s',
        'concurrency': 5,
        'scenario': {'think-time': 0.75},
    }
    obj.engine.config.merge({'execution': execution_cfg})
    obj.settings.merge(obj.engine.config.get("modules").get("jmeter"))
    obj.execution = obj.engine.config['execution']
    try:
        obj.prepare()
    except RuntimeError as exc:
        self.assertEqual(exc.args[0],
                         "Nothing to test, no requests were provided in scenario")
    else:
        self.fail()
def test_install_jmeter(self):
    """prepare() must auto-install JMeter and plugins from the mocked
    file:// download links; a second prepare() reuses the install.

    NOTE(review): a later method in this module reuses this name; if both
    live in the same class, only the last definition is collected.
    """
    path = os.path.abspath(__dir__() + "/../../build/tmp/jmeter-taurus/bin/jmeter")
    shutil.rmtree(os.path.dirname(os.path.dirname(path)), ignore_errors=True)
    jmeter_link = JMeterExecutor.JMETER_DOWNLOAD_LINK
    jmeter_ver = JMeterExecutor.JMETER_VER
    plugins_link = JMeterExecutor.PLUGINS_DOWNLOAD_TPL
    JMeterExecutor.JMETER_DOWNLOAD_LINK = "file://" + __dir__() + "/../data/jmeter-dist-{version}.zip"
    JMeterExecutor.PLUGINS_DOWNLOAD_TPL = "file://" + __dir__() + "/../data/jmeter-plugins-{plugin}.zip"
    JMeterExecutor.JMETER_VER = '2.13'
    try:
        self.assertFalse(os.path.exists(path))
        obj = JMeterExecutor()
        obj.engine = EngineEmul()
        obj.settings.merge({"path": path})
        obj.execution = BetterDict()
        obj.execution.merge({"scenario": {"requests": []}})
        obj.prepare()
        self.assertTrue(os.path.exists(path))
        obj.prepare()  # second run must be a no-op reuse of the install
    finally:
        # restore patched class attributes even when prepare() fails, so
        # later tests don't run against the mocked links (was unprotected)
        JMeterExecutor.JMETER_DOWNLOAD_LINK = jmeter_link
        JMeterExecutor.PLUGINS_DOWNLOAD_TPL = plugins_link
        JMeterExecutor.JMETER_VER = jmeter_ver
def test_fail_on_zero_results(self):
    """post_process() must raise RuntimeWarning when no samples were produced."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.aggregator = ConsolidatingAggregator()
    obj.execution = BetterDict()
    obj.execution.merge({"scenario": {"script": "tests/jmx/dummy.jmx"}})
    obj.prepare()
    self.assertRaises(RuntimeWarning, obj.post_process)
def test_resource_files_collection(self):
    """resource_files() must gather the script's 5 data files into artifacts."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.execution.merge({"scenario": {"script": "tests/jmx/files.jmx"}})
    collected = obj.resource_files()
    copied = os.listdir(obj.engine.artifacts_dir)
    self.assertEqual(5, len(collected))
    self.assertEqual(5, len(copied))
def test_install_jmeter(self):
    """prepare() must install JMeter from the mocked mirror/plugin links,
    strip the known-outdated bundled jars, and reuse the install on a second
    executor.

    NOTE(review): this name duplicates an earlier method in this module; if
    both live in the same class, this later definition shadows the earlier.
    """
    path = os.path.abspath(__dir__() + "/../../build/tmp/jmeter-taurus/bin/jmeter")
    shutil.rmtree(os.path.dirname(os.path.dirname(path)), ignore_errors=True)
    jmeter_link = JMeterExecutor.JMETER_DOWNLOAD_LINK
    jmeter_ver = JMeterExecutor.JMETER_VER
    plugins_link = JMeterExecutor.PLUGINS_DOWNLOAD_TPL
    mirrors_link = JMeterExecutor.MIRRORS_SOURCE
    JMeterExecutor.MIRRORS_SOURCE = "file:///" + __dir__() + "/../data/unicode_file"
    JMeterExecutor.JMETER_DOWNLOAD_LINK = "file:///" + __dir__() + "/../data/jmeter-dist-{version}.zip"
    JMeterExecutor.PLUGINS_DOWNLOAD_TPL = "file:///" + __dir__() + "/../data/JMeterPlugins-{plugin}-1.3.0.zip"
    JMeterExecutor.JMETER_VER = '2.13'
    try:
        self.assertFalse(os.path.exists(path))
        obj = JMeterExecutor()
        obj.engine = EngineEmul()
        obj.settings.merge({"path": path})
        obj.execution = BetterDict()
        obj.execution.merge({"scenario": {"requests": ["http://localhost"]}})
        obj.prepare()
        jars = os.listdir(os.path.abspath(os.path.join(path, '../../lib')))
        old_jars = ['httpcore-4.2.5.jar', 'httpmime-4.2.6.jar', 'xercesImpl-2.9.1.jar',
                    'commons-jexl-1.1.jar', 'httpclient-4.2.6.jar']
        for old_jar in old_jars:
            self.assertNotIn(old_jar, jars)  # outdated jars must be removed
        self.assertTrue(os.path.exists(path))
        obj = JMeterExecutor()
        obj.engine = EngineEmul()
        obj.settings.merge({"path": path})
        obj.execution = BetterDict()
        obj.execution.merge({"scenario": {"requests": ["http://localhost"]}})
        obj.prepare()  # existing install must be reused
    finally:
        # restore patched class attributes even when prepare() fails, so
        # later tests don't run against the mocked links (was unprotected)
        JMeterExecutor.JMETER_DOWNLOAD_LINK = jmeter_link
        JMeterExecutor.PLUGINS_DOWNLOAD_TPL = plugins_link
        JMeterExecutor.JMETER_VER = jmeter_ver
        JMeterExecutor.MIRRORS_SOURCE = mirrors_link
def test_csv_path_bug_in_distributed_mode(self):
    """Distributed runs must NOT leave local absolute paths in the modified
    JMX (checked via the reverse path-resource helper)."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.execution.merge({"scenario": {"script": "tests/jmx/files.jmx"}})
    obj.distributed_servers = ["127.0.0.1", "127.0.0.1"]
    obj.prepare()
    target_jmx = os.path.join(obj.engine.artifacts_dir, "modified_files.jmx.jmx")
    self.__check_path_resource_files(target_jmx, exclude_jtls=True, reverse_check=True)
def test_think_time_bug(self):
    """A fractional 'think-time' must render as a 750 ms ConstantTimer delay."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    with open("tests/yaml/think-time-bug.yml") as fds:  # close handle (was leaked)
        obj.engine.config.merge(yaml.load(fds.read()))
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    with open(obj.modified_jmx) as fds:
        result = fds.read()
    self.assertIn('<stringProp name="ConstantTimer.delay">750</stringProp>', result)
def test_jmeter_mirrors(self):
    """JMeter tool install() must succeed from mirrors into a clean dir."""
    path = os.path.abspath(__dir__() + "/../../build/tmp/jmeter-taurus/bin/jmeter" + EXE_SUFFIX)
    shutil.rmtree(os.path.dirname(os.path.dirname(path)), ignore_errors=True)
    executor = JMeterExecutor()
    tool = JMeter(path, executor.log, JMeterExecutor.JMETER_VER)
    tool.install()
def test_not_jmx(self):
    """prepare() must raise RuntimeError for a script that is not JMX at all
    (this very .py file is used as the bogus script)."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.execution = {"scenario": {"script": __file__}}
    self.assertRaises(RuntimeError, obj.prepare)
def get_jmeter(self):
    """Build a JMeterExecutor wired to the stub jmeter-loader binary and a
    dummy JMX script; returned unprepared for the caller to drive."""
    loader = RESOURCES_DIR + "jmeter/jmeter-loader" + EXE_SUFFIX
    executor = JMeterExecutor()
    executor.settings.merge({'path': loader})
    executor.execution.merge({"scenario": {"script": RESOURCES_DIR + "jmeter/jmx/dummy.jmx"}})
    return executor
def test_jmx(self):
    """A plain JMX script scenario must pass prepare() without errors."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.execution = BetterDict()
    obj.execution.merge({"scenario": {"script": __dir__() + "/../jmx/dummy.jmx"}})
    obj.prepare()
def test_user_def_vars_from_requests(self):
    """Requests-based scenario must emit exactly one Arguments (UDV) element."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    with open("tests/json/get-post.json") as fds:  # close handle (was leaked)
        obj.engine.config = json.loads(fds.read())
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    udv_elements = xml_tree.findall(".//Arguments[@testclass='Arguments']")
    self.assertEqual(1, len(udv_elements))
def test_resource_files_from_requests_remote_prov(self):
    """resource_files() must collect the 2 files referenced by requests into
    the artifacts dir."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    with open("tests/json/get-post.json") as fds:  # close handle (was leaked)
        obj.engine.config = json.loads(fds.read())
    obj.execution = obj.engine.config['execution']
    res_files = obj.resource_files()
    artifacts = os.listdir(obj.engine.artifacts_dir)
    self.assertEqual(len(res_files), 2)
    self.assertEqual(len(artifacts), 2)
def test_user_def_vars_override(self):
    """YAML-configured scenario variables must merge into a single Arguments
    (UDV) element.

    NOTE(review): this name duplicates an earlier method in this module; if
    both live in the same class, this later definition shadows the earlier.
    """
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    with open("tests/yaml/user_def_vars.yml") as fds:  # close handle (was leaked)
        obj.engine.config.merge(yaml.load(fds.read()))
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    udv_elements = xml_tree.findall(".//Arguments[@testclass='Arguments']")
    self.assertEqual(1, len(udv_elements))
def test_resource_files_collection_local_prov(self):
    """Local provisioning must copy resources into artifacts and rewrite the
    paths inside the modified JMX (checked via helper)."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.execution.merge({"scenario": {"script": "tests/jmx/files.jmx"}})
    obj.prepare()
    artifacts = os.listdir(obj.engine.artifacts_dir)
    self.assertEqual(7, len(artifacts))  # minus jmeter.log
    target_jmx = os.path.join(obj.engine.artifacts_dir, "modified_files.jmx.jmx")
    self.__check_path_resource_files(target_jmx, exclude_jtls=True)
def test_not_jmx_xml(self):
    """prepare() must reject an XML script that is not a JMX test plan.

    NOTE(review): a later method in this module reuses this name; if both
    live in the same class, only the last definition is collected.
    """
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.execution = BetterDict()
    bogus_script = __dir__() + "/../jmx/not-jmx.xml"
    obj.execution.merge({"scenario": {"script": bogus_script}})
    self.assertRaises(RuntimeError, obj.prepare)
def test_not_jmx_xml(self):
    """prepare() must raise RuntimeError for 'tests/jmx/not-jmx.xml' — valid
    XML that is not a JMX test plan."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.execution = BetterDict()
    obj.execution.merge({"scenario": {"script": "tests/jmx/not-jmx.xml"}})
    self.assertRaises(RuntimeError, obj.prepare)