def test_convert_tgroups_load_modifications(self):
    """JMX with plugin thread groups plus load settings: groups get converted to plain ThreadGroups."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config[Provisioning.PROV] = 'test'
    obj.execution = BetterDict()
    obj.execution.merge({
        "iterations": 20,
        "ramp-up": 10,
        "hold-for": "2m",
        "scenario": {"script": __dir__() + "/../jmx/SteppingThreadGroup.jmx"},
    })
    obj.prepare()
    # close the file handle instead of leaking it
    with open(obj.modified_jmx, "rb") as fds:
        modified_xml_tree = etree.fromstring(fds.read())
    # plugin thread groups must be gone after conversion
    st_tg = modified_xml_tree.find(".//kg.apc.jmeter.threads.SteppingThreadGroup")
    self.assertEqual(st_tg, None)
    ul_tg = modified_xml_tree.find(".//kg.apc.jmeter.threads.UltimateThreadGroup")
    self.assertEqual(ul_tg, None)
    converted_st_tg = modified_xml_tree.find(".//ThreadGroup[@testname='stepping tg']")
    loop_ctrl = converted_st_tg.find(".//elementProp[@name='ThreadGroup.main_controller']")
    tg_loops = loop_ctrl.find(".//*[@name='LoopController.loops']")
    tg_forever = loop_ctrl.find(".//boolProp[@name='LoopController.continue_forever']")
    self.assertEqual(tg_loops.text, "20")
    self.assertEqual(tg_forever.text, "false")
    st_tg_concurrency = converted_st_tg.find(".//stringProp[@name='ThreadGroup.num_threads']")
    self.assertEqual(st_tg_concurrency.text, "123")
def test_install_jmeter(self):
    """Auto-install JMeter + plugins from local archives into the tools directory.

    Restores the patched class-level download settings in a finally block so a
    failing prepare() no longer poisons later tests.
    """
    # NOTE(review): this class defines test_install_jmeter more than once;
    # only the last definition is collected by the runner — confirm intent.
    path = os.path.abspath(__dir__() + "/../../build/tmp/jmeter-taurus/bin/jmeter")
    shutil.rmtree(os.path.dirname(os.path.dirname(path)), ignore_errors=True)
    # remember the real download locations before patching them
    jmeter_link = JMeterExecutor.JMETER_DOWNLOAD_LINK
    jmeter_ver = JMeterExecutor.JMETER_VER
    plugins_link = JMeterExecutor.PLUGINS_DOWNLOAD_TPL
    JMeterExecutor.JMETER_DOWNLOAD_LINK = "file://" + __dir__() + "/../data/jmeter-dist-{version}.zip"
    JMeterExecutor.PLUGINS_DOWNLOAD_TPL = "file://" + __dir__() + "/../data/jmeter-plugins-{plugin}.zip"
    JMeterExecutor.JMETER_VER = '2.13'
    try:
        self.assertFalse(os.path.exists(path))
        obj = JMeterExecutor()
        obj.engine = EngineEmul()
        obj.settings.merge({"path": path})
        obj.execution = BetterDict()
        obj.execution.merge({"scenario": {"requests": []}})
        obj.prepare()
        self.assertTrue(os.path.exists(path))
        obj.prepare()  # second prepare() must work against the existing install
    finally:
        # restore class attributes even if prepare() raised
        JMeterExecutor.JMETER_DOWNLOAD_LINK = jmeter_link
        JMeterExecutor.PLUGINS_DOWNLOAD_TPL = plugins_link
        JMeterExecutor.JMETER_VER = jmeter_ver
def test_dns_cache_mgr_script(self):
    """Script-based scenario gets no DNSCacheManager; system-properties still land in system.properties."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    obj.engine.config.merge({'execution': {'ramp-up': 10, 'throughput': 2, 'hold-for': 20, 'concurrency': 5,
                                           'scenario': {'think-time': '0.75s',
                                                        'script': 'tests/jmx/http.jmx'}},
                             'modules': {'jmeter': {'system-properties': {'any_prop': 'true'},
                                                    'properties': {'log_level.jmeter': 'WARN',
                                                                   'log_level.jmeter.threads': 'DEBUG',
                                                                   'my-hostname': 'www.pre-test.com'}}}})
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.settings.merge(obj.engine.config.get("modules").get("jmeter"))
    obj.prepare()
    # close file handles instead of leaking them
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    dns_managers = xml_tree.findall(".//DNSCacheManager")
    self.assertEqual(len(dns_managers), 0)  # 0 dns managers
    with open(os.path.join(obj.engine.artifacts_dir, "system.properties")) as props:
        sys_prop = props.read()
    # assertIn/assertNotIn give clearer failure messages than assertTrue(x in y)
    self.assertIn("any_prop=true", sys_prop)
    self.assertNotIn("sun.net.inetaddr.ttl=0", sys_prop)
def test_fail_on_zero_results(self):
    """post_process() must raise RuntimeWarning when the run produced no results."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    execution = BetterDict()
    execution.merge({"scenario": {"script": "tests/jmx/dummy.jmx"}})
    executor.execution = execution
    executor.prepare()
    self.assertRaises(RuntimeWarning, executor.post_process)
def test_stepping_tg_ramp_no_proportion(self):
    """Stepping-TG conversion without concurrency proportions: per-group thread counts preserved.

    :return:
    """
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    # close file handles instead of leaking them
    with open("tests/yaml/stepping_ramp_up.yml") as fds:
        obj.engine.config.merge(yaml.load(fds.read()))
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    load = obj.get_load()
    with open(obj.original_jmx, "rb") as fds:
        orig_xml_tree = etree.fromstring(fds.read())
    with open(obj.modified_jmx, "rb") as fds:
        modified_xml_tree = etree.fromstring(fds.read())
    mod_stepping_tgs = modified_xml_tree.findall(".//kg.apc.jmeter.threads.SteppingThreadGroup")
    orig_tgs = orig_xml_tree.findall(".//ThreadGroup")
    self.assertEqual(len(mod_stepping_tgs), len(orig_tgs))
    for orig_th, step_th in zip(orig_tgs, mod_stepping_tgs):
        orig_num_threads = int(orig_th.find(".//stringProp[@name='ThreadGroup.num_threads']").text)
        mod_num_threads = int(step_th.find(".//stringProp[@name='ThreadGroup.num_threads']").text)
        self.assertEqual(orig_num_threads, mod_num_threads)
        self.assertEqual(step_th.find(".//stringProp[@name='Start users period']").text,
                         str(int(load.ramp_up / load.steps)))
        self.assertEqual(step_th.find(".//stringProp[@name='Start users count']").text,
                         str(int(orig_num_threads / load.steps)))
def test_stepping_tg_ramp_proportion(self):
    """Stepping-TG conversion with concurrency proportions: per-group counts rescaled.

    :return:
    """
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    obj.engine.config.merge({'execution': {'steps': 5, 'concurrency': 170,
                                           'scenario': {'script': 'tests/jmx/stepping_ramp_up.jmx'},
                                           'ramp-up': '1m', 'distributed': ['127.0.0.1'],
                                           'hold-for': '2m'}})
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.execution['concurrency'] = 100  # from 170 to 100
    obj.execution['steps'] = 4  # from 5 to 4
    obj.prepare()
    load = obj.get_load()
    # close file handles instead of leaking them
    with open(obj.original_jmx, "rb") as fds:
        orig_xml_tree = etree.fromstring(fds.read())
    with open(obj.modified_jmx, "rb") as fds:
        modified_xml_tree = etree.fromstring(fds.read())
    mod_stepping_tgs = modified_xml_tree.findall(".//kg.apc.jmeter.threads.SteppingThreadGroup")
    orig_tgs = orig_xml_tree.findall(".//ThreadGroup")
    self.assertEqual(len(mod_stepping_tgs), len(orig_tgs))
    orig_summ_cnc = sum(int(x.find(".//stringProp[@name='ThreadGroup.num_threads']").text)
                        for x in orig_tgs)
    for orig_th, step_th in zip(orig_tgs, mod_stepping_tgs):
        orig_num_threads = int(orig_th.find(".//stringProp[@name='ThreadGroup.num_threads']").text)
        mod_num_threads = int(step_th.find(".//stringProp[@name='ThreadGroup.num_threads']").text)
        self.assertEqual(round(orig_num_threads * (float(load.concurrency) / orig_summ_cnc)),
                         mod_num_threads)
        self.assertEqual(step_th.find(".//stringProp[@name='Start users period']").text,
                         str(int(load.ramp_up / load.steps)))
        self.assertEqual(step_th.find(".//stringProp[@name='Start users count']").text,
                         str(int(ceil(float(load.concurrency) / orig_summ_cnc * orig_num_threads / load.steps))))
def test_css_jquery_extractor(self):
    """CSS/JQuery extractors from the requests config become HtmlExtractor elements."""
    obj = JMeterExecutor()
    handler = RecordingHandler()
    obj.log.addHandler(handler)
    obj.engine = EngineEmul()
    # close file handles instead of leaking them
    with open(__dir__() + "/../json/get-post.json") as fds:
        obj.engine.config = json.loads(fds.read())
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    target_jmx = os.path.join(obj.engine.artifacts_dir, "requests.jmx")
    with open(target_jmx, "rb") as fds:
        modified_xml_tree = etree.fromstring(fds.read())
    jq_css_extractors = modified_xml_tree.findall(".//HtmlExtractor")
    self.assertEqual(2, len(jq_css_extractors))
    # shorthand extractor config fills in defaults
    simplified_extractor = modified_xml_tree.find(".//HtmlExtractor[@testname='Get name1']")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.refname']").text, "name1")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.expr']").text,
                     "input[name~=my_input]")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.attribute']").text, None)
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.match_number']").text, "0")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.default']").text, "NOT_FOUND")
    # fully-specified extractor config is carried over verbatim
    full_form_extractor = modified_xml_tree.find(".//HtmlExtractor[@testname='Get name2']")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.refname']").text, "name2")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.expr']").text,
                     "input[name=JMeter]")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.attribute']").text, "value")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.match_number']").text, "1")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.default']").text, "NV_JMETER")
    obj.log.removeHandler(handler)
def test_install_jmeter(self):
    """Installing JMeter drops outdated bundled jars from lib/ and is rerunnable."""
    install_path = os.path.abspath(__dir__() + "/../../build/tmp/jmeter-taurus/bin/jmeter" + EXE_SUFFIX)
    shutil.rmtree(os.path.dirname(os.path.dirname(install_path)), ignore_errors=True)
    self.assertFalse(os.path.exists(install_path))

    def make_executor():
        # fresh executor pointed at the throwaway install location
        executor = JMeterExecutor()
        executor.engine = EngineEmul()
        executor.settings.merge({"path": install_path})
        executor.execution = BetterDict()
        executor.execution.merge({"scenario": {"requests": ["http://localhost"]}})
        return executor

    make_executor().prepare()
    lib_dir = os.path.abspath(os.path.join(install_path, '../../lib'))
    jars = os.listdir(lib_dir)
    outdated = ['httpcore-4.2.5.jar', 'httpmime-4.2.6.jar', 'xercesImpl-2.9.1.jar',
                'commons-jexl-1.1.jar', 'httpclient-4.2.6.jar']
    for jar in outdated:
        self.assertNotIn(jar, jars)
    self.assertTrue(os.path.exists(install_path))
    # a second prepare() against the existing install must also succeed
    make_executor().prepare()
def test_iterations_loop_bug(self):
    """Explicit iterations set LoopController loops; without them the default of 1 stays enabled."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config[Provisioning.PROV] = 'test'
    obj.execution = BetterDict()
    obj.execution.merge({"iterations": 10, "scenario": {"script": __dir__() + "/../jmx/http.jmx"}})
    obj.prepare()
    # close file handles instead of leaking them
    with open(obj.modified_jmx, "rb") as fds:
        modified_xml_tree = etree.fromstring(fds.read())
    tg = modified_xml_tree.find(".//ThreadGroup")
    loop_ctrl = tg.find(".//elementProp[@name='ThreadGroup.main_controller']")
    tg_loops = loop_ctrl.find(".//stringProp[@name='LoopController.loops']")
    tg_forever = loop_ctrl.find(".//boolProp[@name='LoopController.continue_forever']")
    self.assertEqual(tg_loops.text, "10")
    self.assertEqual(tg_forever.text, "false")

    # second run: no iterations configured at all
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config[Provisioning.PROV] = 'test'
    obj.execution = BetterDict()
    obj.execution.merge({"scenario": {"script": __dir__() + "/../jmx/http.jmx"}})
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:
        modified_xml_tree = etree.fromstring(fds.read())
    tg = modified_xml_tree.find(".//ThreadGroup")
    loop_ctrl = tg.find(".//elementProp[@name='ThreadGroup.main_controller']")
    tg_loops = loop_ctrl.find("*[@name='LoopController.loops']")
    tg_forever = loop_ctrl.find(".//boolProp[@name='LoopController.continue_forever']")
    self.assertEqual(tg_loops.text, "1")  # default value, not disabled
    self.assertEqual(tg_forever.text, "false")
def test_fail_on_zero_results(self):
    """With a consolidating aggregator attached, an empty run still raises RuntimeWarning."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.engine.aggregator = ConsolidatingAggregator()
    execution = BetterDict()
    execution.merge({"scenario": {"script": __dir__() + "/../jmx/dummy.jmx"}})
    executor.execution = execution
    executor.prepare()
    self.assertRaises(RuntimeWarning, executor.post_process)
def test_csv_path_bug_in_distributed_mode(self):
    """In distributed mode resource-file paths must not be rewritten locally."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.execution.merge({"scenario": {"script": "tests/jmx/files.jmx"}})
    executor.distributed_servers = ["127.0.0.1", "127.0.0.1"]
    executor.prepare()
    modified_jmx = os.path.join(executor.engine.artifacts_dir, "modified_files.jmx.jmx")
    self.__check_path_resource_files(modified_jmx, exclude_jtls=True, reverse_check=True)
def test_user_def_vars_override(self):
    """YAML-configured user-defined variables collapse into a single Arguments element."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    # close file handles instead of leaking them
    with open("tests/yaml/user_def_vars.yml") as fds:
        obj.engine.config.merge(yaml.load(fds.read()))
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    udv_elements = xml_tree.findall(".//Arguments[@testclass='Arguments']")
    self.assertEqual(1, len(udv_elements))
def test_resource_files_collection_local_prov(self):
    """Local provisioning collects the script's resource files into the artifacts dir."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.execution.merge({"scenario": {"script": "tests/jmx/files.jmx"}})
    executor.prepare()
    self.assertEqual(len(os.listdir(executor.engine.artifacts_dir)), 7)  # minus jmeter.log
    modified_jmx = os.path.join(executor.engine.artifacts_dir, "modified_files.jmx.jmx")
    self.__check_path_resource_files(modified_jmx, exclude_jtls=True)
def test_user_def_vars_from_requests(self):
    """Variables in a requests-based scenario yield exactly one Arguments element."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    # close file handles instead of leaking them
    with open("tests/json/get-post.json") as fds:
        obj.engine.config = json.loads(fds.read())
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    udv_elements = xml_tree.findall(".//Arguments[@testclass='Arguments']")
    self.assertEqual(1, len(udv_elements))
def test_think_time_bug(self):
    """YAML think-time of 0.75s must render as a 750 ms ConstantTimer delay."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    # close file handles instead of leaking them
    with open("tests/yaml/think-time-bug.yml") as fds:
        obj.engine.config.merge(yaml.load(fds.read()))
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    with open(obj.modified_jmx) as fds:
        result = fds.read()
    self.assertIn('<stringProp name="ConstantTimer.delay">750</stringProp>', result)
def test_jmx(self):
    """Smoke test: prepare() accepts an existing plain JMX script."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    scenario = {"script": __dir__() + "/../jmx/dummy.jmx"}
    executor.execution = BetterDict()
    executor.execution.merge({"scenario": scenario})
    executor.prepare()
def test_datasources_without_delimiter(self):
    """A data source without an explicit delimiter is accepted by prepare()."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    scenario = {"requests": ["http://localhost"],
                "data-sources": [{"path": __dir__() + "/../data/test2.csv"}]}
    executor.execution = BetterDict()
    executor.execution.merge({"scenario": scenario})
    executor.prepare()
def test_not_jmx(self):
    """A script that is not JMX must be rejected with RuntimeError during prepare()."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.execution = {"scenario": {"script": __file__}}
    # assertRaises replaces the manual try/fail/except pattern
    with self.assertRaises(RuntimeError):
        obj.prepare()
def test_resource_files_from_requests_local_prov(self):
    """Requests-based scenario produces the expected artifact count under local provisioning."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    # close the file handle instead of leaking it
    with open("tests/json/get-post.json") as fds:
        obj.engine.config = json.loads(fds.read())
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    artifacts = os.listdir(obj.engine.artifacts_dir)
    self.assertEqual(len(artifacts), 6)  # + system.properties, minus jmeter.log
    target_jmx = os.path.join(obj.engine.artifacts_dir, "modified_requests.jmx.jmx")
    self.__check_path_resource_files(target_jmx, exclude_jtls=True)
def test_not_jmx_xml(self):
    """XML that is not a JMX test plan must be rejected with RuntimeError."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.execution = BetterDict()
    obj.execution.merge({"scenario": {"script": "tests/jmx/not-jmx.xml"}})
    # assertRaises replaces the manual try/fail/except pattern
    with self.assertRaises(RuntimeError):
        obj.prepare()
def test_jtl_none(self):
    """An unrecognized write-xml-jtl value must not break prepare()."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    config = {"write-xml-jtl": "bla-bla-bla",
              "scenario": {"requests": [{"url": "http://blazedemo.com"}]}}
    executor.execution.merge(config)
    executor.prepare()
def test_user_def_vars_override(self):
    """Scenario variables merge into a single Arguments (user-defined variables) element."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config.merge({'execution': {'concurrency': 200, 'throughput': 100, 'hold-for': '1m',
                                           'scenario': {'variables': {'my_var': 'http://demo.blazemeter.com/api/user',
                                                                      'myvar2': 'val2'},
                                                        'properties': {'log_level.jmeter': 'DEBUG'},
                                                        'script': 'tests/jmx/http.jmx'}}})
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    # close the file handle instead of leaking it
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    udv_elements = xml_tree.findall(".//Arguments[@testclass='Arguments']")
    self.assertEqual(1, len(udv_elements))
def test_variable_csv_file(self):
    """A CSV file path containing a ${variable} must be left untouched in the JMX."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.execution.merge({"scenario": {"script": __dir__() + "/../jmx/variable_csv.jmx"}})
    executor.prepare()
    self.assertEqual(len(os.listdir(executor.engine.artifacts_dir)), 3)  # minus jmeter.log
    modified_jmx = os.path.join(executor.engine.artifacts_dir, "modified_variable_csv.jmx.jmx")
    with open(modified_jmx) as fds:
        contents = fds.read()
    self.assertIn('<stringProp name="filename">${root}/csvfile.csv</stringProp>', contents)
def test_broken_xml(self):
    """Malformed XML in the script must be rejected with RuntimeError."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.execution = BetterDict()
    obj.execution.merge({"scenario": {"script": __dir__() + "/../jmx/broken.jmx"}})
    # assertRaises replaces the manual try/fail/except pattern
    with self.assertRaises(RuntimeError):
        obj.prepare()
def test_resource_files_from_requests_local_prov(self):
    """All expected artifact files are present after preparing a requests-based scenario."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    # close the file handle instead of leaking it
    with open(__dir__() + "/../json/get-post.json") as fds:
        obj.engine.config = json.loads(fds.read())
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    files = ['http.jmx', 'jmeter-bzt.properties', 'modified_requests.jmx.jmx']
    files += ['requests.jmx', 'system.properties', 'test1.csv']
    artifacts = os.listdir(obj.engine.artifacts_dir)
    # generator expression — no need to materialize a list for all()
    self.assertTrue(all(_file in artifacts for _file in files))  # +system.properties, -jmeter.log
    target_jmx = os.path.join(obj.engine.artifacts_dir, "modified_requests.jmx.jmx")
    self.__check_path_resource_files(target_jmx, exclude_jtls=True)
def test_css_jquery_extractor(self):
    """CSS/JQuery extractors are rendered as HtmlExtractor elements with correct props."""
    obj = JMeterExecutor()
    handler = RecordingHandler()
    obj.log.addHandler(handler)
    obj.engine = EngineEmul()
    # close file handles instead of leaking them
    with open(__dir__() + "/../json/get-post.json") as fds:
        obj.engine.config = json.loads(fds.read())
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    target_jmx = os.path.join(obj.engine.artifacts_dir, "requests.jmx")
    with open(target_jmx, "rb") as fds:
        modified_xml_tree = etree.fromstring(fds.read())
    jq_css_extractors = modified_xml_tree.findall(".//HtmlExtractor")
    self.assertEqual(2, len(jq_css_extractors))
    # shorthand extractor config fills in defaults
    simplified_extractor = modified_xml_tree.find(".//HtmlExtractor[@testname='Get name1']")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.refname']").text,
                     "name1")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.expr']").text,
                     "input[name~=my_input]")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.attribute']").text,
                     None)
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.match_number']").text,
                     "0")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.default']").text,
                     "NOT_FOUND")
    # fully-specified extractor config is carried over verbatim
    full_form_extractor = modified_xml_tree.find(".//HtmlExtractor[@testname='Get name2']")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.refname']").text,
                     "name2")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.expr']").text,
                     "input[name=JMeter]")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.attribute']").text,
                     "value")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.match_number']").text,
                     "1")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.default']").text,
                     "NV_JMETER")
    obj.log.removeHandler(handler)
def test_jtl_none(self):
    """Unknown write-xml-jtl value is tolerated by prepare()."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    request = {"url": "http://blazedemo.com"}
    executor.execution.merge({"write-xml-jtl": "bla-bla-bla",
                              "scenario": {"requests": [request]}})
    executor.prepare()
def test_issue_no_iterations(self):
    """Concurrency plus ramp-up without iterations must prepare cleanly."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.execution = BetterDict()
    executor.execution.merge({"concurrency": 10,
                              "ramp-up": 10,
                              "scenario": {"script": __dir__() + "/../jmx/issue_no_iterations.jmx"}})
    executor.prepare()
def test_think_time_bug(self):
    """think-time 0.75 (seconds) renders as a 750 ms ConstantTimer delay."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config.merge({'execution': {'ramp-up': '1m', 'hold-for': '1m30s', 'concurrency': 10,
                                           'scenario': {'think-time': 0.75,
                                                        'requests': ['http://blazedemo.com/',
                                                                     'http://blazedemo.com/vacation.html']}}})
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    # close the file handle instead of leaking it
    with open(obj.modified_jmx) as fds:
        result = fds.read()
    self.assertIn('<stringProp name="ConstantTimer.delay">750</stringProp>', result)
def test_datasources_without_delimiter(self):
    """Omitting the delimiter on a data-source entry must not break prepare()."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.execution = BetterDict()
    data_source = {"path": __dir__() + "/../data/test2.csv"}
    executor.execution.merge({"scenario": {"requests": ["http://localhost"],
                                           "data-sources": [data_source]}})
    executor.prepare()
def test_empty_requests(self):
    """A scenario with no requests must fail prepare() with a descriptive RuntimeError."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    # close the file handle instead of leaking it
    with open("tests/yaml/startup_no_requests.yml") as fds:
        obj.engine.config.merge(yaml.load(fds.read()))
    obj.settings.merge(obj.engine.config.get("modules").get("jmeter"))
    obj.execution = obj.engine.config['execution']
    # assertRaises context replaces the manual try/fail/except pattern
    with self.assertRaises(RuntimeError) as ctx:
        obj.prepare()
    self.assertEqual(ctx.exception.args[0], "Nothing to test, no requests were provided in scenario")
def test_convert_tgroups_no_load(self):
    """Without load settings, plugin thread groups are left unconverted in the JMX."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config[Provisioning.PROV] = 'test'
    obj.execution = BetterDict()
    obj.execution.merge({
        "scenario": {"script": __dir__() + "/../jmx/SteppingThreadGroup.jmx"}
    })
    obj.prepare()
    # close the file handle instead of leaking it
    with open(obj.modified_jmx, "rb") as fds:
        modified_xml_tree = etree.fromstring(fds.read())
    st_tg = modified_xml_tree.find(".//kg.apc.jmeter.threads.SteppingThreadGroup")
    self.assertNotEqual(st_tg, None)
    ul_tg = modified_xml_tree.find(".//kg.apc.jmeter.threads.UltimateThreadGroup")
    self.assertNotEqual(ul_tg, None)
def test_distributed_gui(self):
    """Distributed GUI config writes remote_hosts into jmeter-bzt.properties."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    # close the file handle instead of leaking it
    with open("tests/yaml/distributed_gui.yml") as fds:
        obj.engine.config.merge(yaml.load(fds.read()))
    obj.settings.merge(obj.engine.config.get("modules").get("jmeter"))
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    prop_file_path = os.path.join(obj.engine.artifacts_dir, "jmeter-bzt.properties")
    self.assertTrue(os.path.exists(prop_file_path))
    with open(prop_file_path) as prop_file:
        contents = prop_file.read()
    self.assertIn("remote_hosts=127.0.0.1,127.0.0.2", contents)
def test_think_time_bug(self):
    """think-time 0.75 (seconds) renders as a 750 ms ConstantTimer delay (fresh config dict)."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    obj.engine.config.merge({'execution': {'ramp-up': '1m', 'hold-for': '1m30s', 'concurrency': 10,
                                           'scenario': {'think-time': 0.75,
                                                        'requests': ['http://blazedemo.com/',
                                                                     'http://blazedemo.com/vacation.html']}}})
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    # close the file handle instead of leaking it
    with open(obj.modified_jmx) as fds:
        result = fds.read()
    self.assertIn('<stringProp name="ConstantTimer.delay">750</stringProp>', result)
def test_stepping_tg_ramp_proportion(self):
    """Stepping-TG conversion with concurrency proportions, config loaded from YAML.

    :return:
    """
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    # close file handles instead of leaking them
    with open("tests/yaml/stepping_ramp_up.yml") as fds:
        obj.engine.config.merge(yaml.load(fds.read()))
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.execution['concurrency'] = 100  # from 170 to 100
    obj.execution['steps'] = 4  # from 5 to 4
    obj.prepare()
    load = obj.get_load()
    with open(obj.original_jmx, "rb") as fds:
        orig_xml_tree = etree.fromstring(fds.read())
    with open(obj.modified_jmx, "rb") as fds:
        modified_xml_tree = etree.fromstring(fds.read())
    mod_stepping_tgs = modified_xml_tree.findall(".//kg.apc.jmeter.threads.SteppingThreadGroup")
    orig_tgs = orig_xml_tree.findall(".//ThreadGroup")
    self.assertEqual(len(mod_stepping_tgs), len(orig_tgs))
    orig_summ_cnc = sum(int(x.find(".//stringProp[@name='ThreadGroup.num_threads']").text)
                        for x in orig_tgs)
    for orig_th, step_th in zip(orig_tgs, mod_stepping_tgs):
        orig_num_threads = int(orig_th.find(".//stringProp[@name='ThreadGroup.num_threads']").text)
        mod_num_threads = int(step_th.find(".//stringProp[@name='ThreadGroup.num_threads']").text)
        self.assertEqual(round(orig_num_threads * (float(load.concurrency) / orig_summ_cnc)),
                         mod_num_threads)
        self.assertEqual(step_th.find(".//stringProp[@name='Start users period']").text,
                         str(int(load.ramp_up / load.steps)))
        self.assertEqual(step_th.find(".//stringProp[@name='Start users count']").text,
                         str(int(ceil(float(load.concurrency) / orig_summ_cnc * orig_num_threads / load.steps))))
def test_empty_requests(self):
    """Requests placed outside the scenario leave it empty, so prepare() must fail."""
    # https://groups.google.com/forum/#!topic/codename-taurus/iaT6O2UhfBE
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.engine.config = BetterDict()
    config = {'execution': {'ramp-up': '10s',
                            'requests': ['http://blazedemo.com/', 'http://blazedemo.com/vacation.html'],
                            'hold-for': '30s', 'concurrency': 5,
                            'scenario': {'think-time': 0.75}}}
    executor.engine.config.merge(config)
    executor.settings.merge(executor.engine.config.get("modules").get("jmeter"))
    executor.execution = executor.engine.config['execution']
    try:
        executor.prepare()
        self.fail()
    except RuntimeError as exc:
        self.assertEqual(exc.args[0], "Nothing to test, no requests were provided in scenario")
def test_dns_cache_mgr_scenario(self):
    """No system properties configured: no DNS cache manager and no system.properties file.

    :return:
    """
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.execution.merge({"scenario": {"script": "tests/jmx/http.jmx"}})
    obj.prepare()
    # close the file handle instead of leaking it
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    dns_element = xml_tree.findall(".//DNSCacheManager")
    # no dns manager when using jmx, no system.properties file
    self.assertEqual(len(dns_element), 0)
    arts = os.listdir(obj.engine.artifacts_dir)
    self.assertNotIn("system.properties", arts)
def test_body_parse(self):
    """Body params of a request become nested Arguments elementProps in the generated JMX."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    # close file handles instead of leaking them
    with open("tests/json/get-post.json") as fds:
        obj.engine.config = json.loads(fds.read())
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    sampler_element = xml_tree.findall(".//HTTPSamplerProxy[@testname='With body params']")
    arguments_element_prop = sampler_element[0][0]
    self.assertEqual(7, len(sampler_element[0].getchildren()))
    self.assertEqual(1, len(arguments_element_prop.getchildren()))
    self.assertEqual(2, len(arguments_element_prop[0].getchildren()))
    self.assertEqual(1, len(arguments_element_prop[0].findall(".//elementProp[@name='param1']")))
    self.assertEqual(1, len(arguments_element_prop.findall(".//elementProp[@name='param2']")))
def test_resource_files_from_requests_local_prov(self):
    """Requests-based scenario gathers the expected artifact files under local provisioning."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    # close the file handle instead of leaking it
    with open(__dir__() + "/../json/get-post.json") as fds:
        obj.engine.config = json.loads(fds.read())
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    files = ['http.jmx', 'jmeter-bzt.properties', 'modified_requests.jmx.jmx']
    files += ['requests.jmx', 'system.properties', 'test1.csv']
    artifacts = os.listdir(obj.engine.artifacts_dir)
    # generator expression — no need to materialize a list for all()
    self.assertTrue(all(_file in artifacts for _file in files))  # +system.properties, -jmeter.log
    target_jmx = os.path.join(obj.engine.artifacts_dir, "modified_requests.jmx.jmx")
    self.__check_path_resource_files(target_jmx, exclude_jtls=True)
def test_shutdown_soft(self):
    """Soft shutdown: JMeter should stop on the Shutdown command and log that fact."""
    obj = JMeterExecutor()
    log_recorder = RecordingHandler()
    obj.log.addHandler(log_recorder)
    obj.engine = EngineEmul()
    obj.execution = BetterDict()
    obj.execution.merge({"scenario": {"script": "tests/jmx/dummy.jmx"}})
    try:
        obj.prepare()
        obj.startup()
        time.sleep(1)
        obj.shutdown()
    except BaseException:  # was a bare `except:`; keep the catch-all but make it explicit
        self.fail()
    finally:
        obj.log.removeHandler(log_recorder)
    self.assertIn("JMeter stopped on Shutdown command", log_recorder.debug_buff.getvalue())
def test_stepping_tg_ramp_no_proportion(self):
    """Stepping-TG conversion without concurrency proportions, config built inline.

    :return:
    """
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    obj.engine.config.merge({
        'execution': {
            'steps': 5,
            'concurrency': 170,
            'scenario': {'script': __dir__() + '/../jmx/stepping_ramp_up.jmx'},
            'ramp-up': '1m',
            'distributed': ['127.0.0.1'],
            'hold-for': '2m',
        }
    })
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    load = obj.get_load()
    # close file handles instead of leaking them
    with open(obj.original_jmx, "rb") as fds:
        orig_xml_tree = etree.fromstring(fds.read())
    with open(obj.modified_jmx, "rb") as fds:
        modified_xml_tree = etree.fromstring(fds.read())
    mod_stepping_tgs = modified_xml_tree.findall(".//kg.apc.jmeter.threads.SteppingThreadGroup")
    orig_tgs = orig_xml_tree.findall(".//ThreadGroup")
    self.assertEqual(len(mod_stepping_tgs), len(orig_tgs))
    for orig_th, step_th in zip(orig_tgs, mod_stepping_tgs):
        orig_num_threads = int(orig_th.find(".//stringProp[@name='ThreadGroup.num_threads']").text)
        mod_num_threads = int(step_th.find(".//stringProp[@name='ThreadGroup.num_threads']").text)
        self.assertEqual(orig_num_threads, mod_num_threads)
        self.assertEqual(step_th.find(".//stringProp[@name='Start users period']").text,
                         str(int(load.ramp_up / load.steps)))
        self.assertEqual(step_th.find(".//stringProp[@name='Start users count']").text,
                         str(int(orig_num_threads / load.steps)))
def test_add_shaper_constant(self):
    """Constant throughput config produces one VariableThroughputTimer with the expected schedule."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    # close file handles instead of leaking them
    with open("tests/yaml/throughput_constant.yml") as fds:
        obj.engine.config.merge(yaml.load(fds.read()))
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    shaper_elements = xml_tree.findall(
        ".//kg.apc.jmeter.timers.VariableThroughputTimer"
        "[@testclass='kg.apc.jmeter.timers.VariableThroughputTimer']")
    self.assertEqual(1, len(shaper_elements))
    shaper_coll_element = shaper_elements[0].find(".//collectionProp[@name='load_profile']")
    self.assertEqual("100", shaper_coll_element.find(".//stringProp[@name='49']").text)
    self.assertEqual("100", shaper_coll_element.find(".//stringProp[@name='1567']").text)
    self.assertEqual("60", shaper_coll_element.find(".//stringProp[@name='53']").text)
def test_install_jmeter(self):
    """Installation via mirror list; patched class attributes restored even if prepare() fails."""
    path = os.path.abspath(__dir__() + "/../../build/tmp/jmeter-taurus/bin/jmeter")
    shutil.rmtree(os.path.dirname(os.path.dirname(path)), ignore_errors=True)
    # remember the real download locations before patching them
    jmeter_link = JMeterExecutor.JMETER_DOWNLOAD_LINK
    jmeter_ver = JMeterExecutor.JMETER_VER
    plugins_link = JMeterExecutor.PLUGINS_DOWNLOAD_TPL
    mirrors_link = JMeterExecutor.MIRRORS_SOURCE
    JMeterExecutor.MIRRORS_SOURCE = "file:///" + __dir__() + "/../data/unicode_file"
    JMeterExecutor.JMETER_DOWNLOAD_LINK = "file:///" + __dir__() + "/../data/jmeter-dist-{version}.zip"
    JMeterExecutor.PLUGINS_DOWNLOAD_TPL = "file:///" + __dir__() + "/../data/JMeterPlugins-{plugin}-1.3.0.zip"
    JMeterExecutor.JMETER_VER = '2.13'
    try:
        self.assertFalse(os.path.exists(path))
        obj = JMeterExecutor()
        obj.engine = EngineEmul()
        obj.settings.merge({"path": path})
        obj.execution = BetterDict()
        obj.execution.merge({"scenario": {"requests": ["http://localhost"]}})
        obj.prepare()
        jars = os.listdir(os.path.abspath(os.path.join(path, '../../lib')))
        old_jars = ['httpcore-4.2.5.jar', 'httpmime-4.2.6.jar', 'xercesImpl-2.9.1.jar',
                    'commons-jexl-1.1.jar', 'httpclient-4.2.6.jar']
        for old_jar in old_jars:
            self.assertNotIn(old_jar, jars)
        self.assertTrue(os.path.exists(path))
        # second prepare() against the existing install must also succeed
        obj = JMeterExecutor()
        obj.engine = EngineEmul()
        obj.settings.merge({"path": path})
        obj.execution = BetterDict()
        obj.execution.merge({"scenario": {"requests": ["http://localhost"]}})
        obj.prepare()
    finally:
        # restore class attributes even if prepare() raised
        JMeterExecutor.JMETER_DOWNLOAD_LINK = jmeter_link
        JMeterExecutor.PLUGINS_DOWNLOAD_TPL = plugins_link
        JMeterExecutor.JMETER_VER = jmeter_ver
        JMeterExecutor.MIRRORS_SOURCE = mirrors_link