def test_install_jmeter(self):
    """Install JMeter on first prepare(), prune old jars, then reuse the install."""
    path = os.path.abspath(__dir__() + "/../../build/tmp/jmeter-taurus/bin/jmeter" + EXE_SUFFIX)
    # wipe any previous installation so prepare() is forced to (re)install
    shutil.rmtree(os.path.dirname(os.path.dirname(path)), ignore_errors=True)
    self.assertFalse(os.path.exists(path))

    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.settings.merge({"path": path})
    executor.execution = BetterDict()
    executor.execution.merge({"scenario": {"requests": ["http://localhost"]}})
    executor.prepare()

    # outdated jars must have been removed from the lib dir
    jars = os.listdir(os.path.abspath(os.path.join(path, '../../lib')))
    outdated = ['httpcore-4.2.5.jar', 'httpmime-4.2.6.jar', 'xercesImpl-2.9.1.jar',
                'commons-jexl-1.1.jar', 'httpclient-4.2.6.jar']
    for jar in outdated:
        self.assertNotIn(jar, jars)
    self.assertTrue(os.path.exists(path))

    # second prepare() must succeed against the already-installed tool
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.settings.merge({"path": path})
    executor.execution = BetterDict()
    executor.execution.merge({"scenario": {"requests": ["http://localhost"]}})
    executor.prepare()
def test_disabled(self):
    """A reporter with disable=True must never start its console screen."""
    reporter = ConsoleStatusReporter()
    reporter.engine = EngineEmul()
    reporter.engine.provisioning = Local()
    reporter.engine.config[Provisioning.PROV] = ''
    jmeter = JMeterExecutor()
    jmeter.engine = reporter.engine
    jmeter.start_time = time.time()
    jmeter.execution[ScenarioExecutor.HOLD_FOR] = 10
    reporter.engine.provisioning.executors = [jmeter]
    reporter.settings["disable"] = True
    reporter.settings['dummy_cols'] = 160
    reporter.settings['dummy_rows'] = 40
    reporter.prepare()
    reporter.startup()
    # feed datapoints and poll; the screen must stay down the whole time
    for _ in range(10):
        point = self.__get_datapoint(0)
        reporter.aggregated_second(point)
        reporter.check()
    self.assertFalse(reporter.screen.started)
    reporter.check()
    reporter.shutdown()
    reporter.post_process()
def test_dns_cache_mgr_script(self):
    """Script-based scenario: no DNSCacheManager is injected, and
    system.properties carries the user prop but no inetaddr.ttl override."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    obj.engine.config.merge({'execution': {'ramp-up': 10,
                                           'throughput': 2,
                                           'hold-for': 20,
                                           'concurrency': 5,
                                           'scenario': {'think-time': '0.75s',
                                                        'script': 'tests/jmx/http.jmx'}},
                             'modules': {'jmeter': {'system-properties': {'any_prop': 'true'},
                                                    'properties': {'log_level.jmeter': 'WARN',
                                                                   'log_level.jmeter.threads': 'DEBUG',
                                                                   'my-hostname': 'www.pre-test.com'}}}})
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.settings.merge(obj.engine.config.get("modules").get("jmeter"))
    obj.prepare()
    # was: etree.fromstring(open(...).read()) — file handle leaked
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    dns_managers = xml_tree.findall(".//DNSCacheManager")
    self.assertEqual(len(dns_managers), 0)  # 0 dns_managers
    with open(os.path.join(obj.engine.artifacts_dir, "system.properties")) as fds:
        sys_prop = fds.read()
    self.assertTrue("any_prop=true" in sys_prop)
    self.assertFalse("sun.net.inetaddr.ttl=0" in sys_prop)
def test_css_jquery_extractor(self):
    """Requests-level extract-css-jquery config must produce HtmlExtractor
    elements with the expected refname/expr/attribute/match/default props."""
    obj = JMeterExecutor()
    handler = RecordingHandler()
    obj.log.addHandler(handler)
    # remove the handler even if assertions/prepare fail (was leaked on error)
    try:
        obj.engine = EngineEmul()
        with open(__dir__() + "/../json/get-post.json") as fds:
            obj.engine.config = json.loads(fds.read())
        obj.execution = obj.engine.config['execution']
        obj.prepare()
        target_jmx = os.path.join(obj.engine.artifacts_dir, "requests.jmx")
        with open(target_jmx, "rb") as fds:
            modified_xml_tree = etree.fromstring(fds.read())
        jq_css_extractors = modified_xml_tree.findall(".//HtmlExtractor")
        self.assertEqual(2, len(jq_css_extractors))
        # short form: only an expression given, everything else defaulted
        simplified_extractor = modified_xml_tree.find(".//HtmlExtractor[@testname='Get name1']")
        self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.refname']").text, "name1")
        self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.expr']").text,
                         "input[name~=my_input]")
        self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.attribute']").text, None)
        self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.match_number']").text, "0")
        self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.default']").text, "NOT_FOUND")
        # full form: every field given explicitly
        full_form_extractor = modified_xml_tree.find(".//HtmlExtractor[@testname='Get name2']")
        self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.refname']").text, "name2")
        self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.expr']").text,
                         "input[name=JMeter]")
        self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.attribute']").text, "value")
        self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.match_number']").text, "1")
        self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.default']").text, "NV_JMETER")
    finally:
        obj.log.removeHandler(handler)
def test_convert_tgroups_load_modifications(self):
    """With explicit load settings, plugin thread groups are converted to
    plain ThreadGroups carrying the configured iterations/concurrency."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config[Provisioning.PROV] = 'test'
    obj.execution = BetterDict()
    obj.execution.merge({
        "iterations": 20,
        "ramp-up": 10,
        "hold-for": "2m",
        "scenario": {"script": __dir__() + "/../jmx/SteppingThreadGroup.jmx"}
    })
    obj.prepare()
    # was: etree.fromstring(open(...).read()) — file handle leaked
    with open(obj.modified_jmx, "rb") as fds:
        modified_xml_tree = etree.fromstring(fds.read())
    # plugin thread groups must be gone after conversion
    st_tg = modified_xml_tree.find(".//kg.apc.jmeter.threads.SteppingThreadGroup")
    self.assertEqual(st_tg, None)
    ul_tg = modified_xml_tree.find(".//kg.apc.jmeter.threads.UltimateThreadGroup")
    self.assertEqual(ul_tg, None)
    converted_st_tg = modified_xml_tree.find(".//ThreadGroup[@testname='stepping tg']")
    loop_ctrl = converted_st_tg.find(".//elementProp[@name='ThreadGroup.main_controller']")
    tg_loops = loop_ctrl.find(".//*[@name='LoopController.loops']")
    tg_forever = loop_ctrl.find(".//boolProp[@name='LoopController.continue_forever']")
    self.assertEqual(tg_loops.text, "20")
    self.assertEqual(tg_forever.text, "false")
    st_tg_concurrency = converted_st_tg.find(".//stringProp[@name='ThreadGroup.num_threads']")
    self.assertEqual(st_tg_concurrency.text, "123")
def test_fail_on_zero_results(self):
    """post_process() must raise RuntimeWarning when no samples were produced."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.execution = BetterDict()
    executor.execution.merge({"scenario": {"script": "tests/jmx/dummy.jmx"}})
    executor.prepare()
    self.assertRaises(RuntimeWarning, executor.post_process)
def test_broken_xml(self):
    """prepare() must raise RuntimeError when the JMX file is malformed."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.execution = BetterDict()
    executor.execution.merge({"scenario": {"script": __dir__() + "/../jmx/broken.jmx"}})
    self.assertRaises(RuntimeError, executor.prepare)
def test_resource_files_collection(self):
    """resource_files() must report all 5 files referenced by the JMX."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.execution.merge({"scenario": {"script": "tests/jmx/files.jmx"}})
    resources = executor.resource_files()
    artifacts = os.listdir(executor.engine.artifacts_dir)
    self.assertEqual(len(resources), 5)
    self.assertEqual(len(artifacts), 5)
def test_csv_path_bug_in_distributed_mode(self):
    """In distributed mode, resource file paths must not be rewritten."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.execution.merge({"scenario": {"script": "tests/jmx/files.jmx"}})
    executor.distributed_servers = ["127.0.0.1", "127.0.0.1"]
    executor.prepare()
    target_jmx = os.path.join(executor.engine.artifacts_dir, "modified_files.jmx.jmx")
    self.__check_path_resource_files(target_jmx, exclude_jtls=True, reverse_check=True)
def test_fail_on_zero_results(self):
    """With a consolidating aggregator attached, an empty run must still
    raise RuntimeWarning from post_process()."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.engine.aggregator = ConsolidatingAggregator()
    executor.execution = BetterDict()
    executor.execution.merge({"scenario": {"script": __dir__() + "/../jmx/dummy.jmx"}})
    executor.prepare()
    self.assertRaises(RuntimeWarning, executor.post_process)
def test_user_def_vars_override(self):
    """User-defined variables from YAML config must collapse into a single
    Arguments element in the modified JMX."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    # was: yaml.load(open(...).read()) — file handle leaked
    with open("tests/yaml/user_def_vars.yml") as fds:
        obj.engine.config.merge(yaml.load(fds.read()))
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    udv_elements = xml_tree.findall(".//Arguments[@testclass='Arguments']")
    self.assertEqual(1, len(udv_elements))
def test_user_def_vars_from_requests(self):
    """Variables declared in a requests scenario must yield exactly one
    Arguments element in the modified JMX."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    # was: json.loads(open(...).read()) — file handle leaked
    with open("tests/json/get-post.json") as fds:
        obj.engine.config = json.loads(fds.read())
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    udv_elements = xml_tree.findall(".//Arguments[@testclass='Arguments']")
    self.assertEqual(1, len(udv_elements))
def test_resource_files_collection_local_prov(self):
    """Local provisioning: prepare() must copy resources into artifacts dir."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.execution.merge({"scenario": {"script": "tests/jmx/files.jmx"}})
    executor.prepare()
    artifacts = os.listdir(executor.engine.artifacts_dir)
    self.assertEqual(len(artifacts), 7)  # minus jmeter.log
    target_jmx = os.path.join(executor.engine.artifacts_dir, "modified_files.jmx.jmx")
    self.__check_path_resource_files(target_jmx, exclude_jtls=True)
def test_not_jmx(self):
    """prepare() must raise RuntimeError for a script that is not XML."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.execution = {"scenario": {"script": __file__}}
    # assertRaises replaces the try/self.fail()/except dance (matches sibling tests)
    self.assertRaises(RuntimeError, obj.prepare)
def test_datasources_without_delimiter(self):
    """A data source without an explicit delimiter must still prepare cleanly."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.execution = BetterDict()
    executor.execution.merge({"scenario": {
        "requests": ["http://localhost"],
        "data-sources": [{"path": __dir__() + "/../data/test2.csv"}],
    }})
    executor.prepare()
def test_resource_files_from_requests_remote_prov(self):
    """Remote provisioning: resource_files() must report request-level files."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    # was: json.loads(open(...).read()) — file handle leaked
    with open("tests/json/get-post.json") as fds:
        obj.engine.config = json.loads(fds.read())
    obj.execution = obj.engine.config['execution']
    res_files = obj.resource_files()
    artifacts = os.listdir(obj.engine.artifacts_dir)
    self.assertEqual(len(res_files), 2)
    self.assertEqual(len(artifacts), 2)
def test_think_time_bug(self):
    """A '0.75s' think-time from YAML must produce a 750ms ConstantTimer."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    # was: yaml.load(open(...).read()) — file handle leaked
    with open("tests/yaml/think-time-bug.yml") as fds:
        obj.engine.config.merge(yaml.load(fds.read()))
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    with open(obj.modified_jmx) as fds:
        result = fds.read()
    self.assertIn('<stringProp name="ConstantTimer.delay">750</stringProp>', result)
def test_resource_files_from_requests_local_prov(self):
    """Local provisioning: requests-scenario resources are copied to artifacts."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    # was: json.loads(open(...).read()) — file handle leaked
    with open("tests/json/get-post.json") as fds:
        obj.engine.config = json.loads(fds.read())
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    artifacts = os.listdir(obj.engine.artifacts_dir)
    self.assertEqual(len(artifacts), 6)  # + system.properties, minus jmeter.log
    target_jmx = os.path.join(obj.engine.artifacts_dir, "modified_requests.jmx.jmx")
    self.__check_path_resource_files(target_jmx, exclude_jtls=True)
def test_not_jmx_xml(self):
    """prepare() must raise RuntimeError for XML that is not a JMX test plan."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.execution = BetterDict()
    obj.execution.merge({"scenario": {"script": "tests/jmx/not-jmx.xml"}})
    # assertRaises replaces the try/self.fail()/except dance (matches sibling tests)
    self.assertRaises(RuntimeError, obj.prepare)
def test_jtl_none(self):
    """An unrecognized write-xml-jtl value must not break prepare()."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.execution.merge({
        "write-xml-jtl": "bla-bla-bla",
        "scenario": {"requests": [{"url": "http://blazedemo.com"}]},
    })
    executor.prepare()
def test_resource_files_collection_remote_prov(self):
    """Remote provisioning: 5 resources collected and paths left checked."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.execution.merge({"scenario": {"script": "tests/jmx/files.jmx"}})
    resources = executor.resource_files()
    artifacts = os.listdir(executor.engine.artifacts_dir)
    self.assertEqual(len(resources), 5)
    self.assertEqual(len(artifacts), 5)
    target_jmx = os.path.join(executor.engine.artifacts_dir, "files.jmx")
    self.__check_path_resource_files(target_jmx)
def test_variable_csv_file(self):
    """CSV paths containing JMeter variables must be left untouched."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.execution.merge({"scenario": {"script": __dir__() + "/../jmx/variable_csv.jmx"}})
    executor.prepare()
    artifacts = os.listdir(executor.engine.artifacts_dir)
    self.assertEqual(len(artifacts), 3)  # minus jmeter.log
    target_jmx = os.path.join(executor.engine.artifacts_dir, "modified_variable_csv.jmx.jmx")
    with open(target_jmx) as fds:
        jmx = fds.read()
    self.assertIn('<stringProp name="filename">${root}/csvfile.csv</stringProp>', jmx)
def test_broken_xml(self):
    """prepare() must raise RuntimeError when the JMX file is malformed."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.execution = BetterDict()
    obj.execution.merge({"scenario": {"script": __dir__() + "/../jmx/broken.jmx"}})
    # assertRaises replaces the try/self.fail()/except dance (matches sibling tests)
    self.assertRaises(RuntimeError, obj.prepare)
def test_user_def_vars_override(self):
    """Scenario-level variables over a script must merge into one Arguments block."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config.merge({'execution': {'concurrency': 200,
                                           'throughput': 100,
                                           'hold-for': '1m',
                                           'scenario': {
                                               'variables': {'my_var': 'http://demo.blazemeter.com/api/user',
                                                             'myvar2': 'val2'},
                                               'properties': {'log_level.jmeter': 'DEBUG'},
                                               'script': 'tests/jmx/http.jmx'}}})
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    # was: etree.fromstring(open(...).read()) — file handle leaked
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    udv_elements = xml_tree.findall(".//Arguments[@testclass='Arguments']")
    self.assertEqual(1, len(udv_elements))
def test_resource_files_from_requests_local_prov(self):
    """Local provisioning: all expected artifacts must be present by name."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    # was: json.loads(open(...).read()) — file handle leaked
    with open(__dir__() + "/../json/get-post.json") as fds:
        obj.engine.config = json.loads(fds.read())
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    files = ['http.jmx', 'jmeter-bzt.properties', 'modified_requests.jmx.jmx']
    files += ['requests.jmx', 'system.properties', 'test1.csv']
    artifacts = os.listdir(obj.engine.artifacts_dir)
    # +system.properties, -jmeter.log
    self.assertTrue(all([_file in artifacts for _file in files]))
    target_jmx = os.path.join(obj.engine.artifacts_dir, "modified_requests.jmx.jmx")
    self.__check_path_resource_files(target_jmx, exclude_jtls=True)
def test_issue_no_iterations(self):
    """A load spec without iterations must not break JMX modification."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.execution = BetterDict()
    executor.execution.merge({
        "concurrency": 10,
        "ramp-up": 10,
        "scenario": {"script": __dir__() + "/../jmx/issue_no_iterations.jmx"},
    })
    executor.prepare()
def test_think_time_bug(self):
    """A numeric 0.75 think-time must produce a 750ms ConstantTimer."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config.merge({'execution': {'ramp-up': '1m',
                                           'hold-for': '1m30s',
                                           'concurrency': 10,
                                           'scenario': {'think-time': 0.75,
                                                        'requests': ['http://blazedemo.com/',
                                                                     'http://blazedemo.com/vacation.html']}}})
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    # was: open(...).read() — file handle leaked
    with open(obj.modified_jmx) as fds:
        result = fds.read()
    self.assertIn('<stringProp name="ConstantTimer.delay">750</stringProp>', result)
def test_data_source_list(self):
    """data-sources given as a mapping instead of a list must raise ValueError."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.execution.merge({
        "scenario": {
            "requests": ["http://blazedemo.com/"],
            # note that data-sources should be a list of strings/objects
            "data-sources": {"path": __dir__() + "/../data/test1.csv"},
        }
    })
    self.assertRaises(ValueError, executor.prepare)
def test_empty_requests(self):
    """A scenario with no requests must fail with a descriptive message."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    # was: yaml.load(open(...).read()) — file handle leaked
    with open("tests/yaml/startup_no_requests.yml") as fds:
        obj.engine.config.merge(yaml.load(fds.read()))
    obj.settings.merge(obj.engine.config.get("modules").get("jmeter"))
    obj.execution = obj.engine.config['execution']
    try:
        obj.prepare()
        self.fail()
    except RuntimeError as exc:
        # the exact message is part of the user-facing contract
        self.assertEqual(exc.args[0], "Nothing to test, no requests were provided in scenario")
def test_resource_files_from_requests_local_prov(self):
    """Local provisioning: all expected artifacts must be present by name."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    # was: json.loads(open(...).read()) — file handle leaked
    with open(__dir__() + "/../json/get-post.json") as fds:
        obj.engine.config = json.loads(fds.read())
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    files = ['http.jmx', 'jmeter-bzt.properties', 'modified_requests.jmx.jmx']
    files += ['requests.jmx', 'system.properties', 'test1.csv']
    artifacts = os.listdir(obj.engine.artifacts_dir)
    # +system.properties, -jmeter.log
    self.assertTrue(all([_file in artifacts for _file in files]))
    target_jmx = os.path.join(obj.engine.artifacts_dir, "modified_requests.jmx.jmx")
    self.__check_path_resource_files(target_jmx, exclude_jtls=True)
def test_add_shaper_ramp_up(self):
    """Ramp-up + throughput must add one VariableThroughputTimer with a
    two-segment load profile (1→10 over 60s, then flat 10 for 120s)."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    obj.engine.config.merge({
        'execution': {
            'ramp-up': '1m',
            'throughput': 10,
            'hold-for': '2m',
            'concurrency': 20,
            'scenario': {'script': __dir__() + '/../jmx/http.jmx'}
        }
    })
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    # was: etree.fromstring(open(...).read()) — file handle leaked
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    timer_ = ".//kg.apc.jmeter.timers.VariableThroughputTimer"
    timer_ += "[@testclass='kg.apc.jmeter.timers.VariableThroughputTimer']"
    shaper_elements = xml_tree.findall(timer_)
    self.assertEqual(1, len(shaper_elements))
    shaper_coll_element = shaper_elements[0].find(".//collectionProp[@name='load_profile']")
    # segment 1: ramp 1 -> 10 rps over 60s
    self.assertEqual("1", shaper_coll_element.findall(".//stringProp[@name='49']")[0].text)
    self.assertEqual("10", shaper_coll_element.findall(".//stringProp[@name='1567']")[0].text)
    self.assertEqual("60", shaper_coll_element.findall(".//stringProp[@name='53']")[0].text)
    # segment 2: hold 10 rps for 120s
    self.assertEqual("10", shaper_coll_element.findall(".//stringProp[@name='49']")[1].text)
    self.assertEqual("10", shaper_coll_element.findall(".//stringProp[@name='1567']")[1].text)
    self.assertEqual("120", shaper_coll_element.findall(".//stringProp[@name='53']")[1].text)
def test_http_request_defaults(self):
    """Scenario defaults must become a single ConfigTestElement and disable
    keepalive on every generated sampler."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    # was: json.loads(open(...).read()) — file handle leaked
    with open(__dir__() + "/../json/get-post.json") as fds:
        obj.engine.config = json.loads(fds.read())
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    default_elements = xml_tree.findall(".//ConfigTestElement[@testclass='ConfigTestElement']")
    self.assertEqual(1, len(default_elements))

    default_element = default_elements[0]
    self.assertEqual("www.somehost.com",
                     default_element.find(".//stringProp[@name='HTTPSampler.domain']").text)
    self.assertEqual("884",
                     default_element.find(".//stringProp[@name='HTTPSampler.port']").text)
    self.assertEqual("https",
                     default_element.find(".//stringProp[@name='HTTPSampler.protocol']").text)
    self.assertEqual("true",
                     default_element.find(".//boolProp[@name='HTTPSampler.image_parser']").text)
    self.assertEqual("true",
                     default_element.find(".//boolProp[@name='HTTPSampler.concurrentDwn']").text)
    self.assertEqual("10",
                     default_element.find(".//stringProp[@name='HTTPSampler.concurrentPool']").text)
    # all keepalives in requests are disabled
    requests = xml_tree.findall(".//HTTPSamplerProxy[@testclass='HTTPSamplerProxy']")
    for request in requests:
        self.assertEqual("false",
                         request.find(".//boolProp[@name='HTTPSampler.use_keepalive']").text)
def test_dns_cache_mgr_script(self):
    """Script-based scenario: no DNSCacheManager is injected, and
    system.properties carries the user prop but no inetaddr.ttl override."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    obj.engine.config.merge({
        'execution': {
            'ramp-up': 10,
            'throughput': 2,
            'hold-for': 20,
            'concurrency': 5,
            'scenario': {
                'think-time': '0.75s',
                'script': __dir__() + '/../jmx/http.jmx'
            }
        },
        'modules': {
            'jmeter': {
                'system-properties': {'any_prop': 'true'},
                'properties': {
                    'log_level.jmeter': 'WARN',
                    'log_level.jmeter.threads': 'DEBUG',
                    'my-hostname': 'www.pre-test.com'
                }
            }
        }
    })
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.settings.merge(obj.engine.config.get("modules").get("jmeter"))
    obj.prepare()
    # was: etree.fromstring(open(...).read()) — file handle leaked
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    dns_managers = xml_tree.findall(".//DNSCacheManager")
    self.assertEqual(len(dns_managers), 0)  # 0 dns_managers
    with open(os.path.join(obj.engine.artifacts_dir, "system.properties")) as fds:
        sys_prop = fds.read()
    self.assertTrue("any_prop=true" in sys_prop)
    self.assertFalse("sun.net.inetaddr.ttl=0" in sys_prop)
def test_jmx_2tg(self):
    """Concurrency 1051 must be split proportionally across two thread groups."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.engine.config[Provisioning.PROV] = 'test'
    executor.execution = BetterDict()
    executor.execution.merge({
        "concurrency": 1051,
        "ramp-up": 15,
        "iterations": 100,
        "scenario": {"script": __dir__() + "/../jmx/two_tg.jmx"},
    })
    executor.prepare()
    jmx = JMX(executor.modified_jmx)
    selector = 'jmeterTestPlan>hashTree>hashTree>ThreadGroup'
    selector += '>stringProp[name=ThreadGroup\.num_threads]'
    threads = jmx.get(selector)
    self.assertEquals('420', threads[0].text)
    self.assertEquals('631', threads[1].text)
def test_convert_tgroups_no_load(self):
    """Without load settings, plugin thread groups must be kept as-is."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config[Provisioning.PROV] = 'test'
    obj.execution = BetterDict()
    obj.execution.merge({
        "scenario": {"script": __dir__() + "/../jmx/SteppingThreadGroup.jmx"}
    })
    obj.prepare()
    # was: etree.fromstring(open(...).read()) — file handle leaked
    with open(obj.modified_jmx, "rb") as fds:
        modified_xml_tree = etree.fromstring(fds.read())
    st_tg = modified_xml_tree.find(".//kg.apc.jmeter.threads.SteppingThreadGroup")
    self.assertNotEqual(st_tg, None)
    ul_tg = modified_xml_tree.find(".//kg.apc.jmeter.threads.UltimateThreadGroup")
    self.assertNotEqual(ul_tg, None)
def test_dns_cache_mgr_requests(self):
    """Requests-based scenario: one DNSCacheManager is injected and the
    inetaddr.ttl override lands in system.properties."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    # was: json.loads(open(...).read()) — file handle leaked
    with open(__dir__() + "/../../tests/json/get-post.json") as fds:
        obj.engine.config = json.loads(fds.read())
    obj.execution = obj.engine.config['execution']
    obj.settings.merge(obj.engine.config.get("modules").get("jmeter"))
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    dns_managers = xml_tree.findall(".//DNSCacheManager")
    self.assertEqual(len(dns_managers), 1)  # 1 dns_manager
    # check system.properties file contents
    with open(os.path.join(obj.engine.artifacts_dir, "system.properties")) as fds:
        sys_prop = fds.read()
    self.assertTrue("any_prop=true" in sys_prop)
    self.assertTrue("sun.net.inetaddr.ttl=0" in sys_prop)
def test_duration_loops_bug(self):
    """hold-for without iterations must set infinite loops (-1) on the TG."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config[Provisioning.PROV] = 'test'
    obj.execution = BetterDict()
    obj.execution.merge({
        "concurrency": 10,
        "ramp-up": 15,
        "hold-for": "2m",
        "scenario": {"script": __dir__() + "/../jmx/http.jmx"}
    })
    obj.prepare()
    # was: etree.fromstring(open(...).read()) — file handle leaked
    with open(obj.modified_jmx, "rb") as fds:
        modified_xml_tree = etree.fromstring(fds.read())
    tg = modified_xml_tree.find(".//ThreadGroup")
    loop_ctrl = tg.find(".//elementProp[@name='ThreadGroup.main_controller']")
    tg_loops = loop_ctrl.find(".//intProp[@name='LoopController.loops']")
    tg_forever = loop_ctrl.find(".//boolProp[@name='LoopController.continue_forever']")
    self.assertEqual(tg_loops.text, "-1")
    self.assertEqual(tg_forever.text, "false")
def test_dns_cache_mgr_script(self):
    """YAML script scenario: no DNSCacheManager, user prop present, and no
    inetaddr.ttl override in system.properties."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    # was: yaml.load(open(...).read()) — file handle leaked
    with open("tests/yaml/dns_mgr_script.yml") as fds:
        obj.engine.config.merge(yaml.load(fds.read()))
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.settings.merge(obj.engine.config.get("modules").get("jmeter"))
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    dns_managers = xml_tree.findall(".//DNSCacheManager")
    self.assertEqual(len(dns_managers), 0)  # 0 dns_managers
    with open(os.path.join(obj.engine.artifacts_dir, "system.properties")) as fds:
        sys_prop = fds.read()
    self.assertTrue("any_prop=true" in sys_prop)
    self.assertFalse("sun.net.inetaddr.ttl=0" in sys_prop)
def test_stepping_tg_ramp_no_proportion(self):
    """
    Tested without concurrency proportions: each original ThreadGroup is
    replaced by a SteppingThreadGroup keeping its thread count, with step
    period/count derived from ramp-up and steps.
    """
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    # was: yaml.load(open(...).read()) — file handle leaked
    with open("tests/yaml/stepping_ramp_up.yml") as fds:
        obj.engine.config.merge(yaml.load(fds.read()))
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    load = obj.get_load()
    with open(obj.original_jmx, "rb") as fds:
        orig_xml_tree = etree.fromstring(fds.read())
    with open(obj.modified_jmx, "rb") as fds:
        modified_xml_tree = etree.fromstring(fds.read())
    mod_stepping_tgs = modified_xml_tree.findall(".//kg.apc.jmeter.threads.SteppingThreadGroup")
    orig_tgs = orig_xml_tree.findall(".//ThreadGroup")
    self.assertEqual(len(mod_stepping_tgs), len(orig_tgs))
    for orig_th, step_th in zip(orig_tgs, mod_stepping_tgs):
        orig_num_threads = int(orig_th.find(".//stringProp[@name='ThreadGroup.num_threads']").text)
        mod_num_threads = int(step_th.find(".//stringProp[@name='ThreadGroup.num_threads']").text)
        self.assertEqual(orig_num_threads, mod_num_threads)
        self.assertEqual(step_th.find(".//stringProp[@name='Start users period']").text,
                         str(int(load.ramp_up / load.steps)))
        self.assertEqual(step_th.find(".//stringProp[@name='Start users count']").text,
                         str(int(orig_num_threads / load.steps)))
def test_add_shaper_ramp_up(self):
    """YAML config with throughput ramp-up must add one VariableThroughputTimer
    with a two-segment load profile (ramp 1→10 over 60s, hold 10 for 120s)."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    # was: yaml.load(open(...).read()) — file handle leaked
    with open("tests/yaml/throughput_ramp_up.yml") as fds:
        obj.engine.config.merge(yaml.load(fds.read()))
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    shaper_elements = xml_tree.findall(
        ".//kg.apc.jmeter.timers.VariableThroughputTimer[@testclass='kg.apc.jmeter.timers.VariableThroughputTimer']")
    self.assertEqual(1, len(shaper_elements))
    shaper_coll_element = shaper_elements[0].find(".//collectionProp[@name='load_profile']")
    # segment 1: ramp 1 -> 10 rps over 60s
    self.assertEqual("1", shaper_coll_element.findall(".//stringProp[@name='49']")[0].text)
    self.assertEqual("10", shaper_coll_element.findall(".//stringProp[@name='1567']")[0].text)
    self.assertEqual("60", shaper_coll_element.findall(".//stringProp[@name='53']")[0].text)
    # segment 2: hold 10 rps for 120s
    self.assertEqual("10", shaper_coll_element.findall(".//stringProp[@name='49']")[1].text)
    self.assertEqual("10", shaper_coll_element.findall(".//stringProp[@name='1567']")[1].text)
    self.assertEqual("120", shaper_coll_element.findall(".//stringProp[@name='53']")[1].text)
def test_css_jquery_extractor(self):
    """Requests-level extract-css-jquery config must produce HtmlExtractor
    elements with the expected refname/expr/attribute/match/default props."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    # was: json.loads(open(...).read()) — file handle leaked
    with open("tests/json/get-post.json") as fds:
        obj.engine.config = json.loads(fds.read())
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    target_jmx = os.path.join(obj.engine.artifacts_dir, "requests.jmx")
    with open(target_jmx, "rb") as fds:
        modified_xml_tree = etree.fromstring(fds.read())
    jq_css_extractors = modified_xml_tree.findall(".//HtmlExtractor")
    self.assertEqual(2, len(jq_css_extractors))
    # short form: only an expression given, everything else defaulted
    simplified_extractor = modified_xml_tree.find(".//HtmlExtractor[@testname='Get name1']")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.refname']").text, "name1")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.expr']").text,
                     "input[name~=my_input]")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.attribute']").text, None)
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.match_number']").text, "0")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.default']").text, "NOT_FOUND")
    # full form: every field given explicitly
    full_form_extractor = modified_xml_tree.find(".//HtmlExtractor[@testname='Get name2']")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.refname']").text, "name2")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.expr']").text,
                     "input[name=JMeter]")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.attribute']").text, "value")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.match_number']").text, "1")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.default']").text, "NV_JMETER")
def test_think_time_bug(self):
    """A numeric 0.75 think-time must produce a 750ms ConstantTimer."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        'execution': {
            'ramp-up': '1m',
            'hold-for': '1m30s',
            'concurrency': 10,
            'scenario': {
                'think-time': 0.75,
                'requests': [
                    'http://blazedemo.com/',
                    'http://blazedemo.com/vacation.html'
                ]
            }
        }
    })
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    # was: open(...).read() — file handle leaked
    with open(obj.modified_jmx) as fds:
        result = fds.read()
    self.assertIn('<stringProp name="ConstantTimer.delay">750</stringProp>', result)
def test_body_parse(self):
    """Body params from the requests config must become sampler Arguments
    with one elementProp per parameter."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    # was: json.loads(open(...).read()) — file handle leaked
    with open("tests/json/get-post.json") as fds:
        obj.engine.config = json.loads(fds.read())
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    sampler_element = xml_tree.findall(".//HTTPSamplerProxy[@testname='With body params']")
    arguments_element_prop = sampler_element[0][0]
    self.assertEqual(7, len(sampler_element[0].getchildren()))
    self.assertEqual(1, len(arguments_element_prop.getchildren()))
    self.assertEqual(2, len(arguments_element_prop[0].getchildren()))
    self.assertEqual(1, len(arguments_element_prop[0].findall(".//elementProp[@name='param1']")))
    self.assertEqual(1, len(arguments_element_prop.findall(".//elementProp[@name='param2']")))
def test_convert_tgroups_load_modifications(self):
    """With explicit load settings, plugin thread groups are converted to
    plain ThreadGroups carrying the configured iterations/concurrency."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config[Provisioning.PROV] = 'test'
    obj.execution = BetterDict()
    obj.execution.merge({
        "iterations": 20,
        "ramp-up": 10,
        "hold-for": "2m",
        "scenario": {"script": __dir__() + "/../jmx/SteppingThreadGroup.jmx"}
    })
    obj.prepare()
    # was: etree.fromstring(open(...).read()) — file handle leaked
    with open(obj.modified_jmx, "rb") as fds:
        modified_xml_tree = etree.fromstring(fds.read())
    # plugin thread groups must be gone after conversion
    st_tg = modified_xml_tree.find(".//kg.apc.jmeter.threads.SteppingThreadGroup")
    self.assertEqual(st_tg, None)
    ul_tg = modified_xml_tree.find(".//kg.apc.jmeter.threads.UltimateThreadGroup")
    self.assertEqual(ul_tg, None)
    converted_st_tg = modified_xml_tree.find(".//ThreadGroup[@testname='stepping tg']")
    loop_ctrl = converted_st_tg.find(".//elementProp[@name='ThreadGroup.main_controller']")
    tg_loops = loop_ctrl.find(".//*[@name='LoopController.loops']")
    tg_forever = loop_ctrl.find(".//boolProp[@name='LoopController.continue_forever']")
    self.assertEqual(tg_loops.text, "20")
    self.assertEqual(tg_forever.text, "false")
    st_tg_concurrency = converted_st_tg.find(".//stringProp[@name='ThreadGroup.num_threads']")
    self.assertEqual(st_tg_concurrency.text, "123")
def test_install_jmeter(self):
    """Install JMeter on first prepare(), prune old jars, then reuse the install."""
    path = os.path.abspath(__dir__() + "/../../build/tmp/jmeter-taurus/bin/jmeter" + EXE_SUFFIX)
    # wipe any previous installation so prepare() is forced to (re)install
    shutil.rmtree(os.path.dirname(os.path.dirname(path)), ignore_errors=True)
    self.assertFalse(os.path.exists(path))

    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.settings.merge({"path": path})
    executor.execution = BetterDict()
    executor.execution.merge({"scenario": {"requests": ["http://localhost"]}})
    executor.prepare()

    # outdated jars must have been removed from the lib dir
    jars = os.listdir(os.path.abspath(os.path.join(path, '../../lib')))
    outdated = ['httpcore-4.2.5.jar', 'httpmime-4.2.6.jar', 'xercesImpl-2.9.1.jar',
                'commons-jexl-1.1.jar', 'httpclient-4.2.6.jar']
    for jar in outdated:
        self.assertNotIn(jar, jars)
    self.assertTrue(os.path.exists(path))

    # second prepare() must succeed against the already-installed tool
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.settings.merge({"path": path})
    executor.execution = BetterDict()
    executor.execution.merge({"scenario": {"requests": ["http://localhost"]}})
    executor.prepare()
def test_iterations_loop_bug(self):
    """Explicit iterations must set the LoopController loop count; without
    iterations the script's default of 1 must be preserved."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config[Provisioning.PROV] = 'test'
    obj.execution = BetterDict()
    obj.execution.merge({"iterations": 10, "scenario": {"script": __dir__() + "/../jmx/http.jmx"}})
    obj.prepare()
    # was: etree.fromstring(open(...).read()) — file handle leaked
    with open(obj.modified_jmx, "rb") as fds:
        modified_xml_tree = etree.fromstring(fds.read())
    tg = modified_xml_tree.find(".//ThreadGroup")
    loop_ctrl = tg.find(".//elementProp[@name='ThreadGroup.main_controller']")
    tg_loops = loop_ctrl.find(".//stringProp[@name='LoopController.loops']")
    tg_forever = loop_ctrl.find(".//boolProp[@name='LoopController.continue_forever']")
    self.assertEqual(tg_loops.text, "10")
    self.assertEqual(tg_forever.text, "false")

    # same script without explicit iterations: default loop count stays
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config[Provisioning.PROV] = 'test'
    obj.execution = BetterDict()
    obj.execution.merge({"scenario": {"script": __dir__() + "/../jmx/http.jmx"}})
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:
        modified_xml_tree = etree.fromstring(fds.read())
    tg = modified_xml_tree.find(".//ThreadGroup")
    loop_ctrl = tg.find(".//elementProp[@name='ThreadGroup.main_controller']")
    tg_loops = loop_ctrl.find("*[@name='LoopController.loops']")
    tg_forever = loop_ctrl.find(".//boolProp[@name='LoopController.continue_forever']")
    self.assertEqual(tg_loops.text, "1")  # default value, not disabled
    self.assertEqual(tg_forever.text, "false")
def test_distributed_th_hostnames(self):
    """Distributed mode prepends ${__machineName()} to thread group names for
    script scenarios, but not for requests-based scenarios."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.execution.merge({"scenario": {"script": "tests/jmx/http.jmx"}})
    obj.distributed_servers = ["127.0.0.1", "127.0.0.1"]
    obj.prepare()
    # was: etree.fromstring(open(...).read()) — file handle leaked
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    thread_groups = xml_tree.findall(".//ThreadGroup")
    prepend_str = r"${__machineName()}"
    for thread_group in thread_groups:
        self.assertTrue(thread_group.attrib["testname"].startswith(prepend_str))

    # requests-based scenario: names must NOT be prefixed
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    with open(__dir__() + "/../../tests/json/get-post.json") as fds:
        obj.engine.config = json.loads(fds.read())
    obj.execution = obj.engine.config['execution']
    obj.settings.merge(obj.engine.config.get("modules").get("jmeter"))
    obj.distributed_servers = ["127.0.0.1", "127.0.0.1"]
    obj.prepare()
    with open(obj.modified_jmx, "rb") as fds:
        xml_tree = etree.fromstring(fds.read())
    thread_groups = xml_tree.findall(".//ThreadGroup")
    prepend_str = r"${__machineName()}"
    for thread_group in thread_groups:
        self.assertFalse(thread_group.attrib["testname"].startswith(prepend_str))
def test_jmx(self):
    """A plain JMX script scenario must prepare without errors."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.execution = BetterDict()
    executor.execution.merge({"scenario": {"script": "tests/jmx/dummy.jmx"}})
    executor.prepare()
def test_jmeter_mirrors(self):
    """The JMeter tool must be installable into a clean target directory."""
    path = os.path.abspath(__dir__() + "/../../build/tmp/jmeter-taurus/bin/jmeter")
    # start from a clean slate so install() actually runs
    shutil.rmtree(os.path.dirname(os.path.dirname(path)), ignore_errors=True)
    executor = JMeterExecutor()
    tool = JMeter(path, executor.log, JMeterExecutor.JMETER_VER)
    tool.install()
def test_install_jmeter(self):
    """Install JMeter from local file:// dist/plugin archives, check old jars
    are pruned, and verify a second prepare() reuses the installation."""
    path = os.path.abspath(__dir__() + "/../../build/tmp/jmeter-taurus/bin/jmeter")
    shutil.rmtree(os.path.dirname(os.path.dirname(path)), ignore_errors=True)

    # save the monkey-patched class attributes; restore them in `finally` so a
    # failing prepare() cannot poison subsequent tests (was unconditional code)
    jmeter_link = JMeterExecutor.JMETER_DOWNLOAD_LINK
    jmeter_ver = JMeterExecutor.JMETER_VER
    plugins_link = JMeterExecutor.PLUGINS_DOWNLOAD_TPL
    mirrors_link = JMeterExecutor.MIRRORS_SOURCE
    try:
        JMeterExecutor.MIRRORS_SOURCE = "file:///" + __dir__() + "/../data/unicode_file"
        JMeterExecutor.JMETER_DOWNLOAD_LINK = "file:///" + __dir__() + "/../data/jmeter-dist-{version}.zip"
        JMeterExecutor.PLUGINS_DOWNLOAD_TPL = "file:///" + __dir__() + "/../data/JMeterPlugins-{plugin}-1.3.0.zip"
        JMeterExecutor.JMETER_VER = '2.13'

        self.assertFalse(os.path.exists(path))
        obj = JMeterExecutor()
        obj.engine = EngineEmul()
        obj.settings.merge({"path": path})
        obj.execution = BetterDict()
        obj.execution.merge({"scenario": {"requests": ["http://localhost"]}})
        obj.prepare()

        # outdated jars must have been removed from the lib dir
        jars = os.listdir(os.path.abspath(os.path.join(path, '../../lib')))
        old_jars = ['httpcore-4.2.5.jar', 'httpmime-4.2.6.jar', 'xercesImpl-2.9.1.jar',
                    'commons-jexl-1.1.jar', 'httpclient-4.2.6.jar']
        for old_jar in old_jars:
            self.assertNotIn(old_jar, jars)
        self.assertTrue(os.path.exists(path))

        # second prepare() must succeed against the already-installed tool
        obj = JMeterExecutor()
        obj.engine = EngineEmul()
        obj.settings.merge({"path": path})
        obj.execution = BetterDict()
        obj.execution.merge({"scenario": {"requests": ["http://localhost"]}})
        obj.prepare()
    finally:
        JMeterExecutor.JMETER_DOWNLOAD_LINK = jmeter_link
        JMeterExecutor.PLUGINS_DOWNLOAD_TPL = plugins_link
        JMeterExecutor.JMETER_VER = jmeter_ver
        JMeterExecutor.MIRRORS_SOURCE = mirrors_link
def test_requests(self):
    """Best-effort full run of a requests scenario; jmeter.log is dumped on exit."""
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    # was: json.loads(open(...).read()) — file handle leaked
    with open("tests/json/get-post.json") as fds:
        obj.engine.config = json.loads(fds.read())
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    with open(obj.modified_jmx) as fds:
        obj.log.debug("%s: %s", obj.modified_jmx, fds.read())
    obj.log.debug("%s", json.dumps(obj.execution, indent=True))
    try:
        obj.startup()
        while not obj.check():
            obj.log.debug("Check...")
            time.sleep(1)
        obj.shutdown()
        obj.post_process()
    except:  # deliberate best-effort: failures are diagnosed via the log dump below
        pass
    finally:
        if obj.jmeter_log and os.path.exists(obj.jmeter_log):
            with open(obj.jmeter_log) as fds:
                obj.log.debug("%s", fds.read())
def test_not_jmx(self):
    """A non-XML script file must be rejected during prepare()."""
    executor = JMeterExecutor()
    executor.engine = EngineEmul()
    executor.execution = {"scenario": {"script": __file__}}
    self.assertRaises(RuntimeError, executor.prepare)