def test_install_jmeter(self):
    path = os.path.abspath(__dir__() + "/../../build/tmp/jmeter-taurus/bin/jmeter")
    shutil.rmtree(os.path.dirname(os.path.dirname(path)), ignore_errors=True)

    # back up download links and version, then point them at local fixture archives
    jmeter_link = JMeterExecutor.JMETER_DOWNLOAD_LINK
    jmeter_ver = JMeterExecutor.JMETER_VER
    plugins_link = JMeterExecutor.PLUGINS_DOWNLOAD_TPL
    JMeterExecutor.JMETER_DOWNLOAD_LINK = "file://" + __dir__() + "/../data/jmeter-dist-{version}.zip"
    JMeterExecutor.PLUGINS_DOWNLOAD_TPL = "file://" + __dir__() + "/../data/jmeter-plugins-{plugin}.zip"
    JMeterExecutor.JMETER_VER = '2.13'

    self.assertFalse(os.path.exists(path))

    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.settings.merge({"path": path})
    obj.execution = BetterDict()
    obj.execution.merge({"scenario": {"requests": []}})
    obj.prepare()
    self.assertTrue(os.path.exists(path))

    obj.prepare()  # second prepare must not reinstall

    # restore the class-level defaults
    JMeterExecutor.JMETER_DOWNLOAD_LINK = jmeter_link
    JMeterExecutor.PLUGINS_DOWNLOAD_TPL = plugins_link
    JMeterExecutor.JMETER_VER = jmeter_ver

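# NOTE: the tests in this section resolve fixture paths through a module-level
# __dir__() helper that is defined outside this excerpt. A minimal sketch of
# such a helper, assuming it simply returns the directory of the test module
# (the real one may inspect the caller's frame instead):
#
#     import os
#
#     def __dir__():
#         """Return the absolute directory of the current test module."""
#         return os.path.dirname(os.path.abspath(__file__))
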
def test_jmx_shorthand(self):
    ret = self.obj.perform([
        __dir__() + "/json/mock_normal.json",
        __dir__() + "/jmx/dummy.jmx",
        __dir__() + "/jmx/dummy.jmx",
    ])
    self.assertEquals(0, ret)

def test_install_Grinder(self):
    path = os.path.abspath(__dir__() + "/../../build/tmp/grinder-taurus/lib/grinder.jar")
    shutil.rmtree(os.path.dirname(os.path.dirname(path)), ignore_errors=True)

    grinder_link = GrinderExecutor.DOWNLOAD_LINK
    grinder_version = GrinderExecutor.VERSION
    GrinderExecutor.DOWNLOAD_LINK = "file://" + __dir__() + "/../data/grinder-{version}_{version}-binary.zip"
    GrinderExecutor.VERSION = "3.11"

    self.assertFalse(os.path.exists(path))

    obj = GrinderExecutor()
    obj.engine = EngineEmul()
    obj.settings.merge({"path": path})
    obj.execution = BetterDict()
    obj.execution.merge({"scenario": {
        "script": "tests/grinder/helloworld.py",
        "properties-file": "tests/grinder/grinder.properties",
        "properties": {"grinder.useConsole": "false"}}})
    obj.prepare()
    self.assertTrue(os.path.exists(path))

    obj.prepare()

    GrinderExecutor.DOWNLOAD_LINK = grinder_link
    GrinderExecutor.VERSION = grinder_version

def test_iterations_loop_bug(self):
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config[Provisioning.PROV] = 'test'
    obj.execution = BetterDict()
    obj.execution.merge({"iterations": 10, "scenario": {"script": __dir__() + "/../jmx/http.jmx"}})
    obj.prepare()

    modified_xml_tree = etree.fromstring(open(obj.modified_jmx, "rb").read())
    tg = modified_xml_tree.find(".//ThreadGroup")
    loop_ctrl = tg.find(".//elementProp[@name='ThreadGroup.main_controller']")
    tg_loops = loop_ctrl.find(".//stringProp[@name='LoopController.loops']")
    tg_forever = loop_ctrl.find(".//boolProp[@name='LoopController.continue_forever']")
    self.assertEqual(tg_loops.text, "10")
    self.assertEqual(tg_forever.text, "false")

    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config[Provisioning.PROV] = 'test'
    obj.execution = BetterDict()
    obj.execution.merge({"scenario": {"script": __dir__() + "/../jmx/http.jmx"}})
    obj.prepare()

    modified_xml_tree = etree.fromstring(open(obj.modified_jmx, "rb").read())
    tg = modified_xml_tree.find(".//ThreadGroup")
    loop_ctrl = tg.find(".//elementProp[@name='ThreadGroup.main_controller']")
    tg_loops = loop_ctrl.find("*[@name='LoopController.loops']")
    tg_forever = loop_ctrl.find(".//boolProp[@name='LoopController.continue_forever']")
    self.assertEqual(tg_loops.text, "1")  # default value, not disabled
    self.assertEqual(tg_forever.text, "false")

def test_install_Grinder(self):
    path = os.path.abspath(__dir__() + "/../../build/tmp/grinder-taurus/lib/grinder.jar")
    shutil.rmtree(os.path.dirname(os.path.dirname(path)), ignore_errors=True)
    self.assertFalse(os.path.exists(path))

    obj = GrinderExecutor()
    obj.engine = EngineEmul()
    obj.settings.merge({"path": path})
    obj.settings.merge({
        "properties-file": __dir__() + "/../grinder/grinder.base.properties",
        "properties": {"sample_prop": "some_val"},
    })
    obj.execution.merge({
        "scenario": {
            "script": __dir__() + "/../grinder/helloworld.py",
            "properties-file": __dir__() + "/../grinder/grinder.properties",
            "properties": {"grinder.useConsole": "false"},
        }
    })
    obj.prepare()
    self.assertTrue(os.path.exists(path))

def test_reread(self):
    obj = ChromeProfiler()
    obj.engine = EngineEmul()
    obj.settings.merge({
        "processors": {
            "trace": {
                "class": "bzt.modules.chrome.TraceProcessor",
                "extractors": ["bzt.modules.chrome.MemoryMetricsExtractor"]
            }
        }
    })

    listener = RecordingListener()
    obj.add_listener(listener)

    shutil.copy(__dir__() + "/../chrome/trace.json", obj.engine.artifacts_dir)

    obj.prepare()
    obj.startup()

    for _ in range(3):
        obj.check()
        time.sleep(1)

    shutil.copy(__dir__() + "/../chrome/trace.json", obj.engine.artifacts_dir)

    for _ in range(3):
        obj.check()
        time.sleep(1)

    obj.shutdown()
    obj.post_process()

def test_body_files(self):
    body_file1 = __dir__() + "/jmeter/body-file.dat"
    body_file2 = __dir__() + "/jmeter/jmx/http.jmx"
    self.engine.config.merge({
        'execution': [{
            'iterations': 1,
            'executor': 'siege',
            'scenario': 'bf'}],
        'scenarios': {
            'bf': {
                "requests": [
                    {
                        'url': 'http://first.com',
                        'body-file': body_file1
                    }, {
                        'url': 'http://second.com',
                        'body': 'body2',
                        'body-file': body_file2}]}}})
    self.executor.execution = self.engine.config.get('execution')[0]
    scenario = self.executor.get_scenario()

    # check body fields in get_requests() results
    reqs = list(scenario.get_requests())
    body_fields = [req.body for req in reqs]
    self.assertIn('sample of body', body_fields[0])
    self.assertIn('body2', body_fields[1])

    # check body and body-file fields after get_requests()
    scenario = self.executor.get_scenario()
    body_files = [req.get('body-file') for req in scenario.get('requests')]
    body_fields = [req.get('body') for req in scenario.get('requests')]
    self.assertTrue(all(body_files))
    self.assertEqual(None, body_fields[0])
    self.assertIn('body2', body_fields[1])

def test_selenium_startup_shutdown_jar_single(self):
    """
    run tests from a single jar
    :return:
    """
    obj = self.get_selenium_executor()
    obj.engine.config.merge({
        'execution': {
            'scenario': {'script': __dir__() + '/../selenium/jar/'},
            'executor': 'selenium'
        },
        'reporting': [{'module': 'junit-xml'}]
    })
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.execution.merge({"scenario": {"script": __dir__() + "/../selenium/jar/dummy.jar"}})
    obj.settings.merge(obj.engine.config.get("modules").get("selenium"))
    obj.prepare()
    obj.startup()
    while not obj.check():
        time.sleep(1)
    obj.shutdown()

    prepared_files = os.listdir(obj.runner.working_dir)
    java_files = [fname for fname in prepared_files if fname.endswith(".java")]
    class_files = [fname for fname in prepared_files if fname.endswith(".class")]
    jars = [fname for fname in prepared_files if fname.endswith(".jar")]
    self.assertEqual(len(java_files), 0)
    self.assertEqual(len(class_files), 0)
    self.assertEqual(len(jars), 1)
    self.assertTrue(os.path.exists(obj.runner.settings.get("report-file")))

def test_install_Gatling(self):
    path = os.path.abspath(__dir__() + "/../../build/tmp/gatling-taurus/bin/gatling" + EXE_SUFFIX)
    shutil.rmtree(os.path.dirname(os.path.dirname(path)), ignore_errors=True)

    # backup download link and version
    gatling_link = GatlingExecutor.DOWNLOAD_LINK
    gatling_ver = GatlingExecutor.VERSION
    mirrors_link = GatlingExecutor.MIRRORS_SOURCE
    GatlingExecutor.DOWNLOAD_LINK = "file:///" + __dir__() + "/../data/gatling-dist-{version}_{version}.zip"
    GatlingExecutor.VERSION = '2.1.4'
    GatlingExecutor.MIRRORS_SOURCE = "file:///" + __dir__() + "/../data/unicode_file"

    self.assertFalse(os.path.exists(path))
    obj = GatlingExecutor()
    obj.engine = EngineEmul()
    obj.settings.merge({"path": path})
    obj.execution = BetterDict()
    obj.execution.merge({"scenario": {"script": "tests/gatling/BasicSimulation.scala",
                                      "simulation": "mytest.BasicSimulation"}})
    obj.prepare()
    self.assertTrue(os.path.exists(path))

    obj.prepare()

    GatlingExecutor.DOWNLOAD_LINK = gatling_link
    GatlingExecutor.VERSION = gatling_ver
    GatlingExecutor.MIRRORS_SOURCE = mirrors_link

def test_selenium_startup_shutdown_java_single(self):
    """
    run tests from single .java file
    :return:
    """
    self.obj.engine.config.merge({
        'execution': {
            'scenario': {'script': __dir__() + '/../selenium/java/'},
            'executor': 'selenium'
        },
        'reporting': [{'module': 'junit-xml'}]
    })
    self.obj.engine.config.merge({"provisioning": "local"})
    self.obj.execution = self.obj.engine.config['execution']
    self.obj.execution.merge({"scenario": {"script": __dir__() + "/../selenium/java/TestBlazemeterFail.java"}})
    self.obj.settings.merge(self.obj.engine.config.get("modules").get("selenium"))
    self.obj.prepare()
    self.obj.startup()
    while not self.obj.check():
        time.sleep(1)
    self.obj.shutdown()

    prepared_files = os.listdir(self.obj.runner.working_dir)
    java_files = [fname for fname in prepared_files if fname.endswith(".java")]
    class_files = [fname for fname in prepared_files if fname.endswith(".class")]
    jars = [fname for fname in prepared_files if fname.endswith(".jar")]
    self.assertEqual(1, len(java_files))
    self.assertEqual(1, len(class_files))
    self.assertEqual(1, len(jars))
    self.assertTrue(os.path.exists(os.path.join(self.obj.runner.working_dir, "compiled.jar")))
    self.assertTrue(os.path.exists(self.obj.runner.settings.get("report-file")))

def test_merge(self):
    obj = Configuration()
    configs = [
        __dir__() + "/yaml/test.yml",
        __dir__() + "/json/merge1.json",
        __dir__() + "/json/merge2.json",
    ]
    obj.load(configs)
    fname = tempfile.mkstemp()[1]
    obj.dump(fname, Configuration.JSON)
    with open(fname) as fh:
        logging.debug("JSON:\n%s", fh.read())

    jmeter = obj['modules']['jmeter']
    classval = jmeter['class']
    self.assertEquals("bzt.modules.jmeter.JMeterExecutor", classval)
    self.assertEquals("value", obj['key'])
    self.assertEquals(6, len(obj["list-append"]))
    self.assertEquals(2, len(obj["list-replace"]))
    self.assertEquals(2, len(obj["list-replace-notexistent"]))
    self.assertIsInstance(obj["list-complex"][1][0], BetterDict)
    self.assertFalse("properties" in jmeter)

    fname = tempfile.mkstemp()[1]
    obj.dump(fname, Configuration.JSON)

    checker = Configuration()
    checker.load([fname])
    token = checker["list-complex"][1][0]['token']
    self.assertNotEquals('test', token)
    token_orig = obj["list-complex"][1][0]['token']
    self.assertEquals('test', token_orig)

def test_selenium_startup_shutdown_python_single(self):
    """
    run tests from a single .py file
    :return:
    """
    self.obj.engine.config.merge({
        'execution': {
            'scenario': {'script': __dir__() + '/../selenium/python/'},
            'executor': 'selenium'
        },
        'reporting': [{'module': 'junit-xml'}]
    })
    self.obj.engine.config.merge({"provisioning": "local"})
    self.obj.execution = self.obj.engine.config['execution']
    self.obj.execution.merge({"scenario": {
        "script": __dir__() + "/../selenium/python/test_blazemeter_fail.py"
    }})
    self.obj.settings.merge(self.obj.engine.config.get("modules").get("selenium"))
    self.obj.prepare()
    self.obj.startup()
    while not self.obj.check():
        time.sleep(1)
    self.obj.shutdown()

    prepared_files = os.listdir(self.obj.runner.working_dir)
    python_files = [fname for fname in prepared_files if fname.endswith(".py")]
    self.assertEqual(1, len(python_files))
    self.assertTrue(os.path.exists(self.obj.runner.settings.get("report-file")))

def test_install_tools(self):
    """
    check installation of selenium-server, junit
    :return:
    """
    dummy_installation_path = os.path.abspath(__dir__() + "/../../build/tmp/selenium-taurus")
    base_link = "file://" + __dir__() + "/../data/"

    shutil.rmtree(os.path.dirname(dummy_installation_path), ignore_errors=True)

    selenium_server_link = SeleniumExecutor.SELENIUM_DOWNLOAD_LINK
    SeleniumExecutor.SELENIUM_DOWNLOAD_LINK = base_link + "selenium-server-standalone-2.46.0.jar"
    junit_link = SeleniumExecutor.JUNIT_DOWNLOAD_LINK
    SeleniumExecutor.JUNIT_DOWNLOAD_LINK = base_link + "junit-4.12.jar"

    self.assertFalse(os.path.exists(dummy_installation_path))

    obj = SeleniumExecutor()
    obj.engine = EngineEmul()
    obj.settings.merge({"selenium-tools": {
        "junit": {"selenium-server": os.path.join(dummy_installation_path, "selenium-server.jar")}}})
    obj.settings.merge({"selenium-tools": {
        "junit": {"path": os.path.join(dummy_installation_path, "tools", "junit", "junit.jar")}}})

    obj.execution = BetterDict()
    obj.execution.merge({"scenario": {"script": os.path.abspath(__dir__() + "/../../tests/selenium/jar/")}})
    obj.prepare()

    self.assertTrue(os.path.exists(os.path.join(dummy_installation_path, "selenium-server.jar")))
    self.assertTrue(os.path.exists(os.path.join(dummy_installation_path, "tools", "junit", "junit.jar")))

    SeleniumExecutor.SELENIUM_DOWNLOAD_LINK = selenium_server_link
    SeleniumExecutor.JUNIT_DOWNLOAD_LINK = junit_link

def test_resource_files_collection_local(self):
    obj = GrinderExecutor()
    obj.engine = EngineEmul()
    obj.execution.merge({"scenario": {"script": __dir__() + "/../grinder/helloworld.py",
                                      "properties-file": __dir__() + "/../grinder/grinder.properties"}})
    obj.prepare()
    artifacts = os.listdir(obj.engine.artifacts_dir)
    self.assertEqual(len(artifacts), 2)

def test_fail_on_zero_results(self):
    obj = GrinderExecutor()
    obj.engine = EngineEmul()
    obj.settings.merge({'path': __dir__() + "/../grinder/fake_grinder.jar"})
    obj.execution.merge({"concurrency": {"local": 2},
                         "scenario": {"script": __dir__() + "/../grinder/helloworld.py"}})
    obj.prepare()
    self.assertRaises(RuntimeWarning, obj.post_process)

def test_gatling(self):
    configs = [
        __dir__() + "/../bzt/10-base.json",
        __dir__() + "/json/gatling.json",
        self.paths
    ]
    self.obj.configure(configs)
    self.obj.prepare()
    self.obj.run()
    self.obj.post_process()

def test_double_exec(self):
    configs = [
        __dir__() + "/../bzt/10-base.json",
        __dir__() + "/yaml/triple.yml",
        __dir__() + "/json/reporting.json",
        self.paths
    ]
    self.obj.configure(configs)
    self.obj.prepare()
    self.obj.run()
    self.obj.post_process()

def test_requests(self):
    configs = [
        __dir__() + "/../bzt/10-base.json",
        __dir__() + "/json/get-post.json",
        __dir__() + "/json/reporting.json",
        self.paths
    ]
    self.obj.configure(configs)
    self.obj.prepare()
    self.obj.run()
    self.obj.post_process()

def test_resource_files_collection_invalid(self):
    obj = GrinderExecutor()
    obj.engine = EngineEmul()
    obj.execution.merge({"scenario": {"script": __dir__() + "/../grinder/helloworld.py",
                                      "properties-file": __dir__() + "/../grinder/grinder_invalid.properties"}})
    res_files = obj.resource_files()
    artifacts = os.listdir(obj.engine.artifacts_dir)
    self.assertEqual(len(res_files), 2)
    self.assertEqual(len(artifacts), 2)
    self.assertIn("helloworld.py",
                  open(os.path.join(obj.engine.artifacts_dir, "grinder_invalid.properties")).read())

def test_grinder_widget(self):
    obj = GrinderExecutor()
    obj.engine = EngineEmul()
    obj.settings.merge({'path': __dir__() + "/../grinder/fake_grinder.jar"})
    obj.engine.config.merge({"provisioning": 'local'})
    obj.execution.merge({"concurrency": {"local": 2},
                         "ramp-up": 2,
                         "hold-for": 2,
                         "scenario": {"script": __dir__() + "/../grinder/helloworld.py"}})
    obj.prepare()
    obj.get_widget()
    self.assertEqual(obj.widget.widgets[0].text, "Script: helloworld.py")

def test_double_exec(self):
    configs = [
        __dir__() + "/../bzt/10-base.json",
        __dir__() + "/yaml/triple.yml",
        __dir__() + "/json/reporting.json",
        self.paths
    ]
    self.obj.configure(configs)
    self.obj.prepare()

    for executor in self.obj.provisioning.executors:
        executor._env['TEST_MODE'] = 'files'

    self.obj.run()
    self.obj.post_process()

def test_unknown_module(self):
    configs = [
        __dir__() + "/../bzt/10-base.json",
        __dir__() + "/json/gatling.json",
        self.paths
    ]
    self.obj.configure(configs)
    self.obj.config["provisioning"] = "unknown"
    self.obj.config["modules"]["unknown"] = BetterDict()

    try:
        self.obj.prepare()
        self.fail()
    except ValueError:
        pass

def test_requests(self):
    configs = [
        __dir__() + "/../bzt/10-base.json",
        __dir__() + "/json/get-post.json",
        __dir__() + "/json/reporting.json",
        self.paths
    ]
    self.obj.configure(configs)
    self.obj.prepare()

    for executor in self.obj.provisioning.executors:
        executor._env['TEST_MODE'] = 'files'

    self.obj.run()
    self.obj.post_process()

def test_convert_tgroups_load_modifications(self):
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config[Provisioning.PROV] = 'test'
    obj.execution = BetterDict()
    obj.execution.merge({
        "iterations": 20,
        "ramp-up": 10,
        "hold-for": "2m",
        "scenario": {"script": __dir__() + "/../jmx/SteppingThreadGroup.jmx"}
    })
    obj.prepare()
    modified_xml_tree = etree.fromstring(open(obj.modified_jmx, "rb").read())

    st_tg = modified_xml_tree.find(".//kg.apc.jmeter.threads.SteppingThreadGroup")
    self.assertEqual(st_tg, None)
    ul_tg = modified_xml_tree.find(".//kg.apc.jmeter.threads.UltimateThreadGroup")
    self.assertEqual(ul_tg, None)

    converted_st_tg = modified_xml_tree.find(".//ThreadGroup[@testname='stepping tg']")

    loop_ctrl = converted_st_tg.find(".//elementProp[@name='ThreadGroup.main_controller']")
    tg_loops = loop_ctrl.find(".//*[@name='LoopController.loops']")
    tg_forever = loop_ctrl.find(".//boolProp[@name='LoopController.continue_forever']")
    self.assertEqual(tg_loops.text, "20")
    self.assertEqual(tg_forever.text, "false")

    st_tg_concurrency = converted_st_tg.find(".//stringProp[@name='ThreadGroup.num_threads']")
    self.assertEqual(st_tg_concurrency.text, "123")

def test_locust_master(self):
    if six.PY3:
        logging.warning("No locust available for python 3")
        return  # skip: locustio is not available on Python 3

    obj = LocustIOExecutor()
    obj.engine = EngineEmul()
    obj.engine.config['provisioning'] = 'local'
    obj.execution.merge({
        "concurrency": 1,
        "iterations": 10,
        "hold-for": 30,
        "master": True,
        "slaves": 1,
        "scenario": {
            "default-address": "http://blazedemo.com",
            "script": __dir__() + "/../locust/simple.py"
        }
    })

    obj.prepare()
    obj.startup()
    obj.get_widget()
    try:
        obj.check()
        time.sleep(2)
        obj.check()
    except RuntimeError:
        logging.warning("Do you use patched locust for non-GUI master?")
    obj.shutdown()
    self.assertRaises(RuntimeWarning, obj.post_process)

def test_css_jquery_extractor(self):
    obj = JMeterExecutor()
    handler = RecordingHandler()
    obj.log.addHandler(handler)
    obj.engine = EngineEmul()
    obj.engine.config = json.loads(open(__dir__() + "/../json/get-post.json").read())
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    target_jmx = os.path.join(obj.engine.artifacts_dir, "requests.jmx")
    modified_xml_tree = etree.fromstring(open(target_jmx, "rb").read())
    jq_css_extractors = modified_xml_tree.findall(".//HtmlExtractor")
    self.assertEqual(2, len(jq_css_extractors))

    simplified_extractor = modified_xml_tree.find(".//HtmlExtractor[@testname='Get name1']")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.refname']").text, "name1")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.expr']").text,
                     "input[name~=my_input]")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.attribute']").text, None)
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.match_number']").text, "0")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.default']").text, "NOT_FOUND")

    full_form_extractor = modified_xml_tree.find(".//HtmlExtractor[@testname='Get name2']")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.refname']").text, "name2")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.expr']").text,
                     "input[name=JMeter]")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.attribute']").text, "value")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.match_number']").text, "1")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.default']").text, "NV_JMETER")

    obj.log.removeHandler(handler)

def test_install_jmeter(self):
    path = os.path.abspath(__dir__() + "/../../build/tmp/jmeter-taurus/bin/jmeter" + EXE_SUFFIX)
    shutil.rmtree(os.path.dirname(os.path.dirname(path)), ignore_errors=True)
    self.assertFalse(os.path.exists(path))

    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.settings.merge({"path": path})
    obj.execution = BetterDict()
    obj.execution.merge({"scenario": {"requests": ["http://localhost"]}})
    obj.prepare()

    jars = os.listdir(os.path.abspath(os.path.join(path, '../../lib')))
    old_jars = ['httpcore-4.2.5.jar', 'httpmime-4.2.6.jar', 'xercesImpl-2.9.1.jar',
                'commons-jexl-1.1.jar', 'httpclient-4.2.6.jar']
    for old_jar in old_jars:
        self.assertNotIn(old_jar, jars)

    self.assertTrue(os.path.exists(path))

    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.settings.merge({"path": path})
    obj.execution = BetterDict()
    obj.execution.merge({"scenario": {"requests": ["http://localhost"]}})
    obj.prepare()

def test_dns_cache_mgr_script(self):
    """
    No DNS cache manager is added when the scenario uses an existing JMX script;
    system properties are still written.
    :return:
    """
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    obj.engine.config.merge({
        'execution': {
            'ramp-up': 10,
            'throughput': 2,
            'hold-for': 20,
            'concurrency': 5,
            'scenario': {'think-time': '0.75s', 'script': __dir__() + '/../jmx/http.jmx'}},
        'modules': {
            'jmeter': {
                'system-properties': {'any_prop': 'true'},
                'properties': {
                    'log_level.jmeter': 'WARN',
                    'log_level.jmeter.threads': 'DEBUG',
                    'my-hostname': 'www.pre-test.com'}}}})
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.settings.merge(obj.engine.config.get("modules").get("jmeter"))
    obj.prepare()

    xml_tree = etree.fromstring(open(obj.modified_jmx, "rb").read())
    dns_managers = xml_tree.findall(".//DNSCacheManager")
    self.assertEqual(len(dns_managers), 0)  # 0 dns_managers

    sys_prop = open(os.path.join(obj.engine.artifacts_dir, "system.properties")).read()
    self.assertTrue("any_prop=true" in sys_prop)
    self.assertFalse("sun.net.inetaddr.ttl=0" in sys_prop)

def test_stepping_tg_ramp_proportion(self):
    """
    Stepping thread groups are scaled in proportion to the requested concurrency
    :return:
    """
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    obj.engine.config.merge({'execution': {'steps': 5, 'concurrency': 170,
                                           'scenario': {'script': __dir__() + '/../jmx/stepping_ramp_up.jmx'},
                                           'ramp-up': '1m', 'distributed': ['127.0.0.1'], 'hold-for': '2m'}})
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.execution['concurrency'] = 100  # from 170 to 100
    obj.execution['steps'] = 4  # from 5 to 4
    obj.prepare()
    load = obj.get_load()

    orig_xml_tree = etree.fromstring(open(obj.original_jmx, "rb").read())
    modified_xml_tree = etree.fromstring(open(obj.modified_jmx, "rb").read())
    mod_stepping_tgs = modified_xml_tree.findall(".//kg.apc.jmeter.threads.SteppingThreadGroup")
    orig_tgs = orig_xml_tree.findall(".//ThreadGroup")
    self.assertEqual(len(mod_stepping_tgs), len(orig_tgs))

    orig_summ_cnc = sum([int(x.find(".//stringProp[@name='ThreadGroup.num_threads']").text) for x in orig_tgs])
    for orig_th, step_th in zip(orig_tgs, mod_stepping_tgs):
        orig_num_threads = int(orig_th.find(".//stringProp[@name='ThreadGroup.num_threads']").text)
        mod_num_threads = int(step_th.find(".//stringProp[@name='ThreadGroup.num_threads']").text)
        self.assertEqual(round(orig_num_threads * (float(load.concurrency) / orig_summ_cnc)),
                         mod_num_threads)
        self.assertEqual(step_th.find(".//stringProp[@name='Start users period']").text,
                         str(int(load.ramp_up / load.steps)))
        self.assertEqual(step_th.find(".//stringProp[@name='Start users count']").text,
                         str(int(ceil(float(load.concurrency) / orig_summ_cnc * orig_num_threads / load.steps))))

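# A worked example of the proportional scaling asserted above, with purely
# hypothetical numbers (the real totals come from the stepping_ramp_up.jmx
# fixture): if the original JMX holds 200 threads in total and the execution
# requests concurrency 100, a group that originally had 80 threads is scaled
# to round(80 * 100 / 200.0) = 40 threads. With ramp-up '1m' (60s) and 4
# steps, 'Start users period' becomes int(60 / 4) = 15 and
# 'Start users count' becomes ceil(100.0 / 200 * 80 / 4) = 10.
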
def test_broken_xml(self):
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.execution = BetterDict()
    obj.execution.merge({"scenario": {"script": __dir__() + "/../jmx/broken.jmx"}})
    self.assertRaises(RuntimeError, obj.prepare)

def test_build_script(self):
    self.configure({
        "execution": [{
            "executor": "selenium",
            "hold-for": "4m",
            "ramp-up": "3m",
            "scenario": "loc_sc"
        }],
        "scenarios": {
            "loc_sc": {
                "default-address": "http://blazedemo.com",
                "requests": [{
                    "url": "/",
                    "assert": [{
                        "contains": ['contained_text'],
                        "not": True
                    }]
                }]
            }
        },
        "modules": {
            "selenium": {
                "^virtual-display": 0
            }
        }
    })
    self.obj.prepare()
    with open(self.obj.script) as generated:
        gen_contents = generated.readlines()
    with open(__dir__() + "/../selenium/generated_from_requests.py") as sample:
        sample_contents = sample.readlines()

    # strip line terminator and exclude specific build path
    gen_contents = [line.rstrip() for line in gen_contents if 'webdriver' not in line]
    sample_contents = [line.rstrip() for line in sample_contents if 'webdriver' not in line]
    self.assertEqual(gen_contents, sample_contents)

def test_add_shaper_ramp_up(self):
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    obj.engine.config.merge({
        'execution': {
            'ramp-up': '1m',
            'throughput': 10,
            'hold-for': '2m',
            'concurrency': 20,
            'scenario': {'script': __dir__() + '/../jmx/http.jmx'}
        }
    })
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.prepare()

    xml_tree = etree.fromstring(open(obj.modified_jmx, "rb").read())
    timer_ = ".//kg.apc.jmeter.timers.VariableThroughputTimer"
    timer_ += "[@testclass='kg.apc.jmeter.timers.VariableThroughputTimer']"
    shaper_elements = xml_tree.findall(timer_)
    self.assertEqual(1, len(shaper_elements))

    shaper_coll_element = shaper_elements[0].find(".//collectionProp[@name='load_profile']")
    self.assertEqual("1", shaper_coll_element.findall(".//stringProp[@name='49']")[0].text)
    self.assertEqual("10", shaper_coll_element.findall(".//stringProp[@name='1567']")[0].text)
    self.assertEqual("60", shaper_coll_element.findall(".//stringProp[@name='53']")[0].text)
    self.assertEqual("10", shaper_coll_element.findall(".//stringProp[@name='49']")[1].text)
    self.assertEqual("10", shaper_coll_element.findall(".//stringProp[@name='1567']")[1].text)
    self.assertEqual("120", shaper_coll_element.findall(".//stringProp[@name='53']")[1].text)

def test_build_script(self):
    self.configure({
        "execution": [{
            "executor": "selenium",
            "hold-for": "4m",
            "ramp-up": "3m",
            "scenario": "loc_sc"}],
        "scenarios": {
            "loc_sc": {
                "default-address": "http://blazedemo.com",
                "timeout": "3.5s",
                "requests": [{
                    "url": "/",
                    "assert": [{
                        "contains": ['contained_text'],
                        "not": True
                    }],
                    "actions": [
                        {"waitByName('toPort')": "visible"},
                        {"keysByName(\"toPort\")": "B"},
                        "clickByXPath(//div[3]/form/select[1]//option[3])",
                        "clickByXPath(//div[3]/form/select[2]//option[6])",
                        "clickByXPath(//input[@type='submit'])",
                        "clickByLinkText(destination of the week! The Beach!)"
                    ],
                }, {
                    "label": "empty"
                }]
            }
        },
        "modules": {
            "selenium": {
                "^virtual-display": 0}}})
    self.obj.prepare()
    with open(self.obj.script) as generated:
        gen_contents = generated.readlines()
    with open(__dir__() + "/../../resources/selenium/generated_from_requests.py") as sample:
        sample_contents = sample.readlines()

    # strip line terminator and exclude specific build path
    gen_contents = [line.rstrip() for line in gen_contents if 'webdriver' not in line]
    sample_contents = [line.rstrip() for line in sample_contents if 'webdriver' not in line]
    self.assertEqual(gen_contents, sample_contents)

def test_http_request_defaults(self):
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = json.loads(open(__dir__() + "/../json/get-post.json").read())
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    xml_tree = etree.fromstring(open(obj.modified_jmx, "rb").read())
    default_elements = xml_tree.findall(".//ConfigTestElement[@testclass='ConfigTestElement']")
    self.assertEqual(1, len(default_elements))

    default_element = default_elements[0]
    self.assertEqual("www.somehost.com",
                     default_element.find(".//stringProp[@name='HTTPSampler.domain']").text)
    self.assertEqual("884", default_element.find(".//stringProp[@name='HTTPSampler.port']").text)
    self.assertEqual("https", default_element.find(".//stringProp[@name='HTTPSampler.protocol']").text)
    self.assertEqual("true", default_element.find(".//boolProp[@name='HTTPSampler.image_parser']").text)
    self.assertEqual("true", default_element.find(".//boolProp[@name='HTTPSampler.concurrentDwn']").text)
    self.assertEqual("10", default_element.find(".//stringProp[@name='HTTPSampler.concurrentPool']").text)

    # all keepalives in requests are disabled
    requests = xml_tree.findall(".//HTTPSamplerProxy[@testclass='HTTPSamplerProxy']")
    for request in requests:
        self.assertEqual("false", request.find(".//boolProp[@name='HTTPSampler.use_keepalive']").text)

def test_requests_4(self):
    obj = self.getGatling()
    obj.execution.merge({
        "iterations": 55,
        "scenario": {
            "default-address": "",
            "requests": [{'url': 'site.com/reserve.php',
                          'assert': [{
                              'subject': 'body',
                              'contains': 'boot(.*)strap.min',
                              'regexp': True,
                              'not': False
                          }]}]
        }
    })
    obj.prepare()
    scala_file = obj.engine.artifacts_dir + '/' + obj.get_scenario().get('simulation') + '.scala'
    self.assertEqualFiles(__dir__() + "/../resources/gatling/generated4.scala", scala_file)

def test_testng_config_autodetect(self):
    self.configure({
        'execution': {
            'scenario': {
                'script': __dir__() + '/../../resources/selenium/testng/jars/testng-suite.jar'
            }
        }
    })
    self.obj.prepare()
    self.obj.startup()
    while not self.obj.check():
        time.sleep(1)
    self.obj.shutdown()

    self.assertTrue(exists(self.obj.runner.report_file))
    lines = open(self.obj.runner.report_file).readlines()
    self.assertEqual(len(lines), 6)

def test_no_supported_files_to_test(self):
    """
    Test RuntimeError raised when no files of known types were found.
    :return:
    """
    obj = SeleniumExecutor()
    obj.engine = EngineEmul()
    obj.engine.config = BetterDict()
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "selenium",
            "scenario": {"script": __dir__() + "/../selenium/invalid/not_found"}
        }
    })
    obj.execution = obj.engine.config['execution']
    self.assertRaises(RuntimeError, obj.prepare)

def test_fail_on_zero_results(self):
    if six.PY3:
        logging.warning("No locust available for python 3")
        return  # skip: locustio is not available on Python 3

    obj = LocustIOExecutor()
    obj.engine = EngineEmul()
    obj.engine.config['provisioning'] = 'local'
    obj.execution.merge({
        "concurrency": 1,
        "iterations": 10,
        "hold-for": 30,
        "scenario": {
            "default-address": "http://blazedemo.com",
            "script": __dir__() + "/../locust/simple.py"
        }
    })
    obj.prepare()
    self.assertRaises(RuntimeWarning, obj.post_process)

def test_prepare_jar_single(self):
    """
    Check if jar exists in working dir
    :return:
    """
    obj = SeleniumExecutor()
    obj.engine = EngineEmul()
    obj.execution = BetterDict()
    obj.execution.merge({
        "scenario": {"script": os.path.abspath(__dir__() + "/../../tests/selenium/jar/dummy.jar")}
    })
    obj.prepare()
    self.assertTrue(os.path.exists(os.path.join(obj.runner.working_dir, "dummy.jar")))

def test_dns_cache_mgr_scenario(self):
    """
    No DNS manager and no system.properties file when the scenario is a plain JMX script
    :return:
    """
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.execution.merge({"scenario": {"script": __dir__() + "/../jmx/http.jmx"}})
    obj.prepare()
    xml_tree = etree.fromstring(open(obj.modified_jmx, "rb").read())
    dns_element = xml_tree.findall(".//DNSCacheManager")
    # no dns manager when using jmx, no system.properties file
    self.assertEqual(len(dns_element), 0)
    arts = os.listdir(obj.engine.artifacts_dir)
    self.assertNotIn("system.properties", arts)

def test_env_type(self):
    obj = self.getGatling()
    script = "LocalBasicSimulation.scala"
    obj.execution.merge({
        "concurrency": 2,
        "hold-for": 1000,
        "throughput": 100,
        "scenario": {"script": __dir__() + "/../gatling/" + script}
    })
    obj.prepare()
    obj.engine.artifacts_dir = u(obj.engine.artifacts_dir)
    obj.startup()
    obj.shutdown()
    with open(obj.stdout_file.name) as fds:
        lines = fds.readlines()
    self.assertIn('throughput', lines[-1])

def test_jmx_2tg(self):
    obj = JMeterExecutor()
    obj.engine = EngineEmul()
    obj.engine.config[Provisioning.PROV] = 'test'
    obj.execution = BetterDict()
    obj.execution.merge({
        "concurrency": 1051,
        "ramp-up": 15,
        "iterations": 100,
        "scenario": {"script": __dir__() + "/../jmx/two_tg.jmx"}
    })
    obj.prepare()
    jmx = JMX(obj.modified_jmx)
    selector = 'jmeterTestPlan>hashTree>hashTree>ThreadGroup'
    selector += '>stringProp[name=ThreadGroup\.num_threads]'
    thr = jmx.get(selector)
    self.assertEquals('420', thr[0].text)
    self.assertEquals('631', thr[1].text)

def test_requests_2(self):
    obj = self.getGatling()
    obj.execution.merge({
        "concurrency": 10,
        "hold-for": 110,
        "ramp-up": 30,
        "scenario": {
            'keepalive': False,
            'timeout': '100ms',
            'requests': ['http://blazedemo.com', 'google.com']
        }
    })
    obj.prepare()
    scala_file = obj.engine.artifacts_dir + '/' + obj.get_scenario().get('simulation') + '.scala'
    self.assertEqualFiles(__dir__() + "/../gatling/generated2.scala", scala_file)

def test_javac_fail(self):
    """
    Test RuntimeError when compilation fails
    :return:
    """
    obj = SeleniumExecutor()
    obj.engine = self.engine_obj
    obj.settings = self.selenium_config
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "selenium",
            "scenario": {"script": __dir__() + "/../selenium/invalid/invalid.java"}
        }
    })
    obj.execution = obj.engine.config['execution']
    self.assertRaises(RuntimeError, obj.prepare)

def test_resource_files_collection_remote_jar(self):
    obj = self.get_selenium_executor()
    obj.engine.config.merge({
        'execution': {
            'scenario': {'script': __dir__() + '/../selenium/jar/'},
            'executor': 'selenium'
        },
        'reporting': [{'module': 'junit-xml'}]
    })
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.settings.merge(obj.engine.config.get("modules").get("selenium"))
    self.assertEqual(len(obj.resource_files()), 2)

def test_no_test_in_name(self):
    """
    Test exact number of tests when annotations are used and the class name lacks "test"
    :return:
    """
    self.configure({
        ScenarioExecutor.EXEC: {
            "executor": "selenium",
            "scenario": {"script": __dir__() + "/../selenium/invalid/selenium1.java"}
        }
    })
    self.obj.prepare()
    self.obj.startup()
    while not self.obj.check():
        time.sleep(1)
    self.obj.shutdown()

def test_prepare_java_folder(self):
    """
    Check if scripts exist in working dir
    :return:
    """
    obj = SeleniumExecutor()
    obj.engine = self.engine_obj
    obj.settings = self.selenium_config
    obj.execution = BetterDict()
    obj.execution.merge({"scenario": {"script": __dir__() + "/../selenium/java/"}})
    obj.prepare()
    prepared_files = os.listdir(obj.runner.working_dir)
    java_files = [fname for fname in prepared_files if fname.endswith(".java")]
    class_files = [fname for fname in prepared_files if fname.endswith(".class")]
    jars = [fname for fname in prepared_files if fname.endswith(".jar")]
    self.assertEqual(len(java_files), 2)
    self.assertEqual(len(class_files), 2)
    self.assertEqual(len(jars), 1)

def test_apiritif_transactions(self):
    self.configure({
        "execution": [{
            "test-mode": "apiritif",
            "scenario": {"script": __dir__() + "/../../resources/apiritif/test_transactions.py"}
        }]
    })
    self.obj.prepare()
    try:
        self.obj.startup()
        while not self.obj.check():
            time.sleep(self.obj.engine.check_interval)
    finally:
        self.obj.shutdown()
    self.obj.post_process()
    self.assertNotEquals(self.obj.process, None)

def test_not_junit(self):
    """
    Check that JUnit runner fails if no tests were found
    :return:
    """
    self.configure({
        ScenarioExecutor.EXEC: {
            "executor": "selenium",
            "scenario": {"script": __dir__() + "/../selenium/invalid/NotJUnittest.java"}}})
    self.obj.prepare()
    self.obj.startup()
    try:
        while not self.obj.check():
            time.sleep(1)
        self.fail()
    except BaseException as exc:
        self.assertIn("Nothing to test", exc.args[0])
    self.obj.shutdown()

def test_selenium_startup_shutdown_python_folder(self):
    """
    run tests from .py files
    :return:
    """
    self.configure({
        'execution': {
            'scenario': {'script': __dir__() + '/../selenium/python/'},
            'executor': 'selenium'
        },
        'reporting': [{'module': 'junit-xml'}]
    })
    self.obj.prepare()
    self.obj.startup()
    while not self.obj.check():
        time.sleep(1)
    self.obj.shutdown()
    self.assertTrue(os.path.exists(self.obj.runner.settings.get("report-file")))

def test_locust_resource_files(self):
    if six.PY3:
        logging.warning("No locust available for python 3")
        return  # skip: locustio is not available on Python 3

    obj = LocustIOExecutor()
    obj.engine = EngineEmul()
    obj.engine.config['provisioning'] = 'local'
    obj.execution.merge({
        "concurrency": 1,
        "iterations": 10,
        "hold-for": 30,
        "scenario": {
            "default-address": "http://blazedemo.com",
            "script": __dir__() + "/../locust/simple.py"
        }
    })
    resource_files = obj.resource_files()
    self.assertEqual(1, len(resource_files))

def test_iterations(self):
    self.configure({
        'execution': {
            'iterations': 3,
            'scenario': {'script': __dir__() + '/../../resources/selenium/testng/jars/testng-suite.jar'},
            'runner': 'testng'},
        'modules': {
            'testng': {'autodetect-xml': False}}})
    self.obj.prepare()
    self.obj.startup()
    while not self.obj.check():
        time.sleep(1)
    self.obj.shutdown()
    self.assertTrue(exists(self.obj.runner.report_file))
    lines = open(self.obj.runner.report_file).readlines()
    self.assertEqual(len(lines), 9)

def test_rest_templated_params_interpolation(self):
    obj = SoapUIScriptConverter(logging.getLogger(''))
    config = obj.convert_script(__dir__() + "/../resources/soapui/gmaps-sample.xml")
    self.assertEqual(len(config["scenarios"]), 9)
    scenario = config["scenarios"]["Directions API TestSuite-Simple Tests"]

    for request in scenario["requests"]:
        self.assertNotIn("{format}", request["url"])

    self.assertEqual(scenario["requests"][0]["url"],
                     "http://maps.googleapis.com/maps/api/directions/json")
    self.assertEqual(scenario["requests"][1]["url"],
                     "http://maps.googleapis.com/maps/api/directions/json")
    self.assertEqual(scenario["requests"][2]["url"],
                     "http://maps.googleapis.com/maps/api/directions/xml")

def test_hold(self):
    self.configure({
        'execution': {
            'hold-for': '5s',
            'scenario': {'script': __dir__() + '/../../resources/selenium/testng/jars/testng-suite.jar'},
            'runner': 'testng'},
        'modules': {
            'testng': {'autodetect-xml': False}}})
    self.obj.prepare()
    self.obj.startup()
    while not self.obj.check():
        time.sleep(1)
    self.obj.shutdown()
    self.assertTrue(exists(self.obj.runner.report_file))
    duration = time.time() - self.obj.start_time
    self.assertGreater(duration, 5)

def test_prepare_java_file(self):
    self.configure({
        'execution': {
            'scenario': {'script': __dir__() + '/../../resources/selenium/testng/TestNGSuite.java'},
            'runner': 'testng'},
        'modules': {
            'testng': {'autodetect-xml': False}}})
    self.obj.prepare()
    self.obj.startup()
    while not self.obj.check():
        time.sleep(1.0)
    self.obj.shutdown()
    self.obj.post_process()
    lines = open(self.obj.runner.report_file).readlines()
    self.assertEqual(len(lines), 3)

def test_samples_count_testcase(self):
    """
    Test exact number of tests when test class extends JUnit TestCase
    :return:
    """
    obj = SeleniumExecutor()
    obj.engine = self.engine_obj
    obj.settings = self.selenium_config
    obj.engine.config.merge({ScenarioExecutor.EXEC: {
        "executor": "selenium",
        "scenario": {"script": __dir__() + "/../selenium/invalid/SimpleTest.java"}
    }})
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    obj.startup()
    while not obj.check():
        time.sleep(1)
    obj.shutdown()

def test_selenium_startup_shutdown_java_package(self):
    """
    Run tests from package
    :return:
    """
    self.configure({
        'execution': {
            'scenario': {'script': __dir__() + '/../selenium/java_package/src'},
            'executor': 'selenium'
        },
        'reporting': [{'module': 'junit-xml'}]
    })
    self.obj.prepare()
    self.obj.startup()
    while not self.obj.check():
        time.sleep(1)
    self.obj.shutdown()
    self.assertTrue(os.path.exists(os.path.join(self.obj.runner.working_dir, "compiled.jar")))

def test_no_test_in_name(self):
    """
    Test exact number of tests when annotations are used and the class name lacks "test"
    :return:
    """
    obj = SeleniumExecutor()
    obj.engine = self.engine_obj
    obj.settings = self.selenium_config
    obj.engine.config.merge({ScenarioExecutor.EXEC: {
        "executor": "selenium",
        "scenario": {"script": __dir__() + "/../selenium/invalid/selenium1.java"}
    }})
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    obj.startup()
    while not obj.check():
        time.sleep(1)
    obj.shutdown()

def test_requests(self):
    obj = SeleniumExecutor()
    obj.engine = self.engine_obj
    obj.settings = self.selenium_config
    obj.engine.config.merge(yaml.load(open(__dir__() + "/../yaml/selenium_executor_requests.yml").read()))
    obj.engine.config.merge({"provisioning": "local"})
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    obj.get_widget()
    obj.startup()
    while not obj.check():
        time.sleep(1)
    obj.shutdown()
    with open(os.path.join(obj.engine.artifacts_dir, "junit.err")) as fds:
        contents = fds.read()
    self.assertEqual(1, contents.count("ok"))
    self.assertEqual(1, contents.count("OK"))

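# Several tests above reference self.obj, self.engine_obj and
# self.selenium_config without creating them, so they are assumed to come from
# the test case's setUp(). A minimal sketch of such a fixture, with names
# mirroring the attributes used above (the real setUp may differ):
#
#     def setUp(self):
#         super(SeleniumExecutorTest, self).setUp()
#         self.engine_obj = EngineEmul()
#         self.selenium_config = BetterDict()
#         self.obj = SeleniumExecutor()
#         self.obj.engine = self.engine_obj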