class SeleniumTestCase(BZTestCase):
    def setUp(self):
        super(SeleniumTestCase, self).setUp()
        self.engine_obj = EngineEmul()
        self.paths = [ABS_PATH("/../../bzt/10-base.json"), local_paths_config()]
        self.engine_obj.configure(self.paths)
        self.selenium_config = self.engine_obj.config["modules"]["selenium"]
class SeleniumTestCase(BZTestCase):
    """
    :type obj: SubprocessedExecutor
    """

    def __init__(self, methodName='runTest'):
        super(SeleniumTestCase, self).__init__(methodName)
        self.obj = None

    def setUp(self):
        super(SeleniumTestCase, self).setUp()
        self.engine = EngineEmul()
        paths = [local_paths_config()]
        self.engine.configure(paths)  # FIXME: avoid using whole engine in particular module test!

        self.virtual_display = VirtualDisplay()
        self.virtual_display.engine = self.engine
        self.virtual_display.startup()

        self.obj = SeleniumExecutor()
        self.obj.engine = self.engine
        self.obj.settings = self.engine.config.get("modules").get("selenium")
        self.obj.env = self.obj.engine.env

    def tearDown(self):
        self.virtual_display.shutdown()
        super(SeleniumTestCase, self).tearDown()

    def configure(self, config):
        self.obj.engine.config.merge(config)
        self.obj.execution = self.obj.engine.config.get('execution')
        if isinstance(self.obj.execution, list):
            self.obj.execution = self.obj.execution[0]
class SeleniumTestCase(BZTestCase):
    def setUp(self):
        super(SeleniumTestCase, self).setUp()
        self.engine_obj = EngineEmul()
        self.paths = [__dir__() + "/../../bzt/10-base.json", local_paths_config()]
        self.engine_obj.configure(self.paths)  # FIXME: avoid using whole engine in particular module test!
        self.engine_obj.config.get("modules").get("selenium").merge({"virtual-display": {"width": 1024, "height": 768}})
        self.selenium_config = self.engine_obj.config["modules"]["selenium"]
def setUp(self):
    super(TestTestNGTester, self).setUp()
    engine_obj = EngineEmul()
    paths = [local_paths_config()]
    engine_obj.configure(paths)
    self.obj = TestNGTester()
    self.obj.settings = engine_obj.config.get("modules").get("testng")
    self.obj.engine = engine_obj
    self.obj.env = self.obj.engine.env
def test_encode_decode_infinities(self):
    engine = EngineEmul()
    obj = Configuration()
    obj.merge({
        "foo": float("inf"),
    })
    cfg = engine.create_artifact("config", ".json")
    obj.dump(cfg, Configuration.JSON)
    with open(cfg) as fds:
        dump = json.loads(fds.read())
    self.assertEqual(dump["foo"], "inf")
    self.assertEqual(dehumanize_time(dump["foo"]), float("inf"))
def setUp(self):
    super(SeleniumTestCase, self).setUp()
    self.engine_obj = EngineEmul()
    self.engine_obj.artifacts_base_dir = ABS_PATH("/../../build/test")
    self.paths = [ABS_PATH("/../../bzt/10-base.json"), local_paths_config()]
    self.engine_obj.configure(self.paths)
    self.selenium_config = self.engine_obj.config["modules"]["selenium"]
def setUp(self):
    super(TestJUnitTester, self).setUp()
    engine_obj = EngineEmul()
    paths = [local_paths_config()]
    engine_obj.configure(paths)

    # just download geckodriver & chromedriver with selenium
    selenium = SeleniumExecutor()
    selenium.engine = engine_obj
    selenium.env = selenium.engine.env
    selenium.execution.merge({"scenario": {"requests": ["req"]}})
    selenium.prepare()

    self.obj = JUnitTester()
    self.obj.settings = engine_obj.config.get("modules").get("junit")
    self.obj.engine = engine_obj
    self.obj.env = self.obj.engine.env
def setUp(self):
    super(TestJUnitTester, self).setUp()
    engine_obj = EngineEmul()
    paths = [local_paths_config()]
    engine_obj.configure(paths)

    # just download geckodriver & chromedriver with selenium
    selenium = SeleniumExecutor()
    selenium.engine = engine_obj
    selenium.install_required_tools()
    for driver in selenium.webdrivers:
        selenium.env.add_path({"PATH": driver.get_driver_dir()})

    self.obj = JUnitTester()
    self.obj.env = selenium.env
    self.obj.settings = engine_obj.config.get("modules").get("junit")
    self.obj.engine = engine_obj
def test_functional_report(self):
    engine = EngineEmul()
    aggregator = FunctionalAggregator()
    aggregator.engine = engine
    engine.aggregator = aggregator

    obj = JUnitXMLReporter()
    obj.engine = engine
    obj.parameters = BetterDict()

    reader = FuncSamplesReader(RESOURCES_DIR + "functional/nose.ldjson", engine, ROOT_LOGGER)
    aggregator.add_underling(reader)

    aggregator.prepare()
    obj.prepare()
    aggregator.post_process()
    obj.post_process()

    self.assertFilesEqual(obj.report_file_path, RESOURCES_DIR + "functional/xunit-report.xml")
def setUp(self):
    super(SeleniumTestCase, self).setUp()
    self.engine = EngineEmul()
    paths = [local_paths_config()]
    self.engine.configure(paths)  # FIXME: avoid using whole engine in particular module test!

    self.virtual_display = VirtualDisplay()
    self.virtual_display.engine = self.engine
    self.virtual_display.startup()

    self.obj = SeleniumExecutor()
    self.obj.engine = self.engine
    self.obj.settings = self.engine.config.get("modules").get("selenium")
    self.obj.env = self.obj.engine.env
class TestSwagger2YAML(BZTestCase):
    def setUp(self):
        super(TestSwagger2YAML, self).setUp()
        self.engine = EngineEmul()

    def _get_swagger2yaml(self, path, file_name=None):
        return Swagger2YAML(FakeOptions(file_name=file_name), RESOURCES_DIR + path)

    def _get_tmp(self, prefix='test', suffix='.yml'):
        return self.engine.create_artifact(prefix, suffix)

    def test_convert(self):
        self.maxDiff = None
        source = RESOURCES_DIR + "/swagger/petstore.json"
        result = self._get_tmp()
        options = FakeOptions(file_name=result)
        process(options, [source])
        actual = yaml.load(open(result).read())
        expected = yaml.load(open(RESOURCES_DIR + "/swagger/petstore-converted.yaml").read())
        self.assertEqual(actual, expected)
class TestConverter(BZTestCase):
    def setUp(self):
        super(TestConverter, self).setUp()
        self.engine = EngineEmul()

    def _get_soapui2yaml(self, path, file_name=None, test_case=None):
        return SoapUI2YAML(FakeOptions(file_name=file_name, test_case=test_case), __dir__() + path)

    def _get_tmp(self, prefix='test', suffix='.yml'):
        return self.engine.create_artifact(prefix, suffix)

    def test_convert(self):
        source = RESOURCES_DIR + "soapui/project.xml"
        result = self._get_tmp()
        options = FakeOptions(file_name=result, test_case="index")
        process(options, [source])
        actual = yaml.load(open(result).read())
        expected = yaml.load(open(RESOURCES_DIR + "soapui/project.xml.yml").read())
        self.assertEqual(actual, expected)

    def test_flickr(self):
        source = RESOURCES_DIR + "soapui/flickr-sample.xml"
        result = self._get_tmp()
        options = FakeOptions(file_name=result)
        process(options, [source])
        actual = yaml.load(open(result).read())
        expected = yaml.load(open(RESOURCES_DIR + "soapui/flickr-sample.xml.yml").read())
        self.assertEqual(actual, expected)

    def test_egalaxy(self):
        source = RESOURCES_DIR + "soapui/egalaxy.xml"
        result = self._get_tmp()
        options = FakeOptions(file_name=result)
        process(options, [source])
        actual = yaml.load(open(result).read())
        expected = yaml.load(open(RESOURCES_DIR + "soapui/egalaxy.xml.yml").read())
        self.assertEqual(actual, expected)
def setUp(self):
    super(TestEngine, self).setUp()
    self.obj = EngineEmul()
    self.paths = local_paths_config()
def test_terminate_only(self):
    "test is terminated only when it was started and didn't finish"
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "mock",
            "concurrency": 5500,
            "locations": {
                "us-east-1": 1,
                "us-west": 1,
            }
        },
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
        },
        "provisioning": "mock"
    })
    obj.parameters = obj.engine.config['execution']
    obj.engine.aggregator = ConsolidatingAggregator()

    obj.settings["token"] = "FakeToken"
    obj.settings["browser-open"] = False
    obj.settings["check-interval"] = "0ms"  # do not skip checks
    obj.settings["use-deprecated-api"] = False

    obj.client = client = BlazeMeterClientEmul(obj.log)
    client.results.append({"result": []})  # find collection
    client.results.append({"result": []})  # find test
    client.results.append(self.__get_user_info())  # user
    client.results.append({})  # upload files
    client.results.append({"result": {
        "name": "Taurus Collection",
        "items": []
    }})  # transform config to collection
    client.results.append({"result": {"id": 42}})  # create collection
    client.results.append({"result": {"id": id(obj)}})  # start
    client.results.append({
        "result": {
            "id": id(obj),
            "sessions": [
                {"id": "s1", "status": "JMETER_CONSOLE_INIT"},
                {"id": "s2", "status": "JMETER_CONSOLE_INIT"}
            ]
        }
    })
    client.results.append({"result": []})  # sessions
    client.results.append({"result": {}})  # force start
    client.results.append({"result": {"progress": 120, "status": "ENDED"}})  # status should trigger shutdown
    client.results.append({"result": []})  # sessions

    obj.prepare()
    obj.startup()
    obj.check()  # this one should trigger force start
    self.assertTrue(obj.check())
    obj.shutdown()
    obj.post_process()
    self.assertEqual(client.results, [])
def test_some_errors(self):
    mock = BZMock()
    mock.mock_get.update({
        'https://a.blazemeter.com/api/v4/tests?projectId=1&name=Taurus+Test': {"result": []},
        'https://a.blazemeter.com/api/v4/sessions/1': {"result": {'id': 1, "note": "somenote"}},
        'https://a.blazemeter.com/api/v4/masters/1': {"result": {'id': 1, "note": "somenote"}},
    })
    mock.mock_post.update({
        'https://a.blazemeter.com/api/v4/projects': {"result": {'id': 1}},
        'https://a.blazemeter.com/api/v4/tests': {"result": {'id': 1}},
        'https://a.blazemeter.com/api/v4/tests/1/start-external': {"result": {
            "session": {'id': 1, "testId": 1, "userId": 1},
            "master": {'id': 1},
            "signature": "sign"
        }},
        'https://a.blazemeter.com/api/v4/image/1/files?signature=sign': {"result": True},
        'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1' +
        '&pq=0&target=labels_bulk&update=1': {},
        'https://a.blazemeter.com/api/v4/sessions/1/stop': {"result": True},
    })
    mock.mock_patch.update({
        'https://a.blazemeter.com/api/v4/sessions/1': {"result": {"id": 1, "note": "somenote"}},
        'https://a.blazemeter.com/api/v4/masters/1': {"result": {"id": 1, "note": "somenote"}},
    })

    obj = BlazeMeterUploader()
    mock.apply(obj._user)
    obj.parameters['project'] = 'Proj name'
    obj.settings['token'] = '123'
    obj.settings['browser-open'] = 'none'
    obj.engine = EngineEmul()
    obj.prepare()
    obj.startup()
    obj.engine.stopping_reason = ValueError('wrong value')
    obj.aggregated_second(random_datapoint(10))
    obj.kpi_buffer[-1][DataPoint.CUMULATIVE][''][KPISet.ERRORS] = [
        {'msg': 'Forbidden', 'cnt': 10, 'type': KPISet.ERRTYPE_ASSERT, 'urls': [], KPISet.RESP_CODES: '111'},
        {'msg': 'Allowed', 'cnt': 20, 'type': KPISet.ERRTYPE_ERROR, 'urls': [], KPISet.RESP_CODES: '222'}]
    obj.post_process()
    obj.log.info("Requests: %s", mock.requests)

    # check for note appending in _postproc_phase3()
    reqs = mock.requests[-4:]
    self.assertIn('api/v4/sessions/1', reqs[0]['url'])
    self.assertIn('api/v4/sessions/1', reqs[1]['url'])
    self.assertIn('api/v4/masters/1', reqs[2]['url'])
    self.assertIn('api/v4/masters/1', reqs[3]['url'])
    self.assertIn('ValueError: wrong value', str(reqs[1]['data']))
    self.assertIn('ValueError: wrong value', str(reqs[3]['data']))

    labels = mock.requests[8]['data']
    if not isinstance(labels, str):
        labels = labels.decode("utf-8")
    obj.log.info("Labels: %s", labels)
    data = json.loads(str(labels))
    self.assertEqual(1, len(data['labels']))
    total_item = data['labels'][0]
    self.assertEqual('ALL', total_item['name'])
    self.assertEqual(total_item['assertions'],
                     [{'failureMessage': 'Forbidden', 'failures': 10, 'name': 'All Assertions'}])
    self.assertEqual(total_item['errors'], [{'m': 'Allowed', 'count': 20, 'rc': '222'}])
def setUp(self):
    self.obj = ShellExecutor()
    self.obj.parameters = BetterDict()
    self.obj.engine = EngineEmul()
    self.log_recorder = RecordingHandler()
    self.obj.log.addHandler(self.log_recorder)
class TestConverter(BZTestCase): def setUp(self): super(TestConverter, self).setUp() self.engine = EngineEmul() self.obj = JMX2YAML(file_name=None, options=FakeOptions()) def configure(self, src_file, dst_file=None, dump_jmx=None): self.obj.src_file = src_file self.obj.options = FakeOptions(file_name=dst_file, dump_jmx=dump_jmx) def tearDown(self): if self.obj.dst_file and os.path.isfile(self.obj.dst_file): os.remove(self.obj.dst_file) super(TestConverter, self).tearDown() @staticmethod def same_yaml(file1, file2): yml1 = yaml.load(open(file1).read()) yml2 = yaml.load(open(file2).read()) return yml1 == yml2 def _get_tmp(self, prefix='test', suffix='.yml'): return self.engine.create_artifact(prefix, suffix) def test_objprop(self): self.configure(RESOURCES_DIR + "jmeter/jmx/http.jmx", self._get_tmp()) self.sniff_log(self.obj.log) self.obj.process() self.assertNotIn("Removing unknown element: name (None)", self.log_recorder.warn_buff.getvalue()) self.assertNotIn("Removing unknown element: value (None)", self.log_recorder.warn_buff.getvalue()) def test_loadjmx1(self): self.configure(RESOURCES_DIR + "jmeter/jmx/http.jmx", self._get_tmp()) self.sniff_log(self.obj.log) self.obj.process() self.assertIn("Loading jmx file", self.log_recorder.info_buff.getvalue()) self.assertNotEqual("", self.log_recorder.debug_buff.getvalue()) self.assertEqual("", self.log_recorder.err_buff.getvalue()) def test_loadjmx2(self): self.configure(RESOURCES_DIR + "jmeter/jmx/notfound.jmx") self.sniff_log(self.obj.log) try: self.obj.process() self.fail() except BaseException as exc: self.assertIn("File does not exist", exc.args[0]) self.assertIn("Loading jmx file", self.log_recorder.info_buff.getvalue()) self.assertEqual("", self.log_recorder.debug_buff.getvalue()) def test_loadjmx3(self): self.configure(RESOURCES_DIR + "jmeter/jmx/broken.jmx") self.sniff_log(self.obj.log) try: self.obj.process() self.fail() except BaseException as exc: self.assertIn("XML parsing failed", exc.args[0]) self.assertIn("Loading jmx file", self.log_recorder.info_buff.getvalue()) self.assertIn("Error while processing jmx file", self.log_recorder.err_buff.getvalue()) def test_loadjmx4(self): self.configure(RESOURCES_DIR + "jmeter/jmx/http.jmx", self._get_tmp('tmp', 'file')) self.sniff_log(self.obj.log) self.obj.process() self.assertIn("Loading jmx file", self.log_recorder.info_buff.getvalue()) self.assertIn("Done processing, result saved in", self.log_recorder.info_buff.getvalue()) self.assertIn("Removing unknown element", self.log_recorder.warn_buff.getvalue()) def test_export_clean_jmx(self): tmp_jmx_name = self._get_tmp('tmp', '.jmx') open(tmp_jmx_name, 'w+').close() # touch file self.configure(RESOURCES_DIR + "yaml/converter/disabled.jmx", dump_jmx=tmp_jmx_name) self.sniff_log(self.obj.log) self.obj.process() self.assertIn("Loading jmx file", self.log_recorder.info_buff.getvalue()) self.assertIn("already exists and will be overwritten", self.log_recorder.warn_buff.getvalue()) def test_not_jmx(self): self.configure(RESOURCES_DIR + "jmeter/jmx/not-jmx.xml") try: self.obj.process() self.fail() except BaseException as exc: self.assertIn("Bad jmx format", exc.args[0]) def test_clean_disabled_jmx(self): self.configure(RESOURCES_DIR + "yaml/converter/disabled.jmx", self._get_tmp()) self.obj.process() disabled_elements = [element for element in self.obj.converter.dialect.tree.iter() if element.get("enabled") == "false"] self.assertEquals(0, len(disabled_elements)) def test_copy_global_csv_dataset(self): self.configure(RESOURCES_DIR + 
"yaml/converter/global_copy.jmx") self.obj.process() yml = yaml.load(open(self.obj.dst_file).read()) datasets_first_tg = yml.get("scenarios").get("Thread Group one").get("data-sources") datasets_second_tg = yml.get("scenarios").get("Thread Group two").get("data-sources") global_csv_tg_one = [dataset for dataset in datasets_first_tg if dataset.get('path') == 'global.csv'] global_csv_tg_two = [dataset for dataset in datasets_second_tg if dataset.get('path') == 'global.csv'] local_csv_tg_one = [dataset for dataset in datasets_first_tg if dataset.get('path') == 'local.csv'] local_csv_tg_two = [dataset for dataset in datasets_second_tg if dataset.get('path') == 'local.csv'] self.assertEqual(len(global_csv_tg_one), len(global_csv_tg_two), 1) self.assertEqual(len(local_csv_tg_one), 1) self.assertEqual(len(local_csv_tg_two), 0) def test_parse_csv_dataset(self): self.configure(RESOURCES_DIR + "yaml/converter/global_copy.jmx") self.obj.process() yml = yaml.load(open(self.obj.dst_file).read()) datasets = yml.get("scenarios").get("Thread Group one").get("data-sources") local_csv = [dataset for dataset in datasets if dataset.get('path') == 'local.csv'][0] self.assertEqual(local_csv['loop'], False) self.assertEqual(local_csv['delimiter'], ',') self.assertEqual(local_csv['quoted'], False) def test_copy_global_headers(self): yml = self._get_tmp() self.configure(RESOURCES_DIR + "yaml/converter/global_copy.jmx", yml) self.obj.process() yml = yaml.load(open(yml).read()) headers_first_tg = yml.get("scenarios").get("Thread Group one").get("headers", []) headers_second_tg = yml.get("scenarios").get("Thread Group two").get("headers", []) self.assertEqual(len(headers_first_tg), 3) self.assertEqual(len(headers_second_tg), 2) def test_cache_cookie_dns_overrides(self): yml = self._get_tmp() self.configure(RESOURCES_DIR + "yaml/converter/global_copy.jmx", yml) self.obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get('Thread Group one') tg_two = yml.get("scenarios").get('Thread Group two') cache_first_tg = tg_one.get("store-cache") cache_second_tg = tg_two.get("store-cache") cookie_first_tg = tg_one.get("store-cookie") cookie_second_tg = tg_two.get("store-cookie") dns_cache_mgr_first_tg = tg_one.get("use-dns-cache-mgr") dns_cache_mgr_second_tg = tg_two.get("use-dns-cache-mgr") self.assertEqual(cache_first_tg, True) self.assertEqual(cache_second_tg, True) self.assertEqual(cookie_first_tg, False) self.assertEqual(cookie_second_tg, True) self.assertEqual(dns_cache_mgr_first_tg, True) self.assertEqual(dns_cache_mgr_second_tg, True) def test_think_time_overrides(self): yml = self._get_tmp() self.configure(RESOURCES_DIR + "yaml/converter/global_copy.jmx", yml) self.obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get('Thread Group one') tg_two = yml.get("scenarios").get('Thread Group two') request_tg_two = tg_two.get("requests")[0] tg_one_timer = tg_one.get("think-time") tg_two_timer = tg_two.get("think-time") req_timer = request_tg_two.get("think-time") self.assertEqual(tg_one_timer, "200ms") self.assertEqual(tg_two_timer, "300ms") self.assertEqual(req_timer, "100ms") def test_request_defaults(self): yml = self._get_tmp() self.configure(RESOURCES_DIR + "yaml/converter/global_copy.jmx", yml) self.obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get('Thread Group one') tg_two = yml.get("scenarios").get('Thread Group two') self.assertEqual(tg_one.get("default-address"), "https://127.0.0.2/") self.assertEqual(tg_two.get("default-address"), 
"http://127.0.0.3:2582/resources/") self.assertEqual(tg_one.get("timeout"), "500ms") self.assertEqual(tg_two.get("timeout"), "100ms") self.assertEqual(tg_one.get("retrieve-resources"), True) self.assertEqual(tg_two.get("retrieve-resources"), True) self.assertEqual(tg_one.get("concurrent-pool-size"), 5) self.assertEqual(tg_two.get("concurrent-pool-size"), 10) def test_copy_global_request_assertions(self): yml = self._get_tmp() self.configure(RESOURCES_DIR + "yaml/converter/assertions.jmx", yml) self.obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get("tg1") tg_two = yml.get("scenarios").get("tg2") tg_one_assertions = tg_one.get("assert") self.assertEqual(len(tg_one_assertions), 2) # global assertion + tg assertion tg_two_assertions = tg_two.get("assert") self.assertEqual(len(tg_two_assertions), 1) # global only assertion tg_one_req_one_assertion = tg_one.get("requests")[0].get("assert")[0] expected = {"subject": "headers", "contains": ["tg1httpreq1", "tg1httpreq12"], "assume-success": False, "not": False, "regexp": False} self.assertEqual(tg_one_req_one_assertion, expected) tg_one_assertion = tg_one.get("assert")[0] expected = {"subject": "body", "contains": ["tg1body_text_not_contains"], "assume-success": False, "not": True, 'regexp': False} self.assertEqual(tg_one_assertion, expected) def test_broken_request_assertions(self): # see comments in broken_resp_asserts.jmx for explanation of cases # don't save broken_resp_asserts.jmx by jmeter yml = self._get_tmp() self.configure(RESOURCES_DIR + "yaml/converter/broken_resp_asserts.jmx", yml) self.obj.process() yml1 = RESOURCES_DIR + "yaml/converter/broken_resp_asserts.yml" yml2 = yml self.assertTrue(yml1, yml2) def test_copy_global_json_assertions(self): self.configure(RESOURCES_DIR + "yaml/converter/assertions.jmx") self.obj.process() yml = yaml.load(open(self.obj.dst_file).read()) tg_one = yml.get("scenarios").get("tg1") tg_two = yml.get("scenarios").get("tg2") tg_one_assertions = tg_one.get("assert-jsonpath") self.assertEqual(len(tg_one_assertions), 1) # global assertion + tg assertion tg_two_assertions = tg_two.get("assert-jsonpath") self.assertEqual(len(tg_two_assertions), 1) # global only assertion tg_one_req_one_jp = tg_one.get("requests")[0].get("assert-jsonpath", []) # no assertions self.assertEqual(len(tg_one_req_one_jp), 0) tg_two_req_one_jp = tg_two.get("requests")[0].get("assert-jsonpath", []) self.assertEqual(len(tg_two_req_one_jp), 1) expected = {"expect-null": True, "invert": True, "jsonpath": '$(":input")', "validate": True, "regexp": True} self.assertEqual(expected, tg_two_req_one_jp[0]) # test concurrency, ramp-up, iterations in execution tg_one_exec = yml.get(ScenarioExecutor.EXEC)[0] tg_two_exec = yml.get(ScenarioExecutor.EXEC)[1] tg_three_exec = yml.get(ScenarioExecutor.EXEC)[2] self.assertEqual(tg_one_exec.get("concurrency"), 10) self.assertEqual(tg_two_exec.get("concurrency"), 15) self.assertEqual(tg_three_exec.get("concurrency"), 1) self.assertEqual(tg_one_exec.get("ramp-up"), '10s') self.assertEqual(tg_two_exec.get("ramp-up"), '60s') self.assertEqual(tg_three_exec.get("ramp-up"), '2s') self.assertEqual(tg_one_exec.get("iterations"), 1) self.assertEqual(tg_two_exec.get("iterations"), 1) self.assertEqual(tg_three_exec.get("iterations"), 100) def test_xpath_assertions(self): self.configure(RESOURCES_DIR + "yaml/converter/assertions.jmx") self.obj.process() yml = yaml.load(open(self.obj.dst_file).read()) tg = yml.get("scenarios").get("tg3") assertions = tg.get("assert-xpath") 
self.assertEqual(len(assertions), 2) self.assertEqual(assertions[0], { "xpath": "/note/to", "ignore-whitespace": False, "invert": False, "validate-xml": False, "use-tolerant-parser": False, }) self.assertEqual(assertions[1], { "xpath": "/note/from", "ignore-whitespace": True, "invert": True, "validate-xml": True, "use-tolerant-parser": True, }) def test_extractors(self): self.configure(RESOURCES_DIR + "yaml/converter/extractors.jmx") self.obj.process() yml = yaml.load(open(self.obj.dst_file).read()) tg_one = yml.get("scenarios").get("tg1") tg_two = yml.get("scenarios").get("tg2") tg_three = yml.get("scenarios").get("tg3") tg_one_extractors = tg_one.get("extract-regexp") tg_two_extractors = tg_two.get("extract-regexp") self.assertEqual(len(tg_one_extractors), 1) # global self.assertEqual(len(tg_two_extractors), 1) # global + local - ignored tg_one_req_exr = tg_one.get("requests")[0].get("extract-regexp", {}) self.assertEqual(len(tg_one_req_exr), 2) expected = {'template': '1', 'match-no': 1, 'regexp': '*tg1hr1', 'default': 'default'} self.assertEqual(expected, tg_one_req_exr.get("test_tg1hr1")) # test extract-jsonpath tg_one_extractors = tg_one.get("extract-jsonpath") tg_two_extractors = tg_two.get("extract-jsonpath") self.assertEqual(len(tg_one_extractors), 5) # 4x global + local self.assertEqual(len(tg_two_extractors), 4) # 4x global tg_three_req_exr = tg_three.get("requests")[0].get("extract-jsonpath", {}) self.assertEqual(len(tg_three_req_exr), 1) # 1x local # test extract-xpath tg_three_extractors = tg_three.get("extract-xpath") self.assertEqual(len(tg_three_extractors), 2) # 2 global self.assertEqual(tg_three_extractors['bookAuthor'], { "xpath": "/books/[@title()='1984']/author", "default": "no_author", "ignore-whitespace": False, "validate-xml": False, "use-tolerant-parser": False, }) self.assertEqual(tg_three_extractors['author'], { "xpath": "/books/[@title()='Fahrenheit 451']/author", "default": "no", "ignore-whitespace": True, "validate-xml": True, "use-tolerant-parser": False, }) self.assertEqual(tg_one_extractors['VAR1'], { "jsonpath": "$.foo", "default": "DEF_1", }) self.assertEqual(tg_one_extractors['VAR2'], { "jsonpath": "$.bar", "default": "DEF_2", }) def test_request_body(self): yml = self._get_tmp() self.configure(RESOURCES_DIR + "yaml/converter/extractors.jmx", yml) self.obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get("tg1") tg_two = yml.get("scenarios").get("tg2") tg_one_req_one_body = tg_one.get("requests")[0].get("body") self.assertEqual(tg_one_req_one_body, "body-string") tg_one_req_one_body = tg_one.get("requests")[1].get("body") self.assertEqual(tg_one_req_one_body, {"body_param1": "value1", "body_param2": "value2"}) tg_two_req_one_body = tg_two.get("requests")[0].get("body") self.assertEqual(tg_two_req_one_body, None) def test_json_body(self): yml = self._get_tmp() self.configure(RESOURCES_DIR + "yaml/converter/json_body.jmx", yml) self.obj.process() yml = yaml.load(open(yml).read()) reqs1 = yml.get("scenarios").get("tg1")['requests'] reqs2 = yml.get("scenarios").get("tg2")['requests'] bodies = {req['label']: req.get('body', None) for req in reqs1 + reqs2} targets = {'r1_1': None, 'r1_2': list, 'r1_3': str, 'r1_4': dict, 'r2_1': None, 'r2_2': dict, 'r2_3': str, 'r2_4': str, 'r2_5': str} for label in targets: self.assertTrue((bodies[label] is None and targets[label] is None) or isinstance(bodies[label], targets[label])) def test_duration_throughput(self): yml = self._get_tmp() self.configure(RESOURCES_DIR + 
"yaml/converter/duration.jmx", yml) self.obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get(ScenarioExecutor.EXEC)[0] tg_two = yml.get(ScenarioExecutor.EXEC)[1] tg_three = yml.get(ScenarioExecutor.EXEC)[2] self.assertEqual("10s", tg_one.get("ramp-up")) self.assertEqual("60s", tg_one.get("hold-for")) self.assertEqual("10s", tg_one.get("ramp-up")) self.assertEqual(100, tg_one.get("throughput")) self.assertEqual("10s", tg_two.get("ramp-up")) self.assertEqual("20s", tg_two.get("hold-for")) self.assertEqual(20, tg_two.get("throughput")) self.assertEqual("60s", tg_three.get("ramp-up")) self.assertEqual("40s", tg_three.get("hold-for")) self.assertEqual(100, tg_three.get("throughput")) def test_all(self): self.configure(RESOURCES_DIR + "yaml/converter/disabled.jmx") self.obj.process() yml1 = RESOURCES_DIR + "yaml/converter/disabled.yml" yml2 = self.obj.dst_file self.assertTrue(self.same_yaml(yml1, yml2)) def test_params_conversion(self): self.configure(RESOURCES_DIR + "yaml/converter/params_conversion.jmx") self.sniff_log(self.obj.log) self.obj.process() yml1 = self.obj.dst_file yml2 = RESOURCES_DIR + "yaml/converter/params_conversion.yml" self.assertTrue(self.same_yaml(yml1, yml2)) self.assertNotIn('n1', self.log_recorder.warn_buff.getvalue()) self.assertNotIn('n2', self.log_recorder.warn_buff.getvalue()) self.assertIn('n1_101', self.log_recorder.debug_buff.getvalue()) self.assertIn('n1_011', self.log_recorder.debug_buff.getvalue()) self.assertIn('n1_001', self.log_recorder.debug_buff.getvalue()) def test_param_null(self): self.configure(RESOURCES_DIR + "yaml/converter/param-null.jmx") self.obj.process() def test_load_profile_default_values(self): yml = self._get_tmp() self.configure(RESOURCES_DIR + "yaml/converter/default.jmx", yml) self.obj.process() yml = yaml.load(open(yml).read()) execution = yml.get(ScenarioExecutor.EXEC)[0] self.assertEqual("60s", execution.get("ramp-up")) self.assertEqual("60s", execution.get("hold-for")) self.assertEqual(1, execution.get("concurrency")) self.assertEqual(1, execution.get("iterations")) def test_variables(self): self.configure(RESOURCES_DIR + "yaml/converter/vars.jmx") self.obj.process() yml = yaml.load(open(self.obj.dst_file).read()) scenarios = yml.get("scenarios") tg_one = scenarios["TG1"] self.assertEqual(tg_one.get('variables'), {"tg1_local": "tg1", "global_var": "global", "auth_token": "shouldn't be masked"}) tg_two = scenarios["TG2"] self.assertEqual(tg_two.get('variables'), {"tg2_local": "tg2", "global_var": "global", "auth_token": "shouldn't be masked"}) def test_no_variables(self): self.configure(RESOURCES_DIR + "yaml/converter/default.jmx") self.obj.process() yml = yaml.load(open(self.obj.dst_file).read()) execution = yml.get(ScenarioExecutor.EXEC)[0] scenarios = yml.get("scenarios") scenario = scenarios[execution.get("scenario")] self.assertNotIn("variables", scenario) def test_controllers_to_requests(self): self.configure(RESOURCES_DIR + "yaml/converter/controllers.jmx") self.obj.process() yml1 = RESOURCES_DIR + "yaml/converter/controllers.yml" yml2 = self.obj.dst_file self.assertTrue(self.same_yaml(yml1, yml2)) def test_jsr223(self): self.configure(RESOURCES_DIR + "jmeter/jmx/jsr223.jmx") try: self.obj.process() lines = FileReader(self.obj.dst_file).get_lines(last_pass=True) yml = yaml.load(''.join(lines)) scenarios = yml.get("scenarios") scenario = scenarios["Thread Group"] requests = scenario["requests"] self.assertEqual(len(requests), 1) request = requests[0] self.assertIn("jsr223", request) jsrs = request["jsr223"] 
self.assertTrue(isinstance(jsrs, list)) self.assertEqual(len(jsrs), 5) self.assertEqual(jsrs[0]["language"], "beanshell") self.assertEqual(jsrs[0]["script-text"], "scripty") self.assertEqual(jsrs[0]["parameters"], "parames") self.assertNotIn('script-file', jsrs[0]) self.assertEqual(jsrs[1]["language"], "javascript") self.assertEqual(jsrs[1]["script-text"], u'console.log("ПРИВЕТ");\nline("2");') self.assertEqual(jsrs[1]["parameters"], "a b c") self.assertNotIn('script-file', jsrs[1]) self.assertEqual(jsrs[2]["language"], "javascript") self.assertEqual(jsrs[2]["script-file"], "script.js") self.assertEqual(jsrs[2]["parameters"], None) self.assertNotIn('script-text', jsrs[2]) self.assertEqual(jsrs[3]["language"], "beanshell") self.assertEqual(jsrs[3]["execute"], "before") self.assertEqual(jsrs[3]["parameters"], None) self.assertEqual(jsrs[3]['script-text'], 'console.log("beanshell aka jsr223");') self.assertNotIn('script-file', jsrs[3]) self.assertEqual(jsrs[4]["language"], "java") self.assertEqual(jsrs[4]["execute"], "before") self.assertEqual(jsrs[4]["parameters"], None) self.assertIn('BlazeDemo.java', jsrs[4]['script-file']) self.assertNotIn('script-text', jsrs[4]) self.assertTrue(os.path.exists(os.path.join(get_full_path(self.obj.dst_file, step_up=1), 'script.js'))) finally: os.remove(os.path.join(get_full_path(self.obj.dst_file, step_up=1), 'script.js')) def test_unicode(self): self.configure(RESOURCES_DIR + "yaml/converter/unicode.jmx") self.obj.process() def test_path_without_domain(self): self.configure(RESOURCES_DIR + "jmeter/jmx/http.jmx") self.obj.process() yml = yaml.load(open(self.obj.dst_file).read()) scenarios = yml.get("scenarios") scenario = scenarios["Thread Group"] requests = scenario["requests"] self.assertEqual(len(requests), 3) without_domain = requests[2] self.assertEqual(without_domain['url'], '/path') def test_request_content_encoding(self): self.configure(RESOURCES_DIR + "jmeter/jmx/http.jmx") self.obj.process() yml = yaml.load(open(self.obj.dst_file).read()) scenarios = yml.get("scenarios") scenario = scenarios["Thread Group"] requests = scenario["requests"] self.assertEqual(len(requests), 3) request = requests[1] self.assertEqual(request['content-encoding'], 'utf-8') def test_request_redirect_policy(self): self.configure(RESOURCES_DIR + "jmeter/jmx/http.jmx") self.obj.process() yml = yaml.load(open(self.obj.dst_file).read()) scenarios = yml.get("scenarios") scenario = scenarios["Thread Group"] requests = scenario["requests"] self.assertEqual(len(requests), 3) self.assertEqual(requests[0].get('follow-redirects'), True) self.assertEqual(requests[1].get('follow-redirects'), True) self.assertEqual(requests[2].get('follow-redirects'), False) def test_controllers(self): self.configure(RESOURCES_DIR + "jmeter/jmx/all_controllers.jmx") self.obj.process() yml = yaml.load(open(self.obj.dst_file).read()) requests = yml.get("scenarios").get("Thread Group").get("requests") self.assertEqual(len(requests), 14)
def setUp(self):
    self.engine = EngineEmul()
def setUp(self):
    super(TestEngine, self).setUp()
    self.obj = EngineEmul()
    self.obj.artifacts_base_dir = tempfile.gettempdir() + "/bzt"
    self.paths = local_paths_config()
class TestEngine(BZTestCase):
    def setUp(self):
        super(TestEngine, self).setUp()
        self.obj = EngineEmul()
        self.paths = local_paths_config()

    def test_jmx(self):
        configs = [
            __dir__() + "/../bzt/10-base.json",
            __dir__() + "/json/jmx.json",
            self.paths
        ]
        self.obj.configure(configs)
        self.obj.prepare()
        self.obj.run()
        self.obj.post_process()

    def test_requests(self):
        configs = [
            __dir__() + "/../bzt/10-base.json",
            __dir__() + "/json/get-post.json",
            __dir__() + "/json/reporting.json",
            self.paths
        ]
        self.obj.configure(configs)
        self.obj.prepare()
        self.obj.prepare()
        self.obj.run()
        self.obj.post_process()

    def test_double_exec(self):
        configs = [
            __dir__() + "/../bzt/10-base.json",
            __dir__() + "/yaml/triple.yml",
            __dir__() + "/json/reporting.json",
            self.paths
        ]
        self.obj.configure(configs)
        self.obj.prepare()
        self.obj.run()
        self.obj.post_process()

    def test_grinder(self):
        configs = [
            __dir__() + "/../bzt/10-base.json",
            __dir__() + "/json/grinder.json",
            self.paths
        ]
        self.obj.configure(configs)
        self.obj.prepare()
        self.obj.run()
        self.obj.post_process()

    def test_gatling(self):
        configs = [
            __dir__() + "/../bzt/10-base.json",
            __dir__() + "/json/gatling.json",
            self.paths
        ]
        self.obj.configure(configs)
        self.obj.prepare()
        self.obj.run()
        self.obj.post_process()

    def test_unknown_module(self):
        configs = [
            __dir__() + "/../bzt/10-base.json",
            __dir__() + "/json/gatling.json",
            self.paths
        ]
        self.obj.configure(configs)
        self.obj.config["provisioning"] = "unknown"
        self.obj.config["modules"]["unknown"] = BetterDict()
        try:
            self.obj.prepare()
            self.fail()
        except ValueError:
            pass
def test_fail_on_zero_results(self):
    obj = GatlingExecutor()
    obj.engine = EngineEmul()
    obj.execution.merge({"scenario": {"script": __dir__() + "/../gatling/BasicSimulation.scala"}})
    obj.prepare()
    self.assertRaises(RuntimeWarning, obj.post_process)
def setUp(self):
    super(TestConverter, self).setUp()
    self.engine = EngineEmul()
def test_check(self):
    client = BlazeMeterClientEmul(logging.getLogger(''))
    client.results.append({"marker": "ping", 'result': {}})
    client.results.append({"marker": "projects", 'result': []})
    client.results.append({"marker": "project-create", 'result': {
        "id": time.time(),
        "name": "boo",
        "userId": time.time(),
        "description": None,
        "created": time.time(),
        "updated": time.time(),
        "organizationId": None
    }})
    client.results.append({"marker": "tests", 'result': {}})
    client.results.append({"marker": "test-create", 'result': {'id': 'unittest1'}})
    client.results.append({"marker": "sess-start", 'result': {
        'session': {'id': 'sess1', 'userId': 1},
        'signature': ''
    }})
    client.results.append({"marker": "first push", 'result': {'session': {}}})
    # client.results.append(None)  # first check error stats
    client.results.append({"marker": "mon push", "result": True})
    client.results.append({"marker": "second push", 'result': {
        'session': {"statusCode": 140, 'status': 'ENDED'}
    }})
    # client.results.append(None)  # second check error stats
    client.results.append({"marker": "post-proc push", 'result': {'session': {}}})
    client.results.append({"marker": "upload1", "result": True})  # post-proc error stats
    client.results.append({"marker": "terminate", 'result': {'session': {}}})

    obj = BlazeMeterUploader()
    obj.parameters['project'] = 'Proj name'
    obj.settings['token'] = '123'
    obj.settings['browser-open'] = 'none'
    obj.engine = EngineEmul()
    shutil.copy(__file__, obj.engine.artifacts_dir + os.path.basename(__file__))
    obj.client = client
    obj.prepare()
    obj.startup()
    for x in range(0, 31):
        obj.aggregated_second(random_datapoint(x))
    mon = [{"ts": 1, "source": "local", "cpu": 1, "mem": 2, "bytes-recv": 100, "other": 0}]
    obj.monitoring_data(mon)
    obj.check()
    for x in range(32, 65):
        obj.aggregated_second(random_datapoint(x))
    try:
        obj.check()
        self.fail()
    except KeyboardInterrupt:
        pass
    obj.aggregated_second(random_datapoint(10))
    obj.shutdown()
    obj.post_process()
def test_exclude_problematic(self):
    obj = InstallChecker()
    obj.engine = EngineEmul()
    obj.engine.config.get("modules")["err"] = "hello there"
    obj.settings["exclude"] = ["err"]
    self.assertRaises(NormalShutdown, obj.prepare)
def setUp(self):
    super(TestConverter, self).setUp()
    self.engine = EngineEmul()
    self.obj = JMX2YAML(file_name=None, options=FakeOptions())
def setUp(self):
    super(TestScenarioExecutor, self).setUp()
    self.engine = EngineEmul()
    self.executor = ScenarioExecutor()
    self.executor.engine = self.engine
class TestScenarioExecutor(BZTestCase):
    def setUp(self):
        super(TestScenarioExecutor, self).setUp()
        self.engine = EngineEmul()
        self.executor = ScenarioExecutor()
        self.executor.engine = self.engine
        self.executor.env = self.executor.engine.env

    def test_scenario_extraction_script(self):
        self.engine.config.merge({
            "execution": [{
                "scenario": {
                    "script": "tests/resources/selenium/python/test_blazemeter_fail.py",
                    "param": "value"
                }}]})
        self.executor.execution = self.engine.config.get('execution')[0]
        self.executor.get_scenario()
        config = self.engine.config
        self.assertEqual(config['execution'][0]['scenario'], 'test_blazemeter_fail.py')
        self.assertIn('test_blazemeter_fail.py', config['scenarios'])

    def test_body_files(self):
        body_file1 = RESOURCES_DIR + "jmeter/body-file.dat"
        body_file2 = RESOURCES_DIR + "jmeter/jmx/http.jmx"
        self.engine.config.merge({
            'execution': [{
                'iterations': 1,
                'executor': 'siege',
                'scenario': 'bf'}],
            'scenarios': {
                'bf': {
                    "requests": [
                        {
                            'url': 'http://first.com',
                            'body-file': body_file1
                        }, {
                            'url': 'http://second.com',
                            'body': 'body2',
                            'body-file': body_file2}]}}})
        self.executor.execution = self.engine.config.get('execution')[0]
        scenario = self.executor.get_scenario()

        # check body fields in get_requests() results
        reqs = list(scenario.get_requests())
        body_fields = [req.body for req in reqs]
        self.assertIn('sample of body', body_fields[0])
        self.assertIn('body2', body_fields[1])

        # check body fields and body-files fields after get_requests()
        scenario = self.executor.get_scenario()
        body_files = [req.get('body-file') for req in scenario.get('requests')]
        body_fields = [req.get('body') for req in scenario.get('requests')]
        self.assertTrue(all(body_files))
        self.assertFalse(body_fields[0])
        self.assertIn('body2', body_fields[1])

    def test_scenario_is_script(self):
        self.engine.config.merge({
            "execution": [{
                "scenario": "tests/resources/selenium/python/test_blazemeter_fail.py"
            }]})
        self.executor.execution = self.engine.config.get('execution')[0]
        self.executor.get_scenario()
        config = self.engine.config
        self.assertEqual(config['execution'][0]['scenario'], 'test_blazemeter_fail.py')
        self.assertIn('test_blazemeter_fail.py', config['scenarios'])

    def test_scenario_extraction_request(self):
        self.engine.config.merge({
            "execution": [{
                "scenario": {
                    "requests": [{"url": "url.example"}],
                    "param": "value"
                }}]})
        self.executor.execution = self.engine.config.get('execution')[0]
        self.executor.get_scenario()
        config = self.engine.config
        scenario = config['execution'][0]['scenario']
        self.assertTrue(isinstance(scenario, string_types))
        self.assertIn(scenario, config['scenarios'])

    def test_scenario_not_found(self):
        self.engine.config.merge({
            "execution": [{
                "scenario": "non-existent"
            }]})
        self.executor.execution = self.engine.config.get('execution')[0]
        self.assertRaises(TaurusConfigError, self.executor.get_scenario)

    def test_scenario_no_requests(self):
        self.engine.config.merge({
            "execution": [{
                "scenario": ["url1", "url2"]
            }]})
        self.executor.execution = self.engine.config.get('execution')[0]
        self.assertRaises(TaurusConfigError, self.executor.get_scenario)

    def test_passes_artifacts_dir(self):
        cmdline = "echo %TAURUS_ARTIFACTS_DIR%" if is_windows() else "echo $TAURUS_ARTIFACTS_DIR"
        self.engine.eval_env()
        self.engine.prepare()
        self.executor.env.set(self.engine.env.get())
        process = self.executor.execute(cmdline, shell=True)
        stdout, _ = communicate(process)
        self.assertEquals(self.engine.artifacts_dir, stdout.strip())

    def test_case_of_variables(self):
        env = {'aaa': 333, 'AAA': 666}
        line_tpl = "echo %%%s%%" if is_windows() else "echo $%s"
        cmdlines = [line_tpl % "aaa", line_tpl % "AAA"]
        results = set()
        for cmdline in cmdlines:
            self.executor.env.set(env)
            process = self.executor.execute(cmdline, shell=True)
            stdout, _ = communicate(process)
            results.add(stdout.strip())
        if is_windows():
            self.assertEqual(1, len(results))
        else:
            self.assertEqual(2, len(results))

    def test_get_load_str(self):
        self.executor.execution.merge({
            "concurrency": "2",
            "hold-for": "3",
            "ramp-up": "4",
            "iterations": "5",
            "throughput": "6",
            "steps": "7",
        })
        load = self.executor.get_load()
        self.assertEquals(2, load.concurrency)
        self.assertEquals(3, load.hold)
        self.assertEquals(4, load.ramp_up)
        self.assertEquals(5, load.iterations)
        self.assertEquals(6, load.throughput)
        self.assertEquals(7, load.steps)

    def test_get_load_str_fail(self):
        self.executor.execution.merge({
            "concurrency": "2VU",
        })
        self.assertRaises(TaurusConfigError, self.executor.get_load)
def test_simple(self):
    obj = PBenchExecutor()
    obj.engine = EngineEmul()
    obj.engine.aggregator = ConsolidatingAggregator()
    obj.engine.aggregator.add_listener(DataPointLogger())
    obj.engine.config.merge({"provisioning": "test"})

    if os.path.exists("/home/undera/Sources/phantom"):  # FIXME: not good, get rid of it
        obj.settings.merge({
            "path": "/home/undera/Sources/phantom/bin/phantom",
            "modules-path": "/home/undera/Sources/phantom/lib/phantom",
        })
    else:
        obj.settings.merge({
            "path": os.path.join(os.path.dirname(__file__), '..', "phantom.sh"),
        })

    obj.execution.merge({
        "log-responses": "proto_error",
        # "iterations": 5000000,
        "concurrency": 10,
        "throughput": 1000,
        "ramp-up": "1m",
        # "steps": 5,
        "hold-for": "15",
        "scenario": {
            "timeout": 1,
            "default-address": "http://localhost:33",
            "headers": {
                "Connection": "close"
            },
            "requests": [
                # "/",
                {
                    "url": "/api",
                    "method": "POST",
                    "headers": {
                        "Content-Length": 0
                    },
                    "body": {
                        "param": "value"
                    }
                }
            ]
        }
    })
    obj.engine.aggregator.prepare()
    obj.prepare()
    obj.engine.aggregator.startup()
    obj.startup()
    while not obj.check():
        logging.debug("Running...")
        obj.engine.aggregator.check()
        time.sleep(1)
    obj.shutdown()
    obj.engine.aggregator.shutdown()
    obj.post_process()
    obj.engine.aggregator.post_process()
def _get_pbench(self):
    obj = PBenchExecutor()
    obj.engine = EngineEmul()
    obj.settings = BetterDict()
    obj.engine.config = BetterDict()
    return obj
class TestConverter(BZTestCase): def setUp(self): self.engine = EngineEmul() def _get_jmx2yaml(self, path, file_name=None, dump_jmx=False): return JMX2YAML(FakeOptions(file_name=file_name, dump_jmx=dump_jmx), __dir__() + path) def _get_tmp(self, prefix='test', suffix='.yml'): return self.engine.create_artifact(prefix, suffix) def test_loadjmx1(self): log_recorder = RecordingHandler() obj = self._get_jmx2yaml("/jmeter/jmx/http.jmx", self._get_tmp()) obj.log.addHandler(log_recorder) obj.process() self.assertIn("Loading jmx file", log_recorder.info_buff.getvalue()) self.assertNotEqual("", log_recorder.debug_buff.getvalue()) self.assertEqual("", log_recorder.err_buff.getvalue()) obj.log.removeHandler(log_recorder) def test_loadjmx2(self): log_recorder = RecordingHandler() obj = self._get_jmx2yaml("/jmeter/jmx/notfound.jmx") obj.log.addHandler(log_recorder) try: obj.process() self.fail() except BaseException as exc: self.assertIn("File does not exist", exc.args[0]) self.assertIn("Loading jmx file", log_recorder.info_buff.getvalue()) self.assertIn("does not exist", log_recorder.err_buff.getvalue()) self.assertEqual("", log_recorder.debug_buff.getvalue()) obj.log.removeHandler(log_recorder) def test_loadjmx3(self): log_recorder = RecordingHandler() obj = self._get_jmx2yaml("/jmeter/jmx/broken.jmx") obj.log.addHandler(log_recorder) try: obj.process() self.fail() except BaseException as exc: self.assertIn("XML parsing failed", exc.args[0]) self.assertIn("Loading jmx file", log_recorder.info_buff.getvalue()) self.assertIn("Error while processing jmx file", log_recorder.err_buff.getvalue()) self.assertIn("XML parsing error", log_recorder.debug_buff.getvalue()) obj.log.removeHandler(log_recorder) def test_loadjmx4(self): log_recorder = RecordingHandler() obj = self._get_jmx2yaml("/jmeter/jmx/http.jmx", self._get_tmp('tmp', 'file')) obj.log.addHandler(log_recorder) obj.process() self.assertIn("Loading jmx file", log_recorder.info_buff.getvalue()) self.assertIn("Done processing, result saved in", log_recorder.info_buff.getvalue()) self.assertIn("Removing unknown element", log_recorder.warn_buff.getvalue()) obj.log.removeHandler(log_recorder) def test_export_clean_jmx(self): tmp_jmx_name = self._get_tmp('tmp', '.jmx') open(tmp_jmx_name, 'w+').close() # touch file yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/disabled.jmx", yml, dump_jmx=tmp_jmx_name) log_recorder = RecordingHandler() obj.log.addHandler(log_recorder) obj.process() self.assertIn("Loading jmx file", log_recorder.info_buff.getvalue()) self.assertIn("already exists and will be overwritten", log_recorder.warn_buff.getvalue()) obj.log.removeHandler(log_recorder) def test_not_jmx(self): obj = self._get_jmx2yaml("/jmeter/jmx/not-jmx.xml") try: obj.process() self.fail() except BaseException as exc: self.assertIn("Bad jmx format", exc.args[0]) def test_clean_disabled_jmx(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/disabled.jmx", yml) obj.process() disabled_elements = [ element for element in obj.converter.dialect.tree.iter() if element.get("enabled") == "false" ] self.assertEquals(0, len(disabled_elements)) def test_copy_global_csv_dataset(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/global_copy.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) datasets_first_tg = yml.get("scenarios").get("Thread Group one").get( "data-sources") datasets_second_tg = yml.get("scenarios").get("Thread Group two").get( "data-sources") global_csv_tg_one = [ dataset for dataset in 
datasets_first_tg if dataset.get('path') == 'global.csv' ] global_csv_tg_two = [ dataset for dataset in datasets_second_tg if dataset.get('path') == 'global.csv' ] local_csv_tg_one = [ dataset for dataset in datasets_first_tg if dataset.get('path') == 'local.csv' ] local_csv_tg_two = [ dataset for dataset in datasets_second_tg if dataset.get('path') == 'local.csv' ] self.assertEqual(len(global_csv_tg_one), len(global_csv_tg_two), 1) self.assertEqual(len(local_csv_tg_one), 1) self.assertEqual(len(local_csv_tg_two), 0) def test_parse_csv_dataset(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/global_copy.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) datasets = yml.get("scenarios").get("Thread Group one").get( "data-sources") local_csv = [ dataset for dataset in datasets if dataset.get('path') == 'local.csv' ][0] self.assertEqual(local_csv['loop'], False) self.assertEqual(local_csv['delimiter'], ',') self.assertEqual(local_csv['quoted'], False) def test_copy_global_headers(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/global_copy.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) headers_first_tg = yml.get("scenarios").get("Thread Group one").get( "headers", []) headers_second_tg = yml.get("scenarios").get("Thread Group two").get( "headers", []) self.assertEqual(len(headers_first_tg), 3) self.assertEqual(len(headers_second_tg), 2) def test_cache_cookie_dns_overrides(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/global_copy.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get('Thread Group one') tg_two = yml.get("scenarios").get('Thread Group two') cache_first_tg = tg_one.get("store-cache") cache_second_tg = tg_two.get("store-cache") cookie_first_tg = tg_one.get("store-cookie") cookie_second_tg = tg_two.get("store-cookie") dns_cache_mgr_first_tg = tg_one.get("use-dns-cache-mgr") dns_cache_mgr_second_tg = tg_two.get("use-dns-cache-mgr") self.assertEqual(cache_first_tg, True) self.assertEqual(cache_second_tg, True) self.assertEqual(cookie_first_tg, False) self.assertEqual(cookie_second_tg, True) self.assertEqual(dns_cache_mgr_first_tg, True) self.assertEqual(dns_cache_mgr_second_tg, True) def test_think_time_overrides(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/global_copy.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get('Thread Group one') tg_two = yml.get("scenarios").get('Thread Group two') request_tg_two = tg_two.get("requests")[0] tg_one_timer = tg_one.get("think-time") tg_two_timer = tg_two.get("think-time") req_timer = request_tg_two.get("think-time") self.assertEqual(tg_one_timer, "200ms") self.assertEqual(tg_two_timer, "300ms") self.assertEqual(req_timer, "100ms") def test_request_defaults(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/global_copy.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get('Thread Group one') tg_two = yml.get("scenarios").get('Thread Group two') self.assertEqual(tg_one.get("default-address"), "https://127.0.0.2/") self.assertEqual(tg_two.get("default-address"), "http://127.0.0.3:2582/resources/") self.assertEqual(tg_one.get("timeout"), "500ms") self.assertEqual(tg_two.get("timeout"), "100ms") self.assertEqual(tg_one.get("retrieve-resources"), True) self.assertEqual(tg_two.get("retrieve-resources"), True) self.assertEqual(tg_one.get("concurrent-pool-size"), 5) 
self.assertEqual(tg_two.get("concurrent-pool-size"), 10) def test_copy_global_request_assertions(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/assertions.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get("tg1") tg_two = yml.get("scenarios").get("tg2") tg_one_assertions = tg_one.get("assert") self.assertEqual(len(tg_one_assertions), 2) # global assertion + tg assertion tg_two_assertions = tg_two.get("assert") self.assertEqual(len(tg_two_assertions), 1) # global only assertion tg_one_req_one_assertion = tg_one.get("requests")[0].get("assert")[0] expected = { 'subject': 'headers', 'contains': ["tg1httpreq1", "tg1httpreq12"], "not": False, 'regexp': True } self.assertEqual(tg_one_req_one_assertion, expected) tg_one_assertion = tg_one.get("assert")[0] expected = { 'subject': 'body', 'contains': ["tg1body_text_not_contains"], "not": True, 'regexp': True } self.assertEqual(tg_one_assertion, expected) def test_copy_global_json_assertions(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/assertions.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get("tg1") tg_two = yml.get("scenarios").get("tg2") tg_one_assertions = tg_one.get("assert-jsonpath") self.assertEqual(len(tg_one_assertions), 1) # global assertion + tg assertion tg_two_assertions = tg_two.get("assert-jsonpath") self.assertEqual(len(tg_two_assertions), 1) # global only assertion tg_one_req_one_jp = tg_one.get("requests")[0].get( "assert-jsonpath", []) # no assertions self.assertEqual(len(tg_one_req_one_jp), 0) tg_two_req_one_jp = tg_two.get("requests")[0].get( "assert-jsonpath", []) self.assertEqual(len(tg_two_req_one_jp), 1) expected = { "expect-null": True, "invert": True, "jsonpath": '$(":input")', "validate": True } self.assertEqual(expected, tg_two_req_one_jp[0]) # test concurrency, ramp-up, iterations in execution tg_one_exec = yml.get(ScenarioExecutor.EXEC)[0] tg_two_exec = yml.get(ScenarioExecutor.EXEC)[1] tg_three_exec = yml.get(ScenarioExecutor.EXEC)[2] self.assertEqual(tg_one_exec.get("concurrency"), 10) self.assertEqual(tg_two_exec.get("concurrency"), 15) self.assertEqual(tg_three_exec.get("concurrency"), 1) self.assertEqual(tg_one_exec.get("ramp-up"), '10s') self.assertEqual(tg_two_exec.get("ramp-up"), '60s') self.assertEqual(tg_three_exec.get("ramp-up"), '2s') self.assertEqual(tg_one_exec.get("iterations"), 1) self.assertEqual(tg_two_exec.get("iterations"), 1) self.assertEqual(tg_three_exec.get("iterations"), 100) def test_xpath_assertions(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/assertions.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg = yml.get("scenarios").get("tg3") assertions = tg.get("assert-xpath") self.assertEqual(len(assertions), 2) self.assertEqual( assertions[0], { "xpath": "/note/to", "ignore-whitespace": False, "invert": False, "validate-xml": False, "use-tolerant-parser": False, }) self.assertEqual( assertions[1], { "xpath": "/note/from", "ignore-whitespace": True, "invert": True, "validate-xml": True, "use-tolerant-parser": True, }) def test_extractors(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/extractors.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get("tg1") tg_two = yml.get("scenarios").get("tg2") tg_three = yml.get("scenarios").get("tg3") tg_one_extractors = tg_one.get("extract-regexp") tg_two_extractors = tg_two.get("extract-regexp") 
self.assertEqual(len(tg_one_extractors), 1) # global self.assertEqual(len(tg_two_extractors), 1) # global + local - ignored tg_one_req_exr = tg_one.get("requests")[0].get("extract-regexp", {}) self.assertEqual(len(tg_one_req_exr), 2) expected = { 'template': '1', 'match-no': 1, 'regexp': '*tg1hr1', 'default': 'default' } self.assertEqual(expected, tg_one_req_exr.get("test_tg1hr1")) # test extract-jsonpath tg_one_extractors = tg_one.get("extract-jsonpath") tg_two_extractors = tg_two.get("extract-jsonpath") self.assertEqual(len(tg_one_extractors), 3) # 2x global + local self.assertEqual(len(tg_two_extractors), 2) # 2x global tg_three_req_exr = tg_three.get("requests")[0].get( "extract-jsonpath", {}) self.assertEqual(len(tg_three_req_exr), 1) # 1x local # test extract-xpath tg_three_extractors = tg_three.get("extract-xpath") self.assertEqual(len(tg_three_extractors), 2) # 2 global self.assertEqual( tg_three_extractors['bookAuthor'], { "xpath": "/books/[@title()='1984']/author", "default": "no_author", "ignore-whitespace": False, "validate-xml": False, "use-tolerant-parser": False, }) self.assertEqual( tg_three_extractors['author'], { "xpath": "/books/[@title()='Fahrenheit 451']/author", "default": "no", "ignore-whitespace": True, "validate-xml": True, "use-tolerant-parser": False, }) def test_request_body(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/extractors.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get("tg1") tg_two = yml.get("scenarios").get("tg2") tg_one_req_one_body = tg_one.get("requests")[0].get("body") self.assertEqual(tg_one_req_one_body, "body-string") tg_one_req_one_body = tg_one.get("requests")[1].get("body") self.assertEqual(tg_one_req_one_body, { "body_param1": "value1", "body_param2": "value2" }) tg_two_req_one_body = tg_two.get("requests")[0].get("body") self.assertEqual(tg_two_req_one_body, None) def test_duration_throughput(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/duration.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get(ScenarioExecutor.EXEC)[0] tg_two = yml.get(ScenarioExecutor.EXEC)[1] tg_three = yml.get(ScenarioExecutor.EXEC)[2] self.assertEqual("10s", tg_one.get("ramp-up")) self.assertEqual("60s", tg_one.get("hold-for")) self.assertEqual("10s", tg_one.get("ramp-up")) self.assertEqual(100, tg_one.get("throughput")) self.assertEqual("10s", tg_two.get("ramp-up")) self.assertEqual("20s", tg_two.get("hold-for")) self.assertEqual(20, tg_two.get("throughput")) self.assertEqual("60s", tg_three.get("ramp-up")) self.assertEqual("40s", tg_three.get("hold-for")) self.assertEqual(100, tg_three.get("throughput")) def test_all(self): obj = self._get_jmx2yaml("/yaml/converter/disabled.jmx", self._get_tmp()) obj.process() yml = yaml.load( open(__dir__() + "/yaml/converter/disabled.yml").read()) self.assertEqual(obj.converter.convert(obj.file_to_convert), yml) def test_param_null(self): obj = self._get_jmx2yaml("/yaml/converter/param-null.jmx", self._get_tmp()) obj.process() obj.converter.convert(obj.file_to_convert) def test_load_profile_default_values(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/default.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) execution = yml.get(ScenarioExecutor.EXEC)[0] self.assertEqual("60s", execution.get("ramp-up")) self.assertEqual("60s", execution.get("hold-for")) self.assertEqual(1, execution.get("concurrency")) self.assertEqual(1, execution.get("iterations")) def test_variables(self): yml 
= self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/vars.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) scenarios = yml.get("scenarios") tg_one = scenarios["TG1"] self.assertEqual(tg_one.get('variables'), { "tg1_local": "tg1", "global_var": "global" }) tg_two = scenarios["TG2"] self.assertEqual(tg_two.get('variables'), { "tg2_local": "tg2", "global_var": "global" }) def test_no_variables(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/default.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) execution = yml.get(ScenarioExecutor.EXEC)[0] scenarios = yml.get("scenarios") scenario = scenarios[execution.get("scenario")] self.assertNotIn("variables", scenario) def test_controllers_to_requests(self): obj = self._get_jmx2yaml("/yaml/converter/controllers.jmx", self._get_tmp()) obj.process() yml = yaml.load( open(__dir__() + "/yaml/converter/controllers.yml").read()) self.assertEqual(obj.converter.convert(obj.file_to_convert), yml)
def test_problematic(self):
    obj = InstallChecker()
    obj.engine = EngineEmul()
    obj.engine.config.get("modules")["err"] = "hello there"
    self.assertRaises(ToolError, obj.prepare)
def get_gatling():
    path = os.path.abspath(RESOURCES_DIR + "gatling/gatling" + EXE_SUFFIX)
    obj = GatlingExecutor()
    obj.engine = EngineEmul()
    obj.settings.merge({"path": path})
    return obj
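# NOTE (illustrative sketch, not part of the original suite): a helper like the
# one above is typically consumed by individual Gatling tests, which merge a
# scenario into the executor's execution config before prepare(). The test name
# and the fixture script path below are assumptions for illustration only.
def test_gatling_prepare_smoke(self):
    obj = get_gatling()
    obj.execution.merge({"scenario": {"script": RESOURCES_DIR + "gatling/BasicSimulation.scala"}})
    obj.prepare()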
def test_check(self):
    mock = BZMock()
    mock.mock_get.update({
        'https://a.blazemeter.com/api/v4/tests?workspaceId=1&name=Taurus+Test': {"result": []},
        'https://a.blazemeter.com/api/v4/tests?projectId=1&name=Taurus+Test': {"result": []}
    })
    mock.mock_post.update({
        'https://a.blazemeter.com/api/v4/projects': {"result": {
            "id": 1, "name": "boo", "userId": 2, "description": None,
            "created": time.time(), "updated": time.time(), "organizationId": None}},
        'https://a.blazemeter.com/api/v4/tests': {"result": {'id': 1}},
        'https://a.blazemeter.com/api/v4/tests/1/start-external': {"result": {
            'session': {'id': 1, 'userId': 1, 'testId': 1},
            'master': {'id': 1, 'userId': 1},
            'signature': 'sign'}},
        'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=labels_bulk&update=1': [
            {},
            {"result": {'session': {"statusCode": 140, 'status': 'ENDED'}}},
            {},
        ],
        'https://a.blazemeter.com/api/v4/image/1/files?signature=sign': [
            IOError("monitoring push expected fail"),
            {"result": True}, {"result": True}, {"result": True}, {"result": True},
            {"result": True}, {"result": True}, {"result": True}, {"result": True},
        ],
        'https://a.blazemeter.com/api/v4/data/masters/1/custom-metrics': [
            IOError("custom metric push expected fail"),
            {"result": True}, {"result": True},
        ],
        'https://a.blazemeter.com/api/v4/sessions/1/stop': {}
    })

    obj = BlazeMeterUploader()
    obj.parameters['project'] = 'Proj name'
    obj.settings['token'] = '123'
    obj.settings['browser-open'] = 'none'
    obj.settings['send-custom-metrics'] = True
    obj.settings['send-custom-tables'] = True
    obj.engine = EngineEmul()
    shutil.copy(__file__, os.path.join(obj.engine.artifacts_dir, os.path.basename(__file__)))
    mock.apply(obj._user)
    obj._user.timeout = 0.1
    obj.prepare()
    obj.startup()
    for x in range(0, 31):
        obj.aggregated_second(random_datapoint(x))
    mon = [{"ts": 1, "source": "local", "cpu": 1, "mem": 2, "bytes-recv": 100, "other": 0},
           {"ts": 1, "source": "chrome", "memory": 32, "cpu": 23}]
    obj.monitoring_data(mon)
    obj.check()
    for x in range(32, 65):
        obj.aggregated_second(random_datapoint(x))
    obj.last_dispatch = time.time() - 2 * obj.send_interval
    self.assertRaises(KeyboardInterrupt, obj.check)
    obj.aggregated_second(random_datapoint(10))
    obj.shutdown()
    log_file = obj.engine.create_artifact('log', '.tmp')
    handler = logging.FileHandler(log_file)
    obj.engine.log.parent.addHandler(handler)
    obj.engine.config.get('modules').get('shellexec').get('env')['TAURUS_INDEX_ALL'] = 1
    obj.post_process()
    self.assertEqual(22, len(mock.requests))
    obj.engine.log.parent.removeHandler(handler)
def setUp(self):
    sys.path.append(__dir__() + "/../locust/")
    self.obj = LocustIOExecutor()
    self.obj.engine = EngineEmul()
    self.obj.engine.config['provisioning'] = 'local'
def temp_yaml(self):
    emul = EngineEmul()
    return emul.create_artifact("test", ".yml")
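# NOTE (illustrative sketch): the artifact path returned by the helper above can
# be used for a dump/reload round-trip; the test name and the config content
# here are assumptions, not taken from the original suite.
def test_temp_yaml_roundtrip(self):
    path = self.temp_yaml()
    with open(path, "w") as fds:
        yaml.dump({"execution": [{"concurrency": 1}]}, fds)
    with open(path) as fds:
        self.assertEqual(1, yaml.full_load(fds.read())["execution"][0]["concurrency"])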
class TestSwagger2YAML(BZTestCase): def setUp(self): super(TestSwagger2YAML, self).setUp() self.engine = EngineEmul() def _get_swagger2yaml(self, path, file_name=None): return Swagger2YAML(FakeOptions(file_name=file_name), RESOURCES_DIR + path) def _get_tmp(self, prefix='test', suffix='.yml'): return self.engine.create_artifact(prefix, suffix) def test_convert(self): source = RESOURCES_DIR + "/swagger/petstore.json" expected = RESOURCES_DIR + "/swagger/petstore-converted.yaml" result = self._get_tmp() options = FakeOptions(file_name=result) process(options, [source]) # shutil.copy(result, expected) actual = yaml.full_load(open(result).read()) expected = yaml.full_load(open(expected).read()) self.assertEqual(actual, expected) def test_convert_scenarios_from_paths(self): source = RESOURCES_DIR + "/swagger/bzm-api.json" expected = RESOURCES_DIR + "/swagger/bzm-api-converted.yaml" result = self._get_tmp() options = FakeOptions(file_name=result, scenarios_from_paths=True) process(options, [source]) # shutil.copy(result, expected) actual = yaml.full_load(open(result).read()) expected = yaml.full_load(open(expected).read()) self.assertEqual(actual, expected) def test_convert_scenarios_with_multiple_types(self): source = RESOURCES_DIR + "/swagger/swagger-with-multiple-types.json" expected = RESOURCES_DIR + "/swagger/swagger-with-multiple-types.yml" result = self._get_tmp() options = FakeOptions(file_name=result, scenarios_from_paths=True) process(options, [source]) # shutil.copy(result, expected) actual = yaml.full_load(open(result).read()) expected = yaml.full_load(open(expected).read()) self.assertEqual(actual, expected) def test_convert_security_apikey_header(self): source = RESOURCES_DIR + "/swagger/auth-key.json" expected = RESOURCES_DIR + "/swagger/auth-key-converted.yaml" result = self._get_tmp() options = FakeOptions(file_name=result) process(options, [source]) # shutil.copy(result, expected) actual = yaml.full_load(open(result).read()) expected = yaml.full_load(open(expected).read()) self.assertEqual(actual, expected) def test_convert_security_basic(self): source = RESOURCES_DIR + "/swagger/auth-basic.json" expected = RESOURCES_DIR + "/swagger/auth-basic-converted.yaml" result = self._get_tmp() options = FakeOptions(file_name=result) process(options, [source]) # shutil.copy(result, expected) actual = yaml.full_load(open(result).read()) expected = yaml.full_load(open(expected).read()) self.assertEqual(actual, expected) def test_convert_security_basic_local(self): source = RESOURCES_DIR + "/swagger/auth-basic-local.json" expected = RESOURCES_DIR + "/swagger/auth-basic-local-converted.yaml" result = self._get_tmp() options = FakeOptions(file_name=result) process(options, [source]) # shutil.copy(result, expected) actual = yaml.full_load(open(result).read()) expected = yaml.full_load(open(expected).read()) self.assertEqual(actual, expected) def test_convert_security_apikey_query(self): source = RESOURCES_DIR + "/swagger/auth-key-as-param.json" expected = RESOURCES_DIR + "/swagger/auth-key-as-param-converted.yaml" result = self._get_tmp() options = FakeOptions(file_name=result) process(options, [source]) # shutil.copy(result, expected) actual = yaml.full_load(open(result).read()) expected = yaml.full_load(open(expected).read()) self.assertEqual(actual, expected) def test_convert_interpolation_values(self): source = RESOURCES_DIR + "/swagger/bzm-api.json" expected = RESOURCES_DIR + "/swagger/bzm-converted-values.yaml" result = self._get_tmp() options = FakeOptions(file_name=result) 
process(options, [source]) # shutil.copy(result, expected) actual = yaml.full_load(open(result).read()) expected = yaml.full_load(open(expected).read()) self.assertEqual(actual, expected) def test_convert_interpolation_variables(self): source = RESOURCES_DIR + "/swagger/bzm-api.json" expected = RESOURCES_DIR + "/swagger/bzm-converted-variables.yaml" result = self._get_tmp() options = FakeOptions(file_name=result, parameter_interpolation=Swagger.INTERPOLATE_WITH_JMETER_VARS) process(options, [source]) # shutil.copy(result, expected) actual = yaml.full_load(open(result).read()) expected = yaml.full_load(open(expected).read()) self.assertEqual(actual, expected) def test_convert_interpolation_none(self): source = RESOURCES_DIR + "/swagger/bzm-api.json" expected = RESOURCES_DIR + "/swagger/bzm-converted-none.yaml" result = self._get_tmp() options = FakeOptions(file_name=result, parameter_interpolation=Swagger.INTERPOLATE_DISABLE) process(options, [source]) # shutil.copy(result, expected) actual = yaml.full_load(open(result).read()) expected = yaml.full_load(open(expected).read()) self.assertEqual(actual, expected) def test_convert_security_apikey_multiscenarios(self): source = RESOURCES_DIR + "/swagger/auth-key.json" expected = RESOURCES_DIR + "/swagger/auth-key-multiscenarios-converted.yaml" result = self._get_tmp() options = FakeOptions(file_name=result, scenarios_from_paths=True) process(options, [source]) # shutil.copy(result, expected) actual = yaml.full_load(open(result).read()) expected = yaml.full_load(open(expected).read()) self.assertEqual(actual, expected)
class TestEngine(BZTestCase): def setUp(self): super(TestEngine, self).setUp() self.obj = EngineEmul() self.paths = local_paths_config() def test_find_file(self): self.sniff_log(self.obj.log) config = RESOURCES_DIR + "json/get-post.json" configs = [config, self.paths] self.obj.configure(configs) self.assertEqual(2, len(self.obj.file_search_paths)) self.obj.find_file(config) self.assertEqual("", self.log_recorder.warn_buff.getvalue()) self.obj.find_file("reporting.json") self.assertIn("Guessed location", self.log_recorder.warn_buff.getvalue()) self.obj.find_file("definitely_missed.file") self.assertIn("Could not find", self.log_recorder.warn_buff.getvalue()) self.obj.find_file("http://localhost:8000/BlazeDemo.html") self.assertIn("Downloading http://localhost:8000/BlazeDemo.html", self.log_recorder.info_buff.getvalue()) def test_missed_config(self): configs = ['definitely_missed.file'] try: self.obj.configure(configs) self.fail() except TaurusConfigError as exc: self.assertIn('reading config file', str(exc)) def test_configuration_smoothness(self): def find_ad_dict_ed(*args): if isinstance(args[0], dict) and not isinstance(args[0], BetterDict): raise BaseException("dict found in Configuration") configs = [RESOURCES_DIR + "json/get-post.json", self.paths] self.obj.configure(configs) self.assertTrue(isinstance(self.obj.config, Configuration)) BetterDict.traverse(self.obj.config, find_ad_dict_ed) def test_requests(self): configs = [ RESOURCES_DIR + "json/get-post.json", RESOURCES_DIR + "json/reporting.json", self.paths ] self.obj.configure(configs) self.obj.prepare() for executor in self.obj.provisioning.executors: executor.env.set({"TEST_MODE": "files"}) self.obj.run() self.obj.post_process() def test_double_exec(self): configs = [ RESOURCES_DIR + "yaml/triple.yml", RESOURCES_DIR + "json/reporting.json", self.paths ] self.obj.configure(configs) self.obj.prepare() self.assertEquals(1, len(self.obj.services)) for executor in self.obj.provisioning.executors: executor.env.set({"TEST_MODE": "files"}) self.obj.run() self.obj.post_process() def test_unknown_module(self): configs = [RESOURCES_DIR + "json/gatling.json", self.paths] self.obj.configure(configs) self.obj.config["provisioning"] = "unknown" self.obj.config["modules"]["unknown"] = BetterDict() self.assertRaises(TaurusConfigError, self.obj.prepare) def test_null_aggregator(self): self.obj.config.merge({ "execution": [{ "scenario": { "requests": [{ "url": "http://example.com/" }], } }], "settings": { "aggregator": None, "default-executor": "jmeter", }, "modules": { "local": "bzt.modules.provisioning.Local", "jmeter": { "class": "tests.modules.jmeter.MockJMeterExecutor", "detect-plugins": False, "protocol-handlers": { "http": "bzt.jmx.http.HTTPProtocolHandler" } }, } }) self.obj.unify_config() self.obj.prepare() def test_yaml_multi_docs(self): configs = [RESOURCES_DIR + "yaml/multi-docs.yml", self.paths] self.obj.configure(configs) self.obj.prepare() self.assertEqual(len(self.obj.config["execution"]), 2) def test_json_format_regression(self): configs = [RESOURCES_DIR + "json/json-but-not-yaml.json"] self.obj.configure(configs) self.obj.prepare() def test_invalid_format(self): configs = [RESOURCES_DIR + "jmeter-dist-3.0.zip"] self.assertRaises(TaurusConfigError, lambda: self.obj.configure(configs)) def test_included_configs(self): configs = [ RESOURCES_DIR + "yaml/included-level1.yml", ] self.obj.configure(configs) self.assertTrue(self.obj.config["level1"]) self.assertTrue(self.obj.config["level2"]) self.assertTrue(self.obj.config["level3"]) 
self.assertListEqual(['included-level2.yml', 'included-level3.yml'], self.obj.config["included-configs"]) def test_included_configs_cycle(self): configs = [ RESOURCES_DIR + "yaml/included-circular1.yml", ] self.obj.configure(configs) self.assertTrue(self.obj.config["level1"]) self.assertTrue(self.obj.config["level2"]) self.assertListEqual([ 'included-circular2.yml', 'included-circular1.yml', 'included-circular2.yml' ], self.obj.config["included-configs"]) def test_env_eval(self): configs = [ RESOURCES_DIR + "yaml/env-eval.yml", ] os.environ["BZT_ENV_TEST_UNSET"] = "set" try: self.obj.configure(configs) self.obj.eval_env() self.assertEquals("success/top", self.obj.config["toplevel"]) self.assertEquals("success/test/", self.obj.config["settings"]["artifacts-dir"]) self.assertEquals( "http://success/", self.obj.config["scenarios"]["scen1"]["default-address"]) self.assertEquals( "/success/", self.obj.config["scenarios"]["scen1"]["requests"][0]) self.assertNotEquals( "/${PATH}/", self.obj.config["scenarios"]["scen1"]["requests"][1]) self.assertEquals( "/${TEMP}/", self.obj.config["scenarios"]["scen1"]["requests"][2]) self.assertEquals( "/" + self.obj.artifacts_dir + "/", self.obj.config["scenarios"]["scen1"]["requests"][3]) finally: if "BZT_ENV_TEST" in os.environ: os.environ.pop("BZT_ENV_TEST") if "BZT_ENV_TEST_UNSET" in os.environ: os.environ.pop("BZT_ENV_TEST_UNSET") def test_singletone_service(self): configs = [ RESOURCES_DIR + "yaml/singletone-service.yml", ] self.obj.configure(configs, read_config_files=False) self.obj.prepare() self.assertEquals(2, len(self.obj.services)) self.assertEquals(None, self.obj.services[0].parameters['run-at']) self.assertEquals("mock", self.obj.services[1].parameters['run-at']) self.assertEquals(2, len(self.obj.reporters)) self.assertEquals("mock", self.obj.reporters[0].parameters['run-at']) self.assertEquals(None, self.obj.reporters[1].parameters['run-at']) def test_autodetect_plugin_configs(self): self.sniff_log(self.obj.log) sys.path.append(RESOURCES_DIR + "plugins") try: configs = [ RESOURCES_DIR + "plugins/bzt_plugin_dummy/demo.yml", ] self.obj.configure(configs, read_config_files=True) self.obj.prepare() self.assertEqual({'class': 'bzt_plugin_dummy.dummy.DummyExecutor'}, self.obj.config['modules']['dummy']) finally: sys.path.remove(RESOURCES_DIR + "plugins")
def test_no_token(self):
    obj = Proxy2JMXEmul()
    obj.engine = EngineEmul()
    obj.engine.config.merge({})
    obj.settings = obj.engine.config.get('recorder')
    self.assertRaises(TaurusConfigError, obj.prepare)
def configure(self, jtl_file):
    engine = EngineEmul()
    self.obj = FuncJTLReader(jtl_file, engine, logging.getLogger(''))
class TestEngine(BZTestCase): def setUp(self): super(TestEngine, self).setUp() self.obj = EngineEmul() self.paths = local_paths_config() def test_requests(self): configs = [ __dir__() + "/../bzt/10-base.json", __dir__() + "/json/get-post.json", __dir__() + "/json/reporting.json", self.paths ] self.obj.configure(configs) self.obj.prepare() self.obj.prepare() self.obj.run() self.obj.post_process() def test_double_exec(self): configs = [ __dir__() + "/../bzt/10-base.json", __dir__() + "/yaml/triple.yml", __dir__() + "/json/reporting.json", self.paths ] self.obj.configure(configs) self.obj.prepare() self.obj.run() self.obj.post_process() def test_unknown_module(self): configs = [ __dir__() + "/../bzt/10-base.json", __dir__() + "/json/gatling.json", self.paths ] self.obj.configure(configs) self.obj.config["provisioning"] = "unknown" self.obj.config["modules"]["unknown"] = BetterDict() try: self.obj.prepare() self.fail() except ValueError: pass
def setUp(self):
    super(TestScenarioExecutor, self).setUp()
    self.engine = EngineEmul()
    self.executor = ScenarioExecutor()
    self.executor.engine = self.engine
    self.executor.env = self.executor.engine.env
class TestConverter(BZTestCase): def setUp(self): super(TestConverter, self).setUp() self.engine = EngineEmul() self.out_file = self.engine.create_artifact("converted", ".out") @staticmethod def same_yaml(file1, file2): yml1 = yaml.load(open(file1).read()) yml2 = yaml.load(open(file2).read()) return yml1 == yml2 def _get_jmx2yaml(self, path, file_name=None, dump_jmx=None): return JMX2YAML(FakeOptions(file_name=file_name, dump_jmx=dump_jmx), __dir__() + path) def _get_tmp(self, prefix='test', suffix='.yml'): return self.engine.create_artifact(prefix, suffix) def test_objprop(self): obj = self._get_jmx2yaml("/resources/jmeter/jmx/http.jmx", self._get_tmp()) self.sniff_log(obj.log) obj.process() self.assertNotIn("Removing unknown element: name (None)", self.log_recorder.warn_buff.getvalue()) self.assertNotIn("Removing unknown element: value (None)", self.log_recorder.warn_buff.getvalue()) def test_loadjmx1(self): obj = self._get_jmx2yaml("/resources/jmeter/jmx/http.jmx", self._get_tmp()) self.sniff_log(obj.log) obj.process() self.assertIn("Loading jmx file", self.log_recorder.info_buff.getvalue()) self.assertNotEqual("", self.log_recorder.debug_buff.getvalue()) self.assertEqual("", self.log_recorder.err_buff.getvalue()) def test_loadjmx2(self): obj = self._get_jmx2yaml("/resources/jmeter/jmx/notfound.jmx") self.sniff_log(obj.log) try: obj.process() self.fail() except BaseException as exc: self.assertIn("File does not exist", exc.args[0]) self.assertIn("Loading jmx file", self.log_recorder.info_buff.getvalue()) self.assertEqual("", self.log_recorder.debug_buff.getvalue()) def test_loadjmx3(self): obj = self._get_jmx2yaml("/resources/jmeter/jmx/broken.jmx") self.sniff_log(obj.log) try: obj.process() self.fail() except BaseException as exc: self.assertIn("XML parsing failed", exc.args[0]) self.assertIn("Loading jmx file", self.log_recorder.info_buff.getvalue()) self.assertIn("Error while processing jmx file", self.log_recorder.err_buff.getvalue()) def test_loadjmx4(self): obj = self._get_jmx2yaml("/resources/jmeter/jmx/http.jmx", self._get_tmp('tmp', 'file')) self.sniff_log(obj.log) obj.process() self.assertIn("Loading jmx file", self.log_recorder.info_buff.getvalue()) self.assertIn("Done processing, result saved in", self.log_recorder.info_buff.getvalue()) self.assertIn("Removing unknown element", self.log_recorder.warn_buff.getvalue()) def test_export_clean_jmx(self): tmp_jmx_name = self._get_tmp('tmp', '.jmx') open(tmp_jmx_name, 'w+').close() # touch file obj = self._get_jmx2yaml("/resources/yaml/converter/disabled.jmx", dump_jmx=tmp_jmx_name) self.sniff_log(obj.log) obj.process() self.assertIn("Loading jmx file", self.log_recorder.info_buff.getvalue()) self.assertIn("already exists and will be overwritten", self.log_recorder.warn_buff.getvalue()) def test_not_jmx(self): obj = self._get_jmx2yaml("/resources/jmeter/jmx/not-jmx.xml") try: obj.process() self.fail() except BaseException as exc: self.assertIn("Bad jmx format", exc.args[0]) def test_clean_disabled_jmx(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/resources/yaml/converter/disabled.jmx", yml) obj.process() disabled_elements = [ element for element in obj.converter.dialect.tree.iter() if element.get("enabled") == "false" ] self.assertEquals(0, len(disabled_elements)) def test_copy_global_csv_dataset(self): obj = self._get_jmx2yaml("/resources/yaml/converter/global_copy.jmx") obj.process() yml = yaml.load(open(obj.dst_file).read()) datasets_first_tg = yml.get("scenarios").get("Thread Group one").get( "data-sources") 
datasets_second_tg = yml.get("scenarios").get("Thread Group two").get( "data-sources") global_csv_tg_one = [ dataset for dataset in datasets_first_tg if dataset.get('path') == 'global.csv' ] global_csv_tg_two = [ dataset for dataset in datasets_second_tg if dataset.get('path') == 'global.csv' ] local_csv_tg_one = [ dataset for dataset in datasets_first_tg if dataset.get('path') == 'local.csv' ] local_csv_tg_two = [ dataset for dataset in datasets_second_tg if dataset.get('path') == 'local.csv' ] self.assertEqual(len(global_csv_tg_one), len(global_csv_tg_two), 1) self.assertEqual(len(local_csv_tg_one), 1) self.assertEqual(len(local_csv_tg_two), 0) def test_parse_csv_dataset(self): obj = self._get_jmx2yaml("/resources/yaml/converter/global_copy.jmx") obj.process() yml = yaml.load(open(obj.dst_file).read()) datasets = yml.get("scenarios").get("Thread Group one").get( "data-sources") local_csv = [ dataset for dataset in datasets if dataset.get('path') == 'local.csv' ][0] self.assertEqual(local_csv['loop'], False) self.assertEqual(local_csv['delimiter'], ',') self.assertEqual(local_csv['quoted'], False) def test_copy_global_headers(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/resources/yaml/converter/global_copy.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) headers_first_tg = yml.get("scenarios").get("Thread Group one").get( "headers", []) headers_second_tg = yml.get("scenarios").get("Thread Group two").get( "headers", []) self.assertEqual(len(headers_first_tg), 3) self.assertEqual(len(headers_second_tg), 2) def test_cache_cookie_dns_overrides(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/resources/yaml/converter/global_copy.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get('Thread Group one') tg_two = yml.get("scenarios").get('Thread Group two') cache_first_tg = tg_one.get("store-cache") cache_second_tg = tg_two.get("store-cache") cookie_first_tg = tg_one.get("store-cookie") cookie_second_tg = tg_two.get("store-cookie") dns_cache_mgr_first_tg = tg_one.get("use-dns-cache-mgr") dns_cache_mgr_second_tg = tg_two.get("use-dns-cache-mgr") self.assertEqual(cache_first_tg, True) self.assertEqual(cache_second_tg, True) self.assertEqual(cookie_first_tg, False) self.assertEqual(cookie_second_tg, True) self.assertEqual(dns_cache_mgr_first_tg, True) self.assertEqual(dns_cache_mgr_second_tg, True) def test_think_time_overrides(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/resources/yaml/converter/global_copy.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get('Thread Group one') tg_two = yml.get("scenarios").get('Thread Group two') request_tg_two = tg_two.get("requests")[0] tg_one_timer = tg_one.get("think-time") tg_two_timer = tg_two.get("think-time") req_timer = request_tg_two.get("think-time") self.assertEqual(tg_one_timer, "200ms") self.assertEqual(tg_two_timer, "300ms") self.assertEqual(req_timer, "100ms") def test_request_defaults(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/resources/yaml/converter/global_copy.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get('Thread Group one') tg_two = yml.get("scenarios").get('Thread Group two') self.assertEqual(tg_one.get("default-address"), "https://127.0.0.2/") self.assertEqual(tg_two.get("default-address"), "http://127.0.0.3:2582/resources/") self.assertEqual(tg_one.get("timeout"), "500ms") self.assertEqual(tg_two.get("timeout"), "100ms") 
self.assertEqual(tg_one.get("retrieve-resources"), True) self.assertEqual(tg_two.get("retrieve-resources"), True) self.assertEqual(tg_one.get("concurrent-pool-size"), 5) self.assertEqual(tg_two.get("concurrent-pool-size"), 10) def test_copy_global_request_assertions(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/resources/yaml/converter/assertions.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get("tg1") tg_two = yml.get("scenarios").get("tg2") tg_one_assertions = tg_one.get("assert") self.assertEqual(len(tg_one_assertions), 2) # global assertion + tg assertion tg_two_assertions = tg_two.get("assert") self.assertEqual(len(tg_two_assertions), 1) # global only assertion tg_one_req_one_assertion = tg_one.get("requests")[0].get("assert")[0] expected = { "subject": "headers", "contains": ["tg1httpreq1", "tg1httpreq12"], "assume-success": False, "not": False, "regexp": False } self.assertEqual(tg_one_req_one_assertion, expected) tg_one_assertion = tg_one.get("assert")[0] expected = { "subject": "body", "contains": ["tg1body_text_not_contains"], "assume-success": False, "not": True, 'regexp': False } self.assertEqual(tg_one_assertion, expected) def test_broken_request_assertions(self): # see comments in broken_resp_asserts.jmx for explanation of cases # don't save broken_resp_asserts.jmx by jmeter yml = self._get_tmp() obj = self._get_jmx2yaml( "/resources/yaml/converter/broken_resp_asserts.jmx", yml) obj.process() yml1 = __dir__() + "/resources/yaml/converter/broken_resp_asserts.yml" yml2 = yml self.assertTrue(yml1, yml2) def test_copy_global_json_assertions(self): obj = self._get_jmx2yaml("/resources/yaml/converter/assertions.jmx") obj.process() yml = yaml.load(open(obj.dst_file).read()) tg_one = yml.get("scenarios").get("tg1") tg_two = yml.get("scenarios").get("tg2") tg_one_assertions = tg_one.get("assert-jsonpath") self.assertEqual(len(tg_one_assertions), 1) # global assertion + tg assertion tg_two_assertions = tg_two.get("assert-jsonpath") self.assertEqual(len(tg_two_assertions), 1) # global only assertion tg_one_req_one_jp = tg_one.get("requests")[0].get( "assert-jsonpath", []) # no assertions self.assertEqual(len(tg_one_req_one_jp), 0) tg_two_req_one_jp = tg_two.get("requests")[0].get( "assert-jsonpath", []) self.assertEqual(len(tg_two_req_one_jp), 1) expected = { "expect-null": True, "invert": True, "jsonpath": '$(":input")', "validate": True, "regexp": True } self.assertEqual(expected, tg_two_req_one_jp[0]) # test concurrency, ramp-up, iterations in execution tg_one_exec = yml.get(ScenarioExecutor.EXEC)[0] tg_two_exec = yml.get(ScenarioExecutor.EXEC)[1] tg_three_exec = yml.get(ScenarioExecutor.EXEC)[2] self.assertEqual(tg_one_exec.get("concurrency"), 10) self.assertEqual(tg_two_exec.get("concurrency"), 15) self.assertEqual(tg_three_exec.get("concurrency"), 1) self.assertEqual(tg_one_exec.get("ramp-up"), '10s') self.assertEqual(tg_two_exec.get("ramp-up"), '60s') self.assertEqual(tg_three_exec.get("ramp-up"), '2s') self.assertEqual(tg_one_exec.get("iterations"), 1) self.assertEqual(tg_two_exec.get("iterations"), 1) self.assertEqual(tg_three_exec.get("iterations"), 100) def test_xpath_assertions(self): obj = self._get_jmx2yaml("/resources/yaml/converter/assertions.jmx") obj.process() yml = yaml.load(open(obj.dst_file).read()) tg = yml.get("scenarios").get("tg3") assertions = tg.get("assert-xpath") self.assertEqual(len(assertions), 2) self.assertEqual( assertions[0], { "xpath": "/note/to", "ignore-whitespace": False, "invert": False, 
"validate-xml": False, "use-tolerant-parser": False, }) self.assertEqual( assertions[1], { "xpath": "/note/from", "ignore-whitespace": True, "invert": True, "validate-xml": True, "use-tolerant-parser": True, }) def test_extractors(self): obj = self._get_jmx2yaml("/resources/yaml/converter/extractors.jmx") obj.process() yml = yaml.load(open(obj.dst_file).read()) tg_one = yml.get("scenarios").get("tg1") tg_two = yml.get("scenarios").get("tg2") tg_three = yml.get("scenarios").get("tg3") tg_one_extractors = tg_one.get("extract-regexp") tg_two_extractors = tg_two.get("extract-regexp") self.assertEqual(len(tg_one_extractors), 1) # global self.assertEqual(len(tg_two_extractors), 1) # global + local - ignored tg_one_req_exr = tg_one.get("requests")[0].get("extract-regexp", {}) self.assertEqual(len(tg_one_req_exr), 2) expected = { 'template': '1', 'match-no': 1, 'regexp': '*tg1hr1', 'default': 'default' } self.assertEqual(expected, tg_one_req_exr.get("test_tg1hr1")) # test extract-jsonpath tg_one_extractors = tg_one.get("extract-jsonpath") tg_two_extractors = tg_two.get("extract-jsonpath") self.assertEqual(len(tg_one_extractors), 5) # 4x global + local self.assertEqual(len(tg_two_extractors), 4) # 4x global tg_three_req_exr = tg_three.get("requests")[0].get( "extract-jsonpath", {}) self.assertEqual(len(tg_three_req_exr), 1) # 1x local # test extract-xpath tg_three_extractors = tg_three.get("extract-xpath") self.assertEqual(len(tg_three_extractors), 2) # 2 global self.assertEqual( tg_three_extractors['bookAuthor'], { "xpath": "/books/[@title()='1984']/author", "default": "no_author", "ignore-whitespace": False, "validate-xml": False, "use-tolerant-parser": False, }) self.assertEqual( tg_three_extractors['author'], { "xpath": "/books/[@title()='Fahrenheit 451']/author", "default": "no", "ignore-whitespace": True, "validate-xml": True, "use-tolerant-parser": False, }) self.assertEqual(tg_one_extractors['VAR1'], { "jsonpath": "$.foo", "default": "DEF_1", }) self.assertEqual(tg_one_extractors['VAR2'], { "jsonpath": "$.bar", "default": "DEF_2", }) def test_request_body(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/resources/yaml/converter/extractors.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get("tg1") tg_two = yml.get("scenarios").get("tg2") tg_one_req_one_body = tg_one.get("requests")[0].get("body") self.assertEqual(tg_one_req_one_body, "body-string") tg_one_req_one_body = tg_one.get("requests")[1].get("body") self.assertEqual(tg_one_req_one_body, { "body_param1": "value1", "body_param2": "value2" }) tg_two_req_one_body = tg_two.get("requests")[0].get("body") self.assertEqual(tg_two_req_one_body, None) def test_json_body(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/resources/yaml/converter/json_body.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) reqs1 = yml.get("scenarios").get("tg1")['requests'] reqs2 = yml.get("scenarios").get("tg2")['requests'] bodies = {req['label']: req.get('body', None) for req in reqs1 + reqs2} targets = { 'r1_1': None, 'r1_2': list, 'r1_3': str, 'r1_4': dict, 'r2_1': None, 'r2_2': dict, 'r2_3': str, 'r2_4': str } for label in targets: self.assertTrue((bodies[label] is None and targets[label] is None) or isinstance(bodies[label], targets[label])) def test_duration_throughput(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/resources/yaml/converter/duration.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get(ScenarioExecutor.EXEC)[0] tg_two = 
yml.get(ScenarioExecutor.EXEC)[1] tg_three = yml.get(ScenarioExecutor.EXEC)[2] self.assertEqual("10s", tg_one.get("ramp-up")) self.assertEqual("60s", tg_one.get("hold-for")) self.assertEqual("10s", tg_one.get("ramp-up")) self.assertEqual(100, tg_one.get("throughput")) self.assertEqual("10s", tg_two.get("ramp-up")) self.assertEqual("20s", tg_two.get("hold-for")) self.assertEqual(20, tg_two.get("throughput")) self.assertEqual("60s", tg_three.get("ramp-up")) self.assertEqual("40s", tg_three.get("hold-for")) self.assertEqual(100, tg_three.get("throughput")) def test_all(self): obj = self._get_jmx2yaml("/resources/yaml/converter/disabled.jmx") obj.process() yml1 = __dir__() + "/resources/yaml/converter/disabled.yml" yml2 = obj.dst_file self.assertTrue(self.same_yaml(yml1, yml2)) pass def test_params_conversion(self): obj = self._get_jmx2yaml( "/resources/yaml/converter/params_conversion.jmx") self.sniff_log(obj.log) obj.process() yml1 = obj.dst_file yml2 = __dir__() + "/resources/yaml/converter/params_conversion.yml" self.assertTrue(self.same_yaml(yml1, yml2)) self.assertNotIn('n1', self.log_recorder.warn_buff.getvalue()) self.assertNotIn('n2', self.log_recorder.warn_buff.getvalue()) self.assertIn('n1_101', self.log_recorder.debug_buff.getvalue()) self.assertIn('n1_011', self.log_recorder.debug_buff.getvalue()) self.assertIn('n1_001', self.log_recorder.debug_buff.getvalue()) def test_param_null(self): obj = self._get_jmx2yaml("/resources/yaml/converter/param-null.jmx") obj.process() def test_load_profile_default_values(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/resources/yaml/converter/default.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) execution = yml.get(ScenarioExecutor.EXEC)[0] self.assertEqual("60s", execution.get("ramp-up")) self.assertEqual("60s", execution.get("hold-for")) self.assertEqual(1, execution.get("concurrency")) self.assertEqual(1, execution.get("iterations")) def test_variables(self): obj = self._get_jmx2yaml("/resources/yaml/converter/vars.jmx") obj.process() yml = yaml.load(open(obj.dst_file).read()) scenarios = yml.get("scenarios") tg_one = scenarios["TG1"] self.assertEqual( tg_one.get('variables'), { "tg1_local": "tg1", "global_var": "global", "auth_token": "shouldn't be masked" }) tg_two = scenarios["TG2"] self.assertEqual( tg_two.get('variables'), { "tg2_local": "tg2", "global_var": "global", "auth_token": "shouldn't be masked" }) def test_no_variables(self): obj = self._get_jmx2yaml("/resources/yaml/converter/default.jmx") obj.process() yml = yaml.load(open(obj.dst_file).read()) execution = yml.get(ScenarioExecutor.EXEC)[0] scenarios = yml.get("scenarios") scenario = scenarios[execution.get("scenario")] self.assertNotIn("variables", scenario) def test_controllers_to_requests(self): obj = self._get_jmx2yaml("/resources/yaml/converter/controllers.jmx") obj.process() yml1 = __dir__() + "/resources/yaml/converter/controllers.yml" yml2 = obj.dst_file self.assertTrue(self.same_yaml(yml1, yml2)) def test_jsr223(self): obj = self._get_jmx2yaml("/resources/jmeter/jmx/jsr223.jmx") obj.process() yml = yaml.load(open(obj.dst_file).read()) scenarios = yml.get("scenarios") scenario = scenarios["Thread Group"] requests = scenario["requests"] self.assertEqual(len(requests), 1) request = requests[0] self.assertIn("jsr223", request) jsrs = request["jsr223"] self.assertIsInstance(jsrs, list) self.assertEqual(len(jsrs), 5) self.assertEqual(jsrs[0]["language"], "beanshell") self.assertEqual(jsrs[0]["script-text"], "scripty") 
self.assertEqual(jsrs[0]["parameters"], "parames") self.assertNotIn('script-file', jsrs[0]) self.assertEqual(jsrs[1]["language"], "javascript") self.assertEqual(jsrs[1]["script-text"], u'console.log("ПРИВЕТ");\nline("2");') self.assertEqual(jsrs[1]["parameters"], "a b c") self.assertNotIn('script-file', jsrs[1]) self.assertEqual(jsrs[2]["language"], "javascript") self.assertEqual(jsrs[2]["script-file"], "script.js") self.assertEqual(jsrs[2]["parameters"], None) self.assertNotIn('script-text', jsrs[2]) self.assertEqual(jsrs[3]["language"], "beanshell") self.assertEqual(jsrs[3]["execute"], "before") self.assertEqual(jsrs[3]["parameters"], None) self.assertEqual(jsrs[3]['script-text'], 'console.log("beanshell aka jsr223");') self.assertNotIn('script-file', jsrs[3]) self.assertEqual(jsrs[4]["language"], "java") self.assertEqual(jsrs[4]["execute"], "before") self.assertEqual(jsrs[4]["parameters"], None) self.assertIn('BlazeDemo.java', jsrs[4]['script-file']) self.assertNotIn('script-text', jsrs[4]) self.assertTrue( os.path.exists( os.path.join(get_full_path(obj.dst_file, step_up=1), 'script.js'))) def test_unicode(self): obj = self._get_jmx2yaml("/resources/yaml/converter/unicode.jmx") obj.process() def test_path_without_domain(self): obj = self._get_jmx2yaml("/resources/jmeter/jmx/http.jmx") obj.process() yml = yaml.load(open(obj.dst_file).read()) scenarios = yml.get("scenarios") scenario = scenarios["Thread Group"] requests = scenario["requests"] self.assertEqual(len(requests), 3) without_domain = requests[2] self.assertEqual(without_domain['url'], '/path') def test_request_content_encoding(self): obj = self._get_jmx2yaml("/resources/jmeter/jmx/http.jmx") obj.process() yml = yaml.load(open(obj.dst_file).read()) scenarios = yml.get("scenarios") scenario = scenarios["Thread Group"] requests = scenario["requests"] self.assertEqual(len(requests), 3) request = requests[1] self.assertEqual(request['content-encoding'], 'utf-8') def test_request_redirect_policy(self): obj = self._get_jmx2yaml("/resources/jmeter/jmx/http.jmx") obj.process() yml = yaml.load(open(obj.dst_file).read()) scenarios = yml.get("scenarios") scenario = scenarios["Thread Group"] requests = scenario["requests"] self.assertEqual(len(requests), 3) self.assertEqual(requests[0].get('follow-redirects'), True) self.assertEqual(requests[1].get('follow-redirects'), True) self.assertEqual(requests[2].get('follow-redirects'), False)
def setUp(self):
    super(SeleniumTestCase, self).setUp()
    self.engine_obj = EngineEmul()
    self.paths = [__dir__() + "/../../bzt/10-base.json", local_paths_config()]
    self.engine_obj.configure(self.paths)
    self.selenium_config = self.engine_obj.config["modules"]["selenium"]
class TestConverter(BZTestCase): def setUp(self): self.engine = EngineEmul() def _get_jmx2yaml(self, path, file_name=None, dump_jmx=False): return JMX2YAML(FakeOptions(file_name=file_name, dump_jmx=dump_jmx), __dir__() + path) def _get_tmp(self, prefix='test', suffix='.yml'): return self.engine.create_artifact(prefix, suffix) def test_loadjmx1(self): log_recorder = RecordingHandler() obj = self._get_jmx2yaml("/jmeter/jmx/http.jmx", self._get_tmp()) obj.log.addHandler(log_recorder) obj.process() self.assertIn("Loading jmx file", log_recorder.info_buff.getvalue()) self.assertNotEqual("", log_recorder.debug_buff.getvalue()) self.assertEqual("", log_recorder.err_buff.getvalue()) obj.log.removeHandler(log_recorder) def test_loadjmx2(self): log_recorder = RecordingHandler() obj = self._get_jmx2yaml("/jmeter/jmx/notfound.jmx") obj.log.addHandler(log_recorder) try: obj.process() self.fail() except BaseException as exc: self.assertIn("File does not exist", exc.args[0]) self.assertIn("Loading jmx file", log_recorder.info_buff.getvalue()) self.assertIn("does not exist", log_recorder.err_buff.getvalue()) self.assertEqual("", log_recorder.debug_buff.getvalue()) obj.log.removeHandler(log_recorder) def test_loadjmx3(self): log_recorder = RecordingHandler() obj = self._get_jmx2yaml("/jmeter/jmx/broken.jmx") obj.log.addHandler(log_recorder) try: obj.process() self.fail() except BaseException as exc: self.assertIn("XML parsing failed", exc.args[0]) self.assertIn("Loading jmx file", log_recorder.info_buff.getvalue()) self.assertIn("Error while processing jmx file", log_recorder.err_buff.getvalue()) self.assertIn("XML parsing error", log_recorder.debug_buff.getvalue()) obj.log.removeHandler(log_recorder) def test_loadjmx4(self): log_recorder = RecordingHandler() obj = self._get_jmx2yaml("/jmeter/jmx/http.jmx", self._get_tmp('tmp', 'file')) obj.log.addHandler(log_recorder) obj.process() self.assertIn("Loading jmx file", log_recorder.info_buff.getvalue()) self.assertIn("Done processing, result saved in", log_recorder.info_buff.getvalue()) self.assertIn("Removing unknown element", log_recorder.warn_buff.getvalue()) obj.log.removeHandler(log_recorder) def test_export_clean_jmx(self): tmp_jmx_name = self._get_tmp('tmp', '.jmx') open(tmp_jmx_name, 'w+').close() # touch file yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/disabled.jmx", yml, dump_jmx=tmp_jmx_name) log_recorder = RecordingHandler() obj.log.addHandler(log_recorder) obj.process() self.assertIn("Loading jmx file", log_recorder.info_buff.getvalue()) self.assertIn("already exists and will be overwritten", log_recorder.warn_buff.getvalue()) obj.log.removeHandler(log_recorder) def test_not_jmx(self): obj = self._get_jmx2yaml("/jmeter/jmx/not-jmx.xml") try: obj.process() self.fail() except BaseException as exc: self.assertIn("Bad jmx format", exc.args[0]) def test_clean_disabled_jmx(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/disabled.jmx", yml) obj.process() disabled_elements = [element for element in obj.converter.dialect.tree.iter() if element.get("enabled") == "false"] self.assertEquals(0, len(disabled_elements)) def test_copy_global_csv_dataset(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/global_copy.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) datasets_first_tg = yml.get("scenarios").get("Thread Group one").get("data-sources") datasets_second_tg = yml.get("scenarios").get("Thread Group two").get("data-sources") global_csv_tg_one = [dataset for dataset in 
datasets_first_tg if dataset.get('path') == 'global.csv'] global_csv_tg_two = [dataset for dataset in datasets_second_tg if dataset.get('path') == 'global.csv'] local_csv_tg_one = [dataset for dataset in datasets_first_tg if dataset.get('path') == 'local.csv'] local_csv_tg_two = [dataset for dataset in datasets_second_tg if dataset.get('path') == 'local.csv'] self.assertEqual(len(global_csv_tg_one), len(global_csv_tg_two), 1) self.assertEqual(len(local_csv_tg_one), 1) self.assertEqual(len(local_csv_tg_two), 0) def test_copy_global_headers(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/global_copy.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) headers_first_tg = yml.get("scenarios").get("Thread Group one").get("headers", []) headers_second_tg = yml.get("scenarios").get("Thread Group two").get("headers", []) self.assertEqual(len(headers_first_tg), 3) self.assertEqual(len(headers_second_tg), 2) def test_cache_cookie_dns_overrides(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/global_copy.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get('Thread Group one') tg_two = yml.get("scenarios").get('Thread Group two') cache_first_tg = tg_one.get("store-cache") cache_second_tg = tg_two.get("store-cache") cookie_first_tg = tg_one.get("store-cookie") cookie_second_tg = tg_two.get("store-cookie") dns_cache_mgr_first_tg = tg_one.get("use-dns-cache-mgr") dns_cache_mgr_second_tg = tg_two.get("use-dns-cache-mgr") self.assertEqual(cache_first_tg, True) self.assertEqual(cache_second_tg, True) self.assertEqual(cookie_first_tg, False) self.assertEqual(cookie_second_tg, True) self.assertEqual(dns_cache_mgr_first_tg, True) self.assertEqual(dns_cache_mgr_second_tg, True) def test_think_time_overrides(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/global_copy.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get('Thread Group one') tg_two = yml.get("scenarios").get('Thread Group two') request_tg_two = tg_two.get("requests")[0] tg_one_timer = tg_one.get("think-time") tg_two_timer = tg_two.get("think-time") req_timer = request_tg_two.get("think-time") self.assertEqual(tg_one_timer, "200ms") self.assertEqual(tg_two_timer, "300ms") self.assertEqual(req_timer, "100ms") def test_request_defaults(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/global_copy.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get('Thread Group one') tg_two = yml.get("scenarios").get('Thread Group two') self.assertEqual(tg_one.get("default-address"), "https://127.0.0.2/") self.assertEqual(tg_two.get("default-address"), "http://127.0.0.3:2582/resources/") self.assertEqual(tg_one.get("timeout"), "500ms") self.assertEqual(tg_two.get("timeout"), "100ms") self.assertEqual(tg_one.get("retrieve-resources"), True) self.assertEqual(tg_two.get("retrieve-resources"), True) self.assertEqual(tg_one.get("concurrent-pool-size"), 5) self.assertEqual(tg_two.get("concurrent-pool-size"), 10) def test_copy_global_request_assertions(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/assertions.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get("tg1") tg_two = yml.get("scenarios").get("tg2") tg_one_assertions = tg_one.get("assert") self.assertEqual(len(tg_one_assertions), 2) # global assertion + tg assertion tg_two_assertions = tg_two.get("assert") 
self.assertEqual(len(tg_two_assertions), 1) # global only assertion tg_one_req_one_assertion = tg_one.get("requests")[0].get("assert")[0] expected = {'subject': 'headers', 'contains': ["tg1httpreq1", "tg1httpreq12"], "not": False, 'regexp': True} self.assertEqual(tg_one_req_one_assertion, expected) tg_one_assertion = tg_one.get("assert")[0] expected = {'subject': 'body', 'contains': ["tg1body_text_not_contains"], "not": True, 'regexp': True} self.assertEqual(tg_one_assertion, expected) def test_copy_global_json_assertions(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/assertions.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get("tg1") tg_two = yml.get("scenarios").get("tg2") tg_one_assertions = tg_one.get("assert-jsonpath") self.assertEqual(len(tg_one_assertions), 1) # global assertion + tg assertion tg_two_assertions = tg_two.get("assert-jsonpath") self.assertEqual(len(tg_two_assertions), 1) # global only assertion tg_one_req_one_jp = tg_one.get("requests")[0].get("assert-jsonpath", []) # no assertions self.assertEqual(len(tg_one_req_one_jp), 0) tg_two_req_one_jp = tg_two.get("requests")[0].get("assert-jsonpath", []) self.assertEqual(len(tg_two_req_one_jp), 1) expected = {"expect-null": True, "invert": True, "jsonpath": '$(":input")', "validate": True} self.assertEqual(expected, tg_two_req_one_jp[0]) # test concurrency, ramp-up, iterations in execution tg_one_exec = yml.get(ScenarioExecutor.EXEC)[0] tg_two_exec = yml.get(ScenarioExecutor.EXEC)[1] tg_three_exec = yml.get(ScenarioExecutor.EXEC)[2] self.assertEqual(tg_one_exec.get("concurrency"), 10) self.assertEqual(tg_two_exec.get("concurrency"), 15) self.assertEqual(tg_three_exec.get("concurrency"), None) self.assertEqual(tg_one_exec.get("ramp-up"), '10s') self.assertEqual(tg_two_exec.get("ramp-up"), None) self.assertEqual(tg_three_exec.get("ramp-up"), '2s') self.assertEqual(tg_one_exec.get("iterations"), None) self.assertEqual(tg_two_exec.get("iterations"), None) self.assertEqual(tg_three_exec.get("iterations"), 100) def test_xpath_assertions(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/assertions.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg = yml.get("scenarios").get("tg3") assertions = tg.get("assert-xpath") self.assertEqual(len(assertions), 2) self.assertEqual(assertions[0], { "xpath": "/note/to", "ignore-whitespace": False, "invert": False, "validate-xml": False, "use-tolerant-parser": False, }) self.assertEqual(assertions[1], { "xpath": "/note/from", "ignore-whitespace": True, "invert": True, "validate-xml": True, "use-tolerant-parser": True, }) def test_extractors(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/extractors.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get("tg1") tg_two = yml.get("scenarios").get("tg2") tg_three = yml.get("scenarios").get("tg3") tg_one_extractors = tg_one.get("extract-regexp") tg_two_extractors = tg_two.get("extract-regexp") self.assertEqual(len(tg_one_extractors), 1) # global self.assertEqual(len(tg_two_extractors), 1) # global + local - ignored tg_one_req_exr = tg_one.get("requests")[0].get("extract-regexp", {}) self.assertEqual(len(tg_one_req_exr), 2) expected = {'template': '1', 'match-no': 1, 'regexp': '*tg1hr1', 'default': 'default'} self.assertEqual(expected, tg_one_req_exr.get("test_tg1hr1")) # test extract-jsonpath tg_one_extractors = tg_one.get("extract-jsonpath") tg_two_extractors = tg_two.get("extract-jsonpath") 
self.assertEqual(len(tg_one_extractors), 3) # 2x global + local self.assertEqual(len(tg_two_extractors), 2) # 2x global tg_three_req_exr = tg_three.get("requests")[0].get("extract-jsonpath", {}) self.assertEqual(len(tg_three_req_exr), 1) # 1x local # test extract-xpath tg_three_extractors = tg_three.get("extract-xpath") self.assertEqual(len(tg_three_extractors), 2) # 2 global self.assertEqual(tg_three_extractors['bookAuthor'], { "xpath": "/books/[@title()='1984']/author", "default": "no_author", "ignore-whitespace": False, "validate-xml": False, "use-tolerant-parser": False, }) self.assertEqual(tg_three_extractors['author'], { "xpath": "/books/[@title()='Fahrenheit 451']/author", "default": "no", "ignore-whitespace": True, "validate-xml": True, "use-tolerant-parser": False, }) def test_request_body(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/extractors.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get("scenarios").get("tg1") tg_two = yml.get("scenarios").get("tg2") tg_one_req_one_body = tg_one.get("requests")[0].get("body") self.assertEqual(tg_one_req_one_body, "body-string") tg_one_req_one_body = tg_one.get("requests")[1].get("body") self.assertEqual(tg_one_req_one_body, {"body_param1": "value1", "body_param2": "value2"}) tg_two_req_one_body = tg_two.get("requests")[0].get("body") self.assertEqual(tg_two_req_one_body, None) def test_duration_throughput(self): yml = self._get_tmp() obj = self._get_jmx2yaml("/yaml/converter/duration.jmx", yml) obj.process() yml = yaml.load(open(yml).read()) tg_one = yml.get(ScenarioExecutor.EXEC)[0] tg_two = yml.get(ScenarioExecutor.EXEC)[1] tg_three = yml.get(ScenarioExecutor.EXEC)[2] self.assertEqual("10s", tg_one.get("ramp-up")) self.assertEqual(None, tg_one.get("hold-for")) self.assertEqual("10s", tg_one.get("ramp-up")) self.assertEqual(100, tg_one.get("throughput")) self.assertEqual("10s", tg_two.get("ramp-up")) self.assertEqual("20s", tg_two.get("hold-for")) self.assertEqual(20, tg_two.get("throughput")) self.assertEqual(None, tg_three.get("ramp-up")) self.assertEqual("40s", tg_three.get("hold-for")) self.assertEqual(100, tg_three.get("throughput")) def test_all(self): obj = self._get_jmx2yaml("/yaml/converter/disabled.jmx", self._get_tmp()) obj.process() yml = yaml.load(open(__dir__() + "/yaml/converter/disabled.yml").read()) self.assertEqual(obj.converter.convert(obj.file_to_convert), yml) def test_param_null(self): obj = self._get_jmx2yaml("/yaml/converter/param-null.jmx", self._get_tmp()) obj.process() obj.converter.convert(obj.file_to_convert)
def test_pack_and_send_to_blazemeter(self):
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        "execution": {
            "executor": "selenium",
            "concurrency": 5500,
            "locations": {
                "us-east-1": 1,
                "us-west": 2
            },
            "scenario": {
                "script": RESOURCES_DIR + "selenium/junit/java_package"
            }
        },
        "modules": {
            "selenium": "bzt.modules.selenium.SeleniumExecutor",
            "cloud": "bzt.modules.blazemeter.CloudProvisioning",
            "junit": "bzt.modules.java.JUnitTester"
        },
        "provisioning": "cloud"
    })
    obj.parameters = obj.engine.config['execution']
    obj.settings["token"] = "FakeToken"

    mock = BZMock(obj.user)
    mock.mock_get.update({
        'https://a.blazemeter.com/api/v4/web/elfinder/1?cmd=open&target=s1_Lw': {"files": []},
        'https://a.blazemeter.com/api/v4/multi-tests?projectId=1&name=Taurus+Cloud+Test': {"result": []},
        'https://a.blazemeter.com/api/v4/tests?projectId=1&name=Taurus+Cloud+Test': {"result": [
            {"id": 1, 'name': 'Taurus Cloud Test', "configuration": {"type": "taurus"}}
        ]},
    })
    mock.mock_post.update({
        'https://a.blazemeter.com/api/v4/projects': {"result": {"id": 1, 'workspaceId': 1}},
        'https://a.blazemeter.com/api/v4/multi-tests': {"result": {}},
        'https://a.blazemeter.com/api/v4/tests?projectId=1&name=Taurus+Cloud+Test': {"result": {
            "id": 1, "configuration": {"type": "taurus"}
        }},
        'https://a.blazemeter.com/api/v4/tests/1/files': {}
    })
    mock.mock_patch.update({'https://a.blazemeter.com/api/v4/tests/1': {"result": {}}})

    obj.prepare()
    unpack_cfgs = obj.engine.config.get(Service.SERV)
    self.assertEqual(len(unpack_cfgs), 1)
    self.assertEqual(unpack_cfgs[0]['module'], Unpacker.UNPACK)
    self.assertEqual(unpack_cfgs[0][Unpacker.FILES], ['java_package.zip'])
    self.assertTrue(zipfile.is_zipfile(obj.engine.artifacts_dir + '/java_package.zip'))
def setUp(self):
    super(ExecutorTestCase, self).setUp()
    self.engine = EngineEmul()
    self.obj = self.EXECUTOR()
    self.obj.engine = self.engine
def setUp(self):
    super(TestSiegeExecutor, self).setUp()
    self.obj = SiegeExecutor()
    self.obj.engine = EngineEmul()
    self.obj.engine.aggregator = ConsolidatingAggregator()
    self.obj.settings.merge({"path": TOOL_PATH})
def getGatling(self):
    path = os.path.abspath(__dir__() + "/../gatling/gatling" + EXE_SUFFIX)
    obj = GatlingExecutor()
    obj.engine = EngineEmul()
    obj.settings.merge({"path": path})
    return obj