def setUp(self):
    self.obj = ShellExecutor()
    self.obj.parameters = BetterDict()
    self.obj.engine = EngineEmul()
    self.obj.engine.config.merge({"provisioning": "local"})
    self.log_recorder = RecordingHandler()
    self.obj.log.addHandler(self.log_recorder)

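# The tests below all rely on a RecordingHandler helper. For context, here is
# a minimal sketch of such a handler, assuming it is a plain logging.Handler
# that collects formatted messages into per-level StringIO buffers. The
# attribute names (info_buff, warn_buff, debug_buff, err_buff) are taken from
# how the tests read them; the class below is a hypothetical stand-in, and the
# real helper in the repo may differ in detail.
from io import StringIO  # needed only by the sketch below


class RecordingHandlerSketch(logging.Handler):  # hypothetical stand-in
    def __init__(self):
        super(RecordingHandlerSketch, self).__init__(logging.DEBUG)
        self.info_buff = StringIO()
        self.warn_buff = StringIO()
        self.debug_buff = StringIO()
        self.err_buff = StringIO()

    def emit(self, record):
        # route each record to the buffer that matches its severity
        msg = record.getMessage()
        if record.levelno == logging.INFO:
            self.info_buff.write(msg + "\n")
        elif record.levelno == logging.WARNING:
            self.warn_buff.write(msg + "\n")
        elif record.levelno == logging.DEBUG:
            self.debug_buff.write(msg + "\n")
        elif record.levelno >= logging.ERROR:
            self.err_buff.write(msg + "\n")
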
def test_log_messages_percentiles(self):
    obj = FinalStatus()
    obj.engine = EngineEmul()
    obj.parameters = BetterDict()
    log_recorder = RecordingHandler()
    obj.log.addHandler(log_recorder)
    obj.parameters.merge({
        "failed-labels": False,
        "percentiles": True,
        "summary": False,
        "test-duration": False
    })
    obj.aggregated_second(self.__get_datapoint())
    obj.post_process()
    target_output = ("Average times: total 0.001, latency 0.000, connect 0.000\n"
                     "Percentile 0.0%: 0.000\n"
                     "Percentile 50.0%: 0.000\n"
                     "Percentile 90.0%: 0.001\n"
                     "Percentile 95.0%: 0.001\n"
                     "Percentile 99.0%: 0.003\n"
                     "Percentile 99.9%: 0.008\n"
                     "Percentile 100.0%: 0.081\n")
    self.assertEqual(target_output, log_recorder.info_buff.getvalue())
    obj.log.removeHandler(log_recorder)

def test_streams(self):
    self.log = logging.getLogger('')
    handler = RecordingHandler()
    self.log.addHandler(handler)

    print('test1')
    with log_std_streams(logger=self.log, stdout_level=logging.DEBUG):
        print('test2')
    with log_std_streams(stdout_level=logging.DEBUG):
        print('test3')
    with log_std_streams(stdout_level=logging.DEBUG):
        sys.stdout.write('test3')
    with log_std_streams(logger=self.log, stdout_level=logging.DEBUG):
        process = Popen(['echo', '"test5"'])
        process.wait()
    missed_file = get_uniq_name('.', 'test6', '')
    with log_std_streams(logger=self.log, stderr_level=logging.WARNING):
        process = Popen(['dir', missed_file])
        process.wait()

    self.log.removeHandler(handler)

    debug_buf = handler.debug_buff.getvalue()
    warn_buf = handler.warn_buff.getvalue()
    self.assertNotIn('test1', debug_buf)
    self.assertIn('test2', debug_buf)
    self.assertNotIn('test3', debug_buf)
    self.assertIn('test5', debug_buf)
    self.assertTrue(len(warn_buf) > 0)

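# Note on the assertions above: log_std_streams forwards captured stream
# output to a logger only when one is passed via its `logger` argument.
# That is why 'test2' and 'test5' (captured with logger=self.log) show up
# in the debug buffer while both 'test3' writes (no logger given) do not,
# and why the failing `dir` call on the missing file lands in the warning
# buffer via stderr_level=logging.WARNING.
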
def test_css_jquery_extractor(self):
    obj = JMeterExecutor()
    handler = RecordingHandler()
    obj.log.addHandler(handler)
    obj.engine = EngineEmul()
    obj.engine.config = json.loads(open(__dir__() + "/../json/get-post.json").read())
    obj.execution = obj.engine.config['execution']
    obj.prepare()
    target_jmx = os.path.join(obj.engine.artifacts_dir, "requests.jmx")
    modified_xml_tree = etree.fromstring(open(target_jmx, "rb").read())
    jq_css_extractors = modified_xml_tree.findall(".//HtmlExtractor")
    self.assertEqual(2, len(jq_css_extractors))

    simplified_extractor = modified_xml_tree.find(".//HtmlExtractor[@testname='Get name1']")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.refname']").text, "name1")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.expr']").text,
                     "input[name~=my_input]")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.attribute']").text, None)
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.match_number']").text, "0")
    self.assertEqual(simplified_extractor.find(".//stringProp[@name='HtmlExtractor.default']").text, "NOT_FOUND")

    full_form_extractor = modified_xml_tree.find(".//HtmlExtractor[@testname='Get name2']")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.refname']").text, "name2")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.expr']").text, "input[name=JMeter]")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.attribute']").text, "value")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.match_number']").text, "1")
    self.assertEqual(full_form_extractor.find(".//stringProp[@name='HtmlExtractor.default']").text, "NV_JMETER")

    obj.log.removeHandler(handler)

def test_skip_if_no_requests(self):
    log_recorder = RecordingHandler()
    obj = SoapUIScriptConverter(logging.getLogger(''))
    obj.log.addHandler(log_recorder)

    obj.convert_script(__dir__() + "/../soapui/project.xml")

    self.assertIn("No requests extracted for scenario TestSuite 1-EmptyTestCase, skipping it",
                  log_recorder.warn_buff.getvalue())

def test_loadjmx1(self):
    log_recorder = RecordingHandler()
    obj = self._get_jmx2yaml("/jmx/http.jmx", self._get_tmp())
    obj.log.addHandler(log_recorder)
    obj.process()
    self.assertIn("Loading jmx file", log_recorder.info_buff.getvalue())
    self.assertNotEqual("", log_recorder.debug_buff.getvalue())
    self.assertEqual("", log_recorder.err_buff.getvalue())
    obj.log.removeHandler(log_recorder)

def test_loadjmx4(self):
    log_recorder = RecordingHandler()
    obj = self._get_jmx2yaml("/jmeter/jmx/http.jmx", self._get_tmp('tmp', 'file'))
    obj.log.addHandler(log_recorder)
    obj.process()
    self.assertIn("Loading jmx file", log_recorder.info_buff.getvalue())
    self.assertIn("Done processing, result saved in", log_recorder.info_buff.getvalue())
    self.assertIn("Removing unknown element", log_recorder.warn_buff.getvalue())
    obj.log.removeHandler(log_recorder)

def test_loadjmx1(self):
    log_recorder = RecordingHandler()
    obj = JMX2YAML(FakeOptions(file_name=self.temp_yaml()), "tests/jmx/http.jmx")
    obj.log.addHandler(log_recorder)
    obj.process()
    self.assertIn("Loading jmx file", log_recorder.info_buff.getvalue())
    self.assertNotEqual("", log_recorder.debug_buff.getvalue())
    self.assertEqual("", log_recorder.err_buff.getvalue())
    obj.log.removeHandler(log_recorder)

def test_objprop(self):
    log_recorder = RecordingHandler()
    obj = self._get_jmx2yaml("/jmeter/jmx/http.jmx", self._get_tmp())
    obj.log.addHandler(log_recorder)
    obj.process()
    self.assertNotIn("Removing unknown element: name (None)", log_recorder.warn_buff.getvalue())
    self.assertNotIn("Removing unknown element: value (None)", log_recorder.warn_buff.getvalue())
    obj.log.removeHandler(log_recorder)

def test_loadjmx4(self):
    log_recorder = RecordingHandler()
    with tempfile.NamedTemporaryFile() as tmp_file:
        obj = JMX2YAML(FakeOptions(file_name=tmp_file.name), "tests/jmx/http.jmx")
        obj.log.addHandler(log_recorder)
        obj.process()
        self.assertIn("Loading jmx file", log_recorder.info_buff.getvalue())
        self.assertIn("Done processing, result saved in", log_recorder.info_buff.getvalue())
        self.assertIn("Removing unknown element", log_recorder.warn_buff.getvalue())
        obj.log.removeHandler(log_recorder)

def test_export_clean_jmx(self):
    with tempfile.NamedTemporaryFile() as tmp_jmx:
        obj = JMX2YAML(FakeOptions(dump_jmx=tmp_jmx.name, file_name=self.temp_yaml()),
                       "tests/yaml/converter/disabled.jmx")
        log_recorder = RecordingHandler()
        obj.log.addHandler(log_recorder)
        obj.process()
        self.assertIn("Loading jmx file", log_recorder.info_buff.getvalue())
        self.assertIn("already exists and will be overwritten", log_recorder.warn_buff.getvalue())
        obj.log.removeHandler(log_recorder)

def test_log_messages_samples_count(self):
    obj = FinalStatus()
    obj.engine = EngineEmul()
    obj.parameters = BetterDict()
    log_recorder = RecordingHandler()
    obj.log.addHandler(log_recorder)
    obj.parameters.merge({"failed-labels": False, "percentiles": False, "summary": True, "test-duration": False})
    obj.aggregated_second(self.__get_datapoint())
    obj.post_process()
    self.assertEqual("Samples count: 59314, 50.00% failures\n", log_recorder.info_buff.getvalue())
    obj.log.removeHandler(log_recorder)

def test_log_messages_failed_labels(self):
    obj = FinalStatus()
    obj.engine = EngineEmul()
    obj.parameters = BetterDict()
    log_recorder = RecordingHandler()
    obj.log.addHandler(log_recorder)
    obj.parameters.merge({"failed-labels": True, "percentiles": False, "summary": False, "test-duration": False})
    obj.aggregated_second(self.__get_datapoint())
    obj.post_process()
    self.assertIn("29656 failed samples: http://192.168.1.1/anotherquery\n", log_recorder.info_buff.getvalue())
    obj.log.removeHandler(log_recorder)

def test_loadjmx3(self):
    log_recorder = RecordingHandler()
    obj = self._get_jmx2yaml("/jmeter/jmx/broken.jmx")
    obj.log.addHandler(log_recorder)
    try:
        obj.process()
        self.fail()
    except BaseException as exc:
        self.assertIn("XML parsing failed", exc.args[0])
    self.assertIn("Loading jmx file", log_recorder.info_buff.getvalue())
    self.assertIn("Error while processing jmx file", log_recorder.err_buff.getvalue())
    obj.log.removeHandler(log_recorder)

def test_loadjmx2(self):
    log_recorder = RecordingHandler()
    obj = self._get_jmx2yaml("/jmeter/jmx/notfound.jmx")
    obj.log.addHandler(log_recorder)
    try:
        obj.process()
        self.fail()
    except BaseException as exc:
        self.assertIn("File does not exist", exc.args[0])
    self.assertIn("Loading jmx file", log_recorder.info_buff.getvalue())
    self.assertEqual("", log_recorder.debug_buff.getvalue())
    obj.log.removeHandler(log_recorder)

def test_loadjmx3(self):
    log_recorder = RecordingHandler()
    obj = JMX2YAML(FakeOptions(), "tests/jmx/broken.jmx")
    obj.log.addHandler(log_recorder)
    try:
        obj.process()
        self.fail()
    except BaseException as exc:
        self.assertIn("XML parsing failed", exc.args[0])
    self.assertIn("Loading jmx file", log_recorder.info_buff.getvalue())
    self.assertIn("Error while processing jmx file", log_recorder.err_buff.getvalue())
    self.assertIn("XML parsing error", log_recorder.debug_buff.getvalue())
    obj.log.removeHandler(log_recorder)

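# A note on the try/fail/except pattern in the loadjmx error tests above:
# if process() unexpectedly succeeds, self.fail() raises AssertionError,
# which the broad `except BaseException` clause also catches; the assertion
# on exc.args[0] then errors out, so the test still cannot pass silently,
# it just reports a less direct failure message.
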
def test_export_clean_jmx(self):
    tmp_jmx_name = self._get_tmp('tmp', '.jmx')
    open(tmp_jmx_name, 'w+').close()  # touch file
    yml = self._get_tmp()
    obj = self._get_jmx2yaml("/yaml/converter/disabled.jmx", yml, dump_jmx=tmp_jmx_name)
    log_recorder = RecordingHandler()
    obj.log.addHandler(log_recorder)
    obj.process()
    self.assertIn("Loading jmx file", log_recorder.info_buff.getvalue())
    self.assertIn("already exists and will be overwritten", log_recorder.warn_buff.getvalue())
    obj.log.removeHandler(log_recorder)

def test_locations_on_both_levels(self):
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    obj.engine.config.merge({
        ScenarioExecutor.EXEC: [{
            "executor": "mock",
            "concurrency": 5500,
            "locations": {
                "eu-west-1": 1,
            }
        }],
        "locations": {
            "ams3": 1,
        },
        "modules": {
            "mock": ModuleMock.__module__ + "." + ModuleMock.__name__
        },
        "provisioning": "mock"
    })
    obj.parameters = obj.engine.config['execution'][0]
    obj.engine.aggregator = ConsolidatingAggregator()

    log_recorder = RecordingHandler()
    obj.log.addHandler(log_recorder)

    obj.settings["token"] = "FakeToken"
    obj.settings["browser-open"] = False
    obj.settings["use-deprecated-api"] = False
    obj.client = client = BlazeMeterClientEmul(obj.log)
    client.results.append({"result": []})  # find test
    client.results.append({"result": []})  # find collection
    client.results.append(self.__get_user_info())  # user
    client.results.append({})  # upload files
    client.results.append({"result": {"name": "Taurus Collection", "items": []}})  # transform config to collection
    client.results.append({"result": {"id": 42}})  # create collection

    obj.prepare()

    cloud_config = yaml.load(open(os.path.join(obj.engine.artifacts_dir, "cloud.yml")))
    self.assertNotIn("locations", cloud_config)
    for execution in cloud_config["execution"]:
        self.assertIn("locations", execution)
    log_buff = log_recorder.warn_buff.getvalue()
    self.assertIn("Each execution has locations specified, global locations won't have any effect", log_buff)

def test_1(self):
    obj = ConsoleStatusReporter()
    handler = RecordingHandler()
    obj.log.addHandler(handler)
    obj.engine = EngineEmul()
    obj.engine.provisioning = Local()
    obj.engine.provisioning.start_time = time.time()
    obj.engine.config[Provisioning.PROV] = ''
    jmeter = self.get_jmeter()
    jmeter.engine = obj.engine
    jmeter.execution[ScenarioExecutor.HOLD_FOR] = 10
    jmeter.execution.merge({'hold-for': 0, 'ramp-up': 0})
    jmeter.delay = 10
    jmeter.prepare()
    widget = jmeter.get_widget()
    widget.update()
    jmeter.startup()
    widget.update()
    obj.engine.provisioning.executors = [jmeter]
    obj.settings["disable"] = False
    obj.settings['dummy_cols'] = 160
    obj.settings['dummy_rows'] = 40
    obj.settings['disable'] = False
    obj.prepare()
    obj.startup()
    obj.check()
    obj.temp_stream.write("test1\n")
    obj.temp_stream.flush()
    obj.temp_stream.write("test1\n")
    obj.temp_stream.flush()
    obj.check()
    for n in range(0, 10):
        point = self.__get_datapoint(n)
        obj.aggregated_second(point)
        obj.temp_stream.write("test %s\n" % n)
        obj.temp_stream.flush()
        obj.check()
    self.assertTrue(obj.screen.started)
    point = self.__get_datapoint(11)
    point[DataPoint.CURRENT][''][KPISet.RESP_CODES][''] = 1
    obj.aggregated_second(point)
    obj.check()
    obj.shutdown()
    obj.post_process()
    obj.log.removeHandler(handler)
    self.assertNotIn('Failed', handler.warn_buff.getvalue())

def test_dump(self):
    obj = FinalStatus()
    obj.engine = EngineEmul()
    obj.parameters = BetterDict()
    log_recorder = RecordingHandler()
    obj.log.addHandler(log_recorder)
    obj.parameters.merge({
        "dump-xml": obj.engine.create_artifact("status", ".xml"),
        "dump-csv": obj.engine.create_artifact("status", ".csv")
    })
    obj.aggregated_second(random_datapoint(time.time()))
    obj.post_process()
    self.assertIn("XML", log_recorder.info_buff.getvalue())

def test_log_messages_duration(self):
    """
    Test duration report
    :return:
    """
    obj = FinalStatus()
    obj.parameters = BetterDict()
    log_recorder = RecordingHandler()
    obj.log.addHandler(log_recorder)
    obj.prepare()
    obj.start_time -= 120005
    obj.post_process()
    self.assertEqual("Test duration: 1 day, 9:20:05\n", log_recorder.info_buff.getvalue())
    obj.log.removeHandler(log_recorder)

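# Sanity check on the expected string above: 120005 seconds is 33:20:05,
# i.e. 1 day plus 9:20:05, which is exactly how Python renders it, assuming
# the report formats the duration via str(datetime.timedelta(seconds=120005)).
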
def test_find_test_case_empty(self):
    log_recorder = RecordingHandler()
    obj = SoapUIScriptConverter(logging.getLogger(''))
    obj.log.addHandler(log_recorder)

    config = obj.convert_script(__dir__() + "/../soapui/project.xml")
    scenarios = config["scenarios"]
    self.assertEqual(len(scenarios), 3)
    target_scenario = scenarios["TestSuite 1-index"]
    found_name, found_scenario = obj.find_soapui_test_case(None, scenarios)
    self.assertEqual(target_scenario, found_scenario)

    self.assertIn("No `test-case` specified for SoapUI project, will use 'index'",
                  log_recorder.warn_buff.getvalue())

def test_func_report(self):
    obj = FinalStatus()
    obj.engine = EngineEmul()
    obj.parameters = BetterDict()
    log_recorder = RecordingHandler()
    obj.log.addHandler(log_recorder)
    obj.prepare()
    obj.aggregated_results(*self.__get_func_tree())
    obj.post_process()
    info_log = log_recorder.info_buff.getvalue()
    self.assertIn("Total: 3 tests", info_log)
    self.assertIn("Test TestClass.case2", info_log)
    self.assertIn("stacktrace2", info_log)
    self.assertIn("Test TestClass.case3", info_log)
    self.assertIn("stacktrace3", info_log)
    obj.log.removeHandler(log_recorder)

def test_shutdown_soft(self):
    obj = JMeterExecutor()
    log_recorder = RecordingHandler()
    obj.log.addHandler(log_recorder)
    obj.engine = EngineEmul()
    obj.execution = BetterDict()
    obj.execution.merge({"scenario": {"script": "tests/jmx/dummy.jmx"}})
    try:
        obj.prepare()
        obj.startup()
        time.sleep(1)
        obj.shutdown()
    except BaseException:
        self.fail()
    finally:
        obj.log.removeHandler(log_recorder)
    self.assertIn("JMeter stopped on Shutdown command", log_recorder.debug_buff.getvalue())

def test_func_report_all_no_stacktrace(self):
    obj = FinalStatus()
    obj.engine = EngineEmul()
    obj.parameters = BetterDict()
    log_recorder = RecordingHandler()
    obj.log.addHandler(log_recorder)
    obj.parameters.merge({"report-tests": "all", "print-stacktrace": False})
    obj.prepare()
    obj.aggregated_results(*self.__get_func_tree())
    obj.post_process()
    info_log = log_recorder.info_buff.getvalue()
    self.assertIn("Total: 3 tests", info_log)
    self.assertIn("Test TestClass.case1 - PASSED", info_log)
    self.assertIn("Test TestClass.case2 - FAILED", info_log)
    self.assertIn("Test TestClass.case3 - BROKEN", info_log)
    self.assertNotIn("stacktrace2", info_log)
    self.assertNotIn("stacktrace3", info_log)
    obj.log.removeHandler(log_recorder)

def test_metrics_reporting(self):
    engine = EngineEmul()
    profiler = ChromeProfiler()
    profiler.engine = engine
    profiler.settings.merge({
        "processors": {
            "trace": {
                "class": "bzt.modules.chrome.TraceProcessor",
                "extractors": [
                    "bzt.modules.chrome.TabNameExtractor",
                    "bzt.modules.chrome.MemoryMetricsExtractor",
                ]
            }
        }
    })
    shutil.copy(__dir__() + "/../chrome/trace.json", engine.artifacts_dir)

    log_recorder = RecordingHandler()
    reporter = MetricReporter()
    reporter.log.addHandler(log_recorder)
    reporter.engine = engine
    engine.services.append(profiler)
    engine.reporters.append(reporter)

    reporter.prepare()
    profiler.prepare()
    reporter.startup()
    profiler.startup()
    profiler.check()
    reporter.check()
    reporter.shutdown()
    reporter.post_process()

    info_buff = log_recorder.info_buff.getvalue()
    self.assertIn("Chrome metrics for tab 'JMeter and Performance Testing for DevOps I BlazeMeter'", info_buff)
    self.assertIn("Memory metrics:", info_buff)
    # remove the handler from the logger it was added to (reporter.log, not profiler.log)
    reporter.log.removeHandler(log_recorder)

def test_public_report(self):
    mock = BZMock()
    mock.mock_get.update({
        'https://a.blazemeter.com/api/v4/tests?workspaceId=1&name=Taurus+Test': {"result": []}
    })
    mock.mock_post.update({
        'https://a.blazemeter.com/api/v4/projects': {"result": {'id': 1}},
        'https://a.blazemeter.com/api/v4/tests': {'result': {'id': 'unittest1'}},
        'https://a.blazemeter.com/api/v4/tests/unittest1/start-external': {"result": {
            'session': {'id': 'sess1', 'userId': 1, 'testId': 1},
            'master': {'id': 'master1', 'userId': 1},
            'signature': ''
        }},
        'https://a.blazemeter.com/api/v4/masters/master1/public-token': {'result': {'publicToken': 'publicToken'}},
        'https://data.blazemeter.com/submit.php?session_id=sess1&signature=&test_id=1&user_id=1&pq=0&target=labels_bulk&update=1': {"result": {'session': {}}},
        'https://a.blazemeter.com/api/v4/image/sess1/files?signature=': {'result': True},
    })

    log_recorder = RecordingHandler()
    obj = BlazeMeterUploader()
    obj.settings['token'] = '123'
    obj.settings['browser-open'] = 'none'
    obj.settings['public-report'] = True
    obj.settings['send-monitoring'] = False
    obj.engine = EngineEmul()
    mock.apply(obj._user)
    obj.log.addHandler(log_recorder)

    obj.prepare()
    obj.startup()
    obj.aggregated_second(random_datapoint(10))
    obj.check()
    obj.shutdown()
    obj.post_process()

    log_buff = log_recorder.info_buff.getvalue()
    log_line = "Public report link: https://a.blazemeter.com/app/?public-token=publicToken#/masters/master1/summary"
    self.assertIn(log_line, log_buff)
    logging.warning("\n".join([x['url'] for x in mock.requests]))
    self.assertEqual(14, len(mock.requests))

def test_public_report(self):
    client = BlazeMeterClientEmul(logging.getLogger(''))
    client.timeout = 1
    client.results.append({"marker": "ping", 'result': {}})
    client.results.append({"marker": "tests", 'result': {}})
    client.results.append({"marker": "test-create", 'result': {'id': 'unittest1'}})
    client.results.append({"marker": "sess-start", "result": {
        'session': {'id': 'sess1', 'userId': 1},
        'master': {'id': 'master1', 'userId': 1},
        'signature': ''}})
    client.results.append({"marker": "share-report", 'result': {'publicToken': 'publicToken'}})
    client.results.append({"marker": "first push", 'result': {'session': {}}})
    client.results.append({"marker": "post-proc push", 'result': {'session': {}}})
    client.results.append({"marker": "artifacts push", 'result': True})
    client.results.append({"marker": "logs push", 'result': True})
    client.results.append({"marker": "terminate", 'result': {'session': {}}})

    log_recorder = RecordingHandler()
    obj = BlazeMeterUploader()
    obj.settings['token'] = '123'
    obj.settings['browser-open'] = 'none'
    obj.settings['public-report'] = True
    obj.settings['send-monitoring'] = False
    obj.engine = EngineEmul()
    obj.client = client
    obj.log.addHandler(log_recorder)

    obj.prepare()
    obj.startup()
    obj.aggregated_second(random_datapoint(10))
    obj.check()
    obj.shutdown()
    obj.post_process()

    self.assertEqual(0, len(client.results))
    log_buff = log_recorder.info_buff.getvalue()
    log_line = "Public report link: https://a.blazemeter.com/app/?public-token=publicToken#/masters/master1/summary"
    self.assertIn(log_line, log_buff)

def test_chrome_proxy(self):
    self.obj.responses = [
        ResponseEmul(200, '{"result" : {}}'),
        ResponseEmul(200, '{"result" : {"port": "port1", "host": "host1"}}'),
        ResponseEmul(200, ''),
        ResponseEmul(200, ''),  # startup: startRecording
        ResponseEmul(200, ''),  # shutdown: stopRecording
        ResponseEmul(200, '{"result" : {"smartjmx": "unavailable"}}'),
        ResponseEmul(200, '{"result" : {"smartjmx": "available"}}'),
        ResponseEmul(200, 'only one string')
    ]
    self.obj.engine.config.merge({'modules': {'recorder': {'token': '123'}}})
    self.obj.settings = self.obj.engine.config.get('modules').get('recorder')

    handler = RecordingHandler()
    self.obj.log.addHandler(handler)
    self.obj.prepare()
    self.obj.engine.provisioning.executors = [SeleniumExecutor()]
    self.obj.log.removeHandler(handler)

    is_linux = 'linux' in sys.platform.lower()
    if is_linux:
        self._check_linux()
    elif is_windows():
        self._check_windows()
    else:  # MacOS, for future and manual testing
        self.assertIn("Your system doesn't support settings of proxy", handler.warn_buff.getvalue())

    self.obj.shutdown()
    self.obj.post_process()

def test_dump_locations_new_style(self):
    obj = CloudProvisioning()
    obj.engine = EngineEmul()
    log_recorder = RecordingHandler()
    obj.log.addHandler(log_recorder)
    obj.settings["dump-locations"] = True
    obj.settings["token"] = "FakeToken"
    obj.settings["use-deprecated-api"] = False
    obj.client = BlazeMeterClientEmul(obj.log)
    obj.client.results.append(self.__get_user_info())
    self.assertRaises(ManualShutdown, obj.prepare)

    warnings = log_recorder.warn_buff.getvalue()
    self.assertIn("Dumping available locations instead of running the test", warnings)
    info = log_recorder.info_buff.getvalue()
    self.assertIn("Location: DFW Dallas (Rackspace)", info)
    self.assertIn("Location: us-west-2 US West (Oregon)", info)
    self.assertIn("Location: harbor-5591335d8588531f5cde3a04 Sandbox", info)

    obj.post_process()