def test_vars(self):
    """Generate an apiritif script using scenario variables.

    Verifies that a `set-variables` request produces a `set_variables`
    label which the aggregator is told to ignore, and that the generated
    script matches the stored expectation file.
    """
    self.configure({
        "execution": [{
            "test-mode": "apiritif",
            "scenario": {
                "variables": {"an": "av"},
                "default-address": "http://localhost:8000/",
                "requests": [
                    "${an}",
                    {"set-variables": {"an": "another_path1", "bn": "another_path2"}},
                    "${an}",
                ],
            },
        }],
    })
    self.obj.engine.aggregator = ConsolidatingAggregator()
    self.obj.prepare()

    self.assertIn("set_variables", self.obj.engine.aggregator.ignored_labels)
    expected_script = RESOURCES_DIR + "/apiritif/test_vars.py"
    self.assertFilesEqual(expected_script, self.obj.script, python_files=True)
def test_load_reader_real2(self):
    """Drain two underling apiritif readers incrementally.

    Polls the consolidating aggregator until it stops producing
    datapoints, checking that per-point current and cumulative
    concurrency never exceed 4 and that the final point reports
    exactly 4 concurrent users.
    """
    first = ApiritifLoadReader(self.obj.log)
    first.engine = EngineEmul()
    first.register_file(RESOURCES_DIR + "jmeter/jtl/apiritif-results/apiritif-0.csv")
    first.register_file(RESOURCES_DIR + "jmeter/jtl/apiritif-results/apiritif-1.csv")

    second = ApiritifLoadReader(self.obj.log)
    second.engine = EngineEmul()
    second.register_file(RESOURCES_DIR + "jmeter/jtl/apiritif-results/apiritif--10.csv")
    second.register_file(RESOURCES_DIR + "jmeter/jtl/apiritif-results/apiritif--11.csv")

    aggregator = ConsolidatingAggregator()
    aggregator.engine = EngineEmul()
    aggregator.add_underling(first)
    aggregator.add_underling(second)

    # The very first poll must yield nothing.
    self.assertEqual(0, len(list(aggregator.datapoints())))

    collected = []
    while True:
        batch = list(aggregator.datapoints())
        collected.extend(batch)
        if not batch:
            break
        for point in batch:
            current = point[DataPoint.CURRENT][''][KPISet.CONCURRENCY]
            logging.info("%s: %s", point[DataPoint.TIMESTAMP], current)
            self.assertLessEqual(current, 4)
            cumulative = point[DataPoint.CUMULATIVE][''][KPISet.CONCURRENCY]
            self.assertLessEqual(cumulative, 4)

    self.assertEqual(4, collected[-1][DataPoint.CURRENT][''][KPISet.CONCURRENCY])
def test_long_iterations_value(self):
    """Run with an iterations count beyond the 64-bit range.

    2**64 overflows a signed 64-bit integer; the executor must still
    prepare, start, and pass several check cycles without error.
    """
    self.engine.aggregator = ConsolidatingAggregator()
    self.engine.aggregator.engine = self.engine
    self.obj.execution.merge({
        "iterations": 2 ** 64,  # deliberately larger than any native int width
        "scenario": {
            "requests": [
                "http://blazedemo.com/",
            ],
        },
    })
    self.obj.prepare()
    try:
        self.obj.startup()
        for _ in range(3):
            self.assertFalse(self.obj.check())
            self.engine.aggregator.check()
            time.sleep(self.obj.engine.check_interval)
    finally:
        # Always stop the executor, even if an assertion above fails.
        self.obj.shutdown()
def test_load_reader_real2(self):
    """Consolidate two apiritif readers in a single pass.

    Expects 39 datapoints total, with the last one reporting a current
    concurrency of 4.

    NOTE(review): a method with this exact name also appears earlier in
    this file — if both live in the same class only the later definition
    is ever run; confirm the classes and rename one if they collide.
    """
    first = ApiritifLoadReader(self.obj.log)
    first.engine = EngineEmul()
    first.register_file(RESOURCES_DIR + "jmeter/jtl/apiritif-results/apiritif-0.csv")
    first.register_file(RESOURCES_DIR + "jmeter/jtl/apiritif-results/apiritif-1.csv")

    second = ApiritifLoadReader(self.obj.log)
    second.engine = EngineEmul()
    second.register_file(RESOURCES_DIR + "jmeter/jtl/apiritif-results/apiritif--10.csv")
    second.register_file(RESOURCES_DIR + "jmeter/jtl/apiritif-results/apiritif--11.csv")

    aggregator = ConsolidatingAggregator()
    aggregator.engine = EngineEmul()
    aggregator.add_underling(first)
    aggregator.add_underling(second)

    points = list(aggregator.datapoints())
    self.assertEqual(39, len(points))
    self.assertEqual(4, points[-1][DataPoint.CURRENT][''][KPISet.CONCURRENCY])
def test_inconsistent(self):
    """Troubleshooting scaffold replaying a BZA master log; always skipped.

    `skipTest` raises immediately, so everything below is kept only as a
    recipe for reproducing an aggregation inconsistency from a saved log.
    """
    self.skipTest("just keep this code for future troubleshooting")

    aggregator = ConsolidatingAggregator()
    results = ResultsFromBZA(MasterFromLog(data={'id': 0}))
    with open("/tmp/downloads/bzt.log") as log_file:
        results.master.loglines = log_file.readlines()

    class Watcher(AggregatorListener):
        """Logs sample counts per second while replaying the log."""

        def aggregated_second(self, data):
            for kpi_set in data[DataPoint.CURRENT].values():
                # Computed for inspection during debugging; value unused.
                failure_ratio = kpi_set[KPISet.FAILURES] / kpi_set[KPISet.SAMPLE_COUNT]
                results.log.debug("TS: %s %s", data[DataPoint.TIMESTAMP], kpi_set[KPISet.SAMPLE_COUNT])

    aggregator.add_underling(results)
    aggregator.add_listener(Watcher())
    aggregator.prepare()
    aggregator.startup()
    try:
        while not aggregator.check():
            pass  # 1537973736 fail, prev 1537973735 1537973734 1537973733
    except NormalShutdown:
        results.log.warning("Shutting down")
    aggregator.shutdown()
    aggregator.post_process()