def test_schedule_empty(self):
    # concurrency: 1, iterations: 1
    scheduler = self.get_scheduler(b("4 test\ntest\n"))
    items = list(scheduler.generate())
    for item in items:
        ROOT_LOGGER.debug("Item: %s", item)
    self.assertEqual(1, len(items))
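# This test and the test_schedule_rps variants below rely on a get_scheduler()
# helper that is not part of this section. The following is a minimal sketch of
# what such a helper could look like, assuming the Scheduler(load, payload_file,
# logger) constructor used in check_schedule_size_estimate() further down; the
# actual helper in the suite may differ:
def get_scheduler(self, buf=None):
    # Dump the raw payload bytes to a temp file and build a Scheduler over it,
    # using the load profile derived from the configured execution.
    path = tempfile.mkstemp()[1]
    with open(path, "wb") as fds:
        fds.write(buf)
    return Scheduler(self.obj.get_load(), path, ROOT_LOGGER)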
def test_merge(self):
    obj = Configuration()
    configs = [
        RESOURCES_DIR + "yaml/test.yml",
        RESOURCES_DIR + "json/merge1.json",
        RESOURCES_DIR + "json/merge2.json",
    ]
    obj.load(configs)

    fname = tempfile.mkstemp()[1]
    obj.dump(fname, Configuration.JSON)
    with open(fname) as fh:
        ROOT_LOGGER.debug("JSON:\n%s", fh.read())

    jmeter = obj['modules']['jmeter']
    classval = jmeter['class']
    self.assertEqual("bzt.modules.jmeter.JMeterExecutor", classval)
    self.assertEqual("value", obj['key'])
    self.assertEqual(6, len(obj["list-append"]))
    self.assertEqual(2, len(obj["list-replace"]))
    self.assertEqual(2, len(obj["list-replace-notexistent"]))
    self.assertIsInstance(obj["list-complex"][1][0], BetterDict)
    self.assertNotIn("properties", jmeter)

    fname = tempfile.mkstemp()[1]
    obj.dump(fname, Configuration.JSON)

    checker = Configuration()
    checker.load([fname])
    token = checker["list-complex"][1][0]['token']
    self.assertNotEqual('test', token)
    token_orig = obj["list-complex"][1][0]['token']
    self.assertEqual('test', token_orig)
def test_not_junit(self):
    """
    Check that the JUnit runner fails if no tests were found
    """
    self.configure({
        EXEC: {
            "executor": "selenium",
            "scenario": {
                "script": RESOURCES_DIR + "selenium/invalid/NotJUnittest.java"
            }
        }
    })
    self.obj.prepare()
    self.assertIsInstance(self.obj.runner, JUnitTester)
    self.obj.startup()
    try:
        while not self.obj.check():
            time.sleep(self.obj.engine.check_interval)
        self.fail()  # the runner must raise ToolError before finishing
    except ToolError as exc:
        diagnostics = "\n".join(exc.diagnostics)
        self.assertIn("Nothing to test", diagnostics)
    except BaseException as exc:
        ROOT_LOGGER.debug(traceback.format_exc())
        self.fail("Unexpected exception %s, expected ToolError" % exc)
    self.obj.shutdown()
def test_schedule_rps(self):
    rps = 9
    rampup = 12
    self.configure({
        "provisioning": "test",
        EXEC: {
            "throughput": rps,
            "ramp-up": rampup,
            "steps": 3,
            "hold-for": 0
        }
    })
    scheduler = self.get_scheduler(b("4 test\ntest\n"))
    cnt = 0
    cur = 0
    currps = 0
    for item in scheduler.generate():
        if int(math.ceil(item[0])) != cur:
            # self.assertLessEqual(currps, rps)
            cur = int(math.ceil(item[0]))
            ROOT_LOGGER.debug("RPS: %s", currps)
            currps = 0
        cnt += 1
        currps += 1
    ROOT_LOGGER.debug("RPS: %s", currps)
def test_save(self):
    obj = Configuration()
    obj.merge({"str": "text", "uc": six.u("ucstring")})
    fname = tempfile.mkstemp()[1]
    obj.dump(fname, Configuration.YAML)
    with open(fname) as fh:
        written = fh.read()
    ROOT_LOGGER.debug("YAML:\n%s", written)
    self.assertNotIn("unicode", written)
def test_server_agent(self):
    obj = Monitoring()
    obj.engine = EngineEmul()
    obj.parameters.merge({
        "server-agent": [{
            "address": "127.0.0.1:4444",
            "logging": "True",
            "metrics": ["cpu", "disks"]
        }, {
            "address": "10.0.0.1",
            "metrics": ["something1", "something2"]
        }]
    })

    listener = LoggingMonListener()
    obj.add_listener(listener)

    widget = obj.get_widget()
    obj.add_listener(widget)

    crit_conf = BetterDict.from_dict({"condition": ">", "threshold": 5, "subject": "127.0.0.1:4444/cpu"})
    criteria = MonitoringCriteria(crit_conf, obj)
    obj.add_listener(criteria)

    obj.client_classes = {'server-agent': ServerAgentClientEmul}

    obj.prepare()
    obj.startup()

    for i in range(1, 10):
        obj.clients[0].socket.recv_data += b("%s\t%s\t\n" % (i, i * 10))
        obj.check()
        ROOT_LOGGER.debug("Criteria state: %s", criteria)
        time.sleep(obj.engine.check_interval)

    obj.shutdown()
    obj.post_process()

    self.assertEqual(b("test\ninterval:1\nmetrics:cpu\tdisks\nexit\n"), obj.clients[0].socket.sent_data)

    if PY3:
        self.assertIsNotNone(obj.clients[0].logs_file)
        with open(obj.clients[0].logs_file) as serveragent_logs:
            logs_reader = list(csv.reader(serveragent_logs))
        self.assertEqual(['ts', 'cpu', 'disks'], logs_reader[0])
        for i in range(1, 10):
            self.assertEqual([str(i), str(i * 10)], logs_reader[i][1:])
def test_simple(self):
    self.configure({
        "provisioning": "test",
        EXEC: {
            "log-responses": "proto_error",
            # "iterations": 5000000,
            "concurrency": 10,
            "throughput": 1000,
            "ramp-up": "1m",
            # "steps": 5,
            "hold-for": "15",
            "scenario": {
                "timeout": 1,
                "default-address": "http://localhost:33",
                "headers": {"Connection": "close"},
                "requests": [{
                    "url": "/api",
                    "method": "POST",
                    "headers": {"Content-Length": 0},
                    "body": {"param": "value"}
                }]
            }
        }
    })
    self.obj.engine.aggregator = ConsolidatingAggregator()
    self.obj.engine.aggregator.engine = self.obj.engine
    self.obj.engine.aggregator.add_listener(DataPointLogger())

    self.obj.engine.aggregator.prepare()
    self.obj.prepare()

    self.obj.engine.aggregator.startup()
    self.obj.startup()

    while not self.obj.check():
        ROOT_LOGGER.debug("Running...")
        self.obj.engine.aggregator.check()
        time.sleep(0.1)

    self.obj.shutdown()
    self.obj.engine.aggregator.shutdown()

    self.obj.post_process()
    self.obj.engine.aggregator.post_process()
def test_simple(self):
    self.configure({
        "provisioning": "test",
        ScenarioExecutor.EXEC: {
            "log-responses": "proto_error",
            # "iterations": 5000000,
            "concurrency": 10,
            "throughput": 1000,
            "ramp-up": "1m",
            # "steps": 5,
            "hold-for": "15",
            "scenario": {
                "timeout": 1,
                "default-address": "http://localhost:33",
                "headers": {"Connection": "close"},
                "requests": [{
                    "url": "/api",
                    "method": "POST",
                    "headers": {"Content-Length": 0},
                    "body": {"param": "value"}
                }]
            }
        }
    })
    self.obj.engine.aggregator = ConsolidatingAggregator()
    self.obj.engine.aggregator.engine = self.obj.engine
    self.obj.engine.aggregator.add_listener(DataPointLogger())

    self.obj.engine.aggregator.prepare()
    self.obj.prepare()

    self.obj.engine.aggregator.startup()
    self.obj.startup()

    while not self.obj.check():
        ROOT_LOGGER.debug("Running...")
        self.obj.engine.aggregator.check()
        time.sleep(0.1)

    self.obj.shutdown()
    self.obj.engine.aggregator.shutdown()

    self.obj.post_process()
    self.obj.engine.aggregator.post_process()
def test_server_agent(self):
    obj = Monitoring()
    obj.engine = EngineEmul()
    obj.parameters.merge({
        "server-agent": [{
            "address": "127.0.0.1:4444",
            "metrics": ["cpu", "disks"]
        }, {
            "address": "10.0.0.1",
            "metrics": ["something1", "something2"]
        }]
    })

    listener = LoggingMonListener()
    obj.add_listener(listener)

    widget = obj.get_widget()
    obj.add_listener(widget)

    crit_conf = BetterDict.from_dict({
        "condition": ">",
        "threshold": 5,
        "subject": "127.0.0.1:4444/cpu"
    })
    criteria = MonitoringCriteria(crit_conf, obj)
    obj.add_listener(criteria)

    obj.client_classes = {'server-agent': ServerAgentClientEmul}

    obj.prepare()
    obj.startup()

    for _ in range(1, 10):
        obj.clients[0].socket.recv_data += b("%s\t%s\n" % (random.random(), random.random()))
        obj.check()
        ROOT_LOGGER.debug("Criteria state: %s", criteria)
        time.sleep(obj.engine.check_interval)

    obj.shutdown()
    obj.post_process()

    self.assertEqual(b("test\ninterval:1\nmetrics:cpu\tdisks\nexit\n"), obj.clients[0].socket.sent_data)
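# Both test_server_agent variants substitute ServerAgentClientEmul for the real
# client so that no network I/O happens. That emulator is not shown in this
# section; a minimal sketch of the fake-socket idea it relies on (hypothetical
# class, except for the recv_data/sent_data attributes, which the assertions
# above read and write directly):
class SocketEmul(object):
    def __init__(self):
        self.sent_data = b("")  # accumulates everything the client sends
        self.recv_data = b("")  # bytes queued by the test for the client to read

    def send(self, data):
        # record outgoing bytes instead of writing to a real socket
        self.sent_data += data
        return len(data)

    def recv(self, bufsize=4096):
        # hand back (and consume) whatever the test queued up
        chunk, self.recv_data = self.recv_data[:bufsize], self.recv_data[bufsize:]
        return chunk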
def test_stdev_performance(self):
    start = time.time()
    self.configure(RESOURCES_DIR + "jmeter/jtl/slow-stdev.jtl")
    res = list(self.obj.datapoints(final_pass=True))
    lst_json = to_json(res)

    self.assertNotIn('"perc": {},', lst_json)

    elapsed = time.time() - start
    ROOT_LOGGER.debug("Elapsed/per datapoint: %s / %s", elapsed, elapsed / len(res))
    # self.assertLess(elapsed, len(res))  # less than 1 datapoint per sec is a no-go

    exp = [2.2144798867972773, 0.7207704268609725, 0.606834452578833, 0.8284089170237546,
           0.5858142211763572, 0.622922628329711, 0.5529488620851849, 0.6933748292117727,
           0.4876162181858197, 0.42471180222446503, 0.2512251128133865]
    self.assertEqual(exp, [x[DataPoint.CURRENT][''][KPISet.STDEV_RESP_TIME] for x in res])
def test_stdev_performance(self):
    start = time.time()
    self.configure(RESOURCES_DIR + "jmeter/jtl/slow-stdev.jtl")
    res = list(self.obj.datapoints(final_pass=True))
    lst_json = to_json(res)

    self.assertNotIn('"perc": {},', lst_json)

    elapsed = time.time() - start
    ROOT_LOGGER.debug("Elapsed/per datapoint: %s / %s", elapsed, elapsed / len(res))
    # self.assertLess(elapsed, len(res))  # less than 1 datapoint per sec is a no-go

    exp = [0.53060066889723, 0.39251356581014, 0.388405157629, 0.38927586980868,
           0.30511697736531, 0.21160424043633, 0.07339064994943]
    self.assertEqual(exp, [round(x[DataPoint.CURRENT][''][KPISet.STDEV_RESP_TIME], 14) for x in res])
def test_schedule_rps(self):
    self.obj.engine.config.merge({"provisioning": "test"})
    rps = 9
    rampup = 12
    self.obj.execution.merge({"throughput": rps, "ramp-up": rampup, "steps": 3, "hold-for": 0})
    scheduler = self.get_scheduler(b("4 test\ntest\n"))
    cnt = 0
    cur = 0
    currps = 0
    for item in scheduler.generate():
        if int(math.ceil(item[0])) != cur:
            # self.assertLessEqual(currps, rps)
            cur = int(math.ceil(item[0]))
            ROOT_LOGGER.debug("RPS: %s", currps)
            currps = 0
        cnt += 1
        currps += 1
    ROOT_LOGGER.debug("RPS: %s", currps)
def test_stdev_performance(self):
    start = time.time()
    self.configure(RESOURCES_DIR + "jmeter/jtl/slow-stdev.jtl")
    res = list(self.obj.datapoints(final_pass=True))
    lst_json = to_json(res)

    self.assertNotIn('"perc": {},', lst_json)

    elapsed = time.time() - start
    ROOT_LOGGER.debug("Elapsed/per datapoint: %s / %s", elapsed, elapsed / len(res))
    # self.assertLess(elapsed, len(res))  # less than 1 datapoint per sec is a no-go

    exp = [0.53060066889723, 0.39251356581014, 0.388405157629, 0.52855748890714,
           0.39107758224016, 0.38999119030886, 0.32537625773864, 0.47057465198195,
           0.2746790136753, 0.23251104555698, 0.08369447671202]
    self.assertEqual(exp, [round(x[DataPoint.CURRENT][''][KPISet.STDEV_RESP_TIME], 14) for x in res])
def test_not_junit(self):
    """
    Check that the JUnit runner fails if no tests were found
    """
    self.configure({
        ScenarioExecutor.EXEC: {
            "executor": "selenium",
            "scenario": {"script": RESOURCES_DIR + "selenium/invalid/NotJUnittest.java"}
        }
    })
    self.obj.prepare()
    self.assertIsInstance(self.obj.runner, JUnitTester)
    self.obj.startup()
    try:
        while not self.obj.check():
            time.sleep(self.obj.engine.check_interval)
        self.fail()  # the runner must raise ToolError before finishing
    except ToolError as exc:
        diagnostics = "\n".join(exc.diagnostics)
        self.assertIn("Nothing to test", diagnostics)
    except BaseException as exc:
        ROOT_LOGGER.debug(traceback.format_exc())
        self.fail("Unexpected exception %s, expected ToolError" % exc)
    self.obj.shutdown()
def test_load(self):
    obj = Configuration()
    configs = [
        BASE_CONFIG,
        RESOURCES_DIR + "json/jmx.json",
        RESOURCES_DIR + "json/concurrency.json"
    ]
    obj.load(configs)
    ROOT_LOGGER.debug("config:\n%s", obj)

    fname = tempfile.mkstemp()[1]
    obj.dump(fname, Configuration.JSON)
    with open(fname) as fh:
        ROOT_LOGGER.debug("JSON:\n%s", fh.read())

    fname = tempfile.mkstemp()[1]
    obj.dump(fname, Configuration.YAML)
    with open(fname) as fh:
        ROOT_LOGGER.debug("YAML:\n%s", fh.read())
def check_schedule_size_estimate(self, execution):
    self.configure({
        ScenarioExecutor.EXEC: execution,
        "provisioning": "local",
    })
    load = self.obj.get_load()
    self.obj.generator = TaurusPBenchGenerator(self.obj, ROOT_LOGGER)
    self.obj.generator.generate_payload(self.obj.get_scenario())
    payload_count = len(self.obj.get_scenario().get('requests', []))

    sch = Scheduler(load, self.obj.generator.payload_file, ROOT_LOGGER)

    estimated_schedule_size = self.obj.generator._estimate_schedule_size(load, payload_count)
    ROOT_LOGGER.debug("Estimated schedule size: %s", estimated_schedule_size)

    items = list(sch.generate())
    actual_schedule_size = len(items)
    ROOT_LOGGER.debug("Actual schedule size: %s", actual_schedule_size)

    if actual_schedule_size != 0:
        error = abs(estimated_schedule_size - actual_schedule_size)
        error_rel = error / float(actual_schedule_size)
        ROOT_LOGGER.debug("Estimation error: %s", error)
        # fail if the estimate is off by 10% or more of the actual size
        if error_rel >= 0.1:
            self.fail("Estimation failed (error=%s) on config %s" % (error_rel, pprint.pformat(execution)))
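# check_schedule_size_estimate() above is a parameterized helper rather than a
# test itself. A hypothetical caller would pass it a concrete execution config,
# for example (illustrative values only, not from the original suite):
def test_schedule_size_estimate_rps(self):
    self.check_schedule_size_estimate({
        "throughput": 10,
        "hold-for": "10s",
        "scenario": {"requests": ["/", "/api"]}
    })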
def monitoring_data(self, data):
    ROOT_LOGGER.debug("Data: %s", data)
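# monitoring_data() above is the single callback of the monitoring-listener
# contract that the test_server_agent snippets exercise via obj.add_listener().
# In context it presumably sits on a small helper class along these lines
# (sketch, assuming bzt's MonitoringListener base class):
class LoggingMonListener(MonitoringListener):
    def monitoring_data(self, data):
        # just log each incoming batch of metric points
        ROOT_LOGGER.debug("Data: %s", data)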