def _write_schedule_file(self, load, scheduler, sfd):
    """
    Write the text-format schedule into `sfd`: for each scheduled record,
    a header line ("<len> <delay-ms> <marker>") followed by the payload line.
    """
    written = 0
    entry_count = None
    progress = None
    started = time.time()

    for record in scheduler.generate():
        # record : (time_offset, payload_len, payload_offset, payload, marker, record_type, overall_len)
        offset, length, _, payload, marker, _, _ = record

        # Once the scheduler has looped over its payloads at least once,
        # its entry count is known and we can size a progress bar.
        if entry_count is None and scheduler.iterations > 1:
            entry_count = scheduler.count
            estimate = self._estimate_schedule_size(load, entry_count)
            self.log.debug("Estimated schedule size: %s", estimate)
            if estimate:
                progress = IncrementableProgressBar(maxval=estimate)
                progress.catchup(started, written)

        if offset < 0:  # special case, run worker with no delay
            offset = 0.0

        sfd.write(b("%s %s %s%s" % (length, int(1000 * offset), marker, self.NEWLINE)))
        sfd.write(b("%s%s" % (payload, self.NEWLINE)))

        written += 1
        if progress:
            progress.increment()

    self.log.debug("Actual schedule size: %s", written)
    if progress:
        progress.finish()
def test_server_agent(self):
    """End-to-end check of the server-agent client: handshake, metrics feed, CSV log."""
    obj = Monitoring()
    obj.engine = EngineEmul()
    obj.parameters.merge({
        "server-agent": [{
            "address": "127.0.0.1:4444",
            "logging": "True",
            "metrics": ["cpu", "disks"]
        }, {
            "address": "10.0.0.1",
            "metrics": ["something1", "something2"]
        }]
    })

    # attach every supported listener flavour: plain logger, widget, criteria
    obj.add_listener(LoggingMonListener())
    widget = obj.get_widget()
    obj.add_listener(widget)
    crit_conf = BetterDict.from_dict({
        "condition": ">",
        "threshold": 5,
        "subject": "127.0.0.1:4444/cpu"
    })
    criteria = MonitoringCriteria(crit_conf, obj)
    obj.add_listener(criteria)

    obj.client_classes = {'server-agent': ServerAgentClientEmul}

    obj.prepare()
    obj.startup()

    # feed nine fake metric samples through the emulated socket
    for idx in range(1, 10):
        obj.clients[0].socket.recv_data += b("%s\t%s\t\n" % (idx, idx * 10))
        obj.check()
        ROOT_LOGGER.debug("Criteria state: %s", criteria)
        time.sleep(obj.engine.check_interval)

    obj.shutdown()
    obj.post_process()

    # the full command conversation must have been sent to the agent
    self.assertEquals(b("test\ninterval:1\nmetrics:cpu\tdisks\nexit\n"), obj.clients[0].socket.sent_data)

    # CSV log: header row plus one row per fed sample
    self.assertIsNotNone(obj.clients[0].logs_file)
    with open(obj.clients[0].logs_file) as serveragent_logs:
        rows = list(csv.reader(serveragent_logs))
        self.assertEquals(['ts', 'cpu', 'disks'], rows[0])
        for idx in range(1, 10):
            self.assertEquals([str(idx), str(idx * 10)], rows[idx][1:])
def __upload_artifacts(self):
    """
    If token provided, upload artifacts folder contents and bzt.log
    (each log file is uploaded in full plus a ~4KB ".tail.bz" excerpt).
    """
    if not self._session.token:
        return

    # NOTE(review): assumes every config level exists (BetterDict-style
    # chained .get()) — confirm against engine config defaults
    worker_index = self.engine.config.get('modules').get('shellexec').get('env').get('TAURUS_INDEX_ALL')
    if worker_index:
        suffix = '-%s' % worker_index
    else:
        suffix = ''
    artifacts_zip = "artifacts%s.zip" % suffix
    mfile, zip_listing = self.__get_jtls_and_more()
    self.log.info("Uploading all artifacts as %s ...", artifacts_zip)
    self._session.upload_file(artifacts_zip, mfile.getvalue())
    self._session.upload_file(artifacts_zip + '.tail.bz', self.__format_listing(zip_listing))

    handlers = self.engine.log.parent.handlers
    for handler in handlers:
        if isinstance(handler, logging.FileHandler):
            fname = handler.baseFilename
            self.log.info("Uploading %s", fname)
            fhead, ftail = os.path.splitext(os.path.split(fname)[-1])
            modified_name = fhead + suffix + ftail
            with open(fname, 'rb') as _file:
                self._session.upload_file(modified_name, _file.read())
                # BUGFIX: clamp the seek so log files shorter than 4KB don't
                # raise OSError on a negative absolute position; read() above
                # left the cursor at EOF, so tell() gives the file size
                size = _file.tell()
                _file.seek(max(0, size - 4096))
                tail = _file.read()
                # BUGFIX: drop the first (possibly partial) line; find()
                # returns -1 when there is no newline, so "+ 1" then keeps
                # the whole tail instead of raising ValueError like index()
                tail = tail[tail.find(b("\n")) + 1:]
                self._session.upload_file(modified_name + ".tail.bz", tail)
def test_schedule_empty(self):
    # concurrency: 1, iterations: 1
    scheduler = self.get_scheduler(b("4 test\ntest\n"))
    generated = list(scheduler.generate())
    for entry in generated:
        ROOT_LOGGER.debug("Item: %s", entry)
    # a single-shot schedule produces exactly one record
    self.assertEqual(1, len(generated))
def test_schedule_rps(self):
    """Generate a throughput-driven schedule and log per-second record rates."""
    rps = 9
    rampup = 12
    self.configure({
        "provisioning": "test",
        EXEC: {
            "throughput": rps,
            "ramp-up": rampup,
            "steps": 3,
            "hold-for": 0
        }
    })
    scheduler = self.get_scheduler(b("4 test\ntest\n"))

    total = 0
    second = 0
    per_second = 0
    for entry in scheduler.generate():
        bucket = int(math.ceil(entry[0]))
        if bucket != second:
            # self.assertLessEqual(per_second, rps)
            second = bucket
            ROOT_LOGGER.debug("RPS: %s", per_second)
            per_second = 0
        total += 1
        per_second += 1
    ROOT_LOGGER.debug("RPS: %s", per_second)
def disconnect(self):
    """
    Tell the serverAgent we are done and close the connection.

    Best-effort: a failure to send "exit" is only logged as a warning,
    and the socket is always closed via the finally clause.
    """
    self.log.debug("Closing connection with agent at %s:%s...", self.address, self.port)
    try:
        self.socket.send(b("exit\n"))
    except BaseException as exc:
        # deliberate broad catch: disconnect must never propagate errors
        self.log.warning("Error during disconnecting from agent at %s:%s: %s", self.address, self.port, exc)
    finally:
        self.socket.close()
def test_schedule_concurrency_steps(self):
    """Stepped ramp-up with two payloads: verify record count and loop markers."""
    self.configure({EXEC: {"concurrency": 5, "ramp-up": 10, "steps": 3}})
    scheduler = self.get_scheduler(b("5 test1\ntest1\n5 test2\ntest2\n"))
    generated = list(scheduler.generate())
    self.assertEqual(8, len(generated))
    self.assertEqual(-1, generated[5][0])  # instance became unlimited
    self.assertEqual(Scheduler.REC_TYPE_LOOP_START, generated[6][5])  # looped payload
def connect(self):
    """
    Perform the serverAgent handshake and, when logging is configured,
    create the CSV log artifact with its header row.

    :raises TaurusNetworkError: when the agent is unreachable or replies unexpectedly
    """
    try:
        self.socket.connect((self.address, self.port))
        self.socket.send(b("test\n"))
        resp = self.socket.recv(4)
        # BUGFIX: explicit check instead of `assert` — asserts are stripped
        # under `python -O`, which would silently accept a bad handshake
        if resp != b("Yep\n"):
            raise ValueError("Unexpected response from serverAgent: %r" % resp)
        self.log.debug("Connected to serverAgent at %s:%s successfully", self.address, self.port)
    except BaseException as exc:
        self.log.warning("Error during connecting to agent at %s:%s: %s", self.address, self.port, exc)
        msg = "Failed to connect to serverAgent at %s:%s" % (self.address, self.port)
        raise TaurusNetworkError(msg) from exc  # chain the original cause

    if self.config.get("logging", False):
        self.logs_file = self.engine.create_artifact("SAlogs_{}_{}".format(self.address, self.port), ".csv")
        with open(self.logs_file, "a", newline='') as sa_logs:
            logs_writer = csv.writer(sa_logs, delimiter=',')
            # header: timestamp column followed by metric names in sorted order
            metrics = ['ts'] + sorted(self._result_fields)
            logs_writer.writerow(metrics)
def test_schedule_with_no_rampup(self):
    self.configure({EXEC: {
        "concurrency": 10,
        "ramp-up": None,
        "steps": 3,
        "hold-for": 10
    }})
    # building the scheduler must not raise when ramp-up is absent
    self.get_scheduler(b("4 test\ntest\n"))
def test_server_agent_encoding(self):
    """After prepare(), only the handshake command should have been sent as bytes."""
    obj = Monitoring()
    obj.engine = EngineEmul()
    agent_config = {
        "server-agent": [{
            "address": "127.0.0.1:4444",
            "metrics": ["cpu", "disks"]
        }]
    }
    obj.parameters.merge(agent_config)
    obj.client_classes = {'server-agent': ServerAgentClientEmul}

    obj.prepare()

    self.assertEquals(b("test\n"), obj.clients[0].socket.sent_data)
def _write_schedule_file(self, load, scheduler, sfd):
    """
    Write the binary-format schedule into `sfd`: one fixed-size record per
    scheduled item — 3-byte delay + 1-byte record type + 4-byte payload
    length + 8-byte payload offset.
    """
    prev_offset = 0
    # fractional milliseconds carried over between records, so that
    # truncating each delay to whole ms does not accumulate drift
    accum_interval = 0.0
    cnt = 0
    payload_entry_count = None
    pbar = None
    start_time = time.time()
    for item in scheduler.generate():
        # item : (time_offset, payload_len, payload_offset, payload, marker, record_type, overall_len)
        time_offset, _, payload_offset, _, _, record_type, overall_len = item
        # once the scheduler has looped, its entry count is known and a
        # progress bar can be sized from the estimated schedule length
        if scheduler.iterations > 1 and payload_entry_count is None:
            payload_entry_count = scheduler.count
            estimated_size = self._estimate_schedule_size(load, payload_entry_count)
            self.log.debug("Estimated schedule size: %s", estimated_size)
            if estimated_size:
                pbar = IncrementableProgressBar(maxval=estimated_size)
                pbar.catchup(start_time, cnt)
        if time_offset >= 0:
            # delay relative to the previous record, in whole milliseconds;
            # the fractional remainder is kept in accum_interval
            accum_interval += 1000 * (time_offset - prev_offset)
            interval = int(math.floor(accum_interval))
            accum_interval -= interval
        else:
            # negative offset: sentinel delay value for the special record
            # NOTE(review): 0xFFFFFF presumably means "no delay / unlimited"
            # to the reader side — confirm against the consumer of this file
            interval = 0xFFFFFF
        # 3-byte delay (low bytes of a 4-byte pack) followed by 1-byte type
        # NOTE(review): struct.pack("I", ...) uses native byte order — this
        # layout assumes a little-endian platform, confirm
        type_and_delay = struct.pack("I", interval)[:-1] + b(chr(record_type))
        payload_len_bytes = struct.pack('I', overall_len)
        payload_offset_bytes = struct.pack('Q', payload_offset)
        sfd.write(type_and_delay + payload_len_bytes + payload_offset_bytes)
        if pbar:
            pbar.increment()
        cnt += 1
        prev_offset = time_offset
    self.log.debug("Actual schedule size: %s", cnt)
    if pbar:
        pbar.finish()
def __init__(self, family=AF_INET, atype=SOCK_STREAM, proto=0, _sock=None):
    """
    Socket stand-in for tests: mirrors the real socket constructor signature
    (arguments are accepted for compatibility but ignored).
    """
    # bytes the emulated peer will "send" to us (consumed by recv)
    self.recv_data = b("")
    # bytes accumulated from our send() calls, for assertions
    self.sent_data = b("")
def start(self):
    """Configure the agent (interval + metric list) and switch to non-blocking reads."""
    self.socket.send(b("interval:%s\n" % self.interval))
    metrics_cmd = "metrics:%s\n" % self._metrics_command
    self.log.debug("Sending metrics command: %s", metrics_cmd)
    self.socket.send(b(metrics_cmd))
    # polling happens from the monitoring loop, so reads must not block
    self.socket.setblocking(False)
def test_schedule_throughput_only(self):
    """A throughput without duration settings must still yield some records."""
    self.configure({EXEC: {"throughput": 5}})
    scheduler = self.get_scheduler(b("5 test1\ntest1\n5 test2\ntest2\n"))
    generated = list(scheduler.generate())
    self.assertTrue(len(generated) > 0)
def __init__(self, parent_logger, label, config, engine):
    """
    Server-agent client emulator: replaces the real socket with SocketEmul
    pre-loaded with the "Yep\\n" handshake reply, and swaps in an emulated
    select() so the monitoring loop works without real I/O.
    """
    super(ServerAgentClientEmul, self).__init__(parent_logger, label, config, engine)
    self.socket = SocketEmul()
    # pre-load the handshake response that connect() expects
    self.socket.recv_data = b("Yep\n")
    self.select = self.select_emul