def random_datapoint(n):
    point = DataPoint(n)
    overall = point[DataPoint.CURRENT].get('', KPISet())
    overall[KPISet.CONCURRENCY] = r(100)
    overall[KPISet.SAMPLE_COUNT] = int(100 * r(1000)) + 1
    overall[KPISet.SUCCESSES] = int(overall[KPISet.SAMPLE_COUNT] * random())
    overall[KPISet.FAILURES] = overall[KPISet.SAMPLE_COUNT] - overall[KPISet.SUCCESSES]
    overall[KPISet.PERCENTILES]['25'] = r(10)
    overall[KPISet.PERCENTILES]['50'] = r(20)
    overall[KPISet.PERCENTILES]['75'] = r(30)
    overall[KPISet.PERCENTILES]['90'] = r(40)
    overall[KPISet.PERCENTILES]['99'] = r(50)
    overall[KPISet.PERCENTILES]['100'] = r(100)
    overall[KPISet.RESP_CODES][rc()] = 1
    overall[KPISet.AVG_RESP_TIME] = r(100)
    overall[KPISet.AVG_CONN_TIME] = overall[KPISet.AVG_RESP_TIME] / 3.0
    overall[KPISet.AVG_LATENCY] = 2.0 * overall[KPISet.AVG_RESP_TIME] / 3.0
    cumul = point[DataPoint.CUMULATIVE].get('', KPISet())
    cumul.merge(overall)
    point.recalculate()
    overall[KPISet.AVG_RESP_TIME] = r(100)
    overall[KPISet.AVG_CONN_TIME] = overall[KPISet.AVG_RESP_TIME] / 3.0
    overall[KPISet.AVG_LATENCY] = 2.0 * overall[KPISet.AVG_RESP_TIME] / 3.0
    return point

def point_from_locust(timestamp, sid, data):
    """
    :type timestamp: str
    :type sid: str
    :type data: dict
    :rtype: DataPoint
    """
    point = DataPoint(int(timestamp))
    point[DataPoint.SOURCE_ID] = sid
    overall = KPISet()
    for item in data['stats']:
        if timestamp not in item['num_reqs_per_sec']:
            continue

        kpiset = KPISet()
        kpiset[KPISet.SAMPLE_COUNT] = item['num_reqs_per_sec'][timestamp]
        kpiset[KPISet.CONCURRENCY] = data['user_count']
        kpiset[KPISet.BYTE_COUNT] = item['total_content_length']
        if item['num_requests']:
            avg_rt = (item['total_response_time'] / 1000.0) / item['num_requests']
            kpiset.sum_rt = item['num_reqs_per_sec'][timestamp] * avg_rt

        for err in data['errors'].values():
            if err['name'] == item['name']:
                new_err = KPISet.error_item_skel(err['error'], None, err['occurences'],
                                                 KPISet.ERRTYPE_ERROR, Counter())
                KPISet.inc_list(kpiset[KPISet.ERRORS], ("msg", err['error']), new_err)
                kpiset[KPISet.FAILURES] += err['occurences']

        point[DataPoint.CURRENT][item['name']] = kpiset
        overall.merge_kpis(kpiset)

    point[DataPoint.CURRENT][''] = overall
    point.recalculate()
    return point

def point_from_locust(timestamp, sid, data):
    """
    :type timestamp: str
    :type sid: str
    :type data: dict
    :rtype: DataPoint
    """
    point = DataPoint(int(timestamp))
    point[DataPoint.SOURCE_ID] = sid
    overall = KPISet()
    for item in data['stats']:
        if timestamp not in item['num_reqs_per_sec']:
            continue

        kpiset = KPISet()
        kpiset[KPISet.SAMPLE_COUNT] = item['num_reqs_per_sec'][timestamp]
        kpiset[KPISet.CONCURRENCY] = data['user_count']
        kpiset[KPISet.BYTE_COUNT] = item['total_content_length']
        if item['num_requests']:
            avg_rt = (item['total_response_time'] / 1000.0) / item['num_requests']
            kpiset.sum_rt = item['num_reqs_per_sec'][timestamp] * avg_rt

        point[DataPoint.CURRENT][item['name']] = kpiset
        overall.merge_kpis(kpiset)

    point[DataPoint.CURRENT][''] = overall
    point.recalculate()
    return point

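# A minimal usage sketch for point_from_locust() above. The wrapper function name and
# the payload are hypothetical: the dict is shaped after the keys the function reads
# ('stats', 'user_count', 'errors'), not a real locust report. DataPoint and KPISet
# are assumed to be imported at module level, as the functions above already require.
def _example_point_from_locust():
    sample_second = '1458842800'
    sample_data = {
        'user_count': 10,
        'errors': {},  # keyed error records; empty in this sketch
        'stats': [{
            'name': '/api/users',
            'num_reqs_per_sec': {sample_second: 5},  # requests finished during that second
            'num_requests': 5,
            'total_response_time': 1250,  # milliseconds, summed over all requests
            'total_content_length': 2048,
        }],
    }
    point = point_from_locust(sample_second, 'worker-1', sample_data)
    assert point[DataPoint.SOURCE_ID] == 'worker-1'
    assert point[DataPoint.CURRENT]['/api/users'][KPISet.SAMPLE_COUNT] == 5
    return point
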
def random_datapoint(n):
    point = DataPoint(n)
    overall = point[DataPoint.CURRENT].get('', KPISet())
    overall[KPISet.CONCURRENCY] = r(100)
    overall[KPISet.SAMPLE_COUNT] = int(100 * r(1000)) + 1
    overall[KPISet.SUCCESSES] = int(overall[KPISet.SAMPLE_COUNT] * random())
    overall[KPISet.FAILURES] = overall[KPISet.SAMPLE_COUNT] - overall[KPISet.SUCCESSES]
    overall[KPISet.PERCENTILES]['25.0'] = r(10)
    overall[KPISet.PERCENTILES]['50.0'] = r(20)
    overall[KPISet.PERCENTILES]['75.0'] = r(30)
    overall[KPISet.PERCENTILES]['90.0'] = r(40)
    overall[KPISet.PERCENTILES]['99.0'] = r(50)
    overall[KPISet.PERCENTILES]['100.0'] = r(100)
    overall[KPISet.RESP_CODES][rc()] = 1
    overall[KPISet.AVG_RESP_TIME] = r(100)
    overall[KPISet.AVG_CONN_TIME] = overall[KPISet.AVG_RESP_TIME] / 3.0
    overall[KPISet.AVG_LATENCY] = 2.0 * overall[KPISet.AVG_RESP_TIME] / 3.0
    overall.sum_rt = overall[KPISet.AVG_RESP_TIME] * overall[KPISet.SAMPLE_COUNT]
    overall.sum_cn = overall[KPISet.AVG_CONN_TIME] * overall[KPISet.SAMPLE_COUNT]
    overall.sum_lt = overall[KPISet.AVG_LATENCY] * overall[KPISet.SAMPLE_COUNT]
    cumul = point[DataPoint.CUMULATIVE].get('', KPISet())
    cumul.merge_kpis(overall)
    cumul.recalculate()
    point.recalculate()
    overall[KPISet.AVG_RESP_TIME] = r(100)
    overall[KPISet.AVG_CONN_TIME] = overall[KPISet.AVG_RESP_TIME] / 3.0
    overall[KPISet.AVG_LATENCY] = 2.0 * overall[KPISet.AVG_RESP_TIME] / 3.0
    return point

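# A minimal sketch of how random_datapoint() above can be used, e.g. to feed synthetic
# data into a consumer during tests. The wrapper name and the timestamp value are
# hypothetical; the r()/rc() helpers and the dict-like get(key, default) behaviour
# that stores the default under '' are assumptions carried over from the code above.
def _example_random_datapoint():
    point = random_datapoint(1458842800)
    overall = point[DataPoint.CURRENT]['']
    assert overall[KPISet.SAMPLE_COUNT] >= 1
    assert overall[KPISet.SUCCESSES] + overall[KPISet.FAILURES] == overall[KPISet.SAMPLE_COUNT]
    return point
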
def _calculate_datapoints(self, final_pass=False):
    if self.master_id is None:
        return

    data = self.client.get_kpis(self.master_id, self.min_ts)
    for label in data:
        if label['kpis']:
            label['kpis'].pop(-1)  # never take last second since it could be incomplete

    timestamps = []
    for label in data:
        if label['label'] == 'ALL':
            timestamps.extend([kpi['ts'] for kpi in label['kpis']])

    for tstmp in timestamps:
        point = DataPoint(tstmp)
        for label in data:
            for kpi in label['kpis']:
                if kpi['ts'] != tstmp:
                    continue

                # per-label KPI record: 'ec' -> failures, 'na' -> concurrency, 'n' -> sample count;
                # 't_avg'/'lt_avg' are average response/latency times in milliseconds (hence /1000.0)
                kpiset = KPISet()
                kpiset[KPISet.FAILURES] = kpi['ec']
                kpiset[KPISet.CONCURRENCY] = kpi['na']
                kpiset[KPISet.SAMPLE_COUNT] = kpi['n']
                kpiset.sum_rt += kpi['t_avg'] * kpi['n'] / 1000.0
                kpiset.sum_lt += kpi['lt_avg'] * kpi['n'] / 1000.0
                point[DataPoint.CURRENT]['' if label['label'] == 'ALL' else label['label']] = kpiset

        point.recalculate()
        self.min_ts = point[DataPoint.TIMESTAMP] + 1
        yield point

def point_from_locust(self, ts, sid, data):
    """
    :type sid: str
    :type ts: str
    :type data: dict
    :rtype: DataPoint
    """
    point = DataPoint(int(ts))
    point[DataPoint.SOURCE_ID] = sid
    overall = KPISet()
    for item in data['stats']:
        if ts not in item['num_reqs_per_sec']:
            continue

        kpiset = KPISet()
        kpiset[KPISet.SAMPLE_COUNT] = item['num_reqs_per_sec'][ts]
        kpiset[KPISet.CONCURRENCY] = data['user_count']
        if item['num_requests']:
            avg_rt = (item['total_response_time'] / 1000.0) / item['num_requests']
            kpiset.sum_rt = item['num_reqs_per_sec'][ts] * avg_rt

        point[DataPoint.CURRENT][item['name']] = kpiset
        overall.merge_kpis(kpiset)

    point[DataPoint.CURRENT][''] = overall
    point.recalculate()
    return point

def merge_datapoints(self, max_full_ts):
    for key in sorted(self.join_buffer.keys(), key=int):
        if int(key) <= max_full_ts:
            sec_data = self.join_buffer.pop(key)
            self.log.debug("Processing complete second: %s", key)
            point = DataPoint(int(key))
            for sid, item in iteritems(sec_data):
                point.merge_point(self.point_from_locust(key, sid, item))
            point.recalculate()
            yield point

def merge_datapoints(self, max_full_ts):
    for key in sorted(self.join_buffer.keys(), key=lambda x: int(x)):
        if int(key) <= max_full_ts:
            sec_data = self.join_buffer.pop(key)
            self.log.debug("Processing complete second: %s", key)
            point = DataPoint(int(key))
            for sid, item in iteritems(sec_data):
                point.merge_point(self.point_from_locust(key, sid, item))
            point.recalculate()
            yield point

def merge_datapoints(self, max_full_ts):
    reader_id = self.file.name + "@" + str(id(self))
    for key in sorted(self.join_buffer.keys(), key=int):
        if int(key) <= max_full_ts:
            sec_data = self.join_buffer.pop(key)
            self.log.debug("Processing complete second: %s", key)
            point = DataPoint(int(key))
            point[DataPoint.SOURCE_ID] = reader_id
            for sid, item in iteritems(sec_data):
                point.merge_point(self.point_from_locust(key, sid, item))
            point.recalculate()
            yield point

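# Why merge_datapoints() above sorts the buffer keys numerically: the per-second buckets
# are keyed by timestamp strings (note the int(key) conversions), and plain lexicographic
# sorting would put '100' before '99'. A tiny self-contained illustration of the difference;
# the wrapper name and sample keys are purely illustrative.
def _example_numeric_key_sort():
    buffer_keys = ['99', '100', '101']
    assert sorted(buffer_keys) == ['100', '101', '99']  # lexicographic order breaks second ordering
    assert sorted(buffer_keys, key=int) == ['99', '100', '101']  # numeric order, oldest second first
    return sorted(buffer_keys, key=int)
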
def _calculate_datapoints(self, final_pass=False):
    if self.master_id is None:
        return

    try:
        data = self.client.get_kpis(self.master_id, self.min_ts)
    except URLError as exc:
        self.log.warning("Failed to get result KPIs, will retry in %s seconds...", self.client.timeout)
        self.log.debug("Full exception: %s", traceback.format_exc())
        time.sleep(self.client.timeout)
        data = self.client.get_kpis(self.master_id, self.min_ts)
        self.log.info("Succeeded with retry")

    for label in data:
        if label['kpis']:
            label['kpis'].pop(-1)  # never take last second since it could be incomplete

    timestamps = []
    for label in data:
        if label['label'] == 'ALL':
            timestamps.extend([kpi['ts'] for kpi in label['kpis']])

    for tstmp in timestamps:
        point = DataPoint(tstmp)
        for label in data:
            for kpi in label['kpis']:
                if kpi['ts'] != tstmp:
                    continue

                kpiset = KPISet()
                kpiset[KPISet.FAILURES] = kpi['ec']
                kpiset[KPISet.CONCURRENCY] = kpi['na']
                kpiset[KPISet.SAMPLE_COUNT] = kpi['n']
                kpiset.sum_rt += kpi['t_avg'] * kpi['n'] / 1000.0
                kpiset.sum_lt += kpi['lt_avg'] * kpi['n'] / 1000.0
                point[DataPoint.CURRENT]['' if label['label'] == 'ALL' else label['label']] = kpiset

        point.recalculate()
        self.min_ts = point[DataPoint.TIMESTAMP] + 1
        yield point

def _calculate_datapoints(self, final_pass=False):
    if self.master_id is None:
        return

    try:
        data = self.client.get_kpis(self.master_id, self.min_ts)
    except URLError:
        self.log.warning("Failed to get result KPIs, will retry in %s seconds...", self.client.timeout)
        self.log.debug("Full exception: %s", traceback.format_exc())
        time.sleep(self.client.timeout)
        data = self.client.get_kpis(self.master_id, self.min_ts)
        self.log.info("Succeeded with retry")

    for label in data:
        if label['kpis']:
            label['kpis'].pop(-1)  # never take last second since it could be incomplete

    timestamps = []
    for label in data:
        if label['label'] == 'ALL':
            timestamps.extend([kpi['ts'] for kpi in label['kpis']])

    for tstmp in timestamps:
        point = DataPoint(tstmp)
        for label in data:
            for kpi in label['kpis']:
                if kpi['ts'] != tstmp:
                    continue

                kpiset = KPISet()
                kpiset[KPISet.FAILURES] = kpi['ec']
                kpiset[KPISet.CONCURRENCY] = kpi['na']
                kpiset[KPISet.SAMPLE_COUNT] = kpi['n']
                kpiset.sum_rt += kpi['t_avg'] * kpi['n'] / 1000.0
                kpiset.sum_lt += kpi['lt_avg'] * kpi['n'] / 1000.0
                point[DataPoint.CURRENT]['' if label['label'] == 'ALL' else label['label']] = kpiset

        point.recalculate()
        self.min_ts = point[DataPoint.TIMESTAMP] + 1
        yield point

def _calculate_datapoints(self, final_pass=False):
    if self.master is None:
        return

    data, aggr_raw = self.query_data()
    aggr = {}
    for label in aggr_raw:
        aggr[label['labelName']] = label

    for label in data:
        if label.get('kpis') and not final_pass:
            label['kpis'].pop(-1)  # never take last second since it could be incomplete

    timestamps = []
    for label in data:
        if label.get('label') == 'ALL':
            timestamps.extend([kpi['ts'] for kpi in label.get('kpis', [])])

    self.handle_errors = True
    for tstmp in timestamps:
        point = DataPoint(tstmp)
        point[DataPoint.SOURCE_ID] = self.master['id']
        self.__generate_kpisets(aggr, data, point, tstmp)

        if self.handle_errors:
            self.handle_errors = False
            self.cur_errors = self.__get_errors_from_bza()
            # error stats are polled as running totals, so only the diff since the
            # previous poll (prev_errors) is attributed to this datapoint
            err_diff = self._get_err_diff()
            if err_diff:
                self.__add_err_diff(point, err_diff)
                self.prev_errors = self.cur_errors

        point.recalculate()
        self.min_ts = point[DataPoint.TIMESTAMP] + 1
        yield point