def parse(input: TextIOWrapper, regexp: str, separator: str = None,
          labels: Dict[str, str] = {}, metric_name_prefix: str = '') -> List[Metric]:
    """Custom parse function for gauge TPM (transactions per minute) from MySQL.

        TPM: 87060.0
        TPM: 95220.0
        TPM: 93600.0
        TPM: 90000.0
    """
    new_metrics = []
    new_line = readline_with_check(input, EOF_line='end')
    if "TPM:" in new_line:
        regex = re.findall(r'TPM: (?P<tpm>\d*.\d*)', new_line)
        tpm = float(regex[0])
        new_metrics.append(
            Metric(metric_name_prefix + 'tpm', tpm,
                   type=MetricType.GAUGE, labels=labels,
                   help="TPM (transactions per minute) from MySQL"))
    return new_metrics
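
# Illustration (not part of the original module): the TPM pattern above applied to the
# sample line from the docstring. Only `re` is required; `Metric`, `MetricType` and
# `readline_with_check` live elsewhere in the repo.
def _example_tpm_regex():
    import re
    regex = re.findall(r'TPM: (?P<tpm>\d*.\d*)', "TPM: 87060.0")
    assert float(regex[0]) == 87060.0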
def parse(input: TextIOWrapper, regexp: str, separator: str = None,
          labels: Dict[str, str] = {}, metric_name_prefix: str = '') -> List[Metric]:
    """Custom parse function for YCSB.
    Parses lines similar to (new line characters added to improve readability):
        2018-08-22 17:33:25:811 581 sec: 581117 operations; 975 current ops/sec;
        est completion in 2 hours 36 minutes
        [READ: Count=462, Max=554, Min=273, Avg=393.39, 90=457, 99=525, 99.9=554, 99.99=554]
        [UPDATE: Count=513, Max=699, Min=254, Avg=383.83, 90=441, 99=512, 99.9=589, 99.99=699]  # noqa
    """
    new_metrics = []

    new_line = readline_with_check(input)

    if "operations" in new_line:
        operations_and_ops = \
            re.search(r'(?P<operations>\d+) operations;', new_line).groupdict()
        operations = float(operations_and_ops['operations'])
        new_metrics.append(
            Metric(metric_name_prefix + 'operations', operations,
                   type=MetricType.GAUGE, labels=labels,
                   help="Done operations in Cassandra"))

    if "current ops" in new_line:
        operations_and_ops = \
            re.search(r'(?P<ops_per_sec>\d+(\.\d+)?) current ops\/sec', new_line).groupdict()
        ops_per_sec = float(operations_and_ops['ops_per_sec'])
        new_metrics.append(
            Metric(metric_name_prefix + 'ops_per_sec', ops_per_sec,
                   type=MetricType.GAUGE, labels=labels,
                   help="Ops per sec Cassandra"))

    if "READ" in new_line:
        read = re.search(r'\[READ.*?99\.99=(\d+).*?\]', new_line)
        p9999 = float(read.group(1))
        new_metrics.append(
            Metric(metric_name_prefix + 'read_p9999', p9999,
                   type=MetricType.GAUGE, labels=labels,
                   help="99.99th percentile of read latency in Cassandra"))

    if "UPDATE" in new_line:
        update = re.search(r'\[UPDATE.*?99\.99=(\d+).*?\]', new_line)
        p9999 = float(update.group(1))
        new_metrics.append(
            Metric(metric_name_prefix + 'update_p9999', p9999,
                   type=MetricType.GAUGE, labels=labels,
                   help="99.99th percentile of update latency in Cassandra"))

    return new_metrics
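
# Illustration (not part of the original module): the YCSB patterns above applied to the
# sample line from the docstring. Only `re` is required.
def _example_ycsb_regex():
    import re
    line = ("2018-08-22 17:33:25:811 581 sec: 581117 operations; "
            "975 current ops/sec; est completion in 2 hours 36 minutes "
            "[READ: Count=462, Max=554, Min=273, Avg=393.39, "
            "90=457, 99=525, 99.9=554, 99.99=554]")
    assert float(re.search(r'(?P<operations>\d+) operations;',
                           line).groupdict()['operations']) == 581117.0
    assert float(re.search(r'(?P<ops_per_sec>\d+(\.\d+)?) current ops\/sec',
                           line).groupdict()['ops_per_sec']) == 975.0
    assert float(re.search(r'\[READ.*?99\.99=(\d+).*?\]', line).group(1)) == 554.0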
def parse(input: TextIOWrapper, regexp: str, separator: str = None,
          labels: Dict[str, str] = {}, metric_name_prefix: str = '') -> List[Metric]:
    """Custom parse function for specjbb.
    For sample output from specjbb see the file ./specjbb_sample_stdout.txt.

    Discards lines until it finds >>Response times:<< and then reads until an empty line.
    The lines read in this way represent a table; in the code that table is named data_frame.
    """
    new_metrics = []
    input_lines = []

    # Discard lines until the response-times header is found.
    new_line = readline_with_check(input)
    while not re.match(r"^\s*Response times:\s*$", new_line):
        new_line = readline_with_check(input)
    new_line = readline_with_check(input)

    # Read until an empty line.
    while not re.match(EMPTY_LINE, new_line):
        input_lines.append(new_line)
        new_line = readline_with_check(input)
    log.debug("Found separator in {0}".format(new_line))

    # Two-dimensional list; the first row contains the column names (almost a data frame).
    data_frame = [[el.strip() for el in line.split(",")] for line in input_lines]

    # For now we need only one metric: TotalPurchase, p99.
    metric_name = metric_name_prefix + 'p99_total_purchase'
    metric_value = float(data_frame[1][-3])  # total purchase, p99
    new_metrics.append(Metric(metric_name, metric_value,
                              type=MetricType.GAUGE, labels=labels,
                              help="Specjbb2015 metric, Total Purchase, percentile 99"))

    return new_metrics
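
# Illustration (not part of the original module): how the response-times table is turned
# into `data_frame`. The column names below are hypothetical (see
# specjbb_sample_stdout.txt for the real layout); they are arranged so that the
# third-from-last column is p99, which is what data_frame[1][-3] picks out.
def _example_specjbb_data_frame():
    input_lines = [
        "Request, Samples, min, p50, p90, p99, p100, max",
        "TotalPurchase, 128, 1200, 3300, 5400, 9200, 11000, 12000",
    ]
    data_frame = [[el.strip() for el in line.split(",")] for line in input_lines]
    assert data_frame[0][0] == "Request"
    assert float(data_frame[1][-3]) == 9200.0  # p99 under the hypothetical layout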
def parse(input: TextIOWrapper, regexp: str, separator: str = None,
          labels: Dict[str, str] = {}, metric_name_prefix: str = '') -> List[Metric]:
    """Custom parse function for cassandra-stress.
        Results:
        Op rate : 14,997 op/s [WRITE: 14,997 op/s]
        Partition rate : 14,997 pk/s [WRITE: 14,997 pk/s]
        Row rate : 14,997 row/s [WRITE: 14,997 row/s]
        Latency mean : 1.9 ms [WRITE: 1.9 ms]
        Latency median : 0.3 ms [WRITE: 0.3 ms]
        Latency 95th percentile : 0.4 ms [WRITE: 0.4 ms]
        Latency 99th percentile : 74.0 ms [WRITE: 74.0 ms]
        Latency 99.9th percentile : 146.8 ms [WRITE: 146.8 ms]
        Latency max : 160.2 ms [WRITE: 160.2 ms]
        Total partitions : 1,350,028 [WRITE: 1,350,028]
        Total errors : 0 [WRITE: 0]
        Total GC count : 0
        Total GC memory : 0.000 KiB
        Total GC time : 0.0 seconds
        Avg GC time : NaN ms
        StdDev GC time : 0.0 ms
        Total operation time : 00:01:30
    """
    new_metrics = []
    new_line = readline_with_check(input, EOF_line)
    if "Op rate" in new_line:
        read_op_rate = re.search(r'Op rate[ ]*:[ ]*([0-9,]*) op/s', new_line)
        op_rate = float(''.join(read_op_rate.group(1).split(',')))
        new_metrics.append(
            Metric(metric_name_prefix + 'qps', op_rate,
                   type=MetricType.GAUGE, labels=labels,
                   help="QPS"))
    if "Latency 99th percentile" in new_line:
        read = re.search(
            r'Latency 99th percentile[ ]*:[ ]*([0-9]*\.[0-9]*) ms', new_line)
        p99 = float(read.group(1))
        new_metrics.append(
            Metric(metric_name_prefix + 'p99', p99,
                   type=MetricType.GAUGE, labels=labels,
                   help="99th percentile"))
    return new_metrics
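
# Illustration (not part of the original module): the "Op rate" pattern and the
# comma-stripping step applied to the sample line from the docstring.
def _example_cassandra_stress_regex():
    import re
    line = "Op rate : 14,997 op/s [WRITE: 14,997 op/s]"
    read_op_rate = re.search(r'Op rate[ ]*:[ ]*([0-9,]*) op/s', line)
    op_rate = float(''.join(read_op_rate.group(1).split(',')))
    assert op_rate == 14997.0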
def parse(input: TextIOWrapper, regexp: str, separator: str = None,
          labels: Dict[str, str] = {}, metric_name_prefix: str = '') -> List[Metric]:
    """Custom parse function for tensorflow benchmark training output, e.g.:
        180 images/sec: 74.9 +/- 0.5 (jitter = 8.9) 2.409
    """
    new_metrics = []
    new_line = readline_with_check(input)
    if "images/sec" in new_line:
        read = re.search(r'[0-9]*\timages\/sec:[ ]*([0-9]*\.[0-9]*)', new_line)
        training_speed = float(read.group(1))
        new_metrics.append(
            Metric(metric_name_prefix + 'training_speed', training_speed,
                   type=MetricType.GAUGE, labels=labels,
                   help="tensorflow benchmark training speed"))
    return new_metrics
def parse(input: TextIOWrapper, regexp: str, separator: str = None,
          labels: Dict[str, str] = {}, metric_name_prefix: str = '') -> List[Metric]:
    """Custom parse function for tensorflow benchmark prediction output, e.g.:
        580 248.7 examples/sec
    """
    new_metrics = []
    new_line = readline_with_check(input)
    if "examples/sec" in new_line:
        read = re.search(r'[0-9]*\t([0-9]*\.[0-9]*)[ ]*examples\/sec', new_line)
        prediction_speed = float(read.group(1))
        new_metrics.append(
            Metric(metric_name_prefix + 'prediction_speed', prediction_speed,
                   type=MetricType.GAUGE, labels=labels,
                   help="tensorflow benchmark prediction speed"))
    return new_metrics
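
# Illustration (not part of the original modules): the training pattern (previous module)
# and the prediction pattern (above) applied to sample lines. Note that both patterns
# expect a literal TAB after the leading step number.
def _example_tensorflow_regex():
    import re
    training = "180\timages/sec: 74.9 +/- 0.5 (jitter = 8.9)\t2.409"
    prediction = "580\t248.7 examples/sec"
    speed = re.search(r'[0-9]*\timages\/sec:[ ]*([0-9]*\.[0-9]*)', training)
    assert float(speed.group(1)) == 74.9
    speed = re.search(r'[0-9]*\t([0-9]*\.[0-9]*)[ ]*examples\/sec', prediction)
    assert float(speed.group(1)) == 248.7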
def parse(input: TextIOWrapper, regexp: str, separator: str = None,
          labels: Dict[str, str] = {}, metric_name_prefix: str = '') -> List[Metric]:
    """Custom parse function for rpc-perf.
        2018-09-13 08:15:43.404 INFO [rpc-perf] -----
        2018-09-13 08:15:43.404 INFO [rpc-perf] Window: 155
        2018-09-13 08:15:43.404 INFO [rpc-perf] Connections: Ok: 0 Error: 0 Timeout: 0 Open: 80
        2018-09-13 08:15:43.404 INFO [rpc-perf] Sockets: Create: 0 Close: 0 Read: 31601 Write: 15795 Flush: 0
        2018-09-13 08:15:43.404 INFO [rpc-perf] Requests: Sent: 15795 Prepared: 16384 In-Flight: 40
        2018-09-13 08:15:43.404 INFO [rpc-perf] Responses: Ok: 15793 Error: 0 Timeout: 0 Hit: 3144 Miss: 6960
        2018-09-13 08:15:43.404 INFO [rpc-perf] Rate: 15823.74 rps Success: 100.00 % Hit Rate: 31.12 %
        2018-09-13 08:15:43.404 INFO [rpc-perf] Percentiles: Response OK (us): min: 47 p50: 389 p90: 775 p99: 86436 p999: 89120 p9999: 89657 max: 89657
    """
    new_metrics = []
    new_line = readline_with_check(input)
    if "[rpc-perf] Percentiles:" in new_line:
        percentiles = dict(re.findall(r'(?P<name>min|max|p\d*): (?P<value>\d+)', new_line))
        p9999 = float(percentiles['p9999'])
        p999 = float(percentiles['p999'])
        p99 = float(percentiles['p99'])
        p90 = float(percentiles['p90'])
        p50 = float(percentiles['p50'])
        min = float(percentiles['min'])
        max = float(percentiles['max'])
        new_metrics.append(Metric(metric_name_prefix + 'p9999', p9999,
                                  type=MetricType.GAUGE, labels=labels,
                                  help="99.99th percentile of latency in rpc-perf"))
        new_metrics.append(Metric(metric_name_prefix + 'p999', p999,
                                  type=MetricType.GAUGE, labels=labels,
                                  help="99.9th percentile of latency in rpc-perf"))
        new_metrics.append(Metric(metric_name_prefix + 'p99', p99,
                                  type=MetricType.GAUGE, labels=labels,
                                  help="99th percentile of latency in rpc-perf"))
        new_metrics.append(Metric(metric_name_prefix + 'p90', p90,
                                  type=MetricType.GAUGE, labels=labels,
                                  help="90th percentile of latency in rpc-perf"))
        new_metrics.append(Metric(metric_name_prefix + 'p50', p50,
                                  type=MetricType.GAUGE, labels=labels,
                                  help="50th percentile of latency in rpc-perf"))
        new_metrics.append(Metric(metric_name_prefix + 'min', min,
                                  type=MetricType.GAUGE, labels=labels,
                                  help="min of latency in rpc-perf"))
        new_metrics.append(Metric(metric_name_prefix + 'max', max,
                                  type=MetricType.GAUGE, labels=labels,
                                  help="max of latency in rpc-perf"))
    if "[rpc-perf] Rate:" in new_line:
        statistic = \
            dict(re.findall(r'(?P<name>Hit Rate|Success|Rate): (?P<value>\d+.\d+)', new_line))
        hit_rate = float(statistic['Hit Rate'])
        success = float(statistic['Success'])
        rate = float(statistic['Rate'])
        new_metrics.append(Metric(metric_name_prefix + 'hit_rate', hit_rate,
                                  type=MetricType.GAUGE, labels=labels,
                                  help="Hit rate in rpc-perf"))
        new_metrics.append(Metric(metric_name_prefix + 'success', success,
                                  type=MetricType.GAUGE, labels=labels,
                                  help="Success responses in rpc-perf"))
        new_metrics.append(Metric(metric_name_prefix + 'rate', rate,
                                  type=MetricType.GAUGE, labels=labels,
                                  help="Rate in rpc-perf"))
    return new_metrics
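
# Illustration (not part of the original module): the percentile findall -> dict step
# applied to the sample "Percentiles:" line from the docstring. Only `re` is required.
def _example_rpc_perf_percentiles():
    import re
    line = ("2018-09-13 08:15:43.404 INFO [rpc-perf] Percentiles: Response OK (us): "
            "min: 47 p50: 389 p90: 775 p99: 86436 p999: 89120 p9999: 89657 max: 89657")
    percentiles = dict(re.findall(r'(?P<name>min|max|p\d*): (?P<value>\d+)', line))
    assert float(percentiles['p99']) == 86436.0
    assert float(percentiles['p9999']) == 89657.0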
def parse(input: TextIOWrapper, regexp: str, separator: str = None,
          labels: Dict[str, str] = {}, metric_name_prefix: str = '') -> List[Metric]:
    """Custom parse function for mutilate.

    - for scan mode:
        #type avg min 1st 5th 10th 90th 95th 99th QPS target
        read 76.3 346.3 21.1 23.5 24.5 34.3 38.7 2056.6 1002.0 1000

    - for Q mode (run with -Q):
        #type avg std min 5th 10th 90th 95th 99th
        read 801.9 155.0 304.5 643.7 661.1 1017.8 1128.2 1386.5
        update 804.6 157.8 539.4 643.4 661.2 1026.1 1136.1 1404.3
        op_q 1.0 0.0 1.0 1.0 1.0 1.1 1.1 1.1

        Total QPS = 159578.5 (1595835 / 10.0s)
        Misses = 0 (0.0%)
        Skipped TXs = 0 (0.0%)

        RX 382849511 bytes : 36.5 MB/s
        TX 67524708 bytes : 6.4 MB/s
    """
    SCAN_MODE_COLUMNS = 11
    new_metrics = []
    new_line = readline_with_check(input, EOF_line)
    line = new_line.split()

    scan_prefix = 'scan_'
    if "read" in line:
        if len(line) != SCAN_MODE_COLUMNS:
            # Q-mode row: it has no QPS column, so drop the scan prefix;
            # QPS is taken from the "Total QPS" line instead.
            scan_prefix = ''
        else:
            qps = float(line[9])
            new_metrics.append(Metric(
                metric_name_prefix + scan_prefix + 'qps', qps,
                type=MetricType.GAUGE, labels=labels, help="QPS"
            ))
        avg = float(line[1])
        new_metrics.append(
            Metric(metric_name_prefix + scan_prefix + 'read_avg', avg,
                   type=MetricType.GAUGE, labels=labels,
                   help="Average"))
        p90 = float(line[6])
        new_metrics.append(
            Metric(metric_name_prefix + scan_prefix + 'read_p90', p90,
                   type=MetricType.GAUGE, labels=labels,
                   help="90th percentile of read latency"))
        p95 = float(line[7])
        new_metrics.append(
            Metric(metric_name_prefix + scan_prefix + 'read_p95', p95,
                   type=MetricType.GAUGE, labels=labels,
                   help="95th percentile of read latency"))
        p99 = float(line[8])
        new_metrics.append(
            Metric(metric_name_prefix + scan_prefix + 'read_p99', p99,
                   type=MetricType.GAUGE, labels=labels,
                   help="99th percentile of read latency"))

    if "Total QPS" in new_line:
        read_qps = re.search(r'Total QPS = ([0-9]*\.[0-9])', new_line)
        if read_qps is not None:
            qps = float(read_qps.group(1))
            new_metrics.append(Metric(
                metric_name_prefix + 'qps', qps,
                type=MetricType.GAUGE, labels=labels, help="QPS"))

    return new_metrics
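
# Illustration (not part of the original module): column positions in a scan-mode "read"
# row, taken from the docstring sample above.
def _example_mutilate_scan_row():
    line = "read 76.3 346.3 21.1 23.5 24.5 34.3 38.7 2056.6 1002.0 1000".split()
    assert len(line) == 11            # SCAN_MODE_COLUMNS
    assert float(line[1]) == 76.3     # avg latency
    assert float(line[6]) == 34.3     # 90th percentile
    assert float(line[8]) == 2056.6   # 99th percentile
    assert float(line[9]) == 1002.0   # QPS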
def test_readline_with_check(*print_mock):
    with pytest.raises(StopIteration):
        readline_with_check(input=StringIO(""))
    line = "content_of_line"
    assert line == readline_with_check(input=StringIO(line))
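
# Minimal sketch (an assumption, not the repo's implementation) of the behaviour the
# test above exercises. The real readline_with_check lives elsewhere in the repo and is
# also called with an EOF_line sentinel by several of the parsers above.
def _readline_with_check_sketch(input, EOF_line=None):
    new_line = input.readline()
    # An empty string from readline() means EOF on a TextIOWrapper/StringIO;
    # seeing the EOF_line sentinel in the line is also treated as end of input.
    if new_line == '' or (EOF_line is not None and EOF_line in new_line):
        raise StopIteration
    return new_line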
def parse(input: TextIOWrapper, regexp: str, separator: str = None,
          labels: Dict[str, str] = {}, metric_name_prefix: str = '') -> List[Metric]:
    """Custom parse function for stress-ng.
        stress-ng: info:  [99] Time 1546433449, counter=173
        stress-ng: info:  [96] Time 1546433449, counter=210
        stress-ng: info:  [103] Time 1546433449, counter=191
        stress-ng: info:  [104] Time 1546433449, counter=195
        stress-ng: info:  [106] Time 1546433449, counter=197
        stress-ng: info:  [101] Time 1546433450, counter=250
        stress-ng: info:  [98] Time 1546433450, counter=261
        stress-ng: info:  [97] Time 1546433450, counter=273
        stress-ng: info:  [99] Time 1546433450, counter=217
        stress-ng: info:  [96] Time 1546433450, counter=263
        stress-ng: info:  [113] stress-ng-stream: memory rate: 1806.87 MB/sec, 722.75 Mflop/sec
        stress-ng: info:  [114] Time 1546433537, counter=304
        stress-ng: info:  [115] Time 1546433537, counter=282
        stress-ng: info:  [119] stress-ng-stream: memory rate: 1742.69 MB/sec, 697.07 Mflop/sec
        stress-ng: info:  [117] stress-ng-stream: memory rate: 1999.33 MB/sec, 799.73 Mflop/sec
        stress-ng: info:  [115] stress-ng-stream: memory rate: 1922.38 MB/sec, 768.95 Mflop/sec
        stress-ng: info:  [114] stress-ng-stream: memory rate: 2067.34 MB/sec, 826.94 Mflop/sec
        stress-ng: info:  [121] stress-ng-stream: memory rate: 1849.08 MB/sec, 739.63 Mflop/sec
        stress-ng: info:  [123] stress-ng-stream: memory rate: 1848.92 MB/sec, 739.57 Mflop/sec
        stress-ng: info:  [116] stress-ng-stream: memory rate: 2027.03 MB/sec, 810.81 Mflop/sec
        stress-ng: info:  [112] successful run completed in 6.02s
        ---
        system-info:
              stress-ng-version: 0.09.28
              run-by: root
              date-yyyy-mm-dd: 2019:01:02
              time-hh-mm-ss: 12:52:17
              epoch-secs: 1546433537
              hostname: d4840a594b43
              sysname: Linux
              nodename: d4840a594b43
              release: 4.15.0-43-generic
              version: #46-Ubuntu SMP Thu Dec 6 14:45:28 UTC 2018
              machine: x86_64
              uptime: 6933
              totalram: 33605246976
              freeram: 10913120256
              sharedram: 1651642368
              bufferram: 649097216
              totalswap: 34233905152
              freeswap: 34233905152
              pagesize: 4096
              cpus: 8
              cpus-online: 8
              ticks-per-second: 100
        stress-ng: info:  [112] stressor      bogo ops real time  usr time  sys time   bogo ops/s / bogo ops/s
        stress-ng: info:  [112]                         (secs)    (secs)    (secs)   (real time) / (usr+sys time)
        metrics:
        stress-ng: info:  [112] stream            2250      6.01     40.81      0.39       374.12 / 54.61
            - stressor: stream
              bogo-ops: 2250
              bogo-ops-per-second-usr-sys-time: 54.611650
              bogo-ops-per-second-real-time: 374.121510
              wall-clock-time: 6.014089
              user-time: 40.810000
              system-time: 0.390000
        ...
""" new_metrics = [] new_line = readline_with_check(input, EOF_line) # Parse metric summary on the end stressing brief = re.search( r'(?P<bogo_ops>\d+.\d*) +' r'(?P<real_time>\d+.\d*) +' r'(?P<user_time>\d+.\d*) +' r'(?P<system_time>\d+.\d*) +' r'(?P<bogo_ops_per_second_real_time>\d+.\d*) +' r'(?P<bogo_ops_per_second_usr_sys_time>\d+.\d*)', new_line) if brief is not None: bogo_ops = float(brief['bogo_ops']) real_time = float(brief['real_time']) user_time = float(brief['user_time']) system_time = float(brief['system_time']) bogo_ops_real = float(brief['bogo_ops_per_second_real_time']) bogo_ops_usr_sys = float(brief['bogo_ops_per_second_usr_sys_time']) new_metrics.append( Metric(metric_name_prefix + 'bogo_ops', bogo_ops, type=MetricType.GAUGE, labels=labels, help="Summary bogo ops")) new_metrics.append( Metric(metric_name_prefix + 'real_time', real_time, type=MetricType.GAUGE, labels=labels, help="Summary real_time (secs)")) new_metrics.append( Metric(metric_name_prefix + 'user_time', user_time, type=MetricType.GAUGE, labels=labels, help="Summary user_time (secs)")) new_metrics.append( Metric(metric_name_prefix + 'system_time', system_time, type=MetricType.GAUGE, labels=labels, help="Summary system_time (secs)")) new_metrics.append( Metric(metric_name_prefix + 'bogo_ops_per_second_real_time', bogo_ops_real, type=MetricType.GAUGE, labels=labels, help="Summary bogo ops/s real time")) new_metrics.append( Metric(metric_name_prefix + 'bogo_ops_per_second_usr_sys_time', bogo_ops_usr_sys, type=MetricType.GAUGE, labels=labels, help="Summary bogo ops/s usr+sys time")) info = re.search( r'stress-ng: info: {2}\[(?P<id>\d*)\]+ ' + r'Time (?P<time>\d*), counter=(?P<counter>\d*)', new_line) if info is not None: id_proc = info['id'] counter = int(info['counter']) labels.update({"id_proc_stress_ng": id_proc}) new_metrics.append( Metric( metric_name_prefix + 'bogo_ops_counter', counter, type=MetricType.COUNTER, labels=labels, help="Counter bogo ops per proc stress-ng, updated per 1 sec")) return new_metrics
def parse(input: TextIOWrapper, regexp: str, separator: str = None,
          labels: Dict[str, str] = {}, metric_name_prefix: str = '') -> List[Metric]:
    """Custom parse function for stress-ng.
        stress-ng: info:  [25] Time 1572363779, counter 425, diff 61
        stress-ng: info:  [27] Time 1572363779, counter 426, diff 61
        stress-ng: info:  [23] Time 1572363780, counter 475, diff 60
        stress-ng: info:  [26] Time 1572363780, counter 322, diff 40
        stress-ng: info:  [24] Time 1572363780, counter 321, diff 39
        stress-ng: info:  [25] Time 1572363780, counter 485, diff 60
        stress-ng: info:  [27] Time 1572363780, counter 485, diff 59
        stress-ng: info:  [23] Time 1572363781, counter 536, diff 61
        stress-ng: info:  [24] Time 1572363781, counter 362, diff 41
        stress-ng: info:  [26] Time 1572363781, counter 363, diff 41
        stress-ng: info:  [25] Time 1572363781, counter 547, diff 62
        stress-ng: info:  [27] Time 1572363781, counter 547, diff 62
        stress-ng: info:  [23] stress-ng-stream: memory rate: 3852.67 MB/sec, 1541.07 Mflop/sec / (instance 0)
        stress-ng: info:  [27] stress-ng-stream: memory rate: 3922.76 MB/sec, 1569.10 Mflop/sec / (instance 4)
        stress-ng: info:  [26] Time 1572363782, counter 403, diff 40
        stress-ng: info:  [24] Time 1572363782, counter 402, diff 40
        stress-ng: info:  [25] Time 1572363782, counter 608, diff 61
        stress-ng: info:  [27] Time 1572363782, counter 609, diff 62
        stress-ng: info:  [25] stress-ng-stream: memory rate: 3920.62 MB/sec, 1568.25 Mflop/sec / (instance 2)
        stress-ng: info:  [24] stress-ng-stream: memory rate: 2597.47 MB/sec, 1038.99 Mflop/sec / (instance 1)
        stress-ng: info:  [26] stress-ng-stream: memory rate: 2603.78 MB/sec, 1041.51 Mflop/sec / (instance 3)
        stress-ng: info:  [22] successful run completed in 10.01s
        ---
        system-info:
              stress-ng-version: 0.10.08
              run-by: root
              date-yyyy-mm-dd: 2019:10:29
              time-hh-mm-ss: 15:43:02
              epoch-secs: 1572363782
              hostname: 31ff53b6528c
              sysname: Linux
              nodename: 31ff53b6528c
              release: 4.15.0-66-generic
              version: #75-Ubuntu SMP Tue Oct 1 05:24:09 UTC 2019
              machine: x86_64
              uptime: 28799
              totalram: 33605263360
              freeram: 14022410240
              sharedram: 2681659392
              bufferram: 1237618688
              totalswap: 0
              freeswap: 0
              pagesize: 4096
              cpus: 8
              cpus-online: 8
              ticks-per-second: 100
        stress-ng: info:  [22] stressor      bogo ops real time  usr time  sys time   bogo ops/s / bogo ops/s
        stress-ng: info:  [22]                         (secs)    (secs)    (secs)   (real time) / (usr+sys time)
        metrics:
        stress-ng: info:  [22] stream            2623     10.00     49.82      0.10       262.18 / 52.54
            - stressor: stream
              bogo-ops: 2623
              bogo-ops-per-second-usr-sys-time: 52.544071
              bogo-ops-per-second-real-time: 262.179501
              wall-clock-time: 10.004596
              user-time: 49.820000
              system-time: 0.100000
        ...
""" new_metrics = [] new_line = readline_with_check(input, EOF_line) # Parse metric summary on the end stressing brief = re.search( r'(?P<bogo_ops>\d+.\d*) +' r'(?P<real_time>\d+.\d*) +' r'(?P<user_time>\d+.\d*) +' r'(?P<system_time>\d+.\d*) +' r'(?P<bogo_ops_per_second_real_time>\d+.\d*) +' r'(?P<bogo_ops_per_second_usr_sys_time>\d+.\d*)', new_line) if brief is not None: bogo_ops = float(brief['bogo_ops']) real_time = float(brief['real_time']) user_time = float(brief['user_time']) system_time = float(brief['system_time']) bogo_ops_real = float(brief['bogo_ops_per_second_real_time']) bogo_ops_usr_sys = float(brief['bogo_ops_per_second_usr_sys_time']) new_metrics.append( Metric(metric_name_prefix + 'bogo_ops', bogo_ops, type=MetricType.GAUGE, labels=labels, help="Summary bogo ops")) new_metrics.append( Metric(metric_name_prefix + 'real_time', real_time, type=MetricType.GAUGE, labels=labels, help="Summary real_time (secs)")) new_metrics.append( Metric(metric_name_prefix + 'user_time', user_time, type=MetricType.GAUGE, labels=labels, help="Summary user_time (secs)")) new_metrics.append( Metric(metric_name_prefix + 'system_time', system_time, type=MetricType.GAUGE, labels=labels, help="Summary system_time (secs)")) new_metrics.append( Metric(metric_name_prefix + 'bogo_ops_per_second_real_time', bogo_ops_real, type=MetricType.GAUGE, labels=labels, help="Summary bogo ops/s real time")) new_metrics.append( Metric(metric_name_prefix + 'bogo_ops_per_second_usr_sys_time', bogo_ops_usr_sys, type=MetricType.GAUGE, labels=labels, help="Summary bogo ops/s usr+sys time")) info = re.search( r'stress-ng: info: {2}\[(?P<id>\d*)\]+ ' + r'Time (?P<time>\d*), counter (?P<counter>\d*), diff (?P<diff>\d*)', new_line) if info is not None: time = info['time'] id_proc = info['id'] counter = int(info['counter']) diff = int(info['diff']) labels.update({"id_proc_stress_ng": id_proc}) labels.update({"stress_ng_time": time}) new_metrics.append( Metric( metric_name_prefix + 'bogo_ops_counter', counter, type=MetricType.COUNTER, labels=labels, help="Counter bogo ops per proc stress-ng, updated per 1 sec")) new_metrics.append( Metric( metric_name_prefix + 'bogo_ops_gauge', diff, type=MetricType.GAUGE, labels=labels, help="Gauge bogo ops per proc stress-ng, updated per 1 sec")) return new_metrics