def consolidate_results(results):
    """Consolidate wrk2 results from multiple clients into one summary dict.

    results: list of per-client dicts, each holding a 'results' sub-dict
    with numeric HTTP counters and, optionally, an encoded 'latency_stats'
    HdrHistogram blob.

    Returns a dict keyed by metric name; when latency data is present,
    'latency_stats' is a sorted list of [percentile, value] pairs.
    """
    err_flag = False
    all_res = {'tool': 'wrk2'}
    total_count = len(results)
    if not total_count:
        # nothing to consolidate
        return all_res
    for key in ['http_rps', 'http_total_req', 'http_sock_err',
                'http_sock_timeout', 'http_throughput_kbytes']:
        all_res[key] = 0
        for item in results:
            # clients may not report every counter; missing counts as 0
            all_res[key] += item['results'].get(key, 0)
        all_res[key] = int(all_res[key])

    if 'latency_stats' in results[0]['results']:
        all_res['latency_stats'] = []
        # 1 usec .. 24 hours range, 2 significant digits
        histogram = HdrHistogram(1, 24 * 3600 * 1000 * 1000, 2)
        for item in results:
            if 'latency_stats' in item['results']:
                histogram.decode_and_add(item['results']['latency_stats'])
            else:
                err_flag = True
        perc_list = [50, 75, 90, 99, 99.9, 99.99, 99.999]
        latency_dict = histogram.get_percentile_to_value_dict(perc_list)
        # fix: items() works on both Python 2 and 3 (iteritems() is Py2-only)
        for key, value in latency_dict.items():
            all_res['latency_stats'].append([key, value])
        all_res['latency_stats'].sort()
    if err_flag:
        LOG.warning('Unable to find latency_stats from the result dictionary, this '
                    'may indicate that the test application on VM exited abnormally.')
    return all_res
def consolidate_results(results):
    """Consolidate wrk2 results from multiple clients into one summary dict.

    results: list of per-client dicts, each holding a 'results' sub-dict
    with numeric HTTP counters and, optionally, an encoded 'latency_stats'
    HdrHistogram blob.

    Returns a dict keyed by metric name; when latency data is present,
    'latency_stats' is a sorted list of [percentile, value] pairs.
    """
    all_res = {'tool': 'wrk2'}
    total_count = len(results)
    if not total_count:
        # nothing to consolidate
        return all_res
    for key in ['http_rps', 'http_total_req', 'http_sock_err',
                'http_sock_timeout', 'http_throughput_kbytes']:
        all_res[key] = 0
        for item in results:
            # clients may not report every counter; missing counts as 0
            all_res[key] += item['results'].get(key, 0)
        all_res[key] = int(all_res[key])

    if 'latency_stats' in results[0]['results']:
        all_res['latency_stats'] = []
        # 1 usec .. 24 hours range, 2 significant digits
        histogram = HdrHistogram(1, 24 * 3600 * 1000 * 1000, 2)
        for item in results:
            # fix: guard against clients that did not report latency_stats
            # (previously an unguarded lookup that raised KeyError)
            if 'latency_stats' in item['results']:
                histogram.decode_and_add(item['results']['latency_stats'])
        perc_list = [50, 75, 90, 99, 99.9, 99.99, 99.999]
        latency_dict = histogram.get_percentile_to_value_dict(perc_list)
        # fix: items() works on both Python 2 and 3 (iteritems() is Py2-only)
        for key, value in latency_dict.items():
            all_res['latency_stats'].append([key, value])
        all_res['latency_stats'].sort()
    return all_res
def test_hdr_interop():
    """Interop check: decode blobs produced by the C HdrHistogram library.

    Adds the raw and corrected C-encoded samples into two fresh histograms,
    then verifies percentiles and min/max against the reference values.
    """
    raw_sample = ENCODE_SAMPLES_HDRHISTOGRAM_C[0]
    corrected_sample = ENCODE_SAMPLES_HDRHISTOGRAM_C[1]
    histogram = HdrHistogram(LOWEST, HIGHEST, SIGNIFICANT)
    histogram.decode_and_add(raw_sample)
    corrected_histogram = HdrHistogram(LOWEST, HIGHEST, SIGNIFICANT)
    corrected_histogram.decode_and_add(corrected_sample)
    # percentiles plus min/max values must match
    check_percentiles(histogram, corrected_histogram)
def test_hdr_interop():
    """Interop check: decode blobs produced by the C HdrHistogram library.

    Builds one histogram per C-encoded sample (raw, then corrected) and
    verifies the percentiles and min/max values match the references.
    """
    hists = []
    for sample in (ENCODE_SAMPLES_HDRHISTOGRAM_C[0],
                   ENCODE_SAMPLES_HDRHISTOGRAM_C[1]):
        h = HdrHistogram(LOWEST, HIGHEST, SIGNIFICANT)
        h.decode_and_add(sample)
        hists.append(h)
    # percentiles plus min/max values must match
    check_percentiles(hists[0], hists[1])
def check_dec_perf():
    """Benchmark decode_and_add(): decode the same encoded blob 1000 times.

    Fills a 30%-wide band of counters starting at the 20% mark, encodes
    once, then times 1000 self-additions and prints the elapsed time.
    """
    hist = HdrHistogram(LOWEST, WRK2_MAX_LATENCY, 2)
    band_start = 20 * hist.counts_len // 100
    band_end = band_start + 30 * hist.counts_len // 100
    fill_hist_counts(hist, band_end, band_start)
    payload = hist.encode()
    # decode and add to self 1000 times
    begin_ts = datetime.datetime.now()
    for _ in range(1000):
        hist.decode_and_add(payload)
    elapsed = datetime.datetime.now() - begin_ts
    print(elapsed)
def check_dec_perf():
    """Benchmark decode_and_add(): decode the same encoded blob 1000 times.

    Populates counters between the 20% and 50% marks of the counts array,
    encodes once, then times 1000 self-additions and prints the delta.
    """
    histogram = HdrHistogram(LOWEST, WRK2_MAX_LATENCY, 2)
    lo = (20 * histogram.counts_len) // 100
    hi = lo + (30 * histogram.counts_len) // 100
    fill_hist_counts(histogram, hi, lo)
    encoded = histogram.encode()
    # decode and add to self 1000 times
    t0 = datetime.datetime.now()
    iterations = 1000
    while iterations:
        histogram.decode_and_add(encoded)
        iterations -= 1
    print(datetime.datetime.now() - t0)
def test_hist_codec_partial():
    """Round-trip a partially-filled histogram and verify counter placement.

    Fills the first half of the buckets with known counts, encodes, adds the
    blob into an empty histogram, then checks the first half matches and the
    untouched second half stayed at zero.
    """
    histogram = HdrHistogram(LOWEST, WRK2_MAX_LATENCY, SIGNIFICANT)
    partial_histogram = HdrHistogram(LOWEST, WRK2_MAX_LATENCY, SIGNIFICANT)

    # put some known numbers in the first half buckets
    # fix: was counts_len (ALL buckets), contradicting the comment above and
    # making the zero-check below (start=half_count + 1) span an empty range
    half_count = partial_histogram.counts_len // 2
    fill_hist_counts(partial_histogram, half_count)
    encoded = partial_histogram.encode()
    histogram.decode_and_add(encoded)

    # the filled half must be identical to the original...
    check_hist_counts(histogram, half_count, multiplier=1)
    # ...and the remaining buckets must still be all zero
    check_hist_counts(histogram, histogram.counts_len,
                      start=half_count + 1, multiplier=0)
def check_hist_codec_b64(word_size, b64_wrap):
    """Round-trip encode/decode checks for a given word size and b64 wrapping.

    First round-trips an all-zero histogram (counters must stay zero), then
    fills every bucket and round-trips again (every counter must double).
    """
    hist = HdrHistogram(LOWEST, WRK2_MAX_LATENCY, SIGNIFICANT,
                        b64_wrap=b64_wrap, word_size=word_size)
    # encode with all-zero counters and add the blob back to itself;
    # the counters must remain zero
    hist.decode_and_add(hist.encode())
    check_hist_counts(hist, hist.counts_len, multiplier=0)
    # fill every bucket and round-trip again; each count must double
    fill_hist_counts(hist, hist.counts_len)
    hist.decode_and_add(hist.encode())
    check_hist_counts(hist, hist.counts_len, multiplier=2)
def test_hist_codec_partial():
    """Round-trip a partially-filled histogram and verify counter placement.

    Fills the first half of the buckets with known counts, encodes, adds the
    blob into an empty histogram, then checks the first half matches and the
    untouched second half stayed at zero.
    """
    histogram = HdrHistogram(LOWEST, WRK2_MAX_LATENCY, SIGNIFICANT)
    partial_histogram = HdrHistogram(LOWEST, WRK2_MAX_LATENCY, SIGNIFICANT)

    # put some known numbers in the first half buckets
    # fix: was counts_len (ALL buckets), contradicting the comment above and
    # making the zero-check below (start=half_count + 1) span an empty range
    half_count = partial_histogram.counts_len // 2
    fill_hist_counts(partial_histogram, half_count)
    encoded = partial_histogram.encode()
    histogram.decode_and_add(encoded)

    # the filled half must be identical to the original...
    check_hist_counts(histogram, half_count, multiplier=1)
    # ...and the remaining buckets must still be all zero
    check_hist_counts(histogram, histogram.counts_len,
                      start=half_count + 1, multiplier=0)
def check_hist_codec_b64(word_size, b64_wrap):
    """Round-trip encode/decode checks for a given word size and b64 wrapping.

    Verifies that adding an encoded blob back into its source histogram
    behaves additively: zero counters stay zero, filled counters double.
    """
    histogram = HdrHistogram(LOWEST, WRK2_MAX_LATENCY, SIGNIFICANT,
                             b64_wrap=b64_wrap, word_size=word_size)
    total = histogram.counts_len
    # phase 1: all-zero round-trip — counters should remain zero
    empty_blob = histogram.encode()
    histogram.decode_and_add(empty_blob)
    check_hist_counts(histogram, total, multiplier=0)
    # phase 2: fill every bucket, round-trip — counters should double
    fill_hist_counts(histogram, total)
    filled_blob = histogram.encode()
    histogram.decode_and_add(filled_blob)
    check_hist_counts(histogram, total, multiplier=2)
def consolidate_results(results):
    """Consolidate wrk2 results from multiple clients into one summary dict.

    results: list of per-client dicts, each holding a 'results' sub-dict
    with numeric HTTP counters and, optionally, an encoded 'latency_stats'
    HdrHistogram blob.

    Returns a dict keyed by metric name; when latency data is present,
    'latency_stats' is a sorted list of [percentile, value] pairs. Logs a
    warning when some clients are missing latency data.
    """
    err_flag = False
    all_res = {'tool': 'wrk2'}
    total_count = len(results)
    if not total_count:
        # nothing to consolidate
        return all_res
    for key in [
        'http_rps', 'http_total_req', 'http_sock_err',
        'http_sock_timeout', 'http_throughput_kbytes'
    ]:
        all_res[key] = 0
        for item in results:
            # clients may not report every counter; missing counts as 0
            all_res[key] += item['results'].get(key, 0)
        all_res[key] = int(all_res[key])

    if 'latency_stats' in results[0]['results']:
        all_res['latency_stats'] = []
        # 1 usec .. 24 hours range, 2 significant digits
        histogram = HdrHistogram(1, 24 * 3600 * 1000 * 1000, 2)
        for item in results:
            if 'latency_stats' in item['results']:
                histogram.decode_and_add(item['results']['latency_stats'])
            else:
                err_flag = True
        perc_list = [50, 75, 90, 99, 99.9, 99.99, 99.999]
        latency_dict = histogram.get_percentile_to_value_dict(perc_list)
        # fix: items() works on both Python 2 and 3 (iteritems() is Py2-only)
        for key, value in latency_dict.items():
            all_res['latency_stats'].append([key, value])
        all_res['latency_stats'].sort()
    if err_flag:
        LOG.warning(
            'Unable to find latency_stats from the result dictionary, this '
            'may indicate that the test application on VM exited abnormally.'
        )
    return all_res
def consolidate_results(results):
    """Consolidate fio results from multiple clients into one summary dict.

    results: list of per-client dicts, each holding a 'results' sub-dict
    with numeric I/O counters, a 'tool' name, and optionally encoded
    'read_hist'/'write_hist' HdrHistogram blobs (completion latency).

    Returns a dict of summed counters (only non-zero sums are kept); each
    present histogram is consolidated into a sorted list of
    [percentile, value] pairs.
    """
    total_count = len(results)
    if not total_count:
        # nothing to consolidate
        return {'tool': 'fio'}
    all_res = {}
    for key in [
        'read_iops', 'read_bw', 'write_iops', 'write_bw',
        'read_runtime_ms', 'write_runtime_ms',
        'read_KB', 'write_KB'
    ]:
        # clients may not report every counter; missing counts as 0
        total = sum(item['results'].get(key, 0) for item in results)
        if total:
            # zero totals are dropped from the summary
            all_res[key] = int(total)
    all_res['tool'] = results[0]['results']['tool']

    perc_list = [50, 75, 90, 99, 99.9, 99.99, 99.999]
    clat_list = []
    if 'read_hist' in results[0]['results']:
        clat_list.append('read_hist')
    if 'write_hist' in results[0]['results']:
        clat_list.append('write_hist')
    for clat in clat_list:
        all_res[clat] = []
        # 1 msec .. 5 hours range, 3 significant digits
        histogram = HdrHistogram(1, 5 * 3600 * 1000, 3)
        for item in results:
            histogram.decode_and_add(item['results'][clat])
        latency_dict = histogram.get_percentile_to_value_dict(perc_list)
        # fix: items() works on both Python 2 and 3 (iteritems() is Py2-only)
        for key, value in latency_dict.items():
            all_res[clat].append([key, value])
        all_res[clat].sort()
    return all_res