def analyze(self, keys):
    """Build the List statistics table, one row per key-name pattern."""
    # TODO: preferred-encoding suggestion is not implemented yet.
    headers = [
        'Match', "Count", "Avg Count", "Min Count", "Max Count", "Stdev Count",
        "Value mem", "Real", "Ratio", "System", "Encoding", "Total",
    ]
    rows = []
    for pattern, entries in keys.items():
        agg = ListAggregator((ListStatEntry(name, self.redis) for name in entries), len(entries))
        # Never divide by zero when no value bytes were counted.
        used = agg.valueUsedBytes if agg.valueUsedBytes > 0 else 1
        rows.append([
            pattern,
            len(entries),
            agg.fieldAvgCount,
            agg.fieldMinCount,
            agg.fieldMaxCount,
            agg.fieldStdev,
            agg.valueUsedBytes,
            agg.valueAlignedBytes,
            agg.valueAlignedBytes / used,
            agg.system,
            agg.encoding,
            agg.valueAlignedBytes + agg.system,
        ])
    # Largest ratio (column 8) first, then the aggregate footer row.
    rows.sort(key=lambda row: row[8], reverse=True)
    rows.append(make_total_row(rows, ['Total:', sum, 0, 0, 0, 0, sum, sum, 0, sum, '', sum]))
    return ["List stat", {'headers': headers, 'data': rows}]
def analyze(self, keys):
    """Build the Set statistics table, one row per key-name pattern."""
    # TODO: preferred-encoding suggestion is not implemented yet.
    table = {
        "headers": ["Match", "Count", "Avg Count", "Value mem", "Real", "Ratio", "System*", "Encoding", "Total"],
        "data": [],
    }
    for pattern, entries in keys.items():
        agg = SetAggregator((SetStatEntry(name, self.redis) for name in entries), len(entries))
        # Never divide by zero when no value bytes were counted.
        denominator = agg.valueUsedBytes if agg.valueUsedBytes > 0 else 1
        table["data"].append([
            pattern,
            len(entries),
            agg.fieldAvgCount,
            agg.valueUsedBytes,
            agg.valueAlignedBytes,
            agg.valueAlignedBytes / denominator,
            agg.system,
            agg.encoding,
            agg.total,
        ])
    # Largest total (column 8) first, then the aggregate footer row.
    table["data"].sort(key=lambda row: row[8], reverse=True)
    table["data"].append(make_total_row(table["data"], ["Total:", sum, 0, sum, sum, 0, sum, "", sum]))
    return ["SET stat", table]
def analyze(self, keys, total=0):
    """Compute per-pattern key-name statistics (sizes, encoding, min/max/avg).

    :param keys: mapping of key pattern -> list of key-info dicts (each with a "name").
    :param total: total number of keys, used only to size the progress bar.
    :return: ["key stats", table] where table has "headers" and "data" sorted by count.
    """
    key_stat = {
        "headers": ["Match", "Count", "Useful", "Real", "Ratio", "Encoding", "Min", "Max", "Avg"],
        "data": [],
    }
    progress = tqdm(total=total, mininterval=1, desc="Processing keys", leave=False)
    for pattern, data in keys.items():
        total_elements = len(data)
        if total_elements == 0:
            # Empty pattern: min()/mean() below would raise on an empty stream.
            continue
        # tee lets several aggregates consume the same lazy StringEntry stream.
        used_bytes_iter, aligned_iter, encoding_iter = tee(
            progress_iterator((StringEntry(value=x["name"]) for x in data), progress), 3
        )
        aligned = sum(obj.aligned for obj in aligned_iter)
        used_bytes_generator = (obj.useful_bytes for obj in used_bytes_iter)
        useful_iter, min_iter, max_iter, mean_iter = tee(used_bytes_generator, 4)
        prefered_encoding = pref_encoding((obj.encoding for obj in encoding_iter), redis_encoding_id_to_str)
        min_value = min(min_iter)
        if total_elements < 2:
            # A single sample: its mean equals the sample itself.
            avg = min_value
        else:
            avg = statistics.mean(mean_iter)
        used_user = sum(useful_iter)
        stat_entry = [
            pattern,
            total_elements,
            used_user,
            aligned,
            # FIX: guard the ratio — used_user can be 0, avoid ZeroDivisionError
            # (same guard the other analyzers in this file already use).
            aligned / (used_user if used_user > 0 else 1),
            prefered_encoding,
            min_value,
            max(max_iter),
            avg,
        ]
        key_stat["data"].append(stat_entry)
    key_stat["data"].sort(key=lambda x: x[1], reverse=True)
    key_stat["data"].append(make_total_row(key_stat["data"], ["Total:", sum, sum, sum, 0, "", 0, 0, 0]))
    progress.close()
    return ["key stats", key_stat]
def analyze(self, keys):
    """Build the String value statistics table, one row per key-name pattern.

    :param keys: mapping of key pattern -> list of key-info dicts.
    :return: ["String value stat", table] with "headers" and "data" sorted by count.
    """
    key_stat = {
        'headers': ['Match', "Count", "Useful", "Free", "Real", "Ratio", "Encoding", "Min", "Max", "Avg"],
        'data': []
    }
    for pattern, data in keys.items():
        used_bytes = []
        free_bytes = []
        aligned_bytes = []
        encodings = []
        for key_info in data:
            try:
                with RealStringEntry(redis=self.redis, info=key_info) as stat:
                    used_bytes.append(stat.useful_bytes)
                    free_bytes.append(stat.free_bytes)
                    aligned_bytes.append(stat.aligned)
                    encodings.append(stat.encoding)
            except RedisError as e:
                # This code works in real time so a key may be deleted and this code fail
                self.logger.warning(repr(e))
        # FIX: if every key of this pattern failed/vanished, min()/mean() below
        # would raise on an empty list — keep a single zero sample instead
        # (same fallback the progress-bar variant of this analyzer uses).
        if not used_bytes:
            used_bytes = [0]
        total_elements = len(used_bytes)
        used_user = sum(used_bytes)
        free_user = sum(free_bytes)
        aligned = sum(aligned_bytes)
        preferred_encoding = pref_encoding(encodings, redis_encoding_id_to_str)
        min_bytes = min(used_bytes)
        mean = statistics.mean(used_bytes) if total_elements > 1 else min_bytes
        stat_entry = [
            pattern,
            total_elements,
            used_user,
            free_user,
            aligned,
            # Guard: avoid ZeroDivisionError when nothing useful was counted.
            aligned / (used_user if used_user > 0 else 1),
            preferred_encoding,
            min_bytes,
            max(used_bytes),
            mean,
        ]
        key_stat['data'].append(stat_entry)
    key_stat['data'].sort(key=lambda e: e[1], reverse=True)
    key_stat['data'].append(make_total_row(key_stat['data'], ['Total:', sum, sum, 0, sum, 0, '', 0, 0, 0]))
    return [
        "String value stat", key_stat
    ]
def analyze(self, keys, total=0):
    """Build the Hash statistics table (including TTL columns), one row per pattern."""
    headers = [
        'Match', "Count", "Avg field count", "Key mem", "Real", "Ratio",
        "Value mem", "Real", "Ratio", "System", "Encoding", "Total mem",
        "Total aligned", "TTL Min", "TTL Max", "TTL Avg."
    ]
    rows = []
    progress = tqdm(total=total, mininterval=1, desc="Processing Hash patterns", leave=False)
    for pattern, entries in keys.items():
        agg = HashAggregator(
            progress_iterator((HashStatEntry(name, self.redis) for name in entries), progress),
            len(entries))
        # Never divide by zero when no bytes were counted.
        field_used = agg.fieldUsedBytes if agg.fieldUsedBytes > 0 else 1
        value_used = agg.valueUsedBytes if agg.valueUsedBytes > 0 else 1
        rows.append([
            pattern,
            len(entries),
            agg.fieldAvgCount,
            agg.fieldUsedBytes,
            agg.fieldAlignedBytes,
            agg.fieldAlignedBytes / field_used,
            agg.valueUsedBytes,
            agg.valueAlignedBytes,
            agg.valueAlignedBytes / value_used,
            agg.system,
            agg.encoding,
            agg.fieldUsedBytes + agg.valueUsedBytes,
            agg.fieldAlignedBytes + agg.valueAlignedBytes + agg.system,
            agg.ttlMin,
            agg.ttlMax,
            agg.ttlAvg,
        ])
    # Heaviest patterns first (total aligned bytes, column 12).
    rows.sort(key=lambda row: row[12], reverse=True)
    rows.append(make_total_row(rows, [
        'Total:', sum, 0, sum, sum, 0, sum, sum, 0, sum, '', sum, sum, min, max, math.nan
    ]))
    progress.close()
    return {'headers': headers, 'data': rows}
def analyze(self, keys, total=0):
    """Build the Hash statistics table, one row per key-name pattern."""
    headers = [
        'Match', "Count", "Avg field count", "Key mem", "Real", "Ratio",
        "Value mem", "Real", "Ratio", "System", "Encoding", "Total mem",
        "Total aligned"
    ]
    rows = []
    progress = tqdm(total=total, mininterval=1, desc="Processing Hash patterns", leave=False)
    for pattern, entries in keys.items():
        agg = HashAggregator(
            progress_iterator((HashStatEntry(name, self.redis) for name in entries), progress),
            len(entries))
        # Never divide by zero when no bytes were counted.
        field_used = agg.fieldUsedBytes if agg.fieldUsedBytes > 0 else 1
        value_used = agg.valueUsedBytes if agg.valueUsedBytes > 0 else 1
        rows.append([
            pattern,
            len(entries),
            agg.fieldAvgCount,
            agg.fieldUsedBytes,
            agg.fieldAlignedBytes,
            agg.fieldAlignedBytes / field_used,
            agg.valueUsedBytes,
            agg.valueAlignedBytes,
            agg.valueAlignedBytes / value_used,
            agg.system,
            agg.encoding,
            agg.fieldUsedBytes + agg.valueUsedBytes,
            agg.fieldAlignedBytes + agg.valueAlignedBytes + agg.system,
        ])
    # Heaviest patterns first (total aligned bytes, column 12).
    rows.sort(key=lambda row: row[12], reverse=True)
    rows.append(make_total_row(rows, ['Total:', sum, 0, sum, sum, 0, sum, sum, 0, sum, '', sum, sum]))
    progress.close()
    return ["Hash stat", {'headers': headers, 'data': rows}]
def analyze(self, keys, total=0):
    """Build the List statistics table, one row per key-name pattern.

    :param keys: mapping of key pattern -> list of key names.
    :param total: total number of keys, used only to size the progress bar.
    :return: ["List stat", table] where table has "headers" and sorted "data".
    """
    key_stat = {
        'headers': [
            'Match', "Count", "Avg Count", "Min Count", "Max Count",
            "Stdev Count", "Value mem", "Real", "Ratio", "System", "Encoding",
            "Total"
        ],
        'data': []
    }
    progress = tqdm(total=total, mininterval=1, desc="Processing List patterns", leave=False)
    for pattern, data in keys.items():
        agg = ListAggregator(
            progress_iterator((ListStatEntry(x, self.redis) for x in data), progress),
            len(data))
        stat_entry = [
            pattern,
            len(data),
            agg.fieldAvgCount,
            agg.fieldMinCount,
            agg.fieldMaxCount,
            agg.fieldStdev,
            agg.valueUsedBytes,
            agg.valueAlignedBytes,
            # Guard: avoid ZeroDivisionError when no value bytes were counted.
            agg.valueAlignedBytes / (agg.valueUsedBytes if agg.valueUsedBytes > 0 else 1),
            agg.system,
            agg.encoding,
            agg.valueAlignedBytes + agg.system
        ]
        key_stat['data'].append(stat_entry)
        # FIX: dropped the stray per-pattern progress.update() — the bar is
        # already advanced once per key inside progress_iterator, so the extra
        # call over-counted progress by one per pattern (the sibling Hash
        # analyzers have no such call).
    key_stat['data'].sort(key=lambda x: x[8], reverse=True)
    key_stat['data'].append(
        make_total_row(
            key_stat['data'],
            ['Total:', sum, 0, 0, 0, 0, sum, sum, 0, sum, '', sum]))
    progress.close()
    return ["List stat", key_stat]
def analyze(self, keys):
    """Compute per-pattern key-name statistics (sizes, encoding, min/max/avg).

    :param keys: mapping of key pattern -> list of key-info dicts (each with a "name").
    :return: ["key stats", table] with "headers" and "data" sorted by key count.
    """
    key_stat = {
        'headers': ['Match', "Count", "Useful", "Real", "Ratio", "Encoding", "Min", "Max", "Avg"],
        'data': []
    }
    for pattern, data in keys.items():
        total_elements = len(data)
        # FIX: skip empty patterns — min()/mean() below raise on an empty
        # stream (the progress-bar variant of this analyzer already guards this).
        if total_elements == 0:
            continue
        # tee lets several aggregates consume the same lazy StringEntry stream.
        used_bytes_iter, aligned_iter, encoding_iter = tee(
            (StringEntry(value=x["name"]) for x in data), 3)
        aligned = sum(obj.aligned for obj in aligned_iter)
        used_bytes_generator = (obj.useful_bytes for obj in used_bytes_iter)
        useful_iter, min_iter, max_iter, mean_iter = tee(used_bytes_generator, 4)
        prefered_encoding = pref_encoding((obj.encoding for obj in encoding_iter), redis_encoding_id_to_str)
        min_value = min(min_iter)
        if total_elements < 2:
            # A single sample: its mean equals the sample itself.
            avg = min_value
        else:
            avg = statistics.mean(mean_iter)
        used_user = sum(useful_iter)
        stat_entry = [
            pattern,
            total_elements,
            used_user,
            aligned,
            # FIX: guard the ratio — used_user can be 0, avoid ZeroDivisionError.
            aligned / (used_user if used_user > 0 else 1),
            prefered_encoding,
            min_value,
            max(max_iter),
            avg,
        ]
        key_stat['data'].append(stat_entry)
    key_stat['data'].sort(key=lambda x: x[1], reverse=True)
    key_stat['data'].append(make_total_row(key_stat['data'], ['Total:', sum, sum, sum, 0, '', 0, 0, 0]))
    return [
        "key stats", key_stat
    ]
def analyze(self, keys, total=0):
    """Compute per-pattern key-name statistics (sizes, encoding, min/max/avg).

    :param keys: mapping of key pattern -> list of key-info dicts (each with a "name").
    :param total: total number of keys, used only to size the progress bar.
    :return: ["key stats", table] with "headers" and "data" sorted by key count.
    """
    key_stat = {
        'headers': [
            'Match', "Count", "Useful", "Real", "Ratio", "Encoding", "Min",
            "Max", "Avg"
        ],
        'data': []
    }
    progress = tqdm(total=total, mininterval=1, desc="Processing keys", leave=False)
    for pattern, data in keys.items():
        total_elements = len(data)
        if total_elements == 0:
            # Empty pattern: min()/mean() below would raise on an empty stream.
            continue
        # tee lets several aggregates consume the same lazy StringEntry stream.
        used_bytes_iter, aligned_iter, encoding_iter = tee(
            progress_iterator((StringEntry(value=x["name"]) for x in data), progress), 3)
        aligned = sum(obj.aligned for obj in aligned_iter)
        used_bytes_generator = (obj.useful_bytes for obj in used_bytes_iter)
        useful_iter, min_iter, max_iter, mean_iter = tee(
            used_bytes_generator, 4)
        prefered_encoding = pref_encoding(
            (obj.encoding for obj in encoding_iter), redis_encoding_id_to_str)
        min_value = min(min_iter)
        if total_elements < 2:
            # A single sample: its mean equals the sample itself.
            avg = min_value
        else:
            avg = statistics.mean(mean_iter)
        used_user = sum(useful_iter)
        stat_entry = [
            pattern,
            total_elements,
            used_user,
            aligned,
            # FIX: guard the ratio — used_user can be 0, avoid ZeroDivisionError
            # (same guard the other analyzers in this file already use).
            aligned / (used_user if used_user > 0 else 1),
            prefered_encoding,
            min_value,
            max(max_iter),
            avg,
        ]
        key_stat['data'].append(stat_entry)
    key_stat['data'].sort(key=lambda x: x[1], reverse=True)
    key_stat['data'].append(
        make_total_row(key_stat['data'], ['Total:', sum, sum, sum, 0, '', 0, 0, 0]))
    progress.close()
    return ["key stats", key_stat]
def analyze(self, keys, total=0):
    """Build the String value statistics table, one row per key-name pattern."""
    report = {
        'headers': ['Match', "Count", "Useful", "Free", "Real", "Ratio", "Encoding", "Min", "Max", "Avg"],
        'data': []
    }
    progress = tqdm(total=total, mininterval=1, desc="Processing String patterns", leave=False)
    # DEBUG-based sizing may be rejected by the server; fall back once it fails.
    use_debug_command = True
    for pattern, entries in keys.items():
        used, free, aligned_samples, encodings = [], [], [], []
        for key_info in progress_iterator(entries, progress):
            try:
                with RealStringEntry(redis=self.redis, info=key_info, use_debug=use_debug_command) as stat:
                    used.append(stat.useful_bytes)
                    free.append(stat.free_bytes)
                    aligned_samples.append(stat.aligned)
                    encodings.append(stat.encoding)
            except RedisError as e:
                # Keys can expire or be deleted while we scan; log and move on.
                error_string = repr(e)
                self.logger.warning(error_string)
                if 'DEBUG' in error_string:
                    use_debug_command = False
        if not used:
            # Every key of the pattern vanished: keep one zero sample so the
            # min()/mean() calls below stay valid.
            used = [0]
        count = len(used)
        used_total = sum(used)
        smallest = min(used)
        average = statistics.mean(used) if count > 1 else smallest
        aligned_total = sum(aligned_samples)
        report['data'].append([
            pattern,
            count,
            used_total,
            sum(free),
            aligned_total,
            aligned_total / (used_total if used_total > 0 else 1),
            pref_encoding(encodings, redis_encoding_id_to_str),
            smallest,
            max(used),
            average,
        ])
    report['data'].sort(key=lambda row: row[1], reverse=True)
    report['data'].append(make_total_row(report['data'], ['Total:', sum, sum, 0, sum, 0, '', 0, 0, 0]))
    progress.close()
    return report
def analyze(self, keys, total=0):
    """Build the String value statistics table, one row per key-name pattern.

    :param keys: mapping of key pattern -> list of key-info dicts.
    :param total: total number of keys, used only to size the progress bar.
    :return: ["String value stat", table] with "headers" and "data" sorted by count.
    """
    key_stat = {
        'headers': ['Match', "Count", "Useful", "Free", "Real", "Ratio", "Encoding", "Min", "Max", "Avg"],
        'data': []
    }
    progress = tqdm(total=total, mininterval=1, desc="Processing String patterns", leave=False)
    # DEBUG-based sizing may be rejected by the server; fall back once it fails.
    use_debug_command = True
    for pattern, data in keys.items():
        used_bytes = []
        free_bytes = []
        aligned_bytes = []
        encodings = []
        for key_info in progress_iterator(data, progress):
            try:
                with RealStringEntry(redis=self.redis, info=key_info, use_debug=use_debug_command) as stat:
                    used_bytes.append(stat.useful_bytes)
                    free_bytes.append(stat.free_bytes)
                    aligned_bytes.append(stat.aligned)
                    encodings.append(stat.encoding)
            except RedisError as e:
                # This code works in real time so a key may be deleted and this code fail
                error_string = repr(e)
                self.logger.warning(error_string)
                if 'DEBUG' in error_string:
                    use_debug_command = False
        # FIX: if every key of this pattern failed/vanished, min()/mean() below
        # would raise on an empty list — keep a single zero sample instead
        # (same fallback the sibling variant of this analyzer already uses).
        used_bytes = used_bytes if len(used_bytes) != 0 else [0]
        total_elements = len(used_bytes)
        used_user = sum(used_bytes)
        free_user = sum(free_bytes)
        aligned = sum(aligned_bytes)
        preferred_encoding = pref_encoding(encodings, redis_encoding_id_to_str)
        min_bytes = min(used_bytes)
        mean = statistics.mean(used_bytes) if total_elements > 1 else min_bytes
        stat_entry = [
            pattern,
            total_elements,
            used_user,
            free_user,
            aligned,
            # Guard: avoid ZeroDivisionError when nothing useful was counted.
            aligned / (used_user if used_user > 0 else 1),
            preferred_encoding,
            min_bytes,
            max(used_bytes),
            mean,
        ]
        key_stat['data'].append(stat_entry)
    key_stat['data'].sort(key=lambda e: e[1], reverse=True)
    key_stat['data'].append(make_total_row(key_stat['data'], ['Total:', sum, sum, 0, sum, 0, '', 0, 0, 0]))
    progress.close()
    return [
        "String value stat", key_stat
    ]