Example #1
    def __init__(self, all_obj, total):
        self.total_elements = total

        encode_iter, sys_iter, avg_iter, stdev_iter, min_iter, max_iter, value_used_iter, value_align_iter = \
            tee(all_obj, 8)

        self.encoding = pref_encoding([obj.encoding for obj in encode_iter], redis_encoding_id_to_str)
        self.system = sum(obj.system for obj in sys_iter)

        if total == 0:
            self.fieldAvgCount = 0
            self.fieldStdev = 0
            self.fieldMinCount = 0
            self.fieldMaxCount = 0
        elif total > 1:
            self.fieldAvgCount = statistics.mean(obj.count for obj in avg_iter)
            self.fieldStdev = statistics.stdev(obj.count for obj in stdev_iter)
            self.fieldMinCount = min((obj.count for obj in min_iter))
            self.fieldMaxCount = max((obj.count for obj in max_iter))
        else:
            self.fieldAvgCount = min((obj.count for obj in avg_iter))
            self.fieldStdev = 0
            self.fieldMinCount = self.fieldAvgCount
            self.fieldMaxCount = self.fieldAvgCount

        self.valueUsedBytes = sum(obj.valueUsedBytes for obj in value_used_iter)
        self.valueAlignedBytes = sum(obj.valueAlignedBytes for obj in value_align_iter)
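This constructor (and the variants in the examples below) fans a single lazy stream of entry objects out with itertools.tee, so each aggregate (sum, min, max, mean, stdev) can consume its own independent iterator exactly once. Note also that statistics.stdev needs at least two data points, which is presumably why the total > 1 branch exists. A minimal standalone sketch of the pattern, using made-up numbers instead of the project's entry objects:

import statistics
from itertools import tee

counts = iter([3, 7, 5, 9])  # stand-in for the lazy stream of obj.count values

# tee() returns independent iterators over the same underlying stream,
# buffering items internally so each aggregate can exhaust its own copy.
avg_it, stdev_it, min_it, max_it = tee(counts, 4)

stats = {
    "avg": statistics.mean(avg_it),
    "stdev": statistics.stdev(stdev_it),  # raises StatisticsError with < 2 points
    "min": min(min_it),
    "max": max(max_it),
}
print(stats)  # {'avg': 6, 'stdev': 2.58..., 'min': 3, 'max': 9}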
Example #2
    def __init__(self, all_obj, total):
        self.total_elements = total

        encode_iter, sys_iter, avg_iter, stdev_iter, min_iter, max_iter, value_used_iter, value_align_iter, ttl_iter = \
            tee(all_obj, 9)

        self.encoding = pref_encoding([obj.encoding for obj in encode_iter],
                                      redis_encoding_id_to_str)
        self.system = sum(obj.system for obj in sys_iter)

        if total == 0:
            self.fieldAvgCount = 0
            self.fieldStdev = 0
            self.fieldMinCount = 0
            self.fieldMaxCount = 0
        elif total > 1:
            self.fieldAvgCount = statistics.mean(obj.count for obj in avg_iter)
            self.fieldStdev = statistics.stdev(obj.count for obj in stdev_iter)
            self.fieldMinCount = min((obj.count for obj in min_iter))
            self.fieldMaxCount = max((obj.count for obj in max_iter))
        else:
            self.fieldAvgCount = min((obj.count for obj in avg_iter))
            self.fieldStdev = 0
            self.fieldMinCount = self.fieldAvgCount
            self.fieldMaxCount = self.fieldAvgCount

        self.valueUsedBytes = sum(obj.valueUsedBytes
                                  for obj in value_used_iter)
        self.valueAlignedBytes = sum(obj.valueAlignedBytes
                                     for obj in value_align_iter)

        ttls = [obj.ttl for obj in ttl_iter]
        self.ttlMin = min(ttls)
        self.ttlMax = max(ttls)
        self.ttlAvg = statistics.mean(ttls) if len(ttls) > 1 else min(ttls)
Example #3
    def __init__(self, all_obj, total):
        self.total_elements = total

        g00, g0, g1, g2, g3, v1, v2, ttl = tee(all_obj, 8)

        self.encoding = pref_encoding([obj.encoding for obj in g00],
                                      redis_encoding_id_to_str)
        self.system = sum(obj.system for obj in g0)
        self.fieldUsedBytes = sum(obj.fieldUsedBytes for obj in g1)
        self.fieldAlignedBytes = sum(obj.fieldAlignedBytes for obj in g2)

        if total == 0:
            self.fieldAvgCount = 0
        elif total > 1:
            self.fieldAvgCount = statistics.mean(obj.count for obj in g3)
        else:
            self.fieldAvgCount = min((obj.count for obj in g3))

        self.valueUsedBytes = sum(obj.valueUsedBytes for obj in v1)
        self.valueAlignedBytes = sum(obj.valueAlignedBytes for obj in v2)

        ttls = [obj.ttl for obj in ttl]
        self.ttlMin = min(ttls)
        self.ttlMax = max(ttls)
        self.ttlAvg = statistics.mean(ttls) if len(ttls) > 1 else min(ttls)
Example #4
    def analyze(self, keys, total=0):
        """

        :param keys:
        :param progress:
        :return:
        """
        key_stat = {
            "headers": ["Match", "Count", "Useful", "Real", "Ratio", "Encoding", "Min", "Max", "Avg"],
            "data": [],
        }

        progress = tqdm(total=total, mininterval=1, desc="Processing keys", leave=False)

        for pattern, data in keys.items():
            used_bytes_iter, aligned_iter, encoding_iter = tee(
                progress_iterator((StringEntry(value=x["name"]) for x in data), progress), 3
            )

            total_elements = len(data)
            if total_elements == 0:
                continue

            aligned = sum(obj.aligned for obj in aligned_iter)
            used_bytes_generator = (obj.useful_bytes for obj in used_bytes_iter)
            useful_iter, min_iter, max_iter, mean_iter = tee(used_bytes_generator, 4)

            prefered_encoding = pref_encoding((obj.encoding for obj in encoding_iter), redis_encoding_id_to_str)
            min_value = min(min_iter)
            if total_elements < 2:
                avg = min_value
            else:
                avg = statistics.mean(mean_iter)

            used_user = sum(useful_iter)

            stat_entry = [
                pattern,
                total_elements,
                used_user,
                aligned,
                aligned / used_user,
                prefered_encoding,
                min_value,
                max(max_iter),
                avg,
            ]
            key_stat["data"].append(stat_entry)

        key_stat["data"].sort(key=lambda x: x[1], reverse=True)
        key_stat["data"].append(make_total_row(key_stat["data"], ["Total:", sum, sum, sum, 0, "", 0, 0, 0]))

        progress.close()

        return ["key stats", key_stat]
Example #5
    def analyze(self, keys):
        key_stat = {
            'headers': ['Match', "Count", "Useful", "Free", "Real", "Ratio", "Encoding", "Min", "Max", "Avg"],
            'data': []
        }

        for pattern, data in keys.items():
            used_bytes = []
            free_bytes = []
            aligned_bytes = []
            encodings = []

            for key_info in data:
                try:
                    with RealStringEntry(redis=self.redis, info=key_info) as stat:
                        used_bytes.append(stat.useful_bytes)
                        free_bytes.append(stat.free_bytes)
                        aligned_bytes.append(stat.aligned)
                        encodings.append(stat.encoding)
                except RedisError as e:
                    # This code works against a live instance, so a key may be deleted and this call may fail
                    self.logger.warning(repr(e))

            total_elements = len(used_bytes)
            used_user = sum(used_bytes)
            free_user = sum(free_bytes)
            aligned = sum(aligned_bytes)
            preferred_encoding = pref_encoding(encodings, redis_encoding_id_to_str)

            min_bytes = min(used_bytes)
            mean = statistics.mean(used_bytes) if total_elements > 1 else min_bytes

            stat_entry = [
                pattern,
                total_elements,
                used_user,
                free_user,
                aligned,
                aligned / (used_user if used_user > 0 else 1),
                preferred_encoding,
                min_bytes,
                max(used_bytes),
                mean,
            ]
            key_stat['data'].append(stat_entry)

        key_stat['data'].sort(key=lambda e: e[1], reverse=True)
        key_stat['data'].append(make_total_row(key_stat['data'], ['Total:', sum, sum, 0, sum, 0, '', 0, 0, 0]))

        return [
            "String value stat",
            key_stat
        ]
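The try/except around RealStringEntry is there because the analysis runs against a live instance: a key returned by an earlier scan may expire or be deleted before it is inspected, and redis-py then raises a RedisError for commands such as DEBUG OBJECT. A minimal sketch of the same defensive loop (hypothetical key pattern and connection settings, plain debug_object instead of the project's RealStringEntry):

import logging
from redis import StrictRedis
from redis.exceptions import RedisError

logger = logging.getLogger(__name__)
redis = StrictRedis(host="localhost", port=6379)  # assumed local instance

for key in redis.scan_iter(match="user:*", count=1000):
    try:
        # DEBUG OBJECT fails with "no such key" if the key vanished
        # between the scan and this call.
        info = redis.debug_object(key)
        logger.info("%s uses %s serialized bytes", key, info.get("serializedlength"))
    except RedisError as e:
        # Mirror the analyzer: log a warning and keep going.
        logger.warning(repr(e))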
Example #6
    def __init__(self, all_obj, total):
        self.total_elements = total

        g00, g0, g3, v1, v2, v3 = tee(all_obj, 6)

        self.encoding = pref_encoding([obj.encoding for obj in g00], redis_encoding_id_to_str)
        self.system = sum(obj.system for obj in g0)
        if total > 1:
            self.fieldAvgCount = statistics.mean(obj.count for obj in g3)
        else:
            self.fieldAvgCount = min((obj.count for obj in g3))

        self.valueUsedBytes = sum(obj.valueUsedBytes for obj in v1)
        self.valueAlignedBytes = sum(obj.valueAlignedBytes for obj in v2)
        self.total = sum(obj.total for obj in v3)
Example #7
    def __init__(self, all_obj, total):
        self.total_elements = total

        g00, g0, g3, v1, v2, v3 = tee(all_obj, 6)

        self.encoding = pref_encoding([obj.encoding for obj in g00],
                                      redis_encoding_id_to_str)
        self.system = sum(obj.system for obj in g0)

        if total == 0:
            self.fieldAvgCount = 0
        elif total > 1:
            self.fieldAvgCount = statistics.mean(obj.count for obj in g3)
        else:
            self.fieldAvgCount = min((obj.count for obj in g3))

        self.valueUsedBytes = sum(obj.valueUsedBytes for obj in v1)
        self.valueAlignedBytes = sum(obj.valueAlignedBytes for obj in v2)
        self.total = sum(obj.total for obj in v3)
Example #8
    def analyze(self, keys):
        key_stat = {
            'headers': ['Match', "Count", "Useful", "Real", "Ratio", "Encoding", "Min", "Max", "Avg"],
            'data': []
        }

        for pattern, data in keys.items():
            used_bytes_iter, aligned_iter, encoding_iter = tee(
                    (StringEntry(value=x["name"]) for x in data), 3)

            total_elements = len(data)
            aligned = sum(obj.aligned for obj in aligned_iter)
            used_bytes_generator = (obj.useful_bytes for obj in used_bytes_iter)
            useful_iter, min_iter, max_iter, mean_iter = tee(used_bytes_generator, 4)

            prefered_encoding = pref_encoding((obj.encoding for obj in encoding_iter), redis_encoding_id_to_str)
            min_value = min(min_iter)
            if total_elements < 2:
                avg = min_value
            else:
                avg = statistics.mean(mean_iter)

            used_user = sum(useful_iter)

            stat_entry = [
                pattern, total_elements, used_user, aligned, aligned / used_user, prefered_encoding,
                min_value, max(max_iter), avg,
            ]
            key_stat['data'].append(stat_entry)

        key_stat['data'].sort(key=lambda x: x[1], reverse=True)
        key_stat['data'].append(make_total_row(key_stat['data'], ['Total:', sum, sum, sum, 0, '', 0, 0, 0]))

        return [
            "key stats",
            key_stat
        ]
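make_total_row is the analyzer's own helper; judging only from how it is called here, each entry in its second argument is either a callable that aggregates the corresponding column or a literal placed in the totals row as-is. A sketch written under that assumption (not the project's actual implementation):

def make_total_row(rows, spec):
    # Assumed behaviour, reconstructed from the call sites above:
    # callables aggregate their column, anything else is used verbatim.
    columns = list(zip(*rows))
    return [agg(columns[i]) if callable(agg) else agg for i, agg in enumerate(spec)]

rows = [
    ["user:*", 10, 500, 640],
    ["session:*", 4, 120, 256],
]
print(make_total_row(rows, ["Total:", sum, sum, sum]))  # ['Total:', 14, 620, 896]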
Example #9
    def analyze(self, keys, total=0):
        """

        :param keys:
        :param progress:
        :return:
        """
        key_stat = {
            'headers': [
                'Match', "Count", "Useful", "Real", "Ratio", "Encoding", "Min",
                "Max", "Avg"
            ],
            'data': []
        }

        progress = tqdm(total=total,
                        mininterval=1,
                        desc="Processing keys",
                        leave=False)

        for pattern, data in keys.items():
            used_bytes_iter, aligned_iter, encoding_iter = tee(
                progress_iterator((StringEntry(value=x["name"]) for x in data),
                                  progress), 3)

            total_elements = len(data)
            if total_elements == 0:
                continue

            aligned = sum(obj.aligned for obj in aligned_iter)
            used_bytes_generator = (obj.useful_bytes
                                    for obj in used_bytes_iter)
            useful_iter, min_iter, max_iter, mean_iter = tee(
                used_bytes_generator, 4)

            prefered_encoding = pref_encoding(
                (obj.encoding for obj in encoding_iter),
                redis_encoding_id_to_str)
            min_value = min(min_iter)
            if total_elements < 2:
                avg = min_value
            else:
                avg = statistics.mean(mean_iter)

            used_user = sum(useful_iter)

            stat_entry = [
                pattern,
                total_elements,
                used_user,
                aligned,
                aligned / used_user,
                prefered_encoding,
                min_value,
                max(max_iter),
                avg,
            ]
            key_stat['data'].append(stat_entry)

        key_stat['data'].sort(key=lambda x: x[1], reverse=True)
        key_stat['data'].append(
            make_total_row(key_stat['data'],
                           ['Total:', sum, sum, sum, 0, '', 0, 0, 0]))

        progress.close()

        return ["key stats", key_stat]
Example #10
    def analyze(self, keys, total=0):
        key_stat = {
            'headers': ['Match', "Count", "Useful", "Free", "Real", "Ratio", "Encoding", "Min", "Max", "Avg"],
            'data': []
        }

        progress = tqdm(total=total,
                        mininterval=1,
                        desc="Processing String patterns",
                        leave=False)

        use_debug_command = True
        for pattern, data in keys.items():
            used_bytes = []
            free_bytes = []
            aligned_bytes = []
            encodings = []

            for key_info in progress_iterator(data, progress):
                try:
                    with RealStringEntry(redis=self.redis, info=key_info, use_debug=use_debug_command) as stat:
                        used_bytes.append(stat.useful_bytes)
                        free_bytes.append(stat.free_bytes)
                        aligned_bytes.append(stat.aligned)
                        encodings.append(stat.encoding)
                except RedisError as e:
                    # This code works against a live instance, so a key may be deleted and this call may fail
                    error_string = repr(e)
                    self.logger.warning(error_string)
                    if 'DEBUG' in error_string:
                        use_debug_command = False

            used_bytes = used_bytes if len(used_bytes) != 0 else [0]
            total_elements = len(used_bytes)
            used_user = sum(used_bytes)
            free_user = sum(free_bytes)
            aligned = sum(aligned_bytes)
            preferred_encoding = pref_encoding(encodings, redis_encoding_id_to_str)

            min_bytes = min(used_bytes)
            mean = statistics.mean(used_bytes) if total_elements > 1 else min_bytes

            stat_entry = [
                pattern,
                total_elements,
                used_user,
                free_user,
                aligned,
                aligned / (used_user if used_user > 0 else 1),
                preferred_encoding,
                min_bytes,
                max(used_bytes),
                mean,
            ]
            key_stat['data'].append(stat_entry)

        key_stat['data'].sort(key=lambda e: e[1], reverse=True)
        key_stat['data'].append(make_total_row(key_stat['data'], ['Total:', sum, sum, 0, sum, 0, '', 0, 0, 0]))

        progress.close()

        return key_stat
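The use_debug_command flag above is switched off as soon as a RedisError mentioning DEBUG comes back, which happens on servers where the DEBUG command is disabled or renamed (common on managed Redis). The snippet does not show what RealStringEntry does with the flag; one plausible fallback (an assumption, not the project's code) is to switch from DEBUG OBJECT to MEMORY USAGE:

from redis import StrictRedis
from redis.exceptions import RedisError

redis = StrictRedis()
use_debug = True

def serialized_size(key):
    # Hedged sketch: prefer DEBUG OBJECT, fall back to MEMORY USAGE once
    # the server has refused DEBUG.
    global use_debug
    if use_debug:
        try:
            return redis.debug_object(key).get("serializedlength", 0)
        except RedisError as e:
            if "DEBUG" not in repr(e):
                raise
            use_debug = False  # remember the refusal for later keys
    return redis.memory_usage(key) or 0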
Example #11
    def analyze(self, keys, total=0):
        key_stat = {
            'headers': ['Match', "Count", "Useful", "Free", "Real", "Ratio", "Encoding", "Min", "Max", "Avg"],
            'data': []
        }

        progress = tqdm(total=total,
                        mininterval=1,
                        desc="Processing String patterns",
                        leave=False)

        use_debug_command = True
        for pattern, data in keys.items():
            used_bytes = []
            free_bytes = []
            aligned_bytes = []
            encodings = []

            for key_info in progress_iterator(data, progress):
                try:
                    with RealStringEntry(redis=self.redis, info=key_info, use_debug=use_debug_command) as stat:
                        used_bytes.append(stat.useful_bytes)
                        free_bytes.append(stat.free_bytes)
                        aligned_bytes.append(stat.aligned)
                        encodings.append(stat.encoding)
                except RedisError as e:
                    # This code works against a live instance, so a key may be deleted and this call may fail
                    error_string = repr(e)
                    self.logger.warning(error_string)
                    if 'DEBUG' in error_string:
                        use_debug_command = False

            total_elements = len(used_bytes)
            used_user = sum(used_bytes)
            free_user = sum(free_bytes)
            aligned = sum(aligned_bytes)
            preferred_encoding = pref_encoding(encodings, redis_encoding_id_to_str)

            min_bytes = min(used_bytes)
            mean = statistics.mean(used_bytes) if total_elements > 1 else min_bytes

            stat_entry = [
                pattern,
                total_elements,
                used_user,
                free_user,
                aligned,
                aligned / (used_user if used_user > 0 else 1),
                preferred_encoding,
                min_bytes,
                max(used_bytes),
                mean,
            ]
            key_stat['data'].append(stat_entry)

        key_stat['data'].sort(key=lambda e: e[1], reverse=True)
        key_stat['data'].append(make_total_row(key_stat['data'], ['Total:', sum, sum, 0, sum, 0, '', 0, 0, 0]))

        progress.close()

        return [
            "String value stat",
            key_stat
        ]