Example #1
 def calculate_report(self, parsed):
     """generates calculations"""
     start_log = dates.max_utc_time()
     last_log = dates.min_utc_time()
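     # inverted sentinels: any real timestamp seen in the loop below will
     # replace them through the min/max comparisons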
     # make this its own method
     node_info_agg = []
     for node, events in parsed["nodes"].items():
         node_end_time = events.get("end")
         before_time = parsed["before_time"]
         if (before_time != dates.max_utc_time()
                 and node_end_time > before_time):
             node_end_time = before_time
         if node_end_time > last_log:
             last_log = node_end_time
         node_start_time = events.get("start")
         after_time = parsed["after_time"]
         if (after_time != dates.min_utc_time()
                 and node_start_time < after_time):
             node_start_time = after_time
         if node_start_time < start_log:
             start_log = node_start_time
         log_duration = (node_end_time -
                         node_start_time).total_seconds() * 1000
         first_node_evict = dates.max_utc_time()
         last_node_evict = dates.min_utc_time()
         for info in events.get("evictions"):
             for value in info.values():
                 if value.time_stamp > last_node_evict:
                     last_node_evict = value.time_stamp
                 if value.time_stamp < first_node_evict:
                     first_node_evict = value.time_stamp
         if log_duration < 0:
             log_duration = 0
         node_info_agg.append(
             create_report_block(
                 first_node_evict,
                 last_node_evict,
                 events.get("evictions"),
                 log_duration,
                 node,
             ))
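     # Python's sort is stable, so three chained passes give a multi-key
     # order: eviction frequency is the primary key (the last sort applied),
     # average eviction duration descending breaks ties, then name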
     node_info_agg = sorted(node_info_agg, key=attrgetter("name"))
     node_info_agg = sorted(node_info_agg,
                            key=attrgetter("avg_evict_duration"),
                            reverse=True)
     node_info_agg = sorted(node_info_agg,
                            key=functools.cmp_to_key(sort_evict_freq))
     return OrderedDict([
         ("start_log", start_log),
         ("last_log", last_log),
         ("node_info", node_info_agg),
     ])
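
A minimal sketch of the input this method expects, inferred from the lookups above (field names come from the code; the eviction entries are assumed to be objects carrying a time_stamp datetime attribute, since the inner loop reads value.time_stamp):

    from datetime import datetime, timezone

    parsed = {
        # max/min sentinels mean "no cutoff" for the respective bound
        "before_time": dates.max_utc_time(),
        "after_time": dates.min_utc_time(),
        "nodes": {
            "node1": {
                "start": datetime(2020, 1, 10, 11, 0, tzinfo=timezone.utc),
                "end": datetime(2020, 1, 10, 13, 0, tzinfo=timezone.utc),
                # list of dicts mapping an id to an eviction event
                "evictions": [],
            },
        },
    }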
Example #2
 def generate(self, parsed):
     """generates a time series report for a tarball"""
     table = []
     table.append("")
     table.append("filter cache evictions by hour")
     table.append("------------------------------")
     events_by_datetime = OrderedDefaultDict(list)
     start = dates.max_utc_time()
     end = dates.min_utc_time()
     for node, events in parsed["nodes"].items():
         for info in events.get("evictions"):
             # put into structure we can use for bucketize
             for value in info.values():
                 if value.time_stamp > end:
                     end = value.time_stamp
                 if value.time_stamp < start:
                     start = value.time_stamp
                 events_by_datetime[value.time_stamp].append(value)
     buckets = sorted(
         util.bucketize(events_by_datetime, start, end, 3600).items(),
         key=lambda t: t[0],
     )
     maxval = len(max(buckets, key=lambda t: len(t[1]))[1])
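     # maxval is the largest bucket's event count: it fixes the column
     # width for right-aligning the counts and the 100% mark for the bars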
     for time, matches in buckets:
         pad = ""
         for x in range(len(str(maxval)) - len(str(len(matches)))):
             pad += " "
         table.append("%s %s %s" % (
             time.strftime("%Y-%m-%d %H:%M:%S") + pad,
             len(matches),
             util.textbar(maxval, len(matches)),
         ))
     return "\n".join(table)
Example #3
import re

def grep_date(log_string):
    """gets just the date from the log"""
    # pylint: disable=line-too-long
    match = re.search(
        rb' *(?P<level>[A-Z]*) *\[(?P<thread_name>[^\]]*?)[:_-]?(?P<thread_id>[0-9]*)\] (?P<date>.{10} .{12})*',
        log_string)
    # the trailing * makes the date group optional, so guard against None
    if match and match.group("date"):
        date_value = match.group("date").decode('ascii')
        date_parser = dates.LogDateFormatParser()
        return date_parser.parse_timestamp(date_value)
    return dates.min_utc_time()
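
The named groups line up against the usual "LEVEL [thread:id] yyyy-mm-dd hh:mm:ss,SSS ..." line shape; a standalone check of just the regex (the sample line is illustrative):

    import re

    pattern = rb' *(?P<level>[A-Z]*) *\[(?P<thread_name>[^\]]*?)[:_-]?(?P<thread_id>[0-9]*)\] (?P<date>.{10} .{12})*'
    line = b"INFO  [ReadStage-1] 2020-01-10 12:34:56,789 NoSpamLogger.java:94 - message"
    match = re.search(pattern, line)
    print(match.group("level"))  # b'INFO'
    print(match.group("date"))   # b'2020-01-10 12:34:56,789'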
Example #4
import os

def log_range(file_path):
    """gets timestamp of first log and last log"""
    with open(file_path, "rb") as file_handle:
        first = file_handle.readline()  # Read the first line.
        if not first:  # empty file: return the inverted sentinel pair
            return dates.max_utc_time(), dates.min_utc_time()
        check = file_handle.read(1)
        if check == b'':  # single-line file: the first line is also the last
            return grep_date(first), grep_date(first)
        file_handle.seek(-2, os.SEEK_END)  # Jump to the second last byte.
        while file_handle.read(1) != b"\n":  # Until EOL is found...
            file_handle.seek(
                -2, os.SEEK_CUR)  # ...jump back the read byte plus one more.
        last = file_handle.readline()  # Read last line.
    return grep_date(first), grep_date(last)
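
The inverted sentinels returned for empty files make these results safe to fold with min/max across many logs; a sketch, assuming a hypothetical logs/*.log layout and the project's dates module:

    import glob

    start, end = dates.max_utc_time(), dates.min_utc_time()
    for path in glob.glob("logs/*.log"):  # hypothetical layout
        first, last = log_range(path)
        start = min(start, first)
        end = max(end, last)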
Example #5
 def __init__(self, first_evict, last_evict, name):
     self.name = name
     min_utc_time = dates.min_utc_time()
     max_utc_time = dates.max_utc_time()
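     # sentinels that never moved mean no evictions were recorded: collapse
     # both ends to the same value so evict_range below computes to 0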
     if last_evict == min_utc_time:
         first_evict = min_utc_time
         last_evict = min_utc_time
     if first_evict == max_utc_time:
         first_evict = min_utc_time
         last_evict = min_utc_time
     self.evict_range = (last_evict - first_evict).total_seconds() * 1000
     self.log_duration = 0
     self.byte_limit = 0
     self.last_byte_limit = 0
     self.item_limit = 0
     self.last_item_limit = 0
     self.avg_evict_duration = 0.0
     self.avg_evict_freq = 0.0