Example #1
def parse(args):
    """parse entry point, generates a report object
    from a tarball or series of files"""
    logs = diag.find_files(args, args.system_log_prefix)
    directory_path = os.getcwd() if args.diag_dir == "." else args.diag_dir
    print("from directory '%s':" % directory_path)
    node_stats = OrderedDict()
    after_time = dates.date_parse(args.after)
    before_time = dates.date_parse(args.before)
    for log in logs:
        start_log_time, last_log_time = diag.log_range(log)
        with diag.FileWithProgress(log) as log_file:
            raw_events = parser.read_system_log(log_file)
            item_ev_stats, bytes_ev_stats = calculate_eviction_stats(
                raw_events, after_time, before_time)
            node = util.extract_node_name(log, True)
            node_stats[node] = OrderedDict([
                ("evictions", (bytes_ev_stats, item_ev_stats)),
                ("start", start_log_time),
                ("end", last_log_time),
            ])
    return OrderedDict([
        ("nodes", node_stats),
        ("after_time", after_time),
        ("before_time", before_time),
    ])
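
Example #1 calls calculate_eviction_stats without showing it. The following is a minimal sketch of what it plausibly does, inferred from the inline version in Example #4 and the test in Example #5; _get_stats, ItemFCStats, and BytesFCStats are assumed to be the same helpers Example #4 uses, not a confirmed implementation.

def calculate_eviction_stats(raw_events, after_time, before_time):
    """sketch (not the source implementation): reduces dated filter_cache
    events inside the requested window to (item, bytes) eviction stats"""
    filter_cache_events = [
        event for event in raw_events
        if event.get("event_category", "") == "filter_cache"
        and "date" in event
        and after_time < event["date"] < before_time
    ]
    # return order matches the unpacking in Examples #1 and #5
    item_stats = _get_stats(filter_cache_events, ItemFCStats, "eviction_items")
    bytes_stats = _get_stats(filter_cache_events, BytesFCStats, "eviction_bytes")
    return item_stats, bytes_stats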
Example #2
def __init__(
    self,
    diag_dir,
    files=None,
    start=None,
    end=None,
    wanted_stages=None,
    command_name="sperf core statuslogger",
    syslog_prefix="system.log",
    dbglog_prefix="debug.log",
):
    self.diag_dir = diag_dir
    self.files = files
    self.wanted_stages = wanted_stages
    if env.DEBUG:
        print("wanted stages:", self.wanted_stages)
    self.nodes = OrderedDefaultDict(Node)
    self.analyzed = False
    self.dumps_analyzed = 0
    self.rule_types = OrderedDefaultDict(int)
    self.command_name = command_name
    self.syslog_prefix = syslog_prefix
    self.dbglog_prefix = dbglog_prefix
    self.start = None
    self.end = None
    if start:
        self.start = date_parse(start)
    if end:
        self.end = date_parse(end)
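
Example #2 only shows the __init__, not the enclosing class. Assuming a StatusLogger class around it (the class name, path, and stage name below are illustrative assumptions; the timestamp format follows Example #5), construction would look roughly like:

sl = StatusLogger(
    "/path/to/diag",
    start="2020-01-21 00:00:00,000",
    end="2020-02-21 00:00:00,000",
    wanted_stages=["CompactionExecutor"],  # hypothetical stage filter
)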
Example #3
def __init__(self,
             regex,
             diag_dir=None,
             files=None,
             start=None,
             end=None,
             ignorecase=True):
    # strayre and basere are presumably class-level regex fragments
    # (they are referenced but not defined in this snippet)
    self.diag_dir = diag_dir
    self.files = files
    self.start = None
    self.end = None
    self.start_time = None
    self.end_time = None
    self.last_time = None
    if start:
        self.start_time = date_parse(start)
    if end:
        self.end_time = date_parse(end)
    if ignorecase:
        self.strayregex = re.compile(self.strayre + regex + ".*",
                                     re.IGNORECASE)
        self.timeregex = re.compile(self.basere + regex + ".*",
                                    re.IGNORECASE)
        self.supplied_regex = regex.lower()
    else:
        self.strayregex = re.compile(self.strayre + regex + ".*")
        self.timeregex = re.compile(self.basere + regex + ".*")
        self.supplied_regex = regex
    self.valid_log_regex = re.compile(self.basere)
    self.matches = OrderedDefaultDict(list)
    self.count = 0
    self.unknown = 0
    self.analyzed = False
Example #4
def parse(args):
    """parse entry point, generates a report object
    from a tarball or series of files"""
    logs = diag.find_files(args, args.system_log_prefix)
    print("from directory '%s':" % args.diag_dir)
    node_stats = OrderedDict()
    after_time = dates.date_parse(args.after)
    before_time = dates.date_parse(args.before)
    for log in logs:
        start_log_time, last_log_time = diag.log_range(log)
        with diag.FileWithProgress(log) as log_file:
            raw_events = parser.read_system_log(log_file)
            filter_cache_events_all = [
                event for event in raw_events
                if event.get("event_category", "") == "filter_cache"
            ]
            filter_cache_events = [
                event for event in filter_cache_events_all
                if "date" in event
                and after_time < event["date"] < before_time
            ]
            item_eviction_stats = _get_stats(filter_cache_events, ItemFCStats,
                                             "eviction_items")
            bytes_eviction_stats = _get_stats(filter_cache_events,
                                              BytesFCStats, "eviction_bytes")
            node = util.extract_node_name(log, True)
            node_stats[node] = OrderedDict([
                ("evictions", (bytes_eviction_stats, item_eviction_stats)),
                ("start", start_log_time),
                ("end", last_log_time),
            ])
    return OrderedDict([
        ("nodes", node_stats),
        ("after_time", after_time),
        ("before_time", before_time),
    ])
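
parse() reads only four attributes off args: diag_dir, system_log_prefix, after, and before. A hedged sketch of the wiring a caller might use (the flag names and defaults are illustrative; the real sperf CLI defines its own flags and defaults):

import argparse

ap = argparse.ArgumentParser()
ap.add_argument("--diag_dir", default=".")
ap.add_argument("--system_log_prefix", default="system.log")  # matches Example #2's syslog_prefix default
ap.add_argument("--after", default="2020-01-01 00:00:00,000")  # illustrative; format follows Example #5
ap.add_argument("--before", default="2021-01-01 00:00:00,000")
report = parse(ap.parse_args())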
Example #5
def test_calculate_eviction_stats(self):
    lines = [
        "INFO  [RemoteMessageServer query worker - 81] 2020-01-21 11:34:33,033  SolrFilterCache.java:340 - Filter cache org.apache.solr.search.SolrFilterCache$1@7c723229 has reached 8000000 entries of a maximum of 8000000. Evicting oldest entries...",
        "ERROR [RemoteMessageServer query worker - 18] 2020-01-21 11:34:34,475  MessageServer.java:277 - Failed to process request:",
        "INFO  [RemoteMessageServer query worker - 81] 2020-01-21 11:34:35,448  SolrFilterCache.java:356 - ...eviction completed in 1304 milliseconds. Filter cache org.apache.solr.search.SolrFilterCache$1@7c723229 usage is now 32441266 bytes across 4000000 entries.",
        "INFO  [LocalMessageServer query worker - 77] 2020-01-21 12:24:23,912  SolrFilterCache.java:340 - Filter cache org.apache.solr.search.SolrFilterCache$1@324b2c16 has reached 3999974 entries of a maximum of 8000000. Evicting oldest entries...",
        "INFO  [LocalMessageServer query worker - 77] 2020-01-21 12:24:23,912  SolrFilterCache.java:356 - ...eviction completed in 1 milliseconds. Filter cache org.apache.solr.search.SolrFilterCache$1@324b2c16 usage is now 32005744 bytes across 3999962 entries.",
        "INFO  [RemoteMessageServer query worker - 41] 2020-01-21 12:47:26,942  SolrFilterCache.java:311 - Filter cache org.apache.solr.search.SolrFilterCache$6@5af917a4 has reached 16 GB bytes of off-heap memory usage, the maximum is 16 GB. Evicting oldest entries...",
        "INFO  [RemoteMessageServer query worker - 41] 2020-01-21 12:47:26,950  SolrFilterCache.java:328 - ...eviction completed in 9 milliseconds. Filter cache org.apache.solr.search.SolrFilterCache$6@5af917a4 usage is now 114781220 across 159 entries.",
        # new version of logs, after DSP-18693
        "INFO  [RemoteMessageServer query worker - 41] 2020-01-21 12:47:26,942  SolrFilterCache.java:311 - Filter cache org.apache.solr.search.SolrFilterCache$6@5af917b6 has reached 16 GB bytes of off-heap memory usage, the maximum is 16 GB. Evicting oldest entries...",
        "INFO  [RemoteMessageServer query worker - 41] 2020-01-21 12:47:26,950  SolrFilterCache.java:328 - ...eviction completed in 8 milliseconds. Filter cache org.apache.solr.search.SolrFilterCache$6@5af917b6 usage is now 114781220 bytes across 159 entries.",
        # eviction event without duration log line
        "INFO  [RemoteMessageServer query worker - 41] 2020-01-21 12:47:26,970  SolrFilterCache.java:311 - Filter cache org.apache.solr.search.SolrFilterCache$6@5af917c7 has reached 16 GB bytes of off-heap memory usage, the maximum is 16 GB. Evicting oldest entries...",
    ]
    raw_events = parser.read_system_log(lines)
    after_time = dates.date_parse("2020-01-21 00:00:00,000")
    before_time = dates.date_parse("2020-02-21 00:00:00,000")
    item_ev_stats, bytes_ev_stats = calculate_eviction_stats(
        raw_events, after_time, before_time
    )
    assert len(item_ev_stats.values()) == 2
    assert sum(s.duration for s in item_ev_stats.values()) == 1304 + 1
    assert len(bytes_ev_stats.values()) == 3
    assert sum(s.duration for s in bytes_ev_stats.values()) == 9 + 8 + 0
Example #6
def parse(args):
    """parse entry point, generates a report object
    from a tarball or series of files"""
    logs = diag.find_files(args, args.system_log_prefix)
    print("from directory '%s':" % args.diag_dir)
    node_stats = {}
    after_time = dates.date_parse(args.after)
    before_time = dates.date_parse(args.before)
    for log in logs:
        start_log_time, last_log_time = diag.log_range(log)
        with diag.FileWithProgress(log) as log_file:
            raw_events = parser.read_system_log(log_file)
            events = [
                event for event in raw_events
                if after_time < event['date'] < before_time
            ]
            filter_cache_events = [
                event for event in events
                if event['event_category'] == 'filter_cache'
            ]
            item_eviction_stats = _get_stats(filter_cache_events, ItemFCStats,
                                             'eviction_items')
            bytes_eviction_stats = _get_stats(filter_cache_events,
                                              BytesFCStats, 'eviction_bytes')
            node = util.extract_node_name(log, True)
            node_stats[node] = {
                "evictions": (bytes_eviction_stats, item_eviction_stats),
                "start": start_log_time,
                "end": last_log_time,
            }
    return {
        "nodes": node_stats,
        "after_time": after_time,
        "before_time": before_time,
    }
Example #7
def __init__(self, start=None, end=None):
    self.state = None
    self.start = None
    self.end = None
    if start:
        self.start = date_parse(start)
    if end:
        self.end = date_parse(end)
Example #8
def __init__(self, diag_dir, files=None, start=None, end=None):
    self.diag_dir = diag_dir
    self.files = files
    self.parser = SlowQueryParser()
    self.querytimes = defaultdict(list)
    self.queries = []
    self.analyzed = False
    self.start = None
    self.end = None
    self.cross = 0
    self.start_time = None
    self.end_time = None
    if start:
        self.start_time = date_parse(start)
    if end:
        self.end_time = date_parse(end)
Example #9
def __init__(self, diag_dir=None, files=None, start=None, end=None):
    self.diag_dir = diag_dir
    self.files = files
    self.pauses = OrderedDefaultDict(lambda: OrderedDefaultDict(list))
    self.gc_types = OrderedDefaultDict(int)
    self.start = None
    self.end = None
    self.starts = OrderedDefaultDict(datetime.datetime)
    self.ends = OrderedDefaultDict(datetime.datetime)
    self.analyzed = False
    self.start_time = None
    self.end_time = None
    if start:
        self.start_time = date_parse(start)
    if end:
        self.end_time = date_parse(end)
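
Examples #2, #3, #7, #8, and #9 all repeat the same guarded start/end parsing. A small helper (not in the source; it assumes the same date_parse the snippets already import) could factor that duplication out:

def parse_time_bounds(start=None, end=None):
    """returns (start_time, end_time), parsing each bound only when supplied"""
    start_time = date_parse(start) if start else None
    end_time = date_parse(end) if end else None
    return start_time, end_time

Each __init__ above could then replace its four guard lines with a single
self.start_time, self.end_time = parse_time_bounds(start, end).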