Example No. 1
 def parse(self, logfile):
     """parses a debug log for slow queries"""
     ret = OrderedDict()
     for line in logfile:
         m = self.begin_match.match(line)
         time_match = self.begin_timed_out.match(line)
         if m:
             ret["numslow"] = int(m.group("numslow"))
             ret["date"] = date()(m.group("date"))
         elif time_match:
             ret["numslow"] = int(time_match.group("numslow"))
             ret["date"] = date()(time_match.group("date"))
         else:
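             # not a header line: try each per-query pattern until one matches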
             for match in [
                     self.slow_match,
                     self.fail_match,
                     self.slow_match_multiple,
                     self.fail_match_multiple,
                     self.timed_out_match,
             ]:
                 m = match.match(line)
                 if m:
                     ret.update(m.groupdict())
                     if match in [
                             self.fail_match, self.fail_match_multiple
                     ]:
                         ret["type"] = "fail"
                     elif match == self.timed_out_match:
                         ret["type"] = "timed_out"
                     else:
                         ret["type"] = "slow"
                     yield ret
                     break
Example No. 2
 def parse(self, logfile):
     """ parses a debug log for slow queries """
     ret = {}
     for line in logfile:
         if self.state is None:
             m = self.begin_match.match(line)
             if m:
                 self.state = self.BEGIN
                 ret['numslow'] = int(m.group('numslow'))
                 ret['timeslow'] = int(m.group('timeslow'))
                 ret['date'] = date('%Y-%m-%d %H:%M:%S,%f')(m.group('date'))
             continue
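         # after the header has been seen, look for the per-query detail lines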
         if self.state == self.BEGIN:
             for match in [
                     self.slow_match, self.fail_match,
                     self.slow_match_single, self.fail_match_single
             ]:
                 m = match.match(line)
                 if m:
                     ret.update(m.groupdict())
                     if match in [self.fail_match, self.fail_match_single]:
                         ret['type'] = 'fail'
                     else:
                         ret['type'] = 'slow'
                     self.state = None
                     yield ret
                     break
Example No. 3
 def parse(self, logfile):
     """ parses a debug log for slow queries """
     ret = OrderedDict()
     for line in logfile:
         if self.state is None:
             m = self.begin_match.match(line)
             if m:
                 self.state = self.BEGIN
                 ret["numslow"] = int(m.group("numslow"))
                 ret["timeslow"] = int(m.group("timeslow"))
                 ret["date"] = date()(m.group("date"))
             continue
         if self.state == self.BEGIN:
             for match in [
                     self.slow_match,
                     self.fail_match,
                     self.slow_match_single,
                     self.fail_match_single,
             ]:
                 m = match.match(line)
                 if m:
                     ret.update(m.groupdict())
                     if match in [self.fail_match, self.fail_match_single]:
                         ret["type"] = "fail"
                     else:
                         ret["type"] = "slow"
                     self.state = None
                     yield ret
                     break
Example No. 4
 def analyze(self):
     """parses logs for results"""
     print("bucketgrep version %s" % VERSION)
     print("search: '%s'" % self.supplied_regex)
     target = None
     if self.files:
         target = self.files
     elif self.diag_dir:
         if self.diag_dir == ".":
             directory_path = os.getcwd()
             print("from directory '%s':" % directory_path)
         else:
             print("from directory '%s':" % self.diag_dir)
         target = diag.find_logs(self.diag_dir)
     else:
         raise Exception("no diag dir and no files specified")
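     # walk each target log, bucketing matches per node and per timestamp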
     for file in target:
         with diag.FileWithProgress(file) as log:
             node_name = extract_node_name(file, ignore_missing_nodes=True)
             self.node_matches[node_name] = OrderedDefaultDict(list)
             for line in log:
                 # for any valid log line, capture the date even if the rest of
                 # the line is irrelevant, so the timestamp can be attached to
                 # later strayregex matches
                 current_dt = self.valid_log_regex.match(line)
                 if current_dt:
                     dt = date()(current_dt.group("date"))
                     # the line is valid, so remember its timestamp as last_time
                     self.last_time = dt
                 # now check whether the search pattern matches this log line
                 d = self.timeregex.match(line)
                 if d:
                     # normal case, well-formatted log line
                     self.__setdates(dt)
                     if self.start_time and dt < self.start_time:
                         continue
                     if self.end_time and dt > self.end_time:
                         continue
                     self.matches[dt].append(line)
                     self.node_matches[node_name][dt].append(line)
                     self.count += 1
                 else:
                     m = self.strayregex.match(line)
                     # check for a match in an unformatted line, like a traceback
                     if m:
                         if self.last_time is None:
                             # match, but no previous timestamp to associate with
                             self.unknown += 1
                             continue
                         self.matches[self.last_time].append(line)
                         self.node_matches[node_name][
                             self.last_time].append(line)
                         self.count += 1
     self.analyzed = True
Example No. 5
 def analyze(self):
     """parses logs for results"""
     target = None
     if self.files:
         target = self.files
     elif self.diag_dir:
         target = diag.find_logs(self.diag_dir)
     else:
         raise Exception("no diag dir and no files specified")
     for file in target:
         with diag.FileWithProgress(file) as log:
             for line in log:
                 # for any valid log line, capture the date even if the rest of
                 # the line is irrelevant, so the timestamp can be attached to
                 # later strayregex matches
                 current_dt = self.valid_log_regex.match(line)
                 if current_dt:
                     dt = date()(current_dt.group("date"))
                     # the line is valid, so remember its timestamp as last_time
                     self.last_time = dt
                 # now check whether the search pattern matches this log line
                 d = self.timeregex.match(line)
                 if d:
                     # normal case, well-formatted log line
                     self.__setdates(dt)
                     if self.start_time and dt < self.start_time:
                         continue
                     if self.end_time and dt > self.end_time:
                         continue
                     self.matches[dt].append(line)
                     self.count += 1
                 else:
                     m = self.strayregex.match(line)
                     # check for a match in an unformatted line, like a traceback
                     if m:
                         if self.last_time is None:
                             # match, but no previous timestamp to associate with
                             self.unknown += 1
                             continue
                         self.matches[self.last_time].append(line)
                         self.count += 1
     self.analyzed = True
Example No. 6
 def parse(self, log):
     """ parse ttop output """
     total = OrderedDict()
     threads = OrderedDefaultDict(dict)
     for line in log:
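         # step a small state machine through the fixed sequence of ttop header lines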
         if self.state is None:
             m = self.begin_match.match(line)
             if m:
                 dt = date('%Y-%m-%dT%H:%M:%S.%f%z')(m.group('date'))
                 if self.start and dt < self.start:
                     continue
                 if self.end and dt > self.end:
                     continue
                 total['date'] = dt
                 self.state = self.BEGIN
                 continue
         if self.state == self.BEGIN:
             m = self.process_match.match(line)
             if not m:
                 raise ValueError("process line not found in " + line)
             self.state = self.PROCESS
             total['cpu_total'] = float(m.group('cpu_total'))
             continue
         if self.state == self.PROCESS:
             m = self.application_match.match(line)
             if not m:
                 raise ValueError("application line not found in " + line)
             self.state = self.APPLICATION
             total['app_cpu'] = float(m.group('app_cpu'))
             total['user_cpu'] = float(m.group('user_cpu'))
             total['sys_cpu'] = float(m.group('sys_cpu'))
             continue
         if self.state == self.APPLICATION:
             m = self.other_match.match(line)
             if not m:
                 raise ValueError("other line not found in '" + line + "'")
             self.state = self.OTHER
             total['other_cpu'] = float(m.group('other_cpu'))
             continue
         if self.state == self.OTHER:
             m = self.thread_match.match(line)
             if not m:
                 raise ValueError("thread line not found in '" + line + "'")
             self.state = self.THREAD
             total['thread_count'] = int(m.group('thread_count'))
             continue
         if self.state == self.THREAD:
             m = self.heap_match.match(line)
             if not m:
                 raise ValueError("heap line not found in '" + line + "'")
             self.state = self.TINFO
             total['heap_rate'] = self.convert(m.group('heap_rate'),
                                               m.group('heap_unit'))
             continue
         if self.state == self.TINFO:
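             # a blank line ends the per-thread section: emit this sample and reset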
             if line == '\n':
                 self.state = None
                 yield total, threads
                 total = OrderedDict()
                 threads = OrderedDefaultDict(dict)
             else:
                 m = self.tinfo_match.match(line)
                 if not m:
                     raise ValueError("thread info not found in '" + line +
                                      "'")
                 threads[m.group('thread_name')]['user_cpu'] = float(
                     m.group('user_cpu'))
                 threads[m.group('thread_name')]['sys_cpu'] = float(
                     m.group('sys_cpu'))
                 threads[m.group('thread_name')]['total_cpu'] = float(
                     m.group('sys_cpu')) + float(m.group('user_cpu'))
                 threads[m.group(
                     'thread_name')]['heap_rate'] = self.convert(
                         m.group('heap_rate'), m.group('heap_unit'))
                 continue