Example #1
def run_func(args, command_name):
    """run statuslogger"""
    wanted = None
    files = None
    if args.files:
        files = args.files.split(",")
    if args.stages != "all":
        wanted = tuple(filter(None, args.stages.split(",")))
    if args.reporter == "histogram":
        StatusLogger(
            args.diag_dir,
            files=files,
            wanted_stages=wanted,
            command_name=command_name,
            start=args.start,
            end=args.end,
            syslog_prefix=args.system_log_prefix,
        ).print_histogram()
    elif args.reporter == "summary":
        StatusLogger(
            args.diag_dir,
            files=files,
            wanted_stages=wanted,
            command_name=command_name,
            start=args.start,
            end=args.end,
            syslog_prefix=args.system_log_prefix,
        ).print_summary()
    else:
        print("invalid reporter %s, must be either histogram or summary" %
              args.reporter)
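
The run_func dispatcher above only reads attributes off an args namespace (diag_dir, files, stages, reporter, start, end, system_log_prefix), so any argparse parser that defines those destinations can drive it. Below is a minimal wiring sketch; the flag names and defaults are assumptions for illustration and may differ from the real sperf CLI.

# Minimal argparse wiring sketch (flag names and defaults are assumptions,
# not the actual sperf CLI definition).
import argparse

def build_parser():
    parser = argparse.ArgumentParser(prog="statuslogger")
    parser.add_argument("diag_dir", help="diagnostic tarball or directory to analyze")
    parser.add_argument("--files", default=None, help="comma-separated list of log files")
    parser.add_argument("--stages", default="all", help="comma-separated stage names, or 'all'")
    parser.add_argument("--reporter", default="summary", choices=["histogram", "summary"])
    parser.add_argument("--start", default=None, help="start of the time range to analyze")
    parser.add_argument("--end", default=None, help="end of the time range to analyze")
    parser.add_argument("--system_log_prefix", default="system.log")
    return parser

if __name__ == "__main__":
    args = build_parser().parse_args()
    run_func(args, "statuslogger")  # run_func as defined in Example #1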
Example #2
def test_skip_duplicate_events_diag():
    """should merge events on the same node in different logs"""
    sl = StatusLogger(test_dse_tarball())
    sl.analyze()
    assert sl.analyzed
    assert len(sl.nodes) == 3
    s = Summary(sl.nodes)
    assert s.lines == 22054
    assert s.skipped_lines == 444
    assert s.get_busiest_stages()[0] == [
        '10.101.35.102', 'active', 'CompactionExecutor', 1
    ]
Example #3
def test_should_count_ops(self):
    """validate ops counting is doing the right thing even when crossing logs"""
    tarball = os.path.join(current_dir(__file__), "..", "testdata",
                           "sample_table_tarball")
    sl = StatusLogger(tarball)
    sl.analyze()
    s = Summary(sl.nodes)
    tables = s.get_busiest_tables("ops")
    # this proves the duplicate entries in debug.log and system.log are ignored
    # and that the extra entry from statuslogger is not causing a double count
    busiest = tables[0]
    self.assertEqual(busiest[1][0], "keyspace1.standard1")
    self.assertEqual(busiest[1][1].ops, 5931)
    self.assertEqual(busiest[1][1].data, 75690238)
Example #4
def test_68_debug_log_format(self):
    """should work with DSE 6.8 statuslogger debug files"""
    files = [
        os.path.join(current_dir(__file__), "..", "testdata",
                     "statuslogger68_debug.log")
    ]
    sl = StatusLogger(None, files=files)
    sl.analyze()
    self.assertTrue(sl.analyzed)
    s = Summary(sl.nodes)
    busiest_stages = s.get_busiest_stages()
    name, status, stage, value = busiest_stages[0]
    self.assertEqual(name, files[0])
    self.assertEqual(stage, "TPC/all/WRITE_REMOTE")
    self.assertEqual(status, "pending")
    self.assertEqual(value, 13094)
Example #5
def test_skip_duplicate_events_diag(self):
    """should merge events on the same node in different logs"""
    sl = StatusLogger(test_dse_tarball())
    sl.analyze()
    self.assertTrue(sl.analyzed)
    self.assertEqual(len(sl.nodes), 3)
    s = Summary(sl.nodes)
    self.assertEqual(s.lines, 22054)
    self.assertEqual(s.skipped_lines, 444)
    self.assertEqual(
        s.get_busiest_stages()[0],
        [
            "10.101.35.102",
            "active",
            "CompactionExecutor",
            1,
        ],
    )