Code example #1
def test_shema_run():
    """validates we don't get a bad exit code"""
    parser = argparse.ArgumentParser(prog="mine", description="entry point")
    subparsers = parser.add_subparsers()
    schema.build(subparsers)
    test_file_1 = os.path.join(current_dir(__file__), "testdata", "emtpy_file_1.log")
    test_file_2 = os.path.join(current_dir(__file__), "testdata", "emtpy_file_2.log")
    args = parser.parse_args(["schema", "-f", "%s,%s" % (test_file_1, test_file_2)])
    schema.run(args)
Code example #2
File: test_jarcheck.py Project: phanirajl/sperf
def test_run_jarcheck_diff_only():
    """integration test for the full command with diff_only"""
    test_file_1 = os.path.join(current_dir(__file__), "testdata",
                               "simple-output.log")
    test_file_2 = os.path.join(current_dir(__file__), "testdata",
                               "simple-output2.log")
    parser = jarcheck.JarCheckParser(files=[test_file_1, test_file_2])
    output = steal_output(parser.print_report, diff_only=True)
    assert """Inconsistent jars
------------------------------
dse-advrep-6.7.1.jar

Analyzed 2 files""" in output
Code example #3
File: test_jarcheck.py Project: phanirajl/sperf
def test_jarcheck_run():
    """validates we raise FNFE if nonexistent files are passed"""
    parser = argparse.ArgumentParser(prog="mine", description="entry point")
    subparsers = parser.add_subparsers()
    jarcheck.build(subparsers)
    test_file_1 = os.path.join(current_dir(__file__), "testdata",
                               "emtpy_file_1.log")
    test_file_2 = os.path.join(current_dir(__file__), "testdata",
                               "emtpy_file_2.log")
    args = parser.parse_args(
        ["jarcheck", "-f",
         "%s,%s" % (test_file_1, test_file_2), "-o"])
    with pytest.raises(FileNotFoundError):
        jarcheck.run(args)
Code example #4
File: test_diag.py Project: phanirajl/sperf
def test_parse_diag():
    """happy path test for parsing a diag tarball"""
    config = types.SimpleNamespace()
    test_dir = os.path.join(current_dir(__file__), "testdata", "diag",
                            "DSE_CLUSTER")
    config.diag_dir = test_dir
    config.node_info_prefix = "node_info.json"
    config.system_log_prefix = "system.log"
    config.output_log_prefix = "output.log"
    config.cfstats_prefix = "cfstats"
    config.block_dev_prefix = "blockdev_report"
    env.DEBUG = True
    parsed = {}
    try:
        parsed = parse_diag(config)
    finally:
        env.DEBUG = False
    assert not parsed.get("warnings")
    first = parsed.get("configs")[0]
    assert first['busiest_table_writes']
    assert first['busiest_table_writes'][0] == 'my_solr.my_table'
    assert "%.2f" % first['busiest_table_writes'][1] == '96.18'
    assert first['busiest_table_reads'][0] == 'my_solr.my_table'
    assert "%.2f" % first['busiest_table_reads'][1] == '99.76'
    assert first['threads_per_core'] == 1
Code example #5
File: test_sysbottle.py Project: rsds143/sperf
 def test_sysbottle_run(self):
     """validates we don't get a bad exit code"""
     parser = argparse.ArgumentParser(
         prog="sysbottle", description="sysbottle is parsed"
     )
     subparsers = parser.add_subparsers()
     sysbottle.build(subparsers)
     test_file_1 = os.path.join(current_dir(__file__), "..", "testdata", "iostat")
     args = parser.parse_args(
         [
             "sysbottle",
             test_file_1,
             "-c",
             "90",
             "-q",
             "1",
             "-d",
             "sda",
             "-i",
             "5",
             "-t",
             "3",
         ]
     )
     sysbottle.run(args)
Code example #6
File: test_bgrep.py Project: phanirajl/sperf
def test_bgrep():
    """validates bgrep matches every line"""
    b = BucketGrep(
        'main',
        files=[os.path.join(current_dir(__file__), 'testdata', 'simple.log')])
    b.analyze()
    assert len(b.matches) == 2
Code example #7
    def test_sperf_68(self):
        """integration test, this is not the best test and only verifies no change in calculations
        as changes in the codebase occur."""
        args = types.SimpleNamespace()
        args.diag_dir = os.path.join(current_dir(__file__), "testdata",
                                     "dse68")
        args.files = []
        args.top = 3
        args.interval = 3600
        args.start = None
        args.end = None

        def run():
            slowquery.run(args)

        output = steal_output(run)
        # reads better with the extra newline
        self.assertEqual(
            output,
            "sperf core slowquery version: %s" % (VERSION) + """

. <5073ms + >5073ms ! >5073ms X >5073ms
------------------------------
2020-07-22 13:39:05.889000+00:00  X

worst period: 2020-07-22 13:39:05.889000+00:00 (5074ms)

1 slow queries, 0 cross-node

Top 3 slow queries:
------------------------------
5074ms: <SELECT config FROM dse_insights.insights_config WHERE key = 1>""",
        )
Code example #8
    def test_ttop_report(self):
        """ttop parser test"""
        test_file = os.path.join(current_dir(__file__), "testdata",
                                 "ttop-cpu.out")
        ttop = TTopAnalyzer([test_file])
        output = steal_output(ttop.print_report)
        self.maxDiff = None
        self.assertIn(
            """2020-01-09 16:08:06                                Threads CPU%  Total: 28.06%
================================================================================
ParkedThreadsMonitor                               4       23.52 -----------------
RMI TCP Connection(2)                              4       2.90  --
CoreThread                                         20      1.20  -
DseGossipStateUpdater                              4       0.10
ScheduledTasks                                     4       0.08
NodeHealthPlugin-Scheduler-thread                  4       0.06
OptionalTasks                                      4       0.05
JMX server connection timeout 425                  4       0.04
ContainerBackgroundProcessor[StandardEngine[Solr]] 4       0.02
PO-thread                                          4       0.02
GossipTasks                                        4       0.01
AsyncAppender-Worker-ASYNCDEBUGLOG                 4       0.01
LeasePlugin                                        4       0.01
NonPeriodicTasks                                   4       0.01
RxSchedulerPurge                                   4       0.01
internode-messaging RemoteMessageServer acceptor   4       0.00""",
            output,
        )
Code example #9
    def test_ttop_allocation_report(self):
        """ttop parser allocation test"""
        test_file = os.path.join(current_dir(__file__), "testdata",
                                 "ttop-cpu.out")
        ttop = TTopAnalyzer([test_file])

        def run():
            ttop.print_report(alloc=True)

        self.maxDiff = None
        output = steal_output(run)
        self.assertIn(
            """================================================================================
CoreThread                                         20      2.14 mb   -------------
RMI TCP Connection(2)                              4       1.14 mb   -------
DseGossipStateUpdater                              4       38.00 kb
ScheduledTasks                                     4       24.00 kb
NodeHealthPlugin-Scheduler-thread                  4       19.00 kb
ContainerBackgroundProcessor[StandardEngine[Solr]] 4       6.01 kb
JMX server connection timeout 425                  4       4.18 kb
PO-thread                                          4       2.47 kb
AsyncAppender-Worker-ASYNCDEBUGLOG                 4       1.90 kb
LeasePlugin                                        4       1.38 kb
NonPeriodicTasks                                   4       1.07 kb
GossipTasks                                        4       841 bytes
RxSchedulerPurge                                   4       710 bytes
OptionalTasks                                      4       323 bytes
ParkedThreadsMonitor                               4       317 bytes
internode-messaging RemoteMessageServer acceptor   4       0 byte""",
            output,
        )
Code example #10
File: test_diag.py Project: rsds143/sperf
 def test_parse_diag(self):
     """happy path test for parsing a diag tarball"""
     config = types.SimpleNamespace()
     test_dir = os.path.join(current_dir(__file__), "testdata", "diag",
                             "DSE_CLUSTER")
     config.diag_dir = test_dir
     config.node_info_prefix = "node_info.json"
     config.system_log_prefix = "system.log"
     config.output_log_prefix = "output.log"
     config.cfstats_prefix = "cfstats"
     config.block_dev_prefix = "blockdev_report"
     env.DEBUG = True
     parsed = {}
     try:
         parsed = parse_diag(config)
     finally:
         env.DEBUG = False
     self.assertFalse(parsed.get("warnings"))
     first = parsed.get("configs")[0]
     self.assertTrue(first["busiest_table_writes"])
     self.assertEqual(first["busiest_table_writes"][0], "my_solr.my_table")
     self.assertEqual("%.2f" % first["busiest_table_writes"][1], "96.18")
     self.assertEqual(first["busiest_table_reads"][0], "my_solr.my_table")
     self.assertEqual("%.2f" % first["busiest_table_reads"][1], "99.76")
     self.assertEqual(first["threads_per_core"], 1)
Code example #11
 def test_run_jarcheck_with_empty_file(self):
     """integration test for a single file with no jar data in it"""
     test_file_1 = os.path.join(current_dir(__file__), "testdata",
                                "empty.log")
     parser = jarcheck.JarCheckParser(files=[test_file_1])
     output = steal_output(parser.print_report)
     self.assertEqual(output, "Analyzed 1 file")
Code example #12
File: test_ttop.py Project: rsds143/sperf
    def test_ttop_allocation_report(self):
        """ttop parser allocation test"""
        test_file = os.path.join(current_dir(__file__), "testdata", "ttop-cpu.out")
        ttop = TTopAnalyzer([test_file])

        def run():
            ttop.print_report(alloc=True)

        self.maxDiff = None
        output = steal_output(run)
        self.assertIn(
            """2020-01-09 16:08:46                                Threads Alloc/s   Total: 3.24 mb
================================================================================
CoreThread                                         7       2.15 mb   -------------
RMI TCP Connection(2)                              1       1.05 mb   ------
ScheduledTasks                                     1       24.00 kb
ContainerBackgroundProcessor[StandardEngine[Solr]] 1       4.18 kb
JMX server connection timeout 425                  1       4.08 kb
BatchlogTasks                                      1       1.55 kb
LeasePlugin                                        1       1.49 kb
GossipTasks                                        1       916 bytes
RxSchedulerPurge                                   1       646 bytes
ParkedThreadsMonitor                               1       317 bytes
OptionalTasks                                      1       305 bytes
http-bio                                           1       57 bytes
AsyncFileHandlerWriter                             1       32 bytes
internode-messaging RemoteMessageServer acceptor   1       0 byte""",
            output,
            output.replace("\\n", "\n"),
        )
Code example #13
    def test_sperf(self):
        """integration test, this is not the best test and only verifies no change in calculations
        as changes in the codebase occur."""
        args = types.SimpleNamespace()
        args.diag_dir = os.path.join(
            current_dir(__file__), "testdata", "diag", "DSE_CLUSTER"
        )
        args.files = []
        args.stages = "all"
        args.start = None
        args.end = None
        args.debug_log_prefix = "debug.log"
        args.reporter = "summary"
        args.system_log_prefix = "system.log"

        def run():
            statuslogger.run(args)

        output = steal_output(run)
        # reads better with the extra newline
        self.assertEqual(
            output,
            "sperf core statuslogger version: %s\n" % (VERSION)
            + """
Summary (22,054 lines)
Summary (444 skipped lines)

dse versions: {'6.7.7'}
cassandra versions: {'DSE Private Fork'}
first log time: 2020-01-10 15:27:58.554000+00:00
last log time: 2020-01-10 17:21:13.549000+00:00
duration: 1.89 hours
total stages analyzed: 2
total nodes analyzed: 3

GC pauses  max        p99        p75        p50        p25        min
           ---        ---        ---        ---        ---        ---
ms         800        729        358        282        243        201
total GC events: 236

busiest tables by ops across all nodes
------------------------------
* 10.101.35.102: OpsCenter.rollups60: 66,464 ops / 3.38 mb data

busiest table by data across all nodes
------------------------------
* 10.101.35.102: my_solr.my_table: 37,132 ops / 9.37 mb data

busiest stages across all nodes
------------------------------
* CompactionExecutor active:   1  (10.101.35.102)
* TPC/0/WRITE_REMOTE active:   1  (10.101.35.102)
* CompactionExecutor pending:  1  (10.101.35.102)

busiest stages in PENDING
------------------------------
10.101.35.102:
       CompactionExecutor:  1""",
        )
Code example #14
def test_sysbottle_run():
    """validates we don't get a bad exit code"""
    parser = argparse.ArgumentParser(prog="mine", description="entry point")
    subparsers = parser.add_subparsers()
    sysbottle.build(subparsers)
    test_file_1 = os.path.join(current_dir(__file__), '..', 'testdata', 'iostat')
    args = parser.parse_args(["sysbottle", test_file_1, "-c", "90", "-q", "1", "-d", "sda", "-i", "5", "-t", "3"])
    sysbottle.run(args)
Code example #15
 def test_read_line_output(self):
     """jar check parser test"""
     test_file = os.path.join(current_dir(__file__), "testdata",
                              "simple-output.log")
     parser = jarcheck.JarCheckParser(files=[test_file])
     parser.analyze()
     self.assertEqual(len(parser.jars), 501)
     self.assertTrue("exp4j-0.4.8.jar" in parser.jars)
Code example #16
File: test_jarcheck.py Project: phanirajl/sperf
def test_read_line_output():
    """jar check parser test"""
    test_file = os.path.join(current_dir(__file__), "testdata",
                             "simple-output.log")
    parser = jarcheck.JarCheckParser(files=[test_file])
    parser.analyze()
    assert len(parser.jars) == 501
    assert 'exp4j-0.4.8.jar' in parser.jars
Code example #17
 def test_bgrep(self):
     """validates bgrep matches every line"""
     b = BucketGrep(
         "main",
         files=[
             os.path.join(current_dir(__file__), "testdata", "simple.log")
         ],
     )
     b.analyze()
     self.assertEqual(len(b.matches), 2)
Code example #18
File: test_statuslogger.py Project: phanirajl/sperf
def test_statuslogger_run():
    """validates we don't get a bad exit code"""
    parser = argparse.ArgumentParser(prog="mine", description="entry point")
    subparsers = parser.add_subparsers()
    statuslogger.build(subparsers)
    test_dir_1 = os.path.join(current_dir(__file__), '..', 'testdata', 'diag',
                              'statuslogger')
    args = parser.parse_args(
        ["statuslogger", "-d", test_dir_1, "-r", "histogram"])
    statuslogger.run(args)
Code example #19
 def test_no_matches(self):
     """should match no rows"""
     b = BucketGrep(
         "this should never match anything",
         files=[
             os.path.join(current_dir(__file__), "testdata",
                          "statusloggernew_debug.log")
         ],
     )
     b.analyze()
     self.assertFalse(b.matches)
Code example #20
 def test_bgrep_when_statuslogger_lines_present(self):
     """statuslogger matching maybe breaking bgrep"""
     b = BucketGrep(
         "RANGE_SLICE messages were dropped",
         files=[
             os.path.join(current_dir(__file__), "testdata",
                          "statusloggernew_debug.log")
         ],
     )
     b.analyze()
     self.assertEqual(len(b.matches), 1)
Code example #21
 def test_nodate(self):
     """validates bgrep matches lines without a date"""
     b = BucketGrep(
         ".*No such file or directory.*",
         files=[
             os.path.join(current_dir(__file__), "testdata",
                          "traceback.log")
         ],
     )
     b.analyze()
     self.assertEqual(len(b.matches), 1)
Code example #22
def test_parses_all_matches():
    """validates the parser returns every line"""
    rows = []
    with open(os.path.join(current_dir(__file__), "testdata",
                           "simple.log")) as test_file:
        events = parser.read_log(test_file, capture_line)
        rows = list(events)
    assert len(rows) == 2
    line1 = "WARN"
    assert rows[0]['level'] == line1
    line2 = "ERROR"
    assert rows[1]['level'] == line2
Code example #23
File: test_parser.py Project: rsds143/sperf
 def test_parses_all_matches(self):
     """validates the parser returns every line"""
     rows = []
     with open(
         os.path.join(current_dir(__file__), "testdata", "simple.log")
     ) as test_file:
         events = parser.read_log(test_file, capture_line)
         rows = list(events)
     self.assertEqual(len(rows), 2)
     line1 = "WARN"
     self.assertEqual(rows[0]["level"], line1)
     line2 = "ERROR"
     self.assertEqual(rows[1]["level"], line2)
Code example #24
 def test_should_count_ops(self):
     """validate ops counting is doing the right thing even when crossing logs"""
     tarball = os.path.join(current_dir(__file__), "..", "testdata",
                            "sample_table_tarball")
     sl = StatusLogger(tarball)
     sl.analyze()
     s = Summary(sl.nodes)
     tables = s.get_busiest_tables("ops")
     # this proves the extra logs in debug.log and system.log that are duplicate are ignored
     # and that the extra entry from statuslogger is not causing a double count
     busiest = tables[0]
     self.assertEqual(busiest[1][0], "keyspace1.standard1")
     self.assertEqual(busiest[1][1].ops, 5931)
     self.assertEqual(busiest[1][1].data, 75690238)
Code example #25
File: test_schema.py Project: rsds143/sperf
    def test_sperf(self):
        """integration test, this is not the best test and only verifies no change in calculations
        as changes in the codebase occur."""
        args = types.SimpleNamespace()
        args.diag_dir = os.path.join(current_dir(__file__), "testdata", "diag",
                                     "DSE_CLUSTER")
        args.files = []

        def run():
            schema.run(args)

        output = steal_output(run)
        # reads better with the extra newline
        header = "sperf core schema version: %s" % (VERSION)
        self.assertEqual(
            output,
            header + """


Schema read     : %s
Keyspace Count  : 15
Table Count     : 61
2i Count        : 1
MV Count        : 0
UDT Count       : 6
Solr Index Count: 1
Solr Table Count: 1""" % os.path.join(
                current_dir(__file__),
                "testdata",
                "diag",
                "DSE_CLUSTER",
                "nodes",
                "10.101.33.205",
                "driver",
                "schema",
            ),
        )
Code example #26
File: test_diag.py Project: phanirajl/sperf
def test_find_files_by_diag_dir():
    """find logs by diag dir"""
    config = types.SimpleNamespace()
    test_dir = os.path.join(current_dir(__file__), "testdata", "diag",
                            "findfiles")
    config.diag_dir = test_dir
    config.files = ""
    files = find_files(config, "my.log")
    assert len(files) == 4
    assert os.path.join(test_dir, "nodes", "node1", "my.log") in files
    assert os.path.join(test_dir, "nodes", "node1", "my.log.1") in files
    assert os.path.join(test_dir, "nodes", "node1", "my.log.2") in files
    assert os.path.join(test_dir, "nodes", "node1", "debug.log") not in files
    assert os.path.join(test_dir, "nodes", "node2", "my.log") in files
    assert os.path.join(test_dir, "nodes", "node2", "debug.log") not in files
Code example #27
 def test_68_debug_log_format(self):
     """should work with DSE 6.8 statuslogger debug files"""
     files = [
         os.path.join(current_dir(__file__), "..", "testdata",
                      "statuslogger68_debug.log")
     ]
     sl = StatusLogger(None, files=files)
     sl.analyze()
     self.assertTrue(sl.analyzed)
     s = Summary(sl.nodes)
     busiest_stages = s.get_busiest_stages()
     name, status, stage, value = busiest_stages[0]
     self.assertEqual(name, files[0])
     self.assertEqual(stage, "TPC/all/WRITE_REMOTE")
     self.assertEqual(status, "pending")
     self.assertEqual(value, 13094)
Code example #28
File: test_diag.py Project: phanirajl/sperf
def test_find_files_by_files_param():
    """find logs by file name and not just looking in a diag for all matches"""
    config = types.SimpleNamespace()
    test_dir = os.path.join(current_dir(__file__), "testdata", "diag",
                            "findfiles")
    config.diag_dir = ""
    config.files = os.path.join(test_dir, "nodes", "node1", "my.log") + "," + \
                    os.path.join(test_dir, "nodes", "node2", "my.log")
    files = find_files(config, "my.log")
    assert len(files) == 2
    assert os.path.join(test_dir, "nodes", "node1", "my.log") in files
    assert os.path.join(test_dir, "nodes", "node1", "my.log.1") not in files
    assert os.path.join(test_dir, "nodes", "node1", "my.log.2") not in files
    assert os.path.join(test_dir, "nodes", "node1", "debug.log") not in files
    assert os.path.join(test_dir, "nodes", "node2", "my.log") in files
    assert os.path.join(test_dir, "nodes", "node2", "debug.log") not in files
Code example #29
File: test_diag.py Project: phanirajl/sperf
def test_parse_diag_reports_no_files_found():
    """should see missing files in the warning list"""
    config = types.SimpleNamespace()
    test_dir = os.path.join(current_dir(__file__), "testdata", "diag", "empty")
    config.diag_dir = test_dir
    config.node_info_prefix = "node_info.json"
    config.system_log_prefix = "system.log"
    config.output_log_prefix = "output.log"
    config.cfstats_prefix = "cfstats"
    config.block_dev_prefix = "blockdev_report"
    parsed = parse_diag(config)
    warnings = parsed.get("warnings")
    assert len(warnings) == 4
    assert "missing output logs: all nodes" in warnings
    assert "unable to read 'node_info.json'" in warnings
    assert "missing system logs: all nodes" in warnings
    assert "missing cfstats: all nodes" in warnings
Code example #30
File: test_diag.py Project: rsds143/sperf
 def test_parse_diag_reports_missing_files(self):
     """should see missing files in the warning list"""
     config = types.SimpleNamespace()
     test_dir = os.path.join(current_dir(__file__), "testdata", "diag",
                             "missing")
     config.diag_dir = test_dir
     config.node_info_prefix = "node_info.json"
     config.system_log_prefix = "system.log"
     config.output_log_prefix = "output.log"
     config.cfstats_prefix = "cfstats"
     config.block_dev_prefix = "blockdev_report"
     parsed = parse_diag(config)
     warnings = parsed.get("warnings")
     self.assertEqual(len(warnings), 4)
     self.assertTrue("missing output logs: node2" in warnings)
     self.assertTrue("missing blockdev_reports: node2" in warnings)
     self.assertTrue("missing system logs: node2" in warnings)
     self.assertTrue("missing cfstats: node2" in warnings)