def test_read_systemlog_when_outputlog_is_empty(self):
    """verify configuration is parsed"""
    configs = {"node1": {}}
    # All fixture logs live under the same per-node cassandra log directory.
    log_dir = os.path.join(
        test_dse_tarball(), "nodes", "10.101.33.205", "logs", "cassandra"
    )
    # Intentionally nonexistent output log: forces the fallback to system logs.
    output_logs = [os.path.join(log_dir, "doesntexist.log")]
    system_logs = [
        os.path.join(log_dir, "system.log"),
        os.path.join(log_dir, "system.log.2"),
    ]
    node_env.find_config_in_logs(configs, output_logs, system_logs)
    self.assertEqual(
        configs["10.101.33.205"]["memtable_cleanup_threshold"], "default")
def test_read_outputlog():
    """verify configuration is parsed"""
    node1 = "10.101.33.205"
    configs = {"node1": {}}
    # Both logs come from the same per-node cassandra log directory.
    log_dir = os.path.join(test_dse_tarball(), "nodes", node1, "logs", "cassandra")
    output_logs = [os.path.join(log_dir, "output.log")]
    system_logs = [os.path.join(log_dir, "system.log")]
    node_env.find_config_in_logs(configs, output_logs, system_logs)
    assert configs[node1]["memtable_cleanup_threshold"] == "default"
def test_add_stats_to_config():
    """happy path"""
    configs = [
        {"nodes_list": ["10.101.33.205"]},
        {"node_list": ["10.101.35.102"]},
    ]
    # One cfstats file per node in the fixture tarball.
    cfstats_files = [
        os.path.join(test_dse_tarball(), "nodes", node, "nodetool", "cfstats")
        for node in ("10.101.33.205", "10.101.35.102", "10.101.35.71")
    ]
    table_stats.add_stats_to_config(configs, cfstats_files)
    first = configs[0]
    # Worst-offender tuples are (node, keyspace.table, value).
    assert first['worst_read_latency'] == ('10.101.33.205', 'OpsCenter.rollup_state', 1.704)
    assert first['worst_write_latency'] == ('10.101.33.205', 'system_schema.dropped_columns', 4.009)
    assert first['worst_tombstone'] == ('10.101.33.205', 'OpsCenter.events', 7)
    assert first['worst_live_cells'] == ('10.101.33.205', 'system_schema.columns', 447)
    assert first['largest_table'] == ('10.101.33.205', 'my_solr.my_table', 113553687)
    # Busiest-table entries are (keyspace.table, percentage-float).
    assert first['busiest_table_reads'][0] == 'my_solr.my_table'
    assert "%.2f" % first['busiest_table_reads'][1] == '99.75'
    assert first['busiest_table_writes'][0] == 'my_solr.my_table'
    assert "%.2f" % first['busiest_table_writes'][1] == '96.76'
    # worst_part_size: (table, node, max, mean, min partition bytes).
    assert first['worst_part_size'][0] == 'system.size_estimates'
    assert first['worst_part_size'][1] == '10.101.33.205'
    assert first['worst_part_size'][2] == 17084
    assert first['worst_part_size'][3] == 6924
    assert first['worst_part_size'][4] == 2760
def test_parse_file():
    """test parsing a file"""
    report_path = os.path.join(
        test_dse_tarball(), "nodes", "10.101.33.205", "blockdev_report"
    )
    with open(report_path) as blockdev_file:
        # read_block_dev yields events lazily; materialize them all.
        events = list(read_block_dev(blockdev_file))
    assert len(events) == 4
    # First device entry should report a read-ahead of 256 sectors.
    assert events[0]['ra'] == 256
def test_gcinspector():
    """test gcinspector analysis"""
    inspector = GCInspector(test_dse_tarball())
    inspector.analyze()
    # Three nodes' worth of pauses, 236 individual pause events total.
    assert len(inspector.pauses) == 3
    assert len(inspector.all_pauses()) == 236
    report = steal_output(inspector.print_report)
    # The report renders the pause timeline as a +/!/. symbol strip.
    expected_strip = (
        '!!++.+.+.!++.+.+...+.+..+.+.+.+..+++....++..+++....+..++.+++.+!+..+.+.+.+!......+++....+'
    )
    assert expected_strip in report
def test_parse_file(self):
    """test parsing a file"""
    report_path = os.path.join(
        test_dse_tarball(), "nodes", "10.101.33.205", "blockdev_report"
    )
    with open(report_path) as blockdev_file:
        # read_block_dev yields events lazily; materialize them all.
        events = list(read_block_dev(blockdev_file))
    self.assertEqual(len(events), 4)
    # First device entry should report a read-ahead of 256 sectors.
    self.assertEqual(events[0]["ra"], 256)
def test_gcinspector(self):
    """test gcinspector analysis"""
    inspector = GCInspector(test_dse_tarball())
    inspector.analyze()
    # Three nodes' worth of pauses, 236 individual pause events total.
    self.assertEqual(len(inspector.pauses), 3)
    self.assertEqual(len(inspector.all_pauses()), 236)
    report = steal_output(inspector.print_report)
    # The report renders the pause timeline as a +/!/. symbol strip.
    expected_strip = (
        "!!++.+.+.!++.+.+...+.+..+.+.+.+..+++....++..+++....+..++.+++.+!+..+.+.+.+!......+++....+"
    )
    self.assertIn(expected_strip, report)
def test_skip_duplicate_events_diag():
    """should merge events on the same node in different logs"""
    status_logger = StatusLogger(test_dse_tarball())
    status_logger.analyze()
    assert status_logger.analyzed
    assert len(status_logger.nodes) == 3
    summary = Summary(status_logger.nodes)
    # Duplicate lines across logs are counted once; skipped ones tracked.
    assert summary.lines == 22054
    assert summary.skipped_lines == 444
    busiest = ['10.101.35.102', 'active', 'CompactionExecutor', 1]
    assert summary.get_busiest_stages()[0] == busiest
def test_read_schema():
    """test the read schema report"""
    test_file = os.path.join(
        test_dse_tarball(), "nodes", "10.101.33.205", "driver", "schema"
    )
    # schema.read takes a list of schema-report files and aggregates counts.
    parsed = schema.read([test_file])
    assert parsed["keyspaces"] == 15
    assert parsed["tables"] == 61
    assert parsed["2i"] == 1
    assert parsed["mvs"] == 0
    # Also assert the UDT count, matching the unittest variant of this test
    # which checks the same fixture file.
    assert parsed["udts"] == 6
    assert parsed["solr"] == 1
    assert parsed["solr_table"] == 1
def test_read_schema(self):
    """test the read schema report"""
    schema_path = os.path.join(
        test_dse_tarball(), "nodes", "10.101.33.205", "driver", "schema"
    )
    # schema.read takes a list of schema-report files and aggregates counts.
    parsed = schema.read([schema_path])
    expected_counts = {
        "keyspaces": 15,
        "tables": 61,
        "2i": 1,
        "mvs": 0,
        "udts": 6,
        "solr": 1,
        "solr_table": 1,
    }
    for key, expected in expected_counts.items():
        self.assertEqual(parsed[key], expected)
def test_skip_duplicate_events_diag(self):
    """should merge events on the same node in different logs"""
    status_logger = StatusLogger(test_dse_tarball())
    status_logger.analyze()
    self.assertTrue(status_logger.analyzed)
    self.assertEqual(len(status_logger.nodes), 3)
    summary = Summary(status_logger.nodes)
    # Duplicate lines across logs are counted once; skipped ones tracked.
    self.assertEqual(summary.lines, 22054)
    self.assertEqual(summary.skipped_lines, 444)
    busiest = ["10.101.35.102", "active", "CompactionExecutor", 1]
    self.assertEqual(summary.get_busiest_stages()[0], busiest)
def test_read_cfstats_file():
    """validate multiple keyspaces and tables are found as well as float
    and int parsing with and without ms suffix"""
    cfstats_path = os.path.join(
        test_dse_tarball(), 'nodes', '10.101.33.205', 'nodetool', 'cfstats'
    )
    parsed = cfstats.read_file(cfstats_path)
    assert parsed
    # Parsed layout is keyspace -> table -> stat-name -> value.
    keyspace = parsed.get('my_solr')
    assert keyspace
    table = keyspace.get('my_table')
    assert table
    assert table.get('SSTable count') == 1
    assert table.get('Local write count') == 4114233
    assert table.get('Local read count') == 4953524
    # Latencies carry an "ms" suffix in the raw file but parse as floats.
    assert "%.2f" % table.get('Local read latency') == "0.26"
    assert "%.2f" % table.get('Local write latency') == "0.40"
    # finds other keyspaces too
    assert parsed['system'].get('local')
    assert parsed['system']['local'].get('SSTable count') == 2
def test_read_cfstats_file(self):
    """validate multiple keyspaces and tables are found as well as float
    and int parsing with and without ms suffix"""
    cfstats_path = os.path.join(
        test_dse_tarball(), "nodes", "10.101.33.205", "nodetool", "cfstats"
    )
    parsed = cfstats.read_file(cfstats_path)
    self.assertTrue(parsed)
    # Parsed layout is keyspace -> table -> stat-name -> value.
    keyspace = parsed.get("my_solr")
    self.assertIsNotNone(keyspace)
    table = keyspace.get("my_table")
    self.assertIsNotNone(table)
    self.assertEqual(table.get("SSTable count"), 1)
    self.assertEqual(table.get("Local write count"), 4114233)
    self.assertEqual(table.get("Local read count"), 4953524)
    # Latencies carry an "ms" suffix in the raw file but parse as floats.
    self.assertEqual("%.2f" % table.get("Local read latency"), "0.26")
    self.assertEqual("%.2f" % table.get("Local write latency"), "0.40")
    # finds other keyspaces too
    self.assertIsNotNone(parsed["system"].get("local"))
    self.assertEqual(parsed["system"]["local"].get("SSTable count"), 2)
def test_add_stats_to_config(self):
    """happy path"""
    configs = [
        {"nodes_list": ["10.101.33.205"]},
        {"node_list": ["10.101.35.102"]},
    ]
    # One cfstats file per node in the fixture tarball.
    cfstats_files = [
        os.path.join(test_dse_tarball(), "nodes", node, "nodetool", "cfstats")
        for node in ("10.101.33.205", "10.101.35.102", "10.101.35.71")
    ]
    table_stats.add_stats_to_config(configs, cfstats_files)
    first = configs[0]
    # Worst-offender tuples are (node, keyspace.table, value).
    self.assertEqual(
        first["worst_read_latency"],
        ("10.101.33.205", "OpsCenter.rollup_state", 1.704),
    )
    self.assertEqual(
        first["worst_write_latency"],
        ("10.101.33.205", "system_schema.dropped_columns", 4.009),
    )
    self.assertEqual(
        first["worst_tombstone"],
        ("10.101.33.205", "OpsCenter.events", 7),
    )
    self.assertEqual(
        first["worst_live_cells"],
        ("10.101.33.205", "system_schema.columns", 447),
    )
    self.assertEqual(
        first["largest_table"],
        ("10.101.33.205", "my_solr.my_table", 113553687),
    )
    # Busiest-table entries are (keyspace.table, percentage-float).
    self.assertEqual(first["busiest_table_reads"][0], "my_solr.my_table")
    self.assertEqual("%.2f" % first["busiest_table_reads"][1], "99.75")
    self.assertEqual(first["busiest_table_writes"][0], "my_solr.my_table")
    self.assertEqual("%.2f" % first["busiest_table_writes"][1], "96.76")
    # worst_part_size: (table, node, max, mean, min partition bytes).
    self.assertEqual(first["worst_part_size"][0], "system.size_estimates")
    self.assertEqual(first["worst_part_size"][1], "10.101.33.205")
    self.assertEqual(first["worst_part_size"][2], 17084)
    self.assertEqual(first["worst_part_size"][3], 6924)
    self.assertEqual(first["worst_part_size"][4], 2760)