def main(args):
    """Run the advisor end-to-end: load the rule spec, build the data
    sources from the parsed CLI args, and print the rules they trigger.

    Args:
        args: parsed argparse namespace; must carry rules_spec,
            rocksdb_options, log_files_path_prefix, stats_dump_period_sec
            and the optional ods_* attributes.
    """
    # initialise the RulesSpec parser and validate the spec file
    rule_spec_parser = RulesSpec(args.rules_spec)
    rule_spec_parser.load_rules_from_spec()
    rule_spec_parser.perform_section_checks()
    # initialize the DatabaseOptions object
    db_options = DatabaseOptions(args.rocksdb_options)
    # Fetch the column families once: both the LOG parser and the final
    # rule evaluation need them (the original called the getter twice).
    column_families = db_options.get_column_families()
    # Create DatabaseLogs object
    db_logs = DatabaseLogs(args.log_files_path_prefix, column_families)
    # Create the Log STATS object
    db_log_stats = LogStatsParser(
        args.log_files_path_prefix, args.stats_dump_period_sec)
    data_sources = {
        DataSource.Type.DB_OPTIONS: [db_options],
        DataSource.Type.LOG: [db_logs],
        DataSource.Type.TIME_SERIES: [db_log_stats]
    }
    # ODS is an optional extra time-series source.
    if args.ods_client:
        data_sources[DataSource.Type.TIME_SERIES].append(
            OdsStatsFetcher(args.ods_client, args.ods_entity,
                            args.ods_tstart, args.ods_tend,
                            args.ods_key_prefix))
    triggered_rules = rule_spec_parser.get_triggered_rules(
        data_sources, column_families)
    rule_spec_parser.print_rules(triggered_rules)
def setUp(self):
    """Load the test rule spec and the LOG/OPTIONS data sources."""
    base_dir = os.path.abspath(os.path.dirname(__file__))
    spec_path = os.path.join(base_dir, 'input_files/test_rules.ini')
    self.db_rules = RulesSpec(spec_path)
    self.db_rules.load_rules_from_spec()
    self.db_rules.perform_section_checks()
    # data sources: OPTIONS file and LOG file fixtures
    options_path = os.path.join(base_dir, 'input_files/OPTIONS-000005')
    log_path = os.path.join(base_dir, 'input_files/LOG-1')
    self.data_sources = [
        DatabaseOptions(options_path),
        DatabaseLogs(log_path),
    ]
def setUp(self):
    """Load the test rules and build the OPTIONS/LOG data-source parsers."""
    base_dir = os.path.abspath(os.path.dirname(__file__))
    spec_path = os.path.join(base_dir, 'input_files/test_rules.ini')
    self.db_rules = RulesSpec(spec_path)
    self.db_rules.load_rules_from_spec()
    self.db_rules.perform_section_checks()
    # data sources: OPTIONS file and LOG file fixtures
    log_path = os.path.join(base_dir, 'input_files/LOG-1')
    options_path = os.path.join(base_dir, 'input_files/OPTIONS-000005')
    options_source = DatabaseOptions(options_path)
    # the LOG parser needs the column families discovered in OPTIONS
    self.column_families = options_source.get_column_families()
    self.data_sources = [
        options_source,
        DatabaseLogs(log_path, self.column_families),
    ]
def main(args):
    """Load the rule spec, assemble the data sources from the parsed CLI
    arguments, and print whichever rules those sources trigger."""
    # parse and sanity-check the rule specification file
    spec = RulesSpec(args.rules_spec)
    spec.load_rules_from_spec()
    spec.perform_section_checks()
    # one parser per kind of evidence the rules can consume
    options_source = DatabaseOptions(args.rocksdb_options)
    logs_source = DatabaseLogs(
        args.log_files_path_prefix, options_source.get_column_families()
    )
    stats_source = LogStatsParser(
        args.log_files_path_prefix, args.stats_dump_period_sec
    )
    sources = {
        DataSource.Type.DB_OPTIONS: [options_source],
        DataSource.Type.LOG: [logs_source],
        DataSource.Type.TIME_SERIES: [stats_source]
    }
    # ODS is an optional, additional time-series source
    if args.ods_client:
        sources[DataSource.Type.TIME_SERIES].append(OdsStatsFetcher(
            args.ods_client, args.ods_entity, args.ods_tstart,
            args.ods_tend, args.ods_key_prefix
        ))
    matches = spec.get_triggered_rules(
        sources, options_source.get_column_families()
    )
    spec.print_rules(matches)
def main(args):
    """Run the configuration optimizer: benchmark the database, apply the
    expert rules, and emit the tuned options file."""
    # load the rule specification
    rules = RulesSpec(args.rules_spec)
    # resolve the benchmark-runner class from its module/class names
    runner_module = __import__(
        args.benchrunner_module, fromlist=[args.benchrunner_class])
    runner_cls = getattr(runner_module, args.benchrunner_class)
    ods_args = {}
    if args.ods_client and args.ods_entity:
        ods_args['client_script'] = args.ods_client
        ods_args['entity'] = args.ods_entity
        if args.ods_key_prefix:
            ods_args['key_prefix'] = args.ods_key_prefix
    db_bench_runner = runner_cls(args.benchrunner_pos_args, ods_args)
    # initial database configuration
    db_options = DatabaseOptions(args.rocksdb_options, args.misc_options)
    # make RocksDB dump stats into the LOG at the requested period so the
    # stats parser has data to work with
    db_options.update_options({
        "DBOptions.stats_dump_period_sec": {
            NO_COL_FAMILY: args.stats_dump_period_sec
        }
    })
    # improve the configuration for the given benchmarks with the help of
    # the expert-specified rules
    optimizer = ConfigOptimizer(
        db_bench_runner, db_options, rules, args.base_db_path)
    final_db_options = optimizer.run()
    # generate the final rocksdb options file
    print('Final configuration in: ' +
          final_db_options.generate_options_config('final'))
    print('Final miscellaneous options: ' +
          repr(final_db_options.get_misc_options()))
class TestConditionsConjunctions(unittest.TestCase):
    """Checks that a rule fires only when ALL of its conditions fire."""

    def setUp(self):
        """Load the test rule spec and the LOG/OPTIONS data sources."""
        base_dir = os.path.abspath(os.path.dirname(__file__))
        spec_path = os.path.join(base_dir, 'input_files/test_rules.ini')
        self.db_rules = RulesSpec(spec_path)
        self.db_rules.load_rules_from_spec()
        self.db_rules.perform_section_checks()
        log_path = os.path.join(base_dir, 'input_files/LOG-1')
        options_path = os.path.join(base_dir, 'input_files/OPTIONS-000005')
        options_source = DatabaseOptions(options_path)
        # the LOG parser needs the column families discovered in OPTIONS
        self.column_families = options_source.get_column_families()
        logs_source = DatabaseLogs(log_path, self.column_families)
        self.data_sources = {
            DataSource.Type.DB_OPTIONS: [options_source],
            DataSource.Type.LOG: [logs_source]
        }

    def test_condition_conjunctions(self):
        """Conditions joined in one rule trigger the rule only together."""
        conditions = self.db_rules.get_conditions_dict()
        rules = self.db_rules.get_rules_dict()
        # nothing may be triggered before the data sources are scanned
        for condition in conditions.values():
            self.assertFalse(condition.is_triggered(), repr(condition))
        for rule in rules.values():
            self.assertFalse(
                rule.is_triggered(conditions, self.column_families),
                repr(rule))
        # scan the data sources and evaluate every condition
        self.db_rules.trigger_conditions(self.data_sources)
        # conditions the fixture data satisfies (or not)
        expected_true = ['log-1-true', 'log-2-true', 'log-3-true']
        expected_false = ['log-4-false', 'options-1-false']
        for name in expected_true:
            self.assertTrue(conditions[name].is_triggered(), repr(name))
        for name in expected_false:
            self.assertFalse(conditions[name].is_triggered(), repr(name))
        # a rule with even one untriggered condition must not fire
        triggered = ['multiple-conds-true']
        untriggered = [
            'single-condition-false',
            'multiple-conds-one-false',
            'multiple-conds-all-false'
        ]
        for rule_name in triggered:
            self.assertTrue(
                rules[rule_name].is_triggered(
                    conditions, self.column_families),
                repr(rules[rule_name]))
        for rule_name in untriggered:
            self.assertFalse(
                rules[rule_name].is_triggered(
                    conditions, self.column_families),
                repr(rules[rule_name]))
def setUp(self):
    """Parse a deliberately malformed spec and keep its parsed sections.

    NOTE(review): perform_section_checks() is not called here — presumably
    because the tests inspect the malformed sections directly; confirm
    against the test methods.
    """
    base_dir = os.path.abspath(os.path.dirname(__file__))
    spec_path = os.path.join(base_dir, 'input_files/rules_err1.ini')
    spec = RulesSpec(spec_path)
    spec.load_rules_from_spec()
    self.rules_dict = spec.get_rules_dict()
    self.conditions_dict = spec.get_conditions_dict()
    self.suggestions_dict = spec.get_suggestions_dict()
class TestAllRulesTriggered(unittest.TestCase):
    """Checks a spec whose every rule fires against the bundled inputs."""

    def setUp(self):
        """Load the always-triggering rule spec and LOG/OPTIONS sources."""
        base_dir = os.path.abspath(os.path.dirname(__file__))
        spec_path = os.path.join(base_dir, 'input_files/triggered_rules.ini')
        self.db_rules = RulesSpec(spec_path)
        self.db_rules.load_rules_from_spec()
        self.db_rules.perform_section_checks()
        log_path = os.path.join(base_dir, 'input_files/LOG-0')
        options_path = os.path.join(base_dir, 'input_files/OPTIONS-000005')
        options_source = DatabaseOptions(options_path)
        # the LOG parser needs the column families discovered in OPTIONS
        self.column_families = options_source.get_column_families()
        logs_source = DatabaseLogs(log_path, self.column_families)
        self.data_sources = {
            DataSource.Type.DB_OPTIONS: [options_source],
            DataSource.Type.LOG: [logs_source]
        }

    def test_triggered_conditions(self):
        """Every condition and rule fires, with the expected suggestions."""
        conditions = self.db_rules.get_conditions_dict()
        rules = self.db_rules.get_rules_dict()
        # sanity: nothing fires before the data sources are evaluated
        for condition in conditions.values():
            self.assertFalse(condition.is_triggered(), repr(condition))
        for rule in rules.values():
            self.assertFalse(
                rule.is_triggered(conditions, self.column_families),
                repr(rule)
            )
        # evaluating the data sources triggers the conditions as a side
        # effect of computing the triggered rule set
        triggered_rules = self.db_rules.get_triggered_rules(
            self.data_sources, self.column_families
        )
        # every condition must now fire, except time-series ones (no
        # TIME_SERIES source is provided by this test)
        for condition in conditions.values():
            if condition.get_data_source() is DataSource.Type.TIME_SERIES:
                continue
            self.assertTrue(condition.is_triggered(), repr(condition))
        # the triggered set and the full rule set must agree both ways,
        # and each rule's suggestions must match the expected mapping
        for rule in rules.values():
            self.assertIn(rule, triggered_rules)
            for suggestion in rule.get_suggestions():
                self.assertIn(suggestion, RuleToSuggestions[rule.name])
        for rule in triggered_rules:
            self.assertIn(rule, rules.values())
            for suggestion in RuleToSuggestions[rule.name]:
                self.assertIn(suggestion, rule.get_suggestions())
class TestAllRulesTriggered(unittest.TestCase):
    """Exercises a rule spec in which every rule should be triggered."""

    def setUp(self):
        """Load the triggered_rules spec plus the LOG/OPTIONS fixtures."""
        here = os.path.abspath(os.path.dirname(__file__))
        ini_file = os.path.join(here, 'input_files/triggered_rules.ini')
        self.db_rules = RulesSpec(ini_file)
        self.db_rules.load_rules_from_spec()
        self.db_rules.perform_section_checks()
        log_file = os.path.join(here, 'input_files/LOG-0')
        options_file = os.path.join(here, 'input_files/OPTIONS-000005')
        opts_parser = DatabaseOptions(options_file)
        # column families come from OPTIONS and feed the LOG parser
        self.column_families = opts_parser.get_column_families()
        logs_parser = DatabaseLogs(log_file, self.column_families)
        self.data_sources = {
            DataSource.Type.DB_OPTIONS: [opts_parser],
            DataSource.Type.LOG: [logs_parser]
        }

    def test_triggered_conditions(self):
        """All conditions and rules fire; suggestions match expectations."""
        cond_map = self.db_rules.get_conditions_dict()
        rule_map = self.db_rules.get_rules_dict()
        # before evaluation, no condition or rule may be triggered
        for cond in cond_map.values():
            self.assertFalse(cond.is_triggered(), repr(cond))
        for rule in rule_map.values():
            self.assertFalse(
                rule.is_triggered(cond_map, self.column_families),
                repr(rule))
        # computing the triggered rule set evaluates the data sources and
        # triggers the conditions along the way
        fired = self.db_rules.get_triggered_rules(
            self.data_sources, self.column_families)
        # each non-time-series condition must now be triggered (this test
        # supplies no TIME_SERIES source)
        for cond in cond_map.values():
            if cond.get_data_source() is DataSource.Type.TIME_SERIES:
                continue
            self.assertTrue(cond.is_triggered(), repr(cond))
        # triggered rules and the rule dictionary must match both ways
        for rule in rule_map.values():
            self.assertIn(rule, fired)
            for sugg in rule.get_suggestions():
                self.assertIn(sugg, RuleToSuggestions[rule.name])
        for rule in fired:
            self.assertIn(rule, rule_map.values())
            for sugg in RuleToSuggestions[rule.name]:
                self.assertIn(sugg, rule.get_suggestions())
def setUp(self):
    """Load the test rule spec and the typed LOG/OPTIONS data sources."""
    here = os.path.abspath(os.path.dirname(__file__))
    ini_file = os.path.join(here, 'input_files/test_rules.ini')
    self.db_rules = RulesSpec(ini_file)
    self.db_rules.load_rules_from_spec()
    self.db_rules.perform_section_checks()
    log_file = os.path.join(here, 'input_files/LOG-1')
    options_file = os.path.join(here, 'input_files/OPTIONS-000005')
    opts_parser = DatabaseOptions(options_file)
    # column families come from OPTIONS and feed the LOG parser
    self.column_families = opts_parser.get_column_families()
    logs_parser = DatabaseLogs(log_file, self.column_families)
    self.data_sources = {
        DataSource.Type.DB_OPTIONS: [opts_parser],
        DataSource.Type.LOG: [logs_parser]
    }
class TestConditionsConjunctions(unittest.TestCase):
    """Checks that a rule fires only when all of its conditions fire."""

    def setUp(self):
        """Load the test rules plus the LOG/OPTIONS data sources."""
        base_dir = os.path.abspath(os.path.dirname(__file__))
        spec_path = os.path.join(base_dir, 'input_files/test_rules.ini')
        self.db_rules = RulesSpec(spec_path)
        self.db_rules.load_rules_from_spec()
        self.db_rules.perform_section_checks()
        log_path = os.path.join(base_dir, 'input_files/LOG-1')
        options_path = os.path.join(base_dir, 'input_files/OPTIONS-000005')
        self.data_sources = [
            DatabaseOptions(options_path),
            DatabaseLogs(log_path),
        ]

    def test_condition_conjunctions(self):
        """A rule fires only when every one of its conditions triggered."""
        conditions = self.db_rules.get_conditions_dict()
        rules = self.db_rules.get_rules_dict()
        # nothing should be triggered before the data sources are scanned
        for condition in conditions.values():
            self.assertFalse(condition.is_triggered(), repr(condition))
        for rule in rules.values():
            self.assertFalse(rule.is_triggered(conditions), repr(rule))
        # scan the data sources and evaluate every condition
        trigger_conditions(self.data_sources, conditions)
        # conditions the fixture data satisfies (or not)
        expected_true = ['log-1-true', 'log-2-true', 'log-3-true']
        expected_false = ['log-4-false', 'options-1-false']
        for name in expected_true:
            self.assertTrue(conditions[name].is_triggered(), repr(name))
        for name in expected_false:
            self.assertFalse(conditions[name].is_triggered(), repr(name))
        # a rule with even one untriggered condition must not fire
        for name in ['multiple-conds-true']:
            self.assertTrue(
                rules[name].is_triggered(conditions), repr(name)
            )
        for name in [
            'single-condition-false',
            'multiple-conds-one-false',
            'multiple-conds-all-false'
        ]:
            self.assertFalse(
                rules[name].is_triggered(conditions), repr(name)
            )
class TestAllRulesTriggered(unittest.TestCase):
    """Runs the shipped advisor rules against fixtures that trigger all."""

    def setUp(self):
        """Load the shipped advisor rules plus LOG/OPTIONS fixtures."""
        base_dir = os.path.abspath(os.path.dirname(__file__))
        spec_path = os.path.join(base_dir, '../advisor/rules.ini')
        self.db_rules = RulesSpec(spec_path)
        self.db_rules.load_rules_from_spec()
        self.db_rules.perform_section_checks()
        log_path = os.path.join(base_dir, 'input_files/LOG-0')
        options_path = os.path.join(base_dir, 'input_files/OPTIONS-000005')
        self.data_sources = [
            DatabaseOptions(options_path),
            DatabaseLogs(log_path),
        ]

    def test_triggered_conditions(self):
        """All conditions and rules fire against the bundled data sources."""
        conditions = self.db_rules.get_conditions_dict()
        rules = self.db_rules.get_rules_dict()
        # nothing may be triggered before evaluation
        for condition in conditions.values():
            self.assertFalse(condition.is_triggered(), repr(condition))
        for rule in rules.values():
            self.assertFalse(rule.is_triggered(conditions), repr(rule))
        # evaluate the data sources; every condition must then fire
        trigger_conditions(self.data_sources, conditions)
        for condition in conditions.values():
            self.assertTrue(condition.is_triggered(), repr(condition))
        # the triggered rules and the whole rule set must agree both ways,
        # including each rule's expected suggestions
        triggered_rules = get_triggered_rules(rules, conditions)
        for rule in rules.values():
            self.assertIn(rule, triggered_rules)
            for suggestion in rule.get_suggestions():
                self.assertIn(suggestion, RuleToSuggestions[rule.name])
        for rule in triggered_rules:
            self.assertIn(rule, rules.values())
            for suggestion in RuleToSuggestions[rule.name]:
                self.assertIn(suggestion, rule.get_suggestions())
def test_condition_missing_source(self):
    """A condition section without a 'source' entry must fail loading."""
    spec_path = os.path.join(self.this_path, 'input_files/rules_err2.ini')
    rules = RulesSpec(spec_path)
    with self.assertRaisesRegex(
        NotImplementedError, '.*provide source for condition.*'
    ):
        rules.load_rules_from_spec()
def test_section_no_name(self):
    """A spec whose first section lacks a header must fail parsing."""
    spec_path = os.path.join(self.this_path, 'input_files/rules_err4.ini')
    rules = RulesSpec(spec_path)
    with self.assertRaisesRegex(
        ValueError, 'Parsing error: needed section header:.*'
    ):
        rules.load_rules_from_spec()
def test_suggestion_missing_action(self):
    """A suggestion that names an option but no action must fail loading."""
    spec_path = os.path.join(self.this_path, 'input_files/rules_err3.ini')
    rules = RulesSpec(spec_path)
    with self.assertRaisesRegex(
        ValueError, '.*provide action for option.*'
    ):
        rules.load_rules_from_spec()