def test_duplicate_file_names(self):
    """Verify that no two rule files share the same base file name."""
    # Group every loaded rule path under its base name; load_rule_files()
    # returns a dict keyed by path, so iterating it yields the paths.
    seen = defaultdict(list)
    for path in rule_loader.load_rule_files():
        seen[os.path.basename(path)].append(path)

    duplicates = {name: paths for name, paths in seen.items() if len(paths) > 1}
    if duplicates:
        self.fail(f"Found duplicated file names {duplicates}")
def test_all_rule_queries_optimized(self):
    """Ensure that every rule query is in optimized form."""
    for file_name, contents in rule_loader.load_rule_files().items():
        rule = Rule(file_name, contents)

        # Only kuery-language rules with a query can be kql-optimized.
        if not (rule.query and rule.contents['language'] == 'kuery'):
            continue

        parsed = kql.parse(rule.query, optimize=False)
        optimized = parsed.optimize(recursive=True)
        message = '\nQuery not optimized for rule: {} - {}\nExpected: {}\nActual: {}'.format(
            rule.name, rule.id, optimized, rule.query)
        self.assertEqual(parsed, optimized, message)
def test_file_names(self):
    """Test that the file names meet the requirement."""
    pattern = rule_loader.FILE_PATTERN

    # Sanity-check the pattern itself: known-bad names must not match.
    for bad_name in ('NotValidRuleFile.toml', 'still_not_a_valid_file_name.not_json'):
        self.assertIsNone(re.match(pattern, bad_name),
                          'Incorrect pattern for verifying rule names: {}'.format(pattern))

    # Every loaded rule file's base name must match the pattern.
    for rule_file in rule_loader.load_rule_files():
        self.assertIsNotNone(re.match(pattern, os.path.basename(rule_file)),
                             'Invalid file name for {}'.format(rule_file))
def test_no_unrequired_defaults(self):
    """Test that values that are not required in the schema are not set with default values."""
    offenders = {}
    for file_name, contents in rule_loader.load_rule_files().items():
        rule = Rule(file_name, contents)
        matches = rule_loader.find_unneeded_defaults(rule)
        if matches:
            offenders['{} - {}'.format(rule.name, rule.id)] = matches

    self.assertDictEqual(
        offenders, {},
        'The following rules have unnecessary default values set: \n{}'.format(
            json.dumps(offenders, indent=2)))
def test_all_rule_files(self):
    """Ensure that every rule file can be loaded and validated against the schema.

    On failure, the offending file name is printed to stderr before the
    exception propagates, so the broken rule is easy to identify in output.
    """
    for file_name, contents in rule_loader.load_rule_files().items():
        try:
            Rule(file_name, contents)
        except (pytoml.TomlError, toml.TomlDecodeError):
            print("TOML error when parsing rule file \"{}\"".format(os.path.basename(file_name)),
                  file=sys.stderr)
            # Bare raise preserves the original traceback (unlike `raise e`).
            raise
        except jsonschema.ValidationError:
            print("Schema error when parsing rule file \"{}\"".format(os.path.basename(file_name)),
                  file=sys.stderr)
            raise
def test_all_rules_as_rule_schema(self):
    """Ensure that every rule file validates against the rule schema."""
    for file_name, contents in rule_loader.load_rule_files().items():
        Rule(file_name, contents).validate(as_rule=True)
def test_schema_and_dupes(self):
    """Ensure that every rule matches the schema and there are no duplicates."""
    loaded = rule_loader.load_rule_files()
    self.assertGreaterEqual(len(loaded), 1, 'No rules were loaded from rules directory!')
def setUpClass(cls):
    """Load rule files, parsed rules, and production rules once for all tests."""
    cls.rule_files = rule_loader.load_rule_files(verbose=False)
    lookup = rule_loader.load_rules(verbose=False)
    cls.rule_lookup = lookup
    cls.rules = lookup.values()
    cls.production_rules = rule_loader.get_production_rules()