def handler(q=False):
    if q is False:
        return False
    request = json.loads(q)
    if not request.get('sigma'):
        misperrors['error'] = 'Sigma rule missing'
        return misperrors
    config = SigmaConfiguration()
    f = io.TextIOWrapper(io.BytesIO(request.get('sigma').encode()), encoding='utf-8')
    parser = SigmaCollectionParser(f, config)
    targets = []
    results = []
    for t in sigma_targets:
        backend = getBackend(t)(config, {'rulecomment': False})
        try:
            parser.generate(backend)
            result = backend.finalize()
            if result:
                results.append(result)
                targets.append(t)
        except Exception:
            continue
    d_result = {t: r.strip() for t, r in zip(targets, results)}
    return {'results': [{'types': mispattributes['output'], 'values': d_result}]}
def handler(q=False):
    if q is False:
        return False
    request = json.loads(q)
    if not request.get('sigma'):
        misperrors['error'] = 'Sigma rule missing'
        return misperrors
    config = SigmaConfiguration()
    backend_options = BackendOptions(None)
    f = io.TextIOWrapper(io.BytesIO(request.get('sigma').encode()), encoding='utf-8')
    parser = SigmaCollectionParser(f, config, None)
    targets = []

    # The backends write their queries to stdout, so capture it and separate
    # the per-target output with a marker.
    old_stdout = sys.stdout
    result = io.StringIO()
    sys.stdout = result
    for t in sigma_targets:
        backend = getBackend(t)(config, backend_options, None)
        try:
            parser.generate(backend)
            backend.finalize()
            print("#NEXT")
            targets.append(t)
        except Exception:
            continue
    sys.stdout = old_stdout

    # Drop the trailing separator and split the captured output per target.
    results = result.getvalue()[:-5].split('#NEXT')
    d_result = {t: r.strip() for t, r in zip(targets, results)}
    return {'results': [{'types': mispattributes['output'], 'values': d_result}]}
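Either variant is driven the same way: the module receives a JSON string and answers with one converted query per entry in sigma_targets. A minimal invocation sketch, assuming the surrounding misp-modules globals (misperrors, mispattributes, sigma_targets, getBackend) are in scope; the rule content and the printed shape are illustrative:

import json

payload = json.dumps({
    "sigma": "title: Test\n"
             "logsource:\n"
             "    category: process_creation\n"
             "detection:\n"
             "    selection:\n"
             "        Image: 'mimikatz.exe'\n"
             "    condition: selection"
})
response = handler(payload)
# illustrative shape: {'results': [{'types': [...], 'values': {'es-qs': '...', 'splunk': '...'}}]}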
def test_backend_elastic_count_nofield_agg():
    """
    Test aggregation of the form

    count() by GroupedField < 3
    """
    sigma_config = SigmaConfiguration()
    backend = ElasticsearchDSLBackend(sigma_config)

    # setup the aggregator input object without calling __init__()
    agg = object.__new__(SigmaAggregationParser)
    agg.condition = "3"
    agg.cond_op = "<"
    agg.aggfunc = SigmaAggregationParser.AGGFUNC_COUNT
    agg.aggfield = None
    agg.groupfield = "GroupedField"

    # Make queries non-empty
    backend.queries = [{}]
    backend.generateAggregation(agg)

    bucket_selector = backend.queries[0]["aggs"]["GroupedField_count"]["aggs"]["limit"]["bucket_selector"]
    assert len(backend.queries) == 1, "backend has exactly one query"
    assert "GroupedField_count" in backend.queries[0]["aggs"], "GroupedField_count is the top aggregation key"
    assert "params.count < 3" in bucket_selector["script"], "bucket selector script must be 'params.count < 3'"
    assert "count" in bucket_selector["buckets_path"], "buckets_path must be 'count'"
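Pieced together from the assertions, the aggregation the backend should emit looks roughly like the following; everything beyond the asserted keys and substrings is an assumption:

expected_shape = {
    "aggs": {
        "GroupedField_count": {
            "terms": {"field": "GroupedField"},  # assumed grouping clause
            "aggs": {
                "limit": {
                    "bucket_selector": {
                        "buckets_path": {"count": "_count"},  # the 'count' key is asserted; its target is assumed
                        "script": "params.count < 3",
                    }
                }
            },
        }
    }
}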
def setUp(self):
    with patch("analyse_forensicstore.ForensicstoreSigma.__init__", return_value=None):
        self.analysis = ForensicstoreSigma("any_forensicstore", "test_table", "any_sigma_config")
    self.analysis.config = SigmaConfiguration()
def test_empty_io_stream(self):
    self.analysis.config = SigmaConfiguration()
    self.analysis.table = "tablename"
    self.analysis.SQL = ForensicStoreBackend(self.analysis.config)

    with patch("builtins.open", mock_open(read_data="")):
        assert self.analysis.generateSqlQuery(open("empty file")) == []
def __init__(self, content, config=None, rulefilter=None):
    if config is None:
        from sigma.configuration import SigmaConfiguration
        config = SigmaConfiguration()
    self.yamls = yaml.safe_load_all(content)
    globalyaml = dict()
    self.parsers = list()
    prevrule = None
    for yamldoc in self.yamls:
        action = None
        try:
            action = yamldoc['action']
            del yamldoc['action']
        except KeyError:
            pass

        if action == "global":
            # Merge into the state applied to all following rules
            deep_update_dict(globalyaml, yamldoc)
        elif action == "reset":
            globalyaml = dict()
        elif action == "repeat":
            if prevrule is None:
                raise SigmaCollectionParseError("action 'repeat' is only applicable after first valid Sigma rule")
            newrule = prevrule.copy()
            deep_update_dict(newrule, yamldoc)
            if rulefilter is None or rulefilter.match(newrule):
                self.parsers.append(SigmaParser(newrule, config))
                prevrule = newrule
        else:
            deep_update_dict(yamldoc, globalyaml)
            if rulefilter is None or rulefilter.match(yamldoc):
                self.parsers.append(SigmaParser(yamldoc, config))
                prevrule = yamldoc
    self.config = config
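A sketch of the document actions handled above, with illustrative rule content: an action: global document merges into every following rule, action: reset clears that state, and action: repeat derives a new rule by deep-updating a copy of the previous one:

collection = """
action: global
logsource:
    category: process_creation
---
title: Rule A
detection:
    selection:
        Image: 'a.exe'
    condition: selection
---
action: repeat
title: Rule B
detection:
    selection:
        Image: 'b.exe'
"""
parser = SigmaCollectionParser(collection)
assert len(parser.parsers) == 2  # Rule A, plus Rule B derived from it; both carry the global logsource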
def test_options(self):
    # Setting attributes for testing
    self.analysis.config = SigmaConfiguration()
    self.analysis.table = "tablename"
    self.analysis.SQL = ForensicStoreBackend(self.analysis.config)

    sigma_rule = {
        "title": "Test",
        "level": "testing",
        "detection": {
            "keywords": ["test1", "test2"],
            "condition": "keywords"
        }
    }
    generated_query = "Dummy query"

    with patch("yaml.safe_load_all", return_value=[sigma_rule]) as mock_yaml_load:
        with patch("sigma.backends.sql.SQLBackend.generate", return_value=generated_query) as mock_sql_generate:
            # Test for yaml file containing single rule
            assert self.analysis.generateSqlQuery("any sigma io") == [(generated_query, sigma_rule)]
            mock_yaml_load.assert_called_with("any sigma io")

            # Test for yaml file containing two rules
            mock_yaml_load.return_value = [sigma_rule, sigma_rule]
            assert self.analysis.generateSqlQuery("any sigma io") == [
                (generated_query, sigma_rule),
                (generated_query, sigma_rule)
            ]
            assert mock_yaml_load.call_count == 2
            assert mock_sql_generate.call_count == 3
def test_invalid_io_stream(self):
    self.analysis.config = SigmaConfiguration()
    self.analysis.table = "tablename"
    self.analysis.SQL = ForensicStoreBackend(self.analysis.config)

    with patch("builtins.open", mock_open(read_data="not valid\n\nwhatever")):
        self.assertRaises(SigmaParseError, self.analysis.generateSqlQuery, open("invalid file"))
def __init__(self, url, sigmaconfig):
    if not os.path.exists(sigmaconfig):
        raise FileNotFoundError(sigmaconfig)
    if not os.path.exists(url):
        raise FileNotFoundError(url)

    self.table = "elements"
    self.store = forensicstore.open(url)
    self.config = SigmaConfiguration(open(sigmaconfig))
    self.SQL = ForensicStoreBackend(self.config)
def get(self, name):
    """
    Return a config by identifier or file path.

    First, it tries to resolve the identifier from discovered configurations
    (file name stem). If this fails, the parameter value is treated as a file name.
    """
    try:
        # Lookup in discovered configurations
        return self.configs[name]
    except KeyError:
        # Identifier not found, try it as a file name
        f = open(name)
        return SigmaConfiguration(f)
def generate_query(self, rule, backend_options=None, config=None, fieldmappings=None):
    if backend_options is None:
        backend_options = {}
    if config is None:
        config = {}
    if fieldmappings is None:
        fieldmappings = {}

    cfg = SigmaConfiguration()
    cfg.config = config
    cfg.fieldmappings = fieldmappings
    backend = DatadogLogsBackend(cfg, backend_options)
    parser = SigmaParser(rule, cfg)
    return backend.generate(parser)
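A hypothetical test built on the helper above; the rule dictionary follows the usual Sigma shape, FieldMapping is assumed to be importable from sigmatools as in the other tests here, and the asserted substring is only an assumption about what the backend emits:

def test_mapped_field(self):
    rule = {
        "title": "Test",
        "detection": {"selection": {"fieldname": "test1"}, "condition": "selection"},
    }
    query = self.generate_query(
        rule,
        fieldmappings={"fieldname": FieldMapping("fieldname", "mapped_fieldname")},
    )
    self.assertIn("mapped_fieldname", query)  # assumption: the mapped name appears verbatim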
def test_fieldname_mapping(self):
    detection = {"selection": {"fieldname": "test1"}, "condition": "selection"}
    expected_result = 'SELECT * FROM {} WHERE mapped_fieldname = "test1"'.format(self.table)

    # configure mapping
    config = SigmaConfiguration()
    config.fieldmappings["fieldname"] = FieldMapping("fieldname", "mapped_fieldname")

    self.basic_rule["detection"] = detection

    with patch("yaml.safe_load_all", return_value=[self.basic_rule]):
        parser = SigmaCollectionParser("any sigma io", config, None)
        backend = SQLBackend(config, self.table)

        assert len(parser.parsers) == 1
        for p in parser.parsers:
            self.assertEqual(expected_result, backend.generate(p))
def update(self):
    """Update configurations"""
    self.configs.clear()
    self.errors.clear()
    for path in reversed(self.paths):  # Configs from first paths override later ones
        for conf_path in path.glob("**/*.yml"):
            try:
                f = conf_path.open()
                self.configs[conf_path.stem] = SigmaConfiguration(f)
                f.close()
            except (SigmaConfigParseError, OSError) as e:
                self.errors.append((conf_path, e))
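A usage sketch for the discovery pair above (update() and the get() shown earlier); the surrounding class is not part of this excerpt, so ConfigDiscovery, the search paths, and the identifier are all hypothetical:

import pathlib

discovery = ConfigDiscovery()  # hypothetical constructor of the surrounding class
discovery.paths = [pathlib.Path("/etc/sigma"), pathlib.Path("./config")]  # hypothetical search paths
discovery.update()                        # discover all *.yml below the paths
config = discovery.get("elk-winlogbeat")  # by file name stem, else treated as a file path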
def test_fieldname_mapping(self):
    detection = {"selection": {"fieldname": "test1"}, "condition": "selection"}
    expected_result = 'SELECT json FROM {} ' \
                      'WHERE json_extract(json, \'$.type\') = \'eventlog\' ' \
                      'AND json_extract(json, \'$.mapped_fieldname\') = "test1"'.format(self.table)

    # configure mapping
    config = SigmaConfiguration()
    config.fieldmappings["fieldname"] = FieldMapping("fieldname", "mapped_fieldname")

    self.basic_rule["detection"] = detection

    with patch("yaml.safe_load_all", return_value=[self.basic_rule]):
        parser = SigmaCollectionParser("any sigma io", config, None)
        backend = ForensicStoreBackend(config)

        assert len(parser.parsers) == 1
        for p in parser.parsers:
            self.assertEqual(expected_result.lower(), backend.generate(p).lower())
def main():
    backend = SigmaNormalizationBackend(SigmaConfiguration())

    if args.recursive:
        paths = [p for pathname in args.inputs for p in pathlib.Path(pathname).glob("**/*") if p.is_file()]
    else:
        paths = [pathlib.Path(pathname) for pathname in args.inputs]

    primary_paths = None
    if args.primary:
        with open(args.primary, "r") as f:
            primary_paths = {pathname.strip() for pathname in f.readlines()}

    parsed = {str(path): SigmaCollectionParser(path.open().read()) for path in paths}
    converted = {str(path): list(sigma_collection.generate(backend)) for path, sigma_collection in parsed.items()}
    converted_flat = ((path, i, normalized) for path, nlist in converted.items() for i, normalized in enumerate(nlist))
    converted_pairs_iter = itertools.combinations(converted_flat, 2)
    if primary_paths:
        converted_pairs = [pair for pair in converted_pairs_iter if pair[0][0] in primary_paths or pair[1][0] in primary_paths]
    else:
        converted_pairs = list(converted_pairs_iter)
    similarities = [
        (item1[:2], item2[:2], difflib.SequenceMatcher(None, item1[2], item2[2]).ratio())
        for item1, item2 in progressbar.progressbar(converted_pairs)
    ]

    i = 0
    for similarity in sorted(similarities, key=lambda s: s[2], reverse=True):
        if args.min_similarity and similarity[2] * 100 < args.min_similarity:  # finish after similarity drops below minimum
            break
        print("{:70} | {:2} | {:70} | {:2} | {:>3.2%}".format(*similarity[0], *similarity[1], similarity[2]))
        i += 1
        if args.top and i >= args.top:  # end after $top pairs
            break
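The pairwise score computed above is plain difflib over the normalized query strings, for example:

import difflib

# 14 of 15 characters match in each string: ratio = 2 * 14 / 30 ≈ 0.93
difflib.SequenceMatcher(None, "proc_name=a.exe", "proc_name=b.exe").ratio()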
def handler(q=False):
    if q is False:
        return False
    request = json.loads(q)
    if not request.get('sigma'):
        misperrors['error'] = 'Sigma rule missing'
        return misperrors
    config = SigmaConfiguration()
    try:
        parser = SigmaParser(yaml.safe_load(request.get('sigma')), config)
        result = "Syntax valid: {}".format(parser.values)
    except Exception as e:
        result = "Syntax error: {}".format(str(e))
    return {'results': [{'types': mispattributes['output'], 'values': result}]}
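A hypothetical round trip through this validator: well-formed YAML echoes the parsed values, while broken input comes back as a syntax-error result instead of raising:

import json

valid = {"sigma": "title: Test\ndetection:\n    selection:\n        key: value\n    condition: selection"}
print(handler(json.dumps(valid)))    # -> values like 'Syntax valid: {...}'

invalid = {"sigma": "detection: ["}  # not even valid YAML
print(handler(json.dumps(invalid)))  # -> values like 'Syntax error: ...'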
def validate(self, detection, expectation):
    config = SigmaConfiguration()

    self.basic_rule["detection"] = detection

    with patch("yaml.safe_load_all", return_value=[self.basic_rule]):
        parser = SigmaCollectionParser("any sigma io", config, None)
        backend = DevoBackend(config, self.table)

        assert len(parser.parsers) == 1
        for p in parser.parsers:
            if isinstance(expectation, str):
                self.assertEqual(expectation, backend.generate(p))
            elif isinstance(expectation, Exception):
                self.assertRaises(type(expectation), backend.generate, p)
parser.add_argument(
    "--verbose",
    default=False,
    action="store_true",
    help="Print individual results about each processed rule.")
args = parser.parse_args()

verbose_report = args.verbose

skipped = 0
errors = 0
successes = 0
total = 0

config = SigmaConfiguration()
backend = DatadogLogsBackend(config)

for (dirpath, _, filenames) in os.walk("../rules"):
    for filename in filenames:
        if filename.endswith(".yaml") or filename.endswith(".yml"):
            rule_path = os.path.join(dirpath, filename)
            with open(rule_path, "r") as rule_file:
                total += 1
                parser = SigmaParser(yaml.safe_load(rule_file), config)
                try:
                    query = backend.generate(parser)
                except NotImplementedError as err:
                    if verbose_report:
args = parser.parse_args()

success_report = args.success
skipped_report = args.skipped
failed_report = args.failed
display_results = False
if success_report or skipped_report or failed_report:
    display_results = True

skipped = 0
errors = 0
successes = 0
total = 0

config = SigmaConfiguration(open('./config/dnif.yml'))
backend = DnifBackend(config)
results = {'skipped': '', 'failed': '', 'success': ''}
queries = ''

for (dirpath, _, filenames) in os.walk("../rules"):
    for filename in filenames:
        if filename.endswith(".yaml") or filename.endswith(".yml"):
            rule_path = os.path.join(dirpath, filename)
            with open(rule_path, "r") as rule_file:
                total += 1
                parser = SigmaParser(yaml.safe_load(rule_file), config)
                try:
def generateTypedValueNode(self, node):
    """Return normalized form of typed values"""
    return "type_{}({})".format(node.identifier, str(node))

def generateAggregation(self, agg):
    if agg.aggfunc_notrans == "near":
        return " near in={} ex={}".format(str(agg.include), str(agg.exclude))
    else:
        return " | {}({}) by {} {} {}".format(agg.aggfunc_notrans, agg.aggfield, agg.groupfield, agg.cond_op, agg.condition)
"--verbose", default=False, action="store_true", help="Print individual results about each processed rule.", ) args = parser.parse_args() verbose_report = args.verbose skipped = 0 errors = 0 successes = 0 total = 0 config = SigmaConfiguration(open('./config/streamalert.yml')) backend = StreamAlertQueryBackend(config) results = {'skipped': '', 'failed': '', 'success': ''} queries = '' for (dirpath, _, filenames) in os.walk("../rules"): for filename in filenames: if filename.endswith(".yaml") or filename.endswith(".yml"): rule_path = os.path.join(dirpath, filename) with open(rule_path, "r") as rule_file: total += 1 parser = SigmaParser(yaml.safe_load(rule_file), config) try: