Example 1
    def resolve(self, key, value, sigmaparser):
        # build list of matching target mappings
        targets = set()
        for condfield in self.conditions:
            if condfield in sigmaparser.values:
                rulefieldvalues = sigmaparser.values[condfield]
                for condvalue in self.conditions[condfield]:
                    if condvalue in rulefieldvalues:
                        targets.update(self.conditions[condfield][condvalue])
        if len(targets) == 0:  # no matching condition, try with default mapping
            if self.default != None:
                targets = self.default

        if len(targets) == 1:  # result set contains only one target, return mapped item (like SimpleFieldMapping)
            return (targets.pop(), value)
        elif len(targets) > 1:  # result set contains multiple targets, return all linked as OR condition (like MultiFieldMapping)
            cond = ConditionOR()
            for target in targets:
                cond.add((target, value))
            return cond
        else:  # no mapping found
            return (key, value)
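
The lookup above is driven by a nested conditions table: condition field, then rule value, then a set of target field names. Below is a minimal, self-contained sketch of that collection step using plain dicts; the field names and values are hypothetical and only illustrate the shape the loop expects.

# Hypothetical condition table and rule values; stand-ins for self.conditions
# and sigmaparser.values, not taken from a real Sigma configuration.
conditions = {
    "EventID": {                      # condition field
        "4688": {"NewProcessName"},   # rule value -> mapped target fields
        "1": {"Image"},
    },
}
rule_values = {"EventID": ["4688"]}

targets = set()
for condfield, valuemap in conditions.items():
    for condvalue, mapped in valuemap.items():
        if condvalue in rule_values.get(condfield, []):
            targets.update(mapped)

print(targets)  # {'NewProcessName'} -> a single target resolves like SimpleFieldMapping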
Example 2
 def get_indexcond(self):
     """Get index condition if index field name is configured"""
     cond = ConditionOR()
     if self.indexfield:
         for index in self.index:
             cond.add((self.indexfield, index))
         return cond
     else:
         return None
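
The method returns a ConditionOR over every configured index pattern, or None when no index field is set. A toy stand-in for ConditionOR (the real sigma class carries more behavior) makes the returned shape visible; the index field name and patterns are hypothetical.

# ConditionOR below is a minimal stand-in, not the real sigma class.
class ConditionOR:
    def __init__(self):
        self.items = []

    def add(self, item):
        self.items.append(item)

indexfield = "index"                          # assumed backend index field name
indices = ["logs-windows-*", "winlogbeat-*"]  # hypothetical index patterns
cond = ConditionOR()
for index in indices:
    cond.add((indexfield, index))
print(cond.items)  # [('index', 'logs-windows-*'), ('index', 'winlogbeat-*')]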
Example 3
    def generateMapItemNode(self, node):
        """
        Map Expression for Lacework Query Language (LQL)

        Special handling for contains by inspecting value for wildcards
        """
        fieldname, value = node

        if self._should_ignore_field(fieldname):
            return None
        transformed_fieldname = self.fieldNameMapping(fieldname, value)

        # is not null
        if value == '*':
            if ':' in transformed_fieldname:
                return f'value_exists({transformed_fieldname})'
            return f'{transformed_fieldname} is not null'
        # contains
        if isinstance(value, str) and value.startswith('*') and value.endswith('*'):
            value = self.generateValueNode(value[1:-1])
            return f"contains({transformed_fieldname}, {value})"
        # startswith
        if isinstance(value, str) and value.endswith('*'):  # a wildcard at the end signifies startswith
            value = self.generateValueNode(value[:-1])
            return f"starts_with({transformed_fieldname}, {value})"
        # endswith
        if isinstance(value, str) and value.startswith('*'):  # a wildcard at the start signifies endswith
            value = f'%{value[1:]}'
            new_value = self.generateValueNode(value)
            if new_value != (self.valueExpression % value):
                raise BackendError(
                    'Lacework backend only supports endswith for literal string values'
                )
            return f"{transformed_fieldname} LIKE {new_value}"
        if isinstance(value, (str, int)):
            return self.mapExpression % (transformed_fieldname,
                                         self.generateNode(value))
        # mapListsHandling
        elif type(value) == list:
            # if a list contains values with wildcards we can't use standard handling ("in")
            if any(isinstance(x, str) and (x.startswith('*') or x.endswith('*')) for x in value):
                node = NodeSubexpression(
                    ConditionOR(None, None,
                                *[(transformed_fieldname, x) for x in value]))
                return self.generateNode(node)
            return self.generateMapItemListNode(transformed_fieldname, value)
        elif value is None:
            return self.nullExpression % (transformed_fieldname, )
        else:
            raise TypeError(
                f'Lacework backend does not support map values of type {type(value)}'
            )
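
The branches above amount to a dispatch on where wildcards appear in the value. The helper below is purely illustrative (classify() is not part of the backend) and assumes string values; it just names which LQL construct each value shape selects.

# classify() is a hypothetical helper for illustration only.
def classify(value: str) -> str:
    if value == '*':
        return 'is not null / value_exists()'
    if value.startswith('*') and value.endswith('*'):
        return 'contains()'
    if value.endswith('*'):
        return 'starts_with()'
    if value.startswith('*'):
        return 'LIKE %... (literal strings only)'
    return 'plain equality (mapExpression)'

for v in ('*', '*cmd.exe*', 'cmd.exe*', '*cmd.exe', 'cmd.exe'):
    print(v, '->', classify(v))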
Example 4
    def resolve(self, key, value, sigmaparser):
        targets = self._targets(sigmaparser)
        if len(targets) == 0:  # no matching condition, try with default mapping
            if self.default != None:
                targets = self.default

        if len(targets) == 1:  # result set contains only one target, return mapped item (like SimpleFieldMapping)
            if value is None:
                return ConditionNULLValue(val=list(targets)[0])
            else:
                return (list(targets)[0], value)
        elif len(targets) > 1:  # result set contains multiple targets, return all linked as OR condition (like MultiFieldMapping)
            cond = ConditionOR()
            for target in targets:
                if value is None:
                    cond.add(ConditionNULLValue(val=target))
                else:
                    cond.add((target, value))
            return NodeSubexpression(cond)
        else:  # no mapping found
            if value is None:
                return ConditionNULLValue(val=key)
            else:
                return (key, value)
Example 5
 def resolve(self, key, value, sigmaparser):
     if type(self.fieldmappings) == str:  # one field mapping
         return (self.fieldmappings, value)
     elif isinstance(self.fieldmappings, SimpleFieldMapping):
         return self.fieldmappings.resolve(key, value, sigmaparser)
     elif type(self.fieldmappings) == set:
         cond = ConditionOR()
         for mapping in self.fieldmappings:
             if type(mapping) == str:
                 cond.add((mapping, value))
             elif isinstance(mapping, SimpleFieldMapping):
                 cond.add(mapping.resolve(key, value, sigmaparser))
         return NodeSubexpression(cond)
Example 6
 def resolve(self, key, value, sigmaparser):
     if type(self.fieldmappings) == str:     # one field mapping
         return (self.fieldmappings, value)
     elif isinstance(self.fieldmappings, ConditionalFieldMapping):
         logsource = sigmaparser.parsedyaml.get("logsource")
         condition = self.fieldmappings.conditions
         for source_type, logsource_item in logsource.items():
             if condition.get(source_type) and condition.get(source_type, {}).get(logsource_item):
                 new_field = condition.get(source_type, {}).get(logsource_item)
                 self.fieldmappings.default = new_field
         return self.fieldmappings.resolve(self.fieldmappings.source, value, sigmaparser)
     elif isinstance(self.fieldmappings, SimpleFieldMapping):
         return self.fieldmappings.resolve(key, value, sigmaparser)
     elif type(self.fieldmappings) == set:
         cond = ConditionOR()
         for mapping in self.fieldmappings:
             if type(mapping) == str:
                 cond.add((mapping, value))
             elif isinstance(mapping, SimpleFieldMapping):
                 cond.add(mapping.resolve(key, value, sigmaparser))
         return NodeSubexpression(cond)
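
In the ConditionalFieldMapping branch, the rule's logsource entries are looked up in the mapping's condition table and, on a hit, replace the mapping's default target before resolution. A small sketch with hypothetical data follows; treat the conditions layout as an assumption inferred from the loop above.

# Hypothetical logsource block and condition table; only shows how the
# fallback target is picked before fieldmappings.resolve() runs.
logsource = {"product": "windows", "service": "security"}
conditions = {"service": {"security": {"TargetUserName"}}}

default = None
for source_type, logsource_item in logsource.items():
    mapped = conditions.get(source_type, {}).get(logsource_item)
    if mapped:
        default = mapped

print(default)  # {'TargetUserName'} becomes the fallback target set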
Example 7
 def resolve(self, key, value, sigmaparser):
     """Returns multiple target field names as OR condition"""
     cond = ConditionOR()
     for fieldname in self.target:
         cond.add((fieldname, value))
     return NodeSubexpression(cond)
Example 8
    def __init__(self,
                 logsource=None,
                 defaultindex=None,
                 name=None,
                 mergemethod=MM_AND,
                 indexfield=None):
        self.name = name
        self.indexfield = indexfield
        if logsource == None:  # create empty object
            self.category = None
            self.product = None
            self.service = None
            self.index = list()
            self.conditions = None
        elif type(logsource) == list and all([isinstance(o, SigmaLogsourceConfiguration) for o in logsource]):  # list of SigmaLogsourceConfigurations: merge according to mergemethod
            # Merge category, product and service
            categories = set([ls.category for ls in logsource if ls.category != None])
            products = set([ls.product for ls in logsource if ls.product != None])
            services = set([ls.service for ls in logsource if ls.service != None])
            if len(categories) > 1 or len(products) > 1 or len(services) > 1:
                raise ValueError(
                    "Merged SigmaLogsourceConfigurations must have disjunct categories (%s), products (%s) and services (%s)"
                    % (str(categories), str(products), str(services)))

            try:
                self.category = categories.pop()
            except KeyError:
                self.category = None
            try:
                self.product = products.pop()
            except KeyError:
                self.product = None
            try:
                self.service = services.pop()
            except KeyError:
                self.service = None

            # Merge all index patterns
            self.index = list(set([index for ls in logsource for index in ls.index]))  # unique(flat(logsources.index))
            if len(self.index) == 0 and defaultindex is not None:  # if no index pattern matched and default index is present: use default index
                if type(defaultindex) == str:
                    self.index = [defaultindex]
                elif type(defaultindex) == list and all([type(i) == str for i in defaultindex]):
                    self.index = defaultindex
                else:
                    raise TypeError(
                        "Default index must be string or list of strings")

            # "merge" index field (should never differ between instances because it is provided by the backend class)
            indexfields = [ls.indexfield for ls in logsource if ls.indexfield != None]
            try:
                self.indexfield = indexfields[0]
            except IndexError:
                self.indexfield = None

            # Merge conditions according to mergemethod
            if mergemethod == self.MM_AND:
                cond = ConditionAND()
            elif mergemethod == self.MM_OR:
                cond = ConditionOR()
            else:
                raise ValueError("Mergemethod must be '%s' or '%s'" %
                                 (self.MM_AND, self.MM_OR))
            for ls in logsource:
                if ls.conditions != None:
                    cond.add(ls.conditions)
            if len(cond) > 0:
                self.conditions = cond
            else:
                self.conditions = None
        elif type(logsource) == dict:  # create logsource configuration from parsed yaml
            if 'category' in logsource and type(logsource['category']) != str \
                    or 'product' in logsource and type(logsource['product']) != str \
                    or 'service' in logsource and type(logsource['service']) != str:
                raise SigmaConfigParseError(
                    "Logsource category, product or service must be a string")
            try:
                self.category = logsource['category']
            except KeyError:
                self.category = None
            try:
                self.product = logsource['product']
            except KeyError:
                self.product = None
            try:
                self.service = logsource['service']
            except KeyError:
                self.service = None
            if self.category == None and self.product == None and self.service == None:
                raise SigmaConfigParseError(
                    "Log source definition will not match")

            if 'index' in logsource:
                index = logsource['index']
                if type(index) not in (str, list):
                    raise SigmaConfigParseError(
                        "Logsource index must be string or list of strings")
                if type(index) == list and not all([type(i) == str for i in index]):
                    raise SigmaConfigParseError(
                        "Logsource index patterns must be strings")
                if type(index) == list:
                    self.index = index
                else:
                    self.index = [index]
            else:
                # no default index handling here - this branch is executed if log source definitions are parsed from
                # config and these must not necessarily contain an index definition. A valid index may later be result
                # from a merge, where default index handling applies.
                self.index = []

            if 'conditions' in logsource:
                if type(logsource['conditions']) != dict:
                    raise SigmaConfigParseError(
                        "Logsource conditions must be a map")
                cond = ConditionAND()
                for key, value in logsource['conditions'].items():
                    cond.add((key, value))
                self.conditions = cond
            else:
                self.conditions = None
        else:
            raise SigmaConfigParseError("Logsource definitions must be maps")
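
When a list of SigmaLogsourceConfiguration objects is merged, each of category, product and service may be contributed by at most one of them, otherwise the constructor raises. The check can be reproduced standalone; the LS class below is a hypothetical stand-in for the real configuration objects.

# LS is a hypothetical stand-in for SigmaLogsourceConfiguration attributes.
class LS:
    def __init__(self, category=None, product=None, service=None):
        self.category, self.product, self.service = category, product, service

logsources = [LS(product="windows"), LS(service="sysmon")]

categories = {ls.category for ls in logsources if ls.category is not None}
products = {ls.product for ls in logsources if ls.product is not None}
services = {ls.service for ls in logsources if ls.service is not None}

if len(categories) > 1 or len(products) > 1 or len(services) > 1:
    raise ValueError("merged log sources must not define conflicting attributes")

print(products, services)  # {'windows'} {'sysmon'} -> merge is allowed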