Ejemplo n.º 1
0
    def test_will_parse_basic(self):
        """A single column_equal rule inside an AND group parses to two conditions."""
        raw = """
        {
          "condition": "AND",
          "rules": [
            {
              "id": "column",
              "field": "column",
              "type": "string",
              "input": "text",
              "operator": "column_equal",
              "value": [
                "model_id",
                "5500"
              ]
            }
          ]
        }
        """

        net = Network()
        net.parse_query(json.loads(raw))

        # One AND group plus one column condition.
        assert len(net.conditions) == 2
Ejemplo n.º 2
0
    def test_parse_error_unknown_operator(self):
        """An unrecognized operator in a rule must raise a descriptive ValueError."""
        query = json.loads("""
        {
          "condition": "OR",
          "rules": [
            {
              "id": "query_name",
              "field": "query_name",
              "type": "string",
              "input": "text",
              "operator": "BAD OPERATOR",
              "value": "foo"
            }
          ]
        }""")

        net = Network()

        caught = None
        try:
            net.parse_query(query)
        except Exception as err:
            caught = err

        assert isinstance(caught, ValueError)
        assert caught.args == ("Unsupported operator: BAD OPERATOR",)
Ejemplo n.º 3
0
    def test_parse_error_unknown_operator(self):
        """Parsing a rule whose operator is unknown raises ValueError."""
        payload = """
        {
          "condition": "OR",
          "rules": [
            {
              "id": "query_name",
              "field": "query_name",
              "type": "string",
              "input": "text",
              "operator": "BAD OPERATOR",
              "value": "foo"
            }
          ]
        }"""
        query = json.loads(payload)

        network = Network()

        raised = None
        try:
            network.parse_query(query)
        except Exception as problem:
            raised = problem

        assert isinstance(raised, ValueError)
        assert raised.args == ("Unsupported operator: BAD OPERATOR",)
Ejemplo n.º 4
0
    def test_will_parse_basic(self):
        """A lone column_equal rule under an AND group yields exactly two conditions."""
        payload = """
        {
          "condition": "AND",
          "rules": [
            {
              "id": "column",
              "field": "column",
              "type": "string",
              "input": "text",
              "operator": "column_equal",
              "value": [
                "model_id",
                "5500"
              ]
            }
          ]
        }
        """
        parsed = json.loads(payload)

        net = Network()
        net.parse_query(parsed)

        # Expect the AND group plus the single column condition.
        assert len(net.conditions) == 2
Ejemplo n.º 5
0
    def load_rules(self):
        """ Load rules from the database. """
        from doorman.rules import Network
        from doorman.models import Rule

        if not self.should_reload_rules():
            return

        all_rules = list(Rule.query.all())

        self.network = Network()

        if not all_rules:
            return

        for db_rule in all_rules:
            # Fail fast on any alerter name we don't recognize.
            for alerter_name in db_rule.alerters:
                if alerter_name not in self.alerters:
                    raise ValueError('No such alerter: "{0}"'.format(alerter_name))

            # Register this rule's condition tree on the network.
            self.network.parse_query(db_rule.conditions,
                                     alerters=db_rule.alerters,
                                     rule_id=db_rule.id)

        # Record the newest rule's timestamp.  Done here rather than in
        # should_reload_rules because a rule reloaded between the two calls
        # could otherwise make us skip a reload we actually need.
        self.last_update = max(r.updated_at for r in all_rules)
Ejemplo n.º 6
0
    def test_will_cache_condition_instances(self):
        """make_condition returns the same cached instance for a given class."""
        class DummyCondition(BaseCondition):
            pass

        net = Network()
        first = net.make_condition(DummyCondition)
        second = net.make_condition(DummyCondition)

        assert first is second
Ejemplo n.º 7
0
    def test_will_cache_condition_instances(self):
        """Repeated make_condition calls with one class must yield one instance."""
        class ProbeCondition(BaseCondition):
            pass

        network = Network()
        instances = [network.make_condition(ProbeCondition) for _ in range(2)]

        assert instances[0] is instances[1]
Ejemplo n.º 8
0
    def test_will_not_reuse_different_groups(self):
        """Sub-groups containing different operators must not share AND nodes."""
        query = json.loads("""
        {
          "condition": "AND",
          "rules": [
            {
              "condition": "AND",
              "rules": [
                {
                  "id": "query_name",
                  "field": "query_name",
                  "type": "string",
                  "input": "text",
                  "operator": "not_equal",
                  "value": "asdf"
                }
              ]
            },
            {
              "condition": "AND",
              "rules": [
                {
                  "id": "query_name",
                  "field": "query_name",
                  "type": "string",
                  "input": "text",
                  "operator": "equal",
                  "value": "asdf"
                }
              ]
            }
          ]
        }""")

        net = Network()
        net.parse_query(query)

        tally = {}
        for cond in net.conditions.values():
            cls_name = cond.__class__.__name__
            tally[cls_name] = tally.get(cls_name, 0) + 1

        # Top level, each sub-group (not reused), each condition
        assert tally == {
            'AndCondition': 3,
            'EqualCondition': 1,
            'NotEqualCondition': 1,
        }
Ejemplo n.º 9
0
    def load_rules(self):
        """ Load rules from the database.

        Rebuilds ``self.network`` from every stored Rule, first verifying that
        each rule's alerters are known.  No-ops when the rule set has not
        changed since the last load (per ``should_reload_rules``).

        Raises:
            ValueError: if a rule references an alerter that is not loaded.
        """
        from doorman.rules import Network
        from doorman.models import Rule

        if not self.should_reload_rules():
            return

        all_rules = list(Rule.query.all())

        self.network = Network()

        # Nothing to parse; also, max() below raises ValueError on an empty
        # sequence, so bail out with the fresh (empty) network in place.
        if not all_rules:
            return

        for rule in all_rules:
            # Verify the alerters
            for alerter in rule.alerters:
                if alerter not in self.alerters:
                    raise ValueError('No such alerter: "{0}"'.format(alerter))

            # Create the rule.
            self.network.parse_query(rule.conditions, alerters=rule.alerters, rule_id=rule.id)

        # Save the last updated date
        # Note: we do this here, and not in should_reload_rules, because it's
        # possible that we've reloaded a rule in between the two functions, and
        # thus we accidentally don't reload when we should.
        self.last_update = max(r.updated_at for r in all_rules)
Ejemplo n.º 10
0
    def test_will_not_reuse_different_groups(self):
        """Groups whose operators differ must each get their own AND condition."""
        raw = """
        {
          "condition": "AND",
          "rules": [
            {
              "condition": "AND",
              "rules": [
                {
                  "id": "query_name",
                  "field": "query_name",
                  "type": "string",
                  "input": "text",
                  "operator": "not_equal",
                  "value": "asdf"
                }
              ]
            },
            {
              "condition": "AND",
              "rules": [
                {
                  "id": "query_name",
                  "field": "query_name",
                  "type": "string",
                  "input": "text",
                  "operator": "equal",
                  "value": "asdf"
                }
              ]
            }
          ]
        }"""

        network = Network()
        network.parse_query(json.loads(raw))

        histogram = defaultdict(int)
        for cond in network.conditions.values():
            histogram[cond.__class__.__name__] += 1

        # One top-level AND, one AND per sub-group (not shared), one leaf each.
        assert histogram == {
            'AndCondition': 3,
            'EqualCondition': 1,
            'NotEqualCondition': 1,
        }
Ejemplo n.º 11
0
    def test_parse_error_no_rules_in_group(self):
        """A group with an empty rules list must be rejected with ValueError."""
        query = json.loads("""
        {
          "condition": "AND",
          "rules": [
          ]
        }""")

        net = Network()

        failure = None
        try:
            net.parse_query(query)
        except Exception as err:
            failure = err

        assert isinstance(failure, ValueError)
        assert failure.args == ("A group contains no rules",)
Ejemplo n.º 12
0
    def test_parse_error_no_rules_in_group(self):
        """Parsing a group that contains no rules raises ValueError."""
        empty_group = """
        {
          "condition": "AND",
          "rules": [
          ]
        }"""
        query = json.loads(empty_group)

        network = Network()

        seen = None
        try:
            network.parse_query(query)
        except Exception as problem:
            seen = problem

        assert isinstance(seen, ValueError)
        assert seen.args == ("A group contains no rules",)
Ejemplo n.º 13
0
    def test_will_reuse_identical_conditions(self):
        """Identical leaf conditions appearing twice are deduplicated in the network."""
        query = json.loads("""
        {
          "condition": "AND",
          "rules": [
            {
              "condition": "AND",
              "rules": [
                {
                  "id": "query_name",
                  "field": "query_name",
                  "type": "string",
                  "input": "text",
                  "operator": "equal",
                  "value": "asdf"
                }
              ]
            },
            {
              "id": "query_name",
              "field": "query_name",
              "type": "string",
              "input": "text",
              "operator": "equal",
              "value": "asdf"
            }
          ]
        }""")

        net = Network()
        net.parse_query(query)

        tally = {}
        for cond in net.conditions.values():
            cls_name = cond.__class__.__name__
            tally[cls_name] = tally.get(cls_name, 0) + 1

        # Top-level AND, inner AND group, and the shared (reused) leaf condition.
        assert tally == {'AndCondition': 2, 'EqualCondition': 1}
Ejemplo n.º 14
0
    def test_will_reuse_identical_conditions(self):
        """The same equal-condition used twice maps to a single network node."""
        raw = """
        {
          "condition": "AND",
          "rules": [
            {
              "condition": "AND",
              "rules": [
                {
                  "id": "query_name",
                  "field": "query_name",
                  "type": "string",
                  "input": "text",
                  "operator": "equal",
                  "value": "asdf"
                }
              ]
            },
            {
              "id": "query_name",
              "field": "query_name",
              "type": "string",
              "input": "text",
              "operator": "equal",
              "value": "asdf"
            }
          ]
        }"""

        network = Network()
        network.parse_query(json.loads(raw))

        histogram = defaultdict(int)
        for cond in network.conditions.values():
            histogram[cond.__class__.__name__] += 1

        # Two AND nodes (top level + sub-group) and one reused leaf condition.
        assert histogram == {'AndCondition': 2, 'EqualCondition': 1}
Ejemplo n.º 15
0
class RuleManager(object):
    """Loads alerter plugins and DB-backed rules, and routes log entries to alerters.

    Construct (optionally with an app), which loads the alerter plugins from
    config; rules are lazily (re)loaded from the database on each
    ``handle_log_entry`` call.
    """

    def __init__(self, app=None):
        """Create the manager; ``init_app`` runs immediately when *app* is given."""
        # Network of parsed rule conditions; built in load_rules, None until then.
        self.network = None
        # updated_at timestamp of the newest rule incorporated into the network.
        self.last_update = None

        if app is not None:
            self.init_app(app)

    def init_app(self, app):
        """Bind this manager to *app* and load the configured alerter plugins."""
        self.app = app
        self.load_alerters()

        # Save this instance on the app, so we have a way to get at it.
        app.rule_manager = self

    def load_alerters(self):
        """ Load the alerter plugin(s) specified in the app config.

        Config maps alerter name -> (dotted class path, plugin config).

        Raises:
            ValueError: if a configured class cannot be found or is not an
                AbstractAlerterPlugin subclass.
        """
        from importlib import import_module
        from doorman.plugins import AbstractAlerterPlugin

        alerters = self.app.config.get('DOORMAN_ALERTER_PLUGINS', {})

        self.alerters = {}
        for name, (plugin, config) in alerters.items():
            # Resolve "package.module.ClassName" into a class object.
            package, classname = plugin.rsplit('.', 1)
            module = import_module(package)
            klass = getattr(module, classname, None)

            if klass is None:
                raise ValueError('Could not find a class named "{0}" in package "{1}"'.format(classname, package))

            if not issubclass(klass, AbstractAlerterPlugin):
                raise ValueError('{0} is not a subclass of AbstractAlerterPlugin'.format(name))

            self.alerters[name] = klass(config)

    def should_reload_rules(self):
        """ Checks if we need to reload the set of rules. """
        from doorman.models import Rule

        # Never loaded before: always reload.
        if self.last_update is None:
            return True

        newest_rule = Rule.query.order_by(Rule.updated_at.desc()).limit(1).first()
        # first() returns None when the table is empty; guard before
        # dereferencing updated_at (previously raised AttributeError).
        if newest_rule is not None and self.last_update < newest_rule.updated_at:
            return True

        return False

    def load_rules(self):
        """ Load rules from the database.

        Rebuilds ``self.network`` from every stored Rule, verifying each
        rule's alerters first.  No-ops when nothing changed since last load.

        Raises:
            ValueError: if a rule references an alerter that is not loaded.
        """
        from doorman.rules import Network
        from doorman.models import Rule

        if not self.should_reload_rules():
            return

        all_rules = list(Rule.query.all())

        self.network = Network()

        # No rules: keep the fresh empty network; also avoids max() raising
        # ValueError on an empty sequence below.
        if not all_rules:
            return

        for rule in all_rules:
            # Verify the alerters
            for alerter in rule.alerters:
                if alerter not in self.alerters:
                    raise ValueError('No such alerter: "{0}"'.format(alerter))

            # Create the rule.
            self.network.parse_query(rule.conditions, alerters=rule.alerters, rule_id=rule.id)

        # Save the last updated date
        # Note: we do this here, and not in should_reload_rules, because it's
        # possible that we've reloaded a rule in between the two functions, and
        # thus we accidentally don't reload when we should.
        self.last_update = max(r.updated_at for r in all_rules)

    def handle_log_entry(self, entry, node):
        """ The actual entrypoint for handling input log entries. """
        from doorman.models import Rule
        from doorman.rules import RuleMatch
        from doorman.utils import extract_results

        # Pick up any rule changes before evaluating this entry.
        self.load_rules()

        to_trigger = []
        for name, action, columns, timestamp in extract_results(entry):
            result = {
                'name': name,
                'action': action,
                'timestamp': timestamp,
                'columns': columns,
            }
            alerts = self.network.process(result, node)
            if len(alerts) == 0:
                continue

            # Alerts is a set of (alerter name, rule id) tuples.  We convert
            # these into RuleMatch instances, which is what our alerters are
            # actually expecting.
            for alerter, rule_id in alerts:
                rule = Rule.get_by_id(rule_id)

                to_trigger.append((alerter, RuleMatch(
                    rule=rule,
                    result=result,
                    node=node
                )))

        # Now that we've collected all results, start triggering them.
        for alerter, match in to_trigger:
            self.alerters[alerter].handle_alert(node, match)
Ejemplo n.º 16
0
    def test_will_alert(self, node):
        """Three identical rules all fire on a matching result, and keep firing."""
        query = json.loads("""
        {
          "condition": "AND",
          "rules": [
            {
              "id": "query_name",
              "field": "query_name",
              "type": "string",
              "input": "text",
              "operator": "begins_with",
              "value": "packs/osx-attacks/"
            },
            {
              "id": "action",
              "field": "action",
              "type": "string",
              "input": "text",
              "operator": "equal",
              "value": "added"
            },
            {
              "id": "column",
              "field": "column",
              "type": "string",
              "input": "text",
              "operator": "column_equal",
              "value": [
                "name", "com.whitesmoke.uploader.plist"
              ]
            }
          ]
        }""")

        net = Network()
        for rid in (1, 2, 3):
            net.parse_query(query, alerters=['debug'], rule_id=rid)

        now = dt.datetime.utcnow()

        # Should trigger the top-level alert, above
        matching = {
            'name': 'packs/osx-attacks/Whitesmoke',
            'action': 'added',
            'timestamp': now,
            'columns': {
                'path': '/LaunchAgents/com.whitesmoke.uploader.plist',
                'name': 'com.whitesmoke.uploader.plist',
                # Remainder omitted
            },
        }

        # Should *not* trigger the alert, above.
        non_matching = {
            'name': 'other-query',
            'action': 'added',
            'timestamp': now,
            'columns': {
                'a_column': 'the_value',
            },
        }

        assert len(net.process(non_matching, node)) == 0

        expected = [('debug', 1), ('debug', 2), ('debug', 3)]
        assert sorted(net.process(matching, node), key=lambda pair: pair[1]) == expected

        # Re-process the good input to assert that we don't continue to alert
        # on good input after a bad one...
        assert len(net.process(non_matching, node)) == 0

        # ... and that we *do* continue to alert on bad input.
        assert sorted(net.process(matching, node), key=lambda pair: pair[1]) == expected
Ejemplo n.º 17
0
    def test_will_alert(self, node):
        """Matching input alerts for every registered rule; non-matching never does."""
        raw = """
        {
          "condition": "AND",
          "rules": [
            {
              "id": "query_name",
              "field": "query_name",
              "type": "string",
              "input": "text",
              "operator": "begins_with",
              "value": "packs/osx-attacks/"
            },
            {
              "id": "action",
              "field": "action",
              "type": "string",
              "input": "text",
              "operator": "equal",
              "value": "added"
            },
            {
              "id": "column",
              "field": "column",
              "type": "string",
              "input": "text",
              "operator": "column_equal",
              "value": [
                "name", "com.whitesmoke.uploader.plist"
              ]
            }
          ]
        }"""
        query = json.loads(raw)

        network = Network()
        network.parse_query(query, alerters=['debug'], rule_id=1)
        network.parse_query(query, alerters=['debug'], rule_id=2)
        network.parse_query(query, alerters=['debug'], rule_id=3)

        timestamp = dt.datetime.utcnow()

        # This input satisfies all three rule conditions.
        bad_input = {
            'name': 'packs/osx-attacks/Whitesmoke',
            'action': 'added',
            'timestamp': timestamp,
            'columns': {
                'path': '/LaunchAgents/com.whitesmoke.uploader.plist',
                'name': 'com.whitesmoke.uploader.plist',
                # Remainder omitted
            },
        }

        # This input satisfies none of them.
        good_input = {
            'name': 'other-query',
            'action': 'added',
            'timestamp': timestamp,
            'columns': {
                'a_column': 'the_value',
            },
        }

        expected_alerts = [('debug', 1), ('debug', 2), ('debug', 3)]
        by_rule_id = lambda v: v[1]

        assert len(network.process(good_input, node)) == 0
        assert sorted(network.process(bad_input, node), key=by_rule_id) == expected_alerts

        # Good input stays quiet even after a bad one was seen...
        assert len(network.process(good_input, node)) == 0

        # ...and bad input keeps alerting on every pass.
        assert sorted(network.process(bad_input, node), key=by_rule_id) == expected_alerts
Ejemplo n.º 18
0
class RuleManager(object):
    """Coordinates alerter plugins and DB-backed rules for processing log entries.

    Lifecycle: construct (optionally with an app), which loads alerter plugins
    from config; rules are lazily (re)loaded from the database on every
    ``handle_log_entry`` call via ``load_rules``.
    """

    def __init__(self, app=None):
        """Create the manager; ``init_app`` runs immediately when *app* is given."""
        # Network of parsed rule conditions (built in load_rules); None until loaded.
        self.network = None
        # updated_at timestamp of the newest rule incorporated into the network.
        self.last_update = None

        if app is not None:
            self.init_app(app)

    def init_app(self, app):
        """Bind this manager to *app* and load the configured alerter plugins."""
        self.app = app
        self.load_alerters()

        # Save this instance on the app, so we have a way to get at it.
        app.rule_manager = self

    def load_alerters(self):
        """ Load the alerter plugin(s) specified in the app config. """
        from importlib import import_module
        from doorman.plugins import AbstractAlerterPlugin

        # Config maps alerter name -> (dotted class path, plugin config).
        alerters = self.app.config.get('DOORMAN_ALERTER_PLUGINS', {})

        self.alerters = {}
        for name, (plugin, config) in alerters.items():
            # Resolve "package.module.ClassName" into a class object.
            package, classname = plugin.rsplit('.', 1)
            module = import_module(package)
            klass = getattr(module, classname, None)

            if klass is None:
                raise ValueError(
                    'Could not find a class named "{0}" in package "{1}"'.
                    format(classname, package))

            if not issubclass(klass, AbstractAlerterPlugin):
                raise ValueError(
                    '{0} is not a subclass of AbstractAlerterPlugin'.format(
                        name))

            self.alerters[name] = klass(config)

    def should_reload_rules(self):
        """ Checks if we need to reload the set of rules. """
        from doorman.models import Rule

        # Never loaded before -> always reload.
        if self.last_update is None:
            return True

        newest_rule = Rule.query.order_by(
            Rule.updated_at.desc()).limit(1).first()
        # first() returns None when the table is empty; the guard avoids
        # dereferencing updated_at in that case.
        if newest_rule and self.last_update < newest_rule.updated_at:
            return True

        return False

    def load_rules(self):
        """ Load rules from the database. """
        from doorman.rules import Network
        from doorman.models import Rule

        if not self.should_reload_rules():
            return

        all_rules = list(Rule.query.all())

        self.network = Network()

        # No rules: keep the fresh empty network in place; also avoids max()
        # raising ValueError on an empty sequence below.
        if not all_rules:
            return

        for rule in all_rules:
            # Verify the alerters
            for alerter in rule.alerters:
                if alerter not in self.alerters:
                    raise ValueError('No such alerter: "{0}"'.format(alerter))

            # Create the rule.
            self.network.parse_query(rule.conditions,
                                     alerters=rule.alerters,
                                     rule_id=rule.id)

        # Save the last updated date
        # Note: we do this here, and not in should_reload_rules, because it's
        # possible that we've reloaded a rule in between the two functions, and
        # thus we accidentally don't reload when we should.
        self.last_update = max(r.updated_at for r in all_rules)

    def handle_log_entry(self, entry, node):
        """ The actual entrypoint for handling input log entries. """
        from doorman.models import Rule
        from doorman.rules import RuleMatch
        from doorman.utils import extract_results

        # Pick up any rule changes before evaluating this entry.
        self.load_rules()

        to_trigger = []
        for name, action, columns, timestamp in extract_results(entry):
            result = {
                'name': name,
                'action': action,
                'timestamp': timestamp,
                'columns': columns,
            }
            alerts = self.network.process(result, node)
            if len(alerts) == 0:
                continue

            # Alerts is a set of (alerter name, rule id) tuples.  We convert
            # these into RuleMatch instances, which is what our alerters are
            # actually expecting.
            for alerter, rule_id in alerts:
                rule = Rule.get_by_id(rule_id)

                to_trigger.append(
                    (alerter, RuleMatch(rule=rule, result=result, node=node)))

        # Now that we've collected all results, start triggering them.
        for alerter, match in to_trigger:
            self.alerters[alerter].handle_alert(node, match)