Code Example #1
    def handle_result(self, data, **kwargs):
        if self.result is None:
            return

        # Process each result individually
        try:
            for item in extract_results(data):
                fields = {}
                fields.update(kwargs)

                if item.timestamp:
                    timestamp = time.mktime(item.timestamp.timetuple())
                else:
                    timestamp = time.mktime(dt.datetime.utcnow().timetuple())

                fields.update(name=item.name, timestamp=timestamp)

                base = self.join_fields(fields)

                # Write each entry on its own line, prefixing column keys with the action
                curr_fields = {'result_type': item.action}
                for key, val in item.columns.items():
                    curr_fields['_'.join([item.action, key])] = val

                self.result.write(base + ', ' + self.join_fields(curr_fields) + '\n')
        finally:
            self.result.flush()
            os.fsync(self.result.fileno())
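The join_fields helper is not shown in this excerpt. A minimal sketch of what it might look like, assuming it renders a fields dict as comma-separated key=value pairs (the format implied by the ', ' separators in the write calls above); the actual doorman implementation may differ:

    # Hypothetical helper, inferred from how it is called above; not
    # necessarily the real doorman implementation.
    def join_fields(self, fields):
        # {'name': 'foo', 'timestamp': 1.0} -> "name=foo, timestamp=1.0"
        return ', '.join('{0}={1}'.format(key, value)
                         for key, value in fields.items())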
Code Example #2
File: rules.py Project: efueger/doorman
    def handle_log_entry(self, entry, node):
        """
        This function processes an incoming log entry.  It normalizes the data,
        validates the node name (if that filter was given), and then dispatches
        to the underlying rule.

        Note: the incoming entry contains a `hostIdentifier` field sent by the
        client, while `node` contains the `host_identifier` that was stored
        when the node enrolled.  The incoming `hostIdentifier` may have changed
        since enrollment (e.g. if the user changed their hostname), so all
        rules here compare against the node's original `host_identifier`
        value.

        :param entry: The full request received from the client (i.e. including "node_key", etc.)
        :param node: Information about the sending node, retrieved from the database.
        :type entry: dict
        :type node: dict, created from the Node model

        :returns: A list of matches
        """
        if self.node_name is not None and node['host_identifier'] != self.node_name:
            return []

        matches = []
        for result in extract_results(entry):
            res = self.handle_result(result, node)
            if res is not None:
                matches.extend(res)

        return matches
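extract_results itself does not appear in these excerpts. Judging from how the callers consume it (attribute access like item.name, item.action, item.columns, and item.timestamp in examples #1 and #3, and four-field tuple unpacking in examples #4 and #6), it plausibly yields one namedtuple per result row. A minimal sketch under that assumption, with the field names taken from the call sites; the real parsing of osquery's result log format is more involved:

    import collections
    import datetime as dt

    # Hypothetical shape, inferred from the call sites in these examples.
    Result = collections.namedtuple(
        'Result', ['name', 'action', 'columns', 'timestamp'])

    def extract_results(entry):
        # Assumes the request body carries result rows under a 'data' key;
        # this is a sketch, not the actual doorman implementation.
        for row in entry.get('data', []):
            yield Result(
                name=row.get('name'),
                action=row.get('action'),
                columns=row.get('columns', {}),
                # Real code would parse the row's own timestamp field.
                timestamp=dt.datetime.utcnow(),
            )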
Code Example #3
File: logstash.py Project: socrata/doorman
    def handle_result(self, data, **kwargs):
        if self.fp is None:
            return

        fp = self.fp

        host_identifier = kwargs.get('host_identifier')
        created = dt.datetime.utcnow().isoformat()

        for item in extract_results(data):
            json_dump(
                {
                    '@version': 1,
                    '@host_identifier': host_identifier,
                    '@timestamp': item.timestamp.isoformat(),
                    'log_type': 'result',
                    'action': item.action,
                    'columns': item.columns,
                    'name': item.name,
                    'created': created,
                }, fp)
            fp.write('\r\n')
        # Flush and fsync after the loop so results are durably on disk.
        fp.flush()
        os.fsync(fp.fileno())
Code Example #4
File: extensions.py Project: waldow90/doorman-1
    def handle_log_entry(self, entry, node):
        """ The actual entrypoint for handling input log entries. """
        from doorman.models import Rule
        from doorman.rules import RuleMatch
        from doorman.utils import extract_results

        self.load_rules()

        to_trigger = []
        for name, action, columns, timestamp in extract_results(entry):
            result = {
                'name': name,
                'action': action,
                'timestamp': timestamp,
                'columns': columns,
            }
            alerts = self.network.process(result, node)
            if len(alerts) == 0:
                continue

            # Alerts is a set of (alerter name, rule id) tuples.  We convert
            # these into RuleMatch instances, which is what our alerters are
            # actually expecting.
            for alerter, rule_id in alerts:
                rule = Rule.get_by_id(rule_id)

                to_trigger.append(
                    (alerter, RuleMatch(rule=rule, result=result, node=node)))

        # Now that we've collected all results, start triggering them.
        for alerter, match in to_trigger:
            self.alerters[alerter].handle_alert(node, match)
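RuleMatch is imported from doorman.rules but not shown here. From the keyword arguments used above, it looks like a plain value object carrying the matched rule, the triggering result, and the node; a hypothetical stand-in for exercising this dispatch loop in isolation (the real class may carry more state):

    import collections

    # Hypothetical stand-in, inferred from the call
    # RuleMatch(rule=..., result=..., node=...) above.
    RuleMatch = collections.namedtuple('RuleMatch', ['rule', 'result', 'node'])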
Code Example #5
File: logstash.py Project: javuto/doorman
    def handle_result(self, data, **kwargs):
        if self.fp is None:
            return

        fp = self.fp

        host_identifier = kwargs.get('host_identifier')
        created = dt.datetime.utcnow().isoformat()

        try:
            for item in extract_results(data):
                json_dump({
                    '@version': 1,
                    '@host_identifier': host_identifier,
                    '@timestamp': item.timestamp.isoformat(),
                    'log_type': 'result',
                    'action': item.action,
                    'columns': item.columns,
                    'name': item.name,
                    'created': created,
                }, fp)
                fp.write('\r\n')
        finally:
            fp.flush()
            os.fsync(fp.fileno())
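json_dump is likewise undefined in this excerpt. Since it is called as json_dump(obj, fp), it behaves like json.dump with preset options; a hedged stand-in that also tolerates stray non-serializable values such as datetimes (the real doorman configuration may differ):

    import functools
    import json

    # Hypothetical stand-in for the json_dump used above.
    json_dump = functools.partial(json.dump, default=str, separators=(',', ':'))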
Code Example #6
File: extensions.py Project: jcorina/doorman
    def handle_log_entry(self, entry, node):
        """ The actual entrypoint for handling input log entries. """
        from doorman.models import Rule
        from doorman.rules import RuleMatch
        from doorman.utils import extract_results

        self.load_rules()

        to_trigger = []
        for name, action, columns, timestamp in extract_results(entry):
            result = {
                'name': name,
                'action': action,
                'timestamp': timestamp,
                'columns': columns,
            }
            alerts = self.network.process(result, node)
            if len(alerts) == 0:
                continue

            # Alerts is a set of (alerter name, rule id) tuples.  We convert
            # these into RuleMatch instances, which is what our alerters are
            # actually expecting.
            for alerter, rule_id in alerts:
                rule = Rule.get_by_id(rule_id)

                to_trigger.append((alerter, RuleMatch(
                    rule=rule,
                    result=result,
                    node=node
                )))

        # Now that we've collected all results, start triggering them.
        for alerter, match in to_trigger:
            self.alerters[alerter].handle_alert(node, match)
Code Example #7
File: file.py Project: davinirjr/doorman
    def handle_result(self, data, **kwargs):
        if self.result is None:
            return

        # Process each result individually
        for item in extract_results(data):
            fields = {}
            fields.update(kwargs)

            if item.timestamp:
                timestamp = time.mktime(item.timestamp.timetuple())
            else:
                timestamp = time.mktime(dt.datetime.utcnow().timetuple())

            fields.update(name=item.name, timestamp=timestamp)

            base = self.join_fields(fields)

            # Write each added/removed entry on a different line
            for entry in item.added:
                curr_fields = {'result_type': 'added'}
                for key, val in entry.items():
                    curr_fields['added_' + key] = val

                self.result.write(base + ', ' + self.join_fields(curr_fields) + '\n')

            for entry in item.removed:
                curr_fields = {'result_type': 'removed'}
                for key, val in entry.items():
                    curr_fields['removed_' + key] = val

                self.result.write(base + ', ' + self.join_fields(curr_fields) + '\n')