Example 1
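Builds a JournalPump from a JSON config with two searches, then checks that perform_searches extracts the named regex groups and static tags from a fully matching kernel entry, and returns an empty dict when any configured field fails to match.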
import json

# Import paths assumed from journalpump's module layout.
from journalpump.journalpump import JournalObject, JournalPump


def test_journal_reader_tagging(tmpdir):
    config = {
        "readers": {
            "system": {
                "journal_flags": ["SYSTEM"],
                "searches": [
                    {
                        "name": "kernel.cpu.temperature",
                        "fields": {
                            "MESSAGE": r"(?P<cpu>CPU\d+): .*temperature.*",
                            "SYSLOG_IDENTIFIER": r"^(?P<from>.*)$",
                            "PRIORITY": r"^(?P<level>[0-4])$",  # emergency, alert, critical, error
                            "SYSLOG_FACILITY": r"^0$",          # kernel only
                        },
                        "tags": {"section": "cputemp"},
                    },
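                    # this search references a journal field that never exists,
                    # so it should never yield a result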
                    {
                        "name": "noresults",
                        "fields": {
                            "MESSAGE": "(?P<msg>.*)",
                            "nosuchfield": ".*",
                        },
                    },
                ],
            },
        },
    }
    journalpump_path = str(tmpdir.join("journalpump.json"))
    with open(journalpump_path, "w") as fp:
        fp.write(json.dumps(config))
    pump = JournalPump(journalpump_path)
    reader = pump.readers["system"]

    # matching entry
    entry = JournalObject(entry={
        "MESSAGE": "CPU0: Core temperature above threshold, cpu clock throttled (total events = 1)",
        "PRIORITY": "2",
        "SYSLOG_FACILITY": "0",
        "SYSLOG_IDENTIFIER": "kernel",
    })
    result = reader.perform_searches(entry)
    expected = {
        "kernel.cpu.temperature": {
            "cpu": "CPU0",
            "from": "kernel",
            "level": "2",
            "section": "cputemp",
        }
    }
    assert result == expected

    # entry where only some fields match; a partial match yields no result
    entry = JournalObject(entry={
        "MESSAGE": "CPU1: on fire",
        "PRIORITY": "1",
        "SYSLOG_FACILITY": "0",
        "SYSLOG_IDENTIFIER": "kernel",
    })
    result = reader.perform_searches(entry)
    assert result == {}
Example 2
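A JournalObjectHandler test taken from a test class: self.reader, self.pump, and the sender fixtures are set up elsewhere. Each sender receives the entry filtered down to its own field set, and line statistics are accounted against the largest serialized message.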
    def test_filtered_processing(self):
        jobject = JournalObject(entry=OrderedDict(a=1,
                                                  b=2,
                                                  c=3,
                                                  REALTIME_TIMESTAMP=1),
                                cursor=10)
        handler = JournalObjectHandler(jobject, self.reader, self.pump)
        assert handler.process() is True
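        # sender_a's field filter reduces the entry to field "a" only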
        assert (json.dumps({"a": 1}).encode("utf-8"), 10) in self.sender_a.msg_buffer.messages

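        # sender_b's filter keeps fields "a" and "b"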
        assert (json.dumps(OrderedDict(a=1, b=2)).encode("utf-8"), 10) in self.sender_b.msg_buffer.messages

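        # sender_c receives the full entry plus a parsed timestamp;
        # read stats are accounted against this largest serialized form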
        largest_data = json.dumps(
            OrderedDict(a=1,
                        b=2,
                        c=3,
                        REALTIME_TIMESTAMP=1,
                        timestamp=datetime.utcfromtimestamp(1)),
            default=default_json_serialization,
        ).encode("utf-8")
        assert len(self.sender_c.msg_buffer.messages) == 1
        self.reader.inc_line_stats.assert_called_once_with(
            journal_bytes=len(largest_data), journal_lines=1)
Example 3
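Another JournalObjectHandler test from the same class: an entry that serializes to more than MAX_KAFKA_MESSAGE_SIZE is replaced with an error placeholder for the affected sender, and the rejection is recorded in the pump's stats.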
    def test_too_large_data(self):
        self.pump.make_tags.return_value = "tags"
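        # field "b" alone pushes the serialized entry over the Kafka size limit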
        too_large = OrderedDict(a=1, b="x" * MAX_KAFKA_MESSAGE_SIZE)
        jobject = JournalObject(entry=too_large, cursor=10)
        handler = JournalObjectHandler(jobject, self.reader, self.pump)
        assert handler.process() is True
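        # sender_a's field filter yields a small message, delivered normally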
        assert (json.dumps({"a": 1}).encode("utf-8"), 10) in self.sender_a.msg_buffer.messages
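        # sender_b's unfiltered message exceeds the limit, so it gets an error placeholder instead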
        assert "too large message" in str(self.sender_b.msg_buffer.messages)

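        # the oversized entry is counted as a journal read error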
        self.pump.stats.increase.assert_called_once_with("journal.read_error",
                                                         tags="tags")