def test_journal_reader_tagging(tmpdir):
    """Verify JournalReader.perform_searches: a fully matching journal entry
    yields the named-group captures plus the configured tags, while an entry
    where any configured field fails to match yields an empty dict."""
    cpu_temp_search = {
        "name": "kernel.cpu.temperature",
        "fields": {
            "MESSAGE": r"(?P<cpu>CPU\d+): .*temperature.*",
            "SYSLOG_IDENTIFIER": r"^(?P<from>.*)$",
            "PRIORITY": r"^(?P<level>[0-4])$",  # emergency, alert, critical, error
            "SYSLOG_FACILITY": r"^0$",  # kernel only
        },
        "tags": {"section": "cputemp"},
    }
    # References a field name that never appears in entries, so this search
    # can never produce a result.
    no_result_search = {
        "name": "noresults",
        "fields": {
            "MESSAGE": "(?P<msg>.*)",
            "nosuchfield": ".*",
        },
    }
    config = {
        "readers": {
            "system": {
                "journal_flags": ["SYSTEM"],
                "searches": [cpu_temp_search, no_result_search],
            },
        },
    }

    config_path = str(tmpdir.join("journalpump.json"))
    with open(config_path, "w") as config_file:
        config_file.write(json.dumps(config))

    pump = JournalPump(config_path)
    reader = pump.readers["system"]

    # Entry where every configured field matches.
    matching_entry = JournalObject(entry={
        "MESSAGE": "CPU0: Core temperature above threshold, cpu clock throttled (total events = 1)",
        "PRIORITY": "2",
        "SYSLOG_FACILITY": "0",
        "SYSLOG_IDENTIFIER": "kernel",
    })
    assert reader.perform_searches(matching_entry) == {
        "kernel.cpu.temperature": {
            "cpu": "CPU0",
            "from": "kernel",
            "level": "2",
            "section": "cputemp",
        }
    }

    # Entry where some fields do not match the search patterns.
    partial_entry = JournalObject(entry={
        "MESSAGE": "CPU1: on fire",
        "PRIORITY": "1",
        "SYSLOG_FACILITY": "0",
        "SYSLOG_IDENTIFIER": "kernel",
    })
    assert reader.perform_searches(partial_entry) == {}
def _run_pump_test(*, config_path, logfile):
    """Run a journalpump against *config_path*, emit four uniquely tagged log
    messages, shut the pump down, and assert all four messages show up in
    *logfile*.

    Fix: *identifier* was previously assigned inside the ``try`` block but
    referenced during result checking, so any startup failure (e.g. the
    initialization assert) raised ``NameError`` and masked the real error.
    The result check also ran in the cleanup path, where its assert could
    hide the original exception. The identifier is now generated up front
    and checking happens only after cleanup completes.
    """
    identifier = "".join(random.sample(string.ascii_uppercase + string.digits, k=8))
    journalpump = None
    threads = []
    try:
        journalpump = JournalPump(config_path)
        pump_thread = threading.Thread(target=journalpump.run)
        pump_thread.start()
        threads.append(pump_thread)
        assert _journalpump_initialized(journalpump), "Failed to initialize journalpump"

        logger = logging.getLogger("rsyslog-tester")
        logger.info("Info message for %s", identifier)
        logger.warning("Warning message for %s", identifier)
        logger.error("Error message for %s", identifier)
        logger.critical("Critical message for %s", identifier)
        # Wait for everything to trickle thru
        sleep(5)
    finally:
        # Stop the journalpump and senders
        if journalpump is not None:
            journalpump.running = False
            for _, reader in journalpump.readers.items():
                for _, sender in reader.senders.items():
                    threads.append(sender)
                    sender.request_stop()
        # Wait a little while for threads to finish
        retry = 0
        while retry < 5:
            if not [thread for thread in threads if thread.is_alive()]:
                break
            sleep(1)
            retry += 1

    # Check the results (after cleanup, so a startup failure is not masked)
    found = 0
    with open(logfile, "r") as fp:
        lines = fp.readlines()
    for txt in ["Info", "Warning", "Error", "Critical"]:
        m = re.compile(r".*{} message for {}.*".format(txt, identifier))
        for line in lines:
            if m.match(line):
                found += 1
                break
    assert found == 4, "Expected messages not found in syslog"
def test_journalpump_state_file(tmpdir):
    """Exercise JournalPump.save_state and validate the persisted JSON:
    reader byte/line counters and the sender health block (elapsed time and
    a "stopped" status after request_stop)."""
    config_path = str(tmpdir.join("journalpump.json"))
    state_path = str(tmpdir.join("journalpump_state.json"))
    config = {
        "json_state_file_path": state_path,
        "readers": {
            "state_test": {
                "senders": {
                    "fake_syslog": {
                        "output_type": "rsyslog",
                        "rsyslog_server": "127.0.0.1",
                        "rsyslog_port": 514,
                    },
                },
            },
        },
    }
    with open(config_path, "w") as config_file:
        config_file.write(json.dumps(config))

    pump = JournalPump(config_path)
    for reader in pump.readers.values():
        reader.initialize_senders()
        sleep(1.1)  # let the sender accumulate >1s of "elapsed" health time
        reader.request_stop()
    pump.save_state()

    with open(state_path, "r") as state_file:
        state = json.load(state_file)

    assert "readers" in state
    assert "start_time" in state
    assert "state_test" in state["readers"]

    reader_state = state["readers"]["state_test"]
    assert reader_state.get("total_bytes") == 0
    assert reader_state.get("total_lines") == 0
    assert "senders" in reader_state
    assert "fake_syslog" in reader_state["senders"]

    sender_state = reader_state["senders"]["fake_syslog"]
    assert "health" in sender_state
    health = sender_state["health"]
    assert "elapsed" in health
    assert health["elapsed"] > 1.0
    assert "status" in health
    assert health["status"] == "stopped"
def test_journalpump_init_single_sender(tmpdir):
    """Verify JournalPump picks the right sender class for each output_type
    using the flat (single-sender) configuration format.

    NOTE(review): this file defined three functions all named
    ``test_journalpump_init``; later definitions shadow earlier ones, so
    pytest only ever collected and ran the last one. Renamed (keeping the
    ``test_`` prefix) so every variant is collected.
    """
    journalpump_path = str(tmpdir.join("journalpump.json"))

    def _make_pump(config):
        # Write *config* to disk and build a JournalPump from it.
        with open(journalpump_path, "w") as fp:
            fp.write(json.dumps(config))
        return JournalPump(journalpump_path)

    # Logplex sender
    pump = _make_pump({
        "logplex_token": "foo",
        "logplex_log_input_url": "http://logplex.com",
        "output_type": "logplex",
    })
    pump.initialize_sender()
    pump.sender.running = False
    assert isinstance(pump.sender, LogplexSender)

    # Kafka sender
    pump = _make_pump({
        "output_type": "kafka",
        "logplex_token": "foo",
        "kafka_address": "localhost",
        "kafka_topic": "foo",
    })
    pump.initialize_sender()
    pump.sender.running = False
    assert isinstance(pump.sender, KafkaSender)

    # Elasticsearch sender
    pump = _make_pump({
        "output_type": "elasticsearch",
        "elasticsearch_url": "https://foo.aiven.io",
        "elasticsearch_index_prefix": "fooprefix",
    })
    pump.initialize_sender()
    pump.sender.running = False
    assert isinstance(pump.sender, ElasticsearchSender)
def test_journalpump_init(tmpdir):
    """Verify JournalPump builds the right sender class for each output_type
    using the readers/senders configuration format, and that a configured
    field_filter is attached to the sender."""
    journalpump_path = str(tmpdir.join("journalpump.json"))

    def _build_pump(config):
        # Persist *config* and construct a pump from the written file.
        with open(journalpump_path, "w") as fp:
            fp.write(json.dumps(config))
        return JournalPump(journalpump_path)

    # Logplex sender, with a field filter attached
    pump = _build_pump({
        "field_filters": {"filter_a": {"fields": ["message"]}},
        "readers": {
            "foo": {
                "senders": {
                    "bar": {
                        "field_filter": "filter_a",
                        "logplex_token": "foo",
                        "logplex_log_input_url": "http://logplex.com",
                        "output_type": "logplex",
                    },
                },
            },
        },
    })
    assert len(pump.field_filters) == 1
    assert len(pump.readers) == 1
    (reader_name, reader), = pump.readers.items()
    assert reader_name == "foo"
    reader.running = False
    (sender_name, sender), = reader.senders.items()
    assert sender_name == "bar"
    sender.running = False
    assert isinstance(sender, LogplexSender)
    assert sender.field_filter.name == "filter_a"
    assert sender.field_filter.fields == ["message"]

    # Kafka sender
    pump = _build_pump({
        "readers": {
            "foo": {
                "senders": {
                    "bar": {
                        "output_type": "kafka",
                        "logplex_token": "foo",
                        "kafka_address": "localhost",
                        "kafka_topic": "foo",
                    },
                },
            },
        },
    })
    assert len(pump.readers) == 1
    (reader_name, reader), = pump.readers.items()
    assert reader_name == "foo"
    reader.running = False
    (sender_name, sender), = reader.senders.items()
    assert sender_name == "bar"
    sender.running = False
    assert isinstance(sender, KafkaSender)

    # Elasticsearch sender
    pump = _build_pump({
        "readers": {
            "foo": {
                "senders": {
                    "bar": {
                        "output_type": "elasticsearch",
                        "elasticsearch_url": "https://foo.aiven.io",
                        "elasticsearch_index_prefix": "fooprefix",
                    },
                },
            },
        },
    })
    assert len(pump.readers) == 1
    (reader_name, reader), = pump.readers.items()
    assert reader_name == "foo"
    reader.running = False
    (sender_name, sender), = reader.senders.items()
    assert sender_name == "bar"
    sender.running = False
    assert isinstance(sender, ElasticsearchSender)
def test_journalpump_init_single_sender_copy(tmpdir):
    """Verify JournalPump picks the right sender class for each output_type
    using the flat (single-sender) configuration format.

    NOTE(review): this function is an exact duplicate of an earlier test in
    this file and was one of three definitions sharing the name
    ``test_journalpump_init`` — later definitions shadowed earlier ones, so
    pytest only ran the last. Renamed uniquely so nothing is shadowed;
    candidate for outright removal since it duplicates existing coverage.
    """
    journalpump_path = str(tmpdir.join("journalpump.json"))

    def _make_pump(config):
        # Write *config* to disk and build a JournalPump from it.
        with open(journalpump_path, "w") as fp:
            fp.write(json.dumps(config))
        return JournalPump(journalpump_path)

    # Logplex sender
    pump = _make_pump({
        "logplex_token": "foo",
        "logplex_log_input_url": "http://logplex.com",
        "output_type": "logplex",
    })
    pump.initialize_sender()
    pump.sender.running = False
    assert isinstance(pump.sender, LogplexSender)

    # Kafka sender
    pump = _make_pump({
        "output_type": "kafka",
        "logplex_token": "foo",
        "kafka_address": "localhost",
        "kafka_topic": "foo",
    })
    pump.initialize_sender()
    pump.sender.running = False
    assert isinstance(pump.sender, KafkaSender)

    # Elasticsearch sender
    pump = _make_pump({
        "output_type": "elasticsearch",
        "elasticsearch_url": "https://foo.aiven.io",
        "elasticsearch_index_prefix": "fooprefix",
    })
    pump.initialize_sender()
    pump.sender.running = False
    assert isinstance(pump.sender, ElasticsearchSender)