Example #1
import json

from film_record import FilmRecord
from logger import CustomLogger
# additional modules for better UX
from utils import Gauge

# Global presets
file_name = "movies.json"
kafka_brokers = ["10.40.1.142:9092", "10.40.1.141:9092"]
kafka_topic_name = "avikulin_test"
kafka_clientid = "Python test util"
kafka_value_serializer = FilmRecord.serialize

# ! Executed code
if __name__ == "__main__":
    logger_instance = CustomLogger("kafka_write_util")
    logger_instance.activate()

    films_store = []

    logger_instance.get.info(f"Start reading data from file {file_name}.")
    with open(file_name, mode="r") as source_file:
        data_store = json.load(source_file)
        print("JSON loaded.")
        for i, item in enumerate(data_store):
            films_store.append(FilmRecord.decode(item))

        print(
            f"Statistics: count ={len(data_store)}, collection type = {type(data_store)}"
        )
        print(
            f"Film store: count = {len(films_store)}, "
            f"item type = {type(films_store[0]) if len(films_store) > 0 else 'None'}"
        )

Example #2

from kafka import KafkaConsumer
from kafka.structs import TopicPartition, KafkaMessage

from logger import CustomLogger, LogLevels

from sys import exit

# Global presets
kafka_brokers = ["10.40.1.142:9092", "10.40.1.141:9092"]
kafka_topic_name = "avikulin_test"
kafka_consumer_group_id = "test_group#111"
kafka_client_id = __file__

if __name__ == "__main__":
    # Enable logging for kafka consumer
    kafka_logger = CustomLogger("kafka", log_level=LogLevels.DEBUG)
    kafka_logger.activate()

    # Enable logging for app
    app_logger = CustomLogger("kafka_read_util", log_level=LogLevels.INFO)
    app_logger.activate()

    while True:
        consumer = KafkaConsumer(
            kafka_topic_name,
            group_id=kafka_consumer_group_id,
            client_id=kafka_client_id,
            bootstrap_servers=kafka_brokers,
            request_timeout_ms=6001,
            session_timeout_ms=6000,
            heartbeat_interval_ms=2000,
            auto_offset_reset="earliest",