def produce(producer: Producer, record: AbstractRecord, **kwargs):
    """Publish *record* to the topic named by the CLI-style options.

    Parameters:
        producer: an already-configured kafkian Producer.
        record: the record to publish; its fields become the message value.
        **kwargs: docopt-style options; '--topic' and '--key' are read here.

    NOTE(review): the original body was garbled — a ``for`` loop over
    ``producer.produce(...)`` with no body, and with topic-administration
    arguments (config/topic_name/num_partitions) pasted into the produce
    call. This reconstruction performs a single publish of the record;
    confirm against the intended CLI behaviour. Topic creation (partitions,
    broker config from '--path_to_config'/'--section') belongs in a
    separate admin step, not in produce().
    """
    producer.produce(
        kwargs['--topic'],
        kwargs['--key'],
        {**record},
    )
def instantiate_producer(**kwargs):
    """Build and return an Avro-serializing kafkian Producer.

    Reads the broker configuration from the file and section named by the
    '--path_to_config' and '--section' CLI options.

    Returns:
        Producer: the configured producer. (The original created the
        producer and discarded it, returning None — that made the function
        useless to callers; returning the instance is the fix.)
    """
    conf = utils.retrieve(kwargs['--path_to_config'], kwargs['--section'])
    # NOTE(review): the schema-registry URL is hard-coded to localhost;
    # consider promoting it to a CLI option alongside the broker config.
    producer = Producer(
        config=conf,
        key_serializer=AvroStringKeySerializer(
            schema_registry_url="http://localhost:8081"),
        value_serializer=AvroSerializer(
            schema_registry_url="http://localhost:8081",
            subject_name_strategy=SubjectNameStrategy.TopicRecordNameStrategy),
    )
    return producer
def producer():
    """Create a kafkian Producer from the module-level PRODUCER_CONFIG."""
    kafka_producer = Producer(PRODUCER_CONFIG)
    return kafka_producer
from pprint import pprint

from confluent_kafka.cimpl import Message

from kafkian import Producer
from kafkian_example import config

PRODUCER_CONFIG = {'bootstrap.servers': config.KAFKA_BOOTSTRAP_SERVERS}


def delivery_success_callback(msg: Message):
    """Pretty-print the metadata of a successfully delivered message."""
    delivered = {
        'topic': msg.topic(),
        'partition': msg.partition(),
        'timestamp': msg.timestamp(),
        'key': msg.key(),
        'value': msg.value(),
    }
    pprint(delivered)


# Module-level producer that reports each successful delivery via the
# callback above.
producer = Producer(
    PRODUCER_CONFIG,
    delivery_success_callback=delivery_success_callback,
)
#config the producer PRODUCER_CONFIG = {'bootstrap.servers': os.environ['KAFKA_BOOTSTRAP_SERVERS']} producer = Producer( PRODUCER_CONFIG, key_serializer=AvroStringKeySerializer(os.environ['SCHEMA_REGISTRY_URL']), value_serializer=AvroSerializer(os.environ['SCHEMA_REGISTRY_URL'])) # the number of partitions and topic retention/compaction strategies import os import random import uuid from datetime import datetime import structlog from confluent_kafka import avro from kafkian import Producer from kafkian.serde.avroserdebase import AvroRecord from kafkian.serde.serialization import AvroSerializer, AvroStringKeySerializer logger = structlog.getLogger(__name__) value_schema_str = """ { "namespace": "locations", "name": "LocationReceived", "type": "record", "fields" : [ { "name" : "deviceId",
def delivery_success_callback(msg: Message): pprint({ 'topic': msg.topic(), 'partition': msg.partition(), 'timestamp': msg.timestamp(), 'key': msg.key(), 'value': msg.value(), }) producer = Producer( PRODUCER_CONFIG, delivery_success_callback=delivery_success_callback, key_serializer=AvroSerializer( config.SCHEMA_REGISTRY_URL, subject_name_strategy=SubjectNameStrategy.RecordNameStrategy), value_serializer=AvroSerializer( config.SCHEMA_REGISTRY_URL, subject_name_strategy=SubjectNameStrategy.RecordNameStrategy), ) key_schema_str = """ { "namespace": "net.treqster.locations", "name": "LocationKey", "type": "record", "fields" : [ { "name" : "userId", "type" : ["null", "string"] },
class LocationReceived(AvroRecord):
    # Avro record type for incoming device locations; the schema is parsed
    # from the module-level value_schema_str.
    _schema = avro.loads(value_schema_str)


# NOTE(review): despite its name, this dict also carries the broker
# address, not only schema-registry settings — consider renaming.
SCHEMA_REGISTRY_CONFIG = {
    'KAFKA_BOOTSTRAP_SERVERS': 'localhost:29092',
    'SCHEMA_REGISTRY_URL': 'http://localhost:8081'
}

PRODUCER_CONFIG = {
    'bootstrap.servers': SCHEMA_REGISTRY_CONFIG.get('KAFKA_BOOTSTRAP_SERVERS')
}

# Module-level producer: string-serialized Avro keys, Avro-serialized values.
producer = Producer(PRODUCER_CONFIG,
                    key_serializer=AvroStringKeySerializer(
                        SCHEMA_REGISTRY_CONFIG.get('SCHEMA_REGISTRY_URL')),
                    value_serializer=AvroSerializer(
                        SCHEMA_REGISTRY_CONFIG.get('SCHEMA_REGISTRY_URL')))


def produce_location_received(device_id: str, latitude: float, longitude: float):
    """Publish a LocationReceived record for *device_id*, keyed by device id,
    synchronously to the 'location_ingress' topic."""
    message = LocationReceived(
        dict(deviceId=device_id, latitude=latitude, longitude=longitude))
    logger.msg("Sending to {} {} message: {}".format(
        SCHEMA_REGISTRY_CONFIG.get('KAFKA_BOOTSTRAP_SERVERS'),
        SCHEMA_REGISTRY_CONFIG.get('SCHEMA_REGISTRY_URL'),
        message))
    try:
        producer.produce('location_ingress', device_id, message, sync=True)
    except Exception as e:
        # NOTE(review): the handler body is truncated in this chunk — the
        # original logging/re-raise that followed is not visible here.
def producer():
    """Return a Producer that Avro-serializes both keys and values.

    Keys use the string-key Avro serializer; both serializers talk to the
    registry at the module-level SCHEMA_REGISTRY_URL.
    """
    key_ser = AvroStringKeySerializer(schema_registry_url=SCHEMA_REGISTRY_URL)
    value_ser = AvroSerializer(schema_registry_url=SCHEMA_REGISTRY_URL)
    return Producer(
        PRODUCER_CONFIG,
        key_serializer=key_ser,
        value_serializer=value_ser,
    )