def __init__(self, configFile, topic):
    """Producer wrapper constructor: read the Confluent Cloud config,
    strip the Schema Registry settings, create the Producer, and
    create the target topic if it does not already exist.
    """
    self.conf = ccloud_lib.read_ccloud_config(configFile)
    self.producer_conf = ccloud_lib.pop_schema_registry_params_from_config(
        self.conf)
    self.producer = Producer(self.producer_conf)
    self.topic = topic
    ccloud_lib.create_topic(self.conf, topic)

def __init__(self, configFile):
    """Consumer wrapper constructor: read the Confluent Cloud config,
    strip the Schema Registry settings, set the consumer group and
    offset-reset policy, and create the Consumer.
    """
    self.conf = ccloud_lib.read_ccloud_config(configFile)
    self.consumer_conf = ccloud_lib.pop_schema_registry_params_from_config(
        self.conf)
    self.consumer_conf['group.id'] = 'python_example_group_1'
    self.consumer_conf['auto.offset.reset'] = 'earliest'
    self.consumer = Consumer(self.consumer_conf)
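
# ---------------------------------------------------------------------------
# Minimal end-to-end sketch of how the two constructors above fit together.
# Illustrative only: the config file path, topic name, key, and value are
# made-up example values, and error handling is omitted. A real consumer
# would poll() in a loop rather than making a single call.
from confluent_kafka import Producer, Consumer
import ccloud_lib

conf = ccloud_lib.read_ccloud_config("librdkafka.config")  # example path

# Produce one record (same setup the producer __init__ performs).
producer_conf = ccloud_lib.pop_schema_registry_params_from_config(conf)
producer = Producer(producer_conf)
ccloud_lib.create_topic(conf, "test1")
producer.produce("test1", key="alice", value='{"count": 0}')
producer.flush()  # block until the delivery report arrives

# Consume it back (same settings the consumer __init__ applies).
consumer_conf = dict(producer_conf)
consumer_conf['group.id'] = 'python_example_group_1'
consumer_conf['auto.offset.reset'] = 'earliest'
consumer = Consumer(consumer_conf)
consumer.subscribe(["test1"])
msg = consumer.poll(10.0)  # returns None if nothing arrives within 10 seconds
if msg is not None and msg.error() is None:
    print(msg.key(), msg.value())
consumer.close()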
# =============================================================================

from confluent_kafka import Producer, KafkaError
import json
import ccloud_lib

if __name__ == '__main__':

    # Read arguments and configurations and initialize
    args = ccloud_lib.parse_args()
    config_file = args.config_file
    topic = args.topic
    conf = ccloud_lib.read_ccloud_config(config_file)

    # Create Producer instance
    producer_conf = ccloud_lib.pop_schema_registry_params_from_config(conf)
    producer = Producer(producer_conf)

    # Create topic if needed
    ccloud_lib.create_topic(conf, topic)

    delivered_records = 0

    # Optional per-message on_delivery handler, triggered by poll() or flush()
    # when a message has been successfully delivered or has permanently
    # failed delivery (after retries).
    def acked(err, msg):
        """Delivery report handler called on
        successful or failed delivery of message
        """
        global delivered_records