import json
import logging
import threading

from kafka import KafkaConsumer


class KafkaListener:
    def __init__(self, topic, group_id, advertised_listeners, message_handler):
        self.consumer = KafkaConsumer(
            topic,
            group_id=group_id,
            bootstrap_servers=advertised_listeners
        )

        self.message_handler = message_handler

        logging.info('initializing kafka listener')

        thread = threading.Thread(target=self.run, args=())
        thread.daemon = True
        thread.start()

    def is_connected(self):
        return self.consumer.bootstrap_connected()

    def run(self):
        for msg in self.consumer:
            try:
                json_message = json.loads(msg.value)

                self.message_handler(
                    url=json_message['url'],
                    html_element_id=json_message['html_element_id'],
                    filename=json_message['output_file']
                )

            except Exception:
                logging.exception('failed to parse or handle message')
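
# --- Usage sketch (not from the original source) -----------------------------
# A minimal way to wire KafkaListener to a handler. The topic name, group id,
# broker address and handler below are assumptions for illustration only.
import time

def handle_scrape_request(url, html_element_id, filename):
    print(f"scraping {url}#{html_element_id} -> {filename}")

listener = KafkaListener(
    topic='scrape-requests',                # assumed topic name
    group_id='scraper-group',               # assumed consumer group
    advertised_listeners='localhost:9092',  # assumed broker address
    message_handler=handle_scrape_request,
)

# the consume loop runs on a daemon thread, so keep the main thread alive
while listener.is_connected():
    time.sleep(5)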
    def consume_messages(self):
        try:
            kafka_consumer = KafkaConsumer(
                self.topic_name, bootstrap_servers=self.bootstrap_servers)
            print(kafka_consumer.bootstrap_connected())
            if kafka_consumer is not None:
                for message in kafka_consumer:
                    # message value and key are raw bytes -- decode if necessary!
                    # e.g., for unicode: `message.value.decode('utf-8')`
                    print(message)
                    # print("%s:%d:%d: key=%s value=%s" % (message.topic, message.partition,
                    #                                      message.offset, message.key,
                    #                                      message.value))
        except Exception as ex:
            print("Failure in consume messages", ex)
from kafka import KafkaConsumer
from kafka.errors import NoBrokersAvailable
import json
import time
import sys

try:
    consumer = KafkaConsumer(
        'test-docker',
        group_id='my-group-1',
        bootstrap_servers='kafka:9092',
        value_deserializer=lambda m: json.loads(m.decode('utf-8')))
except NoBrokersAvailable:
    sys.exit('broker not available (yet?)')

if not consumer.bootstrap_connected():
    sys.exit('not connected, restarting...')

try:
    print('consuming')
    for message in consumer:
        # the key is still raw bytes -- decode if necessary; the value has
        # already been deserialized from JSON by value_deserializer
        print("%s:%d:%d: key=%s value=%s" %
              (message.topic, message.partition, message.offset, message.key,
               message.value))
    print('finished batch')
    time.sleep(1)
except Exception as e:
    print(repr(e))
    consumer.unsubscribe()
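
# --- Producer counterpart (not from the original source) ---------------------
# A sketch of a producer feeding the 'test-docker' topic with JSON values that
# match the consumer's value_deserializer above. The broker address mirrors
# the consumer; the payload fields are assumptions.
from kafka import KafkaProducer
import json

producer = KafkaProducer(
    bootstrap_servers='kafka:9092',
    value_serializer=lambda v: json.dumps(v).encode('utf-8'))

producer.send('test-docker', {'event': 'example'})  # assumed payload shape
producer.flush()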
from kafka import KafkaConsumer
from setup import topicName
# To consume latest messages and auto-commit offsets
consumer = KafkaConsumer(topicName,
                         group_id='group_1',
                         bootstrap_servers=['localhost:9092'])

if not consumer.bootstrap_connected():
    raise Exception("You aren't connected to the Kafka server")

for message in consumer:
    # message value and key are raw bytes -- decode if necessary!
    # e.g., for unicode: `message.value.decode('utf-8')`
    print("%s:%d:%d: key=%s value=%s" %
          (message.topic, message.partition, message.offset, message.key,
           message.value))

# If we need more than one consumer:
# consumer1 = KafkaConsumer('my-topic',
#                           group_id='my-group',
#                           bootstrap_servers='my.server.com')
# consumer2 = KafkaConsumer('my-topic',
#                           group_id='my-group',
#                           bootstrap_servers='my.server.com')
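
# --- setup module sketch (not from the original source) ----------------------
# The example above imports topicName from a setup module that is not shown.
# A minimal version might just name the topic and create it if missing; the
# topic name and broker address here are assumptions.
from kafka.admin import KafkaAdminClient, NewTopic
from kafka.errors import TopicAlreadyExistsError

topicName = 'my-topic'

admin = KafkaAdminClient(bootstrap_servers=['localhost:9092'])
try:
    admin.create_topics([NewTopic(name=topicName, num_partitions=1, replication_factor=1)])
except TopicAlreadyExistsError:
    pass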
if __name__ == '__main__':
    print("Starting up: Task 3 ---- Kafka Service Event Consumer")
    sleep(1)

    try:
        # create the Consumer instance
        service_consumer = KafkaConsumer(
            bootstrap_servers=kafka_server,
            auto_offset_reset='earliest',
            enable_auto_commit=True,
            auto_commit_interval_ms=500,
            group_id='my-group',
            value_deserializer=lambda x: json.loads(x.decode('utf-8')))
        # subscribe to topic
        service_consumer.subscribe(topics=kafka_event_topic)
        print("Consumer connected: ", service_consumer.bootstrap_connected())

        ## Attempting connection to the Postgres database
        # read connection parameters
        params = config()
        # connect to the PostgreSQL server
        print('Connecting to the PostgreSQL database...')
        conn = psycopg2.connect(**params)
        # create a psycopg2 cursor that can execute queries
        cursor = conn.cursor()

    except KeyboardInterrupt:
        print("Keyboard Interrupt ")

    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
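
    # --- Continuation sketch (not from the original source) ------------------
    # The excerpt stops after connecting the consumer and the PostgreSQL
    # cursor. A typical continuation would iterate over the subscribed topic
    # and persist each event; the table and column names are assumptions.
    for event in service_consumer:
        payload = event.value  # already a dict thanks to value_deserializer
        cursor.execute(
            "INSERT INTO service_events (topic, payload) VALUES (%s, %s)",
            (event.topic, json.dumps(payload)))
        conn.commit()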
from kafka import KafkaConsumer
import json

topic = 'electric'
key_deserializer = lambda k: k.decode('utf-8') if k else None
value_deserializer = lambda m: json.loads(m.decode('ascii'))
group_id = 'electric-group'

consumer = KafkaConsumer(topic,
                         group_id=group_id,
                         key_deserializer=key_deserializer,
                         value_deserializer=value_deserializer)
print("Consumer connected : ", consumer.bootstrap_connected())
for i, msg in enumerate(consumer):
    print(i, " Message is : ", msg)
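
# --- Shutdown sketch (not from the original source) --------------------------
# The loop above runs until the process is killed; a common variant catches
# Ctrl-C and closes the consumer so it leaves the group cleanly.
try:
    for i, msg in enumerate(consumer):
        print(i, " Message is : ", msg)
except KeyboardInterrupt:
    pass
finally:
    consumer.close()  # commits offsets if auto-commit is enabled and leaves the group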
from kafka import KafkaConsumer

# continuous loop: if the message iterator ever exits, reconnect
while True:

    # initialize consumer to given topic and broker
    consumer = KafkaConsumer('test', bootstrap_servers='localhost:9092')

    # verify connection
    print('Connected:', consumer.bootstrap_connected())

    # loop and print messages
    for msg in consumer:
        print('...received')
        print(msg)
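
# --- Variant sketch (not from the original source) ---------------------------
# The iterator above blocks indefinitely, so the outer reconnect loop rarely
# runs again. poll() with a timeout returns control between batches; the topic
# and broker address are the same assumptions as above.
from kafka import KafkaConsumer

consumer = KafkaConsumer('test', bootstrap_servers='localhost:9092')

while True:
    batch = consumer.poll(timeout_ms=1000)  # dict of TopicPartition -> list of records
    for records in batch.values():
        for msg in records:
            print('...received')
            print(msg)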
import json
import sys

import epics
from kafka import KafkaConsumer


def monitor(config_name: str, kafka_bootstrap: str, retry_count: int = 10):
    """Monitor Kafka topics and write to designated alarm process variables.

    Args:
        config_name (str): Name of the alarm configuration (also used as the Kafka topic)
        kafka_bootstrap (str): Name of kafka bootstrap server
        retry_count (int): Number of connection retries before logging a failure

    """

    consumer = None

    while True:

        while retry_count:
            try:
                consumer = KafkaConsumer(
                    config_name,
                    bootstrap_servers=[kafka_bootstrap],
                    key_deserializer=lambda x: x.decode('utf-8'))
                retry_count = 0

            except KeyboardInterrupt:
                print("Shutting down")
                sys.exit(0)

            except Exception:
                print("No consumers available.")
                print("Retrying...")
                retry_count -= 1

        if consumer is None:
            print("Unable to connect to Kafka bootstrap server.")
            sys.exit(0)

        elif consumer.bootstrap_connected():

            # initialize so can seek to beginning to get latest compacted state
            while not consumer._client.poll():
                continue

            consumer.seek_to_beginning()

            for message in consumer:

                try:

                    if "config:/" in message.key:
                        val = json.loads(message.value.decode('utf-8'))
                        pv = message.key.split("/")[-1]

                        # if enabled, write alarm
                        print(f"writing alarm {pv}FP")
                        if val.get("enabled") is not None:
                            # will be false
                            epics.caput(f"{pv}FP", 1)

                        else:
                            epics.caput(f"{pv}FP", 0)

                except KeyboardInterrupt:
                    print("Shutting down...")
                    sys.exit(0)

                except Exception as e:
                    print(e)

        else:
            print("Unable to connect to Kafka bootstrap server.")
            sys.exit(0)
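
# --- Usage sketch (not from the original source) ------------------------------
# monitor() blocks forever, so it is typically launched as a script entry
# point; the configuration name and bootstrap address below are assumptions.
if __name__ == '__main__':
    monitor('Accelerator', 'localhost:9092')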