Example 1
class ConfluentKafkaWriter(KafkaWriter):
    def __init__(self, kafka_hosts: List[str]):
        self.config = {'bootstrap.servers': ','.join(kafka_hosts)}
        # Pass the config dict directly; Producer(config) is the documented form.
        self.producer = Producer(self.config)
        self.messages: List[Message] = []

    def write(self, topic: str, partition: int, message: Message):
        self.producer.produce(topic=topic,
                              value=message.value,
                              key=message.key,
                              partition=partition,
                              on_delivery=self.delivery_callback,
                              timestamp=message.timestamp,
                              headers=message.headers)
        self.producer.poll(0)

    def flush(self):
        self.producer.flush()

    def delivery_callback(self, err, msg):
        if err:
            logger.error('%% Message failed delivery: %s\n' % err)
        else:
            self.messages.append(
                Message(msg.offset(), msg.key(), msg.value(),
                        msg.timestamp()[1], None))

    def close(self):
        # confluent_kafka's Producer has no close(); flush outstanding messages instead.
        self.producer.flush()
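# A minimal usage sketch for the writer above. The Message container and the
# KafkaWriter base class are not shown in the excerpt, so the definitions
# below are assumptions (and would need to precede the class above to run).
from typing import NamedTuple, Optional

class KafkaWriter:  # assumed stub; the real base class is not in the excerpt
    pass

class Message(NamedTuple):  # assumed field order, taken from delivery_callback
    offset: Optional[int]
    key: Optional[bytes]
    value: bytes
    timestamp: int
    headers: Optional[list]

writer = ConfluentKafkaWriter(['localhost:9092'])
writer.write('demo-topic', 0, Message(None, b'k', b'v', 0, None))
writer.flush()  # blocks until all delivery callbacks have fired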
Example 2
class ConfluentKafkaProducerUtil(object):
    def __init__(self, host):
        self.host = host
        # Producer expects a config dict; a bare host string would not work.
        self.producer = Producer({'bootstrap.servers': self.host})

    def single_producer(self, topic_name, content):
        """
        Produce a single message, then flush.
        :param topic_name:
        :param content:
        :return:
        """
        try:
            self.producer.produce(topic_name, content)
            self.producer.flush()
            return True
        except Exception:
            # Python 3 exceptions have no .message attribute; re-raise instead.
            raise

    def producer_message(self, topic_name, content):
        """
        Produce a single message without flushing.
        :return:
        """
        try:
            self.producer.produce(topic_name, content)
        except Exception:
            raise

    def message_flush(self):
        """
        Flush buffered messages.
        :return:
        """
        try:
            self.producer.flush()
        except Exception:
            raise

    def close(self):
        """
        Flush the producer; confluent_kafka's Producer has no close() method.
        :return:
        """
        self.producer.flush()

    def batch_producer(self, topic_name, content_list):
        """
        Batch-produce messages to Kafka.
        :param topic_name:
        :param content_list:
        :return:
        """
        try:
            for tmp_message in content_list:
                self.producer.produce(topic_name, tmp_message)
            self.producer.flush()
        except Exception:
            raise
Example 3
class MetricsEventsProducer:
    def __init__(self):
        self.prepareProducer("ReeferTelemetryProducers")

    def prepareProducer(self, groupID):
        options = {
            'bootstrap.servers': EventBackboneConfiguration.getBrokerEndPoints(),
            # group.id is a consumer property; librdkafka ignores it (with a warning) for producers.
            'group.id': groupID,
        }
        if EventBackboneConfiguration.isSecured():
            options['security.protocol'] = 'SASL_SSL'
            # If we are connecting to ES on IBM Cloud, the SASL mechanism is plain
            if EventBackboneConfiguration.getKafkaUser() == 'token':
                options['sasl.mechanisms'] = 'PLAIN'
            # If we are connecting to ES on OCP, the SASL mechanism is scram-sha-512
            else:
                options['sasl.mechanisms'] = 'SCRAM-SHA-512'
            options['sasl.username'] = EventBackboneConfiguration.getKafkaUser()
            options['sasl.password'] = EventBackboneConfiguration.getKafkaPassword()
        if EventBackboneConfiguration.isEncrypted():
            options['ssl.ca.location'] = EventBackboneConfiguration.getKafkaCertificate()
        # Note: this prints the SASL password along with the rest of the options.
        print("Kafka options are:")
        print(options)
        self.producer = Producer(options)

    def delivery_report(self, err, msg):
        """ Called once for each message produced to indicate delivery result.
            Triggered by poll() or flush(). """
        if err is not None:
            print(
                str(datetime.datetime.today()) +
                ' - Message delivery failed: {}'.format(err))
        else:
            print(
                str(datetime.datetime.today()) +
                ' - Message delivered to {} [{}]'.format(
                    msg.topic(), msg.partition()))

    def publishEvent(self, eventToSend, keyName):
        dataStr = json.dumps(eventToSend)
        print(dataStr)
        self.producer.produce(
            EventBackboneConfiguration.getTelemetryTopicName(),
            key=eventToSend[keyName],
            value=dataStr.encode('utf-8'),
            callback=self.delivery_report)
        self.producer.flush()

    def close(self):
        self.producer.flush()  # Producer has no close(); flush instead
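# For reference, a minimal sketch of the secured configuration the method
# above assembles. The endpoint and credentials are placeholders, not values
# from the excerpt.
from confluent_kafka import Producer

options = {
    'bootstrap.servers': 'broker-1:9093',   # placeholder endpoint
    'security.protocol': 'SASL_SSL',
    'sasl.mechanisms': 'PLAIN',             # 'SCRAM-SHA-512' on OCP, per the comments above
    'sasl.username': 'token',               # IBM Cloud Event Streams convention
    'sasl.password': '<api-key>',           # placeholder credential
    'ssl.ca.location': 'es-cert.pem',       # only needed with a custom CA
}
producer = Producer(options)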
Example 4
class KafkaTaskQueueSender(TaskQueueSenderInterface):
    def __init__(self, config: dict):
        # Producer configuration
        # See https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md
        # See https://github.com/edenhill/librdkafka/wiki/Using-SSL-with-librdkafka#prerequisites for SSL issues
        self.poll_timeout_sec = config.get("poll_timeout_sec", 100)
        self.config, self.topic, err = prepare_kafka_config(config)
        if not self.config:
            raise Exception(f"No config parsed: {err}")
        if not self.topic:
            raise Exception("No topic configured: {err}")

        # Create Producer instance
        self.producer = Producer(self.config)
        logger.info(f"Producer will send messages to the topic - '{config['connection_string'].split('=')[-1]}'.")

        # Wait until all messages have been delivered
        logger.info(f"Waiting for {len(self.producer)} deliveries")
        self.producer.flush()

    def __del__(self):
        if self.producer:
            self.producer.flush()  # Producer has no close(); flush pending messages

    def send_event(self, msg: typing.Optional[bytes]):
        try:
            self.producer.produce(self.topic, msg, on_delivery=self.delivery_callback)
        except BufferError as e:
            logger.info(
                f"Local producer queue is full ({len(self.producer)} messages awaiting delivery): try again. {e}"
            )
        # self.producer.poll(0)

    def put_task(self, task: Task):
        task_bytes: typing.Optional[bytes] = None
        try:
            task_bytes = serialize_task(task)
        except Exception as e:
            logger.error(f"Error while serizlising task: {e}")
            logger.error(f" + Task: {task}")
            raise e
        if not task_bytes:
            raise Exception("Could not serialize task")
        self.send_event(task_bytes)

    @staticmethod
    def delivery_callback(err, msg):
        """This is called only when self.producer.poll(0) will be called.

        To test it: call self.producer.poll(0) from 'send_event' func.
        """
        if err:
            logger.error(f"Message failed delivery: {err} ({msg.topic()} [{msg.partition()}] @ {msg.offset()})")
        else:
            logger.info(f"Message delivered to {msg.topic()} [{msg.partition()}] @ {msg.offset()}")
Example 5
class MetricsEventsProducer:
    def __init__(self):
        self.prepareProducer("ReeferTelemetryProducer")
        print("MetricsEventsProducer")

    def prepareProducer(self, groupID):
        options = {
            'bootstrap.servers': KAFKA_BROKERS,
            'group.id': groupID,
            'delivery.timeout.ms': 15000,
            'request.timeout.ms': 15000
        }
        print("kafka-user: "******"--- This is the configuration for the producer: ---")
        logging.info('[KafkaProducer] - {}'.format(options))
        logging.info("---------------------------------------------------")
        self.producer = Producer(options)

    def delivery_report(self, err, msg):
        """ Called once for each message produced to indicate delivery result.
            Triggered by poll() or flush(). """
        if err is not None:
            logging.info(
                str(datetime.datetime.today()) +
                ' - Message delivery failed: {}'.format(err))
        else:
            logging.info(
                str(datetime.datetime.today()) +
                ' - Message delivered to {} [{}]'.format(
                    msg.topic(), msg.partition()))

    def publishEvent(self, eventToSend, keyName):
        dataStr = json.dumps(eventToSend)
        logging.info("Send " + dataStr + " with key " + keyName + " to " +
                     TOPIC_NAME)

        self.producer.produce(TOPIC_NAME,
                              key=eventToSend[keyName],
                              value=dataStr.encode('utf-8'),
                              callback=self.delivery_report)
        self.producer.flush()
        # self.producer.poll(5)

    def close(self):
        self.producer.flush()  # Producer has no close(); flush instead
Example 6
class MetricsEventsProducer:

    def __init__(self):
        self.currentRuntime = EventBackboneConfiguration.getCurrentRuntimeEnvironment()
        self.brokers = EventBackboneConfiguration.getBrokerEndPoints()
        self.apikey = EventBackboneConfiguration.getEndPointAPIKey()
        self.topic_name = "containerMetrics"
        self.prepareProducer("pythonreefermetricproducers")
        
    def prepareProducer(self,groupID):
        options ={
                'bootstrap.servers':  self.brokers,
                'group.id': groupID
        }
        # We need this test as local kafka does not expect SSL protocol.
        if (self.apikey != ''):
            options['security.protocol'] = 'SASL_SSL'
            options['sasl.mechanisms'] = 'PLAIN'
            options['sasl.username'] = '******'
            options['sasl.password'] = self.apikey
        if (self.currentRuntime == 'ICP'):
            options['ssl.ca.location'] = 'es-cert.pem'
        print(options)
        self.producer = Producer(options)

    def delivery_report(self,err, msg):
        """ Called once for each message produced to indicate delivery result.
            Triggered by poll() or flush(). """
        if err is not None:
            print('Message delivery failed: {}'.format(err))
        else:
            print('Message delivered to {} [{}]'.format(msg.topic(), msg.partition()))

    def publishEvent(self, eventToSend, keyName):
        dataStr = json.dumps(eventToSend)
        self.producer.produce("containerMetrics",
                            key=eventToSend[keyName],
                            value=dataStr.encode('utf-8'), 
                            callback=self.delivery_report)
        self.producer.flush()

    def close(self):
        self.producer.flush()  # Producer has no close(); flush instead
Example 7
class ProducerQueue:  # Provides A Queue Based Interface To Send Messages To The Kafka Broker At The Specified Topic #
    def __init__(self, Logger: object, Host: str, TopicName: str,
                 Queue: object, Partition: int):  # Initialization #

        # Create Local Vars #
        self.Logger = Logger
        self.Host = Host
        self.TopicName = TopicName
        self.Queue = Queue
        self.Partition = Partition

        # Create Producer Object #
        Configuration = {'bootstrap.servers': Host}
        self.ProducerObject = Producer(Configuration)

        # Register Exit Function (Bound Method, So No Extra Argument) #
        atexit.register(self.Shutdown)

        # Launch Queue Handler #
        self.QueueHandler()

    def QueueHandler(self):  # Queue Handler Function #

        # Enter Main Loop #
        while True:

            # Produce Message #
            Message = self.Queue.get()
            self.ProducerObject.produce(self.TopicName, Message)

    def Shutdown(self):  # Flushes Outstanding Messages #

        # Flush Connection (Producer Has No close() Method) #
        self.ProducerObject.flush()
        self.Logger.Log(
            f'Producer Instance Exited At Address {str(hex(id(self)))}')
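# A sketch of driving the queue-based interface above. Note that __init__
# calls QueueHandler(), which loops forever, so construction itself blocks;
# the logger stub and the threading here are assumptions, not part of the excerpt.
import queue
import threading

class PrintLogger:                      # assumed stand-in for the Logger object
    def Log(self, msg):
        print(msg)

q = queue.Queue()
threading.Thread(
    target=ProducerQueue,
    args=(PrintLogger(), 'localhost:9092', 'demo-topic', q, 0),
    daemon=True,
).start()
q.put(b'hello')                         # picked up by QueueHandler and produced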
Example 8
class KafkaProducer(Greenlet):
    def __init__(self, params):
        Greenlet.__init__(self)
        self.params = params
        self.producer = Producer(config)  # 'config' is defined elsewhere in the original module
        self.runflag = True
        self.index = 0
        gevent.sleep()
        atexit.register(self.cleanup)

    def run(self):
        global producer_total, consumer_total, producer_topic
        # thread is stopped by setting close event
        while self.runflag:
            self.index += 1
            producer_total += 1
            val = makeRecord(self.params)
            key = str(self.index % 10)
            self.producer.produce(producer_topic,
                                  key=key.encode('utf-8'),
                                  value=val.encode('utf-8'))
            socketio.emit('newMessage', {'msg': val}, namespace='/producer')
            print("-> producer sent: ", val)
            smsg = "EPD Simulator Totals --> Produced: {}   Consumed: {}".format(
                producer_total, consumer_total)
            socketio.emit('newMessage', {'msg': smsg}, namespace='/epd')
            # add some random time delay between sent messages
            delay = 0.5 + 2 * random.random()
            gevent.sleep(delay)

    def cleanup(self):
        try:
            # Producer has no close(); a short, best-effort flush on exit instead.
            self.producer.flush(0.5)
        except Exception:
            print("-> producer not initialized...")
Example 9
def main():
    print("Confluent Kafka Version: %s - Libversion: %s" %
          (version(), libversion()))
    topic = os.environ["MAPR_STREAMS_STREAM_LOCATION"].replace(
        '"', '') + ":" + os.environ["MAPR_STREAMS_TOPIC"].replace('"', '')
    print("Producing messages to: %s" % topic)
    # MapR Streams addresses topics by stream path, so bootstrap.servers
    # can be left empty here.
    p = Producer({'bootstrap.servers': ''})

    # Listen for messages
    running = True
    lastflush = 0
    while running:
        curtime = int(time.time())
        curts = time.strftime("%m/%d/%Y %H:%M:%S")
        message = {}
        message['ts'] = curts
        message['field1'] = "This is fun"
        message['field2'] = "or is it?"
        message_json = json.dumps(message)
        print(message_json)

        try:
            p.produce(topic, value=message_json, callback=delivery_callback)
            p.poll(0)
        except BufferError as e:
            print("Buffer full, waiting for free space on the queue")
            p.poll(10)
            p.produce(topic, value=message_json, callback=delivery_callback)

        except KeyboardInterrupt:
            print("\n\nExiting per User Request")
            p.flush()  # Producer has no close(); flush instead
            sys.exit(0)
        delay = random.randint(1, 9)
        print("Sleeping for %s" % delay)
        time.sleep(delay)
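# The BufferError branch above retries exactly once after a blocking poll().
# A more general back-pressure helper would loop until the local queue drains;
# a sketch (not part of the source):
def produce_with_backpressure(p, topic, value, callback, wait_s=1.0):
    while True:
        try:
            p.produce(topic, value=value, callback=callback)
            return
        except BufferError:
            p.poll(wait_s)  # serve delivery callbacks to free queue space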
Example 10
def signal_handler(sig, frame):
    producer.flush()  # Producer has no close(); flush before exiting
    sys.exit(0)

def parseArguments():
    version="0"
    arg2="./data/products.json"
    if len(sys.argv) == 1:
        print("Usage: SendProductToKafka  --file datafilename ")
        exit(1)
    else:
        for idx in range(1, len(sys.argv)):
            arg=sys.argv[idx]
            if arg == "--file":
                arg2=sys.argv[idx+1]
            if arg == "--help":
                print("Send product json documents to a kafka cluster. Use environment variables KAFKA_BROKERS")
                print(" and KAFKA_APIKEY is the cluster accept sasl connection with token user")
                print(" and KAFKA_CERT for ca.crt to add for TLS communication")
                exit(0)
    return arg2

if __name__ == "__main__":
    filename = parseArguments()
    products = readProducts(filename)
    try:
        publishEvent(SOURCE_TOPIC, products)
    except KeyboardInterrupt:
        producer.flush()  # Producer has no close(); flush instead
        sys.exit(0)
Example 11
class KafkaTaskQueueSender(TaskQueueSenderInterface):
    # This is not possible in Azure as it is expected you create your topics (a.k.a. event hubs)
    # Manually in Azure portal, or using azure proprietary APIs
    def _create_topics(self):
        # Create Admin instance
        self.admin = AdminClient(self.config)
        # Create topics
        fs = self.admin.create_topics([
            NewTopic(topic, num_partitions=3, replication_factor=1)
            for topic in [self.topic]
        ])
        # Wait for topic creation to complete
        for topic, f in fs.items():
            try:
                logger.info(f"Waiting for topic '{topic}' to be created")
                f.result()  # The result itself is None
                logger.info(f"Topic '{topic}' created")
            except Exception as e:
                logger.error(f"Failed to create topic '{topic}': {e}")

    def __init__(self, config: dict):
        # Producer configuration
        # See https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md
        # See https://github.com/edenhill/librdkafka/wiki/Using-SSL-with-librdkafka#prerequisites for SSL issues
        connection_string = str(config.get("connection_string"))
        if not connection_string:
            raise Exception("No connection_string configured")
        self.config = prepare_kafka_config(config)
        # Find our topic

        parts = parse_event_hub_connection_string(connection_string) or {}
        if not parts:
            raise Exception(f"No parts found in connection_string")
        self.topic = parts.get("entity_path")
        if not self.topic:
            raise Exception("No topic configured")
        # self._create_topics()
        # Create Producer instance
        self.producer = Producer(self.config)

        # Wait until all messages have been delivered
        logger.info(f"Waiting for {len(self.producer)} deliveries")
        self.producer.flush()

    def __del__(self):
        if self.producer:
            self.producer.flush()  # Producer has no close(); flush pending messages

    def send_event(self, msg: typing.Optional[bytes]):
        try:
            self.producer.produce(self.topic, msg, callback=delivery_callback)
        except BufferError as e:
            logger.info(
                f"Local producer queue is full ({len(self.producer)} messages awaiting delivery): try again"
            )
        # self.producer.poll(0)

    def put_task(self, task: Task):
        task_bytes: typing.Optional[bytes] = None
        try:
            task_bytes = serialize_task(task)
        except Exception as e:
            logger.error(f"Error while serizlising task: {e}")
            logger.error(f" + Task: {task}")
            raise e
        if not task_bytes:
            raise Exception("Could not serialize task")
        self.send_event(task_bytes)
Example 12
class KafkaClient:
    def __init__(self,
                 producer=False,
                 consumer=False,
                 groupId=None,
                 brokers=None,
                 topics=None):
        if (producer and consumer) or (not producer and not consumer):
            raise Exception(KAFKA.ERROR_BOTH_CLIENT)
        self.type = KAFKA.CONSUMER_TYPE if consumer else KAFKA.PRODUCER_TYPE
        # Avoid mutable default arguments; fall back to fresh lists here.
        self.specs = {
            "groupId": groupId,
            "brokers": brokers or [],
            "topics": topics or [],
        }
        if self.type == KAFKA.CONSUMER_TYPE:
            self.__setConsumer()
        else:
            self.__setProducer()

    def __setConsumer(self):
        """Used by consumer-type clients by default; sets up the consumer
            client and subscribes to the specified topics."""
        self.client = Consumer({
            'bootstrap.servers': ','.join(self.specs["brokers"]),
            'group.id': self.specs['groupId'],
            'auto.offset.reset': 'earliest'
        })
        self.client.subscribe(self.specs["topics"])
        return self

    def __setProducer(self):
        """Used by producer-type clients by default; sets up the producer client."""
        self.client = Producer(
            {'bootstrap.servers': ','.join(self.specs["brokers"])})
        return self

    def __useMessage(self, message, actionable, action):
        """Run the given action on the consumed message when the
            verification function (actionable) is satisfied by it."""
        if actionable(message):
            action(message)
        return

    def __consume(self, actionable, action):
        """Start consuming messages from the specified broker
            and act on them with the specified functions."""
        stop = False
        # self.threads = []
        while not stop:
            message = self.client.poll(KAFKA.CONSUMER_POLL_TIME)
            # Skip empty polls and error events before decoding the payload.
            if message is not None and message.error() is None:
                self.__useMessage(message.value().decode("utf-8"), actionable,
                                  action)
        self.client.close()
        return

    def __produce(self, message):
        """Produce a message to the specified broker."""
        self.client.produce(self.specs["topics"][0], message)
        self.client.flush()
        return

    def perform(self, message=None, actionable=None, action=None):
        """Perform the consume or produce action, depending on the
            client type."""
        if self.type == KAFKA.PRODUCER_TYPE:
            if message is not None:
                self.__produce(message)
            else:
                print(KAFKA.ERROR_PERFORM_MISSING_PARAMS)
        else:
            if actionable is not None and action is not None:
                self.__consume(actionable, action)
            else:
                print(KAFKA.ERROR_PERFORM_MISSING_PARAMS)
        return self
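# A usage sketch for the wrapper above. The KAFKA constants module is not
# shown in the excerpt, so the broker/topic values here are assumptions.
producer_client = KafkaClient(producer=True,
                              brokers=['localhost:9092'],
                              topics=['events'])
producer_client.perform(message='hello')

consumer_client = KafkaClient(consumer=True,
                              groupId='readers',
                              brokers=['localhost:9092'],
                              topics=['events'])
consumer_client.perform(actionable=lambda m: True,  # act on every message
                        action=print)               # runs until interrupted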
Example 13
class KafkaConnector(object):
    """Simple wrapper class to configure a simple kafka consumer
    and producer pair, so that they can be used to perform simple
    filter() and map() operations over the received tweets"""

    def __init__(
        self,
        group_id=None,
        consumer_topic='consumer_limbo',
        producer_topic='consumer_limbo',
        logging_topic='minteressa_stats',
        bootstrap_servers='kafka:9092'
    ):

        self.group_id = group_id
        self.bootstrap_servers = bootstrap_servers
        self.consumer_topic = consumer_topic
        self.producer_topic = producer_topic
        self.logging_topic = logging_topic

        self.consumer = None
        self.producer = None

    def listen(self):
        while True:
            msg = self.consumer.poll()
            if msg is None:
                continue
            if msg.error():
                # Error or event
                if msg.error().code() == KafkaError._PARTITION_EOF:
                    # End of partition event
                    sys.stderr.write(
                        '%% %s [%d] reached end at offset %d\n' % (
                            msg.topic(),
                            msg.partition(),
                            msg.offset()
                        )
                    )
                elif msg.error():
                    # Error
                    raise KafkaException(msg.error())
            else:
                # Proper message
                sys.stdout.write(
                    '%s [partition-%d] at offset %d with key %s:\n' %
                    (
                        msg.topic(),
                        msg.partition(),
                        msg.offset(),
                        str(msg.key())
                    )
                )
                yield msg

    def connect(self):
        self.consumer = Consumer({
            'bootstrap.servers': self.bootstrap_servers,
            'group.id': self.group_id,
            'default.topic.config': {
                'auto.offset.reset': 'smallest'
            }
        })
        print("subscribing to %s" % self.consumer_topic)
        self.consumer.subscribe([
            self.consumer_topic
        ])
        print("Subscribed to topic %s " % self.consumer_topic)

        self.producer = Producer({
            'bootstrap.servers': self.bootstrap_servers,
            # group.id is a consumer property; producers ignore it.
            'group.id': self.group_id
        })

    def send(self, message, producer_topic=None):
        producer_topic = producer_topic \
            if producer_topic is not None \
            else self.producer_topic

        self.producer.produce(
            producer_topic,
            message
        )
        # self.producer.flush()


    def log(self, message, logging_topic=None):
        logging_topic = logging_topic \
            if logging_topic is not None \
            else self.logging_topic

        self.producer.produce(logging_topic, message)
        self.producer.flush()

    def close(self):
        self.consumer.close()
        self.producer.flush()  # Producer has no close(); flush instead
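# Following the docstring's filter()/map() framing, a usage sketch
# (the group id and predicate are assumptions; topics are the defaults above):
connector = KafkaConnector(group_id='tweet-filter')
connector.connect()
for msg in connector.listen():
    text = msg.value().decode('utf-8')
    if 'kafka' in text:                 # filter()
        connector.send(text.upper())    # map(), then re-publish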
Example 14
class KafkaProducer:

    def __init__(self, topic_name, cleanup_policy='delete', num_partitions=1, num_replicas=1):
        
        # Topic configuration that you wanna modify
        self.num_partitions = num_partitions
        self.num_replicas= num_replicas
        self.topic_name = topic_name
        self.cleanup_policy = cleanup_policy

        # If you choose compact, please use lz4; for more details see README.md
        if self.cleanup_policy == 'compact':
            ## Configure topic for compaction
            self.topic_configuration = {
                "cleanup.policy": 'compact',
                "compression.type": "lz4",
                "delete.retention.ms": 100,
                "file.delete.delay.ms": 10
            }
        else:
            ## Configure topic for deletion
            self.topic_configuration = {
                "cleanup.policy": 'delete'
            }

        # create a dict with Producer configuration
        self.producer_configuration = {
            "bootstrap.servers": BROKER_URL,
            "client.id": f"{self.topic_name}_group"
        }

        # create a producer
        self.producer = Producer(
            self.producer_configuration
        )
        


    def create_topic(self):
        client = AdminClient({"bootstrap.servers": BROKER_URL})

        topic_metadata = client.list_topics(timeout=5)

        if topic_metadata.topics.get(self.topic_name) is not None:
            print("Topic already exists")
        else:
            print(f"Creating new topic {self.topic_name}")

            # AdminClient's method is create_topics (plural).
            futures = client.create_topics([
                NewTopic(
                    topic=self.topic_name,
                    num_partitions=self.num_partitions,
                    replication_factor=self.num_replicas,
                    config=self.topic_configuration
                )
            ])
            for topic, future in futures.items():
                try:
                    future.result()
                    print(f"Topic {self.topic_name} created successfully!")
                except Exception as e:
                    print(f"Failed to create topic {self.topic_name}: {e}")
                    raise


    def close(self):
        # Flush the producer; confluent_kafka's Producer has no close() method.
        self.producer.flush()
        print("Producer closed successfully!")
Example 15
def get_producer():
    producer = Producer({'bootstrap.servers': '127.0.0.1:9092'})

    yield producer

    producer.flush()  # Producer has no close(); flush pending messages on teardown
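# This generator reads like a pytest fixture body; a sketch of registering it
# as one (the decorator and imports are assumptions, the excerpt shows only the body):
import pytest
from confluent_kafka import Producer

@pytest.fixture
def kafka_producer():
    producer = Producer({'bootstrap.servers': '127.0.0.1:9092'})
    yield producer
    producer.flush()  # teardown: drain the delivery queue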