def _consume_messages(cls, host, port):
    kafka = KafkaClient(cls.server.host + ":" + str(cls.server.port))
    consumer = MultiProcessConsumer(kafka, None, cls.beaver_config.get('kafka_topic'), num_procs=5)
    return consumer.get_messages(count=100, block=True, timeout=5)

def cluster(self):
    rc = 1
    # Test cluster as a whole
    self._client = DevopsSimpleClient(self._broker)

    # Use multiprocessing for parallel consumers
    from kafka import MultiProcessConsumer

    # This will split the number of partitions among two processes
    consumer = MultiProcessConsumer(self._client, "devops-group", "devopstest1", num_procs=2)

    # This will spawn processes such that each handles 2 partitions max.
    # NOTE: this rebinds `consumer`, so only the partitions_per_proc consumer is used below.
    consumer = MultiProcessConsumer(self._client, "devops-group", "devopstest1", partitions_per_proc=2)

    for message in consumer:
        if self._pattern.match(message.message.value):
            if self._debug:
                print(message.message.value)
            rc = 0

    for message in consumer.get_messages(count=5, block=True, timeout=4):
        if self._pattern.match(message.message.value):
            if self._debug:
                print(message.message.value)
            rc = 0
        else:
            rc = 2

    self._client.close()
    return (rc, None)

def multiprocess_consumer():
    '''multiprocess consumer'''
    from kafka import KafkaClient, MultiProcessConsumer

    kafka = KafkaClient(KAFKA_SERVER)

    # This will split the number of partitions among two processes
    consumer = MultiProcessConsumer(kafka, b'my-group', b'topic1', num_procs=2)

    # This will spawn processes such that each handles 2 partitions max
    consumer = MultiProcessConsumer(kafka, b'my-group', b'topic1', partitions_per_proc=2)

    for message in consumer:
        print(message)

    for message in consumer.get_messages(count=5, block=True, timeout=4):
        print(message)

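The helper above depends on a module-level KAFKA_SERVER constant that is not part of the snippet. A hypothetical definition and entry point, purely for illustration (the real value lives elsewhere in the source module):

KAFKA_SERVER = 'localhost:9092'  # hypothetical broker address; the actual constant is defined elsewhere

if __name__ == '__main__':
    multiprocess_consumer()
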
#!/usr/bin/env python
# Takes two arguments: topic n
# n is the expected number of messages after which the app will stop reading
# Reads the messages from the topic and prints each message on a new line
import uuid
import sys

from kafka import KafkaClient, MultiProcessConsumer

topic = sys.argv[1]
n = int(sys.argv[2])

kafka = KafkaClient("192.168.4.40:9092")
consumer = MultiProcessConsumer(kafka, str(uuid.uuid4()), topic)

for msg in consumer.get_messages(count=n, block=True, timeout=60000):
    print(msg)

kafka.close()

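Assuming the script above is saved as, say, consume.py (a hypothetical filename), it would be run as python consume.py some-topic 100 to read 100 messages from some-topic. Because the group id is a fresh uuid4 on every run, committed offsets are not shared between runs.
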
from kafka import KafkaClient, MultiProcessConsumer

kafka = KafkaClient('localhost:9092')

# This will split the number of partitions among two processes
consumer = MultiProcessConsumer(kafka, b'my-group', b'my-topic', num_procs=2)

# This will spawn processes such that each handles 2 partitions max
consumer = MultiProcessConsumer(kafka, b'my-group', b'my-topic', partitions_per_proc=2)

for message in consumer:
    print(message)

for message in consumer.get_messages(count=5, block=True, timeout=4):
    print(message)

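None of the examples above shut the consumer down explicitly, even though MultiProcessConsumer spawns worker processes. A minimal sketch of a bounded read with explicit cleanup, assuming the same local broker address and topic names as the example above:

from kafka import KafkaClient, MultiProcessConsumer

kafka = KafkaClient('localhost:9092')  # assumed broker address, as in the example above
consumer = MultiProcessConsumer(kafka, b'my-group', b'my-topic', num_procs=2)

try:
    # Drain a bounded batch rather than iterating forever
    for message in consumer.get_messages(count=10, block=True, timeout=5):
        print(message)
finally:
    consumer.stop()  # join the consumer's worker processes
    kafka.close()    # close the client's broker connections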