def test_confluent_admin_client_get_consumer_offsets(monkeypatch):
    """Offsets are returned for both the single- and multi-threaded paths."""
    # Replace the real consumer with the mock before the client touches Kafka.
    monkeypatch.setattr(confluent_kafka, 'Consumer', MockedConsumer)

    admin = ConfluentAdminClient(['foo_baz_1'])
    admin.client = MockedAdmin()

    # Default (single-threaded) call.
    assert admin.get_consumer_offsets(['Topic1'])
    # Explicit multi-threaded call.
    assert admin.get_consumer_offsets(['Topic1'], no_of_threads=2)
def backup(self):
    """Read topic messages and consumer offsets from Kafka and persist them.

    Chooses a reader based on the configured kafka library, optionally
    drops topics the broker does not know about, then writes messages
    and consumer-group offsets via the data flow manager.
    """
    # Read topic messages from kafka broker
    if self.config.kafka_library == 'confluent':
        reader = ConfluentKafkaReader(self.config.brokers)
    else:
        reader = PythonKafkaReader(self.config.brokers)

    if self.config.ignore_missing_topics:
        logger.debug('Filter out topics that are not in Kafka broker')
        broker_topic_names = reader.list_topics()
        topics = []
        for t in self.config.topics:
            if t in broker_topic_names:
                topics.append(t)
            else:
                logger.debug(f'Ignore topic {t} since it is '
                             'missing in kafka broker')
    else:
        topics = self.config.topics

    reader.subscribe(topics)
    msg_dict = reader.read(timeout=self.config.consumer_timeout)
    partitions = [
        Partition(topic, partition_no, msgs)
        for (topic, partition_no), msgs in msg_dict.items()
    ]

    # Fetch consumer group offsets
    admin_client = ConfluentAdminClient(self.config.brokers)
    offsets = admin_client.get_consumer_offsets(
        topics, no_of_threads=self.config.threads)

    # Write topic messages and consumer offsets to disk
    data_flow_manager = DataFlowManager(self.config.data)
    data_flow_manager.write(offsets, partitions)
def test_get_consumer_offsets(confluent_admin_client: ConfluentAdminClient):
    """The admin client returns a non-empty offsets result for a known topic."""
    topic_names = ['CBBCAutomationInstrumentFitPOParameters']
    result = confluent_admin_client.get_consumer_offsets(
        topic_names, no_of_threads=8)
    assert result