# Example 1
 def backup(self):
     """Back up Kafka topic messages and consumer-group offsets to disk."""
     # Pick the reader implementation selected in the configuration.
     if self.config.kafka_library == 'confluent':
         reader = ConfluentKafkaReader(self.config.brokers)
     else:
         reader = PythonKafkaReader(self.config.brokers)

     # Optionally drop configured topics that the broker does not know.
     topics = self.config.topics
     if self.config.ignore_missing_topics:
         logger.debug('Filter out topics that are not in Kafka broker')
         available = set(reader.list_topics())
         kept = []
         for topic in topics:
             if topic in available:
                 kept.append(topic)
             else:
                 logger.debug(f'Ignore topic {topic} since it is '
                              'missing in kafka broker')
         topics = kept

     # Consume every message of the selected topics from the broker.
     reader.subscribe(topics)
     message_map = reader.read(timeout=self.config.consumer_timeout)
     partitions = []
     for (topic, partition_no), msgs in message_map.items():
         partitions.append(Partition(topic, partition_no, msgs))

     # Look up the committed consumer-group offsets for the same topics.
     admin_client = ConfluentAdminClient(self.config.brokers)
     offsets = admin_client.get_consumer_offsets(
         topics, no_of_threads=self.config.threads)

     # Persist messages and offsets through the data-flow layer.
     DataFlowManager(self.config.data).write(offsets, partitions)
# Example 2
def test_confluent_admin_client_get_consumer_offsets(monkeypatch):
    """Offsets are returned for both the default and the threaded path."""
    monkeypatch.setattr(confluent_kafka, 'Consumer', MockedConsumer)
    client = ConfluentAdminClient(['foo_baz_1'])
    client.client = MockedAdmin()
    # Exercise the single-threaded default first, then the threaded variant.
    for thread_count in (None, 2):
        if thread_count is None:
            result = client.get_consumer_offsets(['Topic1'])
        else:
            result = client.get_consumer_offsets(['Topic1'],
                                                 no_of_threads=thread_count)
        assert result
# Example 3
    def restore(self):
        """Restore topic messages and consumer-group offsets from disk.

        Reads the backed-up data, replays the messages into the Kafka
        broker using a pool of up to ``self.config.threads`` worker
        threads (one partition per task), then recomputes and applies
        the consumer-group offsets.
        """
        # Read topic messages and consumer offsets from disk
        data_flow_manager = DataFlowManager(self.config.data)
        offsets, partitions = data_flow_manager.read(self.config.topics)

        def func(partition):
            # Replay every message of a single partition into the broker.
            # NOTE: added the missing space after "messages" — previously
            # the log line rendered as "...messagesto topic: ...".
            logger.debug(f'Write {len(partition.messages)} messages '
                         f'to topic: {partition.topic} '
                         f'partition: {partition.name} in kafka broker')
            writer = ConfluentKafkaWriter(self.config.brokers)
            for msg in partition.messages:
                writer.write(partition.topic, partition.name, msg)
            writer.flush()

        # Write topic messages to kafka broker. Using the pool as a
        # context manager terminates its worker threads even when a
        # task raises (the original pool was never closed/joined).
        with ThreadPool(self.config.threads) as pool:
            pool.map(func, partitions)
        # Calculate new offsets
        new_offsets = generate_new_offsets(offsets, partitions)
        # Set consumer group offsets
        admin_client = ConfluentAdminClient(self.config.brokers)
        admin_client.set_consumer_offsets(new_offsets)
# Example 4
def test_confluent_kafka_writer_write(
        confluent_kafka_writer: ConfluentKafkaWriter):
    """A message written via the writer can be read back from the broker."""
    confluent_admin_client = ConfluentAdminClient(['localhost'])
    new_topic = NewTopic('Topic1', num_partitions=1, replication_factor=1)
    try:
        future_dict = confluent_admin_client.client.create_topics([new_topic])
        wait_for_futures(future_dict)
    except Exception:
        # Best-effort setup: the topic may already exist from a previous
        # run. Narrowed from a bare `except:` so that KeyboardInterrupt
        # and SystemExit are no longer swallowed.
        pass
    confluent_kafka_writer.write(
        'Topic1', 0, Message(1, 'Key1', 'Val1',
                             int(datetime.now().timestamp())))
    # Give the broker a moment to commit the message before reading back.
    sleep(5)
    confluent_kafka_reader = ConfluentKafkaReader(['localhost'])
    confluent_kafka_reader.subscribe(['Topic1'])
    partition_message_dict = confluent_kafka_reader.read()
    assert partition_message_dict
# Example 5
def test_get_consumer_groups(confluent_admin_client: ConfluentAdminClient):
    """The broker reports at least one consumer group."""
    assert confluent_admin_client.get_consumer_groups()
# Example 6
def confluent_admin_client(KAFKA_HOSTS):
    """Fixture: a ConfluentAdminClient connected to the KAFKA_HOSTS brokers."""
    client = ConfluentAdminClient(KAFKA_HOSTS)
    return client
# Example 7
def test_get_consumer_offsets(confluent_admin_client: ConfluentAdminClient):
    """Offsets come back when querying with multiple worker threads."""
    topics = ['CBBCAutomationInstrumentFitPOParameters']
    result = confluent_admin_client.get_consumer_offsets(topics,
                                                         no_of_threads=8)
    assert result