Example #1
 def test_offset_metadata_invalid_partition_subset(self, kafka_client_mock):
     with pytest.raises(UnknownPartitions):
         get_consumer_offsets_metadata(
             kafka_client_mock,
             self.group,
             {'topic1': [1, 99]},
         )
Example #2
 def test_offset_metadata_unknown_topic(self, kafka_client_mock):
     with pytest.raises(UnknownTopic):
         get_consumer_offsets_metadata(
             kafka_client_mock,
             "this won't even be consulted",
             ["something that doesn't exist"],
         )
Example #4
 def test_offset_metadata_invalid_arguments(self, kafka_client_mock):
     with pytest.raises(TypeError):
         get_consumer_offsets_metadata(
             kafka_client_mock,
             "this won't even be consulted",
             "this should be a list or dict",
         )
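Taken together, the tests above pin down the contract of the topics argument: a list of topic names or a dict mapping topic to a list of partitions is accepted, and anything else raises TypeError. A minimal sketch of the two accepted call shapes (the client, group, and topic names are placeholders, not taken from any of the projects above):

    # get_consumer_offsets_metadata comes from the project under test;
    # client, group, and topic names below are illustrative only.
    offsets_for_all_partitions = get_consumer_offsets_metadata(
        client, 'example-group', ['topic1'],          # list form
    )
    offsets_for_some_partitions = get_consumer_offsets_metadata(
        client, 'example-group', {'topic1': [0, 1]},  # dict form
    )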
Example #7
 def test_get_metadata_kafka_error(self, kafka_client_mock):
     with mock.patch.object(MyKafkaToolClient,
                            'load_metadata_for_topics',
                            side_effect=KafkaUnavailableError("Boom!"),
                            autospec=True) as mock_func:
         with pytest.raises(KafkaUnavailableError):
             get_consumer_offsets_metadata(
                 kafka_client_mock,
                 self.group,
                 {'topic1': [99]},
             )
         assert mock_func.call_count == 2
Example #8
 def test_get_metadata_kafka_error(self, kafka_client_mock):
     with mock.patch.object(
         MyKafkaToolClient,
         'load_metadata_for_topics',
         side_effect=KafkaUnavailableError("Boom!"),
         autospec=True
     ) as mock_func:
         with pytest.raises(KafkaUnavailableError):
             get_consumer_offsets_metadata(
                 kafka_client_mock,
                 self.group,
                 {'topic1': [99]},
             )
         assert mock_func.call_count == 2
Example #9
    def test_offsets_kafka_error(self, kafka_client_mock):
        with mock.patch.object(
            MyKafkaToolClient,
            'send_offset_fetch_request_kafka',
            side_effect=GroupCoordinatorNotAvailableError('Boom!'),
            autospec=True,
        ) as mock_get_kafka:
            with pytest.raises(GroupCoordinatorNotAvailableError):
                get_consumer_offsets_metadata(
                    kafka_client_mock,
                    self.group,
                    self.topics,
                )

            assert mock_get_kafka.call_count == 1
Example #10
 def test_offset_metadata_unknown_topic_no_fail(self, kafka_client_mock):
     actual = get_consumer_offsets_metadata(
         kafka_client_mock,
         "this won't even be consulted",
         ["something that doesn't exist"],
         raise_on_error=False
     )
     assert not actual
Example #11
 def test_offset_metadata_unknown_partitions_no_fail(self, kafka_client_mock):
     actual = get_consumer_offsets_metadata(
         kafka_client_mock,
         self.group,
         {'topic1': [99]},
         raise_on_error=False
     )
     assert not actual
Example #12
 def get_offsets(cls, client, group, topics_dict):
     try:
         return get_consumer_offsets_metadata(
             client, group, topics_dict, False,  # False -> raise_on_error=False (see the no-fail tests above)
         )
     except Exception:
         print(
             "Error: Encountered error with Kafka, please try again later.",
             file=sys.stderr
         )
         raise
Example #13
    def test_offset_metadata_invalid_partition_subset_no_fail(
            self, kafka_client_mock):
        # Partition 99 does not exist, so we expect to have
        # offset metadata ONLY for partition 1.
        expected = [ConsumerPartitionOffsets('topic1', 1, 20, 30, 5)]

        actual = get_consumer_offsets_metadata(kafka_client_mock,
                                               self.group, {'topic1': [1, 99]},
                                               raise_on_error=False)
        assert 'topic1' in actual
        assert actual['topic1'] == expected
Example #14
 def get_offsets(cls, client, group, topics_dict):
     try:
         return get_consumer_offsets_metadata(
             client,
             group,
             topics_dict,
             False,  # raise_on_error=False
         )
     except Exception:
         print(
             "Error: Encountered error with Kafka, please try again later.",
             file=sys.stderr)
         raise
Example #15
    def test_offsets_kafka_empty(self, kafka_client_mock):
        with mock.patch.object(
            MyKafkaToolClient,
            'send_offset_fetch_request_kafka',
            return_value={},
            autospec=True,
        ) as mock_get_kafka:
            actual = get_consumer_offsets_metadata(
                kafka_client_mock,
                self.group,
                self.topics,
            )

            assert mock_get_kafka.call_count == 1
            assert self._has_no_partitions(actual)
Example #16
    def restore_offsets(cls, client, parsed_consumer_offsets, storage):
        """Fetch current offsets from kafka, validate them against given
        consumer-offsets data and commit the new offsets.

        :param client: Kafka-client
        :param parsed_consumer_offsets: Parsed consumer offset data from json file
        :type parsed_consumer_offsets: dict with 'groupid' and 'offsets' keys,
            where 'offsets' maps topic -> partition -> offset
        :param storage: String describing where to store the committed offsets.
        """
        # Fetch current offsets
        try:
            consumer_group = parsed_consumer_offsets['groupid']
            topics_offset_data = parsed_consumer_offsets['offsets']
            topic_partitions = dict(
                (topic, [partition for partition in offset_data.keys()])
                for topic, offset_data in topics_offset_data.items())
        except KeyError:  # missing 'groupid' or 'offsets' key
            print(
                "Error: Given parsed consumer-offset data {consumer_offsets} "
                "could not be parsed".format(
                    consumer_offsets=parsed_consumer_offsets),
                file=sys.stderr,
            )
            raise
        current_offsets = get_consumer_offsets_metadata(
            client,
            consumer_group,
            topic_partitions,
            offset_storage=storage,
        )
        # Build new offsets
        new_offsets = cls.build_new_offsets(
            client,
            topics_offset_data,
            topic_partitions,
            current_offsets,
        )

        # Commit offsets
        consumer_group = parsed_consumer_offsets['groupid']
        set_consumer_offsets(
            client,
            consumer_group,
            new_offsets,
            offset_storage=storage,
        )
        print("Restored to new offsets {offsets}".format(
            offsets=dict(new_offsets)))
Example #17
    def restore_offsets(cls, client, parsed_consumer_offsets, storage):
        """Fetch current offsets from kafka, validate them against given
        consumer-offsets data and commit the new offsets.

        :param client: Kafka-client
        :param parsed_consumer_offsets: Parsed consumer offset data from json file
        :type parsed_consumer_offsets: dict with 'groupid' and 'offsets' keys,
            where 'offsets' maps topic -> partition -> offset
        :param storage: String describing where to store the committed offsets.
        """
        # Fetch current offsets
        try:
            consumer_group = parsed_consumer_offsets['groupid']
            topics_offset_data = parsed_consumer_offsets['offsets']
            topic_partitions = dict(
                (topic, [partition for partition in offset_data.keys()])
                for topic, offset_data in topics_offset_data.items()
            )
        except KeyError:  # missing 'groupid' or 'offsets' key
            print(
                "Error: Given parsed consumer-offset data {consumer_offsets} "
                "could not be parsed".format(consumer_offsets=parsed_consumer_offsets),
                file=sys.stderr,
            )
            raise
        current_offsets = get_consumer_offsets_metadata(
            client,
            consumer_group,
            topic_partitions,
            offset_storage=storage,
        )
        # Build new offsets
        new_offsets = cls.build_new_offsets(
            client,
            topics_offset_data,
            topic_partitions,
            current_offsets,
        )

        # Commit offsets
        consumer_group = parsed_consumer_offsets['groupid']
        set_consumer_offsets(
            client,
            consumer_group,
            new_offsets,
            offset_storage=storage,
        )
        print("Restored to new offsets {offsets}".format(offsets=dict(new_offsets)))
Example #18
    def test_offset_metadata_invalid_partition_subset_no_fail(
        self,
        kafka_client_mock
    ):
        # Partition 99 does not exist, so we expect to have
        # offset metadata ONLY for partition 1.
        expected = [
            ConsumerPartitionOffsets('topic1', 1, 20, 30, 5)
        ]

        actual = get_consumer_offsets_metadata(
            kafka_client_mock,
            self.group,
            {'topic1': [1, 99]},
            raise_on_error=False
        )
        assert 'topic1' in actual
        assert actual['topic1'] == expected
Example #19
    def run(cls, args, cluster_config):
        # Setup the Kafka client
        client = KafkaToolClient(cluster_config.broker_list)
        client.load_metadata_for_topics()

        topics_dict = cls.preprocess_args(
            groupid=args.groupid,
            topic=args.topic,
            partitions=args.partitions,
            cluster_config=cluster_config,
            client=client,
            storage=args.storage,
        )
        try:
            consumer_offsets_metadata = get_consumer_offsets_metadata(
                client,
                args.groupid,
                topics_dict,
                offset_storage=args.storage,
            )
        except KafkaUnavailableError:
            print(
                "Error: Encountered error with Kafka, please try again later.",
                file=sys.stderr,
            )
            raise

        # Warn the user if a topic being subscribed to does not exist in Kafka.
        for topic in topics_dict:
            if topic not in consumer_offsets_metadata:
                print(
                    "Warning: Topic {topic} does not exist in Kafka".format(
                        topic=topic),
                    file=sys.stderr,
                )

        cls.save_offsets(
            consumer_offsets_metadata,
            topics_dict,
            args.json_file,
            args.groupid,
        )
        client.close()
Example #20
    def run(cls, args, cluster_config):
        # Setup the Kafka client
        client = KafkaToolClient(cluster_config.broker_list)
        client.load_metadata_for_topics()

        topics_dict = cls.preprocess_args(
            groupid=args.groupid,
            topic=args.topic,
            partitions=args.partitions,
            cluster_config=cluster_config,
            client=client,
        )
        try:
            consumer_offsets_metadata = get_consumer_offsets_metadata(
                client,
                args.groupid,
                topics_dict,
            )
        except KafkaUnavailableError:
            print(
                "Error: Encountered error with Kafka, please try again later.",
                file=sys.stderr,
            )
            raise

        # Warn the user if a topic being subscribed to does not exist in Kafka.
        for topic in topics_dict:
            if topic not in consumer_offsets_metadata:
                print(
                    "Warning: Topic {topic} does not exist in Kafka"
                    .format(topic=topic),
                    file=sys.stderr,
                )

        cls.save_offsets(
            consumer_offsets_metadata,
            topics_dict,
            args.json_file,
            args.groupid,
        )
        client.close()
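Both run() variants read the same attributes off args; a hypothetical argparse setup that would satisfy them (every flag name here is an assumption inferred from the attribute accesses, not the original tool's CLI):

    import argparse

    # Matches the attributes run() reads above: args.groupid, args.topic,
    # args.partitions, args.storage, args.json_file.
    parser = argparse.ArgumentParser()
    parser.add_argument('--groupid', required=True, help='consumer group id')
    parser.add_argument('--topic', help='restrict to a single topic')
    parser.add_argument('--partitions', type=lambda s: [int(p) for p in s.split(',')],
                        help='comma-separated partition list')
    parser.add_argument('--storage', default='zookeeper',
                        choices=['zookeeper', 'kafka', 'dual'],
                        help='offset storage backend')
    parser.add_argument('--json-file', dest='json_file', required=True,
                        help='file to save offsets to')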
Example #21
    def test_dual_offsets_kafka_error(self, kafka_client_mock):
        with mock.patch.object(
                MyKafkaToolClient,
                'send_offset_fetch_request',
                return_value={},
                autospec=True,
        ) as mock_get_zk, mock.patch.object(
                MyKafkaToolClient,
                'send_offset_fetch_request_kafka',
                side_effect=ConsumerCoordinatorNotAvailableCode('Boom!'),
                autospec=True,
        ) as mock_get_kafka:
            actual = get_consumer_offsets_metadata(
                kafka_client_mock,
                self.group,
                self.topics,
                offset_storage='dual',
            )

            assert mock_get_zk.call_count == 1
            assert mock_get_kafka.call_count == 1
            assert self._has_no_partitions(actual)
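The tests throughout this listing lean on a kafka_client_mock fixture, a MyKafkaToolClient double, and a _has_no_partitions helper, none of which are shown. One plausible shape for that scaffolding, reconstructed from how the tests use it (everything below is an assumption, not code from the original projects):

    import mock
    import pytest


    class MyKafkaToolClient(object):
        """Hypothetical client double; the tests patch methods on the class,
        which is why the fixture hands out a real instance, not a Mock."""

        def load_metadata_for_topics(self, *args, **kwargs):
            pass  # patched with side_effect/autospec in the tests above

        def send_offset_fetch_request(self, *args, **kwargs):
            return {}  # ZooKeeper-backed fetch path

        def send_offset_fetch_request_kafka(self, *args, **kwargs):
            return {}  # Kafka-backed fetch path


    @pytest.fixture
    def kafka_client_mock():
        # mock.patch.object(MyKafkaToolClient, ...) also affects instances,
        # so a plain instance keeps the per-test patches effective.
        return MyKafkaToolClient()


    def _has_no_partitions(offsets_metadata):
        # In the spirit of self._has_no_partitions used above: true when no
        # topic in the result carries any partition offsets.
        return all(not partitions for partitions in offsets_metadata.values())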