Esempio n. 1
0
    def test_set_consumer_offsets_kafka(
        self,
        topics,
        kafka_client_mock
    ):
        """Committing with kafka storage must use only the kafka-specific API."""
        wrapped_client = mock.Mock(wraps=kafka_client_mock)
        offsets_to_commit = {
            topic: {0: base, 1: base * 2}
            for topic, base in (('topic1', 100), ('topic2', 150))
        }

        set_consumer_offsets(
            wrapped_client,
            "group",
            offsets_to_commit,
            raise_on_error=True,
            offset_storage='kafka',
        )
        # Zookeeper-style commit path must stay untouched.
        assert wrapped_client.send_offset_commit_request.call_count == 0
        assert wrapped_client.send_offset_commit_request_kafka.call_count == 1
Esempio n. 2
0
    def test_set_consumer_offsets_invalid_storage(
        self,
        topics,
        kafka_client_mock
    ):
        """An unknown offset_storage value must raise and commit nothing."""
        wrapped_client = mock.Mock(wraps=kafka_client_mock)
        offsets_to_commit = {
            topic: {0: low, 1: high}
            for topic, low, high in (('topic1', 100, 200), ('topic2', 150, 300))
        }

        with pytest.raises(InvalidOffsetStorageError):
            set_consumer_offsets(
                wrapped_client,
                "group",
                offsets_to_commit,
                raise_on_error=True,
                offset_storage='randon_string',
            )
        # Neither commit path may have been exercised.
        assert wrapped_client.send_offset_commit_request.call_count == 0
        assert wrapped_client.send_offset_commit_request_kafka.call_count == 0
Esempio n. 3
0
 def unsubscribe_partitions(self, group, topic, partitions):
     """Commit nulled offsets for the given partitions of *topic* for *group*."""
     zeroed = {topic: dict.fromkeys(partitions, 0)}
     set_consumer_offsets(self.client, group, nullify_offsets(zeroed))
Esempio n. 4
0
 def rename_group_with_storage_kafka(
     cls,
     client,
     old_groupid,
     new_groupid,
     topics,
 ):
     """Copy kafka-stored offsets from the old group to the new one, then
     null the old group's offsets.
     """
     offsets_to_copy = get_current_consumer_offsets(
         client, old_groupid, topics, offset_storage='kafka',
     )
     set_consumer_offsets(
         client, new_groupid, offsets_to_copy, offset_storage='kafka',
     )
     set_consumer_offsets(
         client, old_groupid, nullify_offsets(topics), offset_storage='kafka',
     )
Esempio n. 5
0
    def test_set_consumer_offsets_invalid_storage(
        self,
        topics,
        kafka_client_mock
    ):
        """Passing a bogus offset_storage raises and performs no commits."""
        spy_client = mock.Mock(wraps=kafka_client_mock)
        requested_offsets = {}
        for topic, start in (('topic1', 100), ('topic2', 150)):
            requested_offsets[topic] = {0: start, 1: start * 2}

        with pytest.raises(InvalidOffsetStorageError):
            set_consumer_offsets(
                spy_client,
                "group",
                requested_offsets,
                raise_on_error=True,
                offset_storage='randon_string',
            )
        assert spy_client.send_offset_commit_request.call_count == 0
        assert spy_client.send_offset_commit_request_kafka.call_count == 0
Esempio n. 6
0
    def test_set_consumer_offsets_kafka(
        self,
        topics,
        kafka_client_mock
    ):
        """kafka offset_storage routes commits through the kafka request only."""
        spy_client = mock.Mock(wraps=kafka_client_mock)
        requested_offsets = {}
        for topic, start in (('topic1', 100), ('topic2', 150)):
            requested_offsets[topic] = {0: start, 1: start * 2}

        set_consumer_offsets(
            spy_client,
            "group",
            requested_offsets,
            raise_on_error=True,
            offset_storage='kafka',
        )
        assert spy_client.send_offset_commit_request.call_count == 0
        assert spy_client.send_offset_commit_request_kafka.call_count == 1
Esempio n. 7
0
 def delete_group_kafka(cls, client, group, topics):
     """Delete *group* by committing nulled offsets to kafka storage."""
     set_consumer_offsets(
         client,
         group,
         nullify_offsets(topics),
         offset_storage='kafka',
     )
Esempio n. 8
0
def set_consumer_group_offset(topic, group, offset):
    """Commit *offset* as the partition-0 offset of *topic* for *group*,
    raising on any commit failure.
    """
    offsets = {topic: {0: offset}}
    set_consumer_offsets(
        KafkaToolClient(KAFKA_URL),
        group,
        offsets,
        raise_on_error=True,
    )
Esempio n. 9
0
 def delete_group_kafka(cls, client, group, topics):
     """Null all kafka-stored offsets of *group*, effectively deleting it."""
     nulled = nullify_offsets(topics)
     set_consumer_offsets(client, group, nulled, offset_storage='kafka')
Esempio n. 10
0
def create_consumer_group(topic, group_name, num_messages=1):
    """Create *group_name* by committing *num_messages* as its partition-0
    offset on *topic*; return the connected client.
    """
    client = KafkaToolClient(KAFKA_URL)
    initial_offsets = {topic: {0: num_messages}}
    set_consumer_offsets(client, group_name, initial_offsets, raise_on_error=True)
    return client
Esempio n. 11
0
def commit_offsets(offsets, group):
    """Commit *offsets* for *group* on the configured cluster, then disconnect."""
    client = KafkaToolClient(get_cluster_config().broker_list)
    set_consumer_offsets(client, group, offsets)
    client.close()
Esempio n. 12
0
def set_consumer_group_offset(topic, group, offset):
    """Commit *offset* for partition 0 of *topic* under *group*, failing loudly."""
    set_consumer_offsets(
        KafkaToolClient(KAFKA_URL),
        group,
        {topic: {0: offset}},
        raise_on_error=True,
    )
Esempio n. 13
0
def commit_offsets(offsets, group):
    """Commit *offsets* for *group* against the configured cluster."""
    cluster = get_cluster_config()
    client = KafkaToolClient(cluster.broker_list)
    set_consumer_offsets(client, group, offsets)
    client.close()
Esempio n. 14
0
 def delete_topic(self, group, topic):
     """Drop *topic* from *group* by nulling its current offsets."""
     current = get_current_consumer_offsets(self.client, group, [topic])
     set_consumer_offsets(self.client, group, nullify_offsets(current))
Esempio n. 15
0
 def delete_topic(self, group, topic):
     """Null the group's offsets for *topic*, removing it from the group."""
     set_consumer_offsets(
         self.client,
         group,
         nullify_offsets(
             get_current_consumer_offsets(self.client, group, [topic])
         ),
     )
Esempio n. 16
0
def create_consumer_group(topic, group_name, num_messages=1):
    """Register *group_name* with *num_messages* committed on partition 0 of
    *topic*; return the client used for the commit.
    """
    client = KafkaToolClient(KAFKA_URL)
    set_consumer_offsets(
        client, group_name, {topic: {0: num_messages}}, raise_on_error=True,
    )
    return client
Esempio n. 17
0
def create_consumer_group_with_kafka_storage(topic, group_name):
    """Create *group_name* with its offset stored in kafka; return the client."""
    client = KafkaToolClient(KAFKA_URL)
    seed_offsets = {topic: {0: 1}}
    set_consumer_offsets(
        client,
        group_name,
        seed_offsets,
        offset_storage='kafka',
        raise_on_error=True,
    )
    return client
Esempio n. 18
0
 def unsubscribe_partitions(self, group, topic, partitions):
     """Null the given *partitions* of *topic* for *group*."""
     zeroed = {topic: dict((partition, 0) for partition in partitions)}
     set_consumer_offsets(self.client, group, nullify_offsets(zeroed))
Esempio n. 19
0
 def copy_group_kafka(cls, client, topics, source_group, destination_group):
     """Copy kafka-stored offsets of *topics* from *source_group* to
     *destination_group*.
     """
     offsets = get_current_consumer_offsets(
         client, source_group, topics, offset_storage='kafka',
     )
     set_consumer_offsets(
         client, destination_group, offsets, offset_storage='kafka',
     )
Esempio n. 20
0
def initialize_kafka_offsets_topic():
    """Force creation of the __consumer_offsets topic by committing one offset."""
    if '__consumer_offsets' in list_topics():
        return
    topic = create_random_topic(1, 1)
    produce_example_msg(topic, num_messages=1)
    set_consumer_offsets(
        KafkaToolClient(KAFKA_URL),
        create_random_group_id(),
        {topic: {0: 1}},
        raise_on_error=True,
    )
    # Give the broker time to materialize the offsets topic.
    time.sleep(20)
Esempio n. 21
0
def initialize_kafka_offsets_topic():
    """Ensure the __consumer_offsets topic exists by committing a single offset."""
    if '__consumer_offsets' in list_topics():
        return
    topic = create_random_topic(1, 1)
    produce_example_msg(topic, num_messages=1)
    client = KafkaToolClient(KAFKA_URL)
    set_consumer_offsets(
        client,
        create_random_group_id(),
        {topic: {0: 1}},
        raise_on_error=True,
    )
    time.sleep(20)  # allow the broker to finish creating the topic
Esempio n. 22
0
    def restore_offsets(cls, client, parsed_consumer_offsets, storage):
        """Fetch current offsets from kafka, validate them against given
        consumer-offsets data and commit the new offsets.

        :param client: Kafka-client
        :param parsed_consumer_offsets: Parsed consumer offset data from json file
        :type parsed_consumer_offsets: dict(group: dict(topic: partition-offsets))
        :param storage: String describing where to store the committed offsets.
        """
        # Fetch current offsets
        try:
            consumer_group = parsed_consumer_offsets['groupid']
            topics_offset_data = parsed_consumer_offsets['offsets']
            topic_partitions = dict(
                (topic, list(offset_data.keys()))
                for topic, offset_data in topics_offset_data.items())
        # BUG FIX: dict subscript lookups raise KeyError, never IndexError,
        # so the original `except IndexError` could not catch a missing
        # 'groupid'/'offsets' key. IndexError kept for backward compatibility.
        except (KeyError, IndexError):
            print(
                "Error: Given parsed consumer-offset data {consumer_offsets} "
                "could not be parsed".format(
                    consumer_offsets=parsed_consumer_offsets),
                file=sys.stderr,
            )
            raise
        current_offsets = get_consumer_offsets_metadata(
            client,
            consumer_group,
            topic_partitions,
            offset_storage=storage,
        )
        # Build new offsets from the requested data, validated against current.
        new_offsets = cls.build_new_offsets(
            client,
            topics_offset_data,
            topic_partitions,
            current_offsets,
        )

        # Commit offsets (consumer_group was already extracted above).
        set_consumer_offsets(
            client,
            consumer_group,
            new_offsets,
            offset_storage=storage,
        )
        print("Restored to new offsets {offsets}".format(
            offsets=dict(new_offsets)))
Esempio n. 23
0
    def test_set_consumer_offsets_fail(self, kafka_client_mock):
        """Failed commits report one error per partition and leave the mock's
        offsets untouched.
        """
        kafka_client_mock.set_commit_error()
        new_offsets = {
            'topic1': {0: 100, 1: 200},
            'topic2': {0: 150, 1: 300},
        }
        expected_status = [
            OffsetCommitError(topic, partition, RequestTimedOutError.message)
            for topic in ('topic1', 'topic2')
            for partition in (0, 1)
        ]

        status = set_consumer_offsets(
            kafka_client_mock,
            "group",
            new_offsets,
            raise_on_error=True,
        )

        assert len(status) == len(expected_status)
        for expected in expected_status:
            assert any(actual == expected for actual in status)
        assert kafka_client_mock.group_offsets == self.group_offsets
Esempio n. 24
0
    def test_set_consumer_offsets(self, kafka_client_mock):
        """A successful commit updates the mock's offsets and returns no errors."""
        new_offsets = {
            topic: dict(pairs)
            for topic, pairs in (
                ('topic1', ((0, 100), (1, 200))),
                ('topic2', ((0, 150), (1, 300))),
            )
        }

        status = set_consumer_offsets(kafka_client_mock, "group", new_offsets)

        assert status == []
        assert kafka_client_mock.group_offsets == {
            'topic1': {0: 100, 1: 200, 2: 10},
            'topic2': {0: 150, 1: 300},
        }
Esempio n. 25
0
    def test_set_consumer_offsets_fail(self, kafka_client_mock):
        """When commits time out, every partition yields an OffsetCommitError
        and group offsets stay as they were.
        """
        kafka_client_mock.set_commit_error()
        new_offsets = {
            'topic1': {0: 100, 1: 200},
            'topic2': {0: 150, 1: 300},
        }
        expected_status = []
        for topic, partitions in sorted(new_offsets.items()):
            for partition in sorted(partitions):
                expected_status.append(
                    OffsetCommitError(topic, partition, RequestTimedOutError.message)
                )

        status = set_consumer_offsets(
            kafka_client_mock,
            "group",
            new_offsets,
            raise_on_error=True,
        )

        assert len(status) == len(expected_status)
        for expected in expected_status:
            assert any(actual == expected for actual in status)
        assert kafka_client_mock.group_offsets == self.group_offsets
Esempio n. 26
0
    def test_set_consumer_offsets(self, kafka_client_mock):
        """Committing valid offsets succeeds with an empty status list."""
        new_offsets = {
            'topic1': {0: 100, 1: 200},
            'topic2': {0: 150, 1: 300},
        }

        status = set_consumer_offsets(
            kafka_client_mock,
            "group",
            new_offsets,
        )

        # Partition (topic1, 2) is absent from new_offsets, so its value
        # in the expected result is whatever the mock already held (10).
        expected_offsets = {
            'topic1': {0: 100, 1: 200, 2: 10},
            'topic2': {0: 150, 1: 300},
        }

        assert set(status) == set([])
        assert kafka_client_mock.group_offsets == expected_offsets
Esempio n. 27
0
    def restore_offsets(cls, client, parsed_consumer_offsets, storage):
        """Fetch current offsets from kafka, validate them against given
        consumer-offsets data and commit the new offsets.

        :param client: Kafka-client
        :param parsed_consumer_offsets: Parsed consumer offset data from json file
        :type parsed_consumer_offsets: dict(group: dict(topic: partition-offsets))
        :param storage: String describing where to store the committed offsets.
        """
        # Fetch current offsets
        try:
            consumer_group = parsed_consumer_offsets['groupid']
            topics_offset_data = parsed_consumer_offsets['offsets']
            topic_partitions = dict(
                (topic, list(offset_data.keys()))
                for topic, offset_data in topics_offset_data.items()
            )
        # BUG FIX: missing 'groupid'/'offsets' keys raise KeyError — the
        # original `except IndexError` never fired for dict lookups.
        # IndexError kept for backward compatibility.
        except (KeyError, IndexError):
            print(
                "Error: Given parsed consumer-offset data {consumer_offsets} "
                "could not be parsed".format(consumer_offsets=parsed_consumer_offsets),
                file=sys.stderr,
            )
            raise
        current_offsets = get_consumer_offsets_metadata(
            client,
            consumer_group,
            topic_partitions,
            offset_storage=storage,
        )
        # Build new offsets from the requested data, validated against current.
        new_offsets = cls.build_new_offsets(
            client,
            topics_offset_data,
            topic_partitions,
            current_offsets,
        )

        # Commit offsets (consumer_group was already extracted above).
        set_consumer_offsets(
            client,
            consumer_group,
            new_offsets,
            offset_storage=storage,
        )
        print("Restored to new offsets {offsets}".format(offsets=dict(new_offsets)))
Esempio n. 28
0
 def rename_group(
     cls,
     client,
     old_groupid,
     new_groupid,
     topics,
 ):
     """Move the offsets of *topics* from *old_groupid* to *new_groupid*,
     then null the old group's offsets.
     """
     current = get_current_consumer_offsets(client, old_groupid, topics)
     set_consumer_offsets(client, new_groupid, current)
     set_consumer_offsets(client, old_groupid, nullify_offsets(topics))
Esempio n. 29
0
 def rename_group(
     cls,
     client,
     old_groupid,
     new_groupid,
     topics,
 ):
     """Rename a consumer group: copy its offsets to the new id and null the
     old id's offsets.
     """
     set_consumer_offsets(
         client,
         new_groupid,
         get_current_consumer_offsets(client, old_groupid, topics),
     )
     set_consumer_offsets(client, old_groupid, nullify_offsets(topics))
Esempio n. 30
0
    def run(cls, args, cluster_config):
        """Commit cls.new_offsets_dict for args.groupid, printing per-partition
        failures to stderr and exiting non-zero on any commit error.
        """
        # Setup the Kafka client
        client = KafkaToolClient(cluster_config.broker_list)
        client.load_metadata_for_topics()

        # Unless forced, verify the consumer group actually exists first.
        if not args.force:
            cls.get_topics_from_consumer_group_id(
                cluster_config,
                args.groupid,
                storage=args.storage,
            )

        try:
            results = set_consumer_offsets(
                client,
                args.groupid,
                cls.new_offsets_dict,
                offset_storage=args.storage,
            )
        except TypeError:
            print(
                "Error: Badly formatted input, please re-run command "
                "with --help option.",
                file=sys.stderr)
            raise

        client.close()

        if not results:
            return
        error_lines = ["Error: Unable to commit consumer offsets for:\n"]
        for result in results:
            error_lines.append(
                "  Topic: {topic} Partition: {partition} Error: {error}\n".format(
                    topic=result.topic,
                    partition=result.partition,
                    error=result.error,
                )
            )
        print("".join(error_lines), file=sys.stderr)
        sys.exit(1)
Esempio n. 31
0
    def run(cls, args, cluster_config):
        """Commit cls.new_offsets_dict for args.groupid; report commit errors
        to stderr and exit with status 1 if any occurred.
        """
        # Setup the Kafka client
        client = KafkaToolClient(cluster_config.broker_list)
        client.load_metadata_for_topics()

        # Unless forced, verify the consumer group actually exists first.
        if not args.force:
            cls.get_topics_from_consumer_group_id(
                cluster_config,
                args.groupid,
            )

        try:
            results = set_consumer_offsets(
                client,
                args.groupid,
                cls.new_offsets_dict,
                offset_storage=args.storage,
            )
        except TypeError:
            print(
                "Error: Badly formatted input, please re-run command "
                "with --help option.", file=sys.stderr
            )
            raise

        client.close()

        if not results:
            return
        failure_report = "Error: Unable to commit consumer offsets for:\n"
        for result in results:
            failure_report += (
                "  Topic: {topic} Partition: {partition} Error: {error}\n".format(
                    topic=result.topic,
                    partition=result.partition,
                    error=result.error
                )
            )
        print(failure_report, file=sys.stderr)
        sys.exit(1)
Esempio n. 32
0
    def test_set_consumer_offsets(self, kafka_client_mock):
        """A successful commit returns one OK payload per committed partition."""
        new_offsets = {
            'topic1': {0: 100, 1: 200},
            'topic2': {0: 150, 1: 300},
        }

        status = set_consumer_offsets(
            kafka_client_mock,
            "group",
            new_offsets,
        )

        # Partition (topic1, 2) was not committed, so it keeps the mock's
        # pre-existing offset of 10.
        expected_offsets = {
            'topic1': {0: 100, 1: 200, 2: 10},
            'topic2': {0: 150, 1: 300},
        }
        expected_status = [
            OffsetCommitResponsePayload(topic, partition, 0)
            for topic in ('topic1', 'topic2')
            for partition in (0, 1)
        ]

        assert set(status) == set(expected_status)
        assert kafka_client_mock.group_offsets == expected_offsets
Esempio n. 33
0
 def copy_group_kafka(cls, client, topics, source_group, destination_group):
     """Copy the current offsets of *topics* from *source_group* to
     *destination_group*.
     """
     set_consumer_offsets(
         client,
         destination_group,
         get_current_consumer_offsets(client, source_group, topics),
     )