Example #1
    def test_output_sorting_percentage(self):
        offsets = OrderedDict(
            [("topic1", [ConsumerPartitionOffsets("t1", 0, 1, 10, 0),
                         ConsumerPartitionOffsets("t1", 1, 2, 10, 0)]),
             ("topic2", [ConsumerPartitionOffsets("t2", 0, 900, 1000, 0)])])

        sorted_dict = OffsetGet.sort_by_distance_percentage(offsets)
        assert list(sorted_dict.keys())[0] == "topic2"
        assert list(sorted_dict.keys())[1] == "topic1"

    def test_output_sorting(self):
        offsets = OrderedDict([("topic2",
                                [ConsumerPartitionOffsets("t2", 0, 0, 10, 0)]),
                               ("topic1", [
                                   ConsumerPartitionOffsets("t1", 0, 5, 10, 0),
                                   ConsumerPartitionOffsets("t1", 1, 9, 10, 0)
                               ])])

        sorted_dict = OffsetGet.sort_by_distance(offsets)
        assert list(sorted_dict.keys())[0] == "topic1"
        assert list(sorted_dict.keys())[1] == "topic2"
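
The two tests above pin down the ordering contract of OffsetGet's helpers: topics with the smallest consumer lag sort first. A minimal sketch consistent with these assertions, assuming ConsumerPartitionOffsets exposes current and highmark fields (the real kafka_utils implementation may differ in its details):

from collections import OrderedDict


def sort_by_distance(offsets_metadata):
    # Total remaining distance per topic: sum of (highmark - current).
    return OrderedDict(sorted(
        offsets_metadata.items(),
        key=lambda kv: sum(o.highmark - o.current for o in kv[1]),
    ))


def sort_by_distance_percentage(offsets_metadata):
    # Same distance, expressed as a fraction of the total highmark.
    return OrderedDict(sorted(
        offsets_metadata.items(),
        key=lambda kv: (
            sum(o.highmark - o.current for o in kv[1]) /
            sum(o.highmark for o in kv[1])
        ),
    ))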
Example #3
    def test_offset_metadata_invalid_partition_subset_no_fail(
            self, kafka_client_mock):
        # Partition 99 does not exist, so we expect to have
        # offset metadata ONLY for partition 1.
        expected = [ConsumerPartitionOffsets('topic1', 1, 20, 30, 5)]

        actual = get_consumer_offsets_metadata(kafka_client_mock,
                                               self.group, {'topic1': [1, 99]},
                                               raise_on_error=False)
        assert 'topic1' in actual
        assert actual['topic1'] == expected
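
# The test above relies on get_consumer_offsets_metadata dropping partitions
# that do not exist when raise_on_error=False: partition 99 is skipped and
# only partition 1 is reported. A hypothetical sketch of that filtering
# contract (the real kafka_utils function queries Kafka and raises on unknown
# topics/partitions when raise_on_error is True):


def filter_known_partitions(known_partitions, requested, raise_on_error=True):
    # Keep only requested partitions that actually exist; optionally fail
    # loudly on the missing ones.
    missing = set(requested) - set(known_partitions)
    if missing and raise_on_error:
        raise KeyError("unknown partitions: %s" % sorted(missing))
    return [p for p in requested if p in known_partitions]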


class TestOffsetSave(object):
    topics_partitions = {
        "topic1": [0, 1, 2],
        "topic2": [0, 1, 2, 3],
        "topic3": [0, 1],
    }
    consumer_offsets_metadata = {
        'topic1': [
            ConsumerPartitionOffsets(topic='topic1', partition=0,
                                     current=10, highmark=655, lowmark=655),
            ConsumerPartitionOffsets(topic='topic1', partition=1,
                                     current=20, highmark=655, lowmark=655),
        ]
    }
    offset_data_file = {'groupid': 'group1', 'offsets': {'topic1': {0: 10, 1: 20}}}
    json_data = {'groupid': 'group1', 'offsets': {'topic1': {'0': 10, '1': 20}}}
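
    # Note: json_data is offset_data_file after a JSON round trip. JSON object
    # keys are always strings, so the integer partition ids 0 and 1 come back
    # as '0' and '1':
    #   json.loads(json.dumps({0: 10, 1: 20})) == {'0': 10, '1': 20}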

    @mock.patch('kafka_utils.kafka_consumer_manager.'
                'commands.offset_save.KafkaToolClient')
    def test_save_offsets(self, mock_client):
        with mock.patch.object(
            OffsetSave,
            "write_offsets_to_file",
            spec=OffsetSave.write_offsets_to_file,
            return_value=[],
        ) as mock_write_offsets:
            filename = 'offset_file'
            consumer_group = 'group1'
            OffsetSave.save_offsets(
                self.consumer_offsets_metadata,
                self.topics_partitions,
                filename,
                consumer_group,
            )

            ordered_args, _ = mock_write_offsets.call_args
            assert ordered_args[0] == filename
            assert ordered_args[1] == self.offset_data_file

    @mock.patch('kafka_utils.kafka_consumer_manager.'
                'commands.offset_save.KafkaToolClient')
    def test_run(self, mock_client):
        with mock.patch.object(
            OffsetSave,
            "preprocess_args",
            spec=OffsetSave.preprocess_args,
            return_value=self.topics_partitions,
        ), mock.patch(
            "kafka_utils.kafka_consumer_manager."
            "commands.offset_save.get_consumer_offsets_metadata",
            return_value=self.consumer_offsets_metadata,
            autospec=True,
        ), mock.patch.object(
            OffsetSave,
            "write_offsets_to_file",
            spec=OffsetSave.write_offsets_to_file,
            return_value=[],
        ) as mock_write_offsets:
            args = mock.Mock(
                groupid="group1",
                json_file="some_file",
            )
            cluster_config = mock.Mock()
            OffsetSave.run(args, cluster_config)

            mock_client.return_value.load_metadata_for_topics. \
                assert_called_once_with()
            mock_client.return_value.close.assert_called_once_with()
            ordered_args, _ = mock_write_offsets.call_args
            assert ordered_args[0] == "some_file"
            assert ordered_args[1] == self.offset_data_file
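
# Both OffsetSave tests mock out write_offsets_to_file and only assert on the
# arguments it receives. A plausible sketch of such a writer, assuming it
# simply dumps the offsets dict as JSON (hypothetical; the real method may
# differ):
import json


def write_offsets_to_file(json_file_name, offsets_data):
    # Persist the offsets dict so OffsetRestore can read it back later.
    with open(json_file_name, 'w') as f:
        json.dump(offsets_data, f, indent=4)
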
class TestOffsetRestore(object):

    topics_partitions = {
        "topic1": [0, 1, 2],
        "topic2": [0, 1, 2, 3],
        "topic3": [0, 1],
    }
    consumer_offsets_metadata = {
        'topic1': [
            ConsumerPartitionOffsets(topic='topic1',
                                     partition=0,
                                     current=20,
                                     highmark=655,
                                     lowmark=655),
            ConsumerPartitionOffsets(topic='topic1',
                                     partition=1,
                                     current=10,
                                     highmark=655,
                                     lowmark=655)
        ]
    }
    parsed_consumer_offsets = {
        'groupid': 'group1',
        'offsets': {
            'topic1': {
                0: 10,
                1: 20
            }
        }
    }
    new_consumer_offsets = {'topic1': {0: 10, 1: 20}}
    kafka_consumer_offsets = {
        'topic1': [
            ConsumerPartitionOffsets(topic='topic1',
                                     partition=0,
                                     current=30,
                                     highmark=40,
                                     lowmark=10),
            ConsumerPartitionOffsets(topic='topic1',
                                     partition=1,
                                     current=20,
                                     highmark=40,
                                     lowmark=10),
        ]
    }

    @pytest.fixture
    def mock_kafka_client(self):
        mock_kafka_client = mock.MagicMock(spec=KafkaToolClient)
        # Map each topic to its partition list; a bare dict as side_effect
        # would only iterate over the keys on successive calls.
        mock_kafka_client.get_partition_ids_for_topic.side_effect = \
            self.topics_partitions.get
        return mock_kafka_client

    def test_restore_offsets_zk(self, mock_kafka_client):
        with mock.patch(
                "kafka_utils.kafka_consumer_manager."
                "commands.offset_restore.set_consumer_offsets",
                return_value=[],
                autospec=True,
        ) as mock_set_offsets, mock.patch.object(
                OffsetRestore,
                "parse_consumer_offsets",
                spec=OffsetRestore.parse_consumer_offsets,
                return_value=self.parsed_consumer_offsets,
        ), mock.patch(
                "kafka_utils.kafka_consumer_manager."
                "commands.offset_restore.get_consumer_offsets_metadata",
                return_value=self.consumer_offsets_metadata,
                autospec=True,
        ):
            OffsetRestore.restore_offsets(
                mock_kafka_client,
                self.parsed_consumer_offsets,
                'zookeeper',
            )

            ordered_args, _ = mock_set_offsets.call_args
            assert ordered_args[1] == 'group1'
            assert ordered_args[2] == self.new_consumer_offsets

    def test_build_new_offsets(self, mock_kafka_client):
        new_offsets = OffsetRestore.build_new_offsets(
            mock_kafka_client,
            {'topic1': {
                0: 10,
                1: 20
            }},
            {'topic1': [0, 1]},
            self.kafka_consumer_offsets,
        )

        assert new_offsets == self.new_consumer_offsets
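
# test_build_new_offsets expects the requested offsets back unchanged because
# both fall inside the partitions' [lowmark, highmark] window. A minimal
# sketch of that bounding behavior (hypothetical; the real OffsetRestore
# method also validates topics and partitions against the cluster):


def build_new_offsets_sketch(requested_offsets, current_metadata):
    # Clamp each requested offset into the partition's valid range.
    new_offsets = {}
    for topic, partition_offsets in requested_offsets.items():
        watermarks = {o.partition: o for o in current_metadata[topic]}
        new_offsets[topic] = {
            partition: min(max(offset, watermarks[partition].lowmark),
                           watermarks[partition].highmark)
            for partition, offset in partition_offsets.items()
        }
    return new_offsets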