Example #1
0
 def test_get_current_consumer_offsets_unknown_topic(self, kafka_client_mock):
     """A topic that does not exist must raise UnknownTopic."""
     missing = ["something that doesn't exist"]
     with pytest.raises(UnknownTopic):
         get_current_consumer_offsets(
             kafka_client_mock, "this won't even be consulted", missing,
         )
Example #2
0
 def test_get_current_consumer_offsets_unknown_topic(self, kafka_client_mock):
     """Requesting offsets for a nonexistent topic raises UnknownTopic."""
     group = "this won't even be consulted"
     topics = ["something that doesn't exist"]
     with pytest.raises(UnknownTopic):
         get_current_consumer_offsets(kafka_client_mock, group, topics)
Example #3
0
 def test_get_current_consumer_offsets_invalid_partition_subset(self, kafka_client_mock):
     """Asking for an unknown partition (99) raises UnknownPartitions."""
     with pytest.raises(UnknownPartitions):
         get_current_consumer_offsets(
             kafka_client_mock, self.group, {'topic1': [1, 99]},
         )
Example #4
0
 def test_get_current_consumer_offsets_invalid_arguments(self, kafka_client_mock):
     """A topics argument that is neither list nor dict raises TypeError."""
     bad_topics = "this should be a list or dict"
     with pytest.raises(TypeError):
         get_current_consumer_offsets(
             kafka_client_mock, "this won't even be consulted", bad_topics,
         )
Example #5
0
def _get_current_offsets_dual(
    kafka_client,
    group,
    topics,
    raise_on_error,
):
    """Fetch consumer offsets from both Zookeeper and Kafka and merge them,
    keeping the higher offset reported for each partition.
    """
    zk_offsets = get_current_consumer_offsets(
        kafka_client, group, topics, False, 'zookeeper',
    )
    kafka_offsets = {}
    try:
        kafka_offsets = get_current_consumer_offsets(
            kafka_client, group, topics, False, 'kafka',
        )
    except GroupCoordinatorNotAvailableError:
        # No group coordinator: fall back to the Zookeeper offsets alone.
        pass
    return merge_offsets_metadata(topics, zk_offsets, kafka_offsets)
Example #6
0
 def test_get_current_consumer_offsets_invalid_partition_subset(self, kafka_client_mock):
     """Partition 99 of topic1 does not exist, so UnknownPartitions is raised."""
     requested = {'topic1': [1, 99]}
     with pytest.raises(UnknownPartitions):
         get_current_consumer_offsets(kafka_client_mock, self.group, requested)
Example #7
0
 def test_get_current_consumer_offsets_invalid_arguments(self, kafka_client_mock):
     """Passing a plain string as the topics argument raises TypeError."""
     group = "this won't even be consulted"
     with pytest.raises(TypeError):
         get_current_consumer_offsets(
             kafka_client_mock, group, "this should be a list or dict")
 def test_get_current_consumer_offsets_from_kafka(self, topics,
                                                  kafka_client_mock):
     """The default storage fetches offsets via the Kafka-backed request."""
     wrapped = mock.Mock(wraps=kafka_client_mock)
     get_current_consumer_offsets(wrapped, self.group, topics)
     assert wrapped.send_offset_fetch_request_kafka.call_count == 1
Example #9
0
 def test_get_current_consumer_offsets_from_zookeeper(
         self, topics, kafka_client_mock):
     """Zookeeper storage uses the ZK fetch request and never the Kafka one."""
     spy_client = mock.Mock(wraps=kafka_client_mock)
     get_current_consumer_offsets(
         spy_client, self.group, topics, offset_storage='zookeeper')
     assert spy_client.send_offset_fetch_request.call_count == 1
     assert spy_client.send_offset_fetch_request_kafka.call_count == 0
Example #10
0
def step_impl4(context):
    """Offsets for the original group and NEW_GROUP must match."""
    per_group = [
        get_current_consumer_offsets(context.client, group_id, [context.topic])
        for group_id in (context.group, NEW_GROUP)
    ]
    assert per_group[0] == per_group[1]
Example #11
0
def step_impl4(context):
    """Verify the renamed group carries the same offsets as the old group."""
    def fetch(group_id):
        # Both lookups go through the same client and topic.
        return get_current_consumer_offsets(
            context.client, group_id, [context.topic],
        )
    assert fetch(context.group) == fetch(NEW_GROUP)
Example #12
0
 def test_get_current_consumer_offsets_from_kafka(
     self,
     topics,
     kafka_client_mock
 ):
     """With no offset_storage given, the Kafka fetch path is used once."""
     observed = mock.Mock(wraps=kafka_client_mock)
     get_current_consumer_offsets(observed, self.group, topics)
     assert observed.send_offset_fetch_request_kafka.call_count == 1
Example #13
0
 def test_get_current_consumer_offsets_invalid_storage(
         self, topics, kafka_client_mock):
     """An unrecognized offset_storage raises before any request is sent."""
     spy = mock.Mock(wraps=kafka_client_mock)
     with pytest.raises(InvalidOffsetStorageError):
         get_current_consumer_offsets(
             spy, self.group, topics, offset_storage='random_string')
     assert spy.send_offset_fetch_request.call_count == 0
     assert spy.send_offset_fetch_request_kafka.call_count == 0
Example #14
0
 def test_get_current_consumer_offsets_from_zookeeper(
     self,
     topics,
     kafka_client_mock
 ):
     """offset_storage='zookeeper' routes through the ZK fetch request only."""
     tracked = mock.Mock(wraps=kafka_client_mock)
     get_current_consumer_offsets(
         tracked, self.group, topics, offset_storage='zookeeper')
     assert tracked.send_offset_fetch_request.call_count == 1
     assert tracked.send_offset_fetch_request_kafka.call_count == 0
Example #15
0
 def test_get_current_consumer_offsets_invalid_storage(
     self,
     topics,
     kafka_client_mock
 ):
     """Bogus storage names fail fast: no fetch request of either kind."""
     watcher = mock.Mock(wraps=kafka_client_mock)
     with pytest.raises(InvalidOffsetStorageError):
         get_current_consumer_offsets(
             watcher, self.group, topics, offset_storage='random_string',
         )
     for request in (watcher.send_offset_fetch_request,
                     watcher.send_offset_fetch_request_kafka):
         assert request.call_count == 0
Example #16
0
 def test_get_current_consumer_offsets_unknown_topic_no_fail(
         self, kafka_client_mock):
     """With raise_on_error=False an unknown topic yields an empty result."""
     actual = get_current_consumer_offsets(
         kafka_client_mock,
         "this won't even be consulted",
         ["something that doesn't exist"],
         raise_on_error=False,
     )
     assert not actual
Example #17
0
 def test_get_current_consumer_offsets(self, topics, kafka_client_mock):
     """Happy path: known topics return per-partition current offsets."""
     expected = {'topic1': {0: 30, 1: 20, 2: 10}}
     actual = get_current_consumer_offsets(kafka_client_mock, self.group, topics)
     assert actual == expected
Example #18
0
 def rename_group_with_storage_kafka(
     cls,
     client,
     old_groupid,
     new_groupid,
     topics,
 ):
     """Copy Kafka-stored offsets from old_groupid to new_groupid and then
     zero out the old group's offsets for the same topics.
     """
     offsets_to_copy = get_current_consumer_offsets(
         client, old_groupid, topics, offset_storage='kafka',
     )
     set_consumer_offsets(
         client, new_groupid, offsets_to_copy, offset_storage='kafka',
     )
     # Nullify the old group so it no longer appears to own these offsets.
     set_consumer_offsets(
         client, old_groupid, nullify_offsets(topics), offset_storage='kafka',
     )
Example #19
0
 def test_get_current_consumer_offsets(self, topics, kafka_client_mock):
     """All three partitions of topic1 report their stored offsets."""
     result = get_current_consumer_offsets(
         kafka_client_mock, self.group, topics)
     assert result == {'topic1': {0: 30, 1: 20, 2: 10}}
Example #20
0
 def test_get_current_consumer_offsets_invalid_partition_subset_no_fail(
         self, kafka_client_mock):
     """raise_on_error=False drops unknown partitions instead of raising."""
     actual = get_current_consumer_offsets(
         kafka_client_mock,
         self.group,
         {'topic1': [1, 99]},
         raise_on_error=False,
     )
     assert actual['topic1'][1] == 20
     # Partition 99 does not exist so it shouldn't be in the result
     assert 99 not in actual['topic1']
Example #21
0
 def test_get_current_consumer_offsets_unknown_partitions_no_fail(self, kafka_client_mock):
     """Only-unknown partitions with raise_on_error=False give an empty map."""
     result = get_current_consumer_offsets(
         kafka_client_mock, self.group, {'topic1': [99]}, raise_on_error=False)
     assert not result
Example #22
0
 def test_get_current_consumer_offsets_unknown_topic_no_fail(self, kafka_client_mock):
     """Unknown topics are ignored when raise_on_error is disabled."""
     result = get_current_consumer_offsets(
         kafka_client_mock, "this won't even be consulted",
         ["something that doesn't exist"], raise_on_error=False)
     assert not result
Example #23
0
 def test_get_current_consumer_offsets_unknown_partitions_no_fail(self, kafka_client_mock):
     """A request naming only a missing partition returns nothing (no raise)."""
     offsets = get_current_consumer_offsets(kafka_client_mock,
                                            self.group,
                                            {'topic1': [99]},
                                            raise_on_error=False)
     assert not offsets
Example #24
0
def step_impl3(context):
    """The group's offset for the topic must equal SET_OFFSET_KAFKA."""
    context.client = KafkaToolClient(get_cluster_config().broker_list)
    fetched = get_current_consumer_offsets(
        context.client, context.group, [context.topic])
    assert fetched[context.topic][0] == SET_OFFSET_KAFKA
def step_impl5(context):
    """After nullification the group's offset for the topic is zero."""
    cluster = get_cluster_config()
    context.client = KafkaToolClient(cluster.broker_list)
    partition_offsets = get_current_consumer_offsets(
        context.client, context.group, [context.topic],
    )
    assert partition_offsets[context.topic][0] == 0
Example #26
0
 def test_get_current_consumer_offsets_invalid_partition_subset_no_fail(self, kafka_client_mock):
     """Known partition 1 is returned; missing partition 99 is skipped."""
     offsets = get_current_consumer_offsets(
         kafka_client_mock, self.group, {'topic1': [1, 99]},
         raise_on_error=False)
     assert offsets['topic1'][1] == 20
     # Partition 99 does not exist so it shouldn't be in the result
     assert 99 not in offsets['topic1']
Example #27
0
 def delete_topic(self, group, topic):
     """Zero out *group*'s committed offsets for *topic*."""
     current = get_current_consumer_offsets(self.client, group, [topic])
     set_consumer_offsets(self.client, group, nullify_offsets(current))
 def delete_topic(self, group, topic):
     """Reset the consumer group's committed offsets for a single topic."""
     offsets = get_current_consumer_offsets(
         self.client,
         group,
         [topic],
     )
     # Writing nullified offsets effectively removes the group's claim.
     set_consumer_offsets(self.client, group, nullify_offsets(offsets))
Example #29
0
 def copy_group_kafka(cls, client, topics, source_group, destination_group):
     """Duplicate Kafka-stored offsets from source_group to destination_group."""
     source_offsets = get_current_consumer_offsets(
         client, source_group, topics, offset_storage='kafka',
     )
     set_consumer_offsets(
         client, destination_group, source_offsets, offset_storage='kafka',
     )
Example #30
0
def get_consumer_offsets_metadata(
    kafka_client,
    group,
    topics,
    raise_on_error=True,
):
    """This method:
        * refreshes metadata for the kafka client
        * fetches group offsets
        * fetches watermarks

    :param kafka_client: KafkaToolClient instance
    :param group: group id
    :param topics: list of topics
    :param raise_on_error: if False the method ignores missing topics and
      missing partitions. It still may fail on the request send.
    :returns: dict <topic>: [ConsumerPartitionOffsets]
    """
    # Refresh client metadata. We do not use the topic list, because we
    # don't want to accidentally create the topic if it does not exist.
    # If Kafka is unavailable, let's retry loading client metadata
    try:
        kafka_client.load_metadata_for_topics()
    except KafkaUnavailableError:
        kafka_client.load_metadata_for_topics()

    group_offsets = get_current_consumer_offsets(
        kafka_client, group, topics, raise_on_error,
    )
    watermarks = get_topics_watermarks(
        kafka_client, topics, raise_on_error,
    )

    # Join current offsets with the matching high/low watermarks.
    return {
        topic: [
            ConsumerPartitionOffsets(
                topic=topic,
                partition=partition,
                current=current,
                highmark=watermarks[topic][partition].highmark,
                lowmark=watermarks[topic][partition].lowmark,
            )
            for partition, current in six.iteritems(partitions)
        ]
        for topic, partitions in six.iteritems(group_offsets)
    }
Example #31
0
 def rename_group(
     cls,
     client,
     old_groupid,
     new_groupid,
     topics,
 ):
     """Move *old_groupid*'s offsets to *new_groupid*, then zero the old ones."""
     offsets_to_move = get_current_consumer_offsets(client, old_groupid, topics)
     set_consumer_offsets(client, new_groupid, offsets_to_move)
     # Leave the old group with null offsets so it no longer tracks progress.
     set_consumer_offsets(client, old_groupid, nullify_offsets(topics))
Example #32
0
 def rename_group(
     cls,
     client,
     old_groupid,
     new_groupid,
     topics,
 ):
     """Copy offsets from the old group to the new one and reset the old group."""
     current = get_current_consumer_offsets(client, old_groupid, topics)
     # First give the new group the copied offsets, then null out the old group.
     for target, payload in ((new_groupid, current),
                             (old_groupid, nullify_offsets(topics))):
         set_consumer_offsets(client, target, payload)
Example #33
0
def get_consumer_offsets_metadata(
    kafka_client,
    group,
    topics,
    raise_on_error=True,
):
    """This method:
        * refreshes metadata for the kafka client
        * fetches group offsets
        * fetches watermarks

    :param kafka_client: KafkaToolClient instance
    :param group: group id
    :param topics: list of topics
    :param raise_on_error: if False the method ignores missing topics and
      missing partitions. It still may fail on the request send.
    :returns: dict <topic>: [ConsumerPartitionOffsets]
    """
    # Metadata refresh deliberately omits the topic list so a missing topic
    # is not auto-created; retry once if Kafka is momentarily unavailable.
    try:
        kafka_client.load_metadata_for_topics()
    except KafkaUnavailableError:
        kafka_client.load_metadata_for_topics()

    current_offsets = get_current_consumer_offsets(
        kafka_client, group, topics, raise_on_error)
    topic_watermarks = get_topics_watermarks(
        kafka_client, topics, raise_on_error)

    metadata = {}
    for topic_name, partition_offsets in six.iteritems(current_offsets):
        entries = []
        for partition_id in partition_offsets:
            marks = topic_watermarks[topic_name][partition_id]
            entries.append(
                ConsumerPartitionOffsets(
                    topic=topic_name,
                    partition=partition_id,
                    current=partition_offsets[partition_id],
                    highmark=marks.highmark,
                    lowmark=marks.lowmark,
                )
            )
        metadata[topic_name] = entries
    return metadata
Example #34
0
def get_current_offsets(
    kafka_client,
    group,
    topics,
    raise_on_error,
    offset_storage,
):
    """Get the current consumer offsets from either Zookeeper or Kafka
    or the combination of both.
    """
    # 'dual' merges both backends; any other unrecognized value is an error.
    if offset_storage == 'dual':
        return _get_current_offsets_dual(
            kafka_client, group, topics, raise_on_error,
        )
    if offset_storage not in ('zookeeper', 'kafka'):
        raise InvalidOffsetStorageError(offset_storage)
    return get_current_consumer_offsets(
        kafka_client, group, topics, raise_on_error, offset_storage,
    )
Example #35
0
def get_consumer_offset(topics, group, storage='zookeeper'):
    """Fetch the current consumer offsets for *topics* under *group*.

    :param topics: list of topics (or dict topic -> partitions)
    :param group: consumer group id
    :param storage: offset storage backend, 'zookeeper' or 'kafka'
    :returns: dict <topic>: {<partition>: offset}
    """
    client = KafkaToolClient(KAFKA_URL)
    # Pass the backend by keyword: the fourth positional parameter of
    # get_current_consumer_offsets is raise_on_error, so the original
    # positional call used *storage* as that flag and the requested
    # backend was silently ignored.
    return get_current_consumer_offsets(
        client, group, topics, offset_storage=storage,
    )
Example #36
0
 def copy_group_kafka(cls, client, topics, source_group, destination_group):
     """Copy consumer offsets for *topics* from source to destination group."""
     offsets = get_current_consumer_offsets(client, source_group, topics)
     set_consumer_offsets(client, destination_group, offsets)