def _verify_topics_and_partitions(kafka_client, topics, raise_on_error):
    """Validate that the requested topics and partitions exist in kafka.

    :param kafka_client: kafka client used for topic/partition metadata
        lookups
    :param topics: list of topic names, or dict mapping topic name to an
        iterable of partition ids (normalized by
        _validate_topics_list_or_dict)
    :param raise_on_error: when True, an unknown topic or partition raises;
        when False, unknown topics are skipped and unknown partitions are
        dropped from the result
    :returns: dict mapping each existing topic to its existing partition ids
    :raises UnknownTopic: a topic is missing and raise_on_error is True
    :raises UnknownPartitions: requested partitions are missing and
        raise_on_error is True
    """
    topics = _validate_topics_list_or_dict(topics)
    valid_topics = {}
    for topic, partitions in six.iteritems(topics):
        # The topic must be present in the cluster metadata
        if not kafka_client.has_metadata_for_topic(topic):
            if raise_on_error:
                raise UnknownTopic(
                    "Topic {topic!r} does not exist in "
                    "kafka".format(topic=topic))
            else:
                continue
        if partitions:
            # Explicit partitions were requested: verify each one exists
            unknown_partitions = set(partitions) - \
                set(kafka_client.get_partition_ids_for_topic(topic))
            if unknown_partitions:
                if raise_on_error:
                    # BUG FIX: the original implicit string concatenation
                    # produced "...do notexist in kafka" (missing space)
                    raise UnknownPartitions(
                        "Partitions {partitions!r} for topic {topic!r} do "
                        "not exist in kafka".format(
                            partitions=unknown_partitions,
                            topic=topic,
                        ))
                else:
                    # Best effort: keep only the partitions that exist
                    partitions = set(partitions) - unknown_partitions
        else:
            # No explicit partitions requested: default to all known ones
            partitions = kafka_client.get_partition_ids_for_topic(topic)
        valid_topics[topic] = partitions
    return valid_topics
def get_offset_topic_partition_count(kafka_config):
    """Return the partition count of the consumer offset topic.

    :param kafka_config: kafka cluster configuration exposing a
        ``broker_list`` attribute
    :returns: number of partitions in the consumer offset topic
    :raises UnknownTopic: if the consumer offset topic is absent from the
        cluster metadata
    """
    metadata = get_topic_partition_metadata(kafka_config.broker_list)
    try:
        offset_partitions = metadata[CONSUMER_OFFSET_TOPIC]
    except KeyError:
        raise UnknownTopic("Consumer offset topic is missing.")
    return len(offset_partitions)