def test_kafka_info_consumer_group(host):
    """
    Check if can get info on consumer groups.
    """
    # Given: a fresh topic consumed by a fresh consumer group
    topic = get_topic_name()
    ensure_topic(host, topic_defaut_configuration, topic)
    time.sleep(0.3)

    group_id = get_consumer_group()
    message_count = 42

    # When: messages are produced and consumed under that group
    produce_and_consume_topic(topic, message_count, group_id)
    time.sleep(0.3)

    # Then: every broker's consumer_group info reports the group
    info = call_kafka_info(host, {'resource': 'consumer_group'})
    for entry in info:
        assert group_id in entry['ansible_module_results']
def test_kafka_info_topics_config_not_include_defaults(host):
    """
    Check if can get config on topic.
    """
    # Given: a topic with one explicitly overridden option
    topic = get_topic_name()
    configuration = topic_defaut_configuration.copy()
    configuration.update({'options': {'retention.ms': 66574936}})
    ensure_topic(host, configuration, topic)
    time.sleep(0.3)

    # When: topic config is fetched without broker defaults
    info = call_kafka_info(
        host,
        {'resource': 'topic-config', 'include_defaults': False},
    )

    # Then: only the overridden option appears, not defaulted ones
    for entry in info:
        module_results = entry['ansible_module_results']
        assert topic in module_results
        topic_options = module_results[topic]
        assert int(topic_options['retention.ms']) == 66574936
        assert 'min.insync.replicas' not in topic_options
def test_kafka_info_topic(host):
    """
    Check if can get info on topic.
    """
    # Given: one default topic with traffic, plus one topic whose
    # min.insync.replicas exceeds its replica count (under-min-ISR)
    default_topic = get_topic_name()
    ensure_topic(host, topic_defaut_configuration, default_topic)
    time.sleep(0.3)

    isr_topic = get_topic_name()
    isr_configuration = topic_defaut_configuration.copy()
    isr_configuration.update({'options': {'min.insync.replicas': 2}})
    ensure_topic(host, isr_configuration, isr_topic)
    time.sleep(0.3)

    produce_and_consume_topic(default_topic, 10, get_consumer_group())
    time.sleep(0.3)

    # When
    info = call_kafka_info(host, {'resource': 'topic'})

    # Then: offsets and health flags are reported per partition,
    # internal topics are excluded by default
    for entry in info:
        module_results = entry['ansible_module_results']
        assert default_topic in module_results
        assert '__consumer_offsets' not in module_results
        for name, partition_map in module_results.items():
            for _partition, state in partition_map.items():
                assert 'earliest_offset' in state
                assert 'latest_offset' in state
                assert state['earliest_offset'] >= 0
                if name == default_topic:
                    # 10 messages were produced to this topic
                    assert state['latest_offset'] == 10
                else:
                    assert state['latest_offset'] >= 0
                if name == default_topic:
                    assert state['at_min_isr']
                    assert not state['under_replicated']
                    assert not state['under_min_isr']
                    assert not state['unavailable_partition']
                elif name == isr_topic:
                    assert not state['at_min_isr']
                    assert not state['under_replicated']
                    assert state['under_min_isr']
                    assert not state['unavailable_partition']
def test_kafka_info_brokers():
    """
    Check if can get info on brokers.
    """
    # Given: at least one topic so the cluster has state to report
    topic = get_topic_name()
    ensure_topic(localhost, topic_defaut_configuration, topic)
    time.sleep(0.5)

    # When
    broker_info = call_kafka_info(localhost, {'resource': 'broker'})

    # Then: the test cluster exposes exactly two brokers
    for entry in broker_info:
        assert len(entry['ansible_module_results']) == 2
def test_kafka_info_topic_simple(host):
    """
    Check if can get basic info on topic.

    NOTE(review): renamed from ``test_kafka_info_topic`` — a richer test
    with that exact name is defined earlier in this file, and in Python
    the later definition rebinds the name, so pytest would collect only
    this minimal variant and silently skip the detailed one.
    """
    # Given: a topic with the default configuration
    topic_name = get_topic_name()
    ensure_topic(host, topic_defaut_configuration, topic_name)
    time.sleep(0.3)  # give brokers time to propagate the new topic
    # When
    results = call_kafka_info(host, {'resource': 'topic'})
    # Then: every broker's view includes the topic
    for r in results:
        assert topic_name in r['ansible_module_results']
def test_kafka_info_topic_include_internal(host):
    """
    Check if can get info on topic.
    """
    # Given: a default topic with traffic and a second topic whose
    # min.insync.replicas is raised to 2
    default_topic = get_topic_name()
    ensure_topic(host, topic_defaut_configuration, default_topic)
    time.sleep(0.3)

    isr_topic = get_topic_name()
    isr_configuration = topic_defaut_configuration.copy()
    isr_configuration.update({'options': {'min.insync.replicas': 2}})
    ensure_topic(host, isr_configuration, isr_topic)
    time.sleep(0.3)

    produce_and_consume_topic(default_topic, 10, get_consumer_group())
    time.sleep(0.3)

    # When: internal topics are explicitly requested
    info = call_kafka_info(
        host,
        {'resource': 'topic', 'include_internal': True}
    )

    # Then: both the user topic and __consumer_offsets are listed
    for entry in info:
        module_results = entry['ansible_module_results']
        assert default_topic in module_results
        assert '__consumer_offsets' in module_results
def test_kafka_info_acl(host):
    """
    Check if can get info on acl.
    """
    # Given: an ACL created from the default template
    resource_name = get_acl_name()
    acl_configuration = acl_defaut_configuration.copy()
    acl_configuration.update({
        'name': resource_name,
        'state': 'present',
        **sasl_default_configuration
    })
    ensure_acl(host, acl_configuration)
    time.sleep(0.3)

    # When
    info = call_kafka_info(host, {'resource': 'acl'})

    expected_acl = {
        'resource_type': 'topic',
        'operation': 'write',
        'permission_type': 'allow',
        'resource_name': resource_name,
        'principal': 'User:common',
        'host': '*',
        'pattern_type': 'literal'
    }

    # Then: the ACL shows up under its topic resource on every broker
    for entry in info:
        topic_acls = entry['ansible_module_results']['topic'][resource_name]
        assert expected_acl in topic_acls
def test_rebalance_all_partitions_preserve_leader(host):
    """
    Check if can reassign all topics-partition with leader conservation.
    """
    # Given: a 50-partition topic (RF=2) that will be force-reassigned
    test_topic_name = get_topic_name()
    test_topic_configuration = topic_defaut_configuration.copy()
    test_topic_configuration.update({
        'partitions': 50,
        'replica_factor': 2,
        'preserve_leader': True
    })
    ensure_kafka_topic(host, test_topic_configuration, test_topic_name)
    time.sleep(0.3)
    # Snapshot the current per-topic options (explicit ones only) and the
    # current topic/partition layout, internal topics included.
    topics_config = call_kafka_info(
        host,
        {
            'resource': 'topic-config',
            'include_internal': True,
            'include_defaults': False
        })
    topics = call_kafka_info(host, {
        'resource': 'topic',
        'include_internal': True
    })
    # NOTE(review): only the first result entry is used here — presumably
    # each entry is one broker's view and they agree; verify against
    # call_kafka_info's contract.
    all_topics = topics[0]['ansible_module_results']
    all_topics_config = topics_config[0]['ansible_module_results']
    # Build the reassignment request: the test topic plus every internal
    # ('__'-prefixed) topic, each marked force_reassign + preserve_leader.
    test_topics = []
    for topic_name, partitions in all_topics.items():
        if topic_name == test_topic_name or topic_name.startswith('__'):
            test_topics.append({
                'name': topic_name,
                'partitions': len(partitions),
                # Partition keys are strings; '0' always exists, so its
                # replica list gives the current replication factor.
                'replica_factor': len(partitions['0']['replicas']),
                'options': all_topics_config[topic_name],
                'force_reassign': True,
                'preserve_leader': True,
                'state': 'present'
            })
    # Record, per result entry, the pre-reassignment leader of every
    # (topic, partition) pair we are about to move.
    existing_leaders = []
    for i, broker_topics in enumerate(topics):
        existing_leaders.append({})
        for topic_name, partitions in (
                broker_topics['ansible_module_results'].items()):
            if topic_name == test_topic_name or topic_name.startswith('__'):
                for partition, config in partitions.items():
                    existing_leaders[i][(topic_name, partition)] = \
                        config['leader']
    # When: apply the reassignment and give the cluster time to settle
    test_topics_configuration = {
        'topics': test_topics,
        'kafka_sleep_time': 5,
        'kafka_max_retries': 60
    }
    ensure_kafka_topics(host, test_topics_configuration)
    time.sleep(10)
    # Then: re-read the layout after reassignment
    topics = call_kafka_info(host, {
        'resource': 'topic',
        'include_internal': True
    })
    for i, broker_topics in enumerate(topics):
        # Collect the post-reassignment replica sets for the moved
        # partitions.
        resulting_replicas = {}
        for topic_name, partitions in (
                topics[i]['ansible_module_results'].items()):
            if topic_name == test_topic_name or topic_name.startswith('__'):
                for partition, config in partitions.items():
                    resulting_replicas[(topic_name, partition)] = \
                        config['replicas']
        # Leader conservation: each old leader must still be a replica of
        # its partition (so it can remain/become leader).
        for topic_partition, leader in existing_leaders[i].items():
            assert leader in resulting_replicas[topic_partition]
        # Balance check: count replica assignments per broker.
        total_partitions = 0
        partitions_per_broker = {}
        for topic_name, partitions in (
                topics[i]['ansible_module_results'].items()):
            if topic_name == test_topic_name or topic_name.startswith('__'):
                for partition, config in partitions.items():
                    for replica in config['replicas']:
                        if replica not in partitions_per_broker:
                            partitions_per_broker[replica] = 0
                        partitions_per_broker[replica] += 1
                        total_partitions += 1
        # Every broker must carry the floor of the mean, at most +1 —
        # i.e. the reassignment spread replicas evenly.
        mean_partitions = int(total_partitions * 1.0
                              / len(partitions_per_broker))
        for node_id, partitions in partitions_per_broker.items():
            assert partitions >= mean_partitions, partitions_per_broker
            assert partitions <= (mean_partitions + 1), partitions_per_broker