def test_kafka_info_consumer_group(host):
    """
    Check if can get info on consumer groups
    """
    # Given: a fresh topic and a consumer group that has consumed from it
    topic_name = get_topic_name()
    ensure_topic(host, topic_defaut_configuration, topic_name)
    time.sleep(0.3)
    consumer_group = get_consumer_group()
    total_msg = 42
    # When
    produce_and_consume_topic(topic_name, total_msg, consumer_group)
    time.sleep(0.3)
    # Then: the consumer_group resource reports our group on every host
    info_results = call_kafka_info(host, {'resource': 'consumer_group'})
    assert all(
        consumer_group in entry['ansible_module_results']
        for entry in info_results
    )
def test_consumer_lag(host):
    """
    Check if can check global consumer lag
    """
    # Given: a topic and a group that consumed all but the last message
    topic_name = get_topic_name()
    ensure_topic(host, topic_defaut_configuration, topic_name)
    time.sleep(0.3)
    consumer_group = get_consumer_group()
    expected_total = 42
    # When
    produce_and_consume_topic(topic_name, expected_total, consumer_group)
    time.sleep(0.3)
    # Then
    # NOTE: 'consummer_group' (sic) is the option name the stat module expects.
    stat_results = call_kafka_stat_lag(
        host, {'consummer_group': consumer_group})
    for entry in stat_results:
        parsed = json.loads(entry['msg'])
        assert parsed['global_lag_count'] == expected_total - 1
def test_kafka_info_topic(host):
    """
    Check if can get info on topic
    """
    # Given: one healthy topic and one topic whose min.insync.replicas (2)
    # exceeds its replication, so it reports as under min ISR.
    topic_name = get_topic_name()
    ensure_topic(host, topic_defaut_configuration, topic_name)
    time.sleep(0.3)

    topic_test_name = get_topic_name()
    topic_test_configuration = dict(
        topic_defaut_configuration,
        options={'min.insync.replicas': 2}
    )
    ensure_topic(host, topic_test_configuration, topic_test_name)
    time.sleep(0.3)
    produce_and_consume_topic(topic_name, 10, get_consumer_group())
    time.sleep(0.3)
    # When
    results = call_kafka_info(host, {'resource': 'topic'})
    # Then
    for result in results:
        topics = result['ansible_module_results']
        assert topic_name in topics
        # Internal topics must be excluded unless include_internal is set.
        assert '__consumer_offsets' not in topics
        for name, partitions in topics.items():
            for partition_info in partitions.values():
                assert 'earliest_offset' in partition_info
                assert 'latest_offset' in partition_info
                assert partition_info['earliest_offset'] >= 0
                if name == topic_name:
                    # Exactly the 10 messages we produced, fully healthy.
                    assert partition_info['latest_offset'] == 10
                    assert partition_info['at_min_isr']
                    assert not partition_info['under_replicated']
                    assert not partition_info['under_min_isr']
                    assert not partition_info['unavailable_partition']
                else:
                    assert partition_info['latest_offset'] >= 0
                    if name == topic_test_name:
                        # min.insync.replicas=2 cannot be met here.
                        assert not partition_info['at_min_isr']
                        assert not partition_info['under_replicated']
                        assert partition_info['under_min_isr']
                        assert not partition_info['unavailable_partition']
def delete_consumer_offset(host, partitions=None):
    """
    Check if can delete consumer group for topic

    Creates a topic (requires broker API >= 2.4.0), produces and consumes
    one message with a fresh consumer group, runs the consumer-group module
    with action 'delete' (idempotency-checked), then verifies against every
    eligible broker that the group no longer has offsets for the topic.

    Args:
        host: test host the Ansible modules are executed against.
        partitions: optional list of partition numbers to restrict the
            offset deletion to; None deletes offsets for the whole topic.
    """
    # Given
    consumer_group = get_consumer_group()
    topic_name1 = get_topic_name()
    ensure_kafka_topic(host, topic_defaut_configuration, topic_name1,
                       minimal_api_version="2.4.0")
    time.sleep(0.3)
    produce_and_consume_topic(topic_name1, 1, consumer_group, True, "2.4.0")
    time.sleep(0.3)
    # When
    test_cg_configuration = cg_defaut_configuration.copy()
    test_cg_configuration.update({
        'consumer_group': consumer_group,
        'action': 'delete',
        'api_version': '2.4.0'
    })
    if partitions is None:
        test_cg_configuration.update({'topics': [{'name': topic_name1}]})
    else:
        test_cg_configuration.update(
            {'topics': [{
                'name': topic_name1,
                'partitions': partitions
            }]})
    test_cg_configuration.update(sasl_default_configuration)
    ensure_idempotency(ensure_kafka_consumer_group,
                       host,
                       test_cg_configuration,
                       minimal_api_version="2.4.0")
    time.sleep(0.3)
    # Then
    # BUGFIX: the loop variable used to be named 'host', shadowing the
    # function parameter; renamed to 'kafka_host' to keep the parameter
    # intact for any use after the loop.
    for kafka_host, host_vars in kafka_hosts.items():
        # Skip brokers that don't support OffsetDelete (< 2.4.0).
        if (parse_version(
                host_protocol_version[host_vars['inventory_hostname']]) <
                parse_version("2.4.0")):
            continue
        kfk_addr = "%s:9092" % \
            host_vars['ansible_eth0']['ipv4']['address']['__ansible_unsafe']
        check_unconsumed_topic(consumer_group, topic_name1, kfk_addr)
def test_consumer_lag():
    """
    Check if can check global consumer lag

    NOTE(review): another ``test_consumer_lag(host)`` exists earlier in this
    file — if both live in the same module, this definition shadows it and
    pytest only collects this one; consider renaming — TODO confirm.
    """
    # Given: a topic and a group that consumed all but the last message
    topic_name = get_topic_name()
    ensure_topic(localhost, topic_defaut_configuration, topic_name)
    time.sleep(0.5)
    consumer_group = get_consumer_group()
    total_msg = 42
    # When
    produce_and_consume_topic(topic_name, total_msg, consumer_group)
    time.sleep(0.5)
    # Then
    # BUGFIX: the check used to run inside a
    # 'for host, host_vars in kafka_hosts.items()' loop whose variables
    # were never used — the identical call against localhost was repeated
    # once per broker. A single call is sufficient.
    # ('consummer_group' (sic) is the option name the stat module expects.)
    lag = call_kafka_stat_lag(localhost,
                              {'consummer_group': consumer_group})
    msg = json.loads(lag[0]['msg'])
    assert msg['global_lag_count'] == (total_msg - 1)
def test_kafka_info_topic_include_internal(host):
    """
    Check if can get info on topic
    """
    # Given: two topics (one with min.insync.replicas raised) and some
    # consumed traffic so the internal offsets topic exists.
    topic_name = get_topic_name()
    ensure_topic(host, topic_defaut_configuration, topic_name)
    time.sleep(0.3)

    topic_test_name = get_topic_name()
    topic_test_configuration = dict(
        topic_defaut_configuration,
        options={'min.insync.replicas': 2}
    )
    ensure_topic(host, topic_test_configuration, topic_test_name)
    time.sleep(0.3)
    produce_and_consume_topic(topic_name, 10, get_consumer_group())
    time.sleep(0.3)
    # When
    info_results = call_kafka_info(
        host,
        {'resource': 'topic', 'include_internal': True}
    )
    # Then: with include_internal set, __consumer_offsets is reported too
    for entry in info_results:
        reported = entry['ansible_module_results']
        assert topic_name in reported
        assert '__consumer_offsets' in reported