def get(self, topicName):
    """Get Topic Detail.

    Connects to the cluster with an admin client, describes *topicName*
    and returns its partition / replica counts as a dict.

    :param topicName: name of the topic to describe
    :returns: ``{'partitions': int, 'replicas': int}`` on success
    :raises: aborts with 500 on broker/connection errors,
             400 when the broker reports an error code for the topic
    """
    app.logger.info(
        "Request to get details for topic {0}.".format(topicName))
    # BUG FIX: pre-bind `admin` — if the KafkaAdminClient constructor
    # raised, the original `finally: admin.close()` hit an
    # UnboundLocalError that masked the real connection error.
    admin = None
    try:
        admin = KafkaAdminClient(
            bootstrap_servers=config['cluster.broker.listeners'],
            security_protocol=config['cluster.security.protocol'],
            ssl_cafile=config['cluster.ssl.cafile'],
            ssl_certfile=config['cluster.ssl.certfile'],
            ssl_keyfile=config['cluster.ssl.keyfile'])
        result = admin.describe_topics([topicName])
    except UnknownTopicOrPartitionError as e:
        api.abort(500, e.description)
    except Exception as e:
        api.abort(500, str(e.args))
    finally:
        if admin is not None:
            admin.close()
    app.logger.debug(result)
    if result[0]['error_code'] == 0:
        return {
            'partitions': len(result[0]['partitions']),
            'replicas': len(result[0]['partitions'][0]['replicas'])
        }
    else:
        api.abort(400, "Bad Request(Wrong Topic Name)")
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Kafka smoke-test script.

Ensures the test topic exists, produces one JSON message, then tails the
topic forever, printing every record received.
"""
from json import dumps, loads
from datetime import datetime
from kafka.admin import KafkaAdminClient, NewTopic
from kafka import KafkaConsumer, KafkaProducer, TopicPartition

admin_client = KafkaAdminClient(bootstrap_servers="localhost:9091")

consumer = KafkaConsumer("topic_test",
                         bootstrap_servers=["localhost:9091"],
                         auto_offset_reset='latest',
                         enable_auto_commit=True,
                         group_id='my-group',
                         value_deserializer=lambda x: loads(x.decode('utf-8')))

producer = KafkaProducer(bootstrap_servers=["localhost:9091"],
                         value_serializer=lambda x: dumps(x).encode('utf-8'))

topics_list = consumer.topics()
# BUG FIX: the original guard `len(topics_list) == 0` only created the topic
# when the cluster had NO topics at all; check for this topic specifically.
if "topic_test" not in topics_list:
    new_topics_list = [
        NewTopic(name="topic_test", num_partitions=2, replication_factor=1)
    ]
    admin_client.create_topics(new_topics=new_topics_list, validate_only=False)

print(admin_client.describe_topics(topics=["topic_test"]))

producer.send("topic_test", value={'test': 'test'})
# send() is asynchronous — flush so the message is actually delivered
# before we start tailing the topic.
producer.flush()

partitions = [TopicPartition("topic_test", p)
              for p in consumer.partitions_for_topic("topic_test")]
print(partitions)

# Look up the offsets at a fixed epoch-millis timestamp for each partition.
timestamps = {p: 1609799391301 for p in partitions}
print(consumer.offsets_for_times(timestamps))

# Blocks forever, printing every record as it arrives.
for message in consumer:
    print(message)
class KafkaAdminCli(Config):
    ''' KPA, kafka admin cli
        create topic
        delete topic
        rebalance topics
    '''

    def __init__(self, **kwargs):
        """Initialise CLI state and open an admin connection.

        Connection security (PLAINTEXT vs SSL) is chosen from the
        inherited ``self._ssl`` flag.
        """
        super(KafkaAdminCli, self).__init__(**kwargs)
        self._topic_list = []
        self._connection = None
        # Per-topic scratch state filled in by _topic_formater_binder().
        self._topic_name = None
        self._topic_partition = None
        self._topic_replica = None
        self._topic_assignments = None
        self._topic_configs = None
        if self._connection is None:
            if (self._ssl is None) or (self._ssl is False):
                self._connection = self.kafka_admin_connect(secure='PLAINTEXT')
            else:
                self._connection = self.kafka_admin_connect(secure='SSL')

    def kafka_admin_connect(self, secure=None):
        ''' connect kafka

        :param secure: security protocol name, e.g. 'PLAINTEXT' or 'SSL'
        :returns: the connected KafkaAdminClient (None-ish state is logged
                  when no broker is available — caller beware)
        '''
        logger.info("connecting via KafkaAdminClient")
        try:
            self._connection = KafkaAdminClient(
                bootstrap_servers=self.bootstrap,
                client_id=self.clientId,
                security_protocol=secure)
        except NoBrokersAvailable as e:
            print("kafka_admin_connect: {} NoBrokerAailable Error!".format(e))
        logger.debug("client connected: %s", self._connection)
        return self._connection

    def _topic_create_from_json_file(self):
        ''' create topic from topic.json file
        return self.kafka_topic_create
        '''
        return self._topic_create_from_file("json")

    def _topic_create_from_yaml_file(self):
        ''' create topic from topic yaml file
        return self.kafka_topic_create
        '''
        # BUG FIX: the original parsed the yaml file but had the
        # kafka_topic_create() call commented out, so nothing was created.
        return self._topic_create_from_file("yaml")

    def _topic_create_from_file(self, fileType):
        ''' Shared worker: parse a topic file and create every topic in it.

        :param fileType: "json" or "yaml"
        :returns: result of self.kafka_topic_create
        '''
        jsonData = self._open_file(self._file, fileType)
        list_topic = []
        for item in jsonData['topic']:
            print(item)
            list_topic.append(self._topic_formater_binder(item))
        print("list_topic: {}".format(list_topic))
        return self.kafka_topic_create(list_topic, self._testOnly)

    def _topic_formater_binder(self, Data):
        '''
        :toppic_formater_binder:
        topics with mixed formatted topic config to correct format
        return set_binder
        '''
        _topic_format = {
            "name": None,
            "partition": None,
            "replica": None,
            "assignments": None,
            "configs": None
        }
        for key, value in Data.items():
            if key == 'name':
                self._topic_name = value
            elif key == 'partition':
                self._topic_partition = value
            elif key == 'replica':
                self._topic_replica = value
            elif key == 'assignments' and value != 'default':
                # only the first assignment entry is used
                self._topic_assignments = value[0]
            elif key == 'configs':
                # only the first configs entry is used
                self._topic_configs = value[0]
        # NOTE(review): instance attributes are only overwritten for keys
        # present in Data, so when a later topic omits a key the previous
        # topic's value carries over — confirm this is intended.
        _topic_format['name'] = self._topic_name
        _topic_format['partition'] = self._topic_partition
        _topic_format['replica'] = self._topic_replica
        _topic_format['assignments'] = self._topic_assignments
        _topic_format['configs'] = self._topic_configs
        logger.debug("topic_name: %s", self._topic_name)
        logger.debug("topic_partition: %s", self._topic_partition)
        logger.debug("topic_replica: %s", self._topic_replica)
        logger.debug("topic_replica_assignment: [%s]", self._topic_assignments)
        logger.debug("topic_configuration: [%s]", self._topic_configs)
        return self._newtopic_binding(_topic_format['name'],
                                      _topic_format['partition'],
                                      _topic_format['replica'],
                                      _topic_format['assignments'],
                                      _topic_format['configs'])

    def kafka_topic_create(self, topic_list, flag=None):
        ''' create_topics

        :param topic_list: list of NewTopic instances
        :param flag: validate_only flag; defaults to self._testOnly
        '''
        # BUG FIX: the original test was `if flag is not None`, which
        # overwrote any caller-supplied flag and left None as None.
        if flag is None:
            flag = self._testOnly
        try:
            self._connection.create_topics(new_topics=topic_list,
                                           validate_only=flag)
        except TopicAlreadyExistsError as t:
            print("Type: ", type(t), "TopicAlreadyExistsError:: \n", t)

    def kafka_topic_delete(self):
        ''' kafka_topic_delete

        :raises TopicDeletionError: when the delete flag is not set
        '''
        logger.debug("_deleteTopic:{}, _testOnly:{}, _topicNames:{}".format(
            self._deleteTopics, self._testOnly, self._topicNames))
        if (self._deleteTopics):
            return self._topic_delete(topic_list=self._topicNames,
                                      flag=self._testOnly)
        # BUG FIX: `raise ("...")` raises a plain string, which is a
        # TypeError in Python 3 — use the same exception type as
        # _topic_delete so callers can catch it.
        raise TopicDeletionError("delete topic flag has not been triggerred")

    def _topic_delete(self, topic_list=None, flag=None):
        ''' _topic_delete
        parms: topic_list (string) — comma-separated topic names
        '''
        logger.debug(
            "topic_list:{} is going to be deleted!".format(topic_list))
        if topic_list is None or len(topic_list) == 0:
            raise TopicDeletionError(
                "at least more than one topic need to be input")
        new_list = topic_list.split(",")
        logger.info("newlist: %s", new_list)
        # BUG FIX: same inverted default as kafka_topic_create.
        if flag is None:
            flag = self._testOnly
        # NOTE(review): `flag` is unused below — delete_topics has no
        # dry-run support, so topics are deleted even in test-only mode.
        try:
            self._connection.delete_topics(new_list)
        except UnknownTopicOrPartitionError as u:
            logger.debug("Error while deleteing topic: {}".format(type(u)))
            print("TypeL: ", type(u), "UnknownTopicOrPartitionError:: \n", u)

    def kafka_list_topics(self):
        """Return the broker's topic list."""
        logger.info("Listing Topics")
        return self._connection.list_topics()

    def kafka_describe_topics(self):
        """Describe the topics named in self._descTopics ('all' => every topic)."""
        _topic = []
        # BUG FIX: guard None before len() — the original raised TypeError
        # when _descTopics was unset.
        if (self._descTopics is None or self._descTopics == 'all'
                or len(self._descTopics) == 0):
            _topic = None
        else:
            logger.debug("type: {}, list: {}".format(type(self._descTopics),
                                                     self._descTopics))
            _topic = self._descTopics.split(',')
        logger.info("Describe Topics")
        return self._connection.describe_topics(_topic)

    def kafka_consumer_groups(self, Broker_ids=None):
        """List consumer groups, optionally restricted to given broker ids."""
        logger.info("Listing Consumer Groups")
        return self._connection.list_consumer_groups(Broker_ids)

    def _load_Json(self, data):
        ''' load json data to directionary
        param : json data
        return: json dictionary
        '''
        return json.load(data)

    def _open_file(self, filename, fileType):
        ''' open file curor to read context
        :param filename: path to the topic file
        :param fileType: "json" or "yaml"
        :raises ValueError: on an unsupported fileType
        return: parsed dictionary
        '''
        with open(filename, 'rt') as rtfile:
            if fileType == "json":
                outfile = self._load_Json(rtfile)
            elif fileType == "yaml":
                outyaml = yaml.safe_load(rtfile)
                logger.debug("yaml: %s", outyaml)
                outfile = outyaml
            else:
                # BUG FIX: the original fell through with `outfile`
                # unbound and crashed with NameError on the return.
                raise ValueError(
                    "unsupported file type: {}".format(fileType))
        logger.debug("output (dictionary): %s", outfile)
        return outfile

    def _newtopic_binding(self, topic_name, topic_parition, topic_replica,
                          topic_replica_assign=None, topic_configs=None):
        ''' binding newtopic with criteria of NewTopic class
        required: name(string), num_partitions(int), replication_factor(int)
        optional: replica_assignment(dict(int)), topic_configs(dict(str))
        return: instance of class
        '''
        NT = NewTopic(name=topic_name,
                      num_partitions=topic_parition,
                      replication_factor=topic_replica,
                      replica_assignments=topic_replica_assign,
                      topic_configs=topic_configs)
        return NT

    def kafka_api_version(self):
        ''' get api version '''
        return self._connection.get_api_versions()

    def kafka_describe_consumer_group(self):
        ''' describe consumer group
        :params: consumer_group_id (string)
        '''
        if self._descConsumerGroup is None or len(
                self._descConsumerGroup) == 0:
            _consumer_group_id = None
        else:
            _consumer_group_id = self._descConsumerGroup.split(",")
        return self._connection.describe_consumer_groups(
            group_ids=_consumer_group_id)

    def kafka_list_consumer_group_offsets(self):
        ''' list consumer group offsets '''
        if self._partitionId is None:
            partition_id = None
        else:
            # NOTE(review): list_consumer_group_offsets expects
            # TopicPartition objects for `partitions`; passing raw strings
            # here looks suspect — confirm against kafka-python docs.
            partition_id = self._partitionId.split(",")
        if (self._consumerGroup is None or self._consumerGroup == 'all'
                or self._consumerGroup == 'ALL'):
            consumer_group_id = self.kafka_consumer_groups()
        else:
            consumer_group_id = self._consumerGroup.split(",")
        # NOTE(review): group_id receives a list here while the underlying
        # API takes a single group id — verify with the client version used.
        return self._connection.list_consumer_group_offsets(
            group_id=consumer_group_id, partitions=partition_id)