Exemple #1
0
class KafkaAdminGroupReader:
    """Read consumer-group topic subscriptions via a KafkaAdminClient."""

    def __init__(self, kafka_config):
        self.log = logging.getLogger(__name__)
        self.admin_client = KafkaAdminClient(
            bootstrap_servers=kafka_config.broker_list, )

    def read_group(self, groupid):
        """Return the deduplicated list of topic names *groupid* has offsets for.

        :param groupid: consumer group id (string)
        :return: list of topic names (order unspecified)
        """
        group_offsets = self.admin_client.list_consumer_group_offsets(groupid)
        # The offsets mapping is keyed by TopicPartition; iterating the dict
        # directly yields its keys, so the py2-era six.iterkeys is unneeded.
        return list({tp.topic for tp in group_offsets})

    def read_groups(self, groupids=None, list_only=False):
        """Map each group id to the topics it consumes.

        :param groupids: iterable of group ids; when None, all groups on the
            cluster are listed via _list_groups.
        :param list_only: when True, skip the per-group offset lookup and map
            every group id to an empty list (cheap "names only" mode).
        :return: dict of groupid -> list of topic names
        """
        if groupids is None:
            groupids = self._list_groups()

        if list_only:
            return {groupid: [] for groupid in groupids}

        return {groupid: self.read_group(groupid) for groupid in groupids}

    def _list_groups(self):
        """Return all consumer group ids known to the cluster.

        list_consumer_groups() yields (group_id, protocol_type) tuples; only
        the group id is kept.
        """
        groups_and_protocol_types = self.admin_client.list_consumer_groups()
        return [gpt[0] for gpt in groups_and_protocol_types]
Exemple #2
0
    def get_consumer(self):
        """Show offset/lag per partition for the selected consumer group.

        Side effects: clears the error variable, refreshes the UI, rewrites
        the content_text widget, and appends one "offset/lag" line per
        partition via write_file.
        """
        if self.err_var is not None:
            self.err_var.set("")
            self.ui.update()
        set_content(self)
        from kafka.admin import KafkaAdminClient
        adminClient = KafkaAdminClient(bootstrap_servers=self.cur_broker)
        try:
            data = adminClient.list_consumer_group_offsets(self.cur_consumer)
        finally:
            # BUGFIX: close the admin connection instead of leaking it.
            adminClient.close()
        self.content_text.config(state=NORMAL)
        self.content_text.delete(1.0, END)
        self.content_text.insert(
            END, u"{} {} :\n\n".format(self.cur_broker, self.cur_consumer))
        self.content_text.insert(
            END,
            u"{:20s} {:10s} {:12s} {:12s} {}\n".format("topic", "partition",
                                                       "offset", "newest",
                                                       "lag"))
        # data is keyed by TopicPartition; iterating the dict yields the keys.
        for key in data:
            num = self.get_topic_single_partiton_offset(
                self.cur_broker, key.topic, key.partition)
            # lag = newest broker offset minus the group's committed offset
            lag = int(num) - int(data[key].offset)
            self.content_text.insert(
                END, u"{:20s} {:10s} {:12s} {:12s} {}\n".format(
                    key.topic, str(key.partition), str(data[key].offset),
                    str(num), lag))
            msg = "offset:{}  lag:{}".format(num, lag)
            self.write_file(msg, self.cur_consumer)

        if not data:
            self.content_text.insert(END, u"没有查到数据")
        self.content_text.config(state=DISABLED)
Exemple #3
0
    def get_consumer(self):
        """Print offset/lag per partition for the selected consumer group.

        Console variant: prints a header plus one row per partition and
        appends one "offset/lag" line per partition via write_file.
        """
        from kafka.admin import KafkaAdminClient
        adminClient = KafkaAdminClient(bootstrap_servers=self.cur_broker)
        try:
            data = adminClient.list_consumer_group_offsets(self.cur_consumer)
        finally:
            # BUGFIX: close the admin connection instead of leaking it.
            adminClient.close()
        print(u"{} {} :\n\n".format(self.cur_broker, self.cur_consumer))
        print(u"{:20s} {:10s} {:12s} {:12s} {}\n".format("topic", "partition", "offset", "newest", "lag"))
        # data is keyed by TopicPartition; iterating the dict yields the keys.
        for key in data:
            num = self.get_topic_single_partiton_offset(self.cur_broker, key.topic, key.partition)
            # lag = newest broker offset minus the group's committed offset
            lag = int(num) - int(data[key].offset)
            print(u"{:20s} {:10s} {:12s} {:12s} {}\n".format(
                key.topic, str(key.partition),
                str(data[key].offset), str(num), lag))
            msg = "offset:{}  lag:{}".format(num, lag)
            self.write_file(msg, self.cur_consumer)

        if not data:
            print(u"没有查到数据")
Exemple #4
0
class KafkaAdminCli(Config):
    '''
    KPA, kafka admin cli.

    Thin wrapper around KafkaAdminClient that can:
      * create topics (from a JSON or YAML description file)
      * delete topics
      * list/describe topics and consumer groups
    '''

    def __init__(self, **kwargs):
        super(KafkaAdminCli, self).__init__(**kwargs)
        self._topic_list = []
        self._connection = None           # KafkaAdminClient, set below
        self._topic_name = None
        self._topic_partition = None
        self._topic_replica = None
        self._topic_assignments = None
        self._topic_configs = None

        # Connect immediately; the protocol depends on whether SSL was
        # requested via the Config base (self._ssl).
        if self._connection is None:
            if (self._ssl is None) or (self._ssl is False):
                self._connection = self.kafka_admin_connect(secure='PLAINTEXT')
            else:
                self._connection = self.kafka_admin_connect(secure='SSL')

    def kafka_admin_connect(self, secure=None):
        '''
        Connect to Kafka via KafkaAdminClient.

        :param secure: security protocol, e.g. 'PLAINTEXT' or 'SSL'
        :return: the KafkaAdminClient, or None when no broker was reachable
        '''
        logger.info("connecting via KafkaAdminClient")
        try:
            self._connection = KafkaAdminClient(
                bootstrap_servers=self.bootstrap,
                client_id=self.clientId,
                security_protocol=secure)
        except NoBrokersAvailable as e:
            print("kafka_admin_connect: {} NoBrokerAailable Error!".format(e))
        logger.debug("client connected: %s", self._connection)
        return self._connection

    def _topic_list_from_file(self, file_type):
        '''
        Parse self._file as *file_type* ("json" or "yaml") and bind each
        entry under the top-level 'topic' key into a NewTopic.

        :return: list of NewTopic instances
        '''
        json_data = self._open_file(self._file, file_type)
        list_topic = []
        for item in json_data['topic']:
            print(item)
            list_topic.append(self._topic_formater_binder(item))
        print("list_topic: {}".format(list_topic))
        return list_topic

    def _topic_create_from_json_file(self):
        '''
        Create the topics described in a JSON topic file.

        :return: result of self.kafka_topic_create
        '''
        return self.kafka_topic_create(
            self._topic_list_from_file("json"), self._testOnly)

    def _topic_create_from_yaml_file(self):
        '''
        Create the topics described in a YAML topic file.

        :return: result of self.kafka_topic_create
        '''
        # BUGFIX: the create call was commented out, so the YAML path parsed
        # the file but never created anything; now mirrors the JSON variant.
        return self.kafka_topic_create(
            self._topic_list_from_file("yaml"), self._testOnly)

    def _topic_formater_binder(self, Data):
        '''
        Normalize a mixed-format topic description dict onto the fixed set
        of NewTopic fields and bind them.

        :param Data: dict that may contain name/partition/replica/
            assignments/configs keys
        :return: NewTopic instance from self._newtopic_binding
        '''
        for key, value in Data.items():
            if key == 'name':
                self._topic_name = value
            elif key == 'partition':
                self._topic_partition = value
            elif key == 'replica':
                self._topic_replica = value
            elif key == 'assignments' and value != 'default':
                # only the first assignment mapping is used
                self._topic_assignments = value[0]
            elif key == 'configs':
                # only the first configs mapping is used
                self._topic_configs = value[0]

        logger.debug("topic_name: %s", self._topic_name)
        logger.debug("topic_partition: %s", self._topic_partition)
        logger.debug("topic_replica: %s", self._topic_replica)
        logger.debug("topic_replica_assignment: [%s]", self._topic_assignments)
        logger.debug("topic_configuration: [%s]", self._topic_configs)

        return self._newtopic_binding(self._topic_name,
                                      self._topic_partition,
                                      self._topic_replica,
                                      self._topic_assignments,
                                      self._topic_configs)

    def kafka_topic_create(self, topic_list, flag=None):
        '''
        Create topics on the cluster.

        :param topic_list: list of NewTopic instances
        :param flag: validate_only (dry-run) flag; defaults to self._testOnly
        '''
        # BUGFIX: was `if flag is not None`, which clobbered an explicitly
        # passed flag and never applied the default when flag was omitted.
        if flag is None:
            flag = self._testOnly
        try:
            self._connection.create_topics(new_topics=topic_list,
                                           validate_only=flag)
        except TopicAlreadyExistsError as t:
            print("Type: ", type(t), "TopicAlreadyExistsError:: \n", t)

    def kafka_topic_delete(self):
        '''
        Delete the topics named in self._topicNames.

        :raises RuntimeError: when the delete flag was not set
        '''
        logger.debug("_deleteTopic:{}, _testOnly:{}, _topicNames:{}".format(
            self._deleteTopics, self._testOnly, self._topicNames))
        if self._deleteTopics:
            return self._topic_delete(topic_list=self._topicNames,
                                      flag=self._testOnly)
        # BUGFIX: `raise "..."` raises a TypeError in Python 3 — raising a
        # proper exception preserves the intended failure with its message.
        raise RuntimeError("delete topic flag has not been triggerred")

    def _topic_delete(self, topic_list=None, flag=None):
        '''
        Delete topics.

        :param topic_list: comma-separated topic names (string)
        :param flag: dry-run flag; defaults to self._testOnly.
            NOTE(review): currently unused — delete_topics is always issued.
        :raises TopicDeletionError: when no topic name was supplied
        '''
        logger.debug(
            "topic_list:{} is going to be deleted!".format(topic_list))
        if topic_list is None or len(topic_list) == 0:
            raise TopicDeletionError(
                "at least more than one topic need to be input")
        new_list = topic_list.split(",")
        logger.info("newlist: %s", new_list)

        # BUGFIX: was `if flag is not None`, which clobbered an explicitly
        # passed flag and never applied the default when flag was omitted.
        if flag is None:
            flag = self._testOnly
        try:
            self._connection.delete_topics(new_list)
        except UnknownTopicOrPartitionError as u:
            logger.debug("Error while deleteing topic: {}".format(type(u)))
            print("TypeL: ", type(u), "UnknownTopicOrPartitionError:: \n", u)

    def kafka_list_topics(self):
        '''Return the topic names known to the cluster.'''
        logger.info("Listing Topics")
        return self._connection.list_topics()

    def kafka_describe_topics(self):
        '''
        Describe the topics in self._descTopics; 'all' or an empty value
        describes every topic (None is passed through to the client).
        '''
        if (self._descTopics == 'all') or len(self._descTopics) == 0:
            _topic = None
        else:
            logger.debug("type: {}, list: {}".format(type(self._descTopics),
                                                     self._descTopics))
            _topic = self._descTopics.split(',')

        logger.info("Describe Topics")
        return self._connection.describe_topics(_topic)

    def kafka_consumer_groups(self, Broker_ids=None):
        '''
        List consumer groups, optionally restricted to given broker ids.

        :return: list of (group_id, protocol_type) tuples
        '''
        logger.info("Listing Consumer Groups")
        return self._connection.list_consumer_groups(Broker_ids)

    def _load_Json(self, data):
        '''
        Load json data into a dictionary.

        :param data: open file object (or anything json.load accepts)
        :return: parsed dictionary
        '''
        return json.load(data)

    def _open_file(self, filename, fileType):
        '''
        Open *filename* and parse it as json or yaml.

        :param filename: path to the topic description file
        :param fileType: "json" or "yaml"
        :return: parsed dictionary
        :raises ValueError: on an unsupported fileType (previously an unknown
            type left the result unbound and crashed with UnboundLocalError)
        '''
        with open(filename, 'rt') as rtfile:
            if fileType == "json":
                outfile = self._load_Json(rtfile)
            elif fileType == "yaml":
                outfile = yaml.safe_load(rtfile)
                logger.debug("yaml: %s", outfile)
            else:
                raise ValueError(
                    "unsupported file type: {}".format(fileType))
        logger.debug("output (dictionary): %s", outfile)
        return outfile

    def _newtopic_binding(self,
                          topic_name,
                          topic_parition,
                          topic_replica,
                          topic_replica_assign=None,
                          topic_configs=None):
        '''
        Bind a new topic with the criteria of the NewTopic class.

        required: name(string), num_partitions(int), replication_factor(int)
        optional: replica_assignment(dict(int)), topic_configs(dict(str))

        :return: NewTopic instance
        '''
        return NewTopic(name=topic_name,
                        num_partitions=topic_parition,
                        replication_factor=topic_replica,
                        replica_assignments=topic_replica_assign,
                        topic_configs=topic_configs)

    def kafka_api_version(self):
        '''
        Get the broker api versions.
        '''
        return self._connection.get_api_versions()

    def kafka_describe_consumer_group(self):
        '''
        Describe the consumer groups in self._descConsumerGroup
        (comma-separated string; empty/None passes None through).
        '''
        if self._descConsumerGroup is None or len(
                self._descConsumerGroup) == 0:
            _consumer_group_id = None
        else:
            _consumer_group_id = self._descConsumerGroup.split(",")

        return self._connection.describe_consumer_groups(
            group_ids=_consumer_group_id)

    def kafka_list_consumer_group_offsets(self):
        '''
        List consumer group offsets for self._consumerGroup.

        NOTE(review): kafka-python's list_consumer_group_offsets expects a
        single group_id string and TopicPartition objects for `partitions`;
        here a list of ids (or of (group, protocol) tuples for 'all') and raw
        partition strings are passed — verify against the actual client used.
        '''
        if self._partitionId is None:
            partition_id = None
        else:
            partition_id = self._partitionId.split(",")

        if self._consumerGroup is None or self._consumerGroup == 'all' or self._consumerGroup == 'ALL':
            consumer_group_id = self.kafka_consumer_groups()
        else:
            consumer_group_id = self._consumerGroup.split(",")

        return self._connection.list_consumer_group_offsets(
            group_id=consumer_group_id, partitions=partition_id)