Code Example #1
 def config(self, cfg):
     """Initializes variables from conf files."""
     for children in cfg.children:
         if children.key == INTERVAL:
             self.interval = children.values[0]
         if children.key == PROCESS:
             self.process = children.values[0]
             #get jolokia instance
             self.jclient = JolokiaClient(os.path.basename(__file__)[:-3], self.process)
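For reference, collectd passes config() a Config tree whose children each carry a key and a values tuple. The following stand-in is only a sketch for exercising the snippet above; the Node/Config helpers and the INTERVAL/PROCESS constant values are assumptions, not part of the plugin:

from collections import namedtuple

# Hypothetical stand-ins for the Config nodes that collectd supplies:
Node = namedtuple("Node", ["key", "values"])
Config = namedtuple("Config", ["children"])

INTERVAL = "interval"   # assumed constant values
PROCESS = "process"

cfg = Config(children=[Node(INTERVAL, [60]), Node(PROCESS, ["kafka.Kafka"])])
# plugin.config(cfg) would then set plugin.interval to 60 and rebuild
# plugin.jclient as JolokiaClient("<plugin name>", "kafka.Kafka").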
Code Example #2
 def __init__(self):
     """Initializes interval and previous dictionary variable."""
     self.process = 'zookeeper'
     self.interval = DEFAULT_INTERVAL
     self.listenerip = 'localhost'
     self.port = None
     self.prev_data = {}
     self.documentsTypes = []
     self.jclient = JolokiaClient(
         os.path.basename(__file__)[:-3], self.process)
Code Example #3
    def get_pid_jmx_stats(self, pid, port, output):
        """Call get_jmx_parameters function for each doc_type and add dict to queue"""
        jolokiaclient = JolokiaClient.get_jolokia_inst(port)
        for doc in GENERIC_DOCS:
            try:
                dict_jmx = {}
                self.get_jmx_parameters(jolokiaclient, doc, dict_jmx)
                if not dict_jmx:
                    raise ValueError("No data found")

                collectd.info("Plugin kafkajmx: Added %s doctype information successfully for pid %s" % (doc, pid))
                self.add_common_params(doc, dict_jmx)
                output.put((doc, dict_jmx))
            except Exception as err:
                collectd.error("Plugin kafkajmx: Error in collecting stats of %s doctype: %s" % (doc, str(err)))
Code Example #4
File: kafkatopic.py  Project: akil96/collectd-plugins
    def get_pid_jmx_stats(self, pid, port, output):
        """Call get_jmx_parameters function for each doc_type and add dict to queue"""
        jolokiaclient = JolokiaClient.get_jolokia_inst(port)
        for doc in KAFKA_DOCS:
            try:
                dict_jmx = {}
                self.get_jmx_parameters(jolokiaclient, doc, dict_jmx)
                if not dict_jmx:
                    raise ValueError("No data found")

                collectd.info(
                    "Plugin kafkatopic: Added %s doctype information successfully for pid %s"
                    % (doc, pid))
                if doc in ["topicStats", "partitionStats", "consumerStats"]:
                    output.append((pid, doc, dict_jmx))
                    continue

                self.add_common_params(doc, dict_jmx)
                output.append((pid, doc, dict_jmx))
            except Exception as err:
                collectd.error(
                    "Plugin kafkatopic: Error in collecting stats of %s doctype: %s"
                    % (doc, str(err)))
Code Example #5
File: kafkajmx.py  Project: janselva/collectd-plugins
class JmxStat(object):
    """Plugin object will be created only once and collects JMX statistics info every interval."""
    def __init__(self):
        """Initializes interval and previous dictionary variable."""
        self.jclient = None
        self.process = None
        self.interval = DEFAULT_INTERVAL

    def config(self, cfg):
        """Initializes variables from conf files."""
        for children in cfg.children:
            if children.key == INTERVAL:
                self.interval = children.values[0]
            if children.key == PROCESS:
                self.process = children.values[0]
                #get jolokia instance
                self.jclient = JolokiaClient(
                    os.path.basename(__file__)[:-3], self.process)

    def get_jmx_parameters(self, jolokiaclient, doc, dict_jmx):
        """Fetch stats based on doc_type"""
        if doc == "memoryPoolStats":
            self.add_memory_pool_parameters(jolokiaclient, dict_jmx)
        elif doc == "memoryStats":
            self.add_memory_parameters(jolokiaclient, dict_jmx)
        elif doc == "threadStats":
            self.add_threading_parameters(jolokiaclient, dict_jmx)
        elif doc == "gcStats":
            self.add_gc_parameters(jolokiaclient, dict_jmx)
        elif doc == "classLoadingStats":
            self.add_classloading_parameters(jolokiaclient, dict_jmx)
        elif doc == "compilationStats":
            self.add_compilation_parameters(jolokiaclient, dict_jmx)
        elif doc == "nioStats":
            self.add_nio_parameters(jolokiaclient, dict_jmx)
        elif doc == "operatingSysStats":
            self.add_operating_system_parameters(jolokiaclient, dict_jmx)
        elif doc == "jmxStats":
            self.add_jmxstat_parameters(jolokiaclient, dict_jmx)

    def add_jmxstat_parameters(self, jolokiaclient, dict_jmx):
        """Add specific jmxstats parameter"""
        #classloading Stats
        classloading = jolokiaclient.request(
            type='read', mbean='java.lang:type=ClassLoading')
        if classloading['status'] == 200:
            dict_jmx['unloadedClass'] = classloading['value'][
                'UnloadedClassCount']
            dict_jmx['loadedClass'] = classloading['value']['LoadedClassCount']

        #threading Stats
        thread_json = jolokiaclient.request(type='read',
                                            mbean='java.lang:type=Threading')
        if thread_json['status'] == 200:
            dict_jmx['threads'] = thread_json['value']['ThreadCount']

        #memory Stats
        memory_json = jolokiaclient.request(type='read',
                                            mbean='java.lang:type=Memory')
        if memory_json['status'] == 200:
            heap = memory_json['value']['HeapMemoryUsage']
            self.handle_neg_bytes(heap['init'], 'heapMemoryUsageInit',
                                  dict_jmx)
            dict_jmx['heapMemoryUsageUsed'] = round(
                heap['used'] / 1024.0 / 1024.0, 2)
            dict_jmx['heapMemoryUsageCommitted'] = round(
                heap['committed'] / 1024.0 / 1024.0, 2)
            non_heap = memory_json['value']['NonHeapMemoryUsage']
            self.handle_neg_bytes(non_heap['init'], 'nonHeapMemoryUsageInit',
                                  dict_jmx)
            dict_jmx['nonHeapMemoryUsageUsed'] = round(
                non_heap['used'] / 1024.0 / 1024.0, 2)
            dict_jmx['nonHeapMemoryUsageCommitted'] = round(
                non_heap['committed'] / 1024.0 / 1024.0, 2)

        # initialize default values
        param_list = ['G1OldGenerationCollectionTime', 'G1OldGenerationCollectionCount', 'G1YoungGenerationCollectionTime',\
                      'G1YoungGenerationCollectionCount', 'G1OldGenUsageUsed', 'G1SurvivorSpaceUsageUsed', 'MetaspaceUsageUsed',\
                      'CodeCacheUsageUsed', 'CompressedClassSpaceUsageUsed', 'G1EdenSpaceUsageUsed']
        for param in param_list:
            dict_jmx[param] = 0

        #gc Stats
        gc_names = self.get_gc_names(jolokiaclient)
        for gc_name in gc_names:
            str_mbean = 'java.lang:type=GarbageCollector,name=' + gc_name
            valid = jolokiaclient.request(type='read',
                                          mbean=str_mbean,
                                          attribute='Valid')
            if valid['status'] == 200 and valid['value'] == True:
                str_attribute = 'CollectionTime,CollectionCount'
                gc_values = jolokiaclient.request(type='read',
                                                  mbean=str_mbean,
                                                  attribute=str_attribute)
                gc_name_no_spaces = ''.join(gc_name.split())
                if gc_values['status'] == 200:
                    dict_jmx[gc_name_no_spaces + 'CollectionTime'] = round(
                        gc_values['value']['CollectionTime'] * 0.001, 2)
                    dict_jmx[gc_name_no_spaces +
                             'CollectionCount'] = gc_values['value'][
                                 'CollectionCount']

        #memoryPool Stats
        mp_names = self.get_memory_pool_names(jolokiaclient)
        for pool_name in mp_names:
            str_mbean = 'java.lang:type=MemoryPool,name=' + pool_name
            valid = jolokiaclient.request(type='read',
                                          mbean=str_mbean,
                                          attribute='Valid')
            if valid['status'] == 200 and valid['value'] == True:
                mp_values = jolokiaclient.request(type='read',
                                                  mbean=str_mbean,
                                                  attribute='Usage')
                pool_name_no_spaces = ''.join(pool_name.split())
                if mp_values['status'] == 200:
                    dict_jmx[pool_name_no_spaces + 'UsageUsed'] = round(
                        mp_values['value']['used'] / 1024.0 / 1024.0, 2)

    def add_operating_system_parameters(self, jolokiaclient, dict_jmx):
        """Add operating system related jmx stats"""
        ops = jolokiaclient.request(type='read',
                                    mbean='java.lang:type=OperatingSystem')
        if ops['status'] == 200:
            dict_jmx['osArchitecture'] = ops['value']['Arch']
            dict_jmx['availableProcessors'] = ops['value'][
                'AvailableProcessors']
            self.handle_neg_bytes(ops['value']['CommittedVirtualMemorySize'],
                                  'committedVirtualMemorySize', dict_jmx)
            dict_jmx['freePhysicalMemorySize'] = round(
                ops['value']['FreePhysicalMemorySize'] / 1024.0 / 1024.0, 2)
            dict_jmx['freeSwapSpaceSize'] = round(
                ops['value']['FreeSwapSpaceSize'] / 1024.0 / 1024.0, 2)
            dict_jmx['maxFileDescriptors'] = ops['value'][
                'MaxFileDescriptorCount']
            dict_jmx['osName'] = ops['value']['Name']
            dict_jmx['openFileDescriptors'] = ops['value'][
                'OpenFileDescriptorCount']
            dict_jmx['processCpuLoad'] = ops['value']['ProcessCpuLoad']
            pcputime = ops['value']['ProcessCpuTime']
            if pcputime >= 0:
                pcputime = round(pcputime / 1000000000.0, 2)
            dict_jmx['processCpuTime'] = pcputime
            dict_jmx['totalPhysicalMemorySize'] = round(
                ops['value']['TotalPhysicalMemorySize'] / 1024.0 / 1024.0, 2)
            dict_jmx['totalSwapSpaceSize'] = round(
                ops['value']['TotalSwapSpaceSize'] / 1024.0 / 1024.0, 2)
            dict_jmx['osVersion'] = ops['value']['Version']
            dict_jmx['systemCpuLoad'] = ops['value']['SystemCpuLoad']
            dict_jmx['systemLoadAverage'] = ops['value']['SystemLoadAverage']

    def add_nio_parameters(self, jolokiaclient, dict_jmx):
        """Add network related jmx stats"""
        nio = jolokiaclient.request(type='read',
                                    mbean='java.nio:type=BufferPool,*',
                                    attribute='Name')
        bufferpool_names = []
        if nio['status'] == 200:
            for _, value in nio['value'].items():
                bufferpool_names.append(value['Name'])

        for poolname in bufferpool_names:
            str_mbean = 'java.nio:type=BufferPool,name=' + poolname
            poolinfo = jolokiaclient.request(type='read', mbean=str_mbean)
            if poolinfo['status'] == 200:
                dict_jmx[poolname +
                         'BufferPoolCount'] = poolinfo['value']['Count']
                self.handle_neg_bytes(poolinfo['value']['MemoryUsed'],
                                      poolname + 'BufferPoolMemoryUsed',
                                      dict_jmx)
                self.handle_neg_bytes(poolinfo['value']['TotalCapacity'],
                                      poolname + 'BufferPoolTotalCapacity',
                                      dict_jmx)

    def add_compilation_parameters(self, jolokiaclient, dict_jmx):
        """Add compilation related jmx stats"""
        compilation = jolokiaclient.request(type='read',
                                            mbean='java.lang:type=Compilation')
        if compilation['status'] == 200:
            dict_jmx['compilerName'] = compilation['value']['Name']
            dict_jmx['totalCompilationTime'] = round(
                compilation['value']['TotalCompilationTime'] * 0.001, 2)

    def add_classloading_parameters(self, jolokiaclient, dict_jmx):
        """Add classloading related jmx stats"""
        classloading = jolokiaclient.request(
            type='read', mbean='java.lang:type=ClassLoading')
        if classloading['status'] == 200:
            dict_jmx['unloadedClass'] = classloading['value'][
                'UnloadedClassCount']
            dict_jmx['loadedClass'] = classloading['value']['LoadedClassCount']
            dict_jmx['totalLoadedClass'] = classloading['value'][
                'TotalLoadedClassCount']

    def add_gc_parameters(self, jolokiaclient, dict_jmx):
        """Add garbage collector related jmx stats"""
        def memory_gc_usage(self, mempool_gc, key, gc_name, dict_jmx):
            for name, values in mempool_gc.items():
                if name in ['G1 Eden Space', 'G1 Old Gen']:
                    mp_name = ''.join(name.split())
                    self.handle_neg_bytes(values['init'],
                                          gc_name + key + mp_name + 'Init',
                                          dict_jmx)
                    self.handle_neg_bytes(values['max'],
                                          gc_name + key + mp_name + 'Max',
                                          dict_jmx)
                    dict_jmx[gc_name + key + mp_name + 'Used'] = round(
                        values['used'] / 1024.0 / 1024.0, 2)
                    dict_jmx[gc_name + key + mp_name + 'Committed'] = round(
                        values['committed'] / 1024.0 / 1024.0, 2)

        gc_names = self.get_gc_names(jolokiaclient)
        for gc_name in gc_names:
            str_mbean = 'java.lang:type=GarbageCollector,name=' + gc_name
            if_valid = jolokiaclient.request(type='read',
                                             mbean=str_mbean,
                                             attribute='Valid')
            if if_valid['status'] == 200 and if_valid['value'] == True:
                str_attribute = 'CollectionTime,CollectionCount,LastGcInfo'
                gc_values = jolokiaclient.request(type='read',
                                                  mbean=str_mbean,
                                                  attribute=str_attribute)
                gc_name_no_spaces = ''.join(gc_name.split())
                if gc_values['status'] == 200:
                    dict_jmx[gc_name_no_spaces + 'CollectionTime'] = round(
                        gc_values['value']['CollectionTime'] * 0.001, 2)
                    dict_jmx[gc_name_no_spaces +
                             'CollectionCount'] = gc_values['value'][
                                 'CollectionCount']
                    if gc_values['value']['LastGcInfo']:
                        dict_jmx[gc_name_no_spaces +
                                 'GcThreadCount'] = gc_values['value'][
                                     'LastGcInfo']['GcThreadCount']
                        dict_jmx[gc_name_no_spaces + 'StartTime'] = round(
                            gc_values['value']['LastGcInfo']['startTime'] *
                            0.001, 2)
                        dict_jmx[gc_name_no_spaces + 'EndTime'] = round(
                            gc_values['value']['LastGcInfo']['endTime'] *
                            0.001, 2)
                        dict_jmx[gc_name_no_spaces + 'Duration'] = round(
                            gc_values['value']['LastGcInfo']['duration'] *
                            0.001, 2)
                        mem_aftergc = gc_values['value']['LastGcInfo'][
                            'memoryUsageAfterGc']
                        memory_gc_usage(self, mem_aftergc, 'MemUsageAfGc',
                                        gc_name_no_spaces, dict_jmx)
                        mem_beforegc = gc_values['value']['LastGcInfo'][
                            'memoryUsageBeforeGc']
                        memory_gc_usage(self, mem_beforegc, 'MemUsageBfGc',
                                        gc_name_no_spaces, dict_jmx)

    def add_threading_parameters(self, jolokiaclient, dict_jmx):
        """Add thread related jmx stats"""
        mbean_threading = 'java.lang:type=Threading'
        thread_json = jolokiaclient.request(type='read', mbean=mbean_threading)
        if thread_json['status'] == 200:
            dict_jmx['threads'] = thread_json['value']['ThreadCount']
            dict_jmx['peakThreads'] = thread_json['value']['PeakThreadCount']
            dict_jmx['daemonThreads'] = thread_json['value'][
                'DaemonThreadCount']
            dict_jmx['totalStartedThreads'] = thread_json['value'][
                'TotalStartedThreadCount']
            if thread_json['value']['CurrentThreadCpuTimeSupported']:
                dict_jmx['currentThreadCpuTime'] = round(
                    thread_json['value']['CurrentThreadCpuTime'] /
                    1000000000.0, 2)
                dict_jmx['currentThreadUserTime'] = round(
                    thread_json['value']['CurrentThreadUserTime'] /
                    1000000000.0, 2)

    def add_memory_parameters(self, jolokiaclient, dict_jmx):
        """Add memory related jmx stats"""
        memory_json = jolokiaclient.request(type='read',
                                            mbean='java.lang:type=Memory')
        if memory_json['status'] == 200:
            heap = memory_json['value']['HeapMemoryUsage']
            self.handle_neg_bytes(heap['init'], 'heapMemoryUsageInit',
                                  dict_jmx)
            self.handle_neg_bytes(heap['max'], 'heapMemoryUsageMax', dict_jmx)
            dict_jmx['heapMemoryUsageUsed'] = round(
                heap['used'] / 1024.0 / 1024.0, 2)
            dict_jmx['heapMemoryUsageCommitted'] = round(
                heap['committed'] / 1024.0 / 1024.0, 2)

            non_heap = memory_json['value']['NonHeapMemoryUsage']
            self.handle_neg_bytes(non_heap['init'], 'nonHeapMemoryUsageInit',
                                  dict_jmx)
            self.handle_neg_bytes(non_heap['max'], 'nonHeapMemoryUsageMax',
                                  dict_jmx)
            dict_jmx['nonHeapMemoryUsageUsed'] = round(
                non_heap['used'] / 1024.0 / 1024.0, 2)
            dict_jmx['nonHeapMemoryUsageCommitted'] = round(
                non_heap['committed'] / 1024.0 / 1024.0, 2)
            dict_jmx['objectPendingFinalization'] = memory_json['value'][
                'ObjectPendingFinalizationCount']

    def add_memory_pool_parameters(self, jolokiaclient, dict_jmx):
        """Add memory pool related jmx stats"""
        mp_names = self.get_memory_pool_names(jolokiaclient)
        for poll_name in mp_names:
            str_mbean = 'java.lang:type=MemoryPool,name=' + poll_name
            if_valid = jolokiaclient.request(type='read',
                                             mbean=str_mbean,
                                             attribute='Valid')
            if if_valid['status'] == 200 and if_valid['value'] == True:
                str_attribute = 'CollectionUsage,PeakUsage,Type,Usage,CollectionUsageThresholdSupported,UsageThresholdSupported'
                mp_values = jolokiaclient.request(type='read',
                                                  mbean=str_mbean,
                                                  attribute=str_attribute)
                poll_name_no_spaces = ''.join(poll_name.split())
                if mp_values['status'] == 200:
                    coll_usage = mp_values['value']['CollectionUsage']
                    if coll_usage:
                        self.handle_neg_bytes(
                            coll_usage['max'],
                            poll_name_no_spaces + 'CollectionUsageMax',
                            dict_jmx)
                        self.handle_neg_bytes(
                            coll_usage['init'],
                            poll_name_no_spaces + 'CollectionUsageInit',
                            dict_jmx)
                        dict_jmx[poll_name_no_spaces +
                                 'CollectionUsageUsed'] = round(
                                     coll_usage['used'] / 1024.0 / 1024.0, 2)
                        dict_jmx[poll_name_no_spaces +
                                 'CollectionUsageCommitted'] = round(
                                     coll_usage['committed'] / 1024.0 / 1024.0,
                                     2)
                    usage = mp_values['value']['Usage']
                    self.handle_neg_bytes(usage['max'],
                                          poll_name_no_spaces + 'UsageMax',
                                          dict_jmx)
                    self.handle_neg_bytes(usage['init'],
                                          poll_name_no_spaces + 'UsageInit',
                                          dict_jmx)
                    dict_jmx[poll_name_no_spaces + 'UsageUsed'] = round(
                        usage['used'] / 1024.0 / 1024.0, 2)
                    dict_jmx[poll_name_no_spaces + 'UsageCommitted'] = round(
                        usage['committed'] / 1024.0 / 1024.0, 2)
                    peak_usage = mp_values['value']['PeakUsage']
                    self.handle_neg_bytes(peak_usage['max'],
                                          poll_name_no_spaces + 'PeakUsageMax',
                                          dict_jmx)
                    self.handle_neg_bytes(
                        peak_usage['init'],
                        poll_name_no_spaces + 'PeakUsageInit', dict_jmx)
                    dict_jmx[poll_name_no_spaces + 'PeakUsageUsed'] = round(
                        peak_usage['used'] / 1024.0 / 1024.0, 2)
                    dict_jmx[poll_name_no_spaces +
                             'PeakUsageCommitted'] = round(
                                 peak_usage['committed'] / 1024.0 / 1024.0, 2)
                    if mp_values['value']['CollectionUsageThresholdSupported']:
                        coll_attr = 'CollectionUsageThreshold,CollectionUsageThresholdCount,CollectionUsageThresholdExceeded'
                        coll_threshold = jolokiaclient.request(
                            type='read', mbean=str_mbean, attribute=coll_attr)
                        if coll_threshold['status'] == 200:
                            dict_jmx[poll_name_no_spaces +
                                     'CollectionUsageThreshold'] = round(
                                         coll_threshold['value']
                                         ['CollectionUsageThreshold'] /
                                         1024.0 / 1024.0, 2)
                            dict_jmx[
                                poll_name_no_spaces +
                                'CollectionUsageThresholdCount'] = coll_threshold[
                                    'value']['CollectionUsageThresholdCount']
                            dict_jmx[
                                poll_name_no_spaces +
                                'CollectionUsageThresholdExceeded'] = coll_threshold[
                                    'value'][
                                        'CollectionUsageThresholdExceeded']
                    if mp_values['value']['UsageThresholdSupported']:
                        usage_attr = 'UsageThreshold,UsageThresholdCount,UsageThresholdExceeded'
                        usage_threshold = jolokiaclient.request(
                            type='read', mbean=str_mbean, attribute=usage_attr)
                        if usage_threshold['status'] == 200:
                            dict_jmx[poll_name_no_spaces +
                                     'UsageThreshold'] = round(
                                         usage_threshold['value']
                                         ['UsageThreshold'] / 1024.0 / 1024.0,
                                         2)
                            dict_jmx[poll_name_no_spaces +
                                     'UsageThresholdCount'] = usage_threshold[
                                         'value']['UsageThresholdCount']
                            dict_jmx[
                                poll_name_no_spaces +
                                'UsageThresholdExceeded'] = usage_threshold[
                                    'value']['UsageThresholdExceeded']

    def get_gc_names(self, jolokiaclient):
        gc_json = jolokiaclient.request(
            type='read',
            mbean='java.lang:type=GarbageCollector,*',
            attribute='Name')
        gc_names = []
        if gc_json['status'] == 200:
            for _, value in gc_json['value'].items():
                if value['Name'] in DEFAULT_GC:
                    gc_names.append(value['Name'])
                else:
                    collectd.error("Plugin kafkajmx: not supported for GC %s" %
                                   value['Name'])
        return gc_names

    def get_memory_pool_names(self, jolokiaclient):
        """Get memory pool names of jvm process"""
        mempool_json = jolokiaclient.request(
            type='read', mbean='java.lang:type=MemoryPool,*', attribute='Name')
        mempool_names = []
        if mempool_json['status'] == 200:
            for _, value in mempool_json['value'].items():
                if value['Name'] in DEFAULT_MP:
                    mempool_names.append(value['Name'])
                else:
                    collectd.error(
                        "Plugin kafkajmx: not supported for memory pool %s" %
                        value['Name'])
        return mempool_names

    def handle_neg_bytes(self, value, resultkey, dict_jmx):
        """Condition for byte keys whose return value may be -1 if not supported."""
        if value == -1:
            dict_jmx[resultkey] = value
        else:
            dict_jmx[resultkey] = round(value / 1024.0 / 1024.0, 2)

    def add_common_params(self, doc, dict_jmx):
        """Adds TIMESTAMP, PLUGIN, PLUGITYPE to dictionary."""
        timestamp = int(round(time.time()))
        dict_jmx[TIMESTAMP] = timestamp
        dict_jmx[PLUGIN] = KAFKA_JMX
        dict_jmx[PLUGINTYPE] = doc
        dict_jmx[ACTUALPLUGINTYPE] = KAFKA_JMX
        dict_jmx[PROCESSNAME] = self.process
        #dict_jmx[PLUGIN_INS] = doc
        collectd.info(
            "Plugin kafkajmx: Added common parameters successfully for %s doctype"
            % doc)

    def get_pid_jmx_stats(self, pid, port, output):
        """Call get_jmx_parameters function for each doc_type and add dict to queue"""
        jolokiaclient = JolokiaClient.get_jolokia_inst(port)
        for doc in GENERIC_DOCS:
            try:
                dict_jmx = {}
                self.get_jmx_parameters(jolokiaclient, doc, dict_jmx)
                if not dict_jmx:
                    raise ValueError("No data found")

                collectd.info(
                    "Plugin kafkajmx: Added %s doctype information successfully for pid %s"
                    % (doc, pid))
                self.add_common_params(doc, dict_jmx)
                output.put((doc, dict_jmx))
            except Exception as err:
                collectd.error(
                    "Plugin kafkajmx: Error in collecting stats of %s doctype: %s"
                    % (doc, str(err)))

    def run_pid_process(self, list_pid):
        """Spawn process for each pid"""
        procs = []
        output = multiprocessing.Queue()
        for pid in list_pid:
            port = self.jclient.get_jolokia_port(pid)
            if port and self.jclient.connection_available(port):
                proc = multiprocessing.Process(target=self.get_pid_jmx_stats,
                                               args=(pid, port, output))
                procs.append(proc)
                proc.start()

        for proc in procs:
            proc.join()


#       for p in procs:
#          collectd.debug("%s, %s" % (p, p.is_alive()))
        return procs, output

    def collect_jmx_data(self):
        """Collects stats and spawns process for each pids."""
        list_pid = self.jclient.get_pid()
        if not list_pid:
            collectd.error("Plugin kafkajmx: No %s processes are running" %
                           self.process)
            return

        procs, output = self.run_pid_process(list_pid)
        for _ in procs:
            for _ in GENERIC_DOCS:
                try:
                    doc_name, doc_result = output.get_nowait()
                except Queue.Empty:
                    collectd.error(
                        "Failed to send one or more doctype document to collectd"
                    )
                    continue
                self.dispatch_data(doc_name, doc_result)
        output.close()

    def dispatch_data(self, doc_name, result):
        """Dispatch data to collectd."""
        collectd.info(
            "Plugin kafkajmx: Succesfully sent %s doctype to collectd." %
            doc_name)
        collectd.debug("Plugin kafkajmx: Values dispatched =%s" %
                       json.dumps(result))
        utils.dispatch(result)

    def read_temp(self):
        """Collectd first calls register_read. At that time default interval is taken,
        hence temporary function is made to call, the read callback is unregistered
        and read() is called again with interval obtained from conf by register_config callback."""
        collectd.unregister_read(self.read_temp)
        collectd.register_read(self.collect_jmx_data,
                               interval=int(self.interval))
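The read_temp docstring above describes how the plugin swaps its read callback once the configured interval is known. The module-level registration is not shown in this excerpt; a plausible hookup, assuming the usual collectd Python plugin entry points, would be:

OBJ = JmxStat()
collectd.register_config(OBJ.config)
collectd.register_read(OBJ.read_temp)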
Code Example #6
File: kafkatopic.py  Project: akil96/collectd-plugins
class JmxStat(object):
    """Plugin object will be created only once and collects JMX statistics info every interval."""
    def __init__(self):
        """Initializes interval and previous dictionary variable."""
        self.interval = DEFAULT_INTERVAL
        self.process = 'kafka.Kafka'
        self.listenerip = 'localhost'
        self.prev_topic_data = {}
        self.prev_data = {}
        self.port = None
        self.documentsTypes = []
        self.jclient = JolokiaClient(
            os.path.basename(__file__)[:-3], self.process)

    def config(self, cfg):
        """Initializes variables from conf files."""
        for children in cfg.children:
            if children.key == INTERVAL:
                self.interval = children.values[0]
            if children.key == PORT:
                self.port = children.values[0]
            if children.key == DOCUMENTSTYPES:
                self.documentsTypes = children.values[0]
        self.listenerip = self.add_kafka_listenerip(self.port)

    @staticmethod
    def add_kafka_listenerip(port):
        """
        Return listener IP of kafka broker
        """
        list_addr = []
        for if_name, if_info in psutil.net_if_addrs().items():
            for family in if_info:
                if family.family == socket.AF_INET:
                    list_addr.append(family.address)

        # Get the first matched listener IP
        for addr in list_addr:
            try:
                ip = '%s:%s' % (addr, port)
                consumer = kafka.KafkaConsumer(bootstrap_servers=ip)
                collectd.info("found ip %s" % addr)
                return addr
            except Exception:
                pass
        return ''

    def get_jmx_parameters(self, jolokiaclient, doc, dict_jmx):
        """Fetch stats based on doc_type"""
        if doc == "kafkaStats":
            self.add_kafka_parameters(jolokiaclient, dict_jmx)
        elif doc == "topicStats":
            self.add_topic_parameters(jolokiaclient, dict_jmx, "topic")
        elif doc == "partitionStats":
            self.add_topic_parameters(jolokiaclient, dict_jmx, "partition")
        elif doc == "consumerStats":
            self.add_consumer_parameters(dict_jmx)

    def add_default_rate_value(self, dict_jmx, doctype):
        """Add default value to rate key based on type"""
        if doctype == "kafka":
            keylist = ["messagesIn", "bytesIn", "bytesOut", "isrExpands", "isrShrinks", "leaderElection", \
                       "uncleanLeaderElections", "producerRequests", "fetchConsumerRequests", "fetchFollowerRequests"]
        else:
            keylist = ["messagesInRate", "bytesOutRate", "bytesInRate", "totalFetchRequestsRate", "totalProduceRequestsRate", \
                       "produceMessageConversionsRate", "failedProduceRequestsRate", "fetchMessageConversionsRate", "failedFetchRequestsRate",\
                       "bytesRejectedRate"]
        for key in keylist:
            dict_jmx[key] = 0

    def get_rate(self, key, curr_data, prev_data):
        """Calculate and returns rate. Rate=(current_value-prev_value)/time."""
        #TODO The code is similar to the one in utils.py.
        rate = NAN
        if not prev_data:
            return rate

        if key not in prev_data:
            collectd.error("%s key not in previous data. Shouldn't happen." %
                           key)
            return rate

        if TIMESTAMP not in curr_data or TIMESTAMP not in prev_data:
            collectd.error("%s key not in previous data. Shouldn't happen." %
                           key)
            return rate

        curr_time = curr_data[TIMESTAMP]
        prev_time = prev_data[TIMESTAMP]

        if curr_time <= prev_time:
            collectd.error(
                "Current data time: %s is less than previous data time: %s. "
                "Shouldn't happen." % (curr_time, prev_time))
            return rate

        rate = (curr_data[key] - prev_data[key]) / float(self.interval)
        # rate can get negative if the topic(s) are deleted and created again with the same name
        # initializing rate to 0 if the rate is negative
        if rate < 0:
            rate = 0.0
        return rate
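
    # Worked example for get_rate (illustrative numbers only, not from the
    # source): with self.interval = 10 and a counter that moved from 1200 to
    # 1500 between two reads, rate = (1500 - 1200) / 10.0 = 30.0 per second.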

    def add_rate(self, pid, dict_jmx):
        """Rate only for kafka jmx metrics"""
        rate = self.get_rate("messagesInPerSec", dict_jmx, self.prev_data[pid])
        if rate != NAN:
            dict_jmx["messagesIn"] = round(rate, FLOATING_FACTOR)
        rate = self.get_rate("bytesInPerSec", dict_jmx, self.prev_data[pid])
        if rate != NAN:
            dict_jmx["bytesIn"] = round(rate, FLOATING_FACTOR)
        rate = self.get_rate("bytesOutPerSec", dict_jmx, self.prev_data[pid])
        if rate != NAN:
            dict_jmx["bytesOut"] = round(rate, FLOATING_FACTOR)
        rate = self.get_rate("isrExpandsPerSec", dict_jmx, self.prev_data[pid])
        if rate != NAN:
            dict_jmx["isrExpands"] = round(rate, FLOATING_FACTOR)
        rate = self.get_rate("isrShrinksPerSec", dict_jmx, self.prev_data[pid])
        if rate != NAN:
            dict_jmx["isrShrinks"] = round(rate, FLOATING_FACTOR)
        rate = self.get_rate("leaderElectionPerSec", dict_jmx,
                             self.prev_data[pid])
        if rate != NAN:
            dict_jmx["leaderElection"] = round(rate, FLOATING_FACTOR)
        rate = self.get_rate("uncleanLeaderElectionPerSec", dict_jmx,
                             self.prev_data[pid])
        if rate != NAN:
            dict_jmx["uncleanLeaderElections"] = round(rate, FLOATING_FACTOR)
        rate = self.get_rate("producerRequestsPerSec", dict_jmx,
                             self.prev_data[pid])
        if rate != NAN:
            dict_jmx["producerRequests"] = round(rate, FLOATING_FACTOR)
        rate = self.get_rate("fetchConsumerRequestsPerSec", dict_jmx,
                             self.prev_data[pid])
        if rate != NAN:
            dict_jmx["fetchConsumerRequests"] = round(rate, FLOATING_FACTOR)
        rate = self.get_rate("fetchFollowerRequestsPerSec", dict_jmx,
                             self.prev_data[pid])
        if rate != NAN:
            dict_jmx["fetchFollowerRequests"] = round(rate, FLOATING_FACTOR)

    def add_topic_rate(self, pid, topic_name, topic_info):
        """Rate only for kafka topic metrics"""
        rate = self.get_rate('messagesIn', topic_info,
                             self.prev_topic_data[pid][topic_name])
        if rate != NAN:
            topic_info['messagesInRate'] = round(rate, FLOATING_FACTOR)
        rate = self.get_rate('bytesOut', topic_info,
                             self.prev_topic_data[pid][topic_name])
        if rate != NAN:
            topic_info['bytesOutRate'] = round(rate, FLOATING_FACTOR)
        rate = self.get_rate('bytesIn', topic_info,
                             self.prev_topic_data[pid][topic_name])
        if rate != NAN:
            topic_info['bytesInRate'] = round(rate, FLOATING_FACTOR)
        rate = self.get_rate('totalFetchRequests', topic_info,
                             self.prev_topic_data[pid][topic_name])
        if rate != NAN:
            topic_info['totalFetchRequestsRate'] = round(rate, FLOATING_FACTOR)
        rate = self.get_rate('totalProduceRequests', topic_info,
                             self.prev_topic_data[pid][topic_name])
        if rate != NAN:
            topic_info['totalProduceRequestsRate'] = round(
                rate, FLOATING_FACTOR)
        rate = self.get_rate('produceMessageConversions', topic_info,
                             self.prev_topic_data[pid][topic_name])
        if rate != NAN:
            topic_info['produceMessageConversionsRate'] = round(
                rate, FLOATING_FACTOR)
        rate = self.get_rate('failedProduceRequests', topic_info,
                             self.prev_topic_data[pid][topic_name])
        if rate != NAN:
            topic_info['failedProduceRequestsRate'] = round(
                rate, FLOATING_FACTOR)
        rate = self.get_rate('fetchMessageConversions', topic_info,
                             self.prev_topic_data[pid][topic_name])
        if rate != NAN:
            topic_info['fetchMessageConversionsRate'] = round(
                rate, FLOATING_FACTOR)
        rate = self.get_rate('failedFetchRequests', topic_info,
                             self.prev_topic_data[pid][topic_name])
        if rate != NAN:
            topic_info['failedFetchRequestsRate'] = round(
                rate, FLOATING_FACTOR)
        rate = self.get_rate('bytesRejected', topic_info,
                             self.prev_topic_data[pid][topic_name])
        if rate != NAN:
            topic_info['bytesRejectedRate'] = round(rate, FLOATING_FACTOR)

    def add_kafka_parameters(self, jolokiaclient, dict_jmx):
        """Add jmx stats specific to kafka metrics"""
        jolokiaclient.add_request(
            type='read',
            mbean=
            'kafka.server:name=UnderReplicatedPartitions,type=ReplicaManager',
            attribute='Value')
        jolokiaclient.add_request(
            type='read',
            mbean='kafka.server:name=MessagesInPerSec,type=BrokerTopicMetrics',
            attribute='Count')
        jolokiaclient.add_request(
            type='read',
            mbean='kafka.server:name=BytesInPerSec,type=BrokerTopicMetrics',
            attribute='Count')
        jolokiaclient.add_request(
            type='read',
            mbean='kafka.server:name=BytesOutPerSec,type=BrokerTopicMetrics',
            attribute='Count')
        jolokiaclient.add_request(
            type='read',
            mbean='kafka.server:name=PartitionCount,type=ReplicaManager',
            attribute='Value')
        jolokiaclient.add_request(
            type='read',
            mbean='kafka.server:name=IsrExpandsPerSec,type=ReplicaManager',
            attribute='Count')
        jolokiaclient.add_request(
            type='read',
            mbean='kafka.server:name=IsrShrinksPerSec,type=ReplicaManager',
            attribute='Count')
        jolokiaclient.add_request(
            type='read',
            mbean=
            'kafka.server:name=RequestHandlerAvgIdlePercent,type=KafkaRequestHandlerPool',
            attribute='MeanRate')
        jolokiaclient.add_request(
            type='read',
            mbean=
            'kafka.controller:name=OfflinePartitionsCount,type=KafkaController',
            attribute='Value')
        jolokiaclient.add_request(
            type='read',
            mbean=
            'kafka.controller:name=ActiveControllerCount,type=KafkaController',
            attribute='Value')
        jolokiaclient.add_request(
            type='read',
            mbean=
            'kafka.controller:name=LeaderElectionRateAndTimeMs,type=ControllerStats',
            attribute='Count')
        jolokiaclient.add_request(
            type='read',
            mbean=
            'kafka.controller:name=UncleanLeaderElectionsPerSec,type=ControllerStats',
            attribute='Count')
        jolokiaclient.add_request(
            type='read',
            mbean=
            'kafka.network:name=RequestsPerSec,request=Produce,type=RequestMetrics',
            attribute='Count')
        jolokiaclient.add_request(
            type='read',
            mbean=
            'kafka.network:name=RequestsPerSec,request=FetchConsumer,type=RequestMetrics',
            attribute='Count')
        jolokiaclient.add_request(
            type='read',
            mbean=
            'kafka.network:name=RequestsPerSec,request=FetchFollower,type=RequestMetrics',
            attribute='Count')
        jolokiaclient.add_request(
            type='read',
            mbean=
            'kafka.network:name=NetworkProcessorAvgIdlePercent,type=SocketServer',
            attribute='Value')
        jolokiaclient.add_request(
            type='read',
            mbean=
            'kafka.network:type=RequestMetrics,name=TotalTimeMs,request=FetchFollower',
            attribute='Mean')
        jolokiaclient.add_request(
            type='read',
            mbean=
            'kafka.network:type=RequestMetrics,name=TotalTimeMs,request=FetchConsumer',
            attribute='Mean')
        jolokiaclient.add_request(
            type='read',
            mbean=
            'kafka.network:type=RequestMetrics,name=TotalTimeMs,request=Produce',
            attribute='Mean')
        jolokiaclient.add_request(
            type='read',
            mbean=
            'kafka.network:name=ResponseSendTimeMs,request=FetchFollower,type=RequestMetrics',
            attribute='Mean')
        jolokiaclient.add_request(
            type='read',
            mbean=
            'kafka.network:name=ResponseSendTimeMs,request=FetchConsumer,type=RequestMetrics',
            attribute='Mean')
        jolokiaclient.add_request(
            type='read',
            mbean=
            'kafka.network:name=ResponseSendTimeMs,request=Produce,type=RequestMetrics',
            attribute='Mean')
        jolokiaclient.add_request(
            type='read',
            mbean='kafka.server:name=BrokerState,type=KafkaServer',
            attribute='Value')
        bulkdata = jolokiaclient.getRequests()
        dict_jmx['underReplicatedPartitions'] = bulkdata[0].get('value', 0)
        dict_jmx['messagesInPerSec'] = bulkdata[1].get('value', 0)
        dict_jmx['bytesInPerSec'] = bulkdata[2].get('value', 0)
        dict_jmx['bytesOutPerSec'] = bulkdata[3].get('value', 0)
        dict_jmx['partitionCount'] = bulkdata[4].get('value', 0)
        dict_jmx['isrExpandsPerSec'] = bulkdata[5].get('value', 0)
        dict_jmx['isrShrinksPerSec'] = bulkdata[6].get('value', 0)
        dict_jmx['requestHandlerAvgIdle'] = float(
            str(bulkdata[7].get('value', 0))[:4])
        dict_jmx['offlinePartitions'] = bulkdata[8].get('value', 0)
        dict_jmx['activeController'] = bulkdata[9].get('value', 0)
        dict_jmx['leaderElectionPerSec'] = bulkdata[10].get('value', 0)
        dict_jmx['uncleanLeaderElectionPerSec'] = bulkdata[11].get('value', 0)
        dict_jmx['producerRequestsPerSec'] = bulkdata[12].get('value', 0)
        dict_jmx['fetchConsumerRequestsPerSec'] = bulkdata[13].get('value', 0)
        dict_jmx['fetchFollowerRequestsPerSec'] = bulkdata[14].get('value', 0)
        dict_jmx['networkProcessorAvgIdlePercent'] = float(
            str(bulkdata[15].get('value', 0))[:4])
        dict_jmx['followerRequestTime'] = round(bulkdata[16].get('value', 0),
                                                2)
        dict_jmx['consumerRequestTime'] = round(bulkdata[17].get('value', 0),
                                                2)
        dict_jmx['producerRequestTime'] = round(bulkdata[18].get('value', 0),
                                                2)
        dict_jmx['followerResponseTime'] = round(bulkdata[19].get('value', 0),
                                                 2)
        dict_jmx['consumerResponseTime'] = round(bulkdata[20].get('value', 0),
                                                 2)
        dict_jmx['producerResponseTime'] = round(bulkdata[21].get('value', 0),
                                                 2)
        if bulkdata[22]['status'] == 200:
            dict_jmx['brokerState'] = BROKER_STATES[bulkdata[22]['value']]
        else:
            dict_jmx['brokerState'] = "NotAvailable"

    def add_topic_parameters(self, jolokiaclient, dict_jmx, flag="topic"):
        """JMX stats specific to topic and partition"""
        def get_partitions(topic):
            client = kafka.KafkaClient(self.listenerip + ':' + self.port)
            topic_partition_ids = client.get_partition_ids_for_topic(topic)
            return len(topic_partition_ids)

        def get_topics():
            consumer = kafka.KafkaConsumer(bootstrap_servers=self.listenerip +
                                           ':' + self.port)
            return consumer.topics()

        parti_list = []
        topics = get_topics()
        for topic in topics:
            partitions = get_partitions(topic)
            if flag == "topic":
                dict_topic = {}
                msgIn = jolokiaclient.request(
                    type='read',
                    mbean=
                    'kafka.server:name=MessagesInPerSec,topic=%s,type=BrokerTopicMetrics'
                    % topic,
                    attribute='Count')
                bytesOut = jolokiaclient.request(
                    type='read',
                    mbean=
                    'kafka.server:name=BytesOutPerSec,topic=%s,type=BrokerTopicMetrics'
                    % topic,
                    attribute='Count')
                bytesIn = jolokiaclient.request(
                    type='read',
                    mbean=
                    'kafka.server:name=BytesInPerSec,topic=%s,type=BrokerTopicMetrics'
                    % topic,
                    attribute='Count')
                totalFetch = jolokiaclient.request(
                    type='read',
                    mbean=
                    'kafka.server:name=TotalFetchRequestsPerSec,topic=%s,type=BrokerTopicMetrics'
                    % topic,
                    attribute='Count')
                totalProduce = jolokiaclient.request(
                    type='read',
                    mbean=
                    'kafka.server:name=TotalProduceRequestsPerSec,topic=%s,type=BrokerTopicMetrics'
                    % topic,
                    attribute='Count')
                produceMsg = jolokiaclient.request(
                    type='read',
                    mbean=
                    'kafka.server:name=ProduceMessageConversionsPerSec,topic=%s,type=BrokerTopicMetrics'
                    % topic,
                    attribute='Count')
                failedProduce = jolokiaclient.request(
                    type='read',
                    mbean=
                    'kafka.server:name=FailedProduceRequestsPerSec,topic=%s,type=BrokerTopicMetrics'
                    % topic,
                    attribute='Count')
                fetchMsg = jolokiaclient.request(
                    type='read',
                    mbean=
                    'kafka.server:name=FetchMessageConversionsPerSec,topic=%s,type=BrokerTopicMetrics'
                    % topic,
                    attribute='Count')
                failedFetch = jolokiaclient.request(
                    type='read',
                    mbean=
                    'kafka.server:name=FailedFetchRequestsPerSec,topic=%s,type=BrokerTopicMetrics'
                    % topic,
                    attribute='Count')
                bytesReject = jolokiaclient.request(
                    type='read',
                    mbean=
                    'kafka.server:name=BytesRejectedPerSec,topic=%s,type=BrokerTopicMetrics'
                    % topic,
                    attribute='Count')
                if msgIn['status'] == 200:
                    dict_topic['messagesIn'] = msgIn['value']
                if bytesOut['status'] == 200:
                    dict_topic['bytesOut'] = bytesOut['value']
                if bytesIn['status'] == 200:
                    dict_topic['bytesIn'] = bytesIn['value']
                if totalFetch['status'] == 200:
                    dict_topic['totalFetchRequests'] = totalFetch['value']
                if totalProduce['status'] == 200:
                    dict_topic['totalProduceRequests'] = totalProduce['value']
                if produceMsg['status'] == 200:
                    dict_topic['produceMessageConversions'] = produceMsg[
                        'value']
                if failedProduce['status'] == 200:
                    dict_topic['failedProduceRequests'] = failedProduce[
                        'value']
                if fetchMsg['status'] == 200:
                    dict_topic['fetchMessageConversions'] = fetchMsg['value']
                if failedFetch['status'] == 200:
                    dict_topic['failedFetchRequests'] = failedFetch['value']
                if bytesReject['status'] == 200:
                    dict_topic['bytesRejected'] = bytesReject['value']
                if dict_topic:
                    dict_topic['_topicName'] = topic
                    dict_topic['partitionCount'] = partitions
                    dict_jmx[topic] = {}
                    dict_jmx[topic].update(dict_topic)
            else:
                for partition in range(0, partitions):
                    dict_parti = {}
                    numLogSegments = jolokiaclient.request(
                        type='read',
                        mbean=
                        'kafka.log:name=NumLogSegments,partition=%s,topic=%s,type=Log'
                        % (partition, topic),
                        attribute='Value')
                    logSize = jolokiaclient.request(
                        type='read',
                        mbean=
                        'kafka.log:name=Size,partition=%s,topic=%s,type=Log' %
                        (partition, topic),
                        attribute='Value')
                    if numLogSegments['status'] == 200:
                        dict_parti['_topicName'] = topic
                        dict_parti['_partitionNum'] = partition
                        dict_parti['partitionLogSegments'] = numLogSegments[
                            'value']
                        try:
                            dict_parti['partitionLogSize'] = round(
                                logSize['value'] / 1024.0 / 1024.0, 2)
                        except:
                            dict_parti['partitionLogSize'] = 0
                    if dict_parti:
                        parti_list.append(dict_parti)

        if flag == "partition":
            dict_jmx["partitionStats"] = parti_list

    def add_consumer_parameters(self, dict_jmx):
        """
        Collect console consumer group stats
        :param jolokiaclient:
        :param dict_jmx:
        :return:
        """
        grp_list = []
        with open(os.devnull, 'w') as devnull:
            p1 = subprocess.Popen([
                "sudo",
                "/opt/kafka/kafka_2.12-1.0.0/bin/kafka-consumer-groups.sh",
                "--list", "--bootstrap-server",
                self.listenerip + ':' + self.port
            ],
                                  stdout=subprocess.PIPE,
                                  stderr=devnull)
        p1.wait()
        consumerGrp = p1.communicate()[0]
        if consumerGrp:
            consumerGrp_list = consumerGrp.splitlines()
            for consGrp in consumerGrp_list:
                with open(os.devnull, 'w') as devnull:
                    p2 = subprocess.Popen([
                        "sudo",
                        "/opt/kafka/kafka_2.12-1.0.0/bin/kafka-consumer-groups.sh",
                        "--describe", "--group", consGrp, "--bootstrap-server",
                        self.listenerip + ':' + self.port
                    ],
                                          stdout=subprocess.PIPE,
                                          stderr=devnull)
                    p2.wait()
                    dict_grp = {}
                    grp_detail = p2.communicate()[0]
                    if grp_detail:
                        grp_detail_list = grp_detail.splitlines()
                        for group in grp_detail_list[2:]:
                            cons_details = group.split()
                            if cons_details[6] == '-':
                                #consumer without client id is considered as inactive consumer
                                continue
                            dict_grp["_groupName"] = consGrp
                            dict_grp["_topicName"] = cons_details[0]
                            dict_grp["partition"] = int(cons_details[1])
                            dict_grp["currentOffset"] = long(cons_details[2])
                            dict_grp["logEndOffset"] = long(cons_details[3])
                            dict_grp["lag"] = long(cons_details[4])
                            dict_grp["custId"] = cons_details[5]
                            dict_grp["clientId"] = cons_details[6]
                            grp_list.append(dict_grp.copy())
            dict_jmx["consumerStats"] = grp_list

    def add_common_params(self, doc, dict_jmx):
        """Adds TIMESTAMP, PLUGIN, PLUGITYPE to dictionary."""
        timestamp = int(round(time.time()))
        dict_jmx[TIMESTAMP] = timestamp
        dict_jmx[PLUGIN] = KAFKA_TOPIC
        dict_jmx[PLUGINTYPE] = doc
        dict_jmx[ACTUALPLUGINTYPE] = KAFKA_TOPIC
        #dict_jmx[PLUGIN_INS] = doc
        collectd.info(
            "Plugin kafkatopic: Added common parameters successfully for %s doctype"
            % doc)

    def add_rate_dispatch_topic(self, pid, doc, dict_jmx):
        """Rate calculation for topic metrics"""
        for topic, topic_info in dict_jmx.items():
            self.add_common_params(doc, topic_info)
            if pid in self.prev_topic_data:
                if topic in self.prev_topic_data[pid]:
                    self.add_topic_rate(pid, topic, topic_info)
                else:
                    self.add_default_rate_value(topic_info, "topic")
            else:
                self.prev_topic_data[pid] = {}
                self.add_default_rate_value(topic_info, "topic")

            self.prev_topic_data[pid][topic] = topic_info
            self.dispatch_data(doc, deepcopy(topic_info))

    def add_rate_dispatch_kafka(self, pid, doc, dict_jmx):
        if pid in self.prev_data:
            self.add_rate(pid, dict_jmx)
        else:
            self.add_default_rate_value(dict_jmx, "kafka")

        self.prev_data[pid] = dict_jmx
        self.dispatch_data(doc, deepcopy(dict_jmx))

    def dispatch_stats(self, doc, dict_jmx):
        if doc == "partitionStats":
            stats_list = dict_jmx["partitionStats"]
        else:
            stats_list = dict_jmx["consumerStats"]
        for stats in stats_list:
            self.add_common_params(doc, stats)
            self.dispatch_data(doc, stats)

    def get_pid_jmx_stats(self, pid, port, output):
        """Call get_jmx_parameters function for each doc_type and add dict to queue"""
        jolokiaclient = JolokiaClient.get_jolokia_inst(port)
        for doc in KAFKA_DOCS:
            try:
                dict_jmx = {}
                self.get_jmx_parameters(jolokiaclient, doc, dict_jmx)
                if not dict_jmx:
                    raise ValueError("No data found")

                collectd.info(
                    "Plugin kafkatopic: Added %s doctype information successfully for pid %s"
                    % (doc, pid))
                if doc in ["topicStats", "partitionStats", "consumerStats"]:
                    output.append((pid, doc, dict_jmx))
                    continue

                self.add_common_params(doc, dict_jmx)
                output.append((pid, doc, dict_jmx))
            except Exception as err:
                collectd.error(
                    "Plugin kafkatopic: Error in collecting stats of %s doctype: %s"
                    % (doc, str(err)))

    def run_pid_process(self, list_pid):
        """Spawn process for each pid"""
        #procs = []
        #output = multiprocessing.Queue()
        output = []
        for pid in list_pid:
            port = self.jclient.get_jolokia_port(pid)
            if port and self.jclient.connection_available(port):
                self.get_pid_jmx_stats(pid, port, output)
                #proc = multiprocessing.Process(target=self.get_pid_jmx_stats, args=(pid, port, output))
                #procs.append(proc)
                #proc.start()

        #for proc in procs:
        #proc.join()


#       for p in procs:
#          collectd.debug("%s, %s" % (p, p.is_alive()))
        return output

    def collect_jmx_data(self):
        """Collects stats and spawns process for each pids."""
        list_pid = self.jclient.get_pid()
        if not list_pid:
            collectd.error("Plugin kafkatopic: No %s processes are running" %
                           self.process)
            return

        output = self.run_pid_process(list_pid)
        for doc in output:
            pid, doc_name, doc_result = doc
            if doc_name in self.documentsTypes:
                if doc_name == "topicStats":
                    self.add_rate_dispatch_topic(pid, doc_name, doc_result)
                elif doc_name == "kafkaStats":
                    self.add_rate_dispatch_kafka(pid, doc_name, doc_result)
                else:
                    self.dispatch_stats(doc_name, doc_result)
        '''
        procs, output = self.run_pid_process(list_pid)
        for _ in procs:
            for _ in KAFKA_DOCS:
                try:
                    pid, doc_name, doc_result = output.get_nowait()
                except Queue.Empty:
                    collectd.error("Failed to send one or more doctype document to collectd")
                    continue
                # Dispatching documentsTypes which are requetsed alone
                if doc_name in self.documentsTypes:
                    if doc_name == "topicStats":
                        self.add_rate_dispatch_topic(pid, doc_name, doc_result)
                    elif doc_name == "kafkaStats":
                        self.add_rate_dispatch_kafka(pid, doc_name, doc_result)
                    else:
                        self.dispatch_stats(doc_name, doc_result)
        output.close()
        '''

    def dispatch_data(self, doc_name, result):
        """Dispatch data to collectd."""
        if doc_name == "topicStats":
            for item in ['messagesIn', 'bytesOut', 'bytesIn', 'totalFetchRequests', 'totalProduceRequests', 'produceMessageConversions',\
                         'failedProduceRequests', 'fetchMessageConversions', 'failedFetchRequests', 'bytesRejected']:
                try:
                    del result[item]
                except KeyError:
                    pass
                    #collectd.error("Key %s deletion error in topicStats doctype for topic %s: %s" % (item, result['_topicName'], str(err)))
            collectd.info(
                "Plugin kafkatopic: Succesfully sent topicStats: %s" %
                result['_topicName'])

        elif doc_name == "kafkaStats":
            for item in ["messagesInPerSec", "bytesInPerSec", "bytesOutPerSec", "isrExpandsPerSec", "isrShrinksPerSec", "leaderElectionPerSec",\
                             "uncleanLeaderElectionPerSec", "producerRequestsPerSec", "fetchConsumerRequestsPerSec", "fetchFollowerRequestsPerSec"]:
                try:
                    del result[item]
                except KeyError:
                    pass
                    #collectd.error("Key %s deletion error in kafkaStats doctype: %s" % (item, str(err)))

            collectd.info(
                "Plugin kafkatopic: Succesfully sent %s doctype to collectd." %
                doc_name)
            collectd.debug("Plugin kafkatopic: Values dispatched =%s" %
                           json.dumps(result))

        elif doc_name == "consumerStats":
            collectd.info(
                "Plugin kafkatopic: Succesfully sent consumerStats of consumer group %s of topic %s"
                % (result['_groupName'], result['_topicName']))

        else:
            collectd.info(
                "Plugin kafkatopic: Succesfully sent topic %s of partitionStats: %s."
                % (result['_topicName'], result['_partitionNum']))
        utils.dispatch(result)

    def read_temp(self):
        """Collectd first calls register_read. At that time default interval is taken,
        hence temporary function is made to call, the read callback is unregistered
        and read() is called again with interval obtained from conf by register_config callback."""
        collectd.unregister_read(self.read_temp)
        collectd.register_read(self.collect_jmx_data,
                               interval=int(self.interval))
Code example #7
0
class JmxStat(object):
    """Plugin object will be created only once and collects JMX statistics info every interval."""
    def __init__(self):
        """Initializes interval and previous dictionary variable."""
        self.process = 'zookeeper'
        self.interval = DEFAULT_INTERVAL
        self.listenerip = 'localhost'
        self.port = None
        self.prev_data = {}
        self.documentsTypes = []
        self.jclient = JolokiaClient(
            os.path.basename(__file__)[:-3], self.process)

    def config(self, cfg):
        """Initializes variables from conf files."""
        for children in cfg.children:
            if children.key == INTERVAL:
                self.interval = children.values[0]
            if children.key == LISTENERIP:
                self.listenerip = children.values[0]
            if children.key == PORT:
                self.port = children.values[0]
            if children.key == DOCUMENTSTYPES:
                self.documentsTypes = children.values[0]

    def get_jmx_parameters(self, jolokiaclient, doc, dict_jmx):
        """Fetch stats based on doc_type"""
        if doc == "memoryPoolStats":
            self.add_memory_pool_parameters(jolokiaclient, dict_jmx)
        elif doc == "memoryStats":
            self.add_memory_parameters(jolokiaclient, dict_jmx)
        elif doc == "threadStats":
            self.add_threading_parameters(jolokiaclient, dict_jmx)
        elif doc == "gcStats":
            self.add_gc_parameters(jolokiaclient, dict_jmx)
        elif doc == "classLoadingStats":
            self.add_classloading_parameters(jolokiaclient, dict_jmx)
        elif doc == "compilationStats":
            self.add_compilation_parameters(jolokiaclient, dict_jmx)
        elif doc == "nioStats":
            self.add_nio_parameters(jolokiaclient, dict_jmx)
        elif doc == "operatingSysStats":
            self.add_operating_system_parameters(jolokiaclient, dict_jmx)
        elif doc == "zookeeperStats":
            self.add_zookeeper_parameters(jolokiaclient, dict_jmx)
        elif doc == "jmxStats":
            self.add_jmxstats_parameters(jolokiaclient, dict_jmx)

    def add_default_rate_value(self, dict_jmx):
        """Add default value to rate key based on type"""
        keylist = ["packetsReceivedRate", "packetsSentRate"]
        for key in keylist:
            dict_jmx[key] = 0

    def get_rate(self, key, curr_data, prev_data):
        """Calculate and returns rate. Rate=(current_value-prev_value)/time."""
        #TODO The code is similar to the one in utils.py.
        rate = NAN
        if not prev_data:
            return rate

        if key not in prev_data:
            collectd.error("%s key not in previous data. Shouldn't happen." %
                           key)
            return rate

        if TIMESTAMP not in curr_data or TIMESTAMP not in prev_data:
            collectd.error("%s key not in previous data. Shouldn't happen." %
                           key)
            return rate

        curr_time = curr_data[TIMESTAMP]
        prev_time = prev_data[TIMESTAMP]
        if curr_time <= prev_time:
            collectd.error(
                "Current data time: %s is less than previous data time: %s. "
                "Shouldn't happen." % (curr_time, prev_time))
            return rate
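
        # Illustrative example (assumed numbers): with interval=10 seconds,
        # previous packetsReceived=1000 and current packetsReceived=1250,
        # rate = (1250 - 1000) / 10.0 = 25.0 packets per second.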

        rate = (curr_data[key] - prev_data[key]) / float(self.interval)
        if rate < 0:
            rate = 0
        return rate

    def add_rate(self, pid, dict_jmx):
        """Rate only for zookeeper jmx metrics"""
        rate = self.get_rate("packetsReceived", dict_jmx, self.prev_data[pid])
        if rate != NAN:
            dict_jmx["packetsReceivedRate"] = round(rate, FLOATING_FACTOR)
        rate = self.get_rate("packetsSent", dict_jmx, self.prev_data[pid])
        if rate != NAN:
            dict_jmx["packetsSentRate"] = round(rate, FLOATING_FACTOR)

    def add_rate_dispatch(self, pid, doc, dict_jmx):
        """Add default for rate keys in first poll."""
        if pid in self.prev_data:
            self.add_rate(pid, dict_jmx)
        else:
            self.add_default_rate_value(dict_jmx)
        self.prev_data[pid] = dict_jmx
        self.dispatch_data(doc, deepcopy(dict_jmx))

    def get_memory_pool_names(self, jolokiaclient):
        """Get memory pool names of jvm process"""
        mempool_json = jolokiaclient.request(
            type='read', mbean='java.lang:type=MemoryPool,*', attribute='Name')
        mempool_names = []
        if mempool_json['status'] == 200:
            for _, value in mempool_json['value'].items():
                if value['Name'] in DEFAULT_MP:
                    mempool_names.append(value['Name'])
                else:
                    collectd.error(
                        "Plugin zookeeperjmx: not supported for memory pool %s"
                        % value['Name'])
        return mempool_names

    def get_gc_names(self, jolokiaclient):
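        """Get garbage collector names of the jvm process."""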
        gc_json = jolokiaclient.request(
            type='read',
            mbean='java.lang:type=GarbageCollector,*',
            attribute='Name')
        gc_names = []
        if gc_json['status'] == 200:
            for _, value in gc_json['value'].items():
                if value['Name'] in DEFAULT_GC:
                    gc_names.append(value['Name'])
                else:
                    collectd.error(
                        "Plugin zookeeperjmx: not supported for GC %s" %
                        value['Name'])
        return gc_names

    def add_jmxstats_parameters(self, jolokiaclient, dict_jmx):
        """Add specific jmxstats parameter"""
        #classloading Stats
        classloading = jolokiaclient.request(
            type='read', mbean='java.lang:type=ClassLoading')
        if classloading['status'] == 200:
            dict_jmx['unloadedClass'] = classloading['value'][
                'UnloadedClassCount']
            dict_jmx['loadedClass'] = classloading['value']['LoadedClassCount']

        #threading Stats
        thread_json = jolokiaclient.request(type='read',
                                            mbean='java.lang:type=Threading')
        if thread_json['status'] == 200:
            dict_jmx['threads'] = thread_json['value']['ThreadCount']

        #memory Stats
        memory_json = jolokiaclient.request(type='read',
                                            mbean='java.lang:type=Memory')
        if memory_json['status'] == 200:
            heap = memory_json['value']['HeapMemoryUsage']
            self.handle_neg_bytes(heap['init'], 'heapMemoryUsageInit',
                                  dict_jmx)
            dict_jmx['heapMemoryUsageUsed'] = round(
                heap['used'] / 1024.0 / 1024.0, 2)
            dict_jmx['heapMemoryUsageCommitted'] = round(
                heap['committed'] / 1024.0 / 1024.0, 2)
            non_heap = memory_json['value']['NonHeapMemoryUsage']
            self.handle_neg_bytes(non_heap['init'], 'nonHeapMemoryUsageInit',
                                  dict_jmx)
            dict_jmx['nonHeapMemoryUsageUsed'] = round(
                non_heap['used'] / 1024.0 / 1024.0, 2)
            dict_jmx['nonHeapMemoryUsageCommitted'] = round(
                non_heap['committed'] / 1024.0 / 1024.0, 2)

        #initialize default values
        param_list = ['G1OldGenerationCollectionTime', 'G1OldGenerationCollectionCount', 'G1YoungGenerationCollectionTime',\
                      'G1YoungGenerationCollectionCount', 'G1OldGenUsageUsed', 'G1SurvivorSpaceUsageUsed', 'MetaspaceUsageUsed',\
                      'CodeCacheUsageUsed', 'CompressedClassSpaceUsageUsed', 'G1EdenSpaceUsageUsed']
        for param in param_list:
            dict_jmx[param] = 0

        #gc Stats
        gc_names = self.get_gc_names(jolokiaclient)
        for gc_name in gc_names:
            str_mbean = 'java.lang:type=GarbageCollector,name=' + gc_name
            valid = jolokiaclient.request(type='read',
                                          mbean=str_mbean,
                                          attribute='Valid')
            if valid['status'] == 200 and valid['value'] == True:
                str_attribute = 'CollectionTime,CollectionCount'
                gc_values = jolokiaclient.request(type='read',
                                                  mbean=str_mbean,
                                                  attribute=str_attribute)
                gc_name_no_spaces = ''.join(gc_name.split())
                if gc_values['status'] == 200:
                    dict_jmx[gc_name_no_spaces + 'CollectionTime'] = round(
                        gc_values['value']['CollectionTime'] * 0.001, 2)
                    dict_jmx[gc_name_no_spaces +
                             'CollectionCount'] = gc_values['value'][
                                 'CollectionCount']

        #memoryPool Stats
        mp_names = self.get_memory_pool_names(jolokiaclient)
        for pool_name in mp_names:
            str_mbean = 'java.lang:type=MemoryPool,name=' + pool_name
            valid = jolokiaclient.request(type='read',
                                          mbean=str_mbean,
                                          attribute='Valid')
            if valid['status'] == 200 and valid['value'] == True:
                mp_values = jolokiaclient.request(type='read',
                                                  mbean=str_mbean,
                                                  attribute='Usage')
                pool_name_no_spaces = ''.join(pool_name.split())
                if mp_values['status'] == 200:
                    dict_jmx[pool_name_no_spaces + 'UsageUsed'] = round(
                        mp_values['value']['used'] / 1024.0 / 1024.0, 2)

    def get_zookeeper_info(self):
        "Getting info about zookeeper type whether it is a standalone or cluster"
        try:
            p1 = subprocess.Popen("echo srvr | nc localhost 2181",
                                  shell=True,
                                  stdout=subprocess.PIPE)
            output = p1.communicate()[0]
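            # The 'srvr' four-letter command returns a line such as
            # "Mode: standalone" (or "Mode: leader"/"Mode: follower" in a
            # replicated ensemble), which is parsed below.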
            zootype = None
            if output:
                for line in output.splitlines():
                    if "Mode" in line:
                        zootype = line.split(":")[1].strip()
            return zootype
        except Exception as e:
            collectd.error("Error in getting zookeeper info due to %s" %
                           str(e))
            return None

    def get_zookeeper_id(self):
        "If zookeeper type is a cluster, get its own id and set its appropriate"
        try:
            p2 = subprocess.Popen("cat /opt/kafka/data/zookeeper/myid",
                                  shell=True,
                                  stdout=subprocess.PIPE)
            output = p2.communicate()[0]
            if output:
                return output.strip()
            return None
        except Exception as e:
            collectd.error("Error in getting zookeeper info due to %s" %
                           str(e))
            return None

    def add_zookeeper_parameters(self, jolokiaClient, dict_jmx):
        "Getting info about zookeeper type whether it is a standalone or cluster"
        zookpertype = self.get_zookeeper_info()
        if zookpertype == "standalone":
            zookper = jolokiaClient.request(
                type='read',
                mbean='org.apache.ZooKeeperService:name0=StandaloneServer_port'
                + self.port)
            mbean_memory = 'org.apache.ZooKeeperService:name0=StandaloneServer_port%s,name1=InMemoryDataTree' % self.port
        else:
            zookpertype = zookpertype[0].upper() + zookpertype[1:]
            # If zookeeper type is a cluster, get its own id and set its appropriate
            zookperId = self.get_zookeeper_id()
            bean = 'org.apache.ZooKeeperService:name0=ReplicatedServer_id' + zookperId + ',name1=replica.' + zookperId + ',name2=' + zookpertype
            zookper = jolokiaClient.request(type='read', mbean=bean)
            mbean_memory = 'org.apache.ZooKeeperService:name0=ReplicatedServer_id' + zookperId + ',name1=replica.' + zookperId + \
                           ',name2=' + zookpertype + ',name3=InMemoryDataTree'
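            # e.g. with myid '1' on the leader, mbean_memory above resolves to
            # 'org.apache.ZooKeeperService:name0=ReplicatedServer_id1,name1=replica.1,name2=Leader,name3=InMemoryDataTree'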

        if zookper['status'] == 200:
            dict_jmx['avgRequestLatency'] = round(
                zookper['value']['AvgRequestLatency'] * 0.001, 2)
            dict_jmx['maxSessionTimeout'] = round(
                zookper['value']['MaxSessionTimeout'] * 0.001, 2)
            dict_jmx['minSessionTimeout'] = round(
                zookper['value']['MinSessionTimeout'] * 0.001, 2)
            dict_jmx['maxClientCnxnsPerHost'] = zookper['value'][
                'MaxClientCnxnsPerHost']
            dict_jmx['numAliveConnections'] = zookper['value'][
                'NumAliveConnections']
            dict_jmx['outstandingRequests'] = zookper['value'][
                'OutstandingRequests']
            dict_jmx['packetsReceived'] = zookper['value']['PacketsReceived']
            dict_jmx['packetsSent'] = zookper['value']['PacketsSent']
            dict_jmx['zookeeperVersion'] = ((
                zookper['value']['Version']).split(","))[0]
        zookper_count = jolokiaClient.request(type='read', mbean=mbean_memory)
        if zookper_count['status'] == 200:
            dict_jmx['nodeCount'] = zookper_count['value']['NodeCount']
            dict_jmx['watchCount'] = zookper_count['value']['WatchCount']
        zookper_ephemerals = jolokiaClient.request(type='exec',
                                                   mbean=mbean_memory,
                                                   operation='countEphemerals')
        if zookper_ephemerals['status'] == 200:
            dict_jmx['countEphemerals'] = zookper_ephemerals['value']
        zookper_datasize = jolokiaClient.request(type='exec',
                                                 mbean=mbean_memory,
                                                 operation='approximateDataSize')
        if zookper_datasize['status'] == 200:
            dict_jmx['approximateDataSize'] = round(
                zookper_datasize['value'] / 1024.0 / 1024.0, 2)

    def add_operating_system_parameters(self, jolokiaclient, dict_jmx):
        """Add operating system related jmx stats"""
        ops = jolokiaclient.request(type='read',
                                    mbean='java.lang:type=OperatingSystem')
        if ops['status'] == 200:
            dict_jmx['osArchitecture'] = ops['value']['Arch']
            dict_jmx['availableProcessors'] = ops['value'][
                'AvailableProcessors']
            self.handle_neg_bytes(ops['value']['CommittedVirtualMemorySize'],
                                  'committedVirtualMemorySize', dict_jmx)
            dict_jmx['freePhysicalMemorySize'] = round(
                ops['value']['FreePhysicalMemorySize'] / 1024.0 / 1024.0, 2)
            dict_jmx['freeSwapSpaceSize'] = round(
                ops['value']['FreeSwapSpaceSize'] / 1024.0 / 1024.0, 2)
            dict_jmx['maxFileDescriptors'] = ops['value'][
                'MaxFileDescriptorCount']
            dict_jmx['osName'] = ops['value']['Name']
            dict_jmx['openFileDescriptors'] = ops['value'][
                'OpenFileDescriptorCount']
            dict_jmx['processCpuLoad'] = ops['value']['ProcessCpuLoad']
            pcputime = ops['value']['ProcessCpuTime']
            if pcputime >= 0:
                pcputime = round(pcputime / 1000000000.0, 2)
            dict_jmx['processCpuTime'] = pcputime
            dict_jmx['totalPhysicalMemorySize'] = round(
                ops['value']['TotalPhysicalMemorySize'] / 1024.0 / 1024.0, 2)
            dict_jmx['totalSwapSpaceSize'] = round(
                ops['value']['TotalSwapSpaceSize'] / 1024.0 / 1024.0, 2)
            dict_jmx['osVersion'] = ops['value']['Version']
            dict_jmx['systemCpuLoad'] = ops['value']['SystemCpuLoad']
            dict_jmx['systemLoadAverage'] = ops['value']['SystemLoadAverage']

    def add_nio_parameters(self, jolokiaclient, dict_jmx):
        """Add network related jmx stats"""
        nio = jolokiaclient.request(type='read',
                                    mbean='java.nio:type=BufferPool,*',
                                    attribute='Name')
        bufferpool_names = []
        if nio['status'] == 200:
            for _, value in nio['value'].items():
                bufferpool_names.append(value['Name'])

        for poolname in bufferpool_names:
            str_mbean = 'java.nio:type=BufferPool,name=' + poolname
            poolinfo = jolokiaclient.request(type='read', mbean=str_mbean)
            if poolinfo['status'] == 200:
                dict_jmx[poolname +
                         'BufferPoolCount'] = poolinfo['value']['Count']
                self.handle_neg_bytes(poolinfo['value']['MemoryUsed'],
                                      poolname + 'BufferPoolMemoryUsed',
                                      dict_jmx)
                self.handle_neg_bytes(poolinfo['value']['TotalCapacity'],
                                      poolname + 'BufferPoolTotalCapacity',
                                      dict_jmx)

    def add_compilation_parameters(self, jolokiaclient, dict_jmx):
        """Add compilation related jmx stats"""
        compilation = jolokiaclient.request(type='read',
                                            mbean='java.lang:type=Compilation')
        if compilation['status'] == 200:
            dict_jmx['compilerName'] = compilation['value']['Name']
            dict_jmx['totalCompilationTime'] = round(
                compilation['value']['TotalCompilationTime'] * 0.001, 2)

    def add_classloading_parameters(self, jolokiaclient, dict_jmx):
        """Add classloading related jmx stats"""
        classloading = jolokiaclient.request(
            type='read', mbean='java.lang:type=ClassLoading')
        if classloading['status'] == 200:
            dict_jmx['unloadedClass'] = classloading['value'][
                'UnloadedClassCount']
            dict_jmx['loadedClass'] = classloading['value']['LoadedClassCount']
            dict_jmx['totalLoadedClass'] = classloading['value'][
                'TotalLoadedClassCount']

    def add_gc_parameters(self, jolokiaclient, dict_jmx):
        """Add garbage collector related jmx stats"""
        def memory_gc_usage(self, mempool_gc, key, gc_name, dict_jmx):
            for name, values in mempool_gc.items():
                if name in ['G1 Eden Space', 'G1 Old Gen']:
                    mp_name = ''.join(name.split())
                    self.handle_neg_bytes(values['init'],
                                          gc_name + key + mp_name + 'Init',
                                          dict_jmx)
                    self.handle_neg_bytes(values['max'],
                                          gc_name + key + mp_name + 'Max',
                                          dict_jmx)
                    dict_jmx[gc_name + key + mp_name + 'Used'] = round(
                        values['used'] / 1024.0 / 1024.0, 2)
                    dict_jmx[gc_name + key + mp_name + 'Committed'] = round(
                        values['committed'] / 1024.0 / 1024.0, 2)

        gc_names = self.get_gc_names(jolokiaclient)
        for gc_name in gc_names:
            str_mbean = 'java.lang:type=GarbageCollector,name=' + gc_name
            if_valid = jolokiaclient.request(type='read',
                                             mbean=str_mbean,
                                             attribute='Valid')
            if if_valid['status'] == 200 and if_valid['value'] == True:
                str_attribute = 'CollectionTime,CollectionCount,LastGcInfo'
                gc_values = jolokiaclient.request(type='read',
                                                  mbean=str_mbean,
                                                  attribute=str_attribute)
                gc_name_no_spaces = ''.join(gc_name.split())
                if gc_values['status'] == 200:
                    dict_jmx[gc_name_no_spaces + 'CollectionTime'] = round(
                        gc_values['value']['CollectionTime'] * 0.001, 2)
                    dict_jmx[gc_name_no_spaces +
                             'CollectionCount'] = gc_values['value'][
                                 'CollectionCount']
                    if gc_values['value']['LastGcInfo']:
                        dict_jmx[gc_name_no_spaces +
                                 'GcThreadCount'] = gc_values['value'][
                                     'LastGcInfo']['GcThreadCount']
                        dict_jmx[gc_name_no_spaces + 'StartTime'] = round(
                            gc_values['value']['LastGcInfo']['startTime'] *
                            0.001, 2)
                        dict_jmx[gc_name_no_spaces + 'EndTime'] = round(
                            gc_values['value']['LastGcInfo']['endTime'] *
                            0.001, 2)
                        dict_jmx[gc_name_no_spaces + 'Duration'] = round(
                            gc_values['value']['LastGcInfo']['duration'] *
                            0.001, 2)
                        mem_aftergc = gc_values['value']['LastGcInfo'][
                            'memoryUsageAfterGc']
                        memory_gc_usage(self, mem_aftergc, 'MemUsageAfGc',
                                        gc_name_no_spaces, dict_jmx)
                        mem_beforegc = gc_values['value']['LastGcInfo'][
                            'memoryUsageBeforeGc']
                        memory_gc_usage(self, mem_beforegc, 'MemUsageBfGc',
                                        gc_name_no_spaces, dict_jmx)

    def add_threading_parameters(self, jolokiaclient, dict_jmx):
        """Add thread related jmx stats"""
        mbean_threading = 'java.lang:type=Threading'
        thread_json = jolokiaclient.request(type='read', mbean=mbean_threading)
        if thread_json['status'] == 200:
            dict_jmx['threads'] = thread_json['value']['ThreadCount']
            dict_jmx['peakThreads'] = thread_json['value']['PeakThreadCount']
            dict_jmx['daemonThreads'] = thread_json['value'][
                'DaemonThreadCount']
            dict_jmx['totalStartedThreads'] = thread_json['value'][
                'TotalStartedThreadCount']
            if thread_json['value']['CurrentThreadCpuTimeSupported']:
                dict_jmx['currentThreadCpuTime'] = round(
                    thread_json['value']['CurrentThreadCpuTime'] /
                    1000000000.0, 2)
                dict_jmx['currentThreadUserTime'] = round(
                    thread_json['value']['CurrentThreadUserTime'] /
                    1000000000.0, 2)

    def add_memory_parameters(self, jolokiaclient, dict_jmx):
        """Add memory related jmx stats"""
        memory_json = jolokiaclient.request(type='read',
                                            mbean='java.lang:type=Memory')
        if memory_json['status'] == 200:
            heap = memory_json['value']['HeapMemoryUsage']
            self.handle_neg_bytes(heap['init'], 'heapMemoryUsageInit',
                                  dict_jmx)
            self.handle_neg_bytes(heap['max'], 'heapMemoryUsageMax', dict_jmx)
            dict_jmx['heapMemoryUsageUsed'] = round(
                heap['used'] / 1024.0 / 1024.0, 2)
            dict_jmx['heapMemoryUsageCommitted'] = round(
                heap['committed'] / 1024.0 / 1024.0, 2)

            non_heap = memory_json['value']['NonHeapMemoryUsage']
            self.handle_neg_bytes(non_heap['init'], 'nonHeapMemoryUsageInit',
                                  dict_jmx)
            self.handle_neg_bytes(non_heap['max'], 'nonHeapMemoryUsageMax',
                                  dict_jmx)
            dict_jmx['nonHeapMemoryUsageUsed'] = round(
                non_heap['used'] / 1024.0 / 1024.0, 2)
            dict_jmx['nonHeapMemoryUsageCommitted'] = round(
                non_heap['committed'] / 1024.0 / 1024.0, 2)
            dict_jmx['objectPendingFinalization'] = memory_json['value'][
                'ObjectPendingFinalizationCount']

    def add_memory_pool_parameters(self, jolokiaclient, dict_jmx):
        """Add memory pool related jmx stats"""
        mp_names = self.get_memory_pool_names(jolokiaclient)
        for pool_name in mp_names:
            str_mbean = 'java.lang:type=MemoryPool,name=' + pool_name
            if_valid = jolokiaclient.request(type='read',
                                             mbean=str_mbean,
                                             attribute='Valid')
            if if_valid['status'] == 200 and if_valid['value'] == True:
                str_attribute = 'CollectionUsage,PeakUsage,Type,Usage,CollectionUsageThresholdSupported,UsageThresholdSupported'
                mp_values = jolokiaclient.request(type='read',
                                                  mbean=str_mbean,
                                                  attribute=str_attribute)
                pool_name_no_spaces = ''.join(pool_name.split())
                if mp_values['status'] == 200:
                    coll_usage = mp_values['value']['CollectionUsage']
                    if coll_usage:
                        self.handle_neg_bytes(
                            coll_usage['max'],
                            pool_name_no_spaces + 'CollectionUsageMax',
                            dict_jmx)
                        self.handle_neg_bytes(
                            coll_usage['init'],
                            pool_name_no_spaces + 'CollectionUsageInit',
                            dict_jmx)
                        dict_jmx[pool_name_no_spaces +
                                 'CollectionUsageUsed'] = round(
                                     coll_usage['used'] / 1024.0 / 1024.0, 2)
                        dict_jmx[pool_name_no_spaces +
                                 'CollectionUsageCommitted'] = round(
                                     coll_usage['committed'] / 1024.0 / 1024.0,
                                     2)
                    usage = mp_values['value']['Usage']
                    self.handle_neg_bytes(usage['max'],
                                          pool_name_no_spaces + 'UsageMax',
                                          dict_jmx)
                    self.handle_neg_bytes(usage['init'],
                                          pool_name_no_spaces + 'UsageInit',
                                          dict_jmx)
                    dict_jmx[pool_name_no_spaces + 'UsageUsed'] = round(
                        usage['used'] / 1024.0 / 1024.0, 2)
                    dict_jmx[pool_name_no_spaces + 'UsageCommitted'] = round(
                        usage['committed'] / 1024.0 / 1024.0, 2)
                    peak_usage = mp_values['value']['PeakUsage']
                    self.handle_neg_bytes(peak_usage['max'],
                                          pool_name_no_spaces + 'PeakUsageMax',
                                          dict_jmx)
                    self.handle_neg_bytes(
                        peak_usage['init'],
                        pool_name_no_spaces + 'PeakUsageInit', dict_jmx)
                    dict_jmx[pool_name_no_spaces + 'PeakUsageUsed'] = round(
                        peak_usage['used'] / 1024.0 / 1024.0, 2)
                    dict_jmx[pool_name_no_spaces +
                             'PeakUsageCommitted'] = round(
                                 peak_usage['committed'] / 1024.0 / 1024.0, 2)
                    if mp_values['value']['CollectionUsageThresholdSupported']:
                        coll_attr = 'CollectionUsageThreshold,CollectionUsageThresholdCount,CollectionUsageThresholdExceeded'
                        coll_threshold = jolokiaclient.request(
                            type='read', mbean=str_mbean, attribute=coll_attr)
                        if coll_threshold['status'] == 200:
                            dict_jmx[pool_name_no_spaces +
                                     'CollectionUsageThreshold'] = round(
                                         coll_threshold['value']
                                         ['CollectionUsageThreshold'] /
                                         1024.0 / 1024.0, 2)
                            dict_jmx[
                                pool_name_no_spaces +
                                'CollectionUsageThresholdCount'] = coll_threshold[
                                    'value']['CollectionUsageThresholdCount']
                            dict_jmx[
                                pool_name_no_spaces +
                                'CollectionUsageThresholdExceeded'] = coll_threshold[
                                    'value'][
                                        'CollectionUsageThresholdExceeded']
                    if mp_values['value']['UsageThresholdSupported']:
                        usage_attr = 'UsageThreshold,UsageThresholdCount,UsageThresholdExceeded'
                        usage_threshold = jolokiaclient.request(
                            type='read', mbean=str_mbean, attribute=usage_attr)
                        if usage_threshold['status'] == 200:
                            dict_jmx[pool_name_no_spaces +
                                     'UsageThreshold'] = round(
                                         usage_threshold['value']
                                         ['UsageThreshold'] / 1024.0 / 1024.0,
                                         2)
                            dict_jmx[pool_name_no_spaces +
                                     'UsageThresholdCount'] = usage_threshold[
                                         'value']['UsageThresholdCount']
                            dict_jmx[
                                pool_name_no_spaces +
                                'UsageThresholdExceeded'] = usage_threshold[
                                    'value']['UsageThresholdExceeded']

    def handle_neg_bytes(self, value, resultkey, dict_jmx):
        """Condition for byte keys whose return value may be -1 if not supported."""
        if value == -1:
            dict_jmx[resultkey] = value
        else:
            dict_jmx[resultkey] = round(value / 1024.0 / 1024.0, 2)

    def add_common_params(self, doc, dict_jmx):
        """Adds TIMESTAMP, PLUGIN, PLUGITYPE to dictionary."""
        timestamp = int(round(time.time() * 1000))
        dict_jmx[TIMESTAMP] = timestamp
        dict_jmx[PLUGIN] = ZOOK_JMX
        dict_jmx[PLUGINTYPE] = doc
        dict_jmx[ACTUALPLUGINTYPE] = ZOOK_JMX
        #dict_jmx[PLUGIN_INS] = doc
        collectd.info(
            "Plugin zookeeperjmx: Added common parameters successfully for %s doctype"
            % doc)

    def get_pid_jmx_stats(self, pid, port, output):
        """Call get_jmx_parameters function for each doc_type and add dict to queue"""
        jolokiaclient = self.jclient.get_jolokia_inst(port)
        for doc in ZOOK_DOCS:
            try:
                dict_jmx = {}
                self.get_jmx_parameters(jolokiaclient, doc, dict_jmx)
                if not dict_jmx:
                    raise ValueError("No data found")

                collectd.info(
                    "Plugin zookeeperjmx: Added %s doctype information successfully for pid %s"
                    % (doc, pid))
                self.add_common_params(doc, dict_jmx)
                output.put((pid, doc, dict_jmx))
            except Exception as err:
                collectd.error(
                    "Plugin zookeeperjmx: Error in collecting stats of %s doctype: %s"
                    % (doc, str(err)))

    def run_pid_process(self, list_pid):
        """Spawn process for each pid"""
        procs = []
        output = multiprocessing.Queue()
        for pid in list_pid:
            port = self.jclient.get_jolokia_port(pid)
            if port and self.jclient.connection_available(port):
                proc = multiprocessing.Process(target=self.get_pid_jmx_stats,
                                               args=(pid, port, output))
                procs.append(proc)
                proc.start()

        for proc in procs:
            proc.join()


#       for p in procs:
#          collectd.debug("%s, %s" % (p, p.is_alive()))
        return procs, output

    def collect_jmx_data(self):
        """Collects stats and spawns process for each pids."""
        list_pid = self.jclient.get_pid()
        if not list_pid:
            collectd.error("Plugin zookeeperjmx: No %s processes are running" %
                           self.process)
            return

        procs, output = self.run_pid_process(list_pid)
        for _ in procs:
            for _ in ZOOK_DOCS:
                try:
                    pid, doc_name, doc_result = output.get_nowait()
                except Queue.Empty:
                    collectd.error(
                        "Failed to send one or more doctype document to collectd"
                    )
                    continue
                # Dispatch only the documentsTypes requested in the conf
                if doc_name in self.documentsTypes:
                    if doc_name == "zookeeperStats":
                        self.add_rate_dispatch(pid, doc_name, doc_result)
                    else:
                        self.dispatch_data(doc_name, doc_result)
        output.close()

    def dispatch_data(self, doc_name, result):
        """Dispatch data to collectd."""
        if doc_name == "zookeeperStats":
            for item in ["packetsSent", "packetsReceived"]:
                try:
                    del result[item]
                except KeyError:
                    pass

        collectd.info(
            "Plugin zookeeperjmx: Succesfully sent %s doctype to collectd." %
            doc_name)
        collectd.debug("Plugin zookeeperjmx: Values dispatched =%s" %
                       json.dumps(result))
        utils.dispatch(result)

    def read_temp(self):
        """Collectd first calls register_read. At that time default interval is taken,
        hence temporary function is made to call, the read callback is unregistered
        and read() is called again with interval obtained from conf by register_config callback."""
        collectd.unregister_read(self.read_temp)
        collectd.register_read(self.collect_jmx_data,
                               interval=int(self.interval))
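
# A minimal wiring sketch, not part of the original example: how a plugin object
# like the one above is typically registered with collectd. The OBJ name is an
# assumed placeholder; config() handles the module's configuration block and
# read_temp() re-registers collect_jmx_data() with the interval from the conf.
OBJ = JmxStat()
collectd.register_config(OBJ.config)
collectd.register_read(OBJ.read_temp)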