    def onSuccess(self, results, config):
        # Create a map of ref to datasource.
        datasources = dict(
            (x.params['xenapi_ref'], x) for x in config.datasources)

        data = self.new_data()

        for ref, properties in results.iteritems():
            datasource = datasources.get(ref)
            if not datasource:
                # We're not monitoring whatever this thing is. Skip it.
                continue

            points = dict((x.path, x) for x in datasource.points)

            for path, point in points.items():
                value = properties.get(path)

                if value is None:
                    continue

                if point.rpn:
                    try:
                        value = rpneval(value, point.rpn)
                    except Exception:
                        LOG.exception('Failed to evaluate RPN: %s', point.rpn)
                        continue

                data['values'][datasource.component][point.id] = (value, 'N')

                # Prune points so we know what's missing.
                del points[path]

            if points:
                LOG.debug(
                    "missing values for %s:%s:%s %s",
                    config.id,
                    datasource.component,
                    datasource.datasource,
                    points.keys())

            # Prune datasources so we know what's missing.
            del datasources[ref]

        if datasources:
            LOG.debug(
                "missing XenAPI data for %s:%s %s",
                config.id,
                config.datasources[0].params['xenapi_classname'],
                datasources.keys())

        LOG.debug(
            'success for %s XenAPI %s',
            config.id,
            config.datasources[0].params['xenapi_classname'])

        data['events'].append(get_event(config, 'successful collection', 0))

        return data
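
In the callback above, each raw property value may be passed through a datapoint's RPN (reverse Polish notation) expression via rpneval before being stored as (value, 'N'). A minimal sketch of what such an rpneval(value, rpn) call is expected to do, assuming a comma-separated RPN string; this is illustrative only, not the Zenoss implementation, which supports more operators and error handling:

def simple_rpneval(value, rpn):
    """Apply a comma-separated RPN expression to a seed value (illustrative)."""
    stack = [float(value)]
    ops = {
        '+': lambda a, b: a + b,
        '-': lambda a, b: a - b,
        '*': lambda a, b: a * b,
        '/': lambda a, b: a / b,
    }
    for token in rpn.split(','):
        if token in ops:
            b = stack.pop()
            a = stack.pop()
            stack.append(ops[token](a, b))
        else:
            stack.append(float(token))
    return stack.pop()

# e.g. simple_rpneval(2048, '1024,/') -> 2.0 (bytes to kibibytes)
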
Example #2
    def getGraphValue(self, context, getfunc):
        """When graphing, use this so that RPN conversions are accounted for."""
        val = getfunc(context)
        if val is None or len(self.dsnames) == 0:
            return
        dp = self.getRRDDataPoint(self.dsnames[0])
        if dp and dp.rpn:
            # When VDEF does full rpn:
            # val = "%s,%s" % (val, dp.rpn)
            val = rpneval(val, dp.rpn)
        return val
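
The commented-out lines record an alternative design: instead of evaluating the RPN in Python, emit "value,rpn" and let rrdtool's VDEF apply the full RPN at graph time. A small illustration of the two forms, using a hypothetical datapoint whose rpn is '1024,*':

val = 512
rpn = '1024,*'                     # hypothetical datapoint RPN

vdef_form = "%s,%s" % (val, rpn)   # deferred to rrdtool: "512,1024,*"
local_form = 512 * 1024            # what rpneval(val, rpn) returns here: 524288
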
Example #3
    def onSuccess(self, results, config):
        # Create a map of ref to datasource.
        datasources = dict(
            (x.params['xenapi_ref'], x) for x in config.datasources)

        data = self.new_data()

        for ref, properties in results.iteritems():
            datasource = datasources.get(ref)
            if not datasource:
                # We're not monitoring whatever this thing is. Skip it.
                continue

            points = dict((x.path, x) for x in datasource.points)

            for path, point in points.items():
                value = properties.get(path)

                if value is None:
                    continue

                if point.rpn:
                    try:
                        value = rpneval(value, point.rpn)
                    except Exception:
                        LOG.exception('Failed to evaluate RPN: %s', point.rpn)
                        continue

                data['values'][datasource.component][point.id] = (value, 'N')

                # Prune points so we know what's missing.
                del points[path]

            if points:
                LOG.debug("missing values for %s:%s:%s %s", config.id,
                          datasource.component, datasource.datasource,
                          points.keys())

            # Prune datasources so we know what's missing.
            del datasources[ref]

        if datasources:
            LOG.debug("missing XenAPI data for %s:%s %s", config.id,
                      config.datasources[0].params['xenapi_classname'],
                      datasources.keys())

        LOG.debug('success for %s XenAPI %s', config.id,
                  config.datasources[0].params['xenapi_classname'])

        data['events'].append(get_event(config, 'successful collection', 0))

        return data
Example #4
    def collect_xen(self, config, ds0, client):
        rrd_tree = collections.defaultdict(
            lambda: collections.defaultdict(
                lambda: collections.defaultdict(
                    list)))

        for address in ds0.xenserver_addresses:

            # We must check what time the host thinks it is so we can be
            # accurate and efficient about what and how much data we
            # request.
            server_time = None

            time_check_result = yield client.rrd_updates(address, start=1e11)

            for _, end in etree.iterparse(StringIO(time_check_result), tag='end'):
                server_time = end.text

            if not server_time:
                continue

            # Initialize a map of column indexes to their corresponding
            # (type, uuid, label) entry fields. To be used to match
            # data to datapoints.
            index_entries = None

            start = int(server_time) - ds0.cycletime - 5
            result = yield client.rrd_updates(
                address, start=start, cf='AVERAGE', host=True)

            for _, element in etree.iterparse(StringIO(result)):
                if element.tag == 'meta':
                    step = int(element.findtext('step'))

                    if ds0.cycletime % step:
                        LOG.warn(
                            "%s:%s RRD interval (%s) not evenly divisible into datasource cycle (%s). Skipping collection",
                            config.id, address, step, ds0.cycletime)

                        continue

                    # Map column indexes to (type, uuid, label) entry fields.
                    index_entries = dict(
                        (i, x.text.split(':')[1:]) for i, x in enumerate(
                            element.iter('entry')))

                elif element.tag == 'row':
                    for i, v in enumerate(element.iter('v')):
                        try:
                            value = float(v.text)
                        except (TypeError, ValueError):
                            continue

                        if not math.isnan(value):
                            etype, euuid, elabel = index_entries[i]
                            rrd_tree[etype][euuid][elabel].append(value)

        data = self.new_data()
        missing_data = collections.defaultdict(list)

        for datasource in config.datasources:
            prefix = datasource.params.get('prefix')
            if not prefix:
                continue

            datasource_data = rrd_tree[prefix[0]][prefix[1]]
            for datapoint in datasource.points:
                datapoint_data = []
                for elabel in datasource_data.keys():
                    if elabel == prefix[2]:
                        datapoint_data.append(datasource_data[elabel])

                    elif elabel.startswith(prefix[2]):
                        remainder = elabel.replace(prefix[2], '', 1)
                        if re.search(datapoint.pattern, remainder):
                            datapoint_data.append(datasource_data[elabel])

                if datapoint_data:
                    value = aggregate_values(datapoint, datapoint_data)

                    if datapoint.rpn:
                        try:
                            value = rpneval(value, datapoint.rpn)
                        except Exception:
                            LOG.exception(
                                'Failed to evaluate RPN: %s',
                                datapoint.rpn)

                            continue

                    data['values'][datasource.component][datapoint.id] = (
                        value, 'N')
                else:
                    missing_data[datasource.component].append(datapoint.id)

        for component, datapoint_ids in missing_data.items():
            LOG.debug(
                "missing RRD data for %s:%s %s",
                config.id,
                component,
                datapoint_ids)

        returnValue(data)
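
In the loop above, each datapoint is paired with every RRD entry label that either equals the datasource's label prefix (prefix[2]) or extends it with a suffix matching datapoint.pattern; the matched series are then combined by aggregate_values (a ZenPack helper not reproduced here). A stand-alone sketch of that selection step with invented labels and a hypothetical 'cpu' prefix:

import re

datasource_data = {
    'cpu0': [0.12, 0.10],
    'cpu1': [0.30, 0.28],
    'memory_total_kib': [4194304.0],
}

prefix_label = 'cpu'      # hypothetical prefix[2]
pattern = r'^\d+$'        # hypothetical datapoint.pattern: per-CPU suffixes

datapoint_data = []
for elabel, values in datasource_data.items():
    if elabel == prefix_label:
        datapoint_data.append(values)
    elif elabel.startswith(prefix_label):
        remainder = elabel.replace(prefix_label, '', 1)
        if re.search(pattern, remainder):
            datapoint_data.append(values)

# datapoint_data now holds the cpu0 and cpu1 series; memory_total_kib is excluded.
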
Example #5
    def onSuccess(self, results, config):
        data = self.new_data()
        root = etree.Element('root')

        # Find and save the cluster tree
        for result in results[0]:
            tree = etree.parse(StringIO(result))
            root.append(tree.getroot())
            if tree.getroot().tag == 'clusters':
                cluster_tree = tree

        for result in results[0]:
            result_tree = etree.parse(StringIO(result))
            if result_tree.getroot().tag == 'storage_domains':
                count = len(result_tree.getroot().getchildren())
                addCount(root, [config.id], 'storagedomainCount', count)
            elif result_tree.getroot().tag == 'clusters':
                count = len(result_tree.getroot().getchildren())
                addCount(root, [config.id], 'clusterCount', count)
                data_centers = result_tree.xpath('//data_center/@id')
                addCount(root, data_centers, 'clusterCount')
            elif result_tree.getroot().tag == 'data_centers':
                count = len(result_tree.getroot().getchildren())
                addCount(root, [config.id], 'datacenterCount', count)
            elif result_tree.getroot().tag == 'hosts':
                count = len(result_tree.getroot().getchildren())
                addCount(root, [config.id], 'hostCount', count)
                clusters = result_tree.xpath('//cluster/@id')
                addCount(root, clusters, 'hostCount')

                for cluster in clusters:
                    datacenter = cluster_tree.xpath('//cluster[@id="%s"]/data_center/@id' % cluster)
                    addCount(root, datacenter, 'hostCount', 1)

            elif result_tree.getroot().tag == 'vms':
                count = len(result_tree.getroot().getchildren())
                addCount(root, [config.id], 'vmCount', count)
                clusters = result_tree.xpath('//cluster/@id')
                addCount(root, clusters, 'vmCount')

                hosts = result_tree.xpath('//host/@id')
                addCount(root, hosts, 'vmCount')

                for cluster in clusters:
                    datacenter = cluster_tree.xpath('//cluster[@id="%s"]/data_center/@id' % cluster)
                    addCount(root, datacenter, 'vmCount', 1)

        for result_stat in results[1]:
            root.append(etree.parse(StringIO(result_stat)).getroot())
            # This is the general format ...
            #root.xpath('//*[*/@id="368bf44e-7d29-483a-8c2e-9a79962b1e48"][name/text()="disk.read.latency"]/values/value/datum/text()')[0]

        for ds in config.datasources:
            if ds.component:
                component_id = prepId(ds.component)
            else:
                component_id = None

            for point in ds.points:
                # Handle percentage custom datapoints
                if "ovirt:" in point.xpath and point.rpn and 'xpath' in ds.params:
                    resultsDict = {}
                    try:
                        xpath = talesEvalStr(ds.params['xpath'], context=None, extra=ds.params['context'])
                        statdata = [(x.xpath('name/text()'), x.xpath('values/value/datum/text()')) for x in root.xpath(xpath) if x.tag == 'statistic']
                        for item, val in statdata:
                            resultsDict[item[0]] = val[0]
                        rpnstring = talesEvalStr(point.rpn, context=None, extra={'here': resultsDict})
                        results = rpneval(rpnstring.split(',', 1)[0], rpnstring.split(',', 1)[1])
                        data['values'][component_id][point.id] = (results, 'N')
                    except Exception:
                        pass

                # Do the rest using xpath
                elif 'xpath' in ds.params:
                    try:
                        # Some points may not exist in the xml, skip those...
                        xpath = talesEvalStr(ds.params['xpath'], context=None, extra=ds.params['context'])

                        results = root.xpath(xpath+point.xpath)
                        if 'Count' in point.xpath and not results:
                            results = ['0']
                        results = results[0]

                        # If rpn is defined, lets calculate the new results.
                        if point.rpn:
                            results = rpneval(
                                results, talesEvalStr(point.rpn, context=None, extra=ds.params['context']))
                        data['values'][component_id][point.id] = (results, 'N')
                    except Exception:
                        pass
        data['events'].append({
            'eventClassKey': 'oVirtCollectionSuccess',
            'eventKey': eventKey(config),
            'summary': 'ovirt: successful collection',
            'eventClass': '/Status/Perf/',
            'device': config.id,
            'severity': 0,
        })
        return data
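
In the percentage branch above, the datapoint's rpn is itself a TALES template; after talesEvalStr expands it against the per-component statistics dictionary, the first comma-separated token is used as the seed value for rpneval and the remainder as the RPN proper. A hedged illustration with invented statistic values (the TALES expansion is only mimicked here with string formatting):

resultsDict = {'memory.used': '2147483648', 'memory.installed': '4294967296'}

# A hypothetical datapoint rpn such as
#   "${here/memory.used},${here/memory.installed},/,100,*"
# would expand to the string built below.
rpnstring = '%s,%s,/,100,*' % (resultsDict['memory.used'],
                               resultsDict['memory.installed'])

seed, rpn = rpnstring.split(',', 1)
# seed == '2147483648', rpn == '4294967296,/,100,*'
# rpneval(seed, rpn) would then yield 50.0 (percent of memory used).
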
Example #6
    def collect_xen(self, config, ds0, client):
        rrd_tree = collections.defaultdict(lambda: collections.defaultdict(
            lambda: collections.defaultdict(list)))

        for address in ds0.xenserver_addresses:

            # We must check what time the host thinks it is so we can be
            # accurate and efficient about what and how much data we
            # request.
            server_time = None

            time_check_result = yield client.rrd_updates(address, start=1e11)

            for _, end in etree.iterparse(StringIO(time_check_result),
                                          tag='end'):
                server_time = end.text

            if not server_time:
                continue

            # Initialize a map of column indexes to their corresponding
            # (type, uuid, label) entry fields. To be used to match
            # data to datapoints.
            index_entries = None

            start = int(server_time) - ds0.cycletime - 5
            result = yield client.rrd_updates(address,
                                              start=start,
                                              cf='AVERAGE',
                                              host=True)

            for _, element in etree.iterparse(StringIO(result)):
                if element.tag == 'meta':
                    step = int(element.findtext('step'))

                    if ds0.cycletime % step:
                        LOG.warn(
                            "%s:%s RRD interval (%s) not evenly divisible into datasource cycle (%s). Skipping collection",
                            config.id, address, step, ds0.cycletime)

                        continue

                    # Map column indexes to (type, uuid, label) entry fields.
                    index_entries = dict(
                        (i, x.text.split(':')[1:])
                        for i, x in enumerate(element.iter('entry')))

                elif element.tag == 'row':
                    for i, v in enumerate(element.iter('v')):
                        try:
                            value = float(v.text)
                        except (TypeError, ValueError):
                            continue

                        if not math.isnan(value):
                            etype, euuid, elabel = index_entries[i]
                            rrd_tree[etype][euuid][elabel].append(value)

        data = self.new_data()
        missing_data = collections.defaultdict(list)

        for datasource in config.datasources:
            prefix = datasource.params.get('prefix')
            if not prefix:
                continue

            datasource_data = rrd_tree[prefix[0]][prefix[1]]
            for datapoint in datasource.points:
                datapoint_data = []
                for elabel in datasource_data.keys():
                    if elabel == prefix[2]:
                        datapoint_data.append(datasource_data[elabel])

                    elif elabel.startswith(prefix[2]):
                        remainder = elabel.replace(prefix[2], '', 1)
                        if re.search(datapoint.pattern, remainder):
                            datapoint_data.append(datasource_data[elabel])

                if datapoint_data:
                    value = aggregate_values(datapoint, datapoint_data)

                    if datapoint.rpn:
                        try:
                            value = rpneval(value, datapoint.rpn)
                        except Exception:
                            LOG.exception('Failed to evaluate RPN: %s',
                                          datapoint.rpn)

                            continue

                    data['values'][datasource.component][datapoint.id] = (
                        value, 'N')
                else:
                    missing_data[datasource.component].append(datapoint.id)

        for component, datapoint_ids in missing_data.items():
            LOG.debug("missing RRD data for %s:%s %s", config.id, component,
                      datapoint_ids)

        returnValue(data)
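
For context, the rrd_updates response parsed in both collect_xen variants is an xport-style XML document: the meta block carries the step and one entry per column (conventionally cf:type:uuid:label), and each row carries one v element per column. A small self-contained sketch of the same column-mapping idea against an invented two-column document (the real XenServer output has many more columns plus start/end metadata):

import io
import math
from lxml import etree

XML = b"""<xport>
  <meta>
    <step>5</step>
    <entry>AVERAGE:host:abc-123:cpu0</entry>
    <entry>AVERAGE:host:abc-123:memory_free_kib</entry>
  </meta>
  <data>
    <row><t>1400000000</t><v>0.04</v><v>524288</v></row>
    <row><t>1400000005</t><v>NaN</v><v>524280</v></row>
  </data>
</xport>"""

index_entries = None
samples = []
for _, element in etree.iterparse(io.BytesIO(XML)):
    if element.tag == 'meta':
        # Map column index -> (type, uuid, label), dropping the leading cf.
        index_entries = dict(
            (i, x.text.split(':')[1:])
            for i, x in enumerate(element.iter('entry')))
    elif element.tag == 'row':
        for i, v in enumerate(element.iter('v')):
            value = float(v.text)
            if not math.isnan(value):
                samples.append((tuple(index_entries[i]), value))

# samples -> [(('host', 'abc-123', 'cpu0'), 0.04),
#             (('host', 'abc-123', 'memory_free_kib'), 524288.0), ...]
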