Example no. 1
0
    def collect(self, config, datasource, rrdcache, collectionTime):
        """
        Evaluate this datasource's configured expression against the most
        recent values of its target datapoints.

        Yields deferreds for each RRD fetch (Twisted inlineCallbacks style)
        and finishes via returnValue() with a dict:
            {'events': [event dicts], 'values': {component: {dp_name: (value, time)}}}

        The expression is only evaluated when a value was obtained for
        every target datapoint; otherwise no value is emitted this cycle.
        """
        collectedEvents = []
        collectedValues = {}
        expression = datasource.params.get('expression', None)
        debug = datasource.params.get('debug', None)
        if expression:
            # We will populate this with perf metrics and pass to eval()
            devdict = createDeviceDictionary(datasource.params['obj_attrs'])
            rrdValues = {}
            datapointDict = {}
            gotAllRRDValues = True

            for targetDatasource, targetDatapoint, targetRRA, targetAsRate in datasource.params['targetDatapoints']:
                try:
                    # Look back up to 5 collection cycles for the most
                    # recent stored value of this target datapoint.
                    value = yield rrdcache.getLastValue(targetDatasource,
                                                        targetDatapoint,
                                                        targetRRA,
                                                        targetAsRate,
                                                        datasource.cycletime*5,
                                                        datasource.params['targets'][0])

                except StandardError as ex:
                    # A fetch failure is reported but does not abort the
                    # remaining datapoints; the expression simply won't
                    # run this cycle if any value ends up missing.
                    description = dsDescription(datasource, devdict)
                    msg = "Failure before evaluation, %s" % description
                    collectedEvents.append({
                        'eventKey': 'calculatedDataSourcePlugin_result',
                        'severity': ZenEventClasses.Error if debug else ZenEventClasses.Debug,
                        'summary': msg,
                    })
                    logMethod = log.error if debug else log.debug
                    logMethod(msg + "\n%s", ex)
                    log.exception(ex)
                    continue

                # Datapoints can be specified in the following ways:
                #
                # 1. <dpname>
                # 2. <dsname>_<dpname>
                # 3. datapoint['<dpname>']
                # 4. datapoint['<dsname>_<dpname>']
                #
                # Option 1 and 3 can only be used in cases where the
                # referenced datapoint names are unique for the device
                # or component.
                #
                # Option 1 and 2 can only be used when the datapoint or
                # datasource_datapoint name are valid Python variable
                # names.
                #
                # Option 4 can only be used when there is not a
                # datapoint literally named "datapoint". This is most
                # likely the safest option if you can avoid naming your
                # datapoints "datapoint".

                if value is None:
                    gotAllRRDValues = False
                else:
                    fqdpn = '%s_%s' % (targetDatasource, targetDatapoint)

                    # Syntax 1
                    rrdValues[targetDatapoint] = value

                    # Syntax 2
                    rrdValues[fqdpn] = value

                    # Syntax 3
                    datapointDict[targetDatapoint] = value

                    # Syntax 4
                    datapointDict[fqdpn] = value

                # NOTE: a redundant "if value is not None" re-assignment of
                # rrdValues[targetDatapoint] and the fqdpn key was removed
                # here; the else branch above already sets both.

            result = None
            if gotAllRRDValues:
                devdict.update(rrdValues)
                devdict['datapoint'] = datapointDict

                description = dsDescription(datasource, devdict)

                try:
                    # devdict doubles as the eval() globals namespace so
                    # the expression can reference datapoints by name.
                    result = eval(expression, devdict)
                    if debug:
                        log.debug("Evaluation successful, result is %s for %s" % (result, description))
                except ZeroDivisionError:
                    msg = "Evaluation failed due to attempted division by zero, %s" % description
                    collectedEvents.append({
                        'eventKey': 'calculatedDataSourcePlugin_result',
                        'severity': ZenEventClasses.Error if debug else ZenEventClasses.Debug,
                        'summary': msg,
                    })
                    logMethod = log.warn if debug else log.debug
                    logMethod(msg)
                except Exception as ex:
                    # (TypeError, Exception) was redundant: TypeError is
                    # already a subclass of Exception.
                    msg = "Evaluation failed due to %s, %s" % (ex.message, description)
                    collectedEvents.append({
                        'eventKey': 'calculatedDataSourcePlugin_result',
                        'severity': ZenEventClasses.Error if debug else ZenEventClasses.Debug,
                        'summary': msg,
                    })
                    logMethod = log.exception if debug else log.debug
                    logMethod(msg + "\n%s", ex)
            else:
                log.debug("Can't get RRD values for EXPR: %s --> DS: %s" % (expression, dsKey(datasource)))

            if result is not None:
                collectedValues.setdefault(datasource.component, {})
                collectedValues[datasource.component]['_'.join((datasource.datasource, datasource.points[0].id))] = \
                    (result, collectionTime)

        returnValue({
            'events': collectedEvents,
            'values': collectedValues,
        })
Example no. 2
0
    def collect(self, config):
        """
        Collect all derived datasources for this configuration.

        Datasources are processed in topological (dependency) order so
        that a derivation of a derivation can complete within a single
        collection cycle.  Finishes via returnValue() with a dict holding
        'events', 'values' and 'maps' aggregated from every proxied
        datasource plugin.
        """
        collectedEvents = []
        collectedValues = {}
        collectedMaps = []

        datasourcesByKey = {dsKey(ds): ds for ds in config.datasources}
        # if we are able prefetch all the metrics that we can
        if hasattr(self.rrdcache, "batchFetchMetrics"):
            datasources = [datasourcesByKey.get(ds) for ds in toposort(config.datasources, datasourcesByKey) if datasourcesByKey.get(ds)]
            yield self.rrdcache.batchFetchMetrics(datasources)

        startCollectTime = time.time()
        from collections import defaultdict
        # Per-datasource-class counters, reported in the timing log line.
        sourcetypes = defaultdict(int)
        for dskey in toposort(config.datasources, datasourcesByKey):
            datasource = datasourcesByKey.get(dskey, None)
            # Only process datasources whose class maps to one of our
            # proxy plugins; anything else is a dependency owned elsewhere.
            if datasource is None or \
                    'datasourceClassName' not in datasource.params or \
                    datasource.params['datasourceClassName'] not in DerivedProxyMap:
                #Not our datasource, it's a dependency from elsewhere
                #log.warn("not using ds: %s %s %s", dskey, datasource, datasource.params.__dict__)
                continue

            collectionTime = time.time()

            # Delegate actual collection to the plugin registered for
            # this datasource class (calculated, aggregating, ...).
            dsResult = yield DerivedProxyMap[datasource.params['datasourceClassName']].collect(
                                                    config, datasource, self.rrdcache, collectionTime)

            if dsResult:
                # Data for this collection won't be written until the current task
                # is entirely complete. To allow derivations of derivations to complete in
                # a single collection cycle, we'll artificially cache the values here for
                # every possible RRA. These values may be slightly inaccurate, as we're
                # essentially always using the 'LAST' RRA.
                resultValues = dsResult.get('values', {}).get(datasource.component, {})
                if resultValues:
                    # Datapoints may be keyed either by bare id or by
                    # "<datasource>_<id>" in the result dict.
                    collectedPoints = (p for p in datasource.points
                                       if p.id in resultValues or \
                                          '_'.join((datasource.datasource, p.id)) in resultValues)

                    for datapoint in collectedPoints:
                        rrdPath = datapoint.rrdPath.rsplit('/', 1)[0]

                        # Datapoint metadata only exists in Zenoss 5.
                        if hasattr(datapoint, 'metadata'):
                            contextUUID = datapoint.metadata["contextUUID"]
                        else:
                            contextUUID = rrdPath

                        # Stored values are (value, collectionTime) tuples;
                        # take the value from whichever key form matched.
                        value = (resultValues.get(datapoint.id, None) or
                                 resultValues.get('_'.join((datasource.datasource, datapoint.id))))[0]
                        for rra in ('AVERAGE', 'MIN', 'MAX', 'LAST'):
                            self.rrdcache.put(datasource.datasource, datapoint.id, rra, rrdPath, contextUUID, value)

                #incorporate results returned from the proxied method
                collectedEvents.extend(dsResult.get('events', []))
                collectedMaps.extend(dsResult.get('maps', []))
                collectedValues.setdefault(datasource.component, {})
                collectedValues[datasource.component].update(resultValues)
                dsclassname = datasource.params['datasourceClassName']
                sourcetypes[dsclassname] += 1

        endCollectTime = time.time()
        timeTaken = endCollectTime - startCollectTime
        # Escalate the timing message to a warning for slow (>60s) cycles.
        timeLogFn = log.debug
        if timeTaken > 60.0 :
            timeLogFn = log.warn
        timeLogFn("  Took %.1f seconds to collect datasources: %s", timeTaken, sourcetypes)

        returnValue({
            'events': collectedEvents,
            'values': collectedValues,
            'maps': collectedMaps
        })
Example no. 3
0
    def collect(self, config, datasource, rrdcache, collectionTime):
        """
        Collect an aggregate value for each datapoint of this aggregating
        datasource from the last known values of its target datapoints.

        Finishes via returnValue() with a dict:
            {'events': [event dicts], 'values': {component: {dp_name: (value, time)}}}
        """
        events = []
        values = {}
        debug = datasource.params.get('debug', None)

        # Aggregate datasources only have one target datapoint config
        tgtDs, tgtDp, tgtRRA, tgtAsRate = datasource.params['targetDatapoints'][0]

        targetValues, errors = yield self.getLastValues(
            rrdcache, tgtDs, tgtDp, tgtRRA, tgtAsRate,
            datasource.cycletime, datasource.params['targets'])

        fetchLog = log.error if debug else log.debug
        for ex, msg in errors:
            fetchLog('%s: %s', msg, ex)

        # Nothing collected: optionally raise an informational event (only
        # meaningful when targets were actually configured) and bail out.
        if not targetValues:
            if datasource.params['targets']:
                msg = "No target values collected for datasource %s" % dsKey(datasource)
                events.append({
                    'summary': msg,
                    'eventKey': 'aggregatingDataSourcePlugin_novalues',
                    'severity': ZenEventClasses.Info if debug else ZenEventClasses.Debug,
                })
                noValuesLog = log.info if debug else log.debug
                noValuesLog(msg)

            returnValue({
                'events': events,
                'values': values,
            })

        for point in datasource.points:
            try:
                aggregationArgs = handleArguments(
                    datasource.params['targetArgValues'][0], point.arguments)
                aggregate, adjustedTargetValues = yield self.performAggregation(
                    point.operation, aggregationArgs, targetValues)
                if debug:
                    log.debug("Aggregate value %s calculated for datapoint %s_%s on %s:%s",
                              str(aggregate), datasource.datasource, point.id,
                              datasource.device, datasource.component)
            except Exception as ex:
                # Report the failure for this datapoint and move on to
                # the next one rather than aborting the whole datasource.
                msg = "Error calculating aggregation for %s_%s: %s" % (
                    tgtDs,
                    tgtDp,
                    ex.message
                )
                events.append({
                    'summary': msg,
                    'eventKey': 'aggregatingDataSourcePlugin_result',
                    'severity': ZenEventClasses.Error,
                })
                log.exception(msg + "\n%s", ex)
                continue

            #stash values for the threshold to put in event details
            threshold_cache[getThresholdCacheKey(datasource, point)] = adjustedTargetValues

            values.setdefault(datasource.component, {})
            dpName = '_'.join((datasource.datasource, point.id))
            values[datasource.component][dpName] = (aggregate, collectionTime)

        returnValue({
            'events': events,
            'values': values,
        })