Example no. 1
import collectd
from socket import socket

# CARBON_SERVER / CARBON_PORT are expected as module-level configuration.

def init():
    collectd.debug('initing stuff')
    global sock
    sock = socket()
    try:
        sock.connect((CARBON_SERVER, CARBON_PORT))
    except OSError:
        collectd.warn("Couldn't connect to %(server)s on port %(port)d, "
                      "is carbon-agent.py running?"
                      % {'server': CARBON_SERVER, 'port': CARBON_PORT})
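
In a collectd Python plugin an init callback like this is hooked up through the plugin's registration API; a minimal sketch, assuming the stock collectd-python module:

collectd.register_init(init)
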
Example no. 3
    def write(self, sample):
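        """Convert one collectd sample into an InfluxDB point and enqueue it.

        Numeric COUNTER/DERIVE/ABSOLUTE data sources are converted into
        per-second rates using the previously seen value; GAUGE values
        (or raw mode) are passed through unchanged.
        """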
        type_info = self._types.get(sample.type)

        if type_info is None:
            msg = 'plugin: %s unknown type %s, not listed in %s'

            collectd.info('write_influxdb: ' + msg %
                          (sample.plugin, sample.type, self._typesdb))
            return

        identifier = format_identifier(sample)
        columns = ['time']
        points = [sample.time]

        for i, (ds_name, ds_type, min_val, max_val) in enumerate(type_info):
            value = sample.values[i]
            columns.append(ds_name)

            # Rate conversion only applies to numeric COUNTER/DERIVE/ABSOLUTE
            # values; GAUGE values and raw mode pass through unchanged.
            if (not isinstance(value, (float, int)) or ds_type == "GAUGE"
                    or self._raw_values):
                continue

            # Remember this reading so the next sample can be diffed against it.
            metric_identifier = identifier + ds_name
            last = self._last_sample.get(metric_identifier)
            curr_time = time.monotonic()
            self._last_sample[metric_identifier] = (curr_time, value)
            if not last:
                # First sample for this metric: nothing to compute a rate from.
                continue

            old_time, old_value = last
            # Determine time between datapoints
            interval = curr_time - old_time
            if interval < 1:
                interval = 1

            if ds_type == "COUNTER" or ds_type == "DERIVE":
                # Check for overflow if it's a counter
                if ds_type == "COUNTER" and value < old_value:
                    if max_val == 'U':
                        # this is funky. pretend as if this is the first data
                        # point
                        new_value = None
                    else:
                        min_val = str_to_num(min_val)
                        max_val = str_to_num(max_val)
                        new_value = max_val - old_value + value - min_val
                else:
                    new_value = value - old_value

                if new_value is None:
                    # Unknown counter max: treat this as the first data point
                    # and skip the rate calculation for this interval.
                    continue

                # Both COUNTER and DERIVE get divided by the timespan
                new_value /= interval
            elif ds_type == "ABSOLUTE":
                new_value = value / interval
            else:
                collectd.warn('unrecognized ds_type {}'.format(ds_type))
                new_value = value

            sample.values[i] = new_value

        points.extend(sample.values)
        columns.extend(('host', 'type'))
        points.extend((sample.host, sample.type))

        if sample.plugin_instance:
            columns.append('plugin_instance')
            points.append(sample.plugin_instance)

        if sample.type_instance:
            columns.append('type_instance')
            points.append(sample.type_instance)

        # Payload in the old (0.8-style) InfluxDB JSON write format: series
        # name, column names and a single row of point values.
        data = {'name': sample.plugin, 'columns': columns, 'points': [points]}

        self._queues[identifier].put(data)
        self._flush()
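
A writer object exposing this method would typically be registered as the plugin's write callback; a minimal sketch, where the InfluxDBWriter class and its setup of _types, _queues, _last_sample and friends are assumed rather than taken from the original:

writer = InfluxDBWriter()
collectd.register_write(writer.write)  # collectd calls write() with each collectd.Values sample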