Example #1
def metrics(vl, config):
    set = vl.type_instance
    dims = list(config.dimensions)
    units = config.units

    if vl.plugin == 'statsd':
        # statsd packs an optional prefix into the type instance:
        # "myapp.requests" -> prefix "myapp", metric name "requests".
        prefix, _, set = set.partition('.')
        if len(set) == 0:
            set = prefix
        else:
            dims.append(dict(Name='Prefix', Value=prefix))
    elif not set:
        set = vl.type

    sources = collectd.get_dataset(vl.type)
    if len(sources) == 1:
        # Single data source: keep the metric name as-is; a more specific
        # "<type>.<name>" unit mapping overrides the per-type default.
        _, type, _, _ = sources[0]
        unit = units.get(type, 'None')
        unit = units.get('.'.join([type, set]), unit)
        yield (set, unit, dims)
    else:
        # Multiple data sources: append each source name to the metric name.
        for name, type, _, _ in sources:
            name = '.'.join([set, name])
            unit = units.get(type, 'None')
            unit = units.get('.'.join([type, set]), unit)
            unit = units.get('.'.join([type, name]), unit)
            yield (name, unit, dims)
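A minimal sketch of how this generator might be consumed from a write callback: zip() pairs each yielded (name, unit, dims) tuple with the corresponding raw value (the PLUGIN_CONFIG object and publish() sink are hypothetical):

def write_callback(vl, config=PLUGIN_CONFIG):  # PLUGIN_CONFIG is hypothetical
    # One metric per data source; metrics() yields names in dataset order.
    for (name, unit, dims), value in zip(metrics(vl, config), vl.values):
        publish(name, value, unit, dims)  # hypothetical downstream sink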
def add_typesdb_info_to_values(values_dict, types_dict=dict()):
    """
    Add information from types.db files.

    :param values_dict: Dictionary.
    :param types_dict: A dictionary containing information from
        Collectd's types.db files in the same format as returned
        by `read_types_db`. If this argument is omitted only
        information that can be obtained by calling `collectd.get_dataset()`
        is used.
    :returns: `collectd.Values` with additional attributes.

    Since Collectd 5.5 the Python plugin provides a `get_dataset()`
    function that returns information from the types.db files. In this 
    case `types_dict` does not have to be passed to 
    `add_typesdb_info_to_values()`. The Python plugin of earlier 
    Collectd versions does not provide `get_dataset()` and it is
    necessary to read (ideally all) types.db by calling
    `read_typesdb(path)` for each file (updating the dictionary
    with each call) and passing the resulting dictionary as
    and argument to `add_typesdb_info_to_values()`.

    """

    values_dict['dsname'] = []
    values_dict['dstype'] = []
    values_dict['dsmin'] = []
    values_dict['dsmax'] = []

    dataset = None
    try:
        dataset = collectd.get_dataset(values_dict['type'])
    except AttributeError:
        #
        # collectd.get_dataset() is not yet implemented. Try to get
        # the information from types_dict, which holds the information
        # we read from types.db files.
        #
        try:
            dataset = types_dict[values_dict['type']]
        except KeyError:
            pass
    except TypeError:
        # collectd.get_dataset() raises TypeError for types that are
        # not defined in any loaded types.db file.
        pass
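As the docstring explains, on Collectd versions before 5.5 the caller has to assemble types_dict itself. A minimal sketch, assuming read_typesdb(path) returns a mapping from type name to a list of (name, type, min, max) tuples; the file paths are examples only:

types_dict = {}
for path in ['/usr/share/collectd/types.db', '/etc/collectd/custom.db']:
    types_dict.update(read_typesdb(path))  # merge each file's definitions

add_typesdb_info_to_values(values_dict, types_dict)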
def _build_splunk_metrics(value):
    # get dimension from config
    dimension_list = CONFIG.get(DIMENSION_LIST_KEY, [])

    # build per-data-source name suffixes ('' for the default source 'value')
    append_names = [
        '.' + append_name if append_name != 'value' else ''
        for (append_name, _, _, _) in collectd.get_dataset(value.type)
    ]
    if len(append_names) != len(value.values):
        collectd.error("len(append_names) != len(value.values)")
        return

    # format metric name & dimension list
    # make sure you are passing a copy of dimension list
    metric_name, dimension_list = format_value(
        value, dimension_list, CONFIG['splunk_metric_transform'])

    # build splunk metrics data
    metrics = (dict(
        event=("%f metric_name=%s metric_type=%s _value=%d host=%s %s" %
               (value.time, metric_name + postfix, value.plugin, metric_value,
                value.host, ' '.join(dimension_list))).strip(),
        fields=dict(metric_name=metric_name + postfix,
                    metric_type=value.plugin,
                    _value=metric_value))
               for (postfix,
                    metric_value) in itertools.izip(append_names, value.values)
               if not isnan(metric_value))

    # add dimensions to fields
    dims = {}
    if dimension_list:
        arr = [x.strip() for x in dimension_list]
        for d in arr:
            (k, v) = d.split('=')
            dims.setdefault(k, []).append(v)

    for m in metrics:
        m['fields'].update(dims)
        yield m
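For illustration, each dictionary yielded above carries an event string of roughly the following shape, with the same metric fields (plus the parsed dimensions) repeated in its fields mapping; all values here are invented:

1514764800.000000 metric_name=cpu.user metric_type=cpu _value=42 host=web-01 role=frontend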
Example #5
    def write(self, vl, data=None):
        datasets = collectd.get_dataset(vl.type)
        for ds, value in zip(datasets, vl.values):
            if math.isnan(value):
                continue
            ds_name, ds_type, ds_min, ds_max = ds
            classname, new_labels = self._format(vl.plugin, vl.plugin_instance,
                                                 vl.type, vl.type_instance,
                                                 ds_name, ds_type)
            if classname is None:
                # Ignore classname that are unset (it's a feature from rewrite
                # rule to destroy a point)
                continue

            labels = self.default_labels.copy()
            labels.update(vl.meta)
            labels.update(new_labels)
            # Remove empty values
            labels = {
                k: str(v).strip()
                for k, v in labels.items() if v is not None and str(v).strip()
            }

            msg = '%d// %s{%s} %f' % (
                int(1000000 * vl.time),  # Microseconds
                classname,
                urllib.urlencode(labels).replace('&', ', '),
                value)

            try:
                self.queue.put_nowait(msg)
            except Full:
                collectd.warning('write_warp10 plugin: Buffer is full (%s '
                                 'elements) for endpoint "%s". The WARP '
                                 'endpoint may encounter issues. Otherwise, '
                                 'consider increasing BufferSize or reducing '
                                 'FlushInterval' %
                                 (self.queue.qsize(), self.url))
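Each msg queued above follows Warp 10's plain-text input format, <timestamp>// <class>{<labels>} <value>, with the timestamp in microseconds and the urlencoded labels joined by commas. A line produced this way might look like this (values invented):

1514764800000000// cpu.percent{host=web-01, plugin=cpu} 3.140000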
    try:
        prefix = CONFIG['prefix']
        if not prefix.endswith('.'):
            prefix = prefix + '.'
    except KeyError:
        prefix = ''

    try:
        tags_append = ' ' + CONFIG['tags_append']
    except KeyError:
        tags_append = ''

    append_names = [
        '.' + append_name if append_name != 'value' else ''
        for (append_name, _, _, _) in collectd.get_dataset(value.type)
    ]

    if len(append_names) != len(value.values):
        collectd.error("len(append_names) != len(value.values)")
        return

    msg = "".join([
        "%s %f %d host=%s%s\n" %
        (prefix + metric_name + postfix, metric_value, value.time,
         value.host, tags_append)
        for (postfix, metric_value) in zip(append_names, value.values)
    ])
    try:
        CONFIG['queue'].put(msg, block=False)
    except Exception, e:
        collectd.error("Failed to queue message: " + str(e))

if 'collectd' in globals():
    collectd.register_config(configure_callback)
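configure_callback itself is not part of this excerpt; a minimal sketch of such a callback, filling the two CONFIG keys read above (the collectd.conf option names are assumptions):

def configure_callback(conf):
    # conf is a collectd.Config node; its children carry the options.
    for node in conf.children:
        key = node.key.lower()
        if key == 'prefix':            # assumed option name
            CONFIG['prefix'] = node.values[0]
        elif key == 'tags_append':     # assumed option name
            CONFIG['tags_append'] = node.values[0]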
Example #8
Example config file:

labels   module_id, user


Written by Andrew Tritt, [email protected]
"""
import collectd
import docker
import re
from os.path import basename, splitext

CLIENT = None
CONFIG_OPTIONS = dict()
LABEL = None
TYPE_INSTS = [t[0] for t in collectd.get_dataset('docker')]
# Regex for matching the image name from an image object
IMG_REGX = re.compile(r'\'([\w/:.-]+)')

log_tmpl = splitext(basename(__file__))[0] + " plugin: %s"


def log(msg):
    collectd.info(log_tmpl % msg)


def init_func():
    global CLIENT
    CLIENT = docker.from_env()

def _read_dataset(vl):
    # Split the dataset into parallel lists of source names and DS types.
    dataset = [d[0:2] for d in collectd.get_dataset(vl.type)]
    return [d[0] for d in dataset], [d[1] for d in dataset]
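For the standard load type, which types.db defines as three GAUGE data sources, this helper would return two parallel lists (a sketch, assuming Collectd >= 5.5 so that collectd.get_dataset() exists):

names, ds_types = _read_dataset(vl)  # with vl.type == 'load'
# names    -> ['shortterm', 'midterm', 'longterm']
# ds_types -> the corresponding DS type strings, one per source

For context, a plugin like this is typically wired into collectd.conf along these lines; the module path and name are placeholders, and the labels line mirrors the example from the docstring:

<LoadPlugin python>
    Globals true
</LoadPlugin>

<Plugin python>
    ModulePath "/path/to/plugins"
    Import "docker_plugin"
    <Module docker_plugin>
        labels module_id, user
    </Module>
</Plugin>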