    def setUp(self):
        self.metric = Metric('cloudflare')
        self.metric.add_tag('zone_id', 'test')
        self.metric.with_timestamp(1537616700000000000)
        self.metric.values = {
            'uniques': '116',
            'requests_all': '258',
            'requests_cached': '142',
            'requests_uncached': '116',
            'requests_encrypted': '255',
            'requests_unencrypted': '3',
            'requests_status_200': '229',
            'requests_status_499': '2',
            'requests_content_type_css': '10',
            'requests_content_type_html': '96',
            'requests_country_us': '14',
            'requests_country_pl': '183',
            'requests_ip_class_monitoringService': '13',
            'requests_ip_class_noRecord': '215',
            'requests_ip_class_searchEngine': '30',
            'bandwidth_all': '4607212',
            'bandwidth_cached': '2985600',
            'bandwidth_uncached': '1621612',
            'bandwidth_encrypted': '4606145',
            'bandwidth_unencrypted': '1067',
            'bandwidth_content_type_css': '273141',
            'bandwidth_content_type_html': '1618653',
            'bandwidth_country_pl': '3712599',
            'bandwidth_country_us': '231584',
        }
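
For reference, str() on a Metric like the one built above yields a single line of InfluxDB line protocol. A minimal sketch (field set trimmed for brevity):

from influx_line_protocol import Metric

metric = Metric('cloudflare')
metric.add_tag('zone_id', 'test')
metric.add_value('uniques', 116)
metric.with_timestamp(1537616700000000000)
# Prints roughly: cloudflare,zone_id=test uniques=116 1537616700000000000
print(metric)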
Example 2
class DataBase(object):
    def __init__(self, name, batch_size):

        self.logger = logging.getLogger(__name__)

        # Initialize the metrics to be pushed into the database.
        self.metric = Metric(
            name
        )  # Name of the database. If it doesn't exist, one will be created.
        self.str_metric = ""
        self.metrics = ""

        self.COUNTER = 0
        self.BATCH_SIZE = batch_size
        self.HOST = 'localhost'
        self.PORT = 9009

        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock.connect((self.HOST, self.PORT))

    def close_sock(self):
        """Close the socket to the DB"""
        self.sock.close()

    def new_message(self, message):
        """
        This function receives a message from the websocket and temporarily 
        stores chosen values in a string. As soon as BATCH_SIZE is reached,
        the whole batch will be pushed into the database.

        :param message: decoded json message 
        """
        self.COUNTER += 1

        self.metric.with_timestamp(message['E'] * 1000 * 1000)
        self.metric.add_value('PRICE', float(message['o']['ap']))
        self.metric.add_value('QUANTITY', float(message['o']['q']))
        self.metric.add_value(
            'USDVALUE',
            float(message['o']['q']) * float(message['o']['p']))
        self.metric.add_tag('PAIR', str(message['o']['s']))
        self.str_metric = str(self.metric)
        self.str_metric += "\n"
        self.metrics += self.str_metric

        if self.COUNTER == self.BATCH_SIZE:
            self.logger.info('Batch inserted into DB')
            self.COUNTER = 0
            bytes_metric = bytes(self.metrics, "utf-8")
            self.sock.sendall(bytes_metric)
            self.str_metric = ""
            self.metrics = ""
Example 3
def report(args):
    try:
        create_file(base_file=f"{args.mount_point}/file.dat")
        result = 0
    except AssertionError:
        result = 1

    collection = MetricCollection()
    metric = Metric(SFS_COMPARE)
    metric.add_value("value", result)
    collection.append(metric)
    res = requests.post(f"{args.telegraf}/telegraf",
                        data=str(collection),
                        timeout=2)
    assert res.status_code == 204, f"Status is {res.status_code}"
    LOGGER.info("Metric written at: %s", args.telegraf)
Example 4
    def parse_timeserie(self, zone_id, timeserie):
        metric = Metric('cloudflare')
        metric.with_timestamp(self.parse_time(timeserie['until']))
        metric.add_tag('zone_id', zone_id)
        metric.add_value('uniques', timeserie['uniques']['all'])
        self.__parse_requests(metric, timeserie['requests'])
        self.__parse_bandwidth(metric, timeserie['bandwidth'])

        return metric
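
The private helpers are not shown; a plausible reconstruction of __parse_requests, inferred from the flattened key names asserted in the ParserTest fixtures elsewhere in this listing (__parse_bandwidth would follow the same pattern):

def parse_requests(metric, requests):
    # Hypothetical sketch: flatten the nested Cloudflare payload into
    # requests_* fields (country codes lowercased, http_status -> status).
    for key, value in requests.items():
        if not isinstance(value, dict):
            metric.add_value('requests_%s' % key, value)
        elif key == 'ssl':
            for k, v in value.items():
                metric.add_value('requests_%s' % k, v)
        elif key == 'http_status':
            for k, v in value.items():
                metric.add_value('requests_status_%s' % k, v)
        else:  # content_type, country, ip_class
            for k, v in value.items():
                suffix = k.lower() if key == 'country' else k
                metric.add_value('requests_%s_%s' % (key, suffix), v)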
Example 5
    def __init__(self, name, batch_size):

        self.logger = logging.getLogger(__name__)

        # Initialize the metrics to be pushed into the database.
        self.metric = Metric(
            name
        )  # Name of the database. If it doesn't exist, one will be created.
        self.str_metric = ""
        self.metrics = ""

        self.COUNTER = 0
        self.BATCH_SIZE = batch_size
        self.HOST = 'localhost'
        self.PORT = 9009

        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock.connect((self.HOST, self.PORT))
Example 6
def dns_resolve(args):
    metric = Metric(INT_DNS)
    collection = MetricCollection()
    try:
        socket.getaddrinfo(args.dns_name, 0, 0, 0, 0)
    except socket.gaierror as err:
        metric.add_value("ips", err)
        metric.add_tag("dns_name", args.dns_name)
        metric.add_tag("result", "Not Resolved")
        collection.append(metric)
        res = requests.post(f"{args.telegraf}/telegraf",
                            data=str(collection),
                            timeout=2)
        assert res.status_code == 204, f"Status is {res.status_code}"
        LOGGER.info("Metric written at: %d)", args.telegraf)
Example 7
def send_mqtt(tags, timestamp, client, type, id, v, out_values):
    if (config['mqtt']['output'][type]['output_format'] == "influx"
            and config['mqtt']['output'][type]['enable']):
        metric = Metric(config['mqtt']['output'][type]['measurement'])
        metric.with_timestamp(timestamp * 1000000000)  # seconds -> nanoseconds
        metric.add_value('usage', v)
        for m in config['mqtt']['output'][type]['tags']:
            metric.add_tag(m, tags[id][m])
        topic = config['mqtt']['output'][type]['pattern'].format(**tags[id])
        publish_result = client.publish(topic, f"{metric}", 1)
        (rc, mid) = publish_result
        if mid % 500 == 0:
            print(f"mqtt message {mid}")
        if rc != 0:
            error(f"publish error {rc} {mid}")
            sys.exit(1)
        # info(f"{topic} {metric}")
        out_values.append((topic, metric))
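
The config structure this function reads is not shown; the lookups above imply a shape roughly like this (all names and values here are illustrative):

# Hypothetical config fragment matching the keys send_mqtt dereferences.
config = {
    'mqtt': {
        'output': {
            'power': {
                'enable': True,
                'output_format': 'influx',
                'measurement': 'power_usage',
                'tags': ['device', 'room'],
                'pattern': 'home/{room}/{device}',
            }
        }
    }
}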
Example 8
def report(response_body):
    value = response_body["alarmValue"][0]["value"]
    status = response_body["alarm_status"]

    collection = MetricCollection()
    metric = Metric(AS_RESULT)
    metric.add_value("value", value)
    metric.add_tag("status", status)
    collection.append(metric)
    res = requests.post(f"{args.telegraf}/telegraf", data=str(collection), timeout=2)
    assert res.status_code == 204, f"Status is {res.status_code}"
Example 9
class DataBase(object):
    def __init__(self):

        self.logger = logging.getLogger(__name__)

        # Initialize the metrics to be pushed into the database.
        self.metric = Metric(
            "liqui"
        )  # Name of the database. If it doesnt exist, one will be created.
        self.str_metric = ""
        self.metrics = ""

        self.counter = 0
        self.batch_size = 50
        self.HOST = 'localhost'
        self.PORT = 9009

        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock.connect((self.HOST, self.PORT))

    def close_sock(self):
        self.sock.close()

    def new_message(self, message):
        self.counter += 1

        self.metric.with_timestamp(message['E'] * 1000 * 1000)
        self.metric.add_value('PRICE', float(message['o']['p']))
        self.metric.add_value('QUANTITY', float(message['o']['q']))
        self.metric.add_tag('PAIR', str(message['o']['s']))
        self.str_metric = str(self.metric)
        self.str_metric += "\n"
        self.metrics += self.str_metric

        if self.counter == self.batch_size:
            self.logger.info('Batch inserted into DB')
            self.counter = 0
            bytes_metric = bytes(self.metrics, "utf-8")
            self.sock.sendall(bytes_metric)
            self.str_metric = ""
            self.metrics = ""
Example 10
class ParserTest(unittest.TestCase):
    timeserie = {
        "until": "2018-08-22T11:45:00Z",
        "requests": {
          "all": 258,
          "cached": 142,
          "uncached": 116,
          "ssl": {
            "encrypted": 255,
            "unencrypted": 3
          },
          "http_status": {
            "200": 229,
            "499": 2
          },
          "content_type": {
            "css": 10,
            "html": 96,
          },
          "country": {
            "PL": 183,
            "US": 14
          },
          "ip_class": {
            "monitoringService": 13,
            "noRecord": 215,
            "searchEngine": 30
          }
        },
        "bandwidth": {
          "all": 4607212,
          "cached": 2985600,
          "uncached": 1621612,
          "ssl": {
            "encrypted": 4606145,
            "unencrypted": 1067
          },
          "content_type": {
            "css": 273141,
            "html": 1618653,
          },
          "country": {
            "PL": 3712599,
            "US": 231584
          }
        },
        "uniques": {
          "all": 116
        }
      }

    def setUp(self):
        self.metric = Metric('cloudflare')
        self.metric.add_tag('zone_id', 'test')
        self.metric.with_timestamp(1537616700000000000)
        self.metric.values = {
            'uniques': '116',
            'requests_all': '258',
            'requests_cached': '142',
            'requests_uncached': '116',
            'requests_encrypted': '255',
            'requests_unencrypted': '3',
            'requests_status_200': '229',
            'requests_status_499': '2',
            'requests_content_type_css': '10',
            'requests_content_type_html': '96',
            'requests_country_us': '14',
            'requests_country_pl': '183',
            'requests_ip_class_monitoringService': '13',
            'requests_ip_class_noRecord': '215',
            'requests_ip_class_searchEngine': '30',
            'bandwidth_all': '4607212',
            'bandwidth_cached': '2985600',
            'bandwidth_uncached': '1621612',
            'bandwidth_encrypted': '4606145',
            'bandwidth_unencrypted': '1067',
            'bandwidth_content_type_css': '273141',
            'bandwidth_content_type_html': '1618653',
            'bandwidth_country_pl': '3712599',
            'bandwidth_country_us': '231584',
        }

    def test_only_one_serie(self):
        expectedCollection = MetricCollection()
        expectedCollection.append(self.metric)
        timeseries = {'timeseries': [self.timeserie]}

        a = Parser()
        collection = a.parse_dashboard("test", timeseries)

        self.maxDiff = None
        self.assertDictEqual(expectedCollection.metrics[0].values, collection.metrics[0].values)

    def test_multiple_series(self):
        expectedCollection = MetricCollection()
        expectedCollection.append(self.metric)
        expectedCollection.append(self.metric)

        timeseries = {'timeseries': [self.timeserie, self.timeserie]}

        a = Parser()
        collection = a.parse_dashboard("test", timeseries)

        self.maxDiff = None
        self.assertDictEqual(expectedCollection.metrics[0].values, collection.metrics[0].values)

    def test_parse_time(self):
        dtime = '2018-09-22T11:45:00Z'
        expected_timestamp = 1537616700 * 10**9

        p = Parser()
        self.assertEqual(p.parse_time(dtime), expected_timestamp)
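
parse_time itself is not shown; a plausible module-level version, pinned by test_parse_time above:

from datetime import datetime, timezone

def parse_time(dtime):
    # Hypothetical sketch: ISO-8601 "Z" timestamp -> nanosecond epoch.
    dt = datetime.strptime(dtime, '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=timezone.utc)
    return int(dt.timestamp()) * 10**9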
Example 11
import os
import sys

import django

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
sys.path.insert(0, os.path.abspath(".."))
django.setup()

import paho.mqtt.client as mqtt
from project.settings import MQTT_HOST
from project.settings import MQTT_PORT
from project.settings import MQTT_USER
from project.settings import MQTT_PASSWORD
from project.settings import MQTT_WATER_SENSOR_TOPIC
import time
from random import randint
from influx_line_protocol import Metric

mqtt_client = mqtt.Client()
mqtt_client.username_pw_set(username=MQTT_USER, password=MQTT_PASSWORD)
mqtt_client.connect(MQTT_HOST, MQTT_PORT, 60)

while True:
    metric = Metric("water")
    metric.add_tag('water', 'water')
    metric.add_value('nutrient_level_cm', randint(20, 500))
    metric.add_value('ph_downer_level_cm', randint(20, 500))
    metric.add_value('ph_level', randint(20, 500))
    metric.add_value('tds_level', randint(20, 500))
    print(metric)
    mqtt_client.publish(MQTT_WATER_SENSOR_TOPIC, str(metric))
    time.sleep(0.5)
Example 12
def main(measurement, station, url, latest):
    default_columns   =  "wind_dir_degt,wind_speed_mps,gust_speed_mps,significant_wave_height_m,dominant_wave_period_sec,avg_wave_period_sec,wave_dir_degt,sea_level_pressure_hpa,air_temp_degc,sea_surface_temp_degc,dewpoint_temp_degc,station_visibility_nmi,pressure_tendency_hpa,water_level_ft".split(",")
    realtime_columns  =  "wind_dir_degt,wind_speed_mps,gust_speed_mps,significant_wave_height_m,dominant_wave_period_sec,avg_wave_period_sec,wave_dir_degt,sea_level_pressure_hpa,air_temp_degc,sea_surface_temp_degc,dewpoint_temp_degc,station_visibility_nmi,water_level_ft".split(",")
    missing_data_list =  "MM,999,9999.0,999.0,99.0,99.00".split(",")

    f = requests.get(url)
    metadata_map = pull_station_metadata()
    for line in f.text.splitlines():
        if not is_comment(line):
            metric = Metric(measurement)
            values_list = line.split()
            if latest:
                station_id = values_list.pop(0)
                values_list.pop(0) # lat
                values_list.pop(0) # lon
                for key,value in metadata_map[station_id.lower()].items():
                    if key == 'id':
                        key = 'station_id'
                    if len(value) > 0:
                        if key in ["lat","lon"]:
                            metric.add_value(key,float(value))
                        else:
                            metric.add_tag(key,value)
            date = "{}-{}-{}T{}:{}+0700".format(values_list.pop(0),values_list.pop(0),values_list.pop(0),values_list.pop(0),values_list.pop(0)) #2006-01-02T15:04
            metric.with_timestamp(date_to_unix_timestamp(date))
            is_historical = (len(values_list) == 13)
            for i in range(len(values_list)):
                if values_list[i] not in missing_data_list:
                    if latest or is_historical:
                        metric.add_value(default_columns[i],float(values_list[i]))
                    else:
                        metric.add_value(realtime_columns[i],float(values_list[i]))
            if station:
                metric.add_tag("station_id",station)
            print(metric)
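
date_to_unix_timestamp is not shown; a sketch consistent with the '+0700' format string built above, returning nanoseconds like the other examples:

from datetime import datetime

def date_to_unix_timestamp(date):
    # Hypothetical helper: "2018-09-22T11:45+0700" -> nanosecond epoch.
    return int(datetime.strptime(date, '%Y-%m-%dT%H:%M%z').timestamp()) * 10**9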
Example 13
def get(client: Client):
    """Send request and write metrics to telegraf"""
    timeout = 20
    metrics = MetricCollection()
    try:
        res = requests.get(client.url,
                           headers={"Connection": "close"},
                           timeout=timeout)
    except requests.RequestException as err:
        LOGGER.exception("Timeout sending request to LB")
        lb_timeout = Metric(LB_TIMEOUT)
        lb_timeout.add_tag("client", client.host_name)
        lb_timeout.add_value("timeout", timeout * 1000)
        lb_timeout.add_value("exception", err)
        metrics.append(lb_timeout)
    else:
        lb_timing = Metric(LB_TIMING)
        lb_timing.add_tag("client", client.host_name)
        lb_timing.add_tag("server", res.headers["Server"])
        lb_timing.add_value("elapsed", res.elapsed.microseconds / 1000)
        metrics.append(lb_timing)
    client.report_metric(metrics)
Example 14
def report(client: Client, endpoint: str, token: str, project_id: str,
           **request_params):
    """Send request and write metrics to telegraf"""
    collection = MetricCollection()
    try:
        influx_row = Metric(RDS_BACKUP)
        target_req = get_rds_backup_info(endpoint, token, project_id,
                                         **request_params)
        if target_req.ok:
            backups = target_req.json()["backups"]
            for backup in backups:
                influx_row = Metric(RDS_BACKUP)  # a fresh Metric per backup row
                influx_row.add_tag("id_backup", backup["id"])
                influx_row.add_tag("status", backup["status"])
                influx_row.add_tag("size", backup["size"])
                influx_row.add_value(
                    "backup_duration",
                    get_duration(backup["begin_time"], backup["end_time"]))
                collection.append(influx_row)
        else:
            influx_row.add_tag("status", "request_failed")
            influx_row.add_tag("host", "scn6")
            influx_row.add_tag("reason", "fail")
            influx_row.add_value("elapsed", target_req.elapsed.seconds)
            collection.append(influx_row)
    except (IOError, HTTPError) as error:
        influx_row = Metric(CSM_EXCEPTION)
        influx_row.add_tag("Reporter", RDS_BACKUP)
        influx_row.add_tag("Status", "RDS Unavailable")
        influx_row.add_value("Value", error)
        collection.append(influx_row)
    client.report_metric(collection)
Example 15
def report(client: Client, endpoint: str, token: str, project_id: str,
           **request_params):
    """Send request and write metrics to telegraf"""
    collection = MetricCollection()
    influx_row = Metric(RDS_BACKUP)
    try:
        target_req = get_rds_backup_info(endpoint, token, project_id,
                                         **request_params)
        if target_req.ok:
            backups = target_req.json()['backups']
            for backup in backups:
                influx_row = Metric(RDS_BACKUP)  # a fresh Metric per backup row
                influx_row.add_tag('id_backup', backup['id'])
                influx_row.add_tag('status', backup['status'])
                influx_row.add_tag('size', backup['size'])
                influx_row.add_value(
                    'backup_duration',
                    get_duration(backup['begin_time'], backup['end_time']))
                collection.append(influx_row)
        else:
            influx_row.add_tag('status', 'request_failed')
            influx_row.add_tag('host', 'rds_backup')
            influx_row.add_tag('reason', 'fail')
            influx_row.add_value('elapsed', target_req.elapsed.seconds)
            collection.append(influx_row)
    except requests.RequestException as error:
        influx_row = Metric(CSM_EXCEPTION)
        influx_row.add_tag('Reporter', RDS_BACKUP)
        influx_row.add_tag('Status', 'RDS Unavailable')
        influx_row.add_value('Value', error)
        collection.append(influx_row)
    client.report_metric(collection)
Example 16
def report(client: Client):
    """Send request and write metrics to telegraf"""
    try:
        target_req = requests.get(client.url, headers={"Connection": "close"})
        influx_row = Metric(AS_LOADBALANCER)
        if target_req.status_code == 200:
            influx_row.add_tag("state", "connected")
            influx_row.add_tag("host", "scn4")
            influx_row.add_tag("reason", "ok")
            influx_row.add_value("elapsed",
                                 target_req.elapsed.microseconds / 1000)
        else:
            influx_row.add_tag("state", "connection_lost")
            influx_row.add_tag("host", "scn4")
            influx_row.add_tag("reason", "fail")
            influx_row.add_value("elapsed",
                                 target_req.elapsed.microseconds / 1000)
    except (IOError, HTTPError) as error:
        influx_row = Metric(CSM_EXCEPTION)
        influx_row.add_tag("Reporter", AS_LOADBALANCER)
        influx_row.add_tag("Status", "Loadbalancer Unavailable")
        influx_row.add_value("Value", error)
    except Exception:  # pylint: disable=broad-except
        LOGGER.exception("Exception occured while metrics reporting")
        return
    client.report_metric(influx_row)
Example 17
def main(repo, measurement):
    f = requests.get("https://api.github.com/repos/{}/stats/contributors".format(repo))
    if f.status_code == 202:
        # GitHub is still computing the stats; wait and retry.
        time.sleep(5)
        return main(repo, measurement)
    for record in f.json():
        author = record.get("author") or {}
        for week in record.get("weeks", []):
            metric = Metric(measurement)
            metric.add_tag("author", author.get("login", ""))
            metric.add_tag("org", repo.split("/")[0])
            metric.add_tag("repo", repo.split("/")[1])

            metric.add_value("additions", int(week.get("a")))
            metric.add_value("deletions", int(week.get("d")))
            metric.add_value("commits", int(week.get("c")))

            metric.with_timestamp(week.get("w") * 1000000000)  # epoch seconds -> ns
            print(metric)
Example 18
def print_lp(measurement, url):
    with closing(requests.get(url, stream=True)) as r:
        reader = csv.reader(codecs.iterdecode(r.iter_lines(), 'utf-8'))
        row_count = 0
        for row in reader:
            if row_count == 0:
                row_count += 1
                continue
            else:
                metric = Metric(measurement)

                province = (row[0] or "").strip()
                if province:
                    metric.add_value("province", province)
                country = (row[1] or "").strip()
                if country:
                    metric.add_value("country", country)
                metric.with_timestamp(
                    date_to_unix_timestamp((row[2] or "").strip()))

                metric.add_value("confirmed", int((row[3] or "0").strip()))
                metric.add_value("deaths", int((row[4] or "0").strip()))
                metric.add_value("recovered", int((row[5] or "0").strip()))

                if len(row) > 6 and row[6].strip() and row[7].strip():
                    metric.add_value("lat", float(row[6].strip()))
                    metric.add_value("lon", float(row[7].strip()))

                print(metric)
Example 19
    def setUp(self):
        self.expectedCollection = MetricCollection()
        m = Metric("kubernetes_pod")
        m.add_tag("pod", "canal-fqwlv")
        m.add_tag("namespace", "kube-system")
        m.values = {
            'cpu_usage': '0.0110423',
            'memory_rssBytes': '28389376i',
            'memory_usageBytes': '106688512i',
            'memory_workingSetBytes': '63107072i',
            'memory_pageFaults': '0i',
            'memory_majorPageFaults': '0i'
        }
        self.expectedCollection.append(m)
        m = Metric("kubernetes_pod_network")
        m.add_tag("pod", "canal-fqwlv")
        m.add_tag("namespace", "kube-system")
        m.add_tag("interface", "cali3e3082340d7")
        m.values = {
            'rxBytes': '227051613i',
            'rxErrors': '0i',
            'txBytes': '181652981i',
            'txErrors': '0i'
        }
        self.expectedCollection.append(m)
        m = Metric("kubernetes_pod_container")
        m.add_tag("pod", "canal-fqwlv")
        m.add_tag("namespace", "kube-system")
        m.add_tag("container", "install-cni")
        m.values = {
            'cpu_usage': '4.4756e-05',
            'memory_rssBytes': '180224i',
            'memory_usageBytes': '73990144i',
            'memory_workingSetBytes': '30420992i',
            'memory_pageFaults': '428434i',
            'memory_majorPageFaults': '0i'
        }
        self.expectedCollection.append(m)
Example 20
def influxOutput(proc):
    now = datetime.now()
    metric = Metric("host_pid_cpu_usage")
    metric.with_timestamp(int(datetime.timestamp(now) * 1e9))  # seconds -> nanoseconds
    metric.add_tag('host', socket.gethostname())
    metric.add_tag('platform', platform.platform())

    for k, v in proc.items():
        if isinstance(v, float):
            metric.add_value(k, v)
        elif isinstance(v, str):
            metric.add_tag(k, v)
    print(metric)
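
proc is presumably a mapping of per-process stats; a hedged usage sketch with psutil (psutil is an assumption, not part of the original):

import psutil

# Float entries become fields, str entries become tags in influxOutput.
proc = psutil.Process().as_dict(attrs=['name', 'username', 'cpu_percent'])
influxOutput(proc)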
Example 21
def get_client_response(client: Client):
    timeout = 5
    collection = MetricCollection()
    try:
        res = requests.get(client.url,
                           headers={"Connection": "close"},
                           timeout=timeout)
    except Timeout:
        LOGGER.exception("Timeout sending request to LB")
        lb_timeout = Metric(INT_DNS_TIMEOUT)
        lb_timeout.add_tag("client", client.url)
        lb_timeout.add_value("timeout", timeout * 1000)
        collection.append(lb_timeout)
    else:
        lb_timing = Metric(INT_DNS_TIMING)
        lb_timing.add_tag("client", client.url)
        lb_timing.add_tag("server", res.headers["Server"])
        lb_timing.add_value("elapsed", res.elapsed.microseconds / 1000)
        collection.append(lb_timing)
    client.report_metric(collection)
Example 22
import paho.mqtt.client as mqtt
from project.settings import MQTT_HOST
from project.settings import MQTT_PORT
from project.settings import MQTT_USER
from project.settings import MQTT_PASSWORD
from project.settings import MQTT_SPRINKLER_SENSOR_TOPIC
import time
from random import randint
from rest_framework.test import APIRequestFactory
from influx_line_protocol import Metric
from sprinkler.views import RegistryView

mqtt_client = mqtt.Client()
mqtt_client.username_pw_set(username=MQTT_USER, password=MQTT_PASSWORD)
mqtt_client.connect(MQTT_HOST, MQTT_PORT, 60)

sprinklers = ["tomato", "eggplant", "watermelon", "potato"]
factory = APIRequestFactory()
view = RegistryView.as_view()

for _ in sprinklers:
    view(factory.delete('sprinkler/registry', {'tag': _}))
    view(factory.post('sprinkler/registry', {'tag': _}))

while True:
    for _ in sprinklers:
        metric = Metric("sprinkler")
        metric.add_tag('tag', _)
        metric.add_value('soil_moisture_raw_adc', randint(20, 500))
        metric.add_value("soil_moisture", randint(20, 500))
        print(metric)
        mqtt_client.publish(MQTT_SPRINKLER_SENSOR_TOPIC, str(metric))
        time.sleep(1)
Example 23
    def parse_stats(self, stats):
        collections = MetricCollection()

        for pod in stats["pods"]:
            m = Metric("kubernetes_pod")
            m.add_tag("pod", pod["podRef"]["name"])
            m.add_tag("namespace", pod["podRef"]["namespace"])
            m.add_tag("node", stats["node"]["nodeName"])
            m.add_value("cpu_usage",
                        self.__nano_to_deci(pod["cpu"]["usageNanoCores"]))

            for key, value in pod["memory"].items():
                if key == "time":
                    continue
                m.add_value("memory_%s" % key, value)
            collections.append(m)

            for interface in pod["network"]["interfaces"]:
                m = Metric("kubernetes_pod_network")
                m.add_tag("pod", pod["podRef"]["name"])
                m.add_tag("namespace", pod["podRef"]["namespace"])
                m.add_tag("node", stats["node"]["nodeName"])

                for key, value in interface.items():
                    if key == "name":
                        m.add_tag("interface", value)
                        continue
                    m.add_value(key, value)
                collections.append(m)

            for container in pod["containers"]:
                m = Metric("kubernetes_pod_container")
                m.add_tag("pod", pod["podRef"]["name"])
                m.add_tag("namespace", pod["podRef"]["namespace"])
                m.add_tag("container", container["name"])
                m.add_tag("node", stats["node"]["nodeName"])
                m.add_value(
                    "cpu_usage",
                    self.__nano_to_deci(container["cpu"]["usageNanoCores"]))

                for key, value in container["memory"].items():
                    if key == "time":
                        continue
                    m.add_value("memory_%s" % key, value)
                collections.append(m)

        return collections
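
__nano_to_deci is not shown; given cpu_usage values like 0.0110423 in the Example 19 fixture, it presumably scales nanocores down to cores. A plausible module-level version:

def nano_to_deci(value):
    # Hypothetical reconstruction: usageNanoCores -> cores
    # (e.g. 11042300 -> 0.0110423).
    return value / 10**9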