def print_lp(measurement, url):
    # Stream the CSV and decode it line by line rather than loading it whole.
    with closing(requests.get(url, stream=True)) as r:
        reader = csv.reader(codecs.iterdecode(r.iter_lines(), 'utf-8'))
        next(reader, None)  # skip the header row
        for row in reader:
            metric = Metric(measurement)

            province = (row[0] or "").strip()
            if province:
                metric.add_value("province", province)
            country = (row[1] or "").strip()
            if country:
                metric.add_value("country", country)
            metric.with_timestamp(
                date_to_unix_timestamp((row[2] or "").strip()))

            metric.add_value("confirmed", int((row[3] or "0").strip()))
            metric.add_value("deaths", int((row[4] or "0").strip()))
            metric.add_value("recovered", int((row[5] or "0").strip()))

            # row[7] is read too, so guard the full index range, and skip
            # empty coordinate cells, which would make float() raise.
            if len(row) > 7 and row[6].strip() and row[7].strip():
                metric.add_value("lat", float(row[6].strip()))
                metric.add_value("lon", float(row[7].strip()))

            print(metric)
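This snippet assumes a few imports beyond the project's own helpers. A minimal header for it would be the following, where Metric and date_to_unix_timestamp are project-specific helpers rather than library code:

import codecs
import csv
from contextlib import closing

import requests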
Example #2
def main(measurement, station, url, latest):
    default_columns = "wind_dir_degt,wind_speed_mps,gust_speed_mps,significant_wave_height_m,dominant_wave_period_sec,avg_wave_period_sec,wave_dir_degt,sea_level_pressure_hpa,air_temp_degc,sea_surface_temp_degc,dewpoint_temp_degc,station_visibility_nmi,pressure_tendency_hpa,water_level_ft".split(",")
    realtime_columns = "wind_dir_degt,wind_speed_mps,gust_speed_mps,significant_wave_height_m,dominant_wave_period_sec,avg_wave_period_sec,wave_dir_degt,sea_level_pressure_hpa,air_temp_degc,sea_surface_temp_degc,dewpoint_temp_degc,station_visibility_nmi,water_level_ft".split(",")
    missing_data_list = "MM,999,9999.0,999.0,99.0,99.00".split(",")

    f = requests.get(url)
    metadata_map = pull_station_metadata()
    for line in f.text.splitlines():
        if is_comment(line):
            continue
        metric = Metric(measurement)
        values_list = line.split()
        if latest:
            # The "latest observations" feed prefixes each row with the
            # station id and its coordinates; pull those off first.
            station_id = values_list.pop(0)
            values_list.pop(0)  # lat
            values_list.pop(0)  # lon
            for key, value in metadata_map[station_id.lower()].items():
                if key == 'id':
                    key = 'station_id'
                if len(value) > 0:
                    if key in ["lat", "lon"]:
                        metric.add_value(key, float(value))
                    else:
                        metric.add_tag(key, value)
        # The first five columns are YY MM DD hh mm (target layout 2006-01-02T15:04).
        date = "{}-{}-{}T{}:{}+0700".format(
            values_list.pop(0), values_list.pop(0), values_list.pop(0),
            values_list.pop(0), values_list.pop(0))
        metric.with_timestamp(date_to_unix_timestamp(date))
        is_historical = (len(values_list) == 13)
        for i in range(len(values_list)):
            if values_list[i] not in missing_data_list:
                if latest or is_historical:
                    metric.add_value(default_columns[i], float(values_list[i]))
                else:
                    metric.add_value(realtime_columns[i], float(values_list[i]))
        if station:
            metric.add_tag("station_id", station)
        print(metric)
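Example #2 also relies on two helpers that are not shown: is_comment and pull_station_metadata. For NDBC text feeds, which prefix their header rows with '#', a plausible (hypothetical) is_comment would be:

def is_comment(line):
    # NDBC realtime/historical files start their two header rows with '#'.
    return line.lstrip().startswith("#")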
Example #3
    def parse_timeserie(self, zone_id, timeserie):
        metric = Metric('cloudflare')
        metric.with_timestamp(self.parse_time(timeserie['until']))
        metric.add_tag('zone_id', zone_id)
        metric.add_value('uniques', timeserie['uniques']['all'])
        self.__parse_requests(metric, timeserie['requests'])
        self.__parse_bandwidth(metric, timeserie['bandwidth'])

        return metric
Example #4
class DataBase(object):
    def __init__(self, name, batch_size):

        self.logger = logging.getLogger(__name__)

        # Initialize the metrics to be pushed into the database.
        self.metric = Metric(
            name
        )  # Name of the database. If it doesn't exist, one will be created.
        self.str_metric = ""
        self.metrics = ""

        self.COUNTER = 0
        self.BATCH_SIZE = batch_size
        self.HOST = 'localhost'
        self.PORT = 9009

        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock.connect((self.HOST, self.PORT))

    def close_sock(self):
        """Close the socket to the DB"""
        self.sock.close()

    def new_message(self, message):
        """
        This function receives a message from the websocket and temporarily 
        stores chosen values in a string. As soon as BATCH_SIZE is reached,
        the whole batch will be pushed into the database.

        :param message: decoded json message 
        """
        self.COUNTER += 1

        # The event time 'E' appears to be in milliseconds; scale to nanoseconds.
        self.metric.with_timestamp(message['E'] * 1000 * 1000)
        self.metric.add_value('PRICE', float(message['o']['ap']))
        self.metric.add_value('QUANTITY', float(message['o']['q']))
        self.metric.add_value(
            'USDVALUE',
            float(message['o']['q']) * float(message['o']['p']))
        self.metric.add_tag('PAIR', str(message['o']['s']))
        self.str_metric = str(self.metric)
        self.str_metric += "\n"
        self.metrics += self.str_metric

        if self.COUNTER == self.BATCH_SIZE:
            self.logger.info('Batch inserted into DB')
            self.COUNTER = 0
            bytes_metric = bytes(self.metrics, "utf-8")
            self.sock.sendall(bytes_metric)
            self.str_metric = ""
            self.metrics = ""
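A minimal usage sketch for this class, assuming a decoded Binance-style order event (the field names E, o.ap, o.p, o.q, o.s follow what new_message reads above; the values are made up) and a line-protocol listener on localhost:9009:

db = DataBase("binance_orders", batch_size=1)
db.new_message({
    "E": 1537616700000,  # event time in milliseconds
    "o": {"ap": "6500.0", "p": "6499.5", "q": "0.25", "s": "BTCUSDT"},
})
db.close_sock()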
Example #5
def influxOutput(proc):
    now = datetime.now()
    metric = Metric("host_pid_cpu_usage")
    # The other examples here pass nanosecond timestamps; datetime.timestamp()
    # returns seconds, so scale it up.
    metric.with_timestamp(int(now.timestamp() * 10**9))
    metric.add_tag('host', socket.gethostname())
    metric.add_tag('platform', platform.platform())

    for k, v in proc.items():
        if isinstance(v, float):
            metric.add_value(k, v)
        elif isinstance(v, str):
            metric.add_tag(k, v)
    print(metric)
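The proc argument is evidently a flat dict of process attributes; with psutil, for example, it could be fed like this (a sketch, assuming psutil is installed):

import psutil

for p in psutil.process_iter():
    # as_dict() returns a plain dict; float values become fields, strings become tags.
    influxOutput(p.as_dict(attrs=["name", "cpu_percent", "memory_percent"]))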
Example #6
def send_mqtt(tags, timestamp, client, type, id, v, out_values):
    output_conf = config['mqtt']['output'][type]
    if output_conf['output_format'] == "influx" and output_conf['enable']:
        metric = Metric(output_conf['measurement'])
        # timestamp is presumably in seconds; scale to nanoseconds.
        metric.with_timestamp(timestamp * 1000000000)
        metric.add_value('usage', v)
        for m in output_conf['tags']:
            metric.add_tag(m, tags[id][m])
        topic = output_conf['pattern'].format(**tags[id])
        publish_result = client.publish(topic, f"{metric}", 1)
        (rc, m) = publish_result
        if m % 500 == 0:
            print(f"mqtt message {m}")
        if rc != 0:
            error(f"publish error {rc} {m}")
            sys.exit(1)
        # info(f"{topic} {metric}")
        out_values.append((topic, metric))
Example #7
def main(repo, measurement):
    f = requests.get("https://api.github.com/repos/{}/stats/contributors".format(repo))
    if f.status_code == 202:
        # GitHub is still computing the stats; wait and retry, and return so
        # the 202 body is not parsed below.
        time.sleep(5)
        return main(repo, measurement)
    for record in f.json():
        # "author" can be null for anonymous contributors.
        author = record.get("author") or {}
        for week in record.get("weeks", []):
            metric = Metric(measurement)
            metric.add_tag("author", author.get("login", ""))
            metric.add_tag("org", repo.split("/")[0])
            metric.add_tag("repo", repo.split("/")[1])

            metric.add_value("additions", int(week.get("a")))
            metric.add_value("deletions", int(week.get("d")))
            metric.add_value("commits", int(week.get("c")))

            # "w" is the start of the week in Unix seconds; convert to ns.
            metric.with_timestamp(week.get("w") * 1000000000)
            print(metric)
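One caveat: retrying via recursion can recurse indefinitely if GitHub keeps answering 202 while it computes the stats in the background. A loop-based fetch with bounded retries (a sketch; fetch_contributor_stats is a hypothetical name) would be:

def fetch_contributor_stats(repo, retries=10):
    for _ in range(retries):
        f = requests.get("https://api.github.com/repos/{}/stats/contributors".format(repo))
        if f.status_code != 202:
            return f
        time.sleep(5)
    raise RuntimeError("contributor stats not ready after {} retries".format(retries))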
Example #8
class DataBase(object):
    def __init__(self):

        self.logger = logging.getLogger(__name__)

        # Initialize the metrics to be pushed into the database.
        self.metric = Metric(
            "liqui"
        )  # Name of the database. If it doesn't exist, one will be created.
        self.str_metric = ""
        self.metrics = ""

        self.counter = 0
        self.batch_size = 50
        self.HOST = 'localhost'
        self.PORT = 9009

        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock.connect((self.HOST, self.PORT))

    def close_sock(self):
        self.sock.close()

    def new_message(self, message):
        self.counter += 1

        self.metric.with_timestamp(message['E'] * 1000 * 1000)
        self.metric.add_value('PRICE', float(message['o']['p']))
        self.metric.add_value('QUANTITY', float(message['o']['q']))
        self.metric.add_tag('PAIR', str(message['o']['s']))
        self.str_metric = str(self.metric)
        self.str_metric += "\n"
        self.metrics += self.str_metric

        if self.counter == self.batch_size:
            self.logger.info('Batch inserted into DB')
            self.counter = 0
            bytes_metric = bytes(self.metrics, "utf-8")
            self.sock.sendall(bytes_metric)
            self.str_metric = ""
            self.metrics = ""
Example #9
class ParserTest(unittest.TestCase):
    timeserie = {
        "until": "2018-08-22T11:45:00Z",
        "requests": {
          "all": 258,
          "cached": 142,
          "uncached": 116,
          "ssl": {
            "encrypted": 255,
            "unencrypted": 3
          },
          "http_status": {
            "200": 229,
            "499": 2
          },
          "content_type": {
            "css": 10,
            "html": 96,
          },
          "country": {
            "PL": 183,
            "US": 14
          },
          "ip_class": {
            "monitoringService": 13,
            "noRecord": 215,
            "searchEngine": 30
          }
        },
        "bandwidth": {
          "all": 4607212,
          "cached": 2985600,
          "uncached": 1621612,
          "ssl": {
            "encrypted": 4606145,
            "unencrypted": 1067
          },
          "content_type": {
            "css": 273141,
            "html": 1618653,
          },
          "country": {
            "PL": 3712599,
            "US": 231584
          }
        },
        "uniques": {
          "all": 116
        }
      }

    def setUp(self):
        self.metric = Metric('cloudflare')
        self.metric.add_tag('zone_id', 'test')
        self.metric.with_timestamp(1537616700000000000)
        self.metric.values = {
            'uniques': '116',
            'requests_all': '258',
            'requests_cached': '142',
            'requests_uncached': '116',
            'requests_encrypted': '255',
            'requests_unencrypted': '3',
            'requests_status_200': '229',
            'requests_status_499': '2',
            'requests_content_type_css': '10',
            'requests_content_type_html': '96',
            'requests_country_us': '14',
            'requests_country_pl': '183',
            'requests_ip_class_monitoringService': '13',
            'requests_ip_class_noRecord': '215',
            'requests_ip_class_searchEngine': '30',
            'bandwidth_all': '4607212',
            'bandwidth_cached': '2985600',
            'bandwidth_uncached': '1621612',
            'bandwidth_encrypted': '4606145',
            'bandwidth_unencrypted': '1067',
            'bandwidth_content_type_css': '273141',
            'bandwidth_content_type_html': '1618653',
            'bandwidth_country_pl': '3712599',
            'bandwidth_country_us': '231584',

        }

    def test_only_one_serie(self):
        expectedCollection = MetricCollection()
        expectedCollection.append(self.metric)
        timeseries = {'timeseries': [self.timeserie]}

        a = Parser()
        collection = a.parse_dashboard("test", timeseries)

        self.maxDiff = None
        self.assertDictEqual(expectedCollection.metrics[0].values, collection.metrics[0].values)

    def test_multiple_series(self):
        expectedCollection = MetricCollection()
        expectedCollection.append(self.metric)
        expectedCollection.append(self.metric)

        timeseries = {'timeseries': [self.timeserie, self.timeserie]}

        a = Parser()
        collection = a.parse_dashboard("test", timeseries)

        self.maxDiff = None
        self.assertDictEqual(expectedCollection.metrics[0].values, collection.metrics[0].values)

    def test_parse_time(self):
        dtime = '2018-09-22T11:45:00Z'
        expected_timestamp = 1537616700 * 10**9

        p = Parser()
        self.assertEqual(p.parse_time(dtime), expected_timestamp)
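These tests run under the standard library runner, e.g. python -m unittest.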