from datetime import datetime

from influxdb_client import InfluxDBClient, Point, WritePrecision
from influxdb_client.client.write_api import SYNCHRONOUS
import random
# You can generate a Token from the "Tokens Tab" in the UI
token = "6GqhhbHug0P1ewJLlvyXwkRXwc9pgxGiRNz-T__7rkYgm4Ya771qCHdC7HhNTltLmon2U7YIQzP50yXK0wXNTQ=="
org = "tpdp"
bucket = "db"

client = InfluxDBClient(url="http://192.168.4.23:8086", token=token, org=org)

write_api = client.write_api(write_options=SYNCHRONOUS)

# Example: write 300 random points in line protocol (uncomment to run)
# for n in range(300):
#     num = random.randint(0, 100) * 1.0
#     data = "mem,host=host1 used_percent=" + str(num)
#     write_api.write(bucket, org, data)

# InfluxDB 2.x has no databases; list the buckets through the Buckets API
# instead (get_list_database() only exists on the 1.x client).
buckets = client.buckets_api().find_buckets().buckets

# For a better format, keep just the bucket names
# (also avoids shadowing the built-in name `list`).
bucket_names = [b.name for b in buckets]
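
# The imports above pull in Point and WritePrecision but the snippet never uses
# them. A minimal sketch of the more idiomatic, structured write, assuming the
# same client, bucket and org as above (the tag/field names are illustrative):
point = (
    Point("mem")
    .tag("host", "host1")
    .field("used_percent", random.randint(0, 100) * 1.0)
    .time(datetime.utcnow(), WritePrecision.NS)
)
write_api.write(bucket=bucket, org=org, record=point)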
Example 2
import csv
import os
import time

from influxdb import InfluxDBClient  # note: this example uses the 1.x client


def monitoringExtenders(network_list, network_setup, polling_frequency,
                        influxServer, dest_file, logger, system_command_lst):
    """Poll every extender in network_list once per polling_frequency and
    store the results in dest_file in CSV format. One file is created per
    extender so that each one can be followed up separately."""
    extender_csv_file_list = []
    csv_header = []
    # Create the CSV header row; chanim_info, vmstat_info and loadavg_info
    # are column-name lists defined at module level elsewhere in the source.
    csv_header.append('DATE')
    for command, command_type in system_command_lst:
        if "WIFI_CHANIM" in command_type:
            for chanim in chanim_info:
                csv_header.append(command_type + "-" + chanim)
        elif "VMSTAT" in command_type:
            for vmstat in vmstat_info:
                csv_header.append(command_type + "-" + vmstat)
        elif "LOADAVG" in command_type:
            for loadavg in loadavg_info:
                csv_header.append(command_type + "-" + loadavg)
        else:
            csv_header.append(command_type)
    # Build one CSV file per extender: root file name + extender name
    for extender in network_list:
        extender_csv_name = dest_file + '-' + extender['name'].strip(" ") + ".csv"
        # Open the file
        extender_csv_file = open(extender_csv_name, 'w+')
        extender['CSVFile'] = extender_csv_file
        # Write the header into the CSV file; a DictWriter per extender manages and checks each row
        csv_writer = csv.DictWriter(extender['CSVFile'], fieldnames=csv_header)
        extender['CSVWriter'] = csv_writer
        extender['CSVWriter'].writeheader()

        logger.info("Creating file {:20} mode {:2}".format(
            extender['CSVFile'].name, extender['CSVFile'].mode))

    logger.info("Creating Data Base {}".format(influxServer["Server_name"]))
    os.environ['NO_PROXY'] = influxServer["Server_name"]
    client = InfluxDBClient(host=influxServer["Server_name"],
                            port=influxServer["Server_port"],
                            ssl=False,
                            proxies=None)
    client.create_database(influxServer["DB_name"])
    logger.info("Creation of Data Base {} {}".format(
        influxServer["Server_name"], client.get_list_database()))

    # Start the monitoring loop
    while True:
        try:
            #Launch the command
            DoExtenderMonitoring(network_list, network_setup, logger,
                                 system_command_lst, client)
            #Sleep for polling frequency
            time.sleep(int(polling_frequency))

        except KeyboardInterrupt:
            # A keyboard interrupt stops the polling
            logger.info("Keyboard interrupt, stop monitoring")
            break
        else:
            # Runs after each successful poll, once the polling wait has elapsed
            logger.debug("Out of polling wait launch commands")

    # End of monitoring: close the opened files
    for extender in network_list:
        logger.info("Closing file {}".format(extender['CSVFile'].name))
        extender['CSVFile'].close()

    return
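
# DoExtenderMonitoring is defined elsewhere in the original module. As a purely
# hypothetical sketch, one poll result could be pushed to the 1.x client created
# above like this (the measurement, tag and field names are invented):
def write_sample(client, influxServer, extender_name, used_percent):
    body = [{
        "measurement": "extender_monitoring",
        "tags": {"extender": extender_name},
        "fields": {"used_percent": float(used_percent)},
    }]
    client.write_points(body, database=influxServer["DB_name"])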
Example 3
from influxdb_client import InfluxDBClient  # InfluxDB 2.x client


class InfluxDB(object):
    def __init__(self, connection):
        try:
            timerange = int(connection['data_availability_timerange'])
        except (KeyError, ValueError, TypeError):
            timerange = 30  # default
        self.min_timerange = '{}d'.format(timerange)
        print("Finding measurements with datapoints in last {} days".format(timerange))
        try:
            self.client = InfluxDBClient(url=connection['url'], token=connection['token'], org=connection["org"])
            self.query_api = self.client.query_api()
        except Exception as e:
            print("Failed to connect to initialize Influx Odata container")
            print(str(e))
            raise ConnectionError()
        try:
            bucket = connection['buckets']
            if isinstance(bucket, str):
                # Buckets are supplied as a comma-separated string
                self.buckets = bucket.split(',')
            else:
                self.buckets = list()
        except KeyError:
            print("Bucket details not valid")
            raise ValueError()

    def fields(self, bucket, measurement):
        """returns a tuple of dicts where each dict has attributes (name, type, edm_type)"""

        # Flux: distinct field names for this measurement within the time range
        fields_query = 'from(bucket: "{}") \
        |> range(start: -{}, stop: now()) \
        |> filter(fn: (r) => (r._measurement == "{}")) \
        |> keep(columns: ["_field"]) \
        |> group() \
        |> distinct(column: "_field") \
        |> limit(n: 200) \
        |> sort()'.format(bucket, self.min_timerange, measurement)

        # Flux: distinct tag keys, excluding InfluxDB's built-in columns
        tags_query = 'from(bucket: "{}") \
        |> range(start: -{}, stop: now()) \
        |> filter(fn: (r) => (r._measurement == "{}")) \
        |> keys()  \
        |> keep(columns: ["_value"]) \
        |> distinct() \
        |> filter(fn: (r) => r._value != "_measurement" and r._value != "_field") \
        |> filter(fn: (r) => r._value != "_time" and r._value != "_start" and r._value != "_stop" and r._value != "_value") \
        |> sort() \
        |> limit(n: 200)'.format(bucket, self.min_timerange, measurement)

        fields_rs = self.query_api.query(fields_query)
        tags_rs = self.query_api.query(tags_query)
        fields = []
        tags = []
        try:
            fields = [(_f.values['_value'], 'float') for _f in fields_rs[0].records]
        except (IndexError, KeyError):
            pass  # no field data returned for this time range
        try:
            tags = [(_t.values['_value'], 'string') for _t in tags_rs[0].records]
        except (IndexError, KeyError):
            pass  # no tag data returned for this time range
        fields.extend(tags)

        # Map each (name, type) pair into the (name, type, edm_type) dict shape;
        # mangle_field_name and get_edm_type are helpers defined elsewhere.
        fields = (dict(
            name=mangle_field_name(f[0]),
            type=f[1],
            edm_type=get_edm_type(f[1])
        ) for f in fields)
        return tuple(fields)
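
    # Illustrative shape of the fields() result, assuming a hypothetical
    # measurement with one float field and one tag (values are made up):
    # ({'name': 'used_percent', 'type': 'float', 'edm_type': <edm float type>},
    #  {'name': 'host', 'type': 'string', 'edm_type': <edm string type>})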

    @property
    def measurements(self):
        measurements = []

        for each_bucket in self.buckets:
            # List measurement names via v1.tagValues on the _measurement tag
            q = 'import "influxdata/influxdb/v1" v1.tagValues(bucket: "{}", tag: "_measurement", predicate: (r) => true, start: -{})'\
                .format(each_bucket, self.min_timerange)
            # alternative: 'import "influxdata/influxdb/v1" v1.measurements(bucket: "{}")'.format(each_bucket)
            rs = self.query_api.query(q)
            if not rs:
                continue  # no measurements in this bucket within the time range
            measurements_list = [_m.values['_value'] for _m in rs[0].records]

            def m_dict(m):
                d = dict()
                d['bucket'] = each_bucket
                d['mangled_bucket'] = mangle_bucket_name(each_bucket)
                d['mangled_measurement'] = mangle_measurement_name(m)
                d['mangled_path'] = bucket_name__measurement_name(each_bucket, m)
                d['fields'] = self.fields(each_bucket, m)
                return d
            measurements.extend(m_dict(m) for m in measurements_list)
        return measurements

    @property
    def databases(self):
        # InfluxDB 2.x replaces databases with buckets; list them through the
        # Buckets API (get_list_database() only exists on the 1.x client).
        rs = self.client.buckets_api().find_buckets().buckets
        return iter(rs)
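

# Usage sketch for the InfluxDB container above; every connection value below
# is an illustrative placeholder, and the mangle_* helpers plus get_edm_type
# must be defined elsewhere for the class to run.
connection = {
    "url": "http://localhost:8086",
    "token": "my-token",
    "org": "my-org",
    "buckets": "db,telemetry",
    "data_availability_timerange": "30",
}
influx = InfluxDB(connection)
for m in influx.measurements:
    print(m['bucket'], m['mangled_measurement'], len(m['fields']))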