Example #1
# Slow as crap, but worth holding on to for project comparison.

import pandas as pd
from influxdb import InfluxDBClient
from progressbar import Bar, Percentage, ProgressBar

bldg = input('bldg ID: ').upper()
source = input("'hotIN', 'coldIN', or 'hotRETURN': ")

print('locating file...\n')

path = "/Users/joseph/Desktop/GRA/InfluxSemesterProject/LLC_BLDG_" + bldg + "/"
file = source + "_LLC_BLDG_" + bldg + "_OCT-4-NOV-13_Testdata.csv"

print('Connecting to InfluxDB...\n')

client = InfluxDBClient(host='influxdbubuntu.bluezone.usu.edu', port=8086)
client.switch_database('LLC_FlowData')

#path="/Users/joseph/Desktop/GRA/InfluxSemesterProject/"
#file = "test.csv"

print('Reading CSV file...\n')

csvReader = pd.read_csv(path + file, sep=',')

print(csvReader.shape)
print(csvReader.columns)

data = len(csvReader)
widgets = [Percentage(), Bar()]  # 'widgets' was undefined in the original
pbar = ProgressBar(widgets=widgets, maxval=data).start()
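
The snippet ends before any points are written; a minimal sketch of the write loop it appears to build toward (the measurement layout and column positions are assumptions, not from the original):

points = []
for i, row in csvReader.iterrows():
    points.append({
        "measurement": source,
        "tags": {"building": bldg},
        "time": row.iloc[0],                     # assumes column 0 holds the timestamp
        "fields": {"value": float(row.iloc[1])}  # assumes column 1 holds the reading
    })
    if len(points) >= 1000:  # batch writes to limit round trips
        client.write_points(points)
        points = []
    pbar.update(i + 1)
if points:
    client.write_points(points)
pbar.finish()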
Example #2
def last_data(unique_id, measure_type, measurement_id, period):
    """Return the most recent time and value from influxdb"""
    if not str_is_float(period):
        return '', 204

    if measure_type in ['input', 'math', 'output', 'pid']:
        dbcon = InfluxDBClient(
            INFLUXDB_HOST,
            INFLUXDB_PORT,
            INFLUXDB_USER,
            INFLUXDB_PASSWORD,
            INFLUXDB_DATABASE)

        # measure_type was already validated by the enclosing if
        measure = DeviceMeasurements.query.filter(
            DeviceMeasurements.unique_id == measurement_id).first()

        if measure:
            conversion = Conversion.query.filter(
                Conversion.unique_id == measure.conversion_id).first()
        else:
            conversion = None

        channel, unit, measurement = return_measurement_info(
            measure, conversion)

        if hasattr(measure, 'measurement_type') and measure.measurement_type == 'setpoint':
            setpoint_pid = PID.query.filter(PID.unique_id == measure.device_id).first()
            if setpoint_pid and ',' in setpoint_pid.measurement:
                pid_measurement = setpoint_pid.measurement.split(',')[1]
                setpoint_measurement = DeviceMeasurements.query.filter(
                    DeviceMeasurements.unique_id == pid_measurement).first()
                if setpoint_measurement:
                    conversion = Conversion.query.filter(
                        Conversion.unique_id == setpoint_measurement.conversion_id).first()
                    _, unit, measurement = return_measurement_info(setpoint_measurement, conversion)

        try:
            if period != '0':
                query_str = query_string(
                    unit, unique_id,
                    measure=measurement, channel=channel,
                    value='LAST', past_sec=period)
            else:
                query_str = query_string(
                    unit, unique_id,
                    measure=measurement, channel=channel,
                    value='LAST')
            if query_str == 1:
                return '', 204

            raw_data = dbcon.query(query_str).raw

            last_row = raw_data['series'][0]['values'][-1]
            time_raw = last_row[0]
            value = float(last_row[1])
            # Convert date-time to epoch (potential bottleneck for data)
            dt = date_parse(time_raw)
            timestamp = calendar.timegm(dt.timetuple()) * 1000
            live_data = '[{},{}]'.format(timestamp, value)
            return Response(live_data, mimetype='text/json')
        except (KeyError, IndexError):
            logger.debug("No data returned from influxdb")
            return '', 204
        except Exception as e:
            logger.exception("URL for 'last_data' raised an error: "
                             "{err}".format(err=e))
            return '', 204
Example #3
def export_data(unique_id, measurement_id, start_seconds, end_seconds):
    """
    Return data from start_seconds to end_seconds from influxdb.
    Used for exporting data.
    """
    dbcon = InfluxDBClient(
        INFLUXDB_HOST,
        INFLUXDB_PORT,
        INFLUXDB_USER,
        INFLUXDB_PASSWORD,
        INFLUXDB_DATABASE, timeout=100)

    output = Output.query.filter(Output.unique_id == unique_id).first()
    input_dev = Input.query.filter(Input.unique_id == unique_id).first()
    math = Math.query.filter(Math.unique_id == unique_id).first()

    if output:
        name = output.name
    elif input_dev:
        name = input_dev.name
    elif math:
        name = math.name
    else:
        name = None

    device_measurement = DeviceMeasurements.query.filter(
        DeviceMeasurements.unique_id == measurement_id).first()
    if device_measurement:
        conversion = Conversion.query.filter(
            Conversion.unique_id == device_measurement.conversion_id).first()
    else:
        conversion = None
    channel, unit, measurement = return_measurement_info(
        device_measurement, conversion)

    utc_offset_timedelta = datetime.datetime.utcnow() - datetime.datetime.now()
    start = datetime.datetime.fromtimestamp(float(start_seconds))
    start += utc_offset_timedelta
    start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    end = datetime.datetime.fromtimestamp(float(end_seconds))
    end += utc_offset_timedelta
    end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

    query_str = query_string(
        unit, unique_id,
        measure=measurement, channel=channel,
        start_str=start_str, end_str=end_str)
    if query_str == 1:
        flash('Invalid query string', 'error')
        return redirect(url_for('routes_page.page_export'))
    raw_data = dbcon.query(query_str).raw

    if not raw_data or 'series' not in raw_data or not raw_data['series']:
        flash('No measurements to export in this time period', 'error')
        return redirect(url_for('routes_page.page_export'))

    # Generate column names
    col_1 = 'timestamp (UTC)'
    col_2 = '{name} {meas} ({id})'.format(
        name=name, meas=measurement, id=unique_id)
    csv_filename = '{id}_{name}_{meas}.csv'.format(
        id=unique_id, name=name, meas=measurement)

    from flask import Response
    import csv
    from io import StringIO

    def iter_csv(data):
        """ Stream CSV file to user for download """
        line = StringIO()
        writer = csv.writer(line)
        writer.writerow([col_1, col_2])
        for csv_line in data:
            writer.writerow([
                str(csv_line[0][:-4]).replace('T', ' '),
                csv_line[1]
            ])
            line.seek(0)
            yield line.read()
            line.truncate(0)
            line.seek(0)

    response = Response(iter_csv(raw_data['series'][0]['values']), mimetype='text/csv')
    response.headers['Content-Disposition'] = 'attachment; filename="{}"'.format(csv_filename)
    return response
Example #4
        "Area": "North America",
        "Location": "New York City",
        "ClientIP": "192.168.0.256"
    },
    "fields": {
        "SessionDuration": 1.2
    }
}, {
    "measurement": "UserLogins",
    "tags": {
        "Area": "South America-popo",
        "Location": "Lima",
        "ClientIP": "192.168.1.256"
    },
    "fields": {
        "SessionDuration": 2.0
    }
}]

dbClient = InfluxDBClient('qwerty.com.ar', 8086, 'mim_tp1',
                          'mim_tp1_transporte', 'AccessHistory')

# Write the time series data points into database - user login details
dbClient.create_database('AccessHistory')
dbClient.write_points(loginEvents)

# Query the logins that have been made, including their IPs
loginRecords = dbClient.query('select * from UserLogins;')

# Print the time series query results
print(loginRecords)
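
To work with individual rows rather than the printed ResultSet, the ResultSet's get_points() helper can be used:

for point in loginRecords.get_points(measurement='UserLogins'):
    print(point['time'], point['Location'], point['SessionDuration'])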
Example #5
def get_unified_metric(start, end, interval, value, compress):  # noqa: E501
    """get_unified_metric

    Get **unified metrics** based on specified start time, end time, time interval and value type. The **start** and **end** time should follow the date-time notation defined by [RFC 3339, section 5.6](https://tools.ietf.org/html/rfc3339#section-5.6), e.g. `2020-02-12T14:00:00Z`; the time **interval** should follow **duration literals**, which specify a length of time; the **value** type should only be `min`, `max`, `mean`, or `median`. A duration literal is an integer literal followed immediately (with no spaces) by a duration unit; the units include `s` (second), `m` (minute), `h` (hour), `d` (day), `w` (week).  # noqa: E501

    :param start: start time of time range of the monitoring metrics
    :type start: str
    :param end: end time of time range of the monitoring metrics
    :type end: str
    :param interval: time interval for aggregating the monitoring metrics
    :type interval: str
    :param value: value type of the monitoring metrics
    :type value: str
    :param compress: return compressed data
    :type compress: bool

    :rtype: UnifiedMetrics
    """

    # Initialization
    config = parse_conf()
    node_list = parse_host()
    host = config["influxdb"]["host"]
    port = config["influxdb"]["port"]

    start = util.deserialize_datetime(start)
    end = util.deserialize_datetime(end)

    # The database was switched at April 28, 2020 11:40:00 AM GMT-05:00 DST
    switch_time = 1588092000
    start_epoch = int(start.timestamp())
    end_epoch = int(end.timestamp())

    # Check Sanity
    if start_epoch >= switch_time:
        dbname = config["influxdb"]["db_monster"]
    elif end_epoch <= switch_time:
        dbname = config["influxdb"]["database"]
    else:
        return ErrorMessage(
            error_code='400 INVALID_PARAMETERS',
            error_message='Because the database was switched on April 28, \
                2020 11:40:00 AM GMT-05:00 DST, requests whose time range \
                    spans this switch point are not currently supported.')

    if start > end:
        return ErrorMessage(
            error_code='400 INVALID_PARAMETERS',
            error_message='Start time should be no later than end time')
    else:
        # Initialize influxdb client
        client = InfluxDBClient(host=host, port=port, database=dbname)

        cpu_count = multiprocessing.cpu_count()

        results = []
        node_data = {}
        job_data = {}
        all_jobs_list = []

        # Time string used in query_data
        st_str = start.strftime('%Y-%m-%dT%H:%M:%SZ')
        et_str = end.strftime('%Y-%m-%dT%H:%M:%SZ')

        unified_metrics = UnifiedMetrics()

        # query_start = time.time()

        # Get time stamp
        time_list = gen_timestamp(start, end, interval)
        epoch_time_list = gen_epoch_timestamp(start, end, interval)

        # unified_metrics.time_stamp = epoch_time_list
        if compress:
            unified_metrics.time_stamp = json_zip(epoch_time_list)
        else:
            unified_metrics.time_stamp = epoch_time_list

        # Get all nodes detail
        query_process_data_args = zip(node_list, repeat(client),
                                      repeat(st_str), repeat(et_str),
                                      repeat(interval), repeat(value),
                                      repeat(time_list))

        with multiprocessing.Pool(processes=cpu_count) as pool:
            results = pool.starmap(query_process_data, query_process_data_args)

        # Attach data to node ip addr
        for index, node in enumerate(node_list):
            if results[index]:
                node_data[node] = {
                    "memory_usage": results[index]["memory_usage"],
                    "cpu_usage": results[index]["cpu_usage"],
                    "power_usage": results[index]["power_usage"],
                    "fan_speed": results[index]["fan_speed"],
                    "cpu_inl_temp": results[index]["cpu_inl_temp"],
                    "job_id": results[index]["job_list"]
                }
                if results[index]["job_set"]:
                    all_jobs_list.extend(results[index]["job_set"])

        if compress:
            unified_metrics.nodes_info = json_zip(node_data)
        else:
            unified_metrics.nodes_info = node_data

        # Get all jobs ID
        all_jobs_id = list(set(all_jobs_list))
        query_job_data_args = zip(repeat(client), all_jobs_id)

        # Get all jobs detail
        with multiprocessing.Pool(processes=cpu_count) as pool:
            results = pool.starmap(query_job_data, query_job_data_args)

        for index, job in enumerate(all_jobs_id):
            if results[index]:
                job_array = False
                if "." in results[index]["JobId"]:
                    job_array = True
                if "FinishTime" in results[index]:
                    finish_time = results[index]["FinishTime"]
                else:
                    finish_time = None

                if "NodeList" in results[index]:
                    node_list = results[index]["NodeList"]
                    pro_nodelist = process_nodelist(node_list)
                else:
                    node_list = None

                job_data[job] = {
                    "start_time": results[index]["StartTime"],
                    "submit_time": results[index]["SubmitTime"],
                    "finish_time": finish_time,
                    "job_name": results[index]["JobName"],
                    "user_name": results[index]["User"],
                    "node_list": pro_nodelist,
                    "total_nodes": results[index]["TotalNodes"],
                    "cpu_cores": results[index]["CPUCores"],
                    "job_array": job_array
                }
        if compress:
            unified_metrics.jobs_info = json_zip(job_data)
        else:
            unified_metrics.jobs_info = job_data

        # total_elapsed = float("{0:.2f}".format(time.time() - query_start))
        # # In seconds
        # time_range = int(end.timestamp()) - int(start.timestamp())

        # with open("requests.log", "a+") as requests_log:
        #     print(f"{time_range}|{interval}|{value}|{total_elapsed}", \
        # file = requests_log)

    return unified_metrics
Example #6
     
    #if power is on, do nothing
    else:
        print("Power is on.")

#dump current status of power into pickle file
with open('/home/pi/python/data.pickle', 'wb') as f:
    pickle.dump(power_out, f, pickle.HIGHEST_PROTOCOL)

#send data to influxdb server
url = ""
port = ""
username = ""
pwd = ""
db_name = ""
client = InfluxDBClient(url, port, username, pwd, db_name)
current_time = str(datetime.datetime.utcnow())
current_status = "On"
if power_out == 0:
    current_status = "Out"
json_body = [
    {"measurement": "Power Status",
        "time": current_time,
        "fields": {
            "Status": current_status
            }
        }
    ]
client.write_points(json_body)
 
     
Example #7
#!/usr/bin/env python3
import mh_z19
from influxdb import InfluxDBClient

homestats = InfluxDBClient(host='raspberrypi4bserver.local',
                           database='homestats')

points = [{
    'measurement': 'sensor',
    'tags': {
        'location': 'myRoom'
    },
    'fields': {
        'co2': mh_z19.read()['co2']
    }
}]
homestats.write_points(points)
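
As written, the script records a single reading and exits; to log continuously, one option (a sketch, not part of the original) is a simple polling loop:

import time

while True:
    homestats.write_points([{
        'measurement': 'sensor',
        'tags': {'location': 'myRoom'},
        'fields': {'co2': mh_z19.read()['co2']}
    }])
    time.sleep(60)  # one reading per minute; interval is an assumption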
Example #8
if __name__ == "__main__":
    i = 1.1
    while (True):
        json_body = [{
            "measurement": "sensorData",
            "tags": {
                "source": "bedroom",
                "type": "temperature"
            },
            "fields": {
                "value": i
            }
        }]

        i = i * 1.1
        if (i >= 100.0):
            i = 1.1
        client = InfluxDBClient('localhost', 8086, 'admin', 'admin',
                                'hackaday')

        client.create_database('hackaday')

        client.write_points(json_body)

        result = client.query('select value from sensorData;')

        print("Result: {0}".format(result))

        time.sleep(1)
Example #9
def __init__(self, config, **kwargs):
    self.config = config
    self.client = InfluxDBClient(username=config['indexer']['user'],
                                 password=config['indexer']['password'],
                                 database=config['indexer']['db'],
                                 host=config['indexer'].get('host'))
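
For reference, a config mapping satisfying the keys this constructor reads might look like this (all values are placeholders):

config = {
    'indexer': {
        'user': 'indexer',
        'password': 'secret',
        'db': 'indexer_db',
        'host': 'localhost',
    }
}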
Example #10
import os
import time

import psutil
from influxdb import InfluxDBClient

# get the PID of the current process
p1 = psutil.Process(os.getpid())

# create the client and database once, outside the loop
client = InfluxDBClient('localhost', 8086, 'root', 'root', 'xxyyxx')
client.create_database('xxyyxx')

while True:
    a = psutil.virtual_memory().percent  # memory usage percentage

    b = psutil.cpu_percent(interval=1.0)  # CPU usage percentage

    json_body = [{
        "measurement": "cpu_load_short",
        "tags": {
            "host": "server01",
            "region": "us-west"
        },
        #"time": "2009-11-10T23:00:00Z",
        "fields": {
            "cpu": b,
            "mem": a
        }
    }]
    client.write_points(json_body)
    #result = client.query('select value from cpu_load_short;')
    #print("Result: {0}".format(result))
    time.sleep(2)
Example #11
            "Luminance": Sensor_Read.Luminance,
            "RelativeHumidity": Sensor_Read.RelativeHumidity,
            "Temperature": Sensor_Read.Temperature
        }
    }]
    print("Dinesh" + str(json_body))
    client_influx.write_points(json_body)
    print("Wrote values to DB")
    update_problem_pddl()
    print("Problem Updated")
    pddl_plan = get_pddl_plan()
    publish_pddl_plan(pddl_plan)
    print(pddl_plan)


client_influx = InfluxDBClient('localhost', 8086, 'root', 'root',
                               'Sensor_Values')
client_influx.create_database('Sensor_Values')

#MQTT Client instantiation
mqtt_client = mqtt.Client()

# Linking the client with callback implementations
mqtt_client.on_connect = on_connect
mqtt_client.on_message = on_message

# Connecting to the MQTT Broker
mqtt_client.connect(MQTT_BROKER_IP, MQTT_BROKER_PORT, MQTT_KEEPALIVE_INTERVAL)

mqtt_client.loop_start()
mqtt_client.subscribe(MQTT_TOPIC)
print("Subscribed !!")
Example #12
import webbrowser as web

from easysnmp import Session  # assumed source of Session
from influxdb import InfluxDBClient

# snmp constants
HOSTNAME = 'localhost'
COMMUNITY = 'public'
OID_RAM_UNUSED = 'iso.3.6.1.4.1.2021.4.11.0'
OID_DISK_USAGE = 'iso.3.6.1.4.1.2021.9.1.9.1'
OID_CPU_USAGE = 'iso.3.6.1.2.1.25.3.3.1.2'  # ...1.2.core_number

URL_DASHBOARD = 'http://localhost:8888'
DB_NAME = 'system'
GRAPHICS_PATH = './Analytics'

# Initializing snmp session
session = Session(hostname=HOSTNAME, community=COMMUNITY, version=2)

# Initializing database
client = InfluxDBClient(HOSTNAME, 8086, 'admin', '1234', DB_NAME)
client.create_database(DB_NAME)

# Open dashboard automatically
web.open_new_tab(URL_DASHBOARD)

print('Running...')
print('Press ctrl+c to stop monitoring and view results.')

while True:
    entry = []
    cpus = session.walk(OID_CPU_USAGE)
    nCores = len(cpus)
    load_sum = 0
    for c in cpus:
        load_sum += int(c.value)
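
The loop is cut off after summing the per-core loads; a plausible continuation (the measurement and field names are assumptions, and it presumes "import time" at the top) averages the cores and writes one point per pass:

    avg_cpu = load_sum / nCores
    entry.append({
        "measurement": "cpu",
        "fields": {"usage_percent": avg_cpu}
    })
    client.write_points(entry)
    time.sleep(1)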
Example #13
import json

import flask_sijax
import numpy as np
from flask import Flask
from influxdb import InfluxDBClient

app = Flask(__name__)

#below for flask-sijax
app.config['SIJAX_STATIC_PATH'] = path
app.config['SIJAX_JSON_URI'] = '/static/js/sijax/json2.js'
flask_sijax.Sijax(app)

dict_cnameIDSCnames = {}
with open('cname_id_name1.json') as f:
    dict_cnameIDSCnames = json.load(f)

data_cname_sedc = {}
with open('data_cname.json') as f:
    data_cname_sedc = json.load(f)

client = InfluxDBClient(host="localhost", port=8086)
client.get_list_database()
client.switch_database("sedc_test")

#get all sedc measurements
measurements = client.query("Show measurements").raw
series = measurements['series']
m_vals = np.array(series[0]['values']).flatten()
ori_response = {}
ori_response["mvals"] = m_vals
ori_response["cnames_all"] = dict_cnameIDSCnames.keys()


def get_TV(data, color):
    arr = np.array(data['series'][0]['values'])
    arr_vals = arr[:, -1]
Example #14
    def getGardenCallback(self, err, res):
        if err:
            print err
        else:

            if configuration["influxdb"]["enabled"]:
                influxDbClient = InfluxDBClient(
                    configuration["influxdb"]["server"],
                    configuration["influxdb"]["port"],
                    configuration["influxdb-username"],
                    configuration["influxdb"]["password"],
                    configuration["influxdb"]["database"])

                try:
                    influxDbClient.create_database(
                        configuration["influxdb"]["database"])
                except InfluxDBClientError, ex:
                    print "InfluxDBClientError", ex

                influxDbClient.create_retention_policy(
                    configuration["influxdb"]["policy"],
                    'INF',
                    3,
                    default=True)

            for location in res["locations"]:
                #print json.dumps(location, indent=2, sort_keys=True)
                sensorId = location["sensor"]["sensor_identifier"][-4:].lower()

                flower = {}
                #flower["sensor_name"] = location["sensor"]["sensor_identifier"]

                if location["battery"]["gauge_values"][
                        "current_value"] is not None:
                    flower["battery"] = (
                        "Battery",
                        int(location["battery"]["gauge_values"]
                            ["current_value"]))

                if location["air_temperature"]["gauge_values"][
                        "current_value"] is not None:
                    flower["air_temperature"] = (
                        "Temperature",
                        float(location["air_temperature"]["gauge_values"]
                              ["current_value"]))
                    flower["air_temperature_status"] = [
                        "Temperature Status",
                        str(location["air_temperature"]
                            ["instruction_key"]).replace(
                                "air_temperature_", ""),
                        ["good", "too_low", "too_high"]
                    ]

                if location["fertilizer"]["gauge_values"][
                        "current_value"] is not None:
                    flower["fertilizer"] = (
                        "Fertilizer",
                        float(location["fertilizer"]["gauge_values"]
                              ["current_value"]))
                    flower["fertilizer_status"] = [
                        "Fertilizer Status",
                        str(location["fertilizer"]["instruction_key"]).replace(
                            "fertilizer_", ""),
                        ["good", "too_low", "too_high"]
                    ]

                if location["light"]["gauge_values"][
                        "current_value"] is not None:
                    flower["light"] = ("Light",
                                       float(location["light"]["gauge_values"]
                                             ["current_value"]))
                    flower["light_status"] = [
                        "Light Status",
                        str(location["light"]["instruction_key"]).replace(
                            "light_", ""), ["good", "too_low", "too_high"]
                    ]

                if location["watering"]["soil_moisture"]["gauge_values"][
                        "current_value"] is not None:
                    flower["watering"] = (
                        "Moisture",
                        float(location["watering"]["soil_moisture"]
                              ["gauge_values"]["current_value"]))
                    flower["watering_status"] = [
                        "Moisture Status",
                        str(location["watering"]["soil_moisture"]
                            ["instruction_key"]).replace("soil_moisture_", ""),
                        ["good", "too_low", "too_high"]
                    ]

                lastUtc = ("Updated", str(location["last_sample_utc"]))

                if configuration["mqtt"]["enabled"]:
                    print "Pushing Mqtt", sensorId, ":", configuration["mqtt"][
                        "prefix"], flower
                    try:
                        broadcastMqtt(configuration["mqtt"]["client"],
                                      configuration["mqtt"]["server"],
                                      configuration["mqtt"]["port"],
                                      configuration["mqtt"]["prefix"],
                                      sensorId + "/update",
                                      json.dumps(flower))
                    except Exception, ex:
                        print "Error on mqtt broadcast", ex

                if configuration["prometheuspush"]["enabled"]:
                    registry = CollectorRegistry()
                    for key in flower.keys():
                        print "Pushing", sensorId, ":", configuration[
                            "prometheuspush"][
                                "prefix"] + '_' + key + '_total', "=", flower[
                                    key]

                        if flower[key][1] is None:
                            continue

                        elif type(flower[key][1]) is str:
                            e = Enum(
                                configuration["prometheuspush"]["prefix"] +
                                '_' + key + '_total',
                                flower[key][0], ['sensorid'],
                                states=flower[key][2],
                                registry=registry)

                            e.labels(sensorid=sensorId).state(flower[key][1])
                        else:
                            g = Gauge(
                                configuration["prometheuspush"]["prefix"] +
                                '_' + key + '_total',
                                flower[key][0], ['sensorid'],
                                registry=registry)

                            g.labels(sensorid=sensorId).set(flower[key][1])

                    print "Pushing", sensorId, ":", configuration[
                        "prometheuspush"][
                            "prefix"] + '_' + key + '_total', "=", flower[key]

                    try:
                        push_to_gateway(
                            configuration["prometheuspush"]["server"] + ":" +
                            configuration["prometheuspush"]["port"],
                            job=configuration["prometheuspush"]["client"] +
                            "_" + sensorId,
                            registry=registry)
                    except:
                        print "Prometheus not available"

                if configuration["influxdb"]["enabled"]:
                    influxDbJson = [{
                        "measurement":
                        configuration["influxdb"]["prefix"],
                        "tags": {
                            "sensor": sensorId,
                        },
                        "time":
                        lastUtc[1],
                        "fields": {}
                    }]
                    for key in flower.keys():
                        influxDbJson[0]["fields"][key] = flower[key][1]

                    print "Pushing", influxDbJson
                    try:
                        influxDbClient.write_points(
                            influxDbJson,
                            retention_policy=configuration["influxdb"]
                            ["policy"])
                    except:
                        print "Influxdb not available"
Example #15
#!/usr/bin/python

# Run this script local to retrieve data from serial and write directly into influxDB

import serial
from influxdb import InfluxDBClient

client = InfluxDBClient('192.168.33.56', 8086, 'root', 'root', 'c-base/c-lab')
print(client.query("SHOW DATABASES"))

with serial.Serial('/dev/ttyACM4', 115200, timeout=1) as ser:
    while True:
        line = ser.readline().strip()

        if line:
            line = line.split(' ')
            if len(line) == 2:
                print(line)
                tags = line[0].split(',')
                data = line[1].split(',')
                humidity = data[0].split('=')
                temperature = data[1].split('=')

                #print(data)
                #print(humidity)
                #print(temperature)
                #print(tags)

                sensor = tags[1].split('=')
                #print(sensor)
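
The snippet is truncated before the write; a minimal continuation (the measurement and field names are assumptions) using the key=value pairs parsed above:

                json_body = [{
                    "measurement": "climate",
                    "tags": {"sensor": sensor[1]},
                    "fields": {
                        "humidity": float(humidity[1]),
                        "temperature": float(temperature[1])
                    }
                }]
                client.write_points(json_body)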
Example #16
"""
Send packets to magical InfluxDB!
"""

from influxdb import InfluxDBClient

influx_client = InfluxDBClient('localhost', 8086, 'root', 'root', 'sflow')


def mangle_flow(flow):

    json_body = [{
        "measurement": "realtime",
        "tags": flow['metadata'],
        "fields": {
            "packets":
            flow['sample']['sampling_rate'],
            "octets":
            (flow['frame_length'] - flow['stripped'] - flow['header_length']) *
            flow['sample']['sampling_rate'],
        }
    }]

    try:
        influx_client.write_points(json_body)
        print("Wrote to InfluxDB!")
    except Exception:
        print("Couldn't write to InfluxDB :(")

    return flow
Example #17
influxdb_database = os.getenv("INFLUXDB_DATABASE")


def persists(measurement, fields, time):
    logging.info("{} {} {}".format(time, measurement, fields))

    influx_client.write_points([{
        "measurement": measurement,
        "time": time,
        "fields": fields
    }])


influx_client = InfluxDBClient(
    host=influxdb_host,
    port=influxdb_port,
    database=influxdb_database
)


def get_speed():
    logging.info("Calculating speed ...")
    s = speedtest.Speedtest()
    s.get_best_server()
    s.download()
    s.upload()

    return s.results.dict()


# def loop(sleep):
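
The snippet ends at the commented-out loop; a sketch of how get_speed() and persists() might be wired together (the measurement name and interval are assumptions, and it presumes "import datetime" and "import time"):

def loop(sleep):
    while True:
        results = get_speed()
        persists(
            measurement="internet_speed",
            fields={
                "download": results["download"],
                "upload": results["upload"],
                "ping": results["ping"]
            },
            time=datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"))
        time.sleep(sleep)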
Example #18
    url = "https://auth.docker.io/token?service=registry.docker.io&scope=repository:ratelimitpreview/test:pull"
    r = requests.get(url)
    return r.json()['token']


def get_remain(token):
    url = "https://registry-1.docker.io/v2/ratelimitpreview/test/manifests/latest"
    header_token = "Bearer " + token
    r = requests.head(url, headers={"Authorization": header_token})
    return r.headers['RateLimit-Remaining'].split(';')[0]


if __name__ == '__main__':
    remain = get_remain(get_token())
    client = InfluxDBClient(host=INFLUXDB_HOST,
                            port=INFLUXDB_PORT,
                            database=INFLUXDB_DB)
    json_body = [{
        "measurement":
        "dockerhub-ratelimit",
        "tags": {
            "host": INFLUXDB_TAG,
        },
        "time":
        str(datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")),
        "fields": {
            "remain": remain,
        }
    }]
    client.write_points(json_body)
Example #19
import time
import random
from influxdb import InfluxDBClient

#Initialize start time of the program
start_time = time.time()

# Configure InfluxDB connection variables
host = "3.86.90.156"  # AWS EC2 Instance_ID i-0610264e5f0651062
port = 8086  # default port
user = "******"  # the user/password created for the pi, with write access
password = "******"
dbname = "OSNDS"  # the database we created earlier

#Initializes the influxDB client
influxClient = InfluxDBClient(host, port, user, password, dbname)

#Initializes a counter variable
counter = 1


#Method to get acceleration data
def getAcceleration():
    # return a fresh [x, y, z] sample on each call instead of
    # appending to one ever-growing module-level list
    return [random.randint(-20, 20),
            random.randint(-20, 20),
            random.randint(-20, 20)]


def toInfluxFormat():
Example #20
# import required files -------------------
from influxdb import InfluxDBClient
import pika
import sys
import json
import time


# declarations--------------------
client = InfluxDBClient(host='localhost', port=8086)
#def send_data_to_server(data):
cred=pika.PlainCredentials('admin', 'password')
connection = pika.BlockingConnection(pika.ConnectionParameters(host='34.224.98.65',credentials=cred))
print("connection-",connection)
channel = connection.channel()
channel.queue_declare(queue='local_queue', durable=True)


# code----------------------
while True:
    time.sleep(10)
    data = read_from_db()
    send_data_to_server(data)



def read_from_db():
    print("----------------------\nin read from db")
    #client.switch_database('ftest')
    client.query("alter retention policy onehr on ftest duration 1h replication 1 default")
Example #21
import zmq
import time
import json
from influxdb import InfluxDBClient, SeriesHelper

USER = ''
PASSWORD = ''
DBNAME = 'carDB'
influx = InfluxDBClient('localhost', 8086, USER, PASSWORD, DBNAME)
influxLineString = ""
latControlstring = 'steerData3,testName=secondRun,active=%s,ff_type=%s ff_type_a=%s,ff_type_r=%s,steer_status=%s,steer_torque_motor=%s,steering_control_active=%s,steer_parameter1=%s,steer_parameter2=%s,steer_parameter3=%s,steer_parameter4=%s,steer_parameter5=%s,steer_parameter6=%s,steer_stock_torque=%s,steer_stock_torque_request=%s,x=%s,y=%s,y1=%s,y2=%s,y3=%s,y3=%s,psi=%s,delta=%s,t=%s,curvature_factor=%s,slip_factor=%s,resonant_period=%s,accel_limit=%s,restricted_steer_rate=%s,ff_angle_factor=%s,ff_rate_factor=%s,pCost=%s,lCost=%s,rCost=%s,hCost=%s,srCost=%s,' + \
                    'steer_torque_motor=%s,driver_torque=%s,angle_rate_count=%s,angle_rate_desired=%s,avg_angle_rate=%s,future_angle_steers=%s,angle_rate=%s,steer_zero_crossing=%s,' + \
                    'center_angle=%s,angle_steers=%s,angle_steers_des=%s,angle_offset=%s,self.angle_steers_des_mpc=%s,' + \
                    'steerRatio=%s,steerKf=%s,steerKpV[0]=%s,steerKiV[0]=%s,steerRateCost=%s,l_prob=%s,r_prob=%s,c_prob=%s,p_prob=%s,' + \
                    'l_poly[0]=%s,l_poly[1]=%s,l_poly[2]=%s,l_poly[3]=%s,r_poly[0]=%s,r_poly[1]=%s,r_poly[2]=%s,r_poly[3]=%s,p_poly[0]=%s,p_poly[1]=%s,p_poly[2]=%s,p_poly[3]=%s,' + \
                    'c_poly[0]=%s,c_poly[1]=%s,c_poly[2]=%s,c_poly[3]=%s,d_poly[0]=%s,d_poly[1]=%s,d_poly[2]=%s,lane_width=%s,lane_width_estimate=%s,lane_width_certainty=%s,' + \
                    'v_ego=%s,p=%s,i=%s,f=%s %s\n'

canFrameString = 'rawCANData,pid=%s,bus=%s First32=%di,Second32=%di,word1=%di,word2=%di,word3=%di,sword4=%di,sword1=%di,sword2=%di,sword3=%di,word4=%di %d\n'
canByteString = 'rawCANBytes,pid=%s,bus=%s,bNum=b%s data=%di %d\n'
#steerFrameString = 'steerData,testName=secondRun lkas_hud_GERNBY1=%s,lkas_hud_GERNBY2=%s,lkas_hud_LKAS_PROBLEM=%s,lkas_hud_LKAS_OFF=%s,lkas_hud_LDW_RIGHT=%s,lkas_hud_BEEP=%s,lkas_hud_LDW_ON=%s,lkas_hud_LDW_OFF=%s,lkas_hud_CLEAN_WINDSHIELD=%s,lkas_hud_DTC=%s,lkas_hud_CAM_TEMP_HIGH=%s,radar_hud_gernby1=%s,radar_hud_gernby2=%s,radar_hud_gernby3=%s,radar_hud_gernby4=%s,radar_hud_gernby5=%s,radar_hud_gernby6=%s,radar_hud_CMBS_OFF=%s,radar_hud_RESUME_INSTRUCTION=%s,stock_steer_request=%s,stock_steer_set_me_x00=%s,stock_steer_set_me_x00_2=%s,stock_steer_steer_torque=%s,lkas_hud_solid_lanes=%s,lkas_hud_steering_required=%s,lkas_hud_set_me_x48=%s,lkas_hud_set_me_x41=%s,lkas_hud_dashed_lanes=%s,speed=%s,lane11=%s,lane12=%s,lane13=%s,lane14=%s,lane15=%s,lane16=%s,lane17=%s,lane18=%s,lane19=%s,lane1A=%s,lane31=%s,lane32=%s,lane33=%s,lane34=%s,lane35=%s,lane36=%s,lane37=%s,lane38=%s,lane39=%s,lane3A=%s,lane51=%s,lane52=%s,lane53=%s,lane54=%s,lane55=%s,lane56=%s,lane57=%s,lane58=%s,lane59=%s,lane5A=%s,lane71=%s,lane72=%s,lane73=%s,lane74=%s,lane75=%s,lane76=%s,lane77=%s,lane78=%s,lane79=%s,lane7A=%s,stock_lane_center=%s,protect_hard=%s,min_steer_limit=%s,OP_STEER_AT_STOCK_LANE_CENTER=%s,lane_diff_1=%s,lane_diff_2=%s,cross_diff=%s,OP_apply_steer=%s,angle_steers=%s,angle_steers_rate=%s,avg_steer_limit=%s,frame=%s,avg_lane_center=%s,sample_count=%s,sent_apply_steer=%s,steer_torque_driver=%s,stock_lane_limit=%s %s\n'
steerFrameString = 'steerData2,testName=secondRun,active=%s ' + \
                    'lane11=%si,lane12=%si,lane13=%si,lane14=%si,lane15=%si,lane16=%si,lane17=%si,lane18=%si,lane19=%si,lane1A=%si,' + \
                    'lane31=%si,lane32=%si,lane33=%si,lane34=%si,lane35=%si,lane36=%si,lane37=%si,lane38=%si,lane39=%si,lane3A=%si,' + \
                    'lane51=%si,lane52=%si,lane53=%si,lane54=%si,lane55=%si,lane56=%si,lane57=%si,lane58=%si,lane59=%si,lane5A=%si,' + \
                    'lane71=%si,lane72=%si,lane73=%si,lane74=%si,lane75=%si,lane76=%si,lane77=%si,lane78=%si,lane79=%si,lane7A=%si,' + \
                    'angle_steers=%si,angle_steers_rate=%si,sent_apply_steer=%si,stock_steer_steer_torque=%si,steer_torque_driver=%si,' + \
                    'stock_lane_center=%si,stock_lane_curvature=%si,avg_lane_center=%si,avg_lane_curvature=%si,avg_steer_angle=%si,avg_steer_error=%si,' + \
                    'lkas_hud_GERNBY1=%si,lkas_hud_GERNBY2=%si,lkas_hud_LKAS_PROBLEM=%si,lkas_hud_LKAS_OFF=%si,lkas_hud_LDW_RIGHT=%si,lkas_hud_BEEP=%si,' + \
                    'lkas_hud_LDW_ON=%si,lkas_hud_LDW_OFF=%si,lkas_hud_CLEAN_WINDSHIELD=%si,lkas_hud_DTC=%si,lkas_hud_CAM_TEMP_HIGH=%si,radar_hud_gernby1=%si,' + \
                    'radar_hud_gernby2=%si,radar_hud_gernby3=%si,radar_hud_gernby4=%si,radar_hud_gernby5=%si,radar_hud_gernby6=%si,radar_hud_CMBS_OFF=%si,' + \
Example #22
def setUp(self):
    self.c = InfluxDBClient(database=INFLUXDB_DATABASE,
                            username=INFLUXDB_USER,
                            password=INFLUXDB_PASSWORD)
    self.c.delete_series(measurement='Sensor')
Example #23
from influxdb import InfluxDBClient

'''
Username and password to connect to InfluxDB. Prefer environment variables
over hard-coding credentials.
'''
dbClient = InfluxDBClient('localhost', 8086, 'sabaszx', 'admin', 'trapEvent', ssl=False, verify_ssl=False)
dbClient.switch_database('trapEvent')

#Count number (NEW)
def countSSID(receive, ssid_name):
    json_body = [{
        "measurement": "countSSID",
        "tags": {
            "SSIDName": ssid_name,
            "type": "known_ssid"
        },
        "fields": {
            "item": receive
        }
    }]
    dbClient.write_points(json_body)
'''
#count number
def countAIS(receive):
    json_body = [{
                "measurement": "countSSID",
                "tags": {
                    "SSIDName": "AIS Smart Login",
                "type": "known_ssid"},
                "fields": {
Example #24
import paho.mqtt.client as mqtt
from influxdb import InfluxDBClient
from flask import Flask, request, json
from flask_restful import Resource, Api
import datetime

broker_address = "FILL THIS IN"

client = mqtt.Client()
client.connect(broker_address)

dbclient = InfluxDBClient('0.0.0.0', 8086, 'root', 'root', 'mydb')

app = Flask(__name__)
api = Api(app)


class Test(Resource):
    def get(self):
        query = 'select mean("value") from "/lightstate" where "time" > now() - 10s'
        result = dbclient.query(query)
        try:
            light_avg = list(
                result.get_points(measurement='/lightstate'))[0]['mean']
            return {'average': light_avg}
        except Exception:
            print('exception')

    def post(self):
        value = request.get_data()
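
The POST handler is cut off after reading the request body; a plausible completion (the measurement name mirrors the GET query, the rest is an assumption) writes the posted value as a point:

        json_body = [{
            'measurement': '/lightstate',
            'time': datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),
            'fields': {'value': float(value)}  # body expected to be a number
        }]
        dbclient.write_points(json_body)
        return {'status': 'ok'}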
Example #25
def connect(self):
    self.client = InfluxDBClient(host=self.host, port=self.port)
Example #26
        return_msg = daemon.ram_use()
        logger.info("[Remote command] Daemon Ram in Use: {msg} MB".format(
            msg=return_msg))

    elif args.input_force_measurements:
        return_msg = daemon.input_force_measurements(
            args.input_force_measurements)
        logger.info(
            "[Remote command] Force acquiring measurements for Input with "
            "ID '{id}': Server returned: {msg}".format(
                id=args.input_force_measurements, msg=return_msg))

    elif args.get_measurement:
        client = InfluxDBClient(INFLUXDB_HOST,
                                INFLUXDB_PORT,
                                INFLUXDB_USER,
                                INFLUXDB_PASSWORD,
                                INFLUXDB_DATABASE,
                                timeout=5)
        query = "SELECT LAST(value) FROM {unit} " \
                "WHERE device_id='{id}' " \
                "AND channel='{channel}'".format(
                    unit=args.get_measurement[1],
                    id=args.get_measurement[0],
                    channel=args.get_measurement[2])

        try:
            last_measurement = client.query(query).raw
        except requests.exceptions.ConnectionError:
            logger.debug("ERROR: Failed to establish a new influxdb "
                         "connection. Ensure influxdb is running.")
            last_measurement = None
Example #27
def past_data(unique_id, measure_type, measurement_id, past_seconds):
    """Return data from past_seconds until present from influxdb"""
    if not str_is_float(past_seconds):
        return '', 204

    if measure_type == 'tag':
        notes_list = []

        tag = NoteTags.query.filter(NoteTags.unique_id == unique_id).first()
        notes = Notes.query.filter(
            Notes.date_time >= (datetime.datetime.utcnow() - datetime.timedelta(seconds=int(past_seconds)))).all()

        for each_note in notes:
            if tag.unique_id in each_note.tags.split(','):
                notes_list.append(
                    [each_note.date_time.strftime("%Y-%m-%dT%H:%M:%S.000000000Z"), each_note.name, each_note.note])

        if notes_list:
            return jsonify(notes_list)
        else:
            return '', 204

    elif measure_type in ['input', 'math', 'output', 'pid']:
        dbcon = InfluxDBClient(
            INFLUXDB_HOST,
            INFLUXDB_PORT,
            INFLUXDB_USER,
            INFLUXDB_PASSWORD,
            INFLUXDB_DATABASE)

        # measure_type was already validated by the enclosing elif
        measure = DeviceMeasurements.query.filter(
            DeviceMeasurements.unique_id == measurement_id).first()

        if not measure:
            return "Could not find measurement"

        conversion = Conversion.query.filter(
            Conversion.unique_id == measure.conversion_id).first()

        channel, unit, measurement = return_measurement_info(
            measure, conversion)

        if hasattr(measure, 'measurement_type') and measure.measurement_type == 'setpoint':
            setpoint_pid = PID.query.filter(PID.unique_id == measure.device_id).first()
            if setpoint_pid and ',' in setpoint_pid.measurement:
                pid_measurement = setpoint_pid.measurement.split(',')[1]
                setpoint_measurement = DeviceMeasurements.query.filter(
                    DeviceMeasurements.unique_id == pid_measurement).first()
                if setpoint_measurement:
                    conversion = Conversion.query.filter(
                        Conversion.unique_id == setpoint_measurement.conversion_id).first()
                    _, unit, measurement = return_measurement_info(setpoint_measurement, conversion)

        try:
            query_str = query_string(
                unit, unique_id,
                measure=measurement,
                channel=channel,
                past_sec=past_seconds)

            if query_str == 1:
                return '', 204

            raw_data = dbcon.query(query_str).raw

            if 'series' in raw_data and raw_data['series']:
                return jsonify(raw_data['series'][0]['values'])
            else:
                return '', 204
        except Exception as e:
            logger.debug("URL for 'past_data' raised an error: "
                         "{err}".format(err=e))
            return '', 204
Example #28
import os

import psycopg2
from influxdb import InfluxDBClient

NODE_URL = 'http://nadejde-eos-node.northeurope.cloudapp.azure.com:8080'
BLOCK_PATH = '/v1/chain/get_block'
TRANSACTION_PATH = '/v1/history/get_transaction'
INFO_PATH = '/v1/chain/get_info'
INFLUX_URL = 'http://localhost:8086/write?db=eos'

WRITE_INFLUX = True
WRITE_POSTGRE = True
BATCH_SIZE = 100

#global variables to keep track of prices
#used for buyram actions, as there's no way to figure out how much RAM was
#bought without knowing the price
ramprice = 0.0
ramprice_inc_fee = 0.0

influx_client = InfluxDBClient(database='eos')
pg_config = {
    'host': os.environ['PG_EOS_HOST'],
    'dbname': os.environ['PG_EOS_DBNAME'],
    'user': os.environ['PG_EOS_USER'],
    'password': os.environ['PG_EOS_PASSWORD']
}

postgre_connection = psycopg2.connect("host=%(host)s dbname=%(dbname)s user=%(user)s password=%(password)s sslmode=verify-full sslrootcert=/home/nadejde/.postgresql/BaltimoreCyberTrustRoot.crt.pem" % pg_config)
posgre_cursor = postgre_connection.cursor()

#influx
#######
def write_points_influx_line(points):
    lines = ""
    for point in points:
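
The line-protocol builder is truncated mid-loop; a sketch of a completed version (the point keys and the tag/field layout are assumptions) could be:

def write_points_influx_line(points):
    # assemble InfluxDB line protocol: measurement,tag_set field_set timestamp
    lines = []
    for point in points:
        lines.append('%s,account=%s amount=%f %d' % (
            point['measurement'], point['account'],
            point['amount'], point['timestamp']))
    if WRITE_INFLUX:
        influx_client.write_points(lines, protocol='line')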
Example #29
def async_data(device_id, device_type, measurement_id, start_seconds, end_seconds):
    """
    Return data from start_seconds to end_seconds from influxdb.
    Used for asynchronous graph display of many points (up to millions).
    """
    if device_type == 'tag':
        notes_list = []
        tag = NoteTags.query.filter(NoteTags.unique_id == device_id).first()

        start = datetime.datetime.utcfromtimestamp(float(start_seconds))
        if end_seconds == '0':
            end = datetime.datetime.utcnow()
        else:
            end = datetime.datetime.utcfromtimestamp(float(end_seconds))

        notes = Notes.query.filter(
            and_(Notes.date_time >= start, Notes.date_time <= end)).all()
        for each_note in notes:
            if tag.unique_id in each_note.tags.split(','):
                notes_list.append(
                    [each_note.date_time.strftime("%Y-%m-%dT%H:%M:%S.000000000Z"), each_note.name, each_note.note])

        if notes_list:
            return jsonify(notes_list)
        else:
            return '', 204

    dbcon = InfluxDBClient(
        INFLUXDB_HOST,
        INFLUXDB_PORT,
        INFLUXDB_USER,
        INFLUXDB_PASSWORD,
        INFLUXDB_DATABASE)

    if device_type in ['input', 'math', 'output', 'pid']:
        measure = DeviceMeasurements.query.filter(
            DeviceMeasurements.unique_id == measurement_id).first()
    else:
        measure = None

    if not measure:
        return "Could not find measurement"

    conversion = Conversion.query.filter(
        Conversion.unique_id == measure.conversion_id).first()
    channel, unit, measurement = return_measurement_info(
        measure, conversion)

    # Set the time frame to the past year if start/end not specified
    if start_seconds == '0' and end_seconds == '0':
        # Get how many points there are in the past year
        query_str = query_string(
            unit, device_id,
            measure=measurement,
            channel=channel,
            value='COUNT')

        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw

        count_points = raw_data['series'][0]['values'][0][1]
        # Get the timestamp of the first point in the past year
        query_str = query_string(
            unit, device_id,
            measure=measurement,
            channel=channel,
            limit=1)

        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw

        first_point = raw_data['series'][0]['values'][0][0]
        end = datetime.datetime.utcnow()
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    # Set the time frame from the start epoch to now
    elif start_seconds != '0' and end_seconds == '0':
        start = datetime.datetime.utcfromtimestamp(float(start_seconds))
        start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        end = datetime.datetime.utcnow()
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

        query_str = query_string(
            unit, device_id,
            measure=measurement,
            channel=channel,
            value='COUNT',
            start_str=start_str,
            end_str=end_str)

        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw

        count_points = raw_data['series'][0]['values'][0][1]
        # Get the timestamp of the first point in the time range

        query_str = query_string(
            unit, device_id,
            measure=measurement,
            channel=channel,
            start_str=start_str,
            end_str=end_str,
            limit=1)

        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw

        first_point = raw_data['series'][0]['values'][0][0]
    else:
        start = datetime.datetime.utcfromtimestamp(float(start_seconds))
        start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        end = datetime.datetime.utcfromtimestamp(float(end_seconds))
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

        query_str = query_string(
            unit, device_id,
            measure=measurement,
            channel=channel,
            value='COUNT',
            start_str=start_str,
            end_str=end_str)

        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw

        count_points = raw_data['series'][0]['values'][0][1]
        # Get the timestamp of the first point in the time range

        query_str = query_string(
            unit, device_id,
            measure=measurement,
            channel=channel,
            start_str=start_str,
            end_str=end_str,
            limit=1)

        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw

        first_point = raw_data['series'][0]['values'][0][0]

    start = datetime.datetime.strptime(
        influx_time_str_to_milliseconds(first_point),
        '%Y-%m-%dT%H:%M:%S.%f')
    start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

    logger.debug('Count = {}'.format(count_points))
    logger.debug('Start = {}'.format(start))
    logger.debug('End   = {}'.format(end))

    # How many seconds between the start and end period
    time_difference_seconds = (end - start).total_seconds()
    logger.debug('Difference seconds = {}'.format(time_difference_seconds))

    # If there are more than 700 points in the time frame, we need to group
    # data points into 700 groups with points averaged in each group.
    if count_points > 700:
        # Average period between input reads
        seconds_per_point = time_difference_seconds / count_points
        logger.debug('Seconds per point = {}'.format(seconds_per_point))

        # How many seconds to group data points in
        group_seconds = int(time_difference_seconds / 700)
        logger.debug('Group seconds = {}'.format(group_seconds))

        try:
            query_str = query_string(
                unit, device_id,
                measure=measurement,
                channel=channel,
                value='MEAN',
                start_str=start_str,
                end_str=end_str,
                group_sec=group_seconds)

            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw

            return jsonify(raw_data['series'][0]['values'])
        except Exception as e:
            logger.error("URL for 'async_data' raised an error: "
                         "{err}".format(err=e))
            return '', 204
    else:
        try:
            query_str = query_string(
                unit, device_id,
                measure=measurement,
                channel=channel,
                start_str=start_str,
                end_str=end_str)

            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw

            return jsonify(raw_data['series'][0]['values'])
        except Exception as e:
            logger.error("URL for 'async_data' raised an error: "
                         "{err}".format(err=e))
            return '', 204
Example #30
import pandas as pd
from influxdb import InfluxDBClient

client = InfluxDBClient(host='floodnet-influxdb.sonycproject.com',
                        ssl=True,
                        port=0,
                        database='db0')

measure = 'distance'
table = 'msg'
app_id = 'deployment_one_app'
dev_id = 'sensor_2'
days_back = 7

query = 'SELECT "%s" ' \
        'FROM "%s" ' \
        'WHERE "app_id" = \'%s\' ' \
        'AND "dev_id" = \'%s\' ' \
        'AND "time" >= now() - %id'\
        % (measure, table, app_id, dev_id, days_back)

data = client.query(query)

value_data = data.raw['series'][0]['values']

df = pd.DataFrame(value_data, columns=['datetime', 'distance'])

df['datetime'] = pd.to_datetime(df['datetime'], format='%Y-%m-%dT%H:%M:%S.%fZ', utc=True)
df['localtime'] = df['datetime'].dt.tz_convert('US/Eastern')

print(df)
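
With the values in a DataFrame, downsampling for plotting is straightforward; for example, hourly means over the week:

hourly = df.set_index('localtime')['distance'].resample('1H').mean()
print(hourly.head())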