# Fragment kept byte-identical: the original line breaks/indentation were lost,
# so the statements nested under `if len(split_values) == 2:` cannot be
# reconstructed with certainty.
# Purpose (from the visible code): parse a "YYYY-MM-DD"-prefixed row into a
# MoneyVelocityModel with the date localized to Europe/Prague and converted to
# UTC, then write to InfluxDB only the models newer than the last stored point.
# NOTE(review): datetime(1971, 1, 1, tzinfo=pytz.utc) looks like the
# "measurement is empty" sentinel returned by find_last — confirm with
# InfluxRepository. `organizaiton` is a (misspelled) name defined elsewhere in
# the file; left untouched here.
if len(split_values) == 2: date_string = split_values[0].split("-") dateParsed = datetime.datetime(int(date_string[0]), int(date_string[1]), int(date_string[2])) pandas_date = pd.to_datetime(dateParsed) pandas_date = pandas_date.tz_localize("Europe/Prague") pandas_date = pandas_date.tz_convert("utc") m_models.append(MoneyVelocityModel(float(split_values[1]), pandas_date)) influx_repository = InfluxRepository("http://localhost:8086", "FinancialIndicators", token, organizaiton) min_date = influx_repository.find_last(measurement, state) filtered = [] if min_date == datetime.datetime(1971, 1, 1, tzinfo=pytz.utc): filtered = m_models else: filtered = list( filter(lambda m: m.date.astimezone(pytz.utc) > min_date, m_models)) for moneyVelocity in filtered: point = Point(measurement).field("value", float(moneyVelocity.value)).time( moneyVelocity.date, WritePrecision.NS).tag("state", state) influx_repository.add(point) influx_repository.save()
# Poll one Mi Flora/Mi temperature sensor and write the reading to InfluxDB.
# The broad handlers are deliberate best-effort: one unreachable device must
# not stop the polling loop.
try:
    temperature = poller.parameter_value(MI_TEMPERATURE)
    humidity = poller.parameter_value(MI_HUMIDITY)
    battery = poller.parameter_value(MI_BATTERY)
    data = json.dumps({
        "temperature": temperature,
        "humidity": humidity,
        "battery": battery
    })
    print(datetime.now(), device, " : ", data)
    point = Point("sensordata") \
        .tag("device", device) \
        .field("temp", temperature) \
        .field("hum", humidity) \
        .field("battery", battery) \
        .time(datetime.utcnow(), WritePrecision.NS)
    write_api.write(bucket, org, point)
except BTLEException as e:
    print("Error connecting to device {0}: {1}".format(device, str(e)))
except Exception as e:
    print(
        "Error polling device {0}. Device might be unreachable or offline."
        .format(device))
    # Fix: traceback.print_exc() already writes the traceback and returns
    # None; wrapping it in print() used to emit a spurious "None" line.
    traceback.print_exc()
from influxdb_client import InfluxDBClient, Point, WritePrecision
from influxdb_client.client.write_api import SYNCHRONOUS

# You can generate a Token from the "Tokens Tab" in the UI
# NOTE(review): a secret token is committed in source — rotate it and load
# from the environment instead.
_TOKEN = 'TG1LH76GVHihj6as2H18rFiLx-q8Vd7E6m8KiHs9mZcrNJ_WTrS0xO6Y6Z-JNacWsqEMkvJknG8VeHs6x3-X5Q=='
_ORG = "market_signal"
bucket = "market_signal"

client = InfluxDBClient(url="https://us-central1-1.gcp.cloud2.influxdata.com",
                        token=_TOKEN,
                        org=_ORG)
write_api = client.write_api(write_options=SYNCHRONOUS)

#'''
# Write one dummy candle-close point for the kraken btcusd pair.
dummy_point = Point("timeseries_dummy") \
    .tag("exchange", "kraken") \
    .tag("symbol", "btcusd") \
    .field("close", 7000.10)
write_api.write(bucket=bucket, record=dummy_point)
#'''

# Stream back everything written to the bucket in the last 30 minutes.
query_api = client.query_api()
records = query_api.query_stream(
    'from(bucket:"{bucket}") |> range(start: -30m)'.format(bucket=bucket))
for record in records:
    print(record)
# Fragment kept byte-identical: it starts inside an (unseen) loop over
# charger_data / port_info, and the original indentation is lost, so the
# block boundaries cannot be reconstructed safely.
# Purpose (from the visible code): map the port state to a 0/1 availability
# field, parse "DD/MM/YYYY" + time into a datetime, and batch ChargerState
# points before a single write.
# NOTE(review): the bare `except:` also swallows KeyboardInterrupt/SystemExit
# and hides the real error — narrow it to `except Exception as e` and log e.
# "EXception" in the message is a typo in a runtime string; left as-is here.
if (str(charger_data[charger_key].charger_ports[port_info][0].state) == "Available"): avail_val=1 else: avail_val=0 rfc_date_time_str = date_str.replace('/','-')+"T"+time_str+"Z" date_time_obj = datetime.strptime(rfc_date_time_str,"%d-%m-%YT%H:%M:%SZ") #unix_time=time.mktime(date_time_obj.timetuple()) address = str(charger_data[charger_key].name) address = address.replace("\"","'") address = address[2:-1] #print(address) p = Point("ChargerState") \ .tag("location", charger_data[charger_key].location) \ .tag("address",address) \ .tag("Port Type",port_info) \ .tag("StateTag",str(charger_data[charger_key].charger_ports[port_info][0].state)) \ .field("state_int",avail_val) \ .time(time=date_time_obj) i = i+1 point_list.append(p) #write_api.write(bucket=bucket, org="Home", record=p) if ( len(point_list)) > 0: try: write_api.write(bucket=bucket, org="Home", record=point_list) f_name_processed = True except: f_name_processed = False print('EXception : Failed to write to influxdb')
def loop2(ir):
    """Sample the live iRacing telemetry buffer and write one point per call.

    Freezes the irsdk variable buffer so every channel is read from the same
    telemetry tick, copies one field per channel into a single "iracing"
    measurement, and writes it with a nanosecond UTC timestamp.
    """
    ir.freeze_var_buffer_latest()
    # Fix: the original chain set the "RPM" field twice (same value both
    # times); the redundant duplicate after OilTemp has been removed.
    point = Point("iracing") \
        .field("AirDensity", ir["AirDensity"]) \
        .field("AirPressure", ir["AirPressure"]) \
        .field("AirTemp", ir["AirTemp"]) \
        .field("Brake", ir["Brake"]) \
        .field("Clutch", ir["Clutch"]) \
        .field("FuelLevelPct", ir["FuelLevelPct"]) \
        .field("FuelLevel", ir["FuelLevel"]) \
        .field("FuelPress", ir["FuelPress"]) \
        .field("Gear", ir["Gear"]) \
        .field("LatAccel", ir["LatAccel"]) \
        .field("LFbrakeLinePress", ir["LFbrakeLinePress"]) \
        .field("LFshockDefl", ir["LFshockDefl"]) \
        .field("LFshockVel", ir["LFshockVel"]) \
        .field("LFtempCL", ir["LFtempCL"]) \
        .field("LFtempCM", ir["LFtempCM"]) \
        .field("LFtempCR", ir["LFtempCR"]) \
        .field("LRbrakeLinePress", ir["LRbrakeLinePress"]) \
        .field("LRshockDefl", ir["LRshockDefl"]) \
        .field("LRshockVel", ir["LRshockVel"]) \
        .field("LongAccel", ir["LongAccel"]) \
        .field("ManifoldPress", ir["ManifoldPress"]) \
        .field("Pitch", ir["Pitch"]) \
        .field("PitchRate", ir["PitchRate"]) \
        .field("RaceLaps", ir["RaceLaps"]) \
        .field("RFbrakeLinePress", ir["RFbrakeLinePress"]) \
        .field("RFshockDefl", ir["RFshockDefl"]) \
        .field("RFshockVel", ir["RFshockVel"]) \
        .field("Roll", ir["Roll"]) \
        .field("RollRate", ir["RollRate"]) \
        .field("RPM", ir["RPM"]) \
        .field("RRbrakeLinePress", ir["RRbrakeLinePress"]) \
        .field("RRshockDefl", ir["RRshockDefl"]) \
        .field("RRshockVel", ir["RRshockVel"]) \
        .field("OilPress", ir["OilPress"]) \
        .field("OilTemp", ir["OilTemp"]) \
        .field("Speed", ir["Speed"]) \
        .field("SessionTime", ir["SessionTime"]) \
        .field("SteeringWheelAngle", ir["SteeringWheelAngle"]) \
        .field("Throttle", ir["Throttle"]) \
        .field("TrackTempCrew", ir["TrackTempCrew"]) \
        .field("VertAccel", ir["VertAccel"]) \
        .field("WaterTemp", ir["WaterTemp"]) \
        .field("WindDir", ir["WindDir"]) \
        .field("WindVel", ir["WindVel"]) \
        .time(datetime.utcnow(), WritePrecision.NS)
    write_api.write(bucket=bucket, org=org, record=point)
    print(f'Metric writen: {ir["SessionTime"]}')
def test_DateTimeUtc(self):
    """A naive datetime should be serialized as UTC at nanosecond precision.

    Fix: the original test built ``dateTime`` but never attached it to the
    point and asserted nothing, so it could never fail.
    """
    dateTime = datetime(2015, 10, 15, 8, 20, 15)
    point = Point.measurement("h2o") \
        .tag("location", "europe") \
        .field("level", 2) \
        .time(dateTime)
    # 2015-10-15T08:20:15Z == 1444897215 s since the epoch; integer fields
    # carry an "i" suffix in line protocol.
    self.assertEqual("h2o,location=europe level=2i 1444897215000000000",
                     point.to_line_protocol())
def create_point(d):
    """Build a billed-power point from a reading dict.

    Expects 'watts', 'total' and 'timestamp' (epoch seconds) keys; the
    timestamp is stored with nanosecond resolution.
    """
    nanos = int(float(d['timestamp']) * 1000 * 1000 * 1000)
    point = Point("meter_power_billed")
    point = point.tag("provider", "vattenfall")
    point = point.field("gauge", float(d['watts']))
    point = point.field("total", float(d['total']))
    return point.time(nanos)
# Fragment kept byte-identical: it starts in the middle of a function
# (dbPayload is created before this view) and the original indentation is
# lost.
# Purpose (from the visible code): stamp the point with the payload's
# lastStatusUpdate converted to UTC, copy every payload entry except
# label/room/lastStatusUpdate into fields, and write it; the __main__ block
# writes a single test point timestamped with timezone-aware UTC "now".
# point.time() expects time in UTC localDatetime = payload["lastStatusUpdate"] utcDatetime = TimeUtil.convertDatetimeToUtc(localDatetime) logging.info("Converted lastStatusUpdate ('" + payload["label"] + "') local time (" + str(localDatetime) + ") to UTC (" + str(utcDatetime) + ")") dbPayload.time(utcDatetime) for fieldName, value in payload.items(): if fieldName != "label" and fieldName != "room" and fieldName != "lastStatusUpdate": dbPayload.field(fieldName, value) write_api.write(bucket=bucket, record=dbPayload) #print("Saved payload in DB.") if __name__ == "__main__": logging.info("Saving single test value in DB...") testPayload = Point("testMeasurement") testPayload.field("testField", 0.5) # Point.time expects datetime in UTC #logging.info("Current UTC time: " + str(datetime.now(pytz.utc))) testPayload.time(datetime.now(pytz.utc)) write_api.write(bucket="test_database", record=testPayload)
def write_influx_data(point: str, field: str, value: float):
    """Write one field sample to InfluxDB, tagged with this device's name.

    Note: ``point`` is the measurement name (a string), not a Point object.
    """
    record = Point(point).tag("device-name", BALENA_DEVICE_NAME).field(field, value)
    influx_write_api.write(bucket=INFLUX_BUCKET, record=[record])
def write(self, measurement, value):
    """Store a single sample under ``measurement`` as its 'value' field."""
    sample = Point(measurement).field('value', value)
    self.write_api.write(bucket=self.bucket, record=sample)
# Demo script kept byte-identical: the exact line breaks inside the
# triple-quoted Flux query (and the section-marker strings) were lost when
# the source was collapsed.
# Purpose (from the visible code): install a pandas-based date helper so a
# string timestamp keeps nanosecond precision, write one h2o_feet point with
# a nanosecond-resolution time, and stream a Flux query over the bucket.
date_utils.date_helper = PandasDateTimeHelper() """ Prepare client. """ client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") write_api = client.write_api(write_options=SYNCHRONOUS) query_api = client.query_api() """ Prepare data """ point = Point("h2o_feet") \ .field("water_level", 10) \ .tag("location", "pacific") \ .time('1996-02-25T21:20:00.001001231Z') print(f'Time serialized with nanosecond precision: {point.to_line_protocol()}') print() write_api.write(bucket="my-bucket", record=point) """ Query: using Stream """ query = ''' from(bucket:"my-bucket") |> range(start: 0, stop: now()) |> filter(fn: (r) => r._measurement == "h2o_feet") ''' records = query_api.query_stream(query)
# Fragment kept byte-identical: it opens with the tail of get_sensor()
# (median over buffered samples) and is cut off at the end — the final
# `try:` block's except handler lies beyond this view, so the structure
# cannot be reconstructed.
# Purpose (from the visible code): poll GrovePi sensors forever, build four
# line-protocol strings sharing one UTC timestamp, and write the batch; the
# error_flag / backuped_sequence pair appears to re-send data buffered during
# a previous failed write — confirm against the missing except clause.
samples_humidity.append(humidity) time.sleep(1) return [numpy.median(samples_light), numpy.median(samples_sound), numpy.median(samples_temp), numpy.median(samples_humidity)] if __name__ == '__main__': grovepi.pinMode(light_sensor, "INPUT") grovepi.pinMode(sound_sensor, "INPUT") grovepi.pinMode(temp_humidity_sensor, "INPUT") print("start!!!") while True: light, sound, temp, humidity = get_sensor() timestamp = datetime.utcnow() sequence = [] lightPoint = Point(measurement).tag('device', 'raspberry-pi').field("light", light).time(time=timestamp).to_line_protocol() sequence.append(lightPoint) soundPoint = Point(measurement).tag('device', 'raspberry-pi').field("sound", sound).time(time=timestamp).to_line_protocol() sequence.append(soundPoint) tempPoint = Point(measurement).tag('device', 'raspberry-pi').field("temperature", temp).time(time=timestamp).to_line_protocol() sequence.append(tempPoint) humidPoint = Point(measurement).tag('device', 'raspberry-pi').field("humidity", humidity).time(time=timestamp).to_line_protocol() sequence.append(humidPoint) print("-------------------------------------") print(sequence) try: write_api.write(bucket, org, sequence) if error_flag == True: write_api.write(bucket, org, backuped_sequence) error_flag = False
def __influx_main__():
    """Smoke-test an InfluxDB v2 deployment.

    Pings the server, lists the existing buckets, recreates the target bucket
    (with a 1-hour expiry rule), and writes one temperature point per EU
    capital. Uses the module-level MY_TOKEN / ORG / MY_BUCKET configuration.
    Errors are reported but never propagated. Returns None.
    """
    print(f"Ready to Connect to InfluxDB")
    try:
        print("Inside Try-Catch for InfluxDB")
        # *************
        # Version 2.1.1
        # *************
        with InfluxDBClient(url="http://vibm-influxdb-influxdb2:80",
                            token=MY_TOKEN, org=ORG, debug=True) as client:
            version = client.ping()
            print(f"Database Ping = {version}")

            buckets_api = client.buckets_api()
            buckets = buckets_api.find_buckets(org=ORG).buckets
            print(f"------------- List All Buckets -------------")
            for bucket in buckets:
                print(f"Existing Bucket --> {bucket}")

            # find_bucket_by_name returns the Bucket object (or None), which
            # delete_bucket accepts directly.
            bucket_id = buckets_api.find_bucket_by_name(MY_BUCKET)
            if bucket_id is not None:
                print(f"------------- Delete Tandem Bucket -------------")
                buckets_api.delete_bucket(bucket_id)
                print(f"Bucket Deleted ... {MY_BUCKET}")

            print(f"---------- Create Bucket for Tandem Data ----------")
            retention_rules = BucketRetentionRules(type="expire", every_seconds=3600)
            created_bucket = buckets_api.create_bucket(
                bucket_name=MY_BUCKET,
                retention_rules=retention_rules,
                org=ORG)
            print(f"Bucket Created ... {created_bucket}")

            print(f"---------- Write Data to Bucket ----------")
            write_api = client.write_api(write_options=SYNCHRONOUS)
            # Fix: this used to be 15 copy-pasted _pointN variables and 15
            # separate write() calls; the data-driven loop writes the same
            # points in the same order. (Dead, commented-out InfluxDB 1.x
            # example code was removed.)
            capitals = [
                ("Amsterdam", 18), ("Athens", 19), ("Belgrade", 16),
                ("Berlin", 16), ("Bern", 20), ("Bratislava", 20),
                ("Brussels", 18), ("Bucharest", 20), ("Copenhagen", 12),
                ("Dublin", 14), ("Helsinki", 3), ("Kyiv", 8),
                ("Lisbon", 19), ("London", 19), ("Madrid", 17),
            ]
            for location, temperature in capitals:
                point = Point("eu_capitals_oC") \
                    .tag("location", location) \
                    .field("temperature", temperature) \
                    .time(datetime.utcnow(), WritePrecision.MS)
                write_api.write(bucket=MY_BUCKET, record=point)

            results = []
            print(f"My Results = {results}")
            client.close()
    except Exception as e:
        print(f"Exception is Raised ... {e}")
    return
# Fragment kept byte-identical: it opens with a bare `raise` whose
# surrounding conditional is outside this view, and it is cut off
# mid-expression (the pollution point chain ends in a trailing backslash).
# Purpose (from the visible code): read DHT humidity/temperature on the
# configured GPIO pin, round SPS30 particulate values to 2 decimals via
# get_pollution_data, and build "weather" and "pollution" points tagged with
# device/place ids from the config.
raise Exception("No sensor chosen!") # Set to your GPIO pin pin = int(config['sensor']['humidity_pin']) # 4 humidity, temperature = Adafruit_DHT.read_retry(sensor, pin) def get_pollution_data(name): data = sps.dict_values[name] return round(data, 2) temp = Point("weather") \ .tag("sensor", int(config['sensor']['humidity'])) \ .tag("device", int(config['device']['id'])) \ .tag("place", int(config['device']['place'])) \ .field("temperature", round(temperature, 2)) \ .field("humidity", round(humidity, 2)) \ .time(datetime.datetime.utcnow(), WritePrecision.NS) pollution = Point("pollution") \ .tag("device", int(config['device']['id'])) \ .tag("place", int(config['device']['place'])) \ .field("pm1", get_pollution_data(PM1)) \ .field("nc1", get_pollution_data(NC1)) \ .field("pm2.5", get_pollution_data(PM25)) \ .field("nc2.5", get_pollution_data(NC25)) \ .field("pm4.0", get_pollution_data(PM4)) \ .field("nc4.0", get_pollution_data(NC4)) \ .field("pm10", get_pollution_data(PM10)) \ .field("nc10", get_pollution_data(NC10)) \
# Function kept byte-identical: its control flow is deeply nested, the
# extents of the inline comments were lost when the source was collapsed,
# and the statement order (tagging before/after the stats fetch) matters.
# Purpose (from the visible code): build a TTN_Gateways point for one The
# Things Network gateway — tag it with id/name, first-antenna location
# (0 when a dimension is missing) and any custom attributes; fetch the
# gateway's connection stats over HTTP; set field status=1 plus
# uplink/downlink counts and last_status metrics when connected (also
# updating the stored antenna location when it drifted), or status=0 when
# not; then print the line protocol and flush stdout.
# NOTE(review): no value is returned and the point is only printed — confirm
# whether a write was intended elsewhere.
def get_gateway_details(gateway): #print(gateway) gateway_id = gateway['ids']['gateway_id'] point = Point("TTN_Gateways").tag("gateway_id", gateway_id).tag("name", gateway['name']) if 'antennas' in gateway: for dimension in ['latitude', 'longitude', 'altitude']: if dimension in gateway['antennas'][0]['location']: value = gateway['antennas'][0]['location'][dimension] else: value = 0 point = point.tag( dimension, value ) #body['gateway']['antennas'][0]['location'][dimension] = antenna_locations[dimension] #point = point.tag('latitude',gateway['antennas'][0]['location']['latitude']).tag('longitude',gateway['antennas'][0]['location']['longitude']).tag('altitude',gateway['antennas'][0]['location']['altitude']) #for key,value in gateway['antennas'][0]['location']: # point = point.tag(key,value) gateway_stats = (requests.get(base_uri + "/api/v3/gs/gateways/" + gateway_id + "/connection/stats", params=gateway_stats_params, headers=http_headers)).json() #https://eu1.cloud.thethings.network/api/v3/gs/gateways/fort-digital-80029c641ef8/connection/stats if 'attributes' in gateway: for key, value in gateway['attributes'].items(): point = point.tag(key, value) #Need to consider how to handle last_status_received_at not updating but not getting a 'gateway not connected' message yet to mark a site as 'down' #Can probably handle this in the query? 
if "connected_at" in gateway_stats: #print(gateway_stats) point = point.field("status", 1) if 'last_status_received_at' in gateway_stats: point = point.time(gateway_stats['last_status_received_at']) if 'uplink_count' in gateway_stats: point = point.field("uplink_count", will_it_float(gateway_stats['uplink_count'])) if 'downlink_count' in gateway_stats: point = point.field("downlink_count", will_it_float(gateway_stats['downlink_count'])) if 'last_status' in gateway_stats: if 'metrics' in gateway_stats['last_status']: for key, value in gateway_stats['last_status'][ 'metrics'].items(): point = point.field(key, will_it_float(value)) #Could use the latest antenna location to automatically update gateway location as its ignored from UDP gateway_stats['last_status']['antenna_locations']['latitude/longitude/altitude'] #print(gateway_stats) if 'antenna_locations' in gateway_stats['last_status']: if 'antennas' not in gateway or gateway['antennas'][0][ 'location']['latitude'] != gateway_stats[ 'last_status']['antenna_locations'][0][ 'latitude'] or gateway['antennas'][0][ 'location']['longitude'] != gateway_stats[ 'last_status']['antenna_locations'][0][ 'longitude']: update_gateway( gateway_id, gateway_stats['last_status']['antenna_locations'][0]) else: #Gateway Not Connected point = point.field("status", 0) print(point.to_line_protocol()) #flushstdout sys.stdout.flush()
def influx_write_brightness(cls, order, value, time=None):
    """Write one brightness sample (percent) for a street light.

    Args:
        order: street-light identifier, stored as the "street-light" tag.
        value: brightness percentage, stored as the "percent" field.
        time: sample timestamp (second precision); None means "now".

    Bug fix: the old default ``time=datetime.utcnow()`` was evaluated once at
    function-definition time, so every defaulted call reused that same stale
    timestamp. ``None`` now triggers a fresh timestamp per call.
    """
    if time is None:
        time = datetime.utcnow()
    point = Point("brightness").tag("street-light", order).field(
        "percent", value).time(time, WritePrecision.S)
    cls.influx_write_api.write(cls.influx_bucket, cls.influx_org, point)
def test_TimePrecisionDefault(self):
    """A Point built without an explicit precision defaults to nanoseconds."""
    sample = Point.measurement("h2o").tag("location", "europe").field("level", 2)
    self.assertEqual(WritePrecision.NS, sample._write_precision)
def influx_write_proximity(cls, order, value, time=None):
    """Write one proximity sample for a street light.

    Args:
        order: street-light identifier, stored as the "street-light" tag.
        value: proximity reading, stored as the "value" field.
        time: sample timestamp (second precision); None means "now".

    Bug fix: the old default ``time=datetime.utcnow()`` was evaluated once at
    function-definition time, so every defaulted call reused that same stale
    timestamp. ``None`` now triggers a fresh timestamp per call.
    """
    if time is None:
        time = datetime.utcnow()
    point = Point("proximity").tag("street-light", order).field(
        "value", value).time(time, WritePrecision.S)
    cls.influx_write_api.write(cls.influx_bucket, cls.influx_org, point)
# Fragment kept byte-identical: it is cut off mid-expression at the end and
# contains a commented-out line inside a backslash-continued method chain.
# Purpose (from the visible code): every 5 seconds, write a batch of 5
# random biz_intel points (region/app tags, user_sessions/num_transactions
# fields, nanosecond wall-clock time), 100 batches total; then build one more
# point and an equivalent line-protocol string p_alt.
# NOTE(review): a comment line (# .tag("host",host)) sitting between
# backslash continuations breaks the continuation in standard Python —
# confirm this block actually runs as written.
# host_suffixes = range(0,5) # host = random.choice(host_prefixes) + str(random.choice(host_suffixes)) client = InfluxDBClient(url=url, token=token, org=org, debug=True) # with pysnooper.snoop(): write_api = client.write_api(write_options=SYNCHRONOUS) for i in range(0, 100): points = [] #local = time.localtime() for x in range(0, 5): points.append(Point("biz_intel").tag("region", random.choice(regions)) \ .tag("app",random.choice(apps)) \ # .tag("host",host) \ .field("user_sessions", random.choice(user_sessions)) \ .field("num_transactions",random.choice(num_xactions)) \ .time(time.time_ns())) #print(local) write_api.write(bucket=bucket, org=org, record=points) # print(points[0].time()) sleep(5) p_alt = f"biz_intel,region={random.choice(regions)},app={random.choice(apps)},host={host} user_sessions={random.choice(user_sessions)},num_transactions={random.choice(num_xactions)} " point = Point("biz_intel").tag("region", random.choice(regions)) \ .tag("app",random.choice(apps)) \ .tag("host",host) \ .field("user_sessions", random.choice(user_sessions)) \
# Function kept byte-identical: this is an order-sensitive multi-line state
# machine (date -> level -> name -> type on line 1, server on line 2, dnssec
# verdict on line 3) and the lost indentation makes a restyle unsafe.
# Purpose (from the visible code): tail /var/log/unbound.log, accumulate the
# parsed fields across consecutive log lines, and once a record is finished
# (and is not a DS/DNSKEY lookup) write an 'unbound' point tagged with
# level/server/type/dnssec/name at second precision, then reset the state.
# NOTE(review): the log line carries no year, so the current year is assumed
# when parsing the timestamp — records spanning New Year will be mis-dated.
def main(): reset = False finished = False date = None level = None name = None ltype = None dnssec = False dnssec_set = False server = None with open('/var/log/unbound.log', 'rt', encoding='utf8') as file: for line in tailer.follow(file): #while True: if finished: if not ltype == 'DS' and not ltype == 'DNSKEY': point = Point('unbound') \ .tag('level', level) \ .tag('server', server) \ .tag('type', ltype) \ .tag('dnssec', dnssec) \ .tag('name', name) \ .time(date, WritePrecision.S) WRITE_API.write(INFLUX_BUCKET, INFLUX_ORG, point) reset = True if reset: date = None level = None name = None ltype = None dnssec = False dnssec_set = False server = None reset = False finished = False #line = file.readline() #if not line: # break if line is None or len(line) == 0: continue # Line 1 if date is None: if not re.search(DATE_PATTERN, line): continue split = re.split(DATE_PATTERN, line) date = datetime.fromtimestamp(calendar.timegm( datetime.strptime( str(datetime.now().year) + ' ' + split[1], '%Y %b %d %H:%M:%S').timetuple()), tz=tz.gettz('UTC')) line = split[2] if level is None: if not re.search(LEVEL_PATTERN, line): reset = True continue split = re.split(LEVEL_PATTERN, line) level = split[1] line = split[2] if name is None: if not re.search(NAME_PATTERN, line): reset = True continue split = re.split(NAME_PATTERN, line) name = split[1] line = split[2] if not re.search(TYPE_PATTERN, line): reset = True continue split = re.split(TYPE_PATTERN, line) ltype = split[1] continue # EOL # Line 2 if server is None: if not re.search(REPLY_PATTERN, line): reset = True continue split = re.split(REPLY_PATTERN, line) server = split[1] continue # EOL if not dnssec_set: if re.search(INSECURE_PATTERN, line): dnssec = False dnssec_set = True finished = True # Done continue if re.search(SECURE_PATTERN, line): dnssec = True dnssec_set = True finished = True # Done continue continue # Skip line
# Demo script kept byte-identical: the chunk ends with a lone `"""` that
# opens a section string continuing beyond this view, so the tail cannot be
# reconstructed.
# Purpose (from the visible code): write two my_measurement points (Prague /
# New York temperatures) synchronously, then query the last 10 minutes and
# print each table and its record values.
from influxdb_client import InfluxDBClient, Point, Dialect from influxdb_client.client.write_api import SYNCHRONOUS client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") write_api = client.write_api(write_options=SYNCHRONOUS) query_api = client.query_api() """ Prepare data """ _point1 = Point("my_measurement").tag("location", "Prague").field("temperature", 25.3) _point2 = Point("my_measurement").tag("location", "New York").field("temperature", 24.3) write_api.write(bucket="my-bucket", record=[_point1, _point2]) """ Query: using Table structure """ tables = query_api.query('from(bucket:"my-bucket") |> range(start: -10m)') for table in tables: print(table) for record in table.records: print(record.values) print() print() """
def panel_info_to_influx_points(panel_info):
    """Convert one panel read-out into InfluxDB points (one per control).

    Each point is tagged with slot/vessel/batch number and carries the
    temperature, set point, valve state and days-in-vessel fields, all
    timestamped with the panel's read_at value at nanosecond precision.
    """
    points = []
    for control in panel_info['controls']:
        batch = control['batch_info']
        entry = (
            Point("cellar_panel_read")
            .tag("slot", control["slot"])
            .tag("vessel", control["label"])
            .tag("batch_number", batch['Batch #'])
            .field("temp", control['temp'])
            .field("set_point", control['set_point'])
            .field("valve_open", control['valve_open'])
            .field("days_in_vessel", batch['Days in Vessel'])
            .time(panel_info['read_at'], WritePrecision.NS)
        )
        points.append(entry)
    return points
# Fragment kept byte-identical: it appears to be cut off at the end (five
# points are built but only pointBat is written in view).
# Purpose (from the visible code): poll each Mi Flora plant sensor over
# gatttool and build battery / temperature / light / moisture / conductivity
# points tagged with the plant's host name.
# NOTE(review): `for i in plants:` iterates plants' elements/keys, yet the
# body indexes both `i[1]` and `plants[i][0]` — this only works for a very
# specific plants structure (e.g. a dict keyed by tuples); verify, as it
# looks like a keys-vs-values mix-up.
write_api = client.write_api(write_options=SYNCHRONOUS) for i in plants: poller = MiFloraPoller(i[1], GatttoolBackend) plantData = { "battery": poller.battery_level(), "temperature": poller.parameter_value("temperature"), "brightness": poller.parameter_value("light"), "moisture": poller.parameter_value("moisture"), "conductivity": poller.parameter_value("conductivity") } print(plantData) pointBat = Point("bat").tag("host", plants[i][0]).field( "battery", plantData["battery"]).time(datetime.utcnow(), WritePrecision.NS) pointTemp = Point("temp").tag("host", plants[i][0]).field( "temperature", plantData["temperature"]).time(datetime.utcnow(), WritePrecision.NS) pointLight = Point("light").tag("host", plants[i][0]).field( "brightness", plantData["brightness"]).time(datetime.utcnow(), WritePrecision.NS) pointMoist = Point("moist").tag("host", plants[i][0]).field( "moisture", plantData["moisture"]).time(datetime.utcnow(), WritePrecision.NS) pointFert = Point("cond").tag("host", plants[i][0]).field( "cond", plantData["conductivity"]).time(datetime.utcnow(), WritePrecision.NS) write_api.write(bucket, org, pointBat)
# Handler kept byte-identical: most of its body is a huge triple-quoted HTML
# runtime string (cut off mid-string at the end of this view) that must stay
# byte-for-byte intact.
# Purpose (from the visible code): a Flask-style panel view — records an
# endpoint_request metric to InfluxDB inside a tracing span, redirects to /
# when the session lacks 'can_login', otherwise renders the cached TODO keys
# as an HTML list. tracer.close() failures are deliberately swallowed.
# NOTE(review): the 2-second time.sleep on every request looks like leftover
# demo/latency-simulation code — confirm before shipping.
def mainPanel(): with opentracing.tracer.start_span('Enter Main') as span: span.set_tag('step', '1') point = Point("endpoint_request").tag("endpoint", "/main").field("value", 1).time(datetime.utcnow(), WritePrecision.NS) write_api.write(bucket, org, point) if 'can_login' not in session: time.sleep(2) try: tracer.close() except: z = 1 return redirect('/') else: add_keys_str = "" count_keyszz = len(cache.keys('*')) for kkv in cache.keys('*'): add_keys_str = add_keys_str + '<li><span class="vt">' + str(kkv.decode("utf-8")) + '</span><span class="close">×</span></li>' time.sleep(2) try: tracer.close() except: z = 1 return """ <!DOCTYPE html> <html lang="en"> <head> <title>Panel</title> <style> ul { list-style-type: none; padding: 0; margin: 0; } ul li { border: 1px solid #ddd; margin-top: -1px; /* Prevent double borders */ background-color: white; padding: 12px; text-decoration: none; font-size: 18px; color: black; display: block; position: relative; } .close { cursor: pointer; position: absolute; top: 50%; background-color: red; right: 0%; padding: 12px 16px; transform: translate(0%, -50%); } header { background-color: black; padding: 2px; font-size: 15px; color: white; } .buttonred { background-color: red; border: none; color: white; padding: 5px 5px; text-align: center; text-decoration: none; display: inline-block; font-size: 16px; margin: 4px 2px; cursor: pointer; } .buttongreen { background-color: green; border: none; color: white; padding: 5px 5px; text-align: center; text-decoration: none; display: inline-block; font-size: 16px; margin: 4px 2px; cursor: pointer; } /* Style the footer */ footer { background-color: black; padding: 1px; text-align: center; color: white; } </style> </head> <body> <header> <table style="width:100%"><tr><td style="padding-left: 2%;text-align:left;width:50%">test1<button class="buttongreen" style="margin-left:2%;">Admin</button></td><td style="padding-right: 2%;text-align:right;width:50%"><button onclick="location.href='/do_LogOut'" 
class="buttonred">Logout</button></td></tr></table> </header> <section> <h2>TODOs (""" + str(count_keyszz) + """)</h2> <form method="get" action="http://localhost:5002/add_todo"> <label for="task">ADD Task:</label><br> <input type="text" id="task" name="task" ><br> <input style="background-color:lightgreen;" type="submit" value="ADD Task"> </form> <br> <br> <ul> """ + add_keys_str + """
# Fragment kept byte-identical: it begins in the middle of a docstring
# (only the closing `Configure credentials \"\"\"` is visible), so the
# opening delimiter lies outside this view.
# Purpose (from the visible code): connect to InfluxDB Cloud with
# placeholder credentials, write one temperature point tagged host/device
# with a UTC timestamp, and prepare a Flux query over the last day for that
# measurement.
Configure credentials """ influx_cloud_url = 'https://us-west-2-1.aws.cloud2.influxdata.com' influx_cloud_token = '...' bucket = '...' org = '...' client = InfluxDBClient(url=influx_cloud_url, token=influx_cloud_token) try: kind = 'temperature' host = 'host1' device = 'opt-123' """ Write data by Point structure """ point = Point(kind).tag('host', host).tag('device', device).field( 'value', 25.3).time(time=datetime.utcnow()) print(f'Writing to InfluxDB cloud: {point.to_line_protocol()} ...') write_api = client.write_api(write_options=SYNCHRONOUS) write_api.write(bucket=bucket, org=org, record=point) print() print('success') print() print() """ Query written data """ query = f'from(bucket: "{bucket}") |> range(start: -1d) |> filter(fn: (r) => r._measurement == "{kind}")' print(f'Querying from InfluxDB cloud: "{query}" ...')
# Fragment kept byte-identical: it starts inside a try whose opening
# statement is outside this view, so the structure cannot be rebuilt safely.
# Purpose (from the visible code): refresh VeSync energy data (exiting the
# process if the session is dead), then for each outlet with a non-zero
# voltage append voltage/power points and write the batch, sleeping for the
# configured interval between rounds.
# NOTE(review): the points carry no explicit timestamp, so the server
# assigns write time; `influx_payload` is never cleared in view — confirm it
# is reset each round or readings will be re-written.
vesync_client.update_energy() now = datetime.utcnow().isoformat() except Exception as e: logging.critical("No longer connected to VeSync", exc_info=True) sys.exit(1) try: for outlet in vesync_client.outlets: if outlet.voltage == 0: logging.warning( "Could not determine voltage for %s. Is the outlet on?", outlet.device_name) continue influx_payload.append( Point("voltage").tag("device_name", outlet.device_name).field( "value", outlet.voltage)) influx_payload.append( Point("power").tag("device_name", outlet.device_name).field( "value", outlet.power)) logging.debug("Writing payload: %s", influx_payload) influx_writer.write(bucket=config['InfluxDB']['bucket'], record=influx_payload) time.sleep(config['Settings']['interval']) except Exception as e: logging.critical("Couldn't write payload", exc_info=True)
# Demo script kept byte-identical: it is cut off mid-loop at the end
# (`for record in table.records:` has no visible body).
# Purpose (from the visible code): write the same point twice — once as a
# Point object and once as its line-protocol string — with millisecond
# precision, then query the last minute and print each table.
import codecs from datetime import datetime from influxdb_client import WritePrecision, InfluxDBClient, Point from influxdb_client.client.write_api import SYNCHRONOUS bucket = "my-bucket" client = InfluxDBClient(url="http://localhost:9999", token="my-token", org="my-org") write_api = client.write_api(write_options=SYNCHRONOUS) query_api = client.query_api() p = Point("my_measurement").tag("location", "Prague").field( "temperature", 25.3).time(datetime.now(), WritePrecision.MS) # write using point structure write_api.write(bucket=bucket, record=p) line_protocol = p.to_line_protocol() print(line_protocol) # write using line protocol string write_api.write(bucket=bucket, record=line_protocol) # using Table structure tables = query_api.query('from(bucket:"my-bucket") |> range(start: -1m)') for table in tables: print(table) for record in table.records:
# Demo: write one mem sample as raw line protocol and as a Point, then read
# the last hour back with Flux.
from datetime import datetime
from influxdb_client import InfluxDBClient, Point, WritePrecision
from influxdb_client.client.write_api import SYNCHRONOUS
from . import TOKEN

# You can generate a Token from the "Tokens Tab" in the UI
token = TOKEN  # This token is valid for the "sehwan" organization.
org = "sehwan"
bucket = "testBucket"

client = InfluxDBClient(url="http://localhost:8086", token=token)
write_api = client.write_api(write_options=SYNCHRONOUS)

# Write once as a raw line-protocol string...
data = "mem,host=host1 used_percent=23.43234543"
write_api.write(bucket, org, data)

# ...and once as a Point with an explicit nanosecond timestamp.
point = Point("mem")\
    .tag("host", "host1")\
    .field("used_percent", 23.43234543)\
    .time(datetime.utcnow(), WritePrecision.NS)
write_api.write(bucket, org, point)

# Bug fix: the query was built with '\\"', which put literal backslashes in
# the Flux source (from(bucket: \"testBucket\")) and made it unparsable;
# Flux expects plain double quotes around the bucket name.
query = f'from(bucket: "{bucket}") |> range(start: -1h)'
tables = client.query_api().query(query, org=org)
def test_write_using_default_tags(self):
    """Records written without explicit default tags must still carry the
    client-level defaults (id, customer, data-center) after a round trip."""
    bucket = self.create_test_bucket()

    measurement = "h2o_feet"
    field_name = "water_level"
    tag = "location"
    tag_value = "creek level"

    # Two points at timestamps 1 and 2 with string field values.
    record_list = []
    for ts, level in ((1, "1.0"), (2, "2.0")):
        p = Point(measurement)
        p.field(field_name, level)
        p.tag(tag, tag_value)
        p.time(ts)
        record_list.append(p)

    self.write_client.write(bucket.name, self.org, record_list)

    query = 'from(bucket:"' + bucket.name + '") |> range(start: 1970-01-01T00:00:00.000000001Z)'
    flux_result = self.client.query_api().query(query)
    self.assertEqual(1, len(flux_result))

    rec = flux_result[0].records[0]
    rec2 = flux_result[0].records[1]
    for record in (rec, rec2):
        self.assertEqual(self.id_tag, record["id"])
        self.assertEqual(self.customer_tag, record["customer"])
        self.assertEqual("LA", record[self.data_center_key])

    self.delete_test_bucket(bucket)
from influxdb_client import InfluxDBClient, Point
from influxdb_client.client.write_api import SYNCHRONOUS

with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
    write_api = client.write_api(write_options=SYNCHRONOUS)

    # Prepare data: 50 daily "weather" points ending today at 13:20:15 UTC.
    now = datetime.now(UTC).replace(hour=13, minute=20, second=15, microsecond=0)
    _points = [
        Point("weather")
        .tag("location", "New York")
        .field("temperature", random.randint(-10, 30))
        .time(now - timedelta(days=day))
        for day in range(50)
    ]
    write_api.write(bucket="my-bucket", record=_points)

    query_api = client.query_api()

    # Query: using Flux loaded from a file.
    with open('query.flux', 'r') as file:
        query = file.read()
    tables = query_api.query(query)