def writeCloudData(data):
    """Best-effort write of *data* points to the cloud InfluxDB instance.

    Connection details come from the module-level INFLUXDB_CLIENT_URL and
    INFLUXDB_CLIENT_TOKEN constants.  Any error is printed and swallowed so
    a failed upload never crashes the caller.

    :param data: points in the shape expected by ``write_points``.
    """
    # NOTE(review): InfluxDBClient(url, token) matches the influxdb-client
    # 2.x constructor, but write_points() is the influxdb 1.x API -- confirm
    # which client library this file actually imports.
    client = None
    try:
        client = InfluxDBClient(INFLUXDB_CLIENT_URL, INFLUXDB_CLIENT_TOKEN)
        client.write_points(data)
    except Exception as error:
        # Deliberate best-effort behaviour: log and continue.
        print(error)
    finally:
        # BUG FIX: the client was previously never closed, leaking the
        # underlying HTTP session on every call.
        if client is not None:
            client.close()
def influxdb_publish(event, data):
    """Publish a single measurement point to InfluxDB (1.x API).

    :param event: measurement name.
    :param data: dict of field values; must contain a ``'timestamp'`` key
        (epoch seconds) used as the point's time.
    """
    # BUG FIX: the host/port/username/password/database keyword arguments
    # and write_points(..., time_precision=...) below are the influxdb 1.x
    # client API, so the import must come from `influxdb`, not from the 2.x
    # `influxdb_client` package (whose constructor takes url/token).
    from influxdb import InfluxDBClient

    # BUG FIX: build the payload before the try block so the error handler
    # can always report it; previously, if the client constructor raised,
    # the handler itself crashed with UnboundLocalError on `payload`.
    payload = {}
    payload['measurement'] = event
    payload['time'] = data['timestamp']
    payload['fields'] = data

    try:
        client = InfluxDBClient(host=args.influxdb_host,
                                port=args.influxdb_port,
                                username=args.influxdb_user,
                                password=args.influxdb_pass,
                                database=args.influxdb_db)

        if args.verbose:
            print("publishing %s to influxdb [%s:%s]: %s" %
                  (event, args.influxdb_host, args.influxdb_port, payload))

        # write_points() allows us to pass in a precision with the timestamp
        client.write_points([payload], time_precision='s')
    except Exception as e:
        print("Failed to connect to InfluxDB: %s" % e)
        print("  Payload was: %s" % payload)
class BufferingInfluxHandler(InfluxHandler, BufferingHandler):
    """InfluxDB log handler that buffers records and flushes them in batches.

    :param indexed_keys: The names of keys to be treated as keys (as opposed
        to fields) in influxdb.
    :param debugging_fields: Send debug fields if true (the default).
    :param extra_fields: Send extra fields on the log record to InfluxDB
        if true (the default).
    :param localname: Use specified hostname as source host.
    :param measurement: Replace measurement with specified value. If not
        specified, record.name will be passed as `logger` parameter.
    :param level_names: Allows the use of string error level names instead
        of numerical values. Defaults to False.
    :param capacity: The number of points to buffer before sending to
        InfluxDB.
    :param flush_interval: Interval in seconds between flushes, maximum.
        Defaults to 5 seconds. ``None`` disables the background flush
        thread (flushing then happens only when the buffer fills).
    :param client_kwargs: Pass these args to the InfluxDBClient constructor.
    """

    def __init__(self, indexed_keys=None, debugging_fields=True,
                 extra_fields=True, localname=None, measurement=None,
                 level_names=False, capacity=64, flush_interval=5,
                 backpop=True, **client_kwargs):
        self.debugging_fields = debugging_fields
        self.extra_fields = extra_fields
        self.localname = localname
        self.measurement = measurement
        self.level_names = level_names

        # 'level' and 'short_message' are always indexed; callers may add more.
        self.indexed_keys = ['level', 'short_message']
        if indexed_keys is not None:
            self.indexed_keys += indexed_keys

        self.client = InfluxDBClient(**client_kwargs)

        self.flush_interval = flush_interval
        # flush_interval=None means "no periodic flushing" -- no thread.
        self._thread = None if flush_interval is None else threading.Thread(
            target=self._flush_thread,
            name="BufferingInfluxHandler",
            daemon=True)

        InfluxHandler.__init__(
            self,
            indexed_keys=None,
            debugging_fields=debugging_fields,
            extra_fields=extra_fields,
            localname=localname,
            measurement=measurement,
            level_names=level_names,
            backpop=backpop,
            **client_kwargs)
        BufferingHandler.__init__(self, capacity)

        # BUG FIX: start() was previously called unconditionally, which
        # raised AttributeError whenever flush_interval was None.
        if self._thread is not None:
            self._thread.start()

    def emit(self, record):
        # Buffer only; BufferingHandler triggers flush() when full.
        BufferingHandler.emit(self, record)

    def _flush_thread(self):
        # Daemon loop: flush at most every flush_interval seconds.
        while True:
            time.sleep(self.flush_interval)
            self.flush()

    def flush(self):
        """Write all buffered records to InfluxDB and clear the buffer."""
        self.acquire()
        try:
            if self.buffer:
                # The original wrapped this generator in a redundant double
                # itertools.chain(); write_points just needs the per-record
                # points produced by get_point().
                self.client.write_points(
                    [self.get_point(record) for record in self.buffer])
                self.buffer = []
        finally:
            self.release()
def characteristic_value_updated(self, characteristic, value):
    """BLE notification callback: accumulate BMS response bytes and parse them.

    Frames are accumulated in self.response until the b'w' terminator byte
    arrives, then decoded.  The first reply (self.get_voltages False) carries
    pack-level data and triggers a request for per-cell voltages; the second
    reply (self.get_voltages True) carries the cell voltages, is pushed to
    InfluxDB, and stops the manager.
    # NOTE(review): offsets/commands look like the JBD/Xiaoxiang BMS serial
    # protocol -- confirm against the device documentation.
    """
    print("BMS answering")
    self.response += value
    # b'w' (0x77) terminates a complete BMS frame.
    if (self.response.endswith(b'w')):
        print("BMS answer:", self.response.hex())
        # Strip the 4-byte frame header; what remains is the payload.
        self.response = self.response[4:]
        if (self.get_voltages):
            # Second reply: per-cell voltages, 2 bytes each, millivolts.
            packVolts = 0
            for i in range(int(len(self.response) / 2) - 1):
                cell = int.from_bytes(self.response[i * 2:i * 2 + 2],
                                      byteorder='big') / 1000
                self.rawdat['V{0:0=2}'.format(i + 1)] = cell
                packVolts += cell  # + self.rawdat['V{0:0=2}'.format(i)]
            self.rawdat['Vbat'] = packVolts
            # Power (W) = summed cell voltage * pack current (from 1st reply).
            self.rawdat['P'] = round(
                self.rawdat['Vbat'] * self.rawdat['Ibat'], 1)
            self.rawdat['State'] = int.from_bytes(self.response[16:18],
                                                  byteorder='big',
                                                  signed=True)
            print(self.rawdat)
            print("BMS chat ended")
            print(json.dumps(self.rawdat, indent=1, sort_keys=True))
            # Push one aggregated point to InfluxDB (1.x API).
            influx_json_body = [{
                "measurement": "bmsd-python",
                "tags": {
                    "host": hostname
                },
                "fields": {
                    "ah_percent": self.rawdat['Ah_percent'],
                    "ah_remaining": self.rawdat['Ah_remaining'],
                    "ah_full": self.rawdat['Ah_full'],
                    "p": self.rawdat['P'],
                    "v_bat": self.rawdat['Vbat'],
                    "i_bat": self.rawdat['Ibat'],
                    "t1": self.rawdat['T1'],
                    "cycles": self.rawdat['Cycles']
                }
            }]
            influx_client = InfluxDBClient(influx_host, influx_port,
                                           influx_user, influx_pass,
                                           influx_db)
            influx_client.write_points(influx_json_body)
            print(
                "Capacity: {capacity}% ({Ah_remaining} of {Ah_full}Ah)\nPower: {power}W ({I}Ah)\nTemperature: {temp}°C\nCycles: {cycles}"
                .format(
                    capacity=self.rawdat['Ah_percent'],
                    Ah_remaining=self.rawdat['Ah_remaining'],
                    Ah_full=self.rawdat['Ah_full'],
                    power=self.rawdat['P'],
                    I=self.rawdat['Ibat'],
                    temp=self.rawdat['T1'],
                    cycles=self.rawdat['Cycles'],
                ))
            #self.disconnect();
            # Session complete -- stop the BLE manager's event loop.
            self.manager.stop()
        else:
            # First reply: pack-level values at fixed byte offsets.
            # Voltage/current are centivolts/centiamps (hence / 100).
            self.rawdat['packV'] = int.from_bytes(
                self.response[0:2], byteorder='big', signed=True) / 100.0
            self.rawdat['Ibat'] = int.from_bytes(
                self.response[2:4], byteorder='big', signed=True) / 100.0
            self.rawdat['Bal'] = int.from_bytes(self.response[12:14],
                                                byteorder='big',
                                                signed=False)
            self.rawdat['Ah_remaining'] = int.from_bytes(
                self.response[4:6], byteorder='big', signed=True) / 100
            self.rawdat['Ah_full'] = int.from_bytes(
                self.response[6:8], byteorder='big', signed=True) / 100
            self.rawdat['Ah_percent'] = round(
                self.rawdat['Ah_remaining'] / self.rawdat['Ah_full'] * 100,
                2)
            self.rawdat['Cycles'] = int.from_bytes(self.response[8:10],
                                                   byteorder='big',
                                                   signed=True)
            # Byte 22 holds the sensor count; temperatures are stored in
            # deci-Kelvin (hence the -2731 offset and /10 to get Celsius).
            for i in range(int.from_bytes(self.response[22:23],
                                          'big')):  # read temperatures
                self.rawdat['T{0:0=1}'.format(i + 1)] = (int.from_bytes(
                    self.response[23 + i * 2:i * 2 + 25], 'big') - 2731) / 10
            print("BMS request voltages")
            # Switch to voltage mode and send the cell-voltage request frame.
            self.get_voltages = True
            self.response = bytearray()
            self.bms_write_characteristic.write_value(
                bytes([0xDD, 0xA5, 0x04, 0x00, 0xFF, 0xFC, 0x77]))
# --- Script entry: load a room's climate log CSV and push it to InfluxDB ---

# Extract the room number from an argument like "LOT0007".
# BUG FIX: raw string for the regex; "LOT\d{4}" is an invalid escape
# sequence (DeprecationWarning, SyntaxError in future Python versions).
room = re.sub(r"LOT\d{4}", "", sys.argv[1])
print("Room Nr: " + room)
try:
    roomint = int(room)
except ValueError:
    print("Illegal LOT-Nr: " + sys.argv[1])
    exit()
if roomint < 0 or roomint > 15:
    print("Illegal Room Nr.")
    exit()

df = pd.read_csv('/etc/openhab2/scripts/grafiek' + room + '.dat',
                 header=None, skiprows=1, encoding='ISO-8859-1',
                 names=['datum','Lufttemperatur','Feuchtigkeit Temperatur','Kompost temp1','Kompost temp2','Kompost temp3','Zuluft Temperatur','CO2','Temperatur draussen','Feucht temp draussen','CO2 draussen','Trocknen','Desinfektion','RF Zuluft','Zuluft Temperatur unit','Zuluft Feucht temp','Luftklappe','Kuehlung','Heizung','Befeuchtigung','Ventilator','Dampf','Licht','Kuehlung Pumpe','Vorheizung','Vorbefeuchtigung','Kompost sw','Lufttemperatur sw','Zuluft sw','CO2 sw','RF-AF-FD sw','CO2 min','CO2 max','Abs Feucht Zuluft sw','Vorheizung temp sw','Kompost durch','Kompost diff','RF Luft','AF Luft','FD Luft','Absolute Feucht Maximum','Absolute Feucht Minimum','Misch luft','RF draussen','AF draussen','AF Zuluft','FD Zuluft','Entalpie Raum','Entalpie draussen','Sauerstoff','Phase'])

# Keep only the columns of interest.
df1 = df.iloc[:, [0, 1, 6, 7, 10, 14, 16, 17, 18, 20, 21, 24, 49, 50]]
# NOTE(review): this expression's result is discarded -- dead statement.
# Was a duplicate-row filter (e.g. df1 = df1[~df1.duplicated()]) intended?
df1[df.duplicated(keep=False)]

db = 'Gicompar' + room

# NOTE(review): `timeValues`, `tbName` and `client` are not defined in this
# chunk -- presumably created elsewhere in the file; confirm before running.
timeValues.index = df[['datum']]
tags = {'Lufttemperatur': df[['Lufttemperatur']],
        'Zuluft Temperatur': df[['Zuluft Temperatur']],
        'CO2': df[['CO2']]}

# BUG FIX: switch to the database named by the `db` variable built above;
# the original passed the literal string 'db'.
client.switch_database(db)
client.write_points(db, tbName, timeValues, tags=tags)