def run(self):
    t0 = time.time()
    while True:
        time.sleep(Constants.LOOP_DELAY)
        t1 = time.time()
        try:
            # run a self test every 10 seconds: check the MQTT link, reconnect if needed, then ping
            if (t1 - t0) >= 10:
                t0 = t1
                # self.logg.log("self test")
                if not self.mqtt_client.connected:
                    self.logg.log("disconnect detected, reconnect")
                    try:
                        self.mqtt_client.connect()
                    except:
                        self.logg.log(Utils.format_exception(self.__class__.__name__))
                self.mqtt_client.ping("self test")
            # drain one queued sensor message per loop iteration
            if not self.sensor_data_q.empty():
                recv: MQTTMessage = self.sensor_data_q.get(block=False)
                self.update_sensor_data(recv.id, recv)
                if Constants.conf["ENV"]["LOG_SENSOR_DATA"]:
                    self.logg.log(recv.topic + " " + str(recv.id) + " " + str(recv.data))
        except:
            self.logg.log(Utils.format_exception(self.__class__.__name__))
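# Usage sketch: run() blocks forever, so the enclosing class is presumably a
# threading.Thread subclass started as a daemon worker. The `Core` name below
# is hypothetical, not taken from the source.
#
#   core = Core()
#   core.daemon = True
#   core.start()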
def load_sensors(self):
    self.logg.log("load sensors")
    try:
        self.db = Database.instance()
        sensors = self.db.get_sensors()
        self.logg.log(sensors)
        t_create = time.time()
        if sensors is not None:
            for s in sensors:
                s1: Sensor = Sensor()
                s1.id = s["sensor_id"]
                s1.log_rate = s["log_rate"]
                s1.topic_name = s["topic_name"]
                s1.topic_code = s["topic_code"]
                s1.type = s["sensor_type_code"]
                s1.ts = t_create
                s1.log_ts = t_create
                # self.logg.log(json.dumps(s1.__dict__))
                self.sensors.append(s1)
        topics = self.db.get_topics()
        if topics is not None:
            for t in topics:
                t1: MQTTTopic = MQTTTopic(t)
                self.topics.append(t1)
        self.logg.log(self.topics)
        self.logg.log(self.sensors)
    except:
        self.logg.log(Utils.format_exception(self.__class__.__name__))
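# The row shape that load_sensors() expects from db.get_sensors(), inferred
# from the keys read above; the values here are illustrative only.
EXAMPLE_SENSOR_ROW = {
    "sensor_id": 1,
    "log_rate": 60,              # seconds between buffered samples
    "topic_name": "wsn/sensors",
    "topic_code": 1,
    "sensor_type_code": 1
}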
def connect(self):
    self.logg.log("connecting to db")
    try:
        dbconf = Constants.conf["ENV"]["DB"]
        host = dbconf["HOST"]
        user = dbconf["USER"]
        password = dbconf["PASS"]
        dbname = dbconf["NAME"]
        # db type e.g. mysql, postgresql
        dbtype = dbconf["TYPE"]
        if dbtype == "MYSQL":
            self.connection = pymysql.connect(
                host=host,
                user=user,
                password=password,
                database=dbname,
                cursorclass=pymysql.cursors.DictCursor)
            # self.cursor = self.connection.cursor(pymysql.cursors.DictCursor)
            self.cursor = self.connection.cursor()
        else:
            # only MySQL is currently supported; bail out without marking as connected
            self.logg.log("unsupported db type: " + str(dbtype))
            return
        self.connected = True
        self.logg.log("connected to db")
    except:
        self.logg.log(Utils.format_exception(self.__class__.__name__))
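# The config layout consumed by connect() above, i.e. Constants.conf["ENV"]["DB"];
# the values are placeholders, not the project's real credentials.
EXAMPLE_DB_CONF = {
    "HOST": "localhost",
    "USER": "iot",
    "PASS": "secret",
    "NAME": "iot_db",
    "TYPE": "MYSQL"  # the only type currently handled
}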
def on_disconnect(client, userdata, rc):
    # nested paho-mqtt callback; `self` is captured from the enclosing scope
    self.logg.log("client: " + str(client) + " disconnected")
    self.connected = False
    try:
        if self.client:
            self.client.loop_stop()
    except:
        self.logg.log(Utils.format_exception(self.__class__.__name__))
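# A minimal sketch of how this nested callback is presumably attached to the
# client in the enclosing connect method, assuming paho-mqtt 1.x (host and
# port are placeholders):
#
#   self.client = mqtt.Client()
#   self.client.on_disconnect = on_disconnect
#   self.client.on_message = on_message
#   self.client.connect("localhost", 1883)
#   self.client.loop_start()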
def get_sensors():
    try:
        data = db.get_sensors()
        return json.dumps(data)
    except:
        logg.log(Utils.format_exception(""))
        return json.dumps({
            "status": False
        })
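# This handler is a plain function using module-level `db` and `logg`, so it is
# presumably registered as a Flask route, e.g. via a decorator or the equivalent
# call below (the URL path is hypothetical):
#
#   app.add_url_rule("/get_sensors", view_func=get_sensors)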
def wrap(self, *args, **kwargs):
    # print("inside wrap")
    self.check_connect()
    try:
        return func(self, *args, **kwargs)
    except pymysql.Error as e:
        self.logg.log(Utils.format_exception(self.__class__.__name__))
        if 'MySQL server has gone away' in str(e):
            # reconnect MySQL
            self.logg.log("attempt reconnect")
            self.connect()
        else:
            # no need to retry for other errors
            pass
        return None
    except Exception:
        self.logg.log(Utils.format_exception(self.__class__.__name__))
        return None
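# A sketch of the decorator that presumably encloses wrap() above; the
# `db_operation` name and the example query are hypothetical, the reconnect
# logic is the body shown above.
#
#   def db_operation(func):
#       @wraps(func)
#       def wrap(self, *args, **kwargs):
#           ...  # body as above
#       return wrap
#
#   @db_operation
#   def get_topics(self):
#       self.cursor.execute("SELECT * FROM topic")
#       return self.cursor.fetchall()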
def get_sensor_data():
    try:
        id = request.args.get('id')
        chan = request.args.get('chan')
        limit = request.args.get('limit')
        data = db.get_sensor_data(id, chan, limit)
        return json.dumps(data, indent=4, sort_keys=True, default=str)
    except:
        logg.log(Utils.format_exception(""))
        return json.dumps({
            "status": False
        })
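# Example request, assuming the handler is mounted at /get_sensor_data
# (the path, host and port are placeholders):
#
#   curl "http://localhost:5000/get_sensor_data?id=1&chan=0&limit=100"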
def on_message(client, userdata, message):
    # nested paho-mqtt callback; `self` is captured from the enclosing scope
    try:
        # self.logg.log("message received, topic: " + message.topic + ", payload: " + str(message.payload.decode("utf-8")))
        raw_data = str(message.payload.decode("utf-8"))
        msg = MQTTMessage()
        topic_elems = message.topic.split("/")
        n_topic_elems = len(topic_elems)
        raw_data_split = raw_data.split(",")
        # msg.topic = "/".join(topic_elems[0:n_topic_elems-2])
        msg.topic = "/".join(topic_elems)
        # the last-1 topic item is the sensor id
        # the last topic item is the input/output selector (cmd, sns)
        # msg.id = int(topic_elems[n_topic_elems-2])
        # extract the sensor id and remove it from the data array for further processing
        msg.id = int(raw_data_split[0])
        msg.data = raw_data_split[1:]
        msg.ts = datetime.now()
        # fixed type at the moment
        msg.type = 1
        # TODO: use a different topic for each sensor type, e.g. wsn/indoor, wsn/outdoor
        # TODO: only log the known sensors in the db and filter them by the topic ID
        if not self.sensor_data_q.full():
            self.sensor_data_q.put(msg)
    except:
        self.logg.log(Utils.format_exception(self.__class__.__name__))
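# The payload format assumed above: a comma-separated string whose first field
# is the raw sensor id, e.g. a payload of b"2,data,25.3,41.2" yields msg.id = 2
# and msg.data = ["data", "25.3", "41.2"]; the "data" heading is stripped later
# in update_sensor_data(). The sample values are illustrative.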
def get_sensor_data_plot():
    try:
        id = request.args.get('id')
        chan = request.args.get('chan')
        limit = request.args.get('limit')
        data = db.get_sensor_data(id, chan, limit)
        # Utils.log(data)
        if not chan:
            # no channel specified: group the rows by channel into separate series
            series_dict = {}
            for row in data:
                key = row["chan"]
                if key not in series_dict:
                    timeseries1 = Timeseries()
                    timeseries1.x = []
                    timeseries1.y = []
                    series_dict[key] = timeseries1
                series_dict[key].x.append(row["timestamp"])
                series_dict[key].y.append(row["value"])
            timeseries = list(series_dict.values())
            strIO = graph.plot_timeseries_multi(
                timeseries, "sensor " + id, "time", "value")
        else:
            timeseries = Timeseries()
            timeseries.x = []
            timeseries.y = []
            for row in data:
                timeseries.x.append(row["timestamp"])
                timeseries.y.append(row["value"])
            strIO = graph.plot_timeseries(
                timeseries, "sensor " + id + " chan " + chan, "time", "value")
        return strIO
        # return send_file(strIO,
        #                  mimetype='image/jpg',
        #                  attachment_filename='logo.png')
    except:
        logg.log(Utils.format_exception(""))
        return json.dumps({
            "status": False
        })
def get_sensor_data_csv():
    try:
        id = request.args.get('id')
        chan = request.args.get('chan')
        limit = request.args.get('limit')
        file = request.args.get('file')
        data = db.get_sensor_data(id, chan, limit)
        # build the csv dynamically in memory using BytesIO
        # (instead of writing to the file system)
        Utils.log(data)
        strIO = io.BytesIO()
        strdata = db.extract_csv_multichan(data)
        strIO.write(strdata.encode("utf-8"))
        strIO.seek(0)
        if file:
            return send_file(strIO,
                             mimetype='text/csv',
                             attachment_filename='downloadFile.csv',
                             as_attachment=True)
        else:
            # return the csv contents inline as plain text
            byte_str = strIO.read()
            text_obj = byte_str.decode('UTF-8')
            return text_obj
    except:
        logg.log(Utils.format_exception(""))
        return json.dumps({
            "status": False
        })
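# Example requests, assuming the handler is mounted at /get_sensor_data_csv
# (paths and values are placeholders): with file=1 the csv is sent as an
# attachment, otherwise it is returned inline as plain text.
#
#   curl "http://localhost:5000/get_sensor_data_csv?id=1&limit=100&file=1" -o data.csv
#   curl "http://localhost:5000/get_sensor_data_csv?id=1&limit=100"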
def update_sensor_data(self, raw_id, d1: MQTTMessage):
    ts = time.time()
    found = False
    s1: Sensor = Sensor()
    if d1.data is None:
        return
    # remove the "data" heading if present
    data = d1.data
    if d1.data[0] == "data":
        data = d1.data[1:]
    d1.data = data
    try:
        for s in self.sensors:
            s1 = s
            if s1.id == Utils.get_sensor_id_encoding(raw_id, self.get_topic_code(d1.topic)):
                found = True
                # self.logg.log("found / " + d1.topic + ": " + str(s1.id) + " raw id: " + str(raw_id) + " topic code: " + str(s1.topic_code))
                break
        if found:
            # for the real time monitor
            # self.logg.log("update sensor/" + d1.topic)
            s1.current_data = data
            # handle sampling and db logging:
            # only add to the data buffer if enough time has passed since the last recorded message
            # ts = 0 => log all incoming messages
            if (s1.ts == 0) or (ts - s1.ts >= s1.log_rate):
                # self.logg.log("sample")
                s1.ts = ts
                s1.data_buffer.append(d1)
            # handle dump to db
            if ts - s1.log_ts >= self.default_log_rate:
                self.logg.log("log db")
                s1.log_ts = ts
                # check if there is data in the buffer
                if len(s1.data_buffer) > 0:
                    self.log_sensor_data(s1)
                    s1.data_buffer = []
        else:
            # the sensor is not defined in the db: save it and use defaults,
            # assigning it to the topic (which should be defined)
            self.logg.log("create sensor")
            s1.current_data = d1
            s1.raw_id = raw_id
            s1.type = d1.type
            s1.log_rate = self.default_log_rate
            s1.ts = ts
            s1.log_ts = ts
            s1.topic_name = d1.topic
            # write to db; the topic code is assigned by create_sensor
            s1 = self.create_sensor(s1)
            # add to the list anyway (the sensor may already be registered and the db returns an error)
            self.logg.log("new sensor: " + str(s1.__dict__))
            self.sensors.append(s1)
    except:
        self.logg.log(Utils.format_exception(self.__class__.__name__) + " at message: " + str(d1.__dict__))
def extract_csv_multichan(self, data):
    try:
        # group the rows by sensor id and channel
        data_dict = {}
        for d in data:
            key = str(d["sensor_id"]) + "/" + str(d["chan"])
            if key in data_dict:
                data_dict[key].append(d)
            else:
                data_dict[key] = [d]
        # merge the data, assuming it is gathered at almost the same timestamps
        # (the server polls for data via mqtt at regular intervals)
        od = collections.OrderedDict(sorted(data_dict.items()))
        # print(od)
        data_rows = []
        data_cols = []
        row_size = None
        headers = None
        for (k, v) in od.items():
            # use the shortest column as the common row count
            if row_size is None:
                row_size = len(v)
            elif len(v) < row_size:
                row_size = len(v)
            data_cols.append(v)
        for i in range(row_size):
            data_row = []
            add_headers = False
            if headers is None:
                add_headers = True
                headers = ["index", "timestamp"]
            data_row.append(i + 1)
            # use the timestamp of the first column for the whole row
            ts = data_cols[0][i]["timestamp"]
            data_row.append(ts)
            # div = 10000000.0
            # data_row.append(datetime.utcfromtimestamp(ts / div).strftime('%Y-%m-%d %H:%M:%S') + "." + str(int(((ts / div) - int(ts / div)) * 1000)))
            for (j, c) in enumerate(data_cols):
                data_row.append(c[i]["value"])
                if add_headers:
                    headers.append("node " + str(c[i]["sensor_id"]) + " chan " + str(c[i]["chan"]))
            data_rows.append(data_row)
        data_str = ",".join(headers) + "\n"
        for dr in data_rows:
            data_str += ",".join(str(dc) for dc in dr) + "\n"
        return data_str
    except:
        self.logg.log(Utils.format_exception(self.__class__.__name__))
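# Illustrative output of extract_csv_multichan() for two sensor columns
# (values made up): one "node <id> chan <chan>" column per sensor/channel key.
#
#   index,timestamp,node 1 chan 0,node 2 chan 0
#   1,2021-01-01 12:00:00,25.3,24.8
#   2,2021-01-01 12:01:00,25.4,24.9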