class TaskRunner(object):
    def __init__(self, loop):
        self.mbusexecutor = ThreadPoolExecutor(1)  # separate thread for MBus operations over the serial interface (slow)
        self.loop = loop
        self.arguments = {}
        self.mbusConn = None

    def __del__(self):
        self.closeMbusConnection()

    def get_argument(
        self, name, default, strip=True
    ):  # mimicking the HTTPRequestHandler interface, so the function is usable on both HTTPRequestHandler and TaskRunner objects
        return self.arguments[name] if name in self.arguments else default

    @gen.coroutine
    def bgheartbeat():
        out = StringIO()
        _scheduler.print_jobs(out=out)
        _log.info("%s" % out.getvalue())

    @gen.coroutine
    def bgsetup(self):
        firstrun = datetime.fromtimestamp(
            divmod(datetime.now().timestamp(), bgloop_syncto)[0] * bgloop_syncto + bgloop_syncto
        )
        _log.info("Background jobs first run at %s" % firstrun)
        _scheduler.add_job(TaskRunner.bgheartbeat, "interval", seconds=60, next_run_time=firstrun)
        _scheduler.add_job(TaskRunner.readAndStoreMbusData, "interval", args=[self], seconds=60, next_run_time=firstrun)
        _scheduler.add_job(PowerPoller.getPowerData, "interval", seconds=5, next_run_time=firstrun)
        out = StringIO()
        _scheduler.print_jobs(out=out)
        _log.info("%s" % out.getvalue())
        return

    @gen.coroutine
    def dbUpsertMbusSlaveInfo(self, slaveinfo, addr):
        _log.info("Updating (or inserting) mbus slave metadata to database")
        result = 0
        sqlbuf = StringIO()
        sqlbuf.write(
            "UPDATE OR IGNORE aMbusSlaveInfo SET\n"
        )  # the update intentionally fails when the row does not exist; the insert below handles that case
        sqlbuf.write(" manufacturer = '%s',\n" % slaveinfo["Manufacturer"])
        sqlbuf.write(" version = %d,\n" % int(slaveinfo["Version"]))
        sqlbuf.write(" productName = '%s',\n" % slaveinfo["ProductName"])
        sqlbuf.write(" medium = '%s',\n" % slaveinfo["Medium"])
        sqlbuf.write(" accessNumber = %d,\n" % int(slaveinfo["AccessNumber"]))
        sqlbuf.write(" signature = '%s',\n" % slaveinfo["Signature"])
        sqlbuf.write(" lastStatus = '%s',\n" % slaveinfo["Status"])
        sqlbuf.write(" rowUpdatedTimestamp = '%s'\n" % datetime.utcnow().strftime(df))
        sqlbuf.write("WHERE address = %d AND id = %s" % (addr, slaveinfo["Id"]))
        _log.debug("SQL:\n%s" % sqlbuf.getvalue())
        # cur.execute(sqlbuf.getvalue())
        try:
            result = yield DBHelperSQLITE.dbExecute(
                _dbHelper, sqlbuf.getvalue(), parseresp=None, script=False, returnrowcount=True
            )
        except Exception as e:
            _log.warning("Error during database write : %s" % str(e))
            pass
        if not result:  # no rows were updated, so insert instead
            _log.warning("Inserting new row for mbus slave metadata, should only happen when data is recreated")
            sqlbuf.truncate(0)
            sqlbuf.seek(0)
            sqlbuf.write(
                "INSERT INTO aMbusSlaveInfo(address,id,manufacturer,version,productName,medium,accessNumber,signature,lastStatus,rowCreatedTimestamp)\n"
            )
            sqlbuf.write(
                " VALUES (%d,'%s','%s',%d,'%s','%s',%d,'%s','%s','%s');\n"
                % (
                    addr,
                    slaveinfo["Id"],
                    slaveinfo["Manufacturer"],
                    int(slaveinfo["Version"]),
                    slaveinfo["ProductName"],
                    slaveinfo["Medium"],
                    int(slaveinfo["AccessNumber"]),
                    slaveinfo["Signature"],
                    slaveinfo["Status"],
                    datetime.utcnow().strftime(df),
                )
            )
            _log.debug("SQL:\n%s" % sqlbuf.getvalue())
            # cur.execute(sqlbuf.getvalue())
            try:
                result = yield DBHelperSQLITE.dbExecute(
                    _dbHelper, sqlbuf.getvalue(), parseresp=None, script=False, returnrowcount=True
                )
            except Exception as e:
                _log.warning("Error during database write : %s" % str(e))
                pass
        # self.bgDbConn.commit()
        return result

    @gen.coroutine
    def dbInsertMbusDataRecords(self, mbusdata, addr):
        _log.info("Inserting mbus slave data records to database")
        sqlbuf = StringIO()
        sqlbuf2 = StringIO()
        s2len = 0
        slaveid = mbusdata["SlaveInformation"]["Id"]
        accessnum = mbusdata["SlaveInformation"]["AccessNumber"]
        mbusdf = "%Y-%m-%dT%H:%M:%S"  # 2016-01-28T06:48:13 (in UTC)
        creationtime = datetime.utcnow().strftime(df)
        result = 0
        for record in mbusdata["DataRecord"]:
            _log.debug(
                "Inserting mbus slave data records (address = %d , id = %d) to database" % (addr, int(record["@id"]))
            )
            sqlbuf.write(
                "INSERT INTO aMbusDataRecord(address,id,recordId,recordFunction,recordStorageNumber,recordUnit,recordValue,recordTimestampRaw,recordTimestamp,rowCreatedTimestamp,accessNumber)\n"
            )
            sqlbuf.write(
                " VALUES (%d,'%s',%d,'%s',%d,'%s','%s','%s','%s','%s',%d);\n"
                % (
                    addr,
                    slaveid,
                    int(record["@id"]),
                    record["Function"],
                    int(record["StorageNumber"]),
                    record["Unit"],
                    record["Value"],
                    record["Timestamp"],
                    datetime.strptime(record["Timestamp"], mbusdf).replace(tzinfo=timezone.utc).strftime(df),
                    datetime.utcnow().strftime(df),
                    int(accessnum),
                )
            )
            optionals = {
                k: record.get(k, None) for k in ("@frame", "Tariff", "Device") if k in record
            }  # check whether optional data is available in the record
            if len(optionals):
                m = {"@frame": "recordFrame", "Tariff": "recordTariff", "Device": "recordDevice"}
                _log.debug("Mbus response contains optionals: %s" % optionals)
                s2len += sqlbuf2.write("UPDATE aMbusDataRecord SET")
                for i, key in enumerate(optionals):
                    if i != 0:
                        s2len += sqlbuf2.write(",")
                    s2len += sqlbuf2.write(" %s = '%s'" % (m[key], optionals[key]))
                s2len += sqlbuf2.write(
                    " WHERE address=%d AND id='%s' AND recordId='%s' AND recordTimestamp='%s';\n"
                    % (addr, slaveid, record["@id"], creationtime)
                )
        _log.debug("SQL:\n%s" % sqlbuf.getvalue())
        # cur.executescript(sqlbuf.getvalue())
        try:
            result = yield DBHelperSQLITE.dbExecute(
                _dbHelper, sqlbuf.getvalue(), parseresp=None, script=True, returnrowcount=True
            )
        except Exception as e:
            _log.warning("Error during database write : %s" % str(e))
            pass
        if s2len > 0:
            _log.debug("SQL(Optionals):\n%s" % sqlbuf2.getvalue())
            # cur.executescript(sqlbuf2.getvalue())
            try:
                result = yield DBHelperSQLITE.dbExecute(
                    _dbHelper, sqlbuf2.getvalue(), parseresp=None, script=True, returnrowcount=True
                )
            except Exception as e:
                _log.warning("Error during database write : %s" % str(e))
                pass
        # self.bgDbConn.commit()
        return result

    @gen.coroutine
    def readAndStoreMbusData(self):
        mbus_addresses = [1, 15, 31]  # m-bus primary addresses to request data from
        # mbus_addresses = [1]  # m-bus primary addresses to request data from
        reply_dict = {}
        if not self.mbusConn:
            _log.info("M-bus connection not opened previously, opening now..")
            self.mbusConn = yield self.openMbusConnection()
        for addr in mbus_addresses:
            try:
                res = yield self.readMbus(addr)
                _log.debug("address: %d resp:\n%s" % (addr, json.dumps(res, indent=4)))
                if res:
                    reply_dict[addr] = res
                    rowsUpdated = yield self.dbUpsertMbusSlaveInfo(
                        reply_dict[addr]["MBusData"]["SlaveInformation"], addr
                    )
                    if rowsUpdated:  # only try to add records if the previous SQL was successful
                        rowsUpdated = yield self.dbInsertMbusDataRecords(reply_dict[addr]["MBusData"], addr)
            except Exception as e:
                _log.error(
                    "Exception in m-bus read or data insert: addr: %s, type %s, desc: %s" % (addr, type(e), str(e))
                )
                pass
        if len(reply_dict):
            _log.info("Successful m-bus read, aggregating MC302 records")
            res = yield self.aggregateMC302DataRecords()
            # for k, resp in enumerate(reply_dict):
            #     _log.info("i: %s key:%s value:\n%s" % (k, resp, reply_dict[resp]))
        else:
            _log.info("No data received over m-bus")
        # mbusConn = yield self.closeMbusConnection()  # FIX: don't close all the time
        return
    @run_on_executor(executor="mbusexecutor")
    def openMbusConnection(self):
        try:
            _log.info("Opening m-bus connection on device '%s'" % mbusSerialPort)
            self.mbusConn = MBus(device=mbusSerialPort)
            self.mbusConn.connect()
        except Exception as e:
            _log.error("Exception while connecting to m-bus : type %s, desc: %s" % (type(e), str(e)))
            raise Exception(e)
        return self.mbusConn

    @run_on_executor(executor="mbusexecutor")
    def closeMbusConnection(self):
        try:
            _log.info("Closing m-bus connection")
            self.mbusConn.disconnect()
            self.mbusConn = None
        except Exception as e:
            _log.error("Exception while closing connection to m-bus : type %s, desc: %s" % (type(e), str(e)))
            raise Exception(e)
        return self.mbusConn

    @run_on_executor(executor="mbusexecutor")
    def readMbus(self, addr=0):
        reply = None
        try:
            self.mbusConn.send_request_frame(addr)
            reply = xmltodict.parse(
                self.mbusConn.frame_data_xml(self.mbusConn.frame_data_parse(self.mbusConn.recv_frame()))
            )  # xmltodict works directly on the byte-string XML from libmbus
        except Exception as e:
            _log.error(
                "Exception while requesting data from m-bus : mbus-address: %d, type %s, desc: %s"
                % (addr, type(e), str(e))
            )
            raise Exception(e)
        return reply

    def getFactor(self, exp=""):
        factor = 1.0
        if exp == "m":
            factor = float(1 / 1000)
        elif exp == "my":
            factor = float(1 / 1000000)
        elif exp == "10":
            factor = float(10)
        elif exp == "100":
            factor = float(100)
        elif exp == "k":
            factor = float(1000)
        elif exp == "10 k":
            factor = float(10000)
        elif exp == "100 k":
            factor = float(100000)
        elif exp == "M":
            factor = float(1000000)
        elif exp == "T":
            factor = float(1000000000)
        elif exp.startswith("1e"):
            factor = float(exp)
        return factor

    @gen.coroutine
    def aggregateMC302DataRecords(self):  # merge ~30 mbus data record rows into 1 row and update SlaveInfo
        _log.info("Aggregating mc302 records...")
        _log.debug("Get timestamp of latest MC302 record update...")
        # Find all mbus slaves and their addresses
        sql = "select distinct id,address from aMbusSlaveInfo"
        result = []
        try:
            result = yield DBHelperSQLITE.dbExecute(_dbHelper, sql, parseresp=None, script=False, returnrowcount=False)
        except Exception as e:
            _log.warning("Error during database operation : %s" % str(e))
            pass
        slaves = {}
        for row in result:
            slaves[row[0]] = {}
            slaves[row[0]]["address"] = row[1]
        # Find the last aMbusMC302Record update for each slave
        sql = "select id,max(recordTimestamp) from aMbusMC302Record group by id"
        result = []
        try:
            result = yield DBHelperSQLITE.dbExecute(_dbHelper, sql, parseresp=None, script=False, returnrowcount=False)
        except Exception as e:
            _log.warning("Error during database operation : %s" % str(e))
            pass
        for row in result:
            _log.info(row)
            if slaves[row[0]]:
                slaves[row[0]]["lastUpdate"] = row[1]
        sqlbuf = StringIO()
        # Loop over the slaves, aggregating data
        for id, val in slaves.items():
            # TABLE aMbusDataRecord
            #   rowid INTEGER PRIMARY KEY NOT NULL,
            #   address INTEGER NOT NULL,          /* Mbus primary address */
            #   id TEXT NOT NULL,                  /* Mbus slave (SlaveInformation) id */
            #   recordId INTEGER NOT NULL,         /* XML: /MBusData/Record@id */
            #   recordFrame TEXT,                  /* XML: /MBusData/Record@frame (optional), not clear when this data is received but libmbus xml has reserved space for this */
            #   recordFunction TEXT NOT NULL,      /* XML: /MBusData/Record/Function */
            #   recordStorageNumber INTEGER,       /* XML: /MBusData/Record/StorageNumber */
            #   recordTariff TEXT,                 /* XML: /MBusData/Record/Tariff (optional) */
            #   recordDevice TEXT,                 /* XML: /MBusData/Record/Device (optional) */
            #   recordUnit TEXT NOT NULL,          /* XML: /MBusData/Record/Unit */
            #   recordValue INTEGER NOT NULL,      /* XML: /MBusData/Record/Value */
            #   recordTimestampRaw TEXT NOT NULL,  /* XML: /MBusData/Record/Timestamp */
            #   recordTimestamp TEXT NOT NULL,     /* Parsed from /MBusData/Record/Timestamp */
            #   rowCreatedTimestamp INTEGER NOT NULL,
            #   accessNumber INTEGER,
            sql = "SELECT distinct id,address,recordTimestamp,accessNumber FROM aMbusDataRecord WHERE id = '%s'" % id
            if "lastUpdate" in val:
                sql += " AND recordTimestamp > '%s'" % val["lastUpdate"]
            sql += ";\n"
            timestamps = []
            try:
                timestamps = yield DBHelperSQLITE.dbExecute(
                    _dbHelper, sql, parseresp=None, script=False, returnrowcount=False
                )
            except Exception as e:
                _log.warning("Error during database operation : %s" % str(e))
                pass
            creationtime = datetime.utcnow().strftime(df)
            for ts in timestamps:
                curr_id = ts[0]
                curr_addr = ts[1]
                curr_ts = ts[2]
                curr_acc = ts[3]
                # TABLE aMbusMC302Record
                #   rowId INTEGER PRIMARY KEY NOT NULL,
                #   accessNumber INTEGER NOT NULL,  /* Slave MBus response sequence number */
                #   address INTEGER NOT NULL,       /* Mbus primary address */
                #   id TEXT NOT NULL,               /* Mbus slave (SlaveInformation) id */
                #   recordTimestampRaw TEXT,        /* same for all data records in a single Mbus response */
                #   recordTimestamp TEXT,           /* parsed value of raw */
                #   heatingEnergy INTEGER,          /* Heating energy (cumulative, non-resettable) converted to Wh from 1|Instantaneous value|0|Energy;100;Wh */
                #   coolingEnergy INTEGER,          /* Cooling energy (cumulative, non-resettable) converted to Wh from 2|Instantaneous value|0|Energy;100;Wh */
                #   energyM3T1 INTEGER,             /* Energy m^3 * T1 (cumulative Volume * Temperature) from 3|Instantaneous value|0|Manufacturer specific */
                #   energyM3T2 INTEGER,             /* Energy m^3 * T2 (cumulative Volume * Temperature) from 4|Instantaneous value|0|Manufacturer specific */
                #   volume INTEGER,                 /* Current volume (cumulative, converted to dm3) from 5|Instantaneous value|0|Volume;m;m^3 */
                #   hourCounter INTEGER,            /* Hour counter (non-resettable) from 6|Instantaneous value|0|On time (hours) */
                #   errorHourCounter INTEGER,       /* Error hour counter (cumulative hours in error, non-resettable) from 7|Value during error state|0|On time (hours) */
                #   temp1 REAL,                     /* Current (Flow) Temperature T1 (converted to deg. Celsius) from 8|Instantaneous value|0|Flow temperature;1e-2;deg C */
                #   temp2 REAL,                     /* Current (Return) Temperature T2 (converted to deg. Celsius) from 9|Instantaneous value|0|Return temperature;1e-2;deg C */
                #   deltaT1T2 REAL,                 /* Temperature difference T1-T2 (converted to deg. Celsius) from 10|Instantaneous value|0|Temperature Difference;1e-2;deg C */
                #   power INTEGER,                  /* Current power (converted to W) from 11|Instantaneous value|0|Power;100;W */
                #   powerMax INTEGER,               /* Maximum power since XX (converted to Watt) from 12|Maximum value|0|Power;100;W */
                #   flow INTEGER,                   /* Current water flow (converted to dm3/h) from 13|Instantaneous value|0|Volume flow;m;m^3/h */
                #   flowMax INTEGER,                /* Maximum water flow since XX (converted to dm3/h) from 14|Maximum value|0|Volume flow;m;m^3/h */
                #   errorFlags TEXT,                /* Error flags from 15|Instantaneous value|0|Error flags */
                #   timePoint TEXT,                 /* Date+Time from 16|Instantaneous value|0|Time Point (time & date) */
                #   targetHeatingEnergy INTEGER,    /* Heating energy since targetTimepoint (in Wh) from 17|Instantaneous value|1|Energy;100;Wh */
                #   targetCoolingEnergy INTEGER,    /* Cooling energy since targetTimepoint (in Wh) from 18|Instantaneous value|1|Energy;100;Wh */
                #   targetEnergyM3T1 INTEGER,       /* Energy m^3 * T1 since targetTimepoint from 19|Instantaneous value|1|Manufacturer specific */
                #   targetEnergyM3T2 INTEGER,       /* Energy m^3 * T2 since targetTimepoint from 20|Instantaneous value|1|Manufacturer specific */
                #   targetVolume INTEGER,           /* Volume since targetTimepoint (in dm3) from 21|Instantaneous value|1|Volume;m;m^3 */
                #   targetPowerMax INTEGER,         /* Max power since targetTimepoint (in Watt) from 22|Maximum value|1|Power;100;W */
                #   targetFlowMax INTEGER,          /* Max flow since targetTimepoint (in dm3/h) from 23|Maximum value|1|Volume flow;m;m^3/h */
                #   targetTimepoint TEXT,           /* Target time point (date) from 24|Instantaneous value|1|Time Point (date) */
                #   rowCreatedTimestamp TEXT,
                #   UNIQUE (accessNumber,id,recordTimestamp) ON CONFLICT REPLACE
                # Insert row
                sql = (
                    "INSERT INTO aMbusMC302Record(id,address,recordTimestamp,accessNumber,rowCreatedTimestamp) VALUES ('%s',%d,'%s',%d,'%s');\n"
                    % (curr_id, curr_addr, curr_ts, curr_acc, creationtime)
                )
                try:
                    result = yield DBHelperSQLITE.dbExecute(
                        _dbHelper, sql, parseresp=None, script=False, returnrowcount=True
                    )
                except Exception as e:
                    _log.warning("Error during database operation : %s" % str(e))
                    pass
                _log.debug(sql)
                sql = (
                    "SELECT address,id,recordId,recordStorageNumber,recordUnit,recordValue FROM aMbusDataRecord WHERE id = '%s' AND recordTimestamp = '%s';\n"
                    % (curr_id, curr_ts)
                )
                records = []
                try:
                    records = yield DBHelperSQLITE.dbExecute(
                        _dbHelper, sql, parseresp=None, script=False, returnrowcount=False
                    )
                except Exception as e:
                    _log.warning("Error during database operation : %s" % str(e))
                    pass
                # reuse sqlbuf, empty it first with truncate + seek
                sqlbuf.truncate(0)
                sqlbuf.seek(0)
                tmpbuf = StringIO()
                for record in records:
                    tmpbuf.truncate(0)
                    tmpbuf.seek(0)
                    curr_recid = record[2]
                    curr_recstor = record[3]
                    curr_recunit = record[4]
                    curr_value = record[5]
                    factor = 1.0
                    if ";" in curr_recunit:
                        parts = curr_recunit.split(";")
                        factor = self.getFactor(str(parts[1]))
                    tmpbuf.write("UPDATE aMbusMC302Record SET \n")
                    if curr_recid == 0:  # skip "Firmware"
                        _log.debug("Skipping 0 = Firmware for now")
                        continue
                    elif curr_recid == 1:
                        tmpbuf.write("heatingEnergy = %d\n" % int(factor * curr_value))
                    elif curr_recid == 2:
                        tmpbuf.write("coolingEnergy = %d\n" % int(factor * curr_value))
                    elif curr_recid == 3:
                        tmpbuf.write("energyM3T1 = %d\n" % int(curr_value))
                    elif curr_recid == 4:
                        tmpbuf.write("energyM3T2 = %d\n" % int(curr_value))
                    elif curr_recid == 5:
                        tmpbuf.write(
                            "volume = %d\n" % int(factor * curr_value * 1000)
                        )  # volume converted from m3 to dm3
                    elif curr_recid == 6:
                        tmpbuf.write("hourCounter = %d\n" % int(curr_value))
                    elif curr_recid == 7:
                        tmpbuf.write("errorHourCounter = %d\n" % int(curr_value))
                    elif curr_recid == 8:
                        tmpbuf.write("temp1 = %f\n" % float(factor * curr_value))
                    elif curr_recid == 9:
                        tmpbuf.write("temp2 = %f\n" % float(factor * curr_value))
                    elif curr_recid == 10:
                        tmpbuf.write("deltaT1T2 = %f\n" % float(factor * curr_value))
                    elif curr_recid == 11:
                        tmpbuf.write("power = %d\n" % int(factor * curr_value))
                    elif curr_recid == 12:
                        tmpbuf.write("powerMax = %d\n" % int(factor * curr_value))
                    elif curr_recid == 13:
                        tmpbuf.write(
                            "flow = %d\n" % int(factor * curr_value * 1000)
                        )  # flow converted from m3/h to dm3/h
                    elif curr_recid == 14:
                        tmpbuf.write(
                            "flowMax = %d\n" % int(factor * curr_value * 1000)
                        )  # max flow converted from m3/h to dm3/h
                    elif curr_recid == 15:
                        tmpbuf.write("errorFlags = '%s'\n" % str(curr_value))
                    elif curr_recid == 16:
                        tmpbuf.write("timePoint = '%s'\n" % str(curr_value))
                    elif curr_recid == 17:
                        tmpbuf.write("targetHeatingEnergy = %d\n" % int(factor * curr_value))
                    elif curr_recid == 18:
                        tmpbuf.write("targetCoolingEnergy = %d\n" % int(factor * curr_value))
                    elif curr_recid == 19:
                        tmpbuf.write("targetEnergyM3T1 = %d\n" % int(curr_value))
                    elif curr_recid == 20:
                        tmpbuf.write("targetEnergyM3T2 = %d\n" % int(curr_value))
                    elif curr_recid == 21:
                        tmpbuf.write(
                            "targetVolume = %d\n" % int(factor * curr_value * 1000)
                        )  # volume converted from m3 to dm3
                    elif curr_recid == 22:
                        tmpbuf.write("targetPowerMax = %d\n" % int(factor * curr_value))
                    elif curr_recid == 23:
                        tmpbuf.write("targetFlowMax = %d\n" % int(factor * curr_value))
                    elif curr_recid == 24:
                        tmpbuf.write("targetTimepoint = '%s'\n" % str(curr_value))
                    elif curr_recid == 25:
                        _log.debug("Skipping record id 25")
                        continue
                    elif curr_recid == 26:
                        _log.debug("Skipping record id 26")
                        continue
                    elif curr_recid == 27:
                        _log.debug("Skipping record id 27")
                        continue
                    elif curr_recid == 28:
                        _log.debug("Skipping record id 28")
                        continue
                    elif curr_recid == 29:
                        _log.debug("Skipping record id 29")
                        continue
                    else:
                        continue
                    tmpbuf.write("WHERE id = '%s' AND recordTimestamp = '%s';\n" % (curr_id, curr_ts))
                    sqlbuf.write(tmpbuf.getvalue())
                _log.debug(sqlbuf.getvalue())
                result = []
                try:
                    result = yield DBHelperSQLITE.dbExecute(
                        _dbHelper, sqlbuf.getvalue(), parseresp=None, script=True, returnrowcount=True
                    )
                except Exception as e:
                    _log.warning("Error during database operation : %s" % str(e))
                    pass
        _log.info("Aggregation done")
        return

mbus_addresses = [1, 15, 31]  # m-bus primary addresses to request data from
mbus = MBus(device=b'/dev/ttyUSB0')  # the C library expects byte strings, a Python 3 string is unicode -> prefix the string with 'b'
# reply_dict = {}
reply_xml = {}
try:
    mbus.connect()
    for adr in mbus_addresses:
        try:
            mbus.send_request_frame(adr)
            reply_xml[adr] = mbus.frame_data_xml(mbus.frame_data_parse(mbus.recv_frame()))
            # reply_dict[adr] = xmltodict.parse(mbus.frame_data_xml(mbus.frame_data_parse(mbus.recv_frame())))  # xmltodict works directly on the byte-string xml from libmbus
        except Exception as e:
            print("Exception while requesting data from m-bus : type %s, desc: %s" % (type(e), str(e)))
            pass
    mbus.disconnect()
except Exception as e:
    print("Exception while requesting data from m-bus : type %s, desc: %s" % (type(e), str(e)))
    pass

# if reply_dict:
#     for k, resp in enumerate(reply_dict):
#         print("i: %s key:%s value:\n%s" % (k, resp, reply_dict[resp]))
if reply_xml:
    for k, resp in enumerate(reply_xml):
        print("i: %s key:%s value:\n%s" % (k, resp, reply_xml[resp]))
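
# A follow-up sketch under the assumption that xmltodict is installed and that libmbus returns its
# usual XML layout with an "MBusData" root and a "SlaveInformation" block (the same structure that
# TaskRunner.readMbus() above relies on). It converts the raw XML replies collected by the loop
# above into dicts and prints the slave id of each response.
import xmltodict

for adr, xml in reply_xml.items():
    if not xml:
        continue
    parsed = xmltodict.parse(xml)  # works directly on the byte-string XML from libmbus
    print("address %s -> slave id %s" % (adr, parsed["MBusData"]["SlaveInformation"]["Id"]))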

if debug:
    print("mbus = " + str(mbus))
mbus.serial_set_baudrate(2400)
res = mbus.send_ping_frame(0xFD, 1)
print(res)
res = mbus.send_ping_frame(0, 1)
print(res)
mbus.send_request_frame(address)
reply = mbus.recv_frame()
if debug:
    print("reply =", reply)
reply_data = mbus.frame_data_parse(reply)
if debug:
    print("reply_data =", reply_data)
xml_buff = mbus.frame_data_xml(reply_data)
print("xml_buff =", xml_buff)
mbus.frame_data_free(reply_data)
mbus.disconnect()

class Mbus:
    """Manages a single mbus-connector device."""

    def __init__(self, gateway):
        self._gateway = gateway
        self._mbus = None
        self._setupMbus()
        if self._mbus:
            self._mbus.connect()
            baudRate = config.getConfig('mbus', 'baudrate', None)
            if baudRate:
                self._mbus._libmbus.serial_set_baudrate(self._mbus.handle, int(baudRate))
        self._devicesLock = threading.Lock()
        self._devices = {}  # the devices that need to be queried, key = devId, value = DeviceConfig object

    def _setupMbus(self):
        """Create the mbus connection."""
        try:
            path = config.getConfig('mbus', 'libpath', None)
            if not config.configs.has_option('mbus', 'device'):
                if not config.configs.has_option('mbus', 'host'):
                    logger.error('mbus configuration missing: device or host')
                    return
                else:
                    if config.configs.has_option('mbus', 'port'):
                        self._mbus = MBus(host=config.configs.get('mbus', 'host'), port=config.configs.get('mbus', 'port'), libpath=path)
                    else:
                        self._mbus = MBus(host=config.configs.get('mbus', 'host'), libpath=path)
            else:
                self._mbus = MBus(device=config.configs.get('mbus', 'device'), libpath=path)
        except:
            logger.exception("failed to setup mbus")

    def stop(self):
        if self._mbus:
            self._mbus.disconnect()

    def sample(self):
        """Walk over all the devices. This is done thread-safe, so a scan can be performed from
        another thread without worrying about sampling at the same time.
        :return: the number of seconds until we need to sample 1 or more devices"""
        nextRunAt = datetime.datetime.max
        self._devicesLock.acquire()
        try:
            for devId, dev in self._devices.iteritems():
                try:
                    runAt = datetime.datetime.now()
                    if dev.nextRunAt <= runAt:  # only query the devices that need to be run in this time slot, each device has a different query frequency
                        reply = self.getRecords(devId, False)
                        if reply:
                            for asset in dev.definition['assets']:  # we start from the definition found on the cloud, this contains all the assets that we need to support
                                if asset['name'].isdigit():  # we only need to process assets that have an int as id, these represent the data records
                                    id = int(asset['name'])
                                    if _isAccumulative(asset):
                                        value = _calculateAccumulative(reply["DataRecord"][id], dev, id)
                                    else:
                                        value = reply["DataRecord"][id]["Value"]
                                    if value != None:
                                        calculation = _tryGetCalculation(asset)
                                        if calculation:
                                            value = eval(calculation)
                                        self._gateway.send(value, devId, str(id))
                        dev.lastRunAt = runAt
                    if nextRunAt > dev.nextRunAt:
                        nextRunAt = dev.nextRunAt
                except:
                    logger.exception("failed to process device: {}".format(devId))
        finally:
            self._devicesLock.release()
        if nextRunAt != datetime.datetime.max:
            return (nextRunAt - datetime.datetime.now()).total_seconds()
        else:
            # If there was no device in this time range (e.g. no devices yet), schedule the next run at the
            # default interval so we don't block after the first run; without this we would never query again
            # after the first device was added.
            return DefaultSamplingFrequency
    def _getRequestFrame(self, address):
        """Build an MBusFrame object that can be used to request the data from a device."""
        frame = MBusFrame()
        frame.type = mbusLow.MBUS_FRAME_TYPE_SHORT
        frame.start1 = mbusLow.MBUS_FRAME_SHORT_START
        frame.stop = mbusLow.MBUS_FRAME_STOP
        frame.control = MBUS_CONTROL_MASK_REQ_UD2 | MBUS_CONTROL_MASK_DIR_M2S | MBUS_CONTROL_MASK_FCV | MBUS_CONTROL_MASK_FCB
        frame.address = address
        logger.info("address = {}".format(frame.address))
        return frame

    def _cleanFrames(self, data):
        """Frame data contains pointers that need to be deleted (C data)."""
        try:
            for x in data:
                if x.data_var.record:
                    self._mbus._libmbus.data_record_free(x.data_var.record)  # need to clean this up, it's a C pointer
        except:
            logger.exception("failed to clean up data")

    def getRecords(self, address, full):
        """Gets all the data records from the device at the specified address.
        :param address: the address to get the records for
        :param full: when true, parse all the data, otherwise only the values
        :type full: boolean
        :return: a dict containing all the data that was found
        """
        mbHandle = self._mbus.handle
        mb = self._mbus._libmbus
        retry = 0
        reply = MBusFrame()
        replies = []
        frame = self._getRequestFrame(address)
        more_frames = True
        try:
            while more_frames:
                if retry > 3:
                    return None
                if mb.send_frame(mbHandle, frame) == -1:
                    raise Exception("failed to send mbus frame")
                result = mb.recv_frame(mbHandle, reply)
                if result == mbusLow.MBUS_RECV_RESULT_OK:
                    logger.info("found response")
                    retry = 0
                    mb.purge_frames(mbHandle)
                elif result == mbusLow.MBUS_RECV_RESULT_TIMEOUT:
                    retry += 1
                    continue
                elif result == mbusLow.MBUS_RECV_RESULT_INVALID:
                    logger.warning("received invalid m-bus response frame")
                    retry += 1
                    mb.purge_frames(mbHandle)
                    continue
                else:
                    logger.error("Failed to receive m-bus response frame")
                    return None
                reply_data = MBusFrameData()
                logger.info(reply)
                if mb.frame_data_parse(reply, reply_data) == -1:
                    logger.error("m-bus data parse error")
                    return None
                else:
                    replies.append(reply_data)
                    more_frames = False
                    if reply_data.type == mbusLow.MBUS_DATA_TYPE_VARIABLE:
                        if reply_data.data_var.more_records_follow:
                            more_frames = True
                            next = MBusFrame()
                            reply = next
                            frame.control ^= mbusLow.MBUS_CONTROL_MASK_FCB
            return binConverter.toDict(replies, full)
        finally:
            self._cleanFrames(replies)  # need to clean up the memory to prevent a leak

    def getRecordstest(self, address, full):
        """For testing."""
        if address == 6:
            mb = self._mbus._libmbus
            frames = []
            import csv
            try:
                with open("test/data.csv", 'rb') as file:
                    reader = csv.reader(file, delimiter=',')
                    for row in reader:
                        frame = MBusFrame()
                        frame.start1 = int(row[0])
                        frame.length1 = int(row[1])
                        frame.length2 = int(row[2])
                        frame.start2 = int(row[3])
                        frame.control = int(row[4])
                        frame.address = int(row[5])
                        frame.control_infomation = int(row[6])
                        frame.checksum = int(row[7])
                        frame.stop = int(row[8])
                        tempType = ctypes.c_uint8 * 252
                        frame.data = tempType()
                        list = [int(x) for x in row[9].strip().split(' ')]
                        for x in range(0, len(list)):
                            frame.data[x] = list[x]
                        frame.data_size = int(row[10])
                        frame.type = int(row[11])
                        frame.timestamp = int(row[12])
                        logger.info(frame)
                        reply_data = MBusFrameData()
                        if mb.frame_data_parse(frame, reply_data) == -1:
                            logger.error("m-bus data parse error")
                        else:
                            frames.append(reply_data)
                res = binConverter.toDict(frames, True)
                return res
            finally:
                self._cleanFrames(frames)

    def loadDevicesFromCloud(self, existing):
        """Load the devices from the cloud, together with the config."""
        self._devicesLock.acquire()
        try:
            for dev in existing:
                rec = DeviceConfig(datetime.datetime.now())
                rec.definition = dev
                asset = next((x for x in dev['assets'] if x['name'].encode('ascii', 'ignore') == "sample_frequency"), None)
                if asset and asset['state'] and 'value' in asset['state']:  # if no asset defined, we use the default of 5 minutes
                    rec.sampleFreq = asset['state']['value']
                    logger.info("sample frequency for {} = {}".format(dev["id"], rec.sampleFreq))
                self._devices[int(dev['id'])] = rec  # id of device has been localized to gateway.
        finally:
            self._devicesLock.release()
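
# A hypothetical polling loop (not part of the original module) illustrating how the value returned
# by Mbus.sample() could drive the wait between sampling rounds; run_sampling_loop and 'connector'
# are assumed names, and sample() is taken to return the number of seconds until the next device is due.
import time

def run_sampling_loop(connector):
    while True:
        delay = connector.sample()   # seconds until the next device needs to be sampled
        time.sleep(max(delay, 0))    # don't sleep a negative amount if we are already overdue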