def run(self): log.info("[" + self.gateway_id + "] starting mysensors " + self.gateway_type + " gateway") # load previously assigned node ids if db.exists(self.assigned_ids_key): self.assigned_ids = db.rangebyscore(self.assigned_ids_key, "-inf", "+inf", withscores=False) errors = 0 while True: # connect to the configured gateway if not self.connected: self.connected = self.connect() if not self.connected: # something went wrong while connecting, sleep for a while and then try again time.sleep(sleep_on_error) continue if self.gateway_type == "serial" or self.gateway_type == "ethernet": # for serial and ethernet manage the loop manually by reading every single message message = self.read() if message is None: # something went wrong while reading the message, increase the error counter errors = errors + 1 time.sleep(1) if errors > 10: # too many consecutive errors, sleep for a while and then try to reconnect log.error( "[" + self.gateway_id + "] Too many errors, will try reconnecting in a while" ) time.sleep(sleep_on_error) self.connected = False # go and read a new message continue # parse the message parsed = self.parse(message) if parsed is None: # something went wrong while parsing the message, increase the error counter errors = errors + 1 time.sleep(1) if errors > 10: # too many consecutive errors, sleep for a while and then try to reconnect log.error( "[" + self.gateway_id + "] Too many errors, will try reconnecting in a while" ) time.sleep(sleep_on_error) self.connected = False # go and read a new message continue # parsed correctly, reset the error counter errors = 0 elif self.gateway_type == "mqtt": # for mqtt the loop is managed automatically with callbacks self.gateway.loop() # the loop should never end, if it will, sleep for a while then try to reconnect time.sleep(sleep_on_error) self.connected = False continue
def summarize(sensor, timeframe, start, end):
    """Aggregate a sensor's stored measures over the given timeframe.

    Reads the measures between start and end, computes the statistics
    enabled in sensor["summarize"] (avg, min/max, rate, sum, count,
    count_unique) and writes each result back under the corresponding
    "<key>:<stat>" database key, timestamped at the start of the window.

    Raises ValueError if timeframe is neither "hour" nor "day".
    """
    # prepare the database schema to use
    if timeframe == "hour":
        # hourly summaries are computed from the raw measures
        key_to_read = sensor["db_sensor"]
        key_to_write = sensor["db_sensor"] + ":hour"
    elif timeframe == "day":
        # daily summaries are computed from the hourly averages
        key_to_read = sensor["db_sensor"] + ":hour:avg"
        key_to_write = sensor["db_sensor"] + ":day"
    else:
        # fail fast: the original code fell through to a NameError on key_to_read
        raise ValueError("unsupported timeframe: " + str(timeframe))
    # retrieve from the database the data based on the given timeframe
    data = db.rangebyscore(key_to_read, start, end, withscores=True)
    # split between values and timestamps (each entry is [timestamp, value])
    timestamps = [entry[0] for entry in data]
    values = [entry[1] for entry in data]
    # every derived value is timestamped at the beginning of the window
    timestamp = start
    # placeholders shown in the debug log for statistics that are not enabled
    # (renamed from min/avg/max/... to avoid shadowing the builtins)
    min_value = avg_value = max_value = rate_value = sum_value = count_value = count_unique_value = "-"
    if "avg" in sensor["summarize"] and sensor["summarize"]["avg"]:
        # calculate avg
        avg_value = utils.avg(values)
        db.deletebyscore(key_to_write + ":avg", start, end)
        db.set(key_to_write + ":avg", avg_value, timestamp)
    if "min_max" in sensor["summarize"] and sensor["summarize"]["min_max"]:
        # calculate min
        min_value = utils.min(values)
        db.deletebyscore(key_to_write + ":min", start, end)
        db.set(key_to_write + ":min", min_value, timestamp)
        # calculate max
        max_value = utils.max(values)
        db.deletebyscore(key_to_write + ":max", start, end)
        db.set(key_to_write + ":max", max_value, timestamp)
    if "rate" in sensor["summarize"] and sensor["summarize"]["rate"]:
        # calculate the rate of change
        rate_value = utils.velocity(timestamps, values)
        db.deletebyscore(key_to_write + ":rate", start, end)
        db.set(key_to_write + ":rate", rate_value, timestamp)
    if "sum" in sensor["summarize"] and sensor["summarize"]["sum"]:
        # calculate the sum
        sum_value = utils.sum(values)
        db.deletebyscore(key_to_write + ":sum", start, end)
        db.set(key_to_write + ":sum", sum_value, timestamp)
    if "count" in sensor["summarize"] and sensor["summarize"]["count"]:
        # count the values
        count_value = utils.count(values)
        db.deletebyscore(key_to_write + ":count", start, end)
        db.set(key_to_write + ":count", count_value, timestamp)
    if "count_unique" in sensor["summarize"] and sensor["summarize"]["count_unique"]:
        # count the unique values
        count_unique_value = utils.count_unique(values)
        db.deletebyscore(key_to_write + ":count_unique", start, end)
        db.set(key_to_write + ":count_unique", count_unique_value, timestamp)
    log.debug("[" + sensor["module_id"] + "][" + sensor["group_id"] + "][" +
              sensor["sensor_id"] + "] (" + utils.timestamp2date(timestamp) +
              ") updating summary of the " + timeframe +
              " (min,avg,max,rate,sum,count,count_unique): (" + str(min_value) +
              "," + str(avg_value) + "," + str(max_value) + "," + str(rate_value) +
              "," + str(sum_value) + "," + str(count_value) + "," +
              str(count_unique_value) + ")")
def upgrade_2_0(): ######## START OF CONFIGURATION # remote all data from the target database empty_target_db = False # migrate history data migrate_history = True # history start timestamp to migrate, "-inf" for all history_start_timestamp = "-inf" # historu end timestamp to migrate history_end_timestamp = utils.now() # migrate recent data migrate_recent = True # database number from which we are migrating db_from = 1 # database number into which we are migrating db_to = 2 # debug debug = False # keys to migrate history (from key -> to key) # destination key format: myHouse:<module_id>:<group_id>:<sensor_id> history = { 'home:weather:outdoor:temperature:day:max': 'myHouse:outdoor:temperature:external:day:max', 'home:weather:outdoor:temperature:day:min': 'myHouse:outdoor:temperature:external:day:min', 'home:weather:outdoor:temperature:day': 'myHouse:outdoor:temperature:external:day:avg', 'home:weather:indoor:temperature:day:max': 'myHouse:indoor:temperature:living_room:day:max', 'home:weather:indoor:temperature:day:min': 'myHouse:indoor:temperature:living_room:day:min', 'home:weather:indoor:temperature:day': 'myHouse:indoor:temperature:living_room:day:avg', 'home:weather:almanac:record:min': 'myHouse:outdoor:temperature:record:day:min', 'home:weather:almanac:record:max': 'myHouse:outdoor:temperature:record:day:max', 'home:weather:almanac:normal:min': 'myHouse:outdoor:temperature:normal:day:min', 'home:weather:almanac:normal:max': 'myHouse:outdoor:temperature:normal:day:max', 'home:weather:outdoor:condition:day': 'myHouse:outdoor:temperature:condition:day:avg', } # keys to migrate recent data (from key -> to key) recent = { 'home:weather:outdoor:temperature:measure': 'myHouse:outdoor:temperature:external', 'home:weather:indoor:temperature:measure': 'myHouse:indoor:temperature:living_room', 'home:weather:outdoor:condition:measure': 'myHouse:outdoor:temperature:condition', } ######## END OF CONFIGURATION conf = config.get_config(validate=False) print "[Migration from 
v1.x to v2.0]\n" input( "WARNING: which data will be migrate is defined within this script, on top of the upgrade_20() function.\nIndividual sensors to migrate must be specified manually\nPlase ensure you have reviewed all the settings first!\n\nPress Enter to continue..." ) backup("1.0") # empty the target database first if empty_target_db: print "Flushing target database..." change_db(db_to) db.flushdb() # for each history key to migrate print "Migrating historical data..." for key_from in history: if not migrate_history: break key_to = history[key_from] print "\tMigrating " + key_from + " -> " + key_to # retrieve all the data change_db(db_from) data = db.rangebyscore(key_from, history_start_timestamp, history_end_timestamp, withscores=True) change_db(db_to) count = 0 # for each entry for entry in data: timestamp = utils.day_start(utils.timezone(entry[0])) value = utils.normalize(entry[1]) # store it into the new database if debug: print "[HISTORY][" + key_to + "] (" + utils.timestamp2date( timestamp) + ") " + str(value) db.set(key_to, value, timestamp) count = count + 1 print "\t\tdone, " + str(count) + " values" # for each recent key to migrate print "Migrating recent data..." 
for key_from in recent: if not migrate_recent: break key_to = recent[key_from] print "\tMigrating " + key_from + " -> " + key_to # retrieve the recent data change_db(db_from) data = db.rangebyscore(key_from, utils.now() - 2 * conf["constants"]["1_day"], utils.now(), withscores=True) change_db(db_to) count = 0 # for each entry for entry in data: timestamp = utils.timezone(entry[0]) value = utils.normalize(entry[1]) if debug: print "[RECENT][" + key_to + "] (" + utils.timestamp2date( timestamp) + ") " + str(value) # skip it if the same value is already stored old = db.rangebyscore(key_to, timestamp, timestamp) if len(old) > 0: continue # store it into the new database db.set(key_to, value, timestamp) # create the sensor data structure key_split = key_to.split(":") group_id = key_split[-2] sensor_id = key_split[-1] module_id = key_split[-4] sensor = utils.get_sensor(module_id, group_id, sensor_id) sensor['module_id'] = module_id sensor['group_id'] = group_id sensor['db_group'] = conf["constants"]["db_schema"][ "root"] + ":" + sensor["module_id"] + ":" + sensor["group_id"] sensor[ 'db_sensor'] = sensor['db_group'] + ":" + sensor["sensor_id"] import sensors sensors.summarize(sensor, 'hour', utils.hour_start(timestamp), utils.hour_end(timestamp)) count = count + 1 print "\t\tdone, " + str(count) + " values" print "Upgrading database..." version_key = conf["constants"]["db_schema"]["version"] db.set_simple(version_key, "2.0")
def data_get_data(module_id, group_id, sensor_id, timeframe, stat):
    """Return the requested sensor's data as a JSON-encoded list.

    timeframe selects the window/resolution (realtime, recent, history,
    short_history, today, yesterday, forecast); stat selects the summary
    key (e.g. "avg", "min", "max") or "range" for combined min+max.
    Every path returns a JSON string (an empty list on errors).
    """
    data = []
    sensor = utils.get_sensor(module_id, group_id, sensor_id)
    if sensor is None:
        log.error("[" + module_id + "][" + group_id + "][" + sensor_id + "] sensor not found")
        return json.dumps(data)
    if "plugin" in sensor and "poll_on_demand" in sensor["plugin"] and sensor["plugin"]["poll_on_demand"] and timeframe == "realtime":
        # the sensor needs to be polled on demand
        run(module_id, group_id, sensor_id, "save")
    # get the parameters for the requested timeframe
    # (key_suffix renamed from "range" to avoid shadowing the builtin)
    if timeframe == "realtime":
        # recent raw measures up to now
        key_suffix = ""
        start = utils.realtime()
        end = utils.now()
        withscores = True
    elif timeframe == "recent":
        # recent hourly measures up to now
        key_suffix = ":hour"
        start = utils.recent()
        end = utils.now()
        withscores = True
    elif timeframe == "history":
        # historical daily measures up to now
        key_suffix = ":day"
        start = utils.history()
        end = utils.now()
        withscores = True
    elif timeframe == "short_history":
        # historical daily measures over a shorter window
        key_suffix = ":day"
        start = utils.history(conf["general"]["timeframes"]["short_history_days"])
        end = utils.now()
        withscores = True
    elif timeframe == "today":
        # today's measure
        key_suffix = ":day"
        start = utils.day_start(utils.now())
        end = utils.day_end(utils.now())
        withscores = False
    elif timeframe == "yesterday":
        # yesterday's measure
        key_suffix = ":day"
        start = utils.day_start(utils.yesterday())
        end = utils.day_end(utils.yesterday())
        withscores = False
    elif timeframe == "forecast":
        # next days' measures
        key_suffix = ":day"
        start = utils.day_start(utils.now())
        end = utils.day_start(utils.now() + (conf["general"]["timeframes"]["forecast_days"] - 1) * conf["constants"]["1_day"])
        withscores = True
    else:
        # FIX: unknown timeframe used to return the raw list while every
        # other path returns a JSON string; keep the return type consistent
        return json.dumps(data)
    # define the key to request
    key = conf["constants"]["db_schema"]["root"] + ":" + module_id + ":" + group_id + ":" + sensor_id + key_suffix
    requested_stat = ":" + stat
    # if a range is requested, start asking for the min
    if stat == "range":
        requested_stat = ":min"
    # realtime data has no per-stat keys
    if timeframe == "realtime":
        requested_stat = ""
    # request the data
    data = db.rangebyscore(
        key + requested_stat, start, end,
        withscores=withscores, milliseconds=True,
        formatter=conf["constants"]["formats"][sensor["format"]]["formatter"])
    if stat == "range" and len(data) > 0:
        # if a range is requested, ask for the max and combine the results
        data_max = db.rangebyscore(
            key + ":max", start, end,
            withscores=False, milliseconds=True,
            formatter=conf["constants"]["formats"][sensor["format"]]["formatter"])
        for i, item in enumerate(data):
            # ensure data_max has a correspondent value
            if i < len(data_max):
                if isinstance(item, list):
                    # withscores entries are [timestamp, min]; append the max
                    data[i].append(data_max[i])
                else:
                    data.append(data_max[i])
    return json.dumps(data)
def store(sensor, measures, ifnotexists=False):
    """Store a list of measures for a sensor, applying retention and
    de-duplication policies, then refresh the hour/day summaries.

    measures is a list of dicts with at least "key" and "value"
    ("timestamp" is defaulted to now); None is silently ignored.
    When ifnotexists is True, a measure whose key already exists is skipped.
    """
    # if an exception occurred upstream, skip this sensor
    if measures is None:
        return
    # for each returned measure
    for measure in measures:
        # set the timestamp to now if not already set
        if "timestamp" not in measure:
            measure["timestamp"] = utils.now()
        # define the key to store the value
        key = sensor["db_group"] + ":" + measure["key"]
        # if ifnotexists is set, check if the key exists
        if ifnotexists and db.exists(key):
            log.debug("[" + sensor["module_id"] + "][" + sensor["group_id"] + "][" + sensor["sensor_id"] + "] key already exists, ignoring new value")
            # FIX: was "return", which dropped every remaining measure in the
            # list; the key is per-measure, so only this measure is skipped
            continue
        # delete previous values if needed
        realtime_count = conf["sensors"]["retention"]["realtime_count"]
        if "retention" in sensor and "realtime_count" in sensor["retention"]:
            realtime_count = sensor["retention"]["realtime_count"]
        if realtime_count > 0:
            db.deletebyrank(key, 0, -realtime_count)
        # if only measures with a newer timestamp than the latest can be added, apply the policy
        realtime_new_only = conf["sensors"]["retention"]["realtime_new_only"]
        if "retention" in sensor and "realtime_new_only" in sensor["retention"]:
            realtime_new_only = sensor["retention"]["realtime_new_only"]
        if realtime_new_only:
            # retrieve the latest measure's timestamp
            last = db.range(key, -1, -1)
            if len(last) > 0:
                last_timestamp = last[0][0]
                # if the measure's timestamp is older or the same, skip it
                if measure["timestamp"] <= last_timestamp:
                    log.debug("[" + sensor["module_id"] + "][" + sensor["group_id"] + "][" + sensor["sensor_id"] + "] (" + utils.timestamp2date(measure["timestamp"]) + ") old event, ignoring " + measure["key"] + ": " + str(measure["value"]))
                    continue
        # check if there is already something stored with the same timestamp
        old = db.rangebyscore(key, measure["timestamp"], measure["timestamp"])
        if len(old) > 0:
            if old[0][1] == measure["value"]:
                # if the value is also the same, skip it
                log.debug("[" + sensor["module_id"] + "][" + sensor["group_id"] + "][" + sensor["sensor_id"] + "] (" + utils.timestamp2date(measure["timestamp"]) + ") already in the database, ignoring " + measure["key"] + ": " + str(measure["value"]))
                continue
            else:
                # same timestamp but different value, remove the old value so to store the new one
                db.deletebyscore(key, measure["timestamp"], measure["timestamp"])
        # store the value into the database
        log.info("[" + sensor["module_id"] + "][" + sensor["group_id"] + "][" + sensor["sensor_id"] + "] (" + utils.timestamp2date(measure["timestamp"]) + ") saving " + measure["key"] + ": " + utils.truncate(str(measure["value"])) + conf["constants"]["formats"][sensor["format"]]["suffix"])
        db.set(key, measure["value"], measure["timestamp"])
        # re-calculate the derived measures for the hour/day
        if "summarize" in sensor:
            summarize(sensor, 'hour', utils.hour_start(measure["timestamp"]), utils.hour_end(measure["timestamp"]))
            summarize(sensor, 'day', utils.day_start(measure["timestamp"]), utils.day_end(measure["timestamp"]))