def data_get_current(module_id, group_id, sensor_id):
    """Return the most recent measure of a sensor.

    Returns a JSON-encoded list for plain formats, the raw decoded bytes
    for "image" sensors, and a JSON-encoded parsed calendar for
    "calendar" sensors. Returns a JSON empty list if the sensor is not
    found.
    """
    data = []
    sensor = utils.get_sensor(module_id, group_id, sensor_id)
    if sensor is None:
        log.error("[" + module_id + "][" + group_id + "][" + sensor_id + "] sensor not found")
        return json.dumps(data)
    # poll-on-demand sensors are refreshed right before reading them
    if "plugin" in sensor and "poll_on_demand" in sensor["plugin"] and sensor["plugin"]["poll_on_demand"]:
        run(module_id, group_id, sensor_id, "save")
    key = conf["constants"]["db_schema"]["root"] + ":" + module_id + ":" + group_id + ":" + sensor_id
    # return the latest measure
    data = db.range(key, withscores=False, milliseconds=True, formatter=conf["constants"]["formats"][sensor["format"]]["formatter"])
    # if an image, decode it and return the raw bytes
    if sensor["format"] == "image":
        # guard against an empty result set to avoid an IndexError
        if not data:
            return ""
        return base64.b64decode(data[0])
    # if a calendar, return the current value
    elif sensor["format"] == "calendar":
        return json.dumps(utils.parse_calendar(data))
    else:
        return json.dumps(data)
def data_send(module_id, group_id, sensor_id, value, force=False):
    """Send a value/message to a sensor through its plugin.

    Returns the JSON string "OK" on success, "KO" on any failure
    (sensor missing, plugin without a send capability, send error).
    """
    sensor = utils.get_sensor(module_id, group_id, sensor_id)
    if sensor is None:
        log.error("[" + module_id + "][" + group_id + "][" + sensor_id + "] sensor not found")
        return json.dumps("KO")
    log.debug("[" + module_id + "][" + group_id + "][" + sensor_id + "] sending message: " + str(value))
    sensor = init_sensor(sensor)
    if not hasattr(plugins[sensor["plugin"]["plugin_name"]], 'send'):
        log.error("the plugin " + sensor["plugin"]["plugin_name"] + " does not allow sending messages")
        return json.dumps("KO")
    try:
        try:
            # invoke the plugin-specific send function
            plugins[sensor["plugin"]["plugin_name"]].send(sensor, value, force=force)
        except TypeError:
            # not all the plugins support "force", call it without it
            plugins[sensor["plugin"]["plugin_name"]].send(sensor, value)
        return json.dumps("OK")
    except Exception as e:
        log.error("unable to send " + str(value) + ": " + utils.get_exception(e))
        # report the failure to the caller instead of falling through and
        # implicitly returning None
        return json.dumps("KO")
def sensor_get_current(module_id, group_id, sensor_id):
    """Serve the current value of a sensor over HTTP.

    Image sensors are served with an image content type, everything else
    as HTML text.
    """
    sensor = utils.get_sensor(module_id, group_id, sensor_id)
    is_image = sensor is not None and sensor["format"] == "image"
    content_type = "image" if is_image else "text/html"
    payload = sensors.data_get_current(module_id, group_id, sensor_id)
    return Response(payload, mimetype=content_type)
def data_run(module_id, group_id, sensor_id, action):
    """Execute the given action on a sensor; return "OK"/"KO" as JSON."""
    if utils.get_sensor(module_id, group_id, sensor_id) is None:
        log.error("[" + module_id + "][" + group_id + "][" + sensor_id + "] sensor not found")
        return json.dumps("KO")
    log.debug("[" + module_id + "][" + group_id + "][" + sensor_id + "] executing: " + str(action))
    init_plugins()
    run(module_id, group_id, sensor_id, action)
    return json.dumps("OK")
def sensors(request):
    """Django view listing 1-wire sensors and registering new ones.

    On POST with 'add_sensor', stores the sensor and redirects on
    success. Otherwise it enumerates the sensors visible on the OW
    server; family 28 devices get their temperature reading, family 29
    devices their PIO_ALL state, and known sensors their stored alias.
    """
    slist = []
    message = ''
    sensors = []
    if request.method == 'POST' and 'add_sensor' in request.POST:
        # save sensor
        if utils.add_sensor(request.POST['sensor_address'], request.POST['sensor_alias'], int(request.POST['sensor_family']), request.POST['sensor_service']):
            return redirect('/sensors/')
        else:
            message = 'sensor save error'
            d = dict(request=request, slist=slist, message=message)
            return render_to_response('web/sensors.html', d, context_instance=RequestContext(request))
    try:
        ow.init(utils.owserver)
        sensors = ow.Sensor('/').sensorList()
        for s in sensors:
            exist = utils.get_sensor(s.address)
            entry = {'address': s.address, 'family': s.family}
            family = int(s.family)
            if family == 28:
                # temperature-capable device
                entry['temperature'] = s.temperature
            elif family == 29:
                # switch device exposing PIO_ALL
                entry['PIO_ALL'] = s.PIO_ALL
            # only known families carry the stored alias, as before
            if family in (28, 29) and exist:
                entry['alias'] = exist.alias
            slist.append(entry)
    except Exception:
        # narrowed from a bare except: don't swallow SystemExit/KeyboardInterrupt
        message = 'OWserver error'
    d = dict(request=request, slist=slist, message=message, sensors=sensors)
    return render_to_response('web/sensors.html', d, context_instance=RequestContext(request))
def data_get_current_timestamp(module_id, group_id, sensor_id):
    """Return, as a JSON list, how long ago the latest measure was taken.

    The result is a single-element list with the difference between now
    and the newest entry's timestamp, or an empty list when the sensor
    is unknown or has no data.
    """
    sensor = utils.get_sensor(module_id, group_id, sensor_id)
    if sensor is None:
        log.error("[" + module_id + "][" + group_id + "][" + sensor_id + "] sensor not found")
        return json.dumps([])
    key = ":".join([conf["constants"]["db_schema"]["root"], module_id, group_id, sensor_id])
    data = db.range(key, withscores=True, milliseconds=True)
    if not data:
        return json.dumps(data)
    # scores come back in milliseconds, convert before diffing
    elapsed = utils.timestamp_difference(utils.now(), data[0][0] / 1000)
    return json.dumps([elapsed])
def data_get_current_image(module_id, group_id, sensor_id, night_day):
    """Return the raw content of the PNG mapped to a sensor's current value.

    The current value selects the image file name; when night_day is
    true and it is currently night, the "nt_" variant is served instead.
    Returns a JSON empty string if the sensor is missing and an empty
    string when no data is available.
    """
    sensor = utils.get_sensor(module_id, group_id, sensor_id)
    if sensor is None:
        log.error("[" + module_id + "][" + group_id + "][" + sensor_id + "] sensor not found")
        return json.dumps("")
    data = json.loads(data_get_current(module_id, group_id, sensor_id))
    if len(data) == 0:
        return ""
    filename = str(data[0])
    if night_day and utils.is_night():
        filename = "nt_" + filename
    path = conf["constants"]["web_dir"] + "/images/" + sensor_id + "_" + str(filename) + ".png"
    # open in binary mode: PNG data is not text ('r' corrupts/breaks it on
    # py3 and on Windows); the with block closes the file, no explicit
    # close() needed; avoid shadowing the builtin name "file"
    with open(path, 'rb') as image_file:
        return image_file.read()
def data_get_calendar(module_id, group_id, sensor_id):
    """Return the latest stored calendar data for a sensor as a JSON list."""
    sensor = utils.get_sensor(module_id, group_id, sensor_id)
    if sensor is None:
        log.error("[" + module_id + "][" + group_id + "][" + sensor_id + "] sensor not found")
        return json.dumps([])
    key = conf["constants"]["db_schema"]["root"] + ":" + module_id + ":" + group_id + ":" + sensor_id
    formatter = conf["constants"]["formats"][sensor["format"]]["formatter"]
    # fetch the latest measure, already formatted for this sensor
    latest = db.range(key, withscores=False, milliseconds=True, formatter=formatter)
    return json.dumps(latest)
def data_set(module_id, group_id, sensor_id, value, ifnotexists=False):
    """Normalize a value and store it as a new measure for the sensor.

    Returns the JSON string "OK" on success or "KO" when the sensor
    cannot be found.
    """
    sensor = utils.get_sensor(module_id, group_id, sensor_id)
    if sensor is None:
        log.error("[" + module_id + "][" + group_id + "][" + sensor_id + "] sensor not found")
        return json.dumps("KO")
    log.debug("[" + module_id + "][" + group_id + "][" + sensor_id + "] value to store: " + str(value))
    sensor = init_sensor(sensor)
    # prepare a single measure, normalized according to the sensor format
    formatter = conf["constants"]["formats"][sensor["format"]]["formatter"]
    measure = {
        "key": sensor["sensor_id"],
        "value": utils.normalize(value, formatter),
    }
    # store it
    store(sensor, [measure], ifnotexists=ifnotexists)
    return json.dumps("OK")
def run(module_id, group_id, sensor_id, action):
    """Execute a scheduled action (poll/parse/save/summarize/expire) on a sensor.

    Errors are logged, never raised, so a failing sensor does not stop
    the scheduler.
    """
    try:
        # ensure the group and sensor exist BEFORE initializing it:
        # the original called init_sensor(None) and then crashed again in
        # the except handler while formatting the log message
        sensor = utils.get_sensor(module_id, group_id, sensor_id)
        if sensor is None:
            log.error("[" + module_id + "][" + group_id + "][" + sensor_id + "] not found, skipping it")
            return
        sensor = init_sensor(sensor)
        # execute the action
        log.debug("[" + sensor["module_id"] + "][" + sensor["group_id"] + "][" + sensor["sensor_id"] + "] requested " + action)
        if action == "poll":
            # read the measure (will be stored into the cache)
            poll(sensor)
        elif action == "parse":
            # just parse the output
            log.info(parse(sensor))
        elif action == "save":
            # save the parsed output into the database
            save(sensor)
        elif action == "force_save":
            # save the parsed output into the database forcing polling the measure
            save(sensor, force=True)
        elif action == "summarize_hour":
            # every hour calculate and save min,max,avg of the previous hour
            summarize(sensor, 'hour', utils.hour_start(utils.last_hour()), utils.hour_end(utils.last_hour()))
        elif action == "summarize_day":
            # every day calculate and save min,max,avg of the previous day (using hourly averages)
            summarize(sensor, 'day', utils.day_start(utils.yesterday()), utils.day_end(utils.yesterday()))
        elif action == "expire":
            # purge old data from the database
            expire(sensor)
        else:
            log.error("Unknown action " + action)
    except Exception as e:
        # use the input identifiers here: sensor may be None or partially built
        log.warning("[" + module_id + "][" + group_id + "][" + sensor_id + "] unable to run " + action + ": " + utils.get_exception(e))
def upgrade_2_0():
    """Migrate v1.x sensor data into the v2.0 database schema.

    Which keys are migrated, the source/target database numbers and the
    time range are hard-coded in the configuration section below; review
    them before running. Historical daily aggregates and the last two
    days of recent measures are copied, and each migrated recent entry
    triggers an hourly summarize in the new schema.
    """
    ######## START OF CONFIGURATION
    # remove all data from the target database before migrating
    empty_target_db = False
    # migrate history data
    migrate_history = True
    # history start timestamp to migrate, "-inf" for all
    history_start_timestamp = "-inf"
    # history end timestamp to migrate
    history_end_timestamp = utils.now()
    # migrate recent data
    migrate_recent = True
    # database number from which we are migrating
    db_from = 1
    # database number into which we are migrating
    db_to = 2
    # debug: print every migrated entry
    debug = False
    # keys to migrate history (from key -> to key)
    # destination key format: myHouse:<module_id>:<group_id>:<sensor_id>
    history = {
        'home:weather:outdoor:temperature:day:max': 'myHouse:outdoor:temperature:external:day:max',
        'home:weather:outdoor:temperature:day:min': 'myHouse:outdoor:temperature:external:day:min',
        'home:weather:outdoor:temperature:day': 'myHouse:outdoor:temperature:external:day:avg',
        'home:weather:indoor:temperature:day:max': 'myHouse:indoor:temperature:living_room:day:max',
        'home:weather:indoor:temperature:day:min': 'myHouse:indoor:temperature:living_room:day:min',
        'home:weather:indoor:temperature:day': 'myHouse:indoor:temperature:living_room:day:avg',
        'home:weather:almanac:record:min': 'myHouse:outdoor:temperature:record:day:min',
        'home:weather:almanac:record:max': 'myHouse:outdoor:temperature:record:day:max',
        'home:weather:almanac:normal:min': 'myHouse:outdoor:temperature:normal:day:min',
        'home:weather:almanac:normal:max': 'myHouse:outdoor:temperature:normal:day:max',
        'home:weather:outdoor:condition:day': 'myHouse:outdoor:temperature:condition:day:avg',
    }
    # keys to migrate recent data (from key -> to key)
    recent = {
        'home:weather:outdoor:temperature:measure': 'myHouse:outdoor:temperature:external',
        'home:weather:indoor:temperature:measure': 'myHouse:indoor:temperature:living_room',
        'home:weather:outdoor:condition:measure': 'myHouse:outdoor:temperature:condition',
    }
    ######## END OF CONFIGURATION
    conf = config.get_config(validate=False)
    print "[Migration from v1.x to v2.0]\n"
    # NOTE(review): under Python 2, input() evaluates the typed text and an
    # empty line raises SyntaxError; raw_input() looks intended — confirm
    input(
        "WARNING: which data will be migrate is defined within this script, on top of the upgrade_20() function.\nIndividual sensors to migrate must be specified manually\nPlase ensure you have reviewed all the settings first!\n\nPress Enter to continue..."
    )
    # take a backup of the v1.x data before touching anything
    backup("1.0")
    # empty the target database first
    if empty_target_db:
        print "Flushing target database..."
        change_db(db_to)
        db.flushdb()
    # for each history key to migrate
    print "Migrating historical data..."
    for key_from in history:
        if not migrate_history:
            break
        key_to = history[key_from]
        print "\tMigrating " + key_from + " -> " + key_to
        # retrieve all the data
        change_db(db_from)
        data = db.rangebyscore(key_from, history_start_timestamp, history_end_timestamp, withscores=True)
        change_db(db_to)
        count = 0
        # for each entry
        for entry in data:
            # history values are aligned to the start of their (timezone-adjusted) day
            timestamp = utils.day_start(utils.timezone(entry[0]))
            value = utils.normalize(entry[1])
            # store it into the new database
            if debug:
                print "[HISTORY][" + key_to + "] (" + utils.timestamp2date(timestamp) + ") " + str(value)
            db.set(key_to, value, timestamp)
            count = count + 1
        print "\t\tdone, " + str(count) + " values"
    # for each recent key to migrate
    print "Migrating recent data..."
    for key_from in recent:
        if not migrate_recent:
            break
        key_to = recent[key_from]
        print "\tMigrating " + key_from + " -> " + key_to
        # retrieve the recent data (last 2 days only)
        change_db(db_from)
        data = db.rangebyscore(key_from, utils.now() - 2 * conf["constants"]["1_day"], utils.now(), withscores=True)
        change_db(db_to)
        count = 0
        # for each entry
        for entry in data:
            timestamp = utils.timezone(entry[0])
            value = utils.normalize(entry[1])
            if debug:
                print "[RECENT][" + key_to + "] (" + utils.timestamp2date(timestamp) + ") " + str(value)
            # skip it if the same value is already stored
            old = db.rangebyscore(key_to, timestamp, timestamp)
            if len(old) > 0:
                continue
            # store it into the new database
            db.set(key_to, value, timestamp)
            # create the sensor data structure from the destination key
            key_split = key_to.split(":")
            group_id = key_split[-2]
            sensor_id = key_split[-1]
            # NOTE(review): for a 4-part key like myHouse:outdoor:temperature:external,
            # [-4] selects the schema root ("myHouse"), not the module id;
            # [-3] looks intended — confirm before relying on module_id here
            module_id = key_split[-4]
            sensor = utils.get_sensor(module_id, group_id, sensor_id)
            sensor['module_id'] = module_id
            sensor['group_id'] = group_id
            sensor['db_group'] = conf["constants"]["db_schema"]["root"] + ":" + sensor["module_id"] + ":" + sensor["group_id"]
            sensor['db_sensor'] = sensor['db_group'] + ":" + sensor["sensor_id"]
            # recompute the hourly aggregate for the hour this entry belongs to
            import sensors
            sensors.summarize(sensor, 'hour', utils.hour_start(timestamp), utils.hour_end(timestamp))
            count = count + 1
        print "\t\tdone, " + str(count) + " values"
    # mark the database as upgraded
    print "Upgrading database..."
    version_key = conf["constants"]["db_schema"]["version"]
    db.set_simple(version_key, "2.0")
def data_get_data(module_id, group_id, sensor_id, timeframe, stat):
    """Return the measures of a sensor for a timeframe/statistic as JSON.

    timeframe: one of realtime|recent|history|short_history|today|
    yesterday|forecast. stat: the aggregate suffix (e.g. "avg", "min",
    "max") or "range" to combine min and max per entry. Returns a
    JSON-encoded empty list when the sensor is missing or the timeframe
    is unknown.
    """
    data = []
    sensor = utils.get_sensor(module_id, group_id, sensor_id)
    if sensor is None:
        log.error("[" + module_id + "][" + group_id + "][" + sensor_id + "] sensor not found")
        return json.dumps(data)
    if "plugin" in sensor and "poll_on_demand" in sensor["plugin"] and sensor["plugin"]["poll_on_demand"] and timeframe == "realtime":
        # the sensor needs to be polled on demand
        run(module_id, group_id, sensor_id, "save")
    # get the parameters for the requested timeframe
    # (key_suffix instead of "range" so the builtin is not shadowed)
    if timeframe == "realtime":
        # individual measures up to now
        key_suffix = ""
        start = utils.realtime()
        end = utils.now()
        withscores = True
    elif timeframe == "recent":
        # recent hourly measures up to now
        key_suffix = ":hour"
        start = utils.recent()
        end = utils.now()
        withscores = True
    elif timeframe == "history":
        # historical daily measures up to now
        key_suffix = ":day"
        start = utils.history()
        end = utils.now()
        withscores = True
    elif timeframe == "short_history":
        # a shorter window of historical daily measures
        key_suffix = ":day"
        start = utils.history(conf["general"]["timeframes"]["short_history_days"])
        end = utils.now()
        withscores = True
    elif timeframe == "today":
        # today's measure
        key_suffix = ":day"
        start = utils.day_start(utils.now())
        end = utils.day_end(utils.now())
        withscores = False
    elif timeframe == "yesterday":
        # yesterday's measure
        key_suffix = ":day"
        start = utils.day_start(utils.yesterday())
        end = utils.day_end(utils.yesterday())
        withscores = False
    elif timeframe == "forecast":
        # next days measures
        key_suffix = ":day"
        start = utils.day_start(utils.now())
        end = utils.day_start(utils.now() + (conf["general"]["timeframes"]["forecast_days"] - 1) * conf["constants"]["1_day"])
        withscores = True
    else:
        # unknown timeframe: return a JSON string like every other path
        # (the original returned the raw list here, inconsistently)
        return json.dumps(data)
    # define the key to request
    key = conf["constants"]["db_schema"]["root"] + ":" + module_id + ":" + group_id + ":" + sensor_id + key_suffix
    requested_stat = ":" + stat
    # if a range is requested, start asking for the min
    if stat == "range":
        requested_stat = ":min"
    if timeframe == "realtime":
        # realtime keys carry no per-stat suffix
        requested_stat = ""
    # request the data
    formatter = conf["constants"]["formats"][sensor["format"]]["formatter"]
    data = db.rangebyscore(key + requested_stat, start, end, withscores=withscores, milliseconds=True, formatter=formatter)
    if stat == "range" and len(data) > 0:
        # if a range is requested, ask for the max and combine the results
        data_max = db.rangebyscore(key + ":max", start, end, withscores=False, milliseconds=True, formatter=formatter)
        for i, item in enumerate(data):
            # ensure data_max has a correspondent value
            if i < len(data_max):
                if isinstance(item, list):
                    data[i].append(data_max[i])
                else:
                    data.append(data_max[i])
    return json.dumps(data)
def data_run(module_id, group_id, sensor_id, action):
    """Execute an action on a sensor, returning "OK" or "KO" as a JSON string."""
    sensor = utils.get_sensor(module_id, group_id, sensor_id)
    if sensor is None:
        log.error("[" + module_id + "][" + group_id + "][" + sensor_id + "] sensor not found")
        return json.dumps("KO")
    log.debug("[" + module_id + "][" + group_id + "][" + sensor_id + "] executing: " + str(action))
    init_plugins()
    run(module_id, group_id, sensor_id, action)
    return json.dumps("OK")


# allow running it both as a module and when called directly
if __name__ == '__main__':
    if len(sys.argv) != 5:
        # no arguments provided, schedule all sensors
        schedule.start()
        schedule_all()
        while True:
            time.sleep(1)
    else:
        # run the requested action for the given sensor
        # usage: <module_id> <group_id> <sensor_id> <action>
        init_plugins()
        module_id, group_id, sensor_id, action = sys.argv[1:5]
        if utils.get_sensor(module_id, group_id, sensor_id) is None:
            log.info("invalid sensor provided")
        else:
            run(module_id, group_id, sensor_id, action)