def run(module_id, group_id, sensor_id, action): try: # ensure the group and sensor exist sensor = utils.get_sensor(module_id, group_id, sensor_id) sensor = init_sensor(sensor) if sensor is None: log.error("[" + module_id + "][" + group_id + "][" + sensor_id + "] not found, skipping it") return # execute the action log.debug("[" + sensor["module_id"] + "][" + sensor["group_id"] + "][" + sensor["sensor_id"] + "] requested " + action) if action == "poll": # read the measure (will be stored into the cache) poll(sensor) elif action == "parse": # just parse the output log.info(parse(sensor)) elif action == "save": # save the parsed output into the database save(sensor) elif action == "force_save": # save the parsed output into the database forcing polling the measure save(sensor, force=True) elif action == "summarize_hour": # every hour calculate and save min,max,avg of the previous hour summarize(sensor, 'hour', utils.hour_start(utils.last_hour()), utils.hour_end(utils.last_hour())) elif action == "summarize_day": # every day calculate and save min,max,avg of the previous day (using hourly averages) summarize(sensor, 'day', utils.day_start(utils.yesterday()), utils.day_end(utils.yesterday())) elif action == "expire": # purge old data from the database expire(sensor) else: log.error("Unknown action " + action) except Exception, e: log.warning("[" + sensor["module_id"] + "][" + sensor["group_id"] + "][" + sensor["sensor_id"] + "] unable to run " + action + ": " + utils.get_exception(e))
def upgrade_2_0(): ######## START OF CONFIGURATION # remote all data from the target database empty_target_db = False # migrate history data migrate_history = True # history start timestamp to migrate, "-inf" for all history_start_timestamp = "-inf" # historu end timestamp to migrate history_end_timestamp = utils.now() # migrate recent data migrate_recent = True # database number from which we are migrating db_from = 1 # database number into which we are migrating db_to = 2 # debug debug = False # keys to migrate history (from key -> to key) # destination key format: myHouse:<module_id>:<group_id>:<sensor_id> history = { 'home:weather:outdoor:temperature:day:max': 'myHouse:outdoor:temperature:external:day:max', 'home:weather:outdoor:temperature:day:min': 'myHouse:outdoor:temperature:external:day:min', 'home:weather:outdoor:temperature:day': 'myHouse:outdoor:temperature:external:day:avg', 'home:weather:indoor:temperature:day:max': 'myHouse:indoor:temperature:living_room:day:max', 'home:weather:indoor:temperature:day:min': 'myHouse:indoor:temperature:living_room:day:min', 'home:weather:indoor:temperature:day': 'myHouse:indoor:temperature:living_room:day:avg', 'home:weather:almanac:record:min': 'myHouse:outdoor:temperature:record:day:min', 'home:weather:almanac:record:max': 'myHouse:outdoor:temperature:record:day:max', 'home:weather:almanac:normal:min': 'myHouse:outdoor:temperature:normal:day:min', 'home:weather:almanac:normal:max': 'myHouse:outdoor:temperature:normal:day:max', 'home:weather:outdoor:condition:day': 'myHouse:outdoor:temperature:condition:day:avg', } # keys to migrate recent data (from key -> to key) recent = { 'home:weather:outdoor:temperature:measure': 'myHouse:outdoor:temperature:external', 'home:weather:indoor:temperature:measure': 'myHouse:indoor:temperature:living_room', 'home:weather:outdoor:condition:measure': 'myHouse:outdoor:temperature:condition', } ######## END OF CONFIGURATION conf = config.get_config(validate=False) print "[Migration from 
v1.x to v2.0]\n" input( "WARNING: which data will be migrate is defined within this script, on top of the upgrade_20() function.\nIndividual sensors to migrate must be specified manually\nPlase ensure you have reviewed all the settings first!\n\nPress Enter to continue..." ) backup("1.0") # empty the target database first if empty_target_db: print "Flushing target database..." change_db(db_to) db.flushdb() # for each history key to migrate print "Migrating historical data..." for key_from in history: if not migrate_history: break key_to = history[key_from] print "\tMigrating " + key_from + " -> " + key_to # retrieve all the data change_db(db_from) data = db.rangebyscore(key_from, history_start_timestamp, history_end_timestamp, withscores=True) change_db(db_to) count = 0 # for each entry for entry in data: timestamp = utils.day_start(utils.timezone(entry[0])) value = utils.normalize(entry[1]) # store it into the new database if debug: print "[HISTORY][" + key_to + "] (" + utils.timestamp2date( timestamp) + ") " + str(value) db.set(key_to, value, timestamp) count = count + 1 print "\t\tdone, " + str(count) + " values" # for each recent key to migrate print "Migrating recent data..." 
for key_from in recent: if not migrate_recent: break key_to = recent[key_from] print "\tMigrating " + key_from + " -> " + key_to # retrieve the recent data change_db(db_from) data = db.rangebyscore(key_from, utils.now() - 2 * conf["constants"]["1_day"], utils.now(), withscores=True) change_db(db_to) count = 0 # for each entry for entry in data: timestamp = utils.timezone(entry[0]) value = utils.normalize(entry[1]) if debug: print "[RECENT][" + key_to + "] (" + utils.timestamp2date( timestamp) + ") " + str(value) # skip it if the same value is already stored old = db.rangebyscore(key_to, timestamp, timestamp) if len(old) > 0: continue # store it into the new database db.set(key_to, value, timestamp) # create the sensor data structure key_split = key_to.split(":") group_id = key_split[-2] sensor_id = key_split[-1] module_id = key_split[-4] sensor = utils.get_sensor(module_id, group_id, sensor_id) sensor['module_id'] = module_id sensor['group_id'] = group_id sensor['db_group'] = conf["constants"]["db_schema"][ "root"] + ":" + sensor["module_id"] + ":" + sensor["group_id"] sensor[ 'db_sensor'] = sensor['db_group'] + ":" + sensor["sensor_id"] import sensors sensors.summarize(sensor, 'hour', utils.hour_start(timestamp), utils.hour_end(timestamp)) count = count + 1 print "\t\tdone, " + str(count) + " values" print "Upgrading database..." version_key = conf["constants"]["db_schema"]["version"] db.set_simple(version_key, "2.0")
def parse(sensor, data):
    """Parse a Weather Underground JSON response into a list of measures.

    :param sensor: sensor data structure; sensor['plugin']['measure'] selects
                   which field(s) of the response to extract
    :param data: raw JSON string returned by the service
    :return: list of {"key", "value", "timestamp"} dicts, or None if the
             response contains an error
    :raises Exception: if the configured measure type is unknown
    """
    request = sensor['plugin']['measure']
    measures = []
    measure = {}
    measure["key"] = sensor["sensor_id"]
    # parse the json
    parsed_json = json.loads(data)
    if "error" in parsed_json:
        # error returned by the service: log it and bail out
        log.error("[" + sensor["module_id"] + "][" + sensor["group_id"] + "][" + sensor["sensor_id"] + "] " + parsed_json["error"]["type"] + ": " + parsed_json["error"]["description"])
        return None
    # current conditions: single measure timestamped with the observation epoch
    if request == "temperature":
        measure["value"] = float(parsed_json['current_observation']['temp_c'])
        measure["timestamp"] = utils.timezone(
            int(parsed_json['current_observation']['observation_epoch']))
        measures.append(measure)
    elif request == "humidity":
        # relative_humidity arrives as e.g. "54%"
        measure["value"] = int(
            parsed_json['current_observation']['relative_humidity'].replace(
                '%', ''))
        measure["timestamp"] = utils.timezone(
            int(parsed_json['current_observation']['observation_epoch']))
        measures.append(measure)
    elif request == "wind":
        measure["value"] = float(
            parsed_json['current_observation']['wind_kph'])
        measure["timestamp"] = utils.timezone(
            int(parsed_json['current_observation']['observation_epoch']))
        measures.append(measure)
    elif request == "wind_gust":
        measure["value"] = float(
            parsed_json['current_observation']['wind_gust_kph'])
        measure["timestamp"] = utils.timezone(
            int(parsed_json['current_observation']['observation_epoch']))
        measures.append(measure)
    elif request == "pressure":
        measure["value"] = float(
            parsed_json['current_observation']['pressure_mb'])
        measure["timestamp"] = utils.timezone(
            int(parsed_json['current_observation']['observation_epoch']))
        measures.append(measure)
    elif request == "condition":
        # condition is stored as the icon name (a string, not a number)
        measure["value"] = parsed_json['current_observation']['icon']
        measure["timestamp"] = utils.timezone(
            int(parsed_json['current_observation']['observation_epoch']))
        measures.append(measure)
    elif request == "wind_dir":
        # reduce the wind direction to its leading cardinal letter, "-" otherwise
        direction = parsed_json['current_observation']['wind_dir']
        if len(direction) > 0 and (direction[0] == "N" or direction[0] == "W"
                                   or direction[0] == "S" or direction[0] == "E"):
            direction = direction[0]
        else:
            direction = "-"
        measure["value"] = direction
        measure["timestamp"] = utils.timezone(
            int(parsed_json['current_observation']['observation_epoch']))
        measures.append(measure)
    # forecast requests: one measure per forecast day, capped at
    # forecast_max_entries (module-level setting — presumably an int; defined
    # outside this block)
    elif request == "forecast_condition":
        for entry in parsed_json['forecast']['simpleforecast'][
                'forecastday'][:forecast_max_entries]:
            measure = {}
            measure["key"] = sensor["sensor_id"] + ":day:avg"
            measure["timestamp"] = utils.day_start(
                utils.timezone(int(entry["date"]["epoch"])))
            measure["value"] = entry["icon"]
            measures.append(measure)
    elif request == "forecast_pop":
        # probability of precipitation; negative values are clamped to 0
        for entry in parsed_json['forecast']['simpleforecast'][
                'forecastday'][:forecast_max_entries]:
            measure = {}
            measure["key"] = sensor["sensor_id"] + ":day:avg"
            measure["timestamp"] = utils.day_start(
                utils.timezone(int(entry["date"]["epoch"])))
            measure["value"] = entry["pop"] if entry["pop"] > 0 else 0
            measures.append(measure)
    elif request == "forecast_rain":
        # quantitative precipitation forecast in mm; clamped to 0
        for entry in parsed_json['forecast']['simpleforecast'][
                'forecastday'][:forecast_max_entries]:
            measure = {}
            measure["key"] = sensor["sensor_id"] + ":day:avg"
            measure["timestamp"] = utils.day_start(
                utils.timezone(int(entry["date"]["epoch"])))
            measure["value"] = entry["qpf_allday"][
                "mm"] if entry["qpf_allday"]["mm"] > 0 else 0
            measures.append(measure)
    elif request == "forecast_snow":
        # snow forecast comes in cm; converted to mm (*10) to match rain
        for entry in parsed_json['forecast']['simpleforecast'][
                'forecastday'][:forecast_max_entries]:
            measure = {}
            measure["key"] = sensor["sensor_id"] + ":day:avg"
            measure["timestamp"] = utils.day_start(
                utils.timezone(int(entry["date"]["epoch"])))
            measure["value"] = entry["snow_allday"]["cm"] * 10 if entry[
                "snow_allday"]["cm"] > 0 else 0
            measures.append(measure)
    elif request == "forecast_temperature":
        # two measures per forecast day: the low (:day:min) and the high (:day:max)
        for entry in parsed_json['forecast']['simpleforecast'][
                'forecastday'][:forecast_max_entries]:
            measure = {}
            measure["key"] = sensor["sensor_id"] + ":day:min"
            measure["value"] = int(entry["low"]["celsius"])
            measure["timestamp"] = utils.day_start(
                utils.timezone(int(entry["date"]["epoch"])))
            measures.append(measure)
            measure = {}
            measure["key"] = sensor["sensor_id"] + ":day:max"
            measure["value"] = int(entry["high"]["celsius"])
            measure["timestamp"] = utils.day_start(
                utils.timezone(int(entry["date"]["epoch"])))
            measures.append(measure)
    # almanac requests: min/max pair timestamped at the start of today
    elif request == "record_temperature":
        measure["key"] = sensor["sensor_id"] + ":day:min"
        measure["value"] = int(
            parsed_json['almanac']['temp_low']['record']['C'])
        measure["timestamp"] = utils.day_start(utils.now())
        measures.append(measure)
        measure = {}
        measure["key"] = sensor["sensor_id"] + ":day:max"
        measure["value"] = int(
            parsed_json['almanac']['temp_high']['record']['C'])
        measure["timestamp"] = utils.day_start(utils.now())
        measures.append(measure)
    elif request == "record_temperature_year":
        measure["key"] = sensor["sensor_id"] + ":day:min"
        measure["value"] = int(
            parsed_json['almanac']['temp_low']['recordyear'])
        measure["timestamp"] = utils.day_start(utils.now())
        measures.append(measure)
        measure = {}
        measure["key"] = sensor["sensor_id"] + ":day:max"
        measure["value"] = int(
            parsed_json['almanac']['temp_high']['recordyear'])
        measure["timestamp"] = utils.day_start(utils.now())
        measures.append(measure)
    elif request == "normal_temperature":
        measure["key"] = sensor["sensor_id"] + ":day:min"
        measure["value"] = int(
            parsed_json['almanac']['temp_low']['normal']['C'])
        measure["timestamp"] = utils.day_start(utils.now())
        measures.append(measure)
        measure = {}
        measure["key"] = sensor["sensor_id"] + ":day:max"
        measure["value"] = int(
            parsed_json['almanac']['temp_high']['normal']['C'])
        measure["timestamp"] = utils.day_start(utils.now())
        measures.append(measure)
    # history requests: daily summary, timestamped from its date fields
    elif request == "rain":
        measure["key"] = sensor["sensor_id"] + ":day:avg"
        date_dict = parsed_json['history']['dailysummary'][0]['date']
        date = datetime.datetime.strptime(
            date_dict["mday"] + "-" + date_dict["mon"] + "-" +
            date_dict["year"], "%d-%m-%Y")
        measure["timestamp"] = utils.timezone(
            int(time.mktime(date.timetuple())))
        measure["value"] = float(
            parsed_json['history']['dailysummary'][0]['precipm'])
        measures.append(measure)
    elif request == "snow":
        measure["key"] = sensor["sensor_id"] + ":day:avg"
        date_dict = parsed_json['history']['dailysummary'][0]['date']
        date = datetime.datetime.strptime(
            date_dict["mday"] + "-" + date_dict["mon"] + "-" +
            date_dict["year"], "%d-%m-%Y")
        measure["timestamp"] = utils.timezone(
            int(time.mktime(date.timetuple())))
        # NOTE(review): this branch reads 'precipm' (rainfall), same as the
        # "rain" branch above — looks copy-pasted; presumably it should read a
        # snowfall field instead. TODO confirm against the API and the "rain"
        # sensor configuration before changing.
        measure["value"] = float(
            parsed_json['history']['dailysummary'][0]
            ['precipm']) if utils.is_number(
                parsed_json['history']['dailysummary'][0]['precipm']) else 0
        measures.append(measure)
    else:
        raise Exception("invalid request " + str(request))
    # append the measure and return it
    return measures
def customer_auto_report(customer_id, time_now):
    """Generate and e-mail the periodic report for a customer, then charge the
    reported totals against the customer's balance.

    :param customer_id: id of the customer to report on
    :param time_now: current time, compared against the task schedule
    """
    from model import Customer, ScheduledTask
    from task.mail import send_email
    customer = Customer.get_by_id(customer_id)
    if not customer:
        logbook.error("Can't find customer {} for report generation", customer_id)
        return
    report_task = ScheduledTask.get_by_customer(customer_id, Customer.AUTO_REPORT_TASK)
    if not report_task:
        logbook.error("Can't find auto report task for customer {}", customer_id)
        return
    logbook.debug("Start auto-report task for customer {}: {}", customer, report_task)
    report_task.start()
    # bugfix: the None check used to run *after* .replace(tzinfo=utc), so a
    # missing schedule raised AttributeError instead of being handled below
    next_scheduled = report_task.next_scheduled
    previous_next_send = next_scheduled.replace(tzinfo=utc) if next_scheduled is not None else None
    if previous_next_send is None or previous_next_send > time_now:
        logbook.warning(
            "Looks like report for customer {} already sent. Next send: {}, current time {}",
            customer, previous_next_send, time_now)
        report_task.completed(now=time_now)
        return
    # report period: from the last run (or now) to the end of the previous interval
    report_begin = day_start(report_task.last or time_now)
    _, report_end = report_task.task_range(time_now, previous_interval=True)
    report_end = day_start(report_end)
    report_id = CustomerReportId(customer.customer_id, report_begin, report_end,
                                 conf.customer.report.type, conf.customer.report.format,
                                 customer.locale)
    report_file_generate(report_id)
    # report_file_generate closed session so we should initialize customer again
    customer = Customer.get_by_id(customer_id)
    report_task = ScheduledTask.get_by_customer(customer_id, Customer.AUTO_REPORT_TASK)
    report_cache = ReportCache()
    report = report_cache.get_report_aggregated(report_id)
    if not report or not report["tariffs"]:
        logbook.info("Report is empty for customer {}", customer)
        report_task.completed(now=time_now)
        return
    report_file = report_cache.get_report(report_id)
    if not report_file:
        logbook.error("Report generation failed")
        report_task.completed(False)
        return
    # mail the report out, if the customer subscribed to billing e-mails
    filename = Report.generate_file_name(customer.get_name(), report_begin, report_end,
                                         conf.customer.report.format)
    subject, body = MessageTemplate.get_rendered_message(
        MessageTemplate.CUSTOMER_AUTO_REPORT,
        customer.locale_language(),
        customer_name=customer.get_name(),
        currency=customer.tariff.currency,
        report_start=report_begin,
        report_end=report_end)
    subscription_info = customer.subscription_info()['billing']
    if subscription_info["enable"]:
        send_email.delay(subscription_info["email"], subject, body,
                         attachments=[(filename, report_file)])
    report_task.completed(now=time_now)
    # charge the reported totals against the balance, one entry per currency
    comment_fmt = "%s - %s" % (report_begin.strftime('%Y-%m-%d'),
                               report_end.strftime('%Y-%m-%d'))
    for currency, amount in report["total"].items():
        amount = Decimal(amount)
        customer.modify_balance(-amount, currency, None, comment_fmt)
        account = customer.get_account(currency)
        account.charge(-amount)
    db.session.commit()
# NOTE(review): this is a byte-identical duplicate of customer_auto_report
# defined earlier in this file; this second definition silently shadows the
# first. One of the two copies should be deleted.
def customer_auto_report(customer_id, time_now):
    """Generate and e-mail the periodic report for a customer, then charge the
    reported totals against the customer's balance.

    :param customer_id: id of the customer to report on
    :param time_now: current time, compared against the task schedule
    """
    from model import Customer, ScheduledTask
    from task.mail import send_email
    customer = Customer.get_by_id(customer_id)
    if not customer:
        logbook.error("Can't find customer {} for report generation", customer_id)
        return
    report_task = ScheduledTask.get_by_customer(customer_id, Customer.AUTO_REPORT_TASK)
    if not report_task:
        logbook.error("Can't find auto report task for customer {}", customer_id)
        return
    logbook.debug("Start auto-report task for customer {}: {}", customer, report_task)
    report_task.start()
    # bugfix: the None check used to run *after* .replace(tzinfo=utc), so a
    # missing schedule raised AttributeError instead of being handled below
    next_scheduled = report_task.next_scheduled
    previous_next_send = next_scheduled.replace(tzinfo=utc) if next_scheduled is not None else None
    if previous_next_send is None or previous_next_send > time_now:
        logbook.warning("Looks like report for customer {} already sent. Next send: {}, current time {}",
                        customer, previous_next_send, time_now)
        report_task.completed(now=time_now)
        return
    # report period: from the last run (or now) to the end of the previous interval
    report_begin = day_start(report_task.last or time_now)
    _, report_end = report_task.task_range(time_now, previous_interval=True)
    report_end = day_start(report_end)
    report_id = CustomerReportId(customer.customer_id, report_begin, report_end,
                                 conf.customer.report.type, conf.customer.report.format,
                                 customer.locale)
    report_file_generate(report_id)
    # report_file_generate closed session so we should initialize customer again
    customer = Customer.get_by_id(customer_id)
    report_task = ScheduledTask.get_by_customer(customer_id, Customer.AUTO_REPORT_TASK)
    report_cache = ReportCache()
    report = report_cache.get_report_aggregated(report_id)
    if not report or not report["tariffs"]:
        logbook.info("Report is empty for customer {}", customer)
        report_task.completed(now=time_now)
        return
    report_file = report_cache.get_report(report_id)
    if not report_file:
        logbook.error("Report generation failed")
        report_task.completed(False)
        return
    # mail the report out, if the customer subscribed to billing e-mails
    filename = Report.generate_file_name(customer.get_name(), report_begin, report_end,
                                         conf.customer.report.format)
    subject, body = MessageTemplate.get_rendered_message(
        MessageTemplate.CUSTOMER_AUTO_REPORT,
        customer.locale_language(),
        customer_name=customer.get_name(),
        currency=customer.tariff.currency,
        report_start=report_begin,
        report_end=report_end)
    subscription_info = customer.subscription_info()['billing']
    if subscription_info["enable"]:
        send_email.delay(subscription_info["email"], subject, body,
                         attachments=[(filename, report_file)])
    report_task.completed(now=time_now)
    # charge the reported totals against the balance, one entry per currency
    comment_fmt = "%s - %s" % (report_begin.strftime('%Y-%m-%d'),
                               report_end.strftime('%Y-%m-%d'))
    for currency, amount in report["total"].items():
        amount = Decimal(amount)
        customer.modify_balance(-amount, currency, None, comment_fmt)
        account = customer.get_account(currency)
        account.charge(-amount)
    db.session.commit()
def data_get_data(module_id, group_id, sensor_id, timeframe, stat):
    """Return the requested sensor's data as a JSON-encoded list.

    :param timeframe: realtime/recent/history/short_history/today/yesterday/forecast
    :param stat: statistic suffix to read (e.g. avg/min/max) or "range" to
                 combine min and max into pairs
    :return: JSON string; "[]" when the sensor or timeframe is unknown
    """
    data = []
    sensor = utils.get_sensor(module_id, group_id, sensor_id)
    if sensor is None:
        log.error("[" + module_id + "][" + group_id + "][" + sensor_id + "] sensor not found")
        return json.dumps(data)
    if "plugin" in sensor and "poll_on_demand" in sensor["plugin"] and sensor[
            "plugin"]["poll_on_demand"] and timeframe == "realtime":
        # the sensor needs to be polled on demand
        run(module_id, group_id, sensor_id, "save")
    # get the parameters for the requested timeframe
    # (key_suffix was previously a local named `range`, shadowing the builtin)
    if timeframe == "realtime":
        # recent measures up to now, stored without a stat suffix
        key_suffix = ""
        start = utils.realtime()
        end = utils.now()
        withscores = True
    elif timeframe == "recent":
        # recent hourly measures up to now
        key_suffix = ":hour"
        start = utils.recent()
        end = utils.now()
        withscores = True
    elif timeframe == "history":
        # historical daily measures up to now
        key_suffix = ":day"
        start = utils.history()
        end = utils.now()
        withscores = True
    elif timeframe == "short_history":
        # historical daily measures limited to the configured number of days
        key_suffix = ":day"
        start = utils.history(conf["general"]["timeframes"]["short_history_days"])
        end = utils.now()
        withscores = True
    elif timeframe == "today":
        # today's measure
        key_suffix = ":day"
        start = utils.day_start(utils.now())
        end = utils.day_end(utils.now())
        withscores = False
    elif timeframe == "yesterday":
        # yesterday's measure
        key_suffix = ":day"
        start = utils.day_start(utils.yesterday())
        end = utils.day_end(utils.yesterday())
        withscores = False
    elif timeframe == "forecast":
        # next days' measures
        key_suffix = ":day"
        start = utils.day_start(utils.now())
        end = utils.day_start(utils.now() +
                              (conf["general"]["timeframes"]["forecast_days"] - 1) *
                              conf["constants"]["1_day"])
        withscores = True
    else:
        # bugfix: was `return data`, leaking a raw list while every other
        # path returns a JSON string
        return json.dumps(data)
    # define the key to request
    key = conf["constants"]["db_schema"][
        "root"] + ":" + module_id + ":" + group_id + ":" + sensor_id + key_suffix
    requested_stat = ":" + stat
    # if a range is requested, start asking for the min
    if stat == "range":
        requested_stat = ":min"
    # realtime measures carry no stat suffix at all
    if timeframe == "realtime":
        requested_stat = ""
    # request the data
    data = db.rangebyscore(
        key + requested_stat, start, end,
        withscores=withscores, milliseconds=True,
        formatter=conf["constants"]["formats"][sensor["format"]]["formatter"])
    if stat == "range" and len(data) > 0:
        # if a range is requested, ask for the max and combine the results
        data_max = db.rangebyscore(key + ":max", start, end,
                                   withscores=False, milliseconds=True,
                                   formatter=conf["constants"]["formats"][
                                       sensor["format"]]["formatter"])
        # iterate over a fixed length: the original enumerated `data` while
        # appending to it, which is fragile when the two result sets differ
        pairs = min(len(data), len(data_max))
        for i in range(pairs):
            if isinstance(data[i], list):
                # [timestamp, min] entries get the max appended in place
                data[i].append(data_max[i])
            else:
                # bare values (withscores=False): append the max after the mins
                data.append(data_max[i])
    return json.dumps(data)
def store(sensor, measures, ifnotexists=False):
    """Store the given measures of a sensor into the database.

    :param sensor: sensor data structure (provides db_group, ids, retention, format)
    :param measures: list of {"key", "value", "timestamp"} dicts; None to skip
    :param ifnotexists: when True, skip any measure whose key already exists
    """
    # if an exception occurred upstream, skip this sensor
    if measures is None:
        return
    # for each returned measure
    for measure in measures:
        # set the timestamp to now if not already set
        if "timestamp" not in measure:
            measure["timestamp"] = utils.now()
        # define the key to store the value
        key = sensor["db_group"] + ":" + measure["key"]
        # if ifnotexists is set, check if the key exists
        if ifnotexists and db.exists(key):
            log.debug("[" + sensor["module_id"] + "][" + sensor["group_id"] + "][" + sensor["sensor_id"] + "] key already exists, ignoring new value")
            # bugfix: was `return`, which also dropped every remaining measure;
            # each measure has its own key and must be evaluated on its own
            continue
        # delete previous values if needed
        realtime_count = conf["sensors"]["retention"]["realtime_count"]
        if "retention" in sensor and "realtime_count" in sensor["retention"]:
            realtime_count = sensor["retention"]["realtime_count"]
        if realtime_count > 0:
            db.deletebyrank(key, 0, -realtime_count)
        # if only measures with a newer timestamp than the latest can be added, apply the policy
        realtime_new_only = conf["sensors"]["retention"]["realtime_new_only"]
        if "retention" in sensor and "realtime_new_only" in sensor["retention"]:
            realtime_new_only = sensor["retention"]["realtime_new_only"]
        if realtime_new_only:
            # retrieve the latest measure's timestamp
            last = db.range(key, -1, -1)
            if len(last) > 0:
                last_timestamp = last[0][0]
                # if the measure's timestamp is older or the same, skip it
                if measure["timestamp"] <= last_timestamp:
                    log.debug("[" + sensor["module_id"] + "][" + sensor["group_id"] + "][" + sensor["sensor_id"] + "] (" + utils.timestamp2date(measure["timestamp"]) + ") old event, ignoring " + measure["key"] + ": " + str(measure["value"]))
                    continue
        # check if there is already something stored with the same timestamp
        old = db.rangebyscore(key, measure["timestamp"], measure["timestamp"])
        if len(old) > 0:
            if old[0][1] == measure["value"]:
                # if the value is also the same, skip it
                log.debug("[" + sensor["module_id"] + "][" + sensor["group_id"] + "][" + sensor["sensor_id"] + "] (" + utils.timestamp2date(measure["timestamp"]) + ") already in the database, ignoring " + measure["key"] + ": " + str(measure["value"]))
                continue
            else:
                # same timestamp but different value, remove the old value so to store the new one
                db.deletebyscore(key, measure["timestamp"], measure["timestamp"])
        # store the value into the database
        log.info("[" + sensor["module_id"] + "][" + sensor["group_id"] + "][" + sensor["sensor_id"] + "] (" + utils.timestamp2date(measure["timestamp"]) + ") saving " + measure["key"] + ": " + utils.truncate(str(measure["value"])) + conf["constants"]["formats"][sensor["format"]]["suffix"])
        db.set(key, measure["value"], measure["timestamp"])
        # re-calculate the derived measures for the hour/day
        if "summarize" in sensor:
            summarize(sensor, 'hour', utils.hour_start(measure["timestamp"]), utils.hour_end(measure["timestamp"]))
            summarize(sensor, 'day', utils.day_start(measure["timestamp"]), utils.day_end(measure["timestamp"]))