def configure_logger():
    transport = "ipc:///tmp/zmq_calibrate_util_server.ipc"
    ScreenLoggerServer.start_thread(transport)
    logger = LoggerClient.open("test_emon", transport)
    logger.config(logger.levels.DEBUG, logger.schedules.INSTANTANEOUSLY)
    logger.info("Opening connection")
    return logger
def configure_logger():
    transport = "ipc:///tmp/zmq_calibrate_util_server.ipc"
    ScreenLoggerServer.start_thread(transport)
    logger = LoggerClient.open("calibrate", transport)
    logger.config(logger.levels.DEBUG, logger.schedules.INSTANTANEOUSLY)
    logger.info("Opening connection")
    return logger
def start():
    """Starts a thread which reads from RFM69Pi and publishes using MQTT."""
    global logger
    logger = LoggerClient.open("OpenEnergyMonitor")
    logger.info("Opening connection")
    global client
    client = Utils.getpahoclient(logger)
    global iface
    iface = __try_open(logger)
    # config is a dictionary with:
    #   devices : [ { id, desc, name } ]
    #   keys    : [ { nodeId, key, desc, name } ]
    config = Utils.getconfig("open_energy_monitor", logger)
    nodes = config["nodes"]
    node2keys = config["node2keys"]
    for n in nodes:
        # We add a data field to check the relative difference between two
        # consecutive readings; if the difference is not large enough the
        # message is not published, reducing overhead and database size.
        for conf in node2keys[str(n["id"])]:
            conf["data"] = -10000.0
            conf["when"] = 0.0
    global is_running
    is_running = True
    thread = threading.Thread(target=__process,
                              args=(logger, client, iface, nodes, node2keys))
    thread.setDaemon(True)
    thread.start()
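# A minimal sketch of the relative-difference filter described in the comment
# above, as __process might apply it. The helper name, the 5% tolerance and
# the forced-publish period are assumptions for illustration, not the
# module's actual values; only the conf["data"] and conf["when"] fields come
# from start().
import time

REL_TOLERANCE = 0.05               # hypothetical: 5% relative change
MAX_SECONDS_WITHOUT_PUBLISH = 600  # hypothetical: force a message every 10 min

def _should_publish(conf, value, now=None):
    # Publish when the reading drifts enough from the last published value,
    # or when too much time has passed since the last message.
    if now is None: now = time.time()
    last = conf["data"]
    rel_diff = abs(value - last) / max(abs(last), 1e-6)
    if rel_diff > REL_TOLERANCE or now - conf["when"] > MAX_SECONDS_WITHOUT_PUBLISH:
        conf["data"] = value
        conf["when"] = now
        return True
    return False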
def start():
    """Opens connections with logger, InfluxDB and MQTT broker."""
    global logger
    global mqtt_client
    global house_data
    global config
    global influx_client
    logger = LoggerClient.open("InfluxDBHub")
    mqtt_client = Utils.getpahoclient(logger, __configure_mqtt)
    config = Utils.getconfig("influxdb", logger)
    influx_client = InfluxDBClient(config["host"], config["port"],
                                   config["user"], config["password"],
                                   config["database"])
    if not {"name": config["database"]} in influx_client.get_list_database():
        influx_client.create_database(config["database"])
    if not any([ x["name"] == "raspimon_policy"
                 for x in influx_client.get_list_retention_policies() ]):
        influx_client.create_retention_policy('raspimon_policy',
                                              config["retention_policy"],
                                              1, default=True)
    else:
        influx_client.alter_retention_policy('raspimon_policy',
                                             duration=config["retention_policy"],
                                             replication=1,
                                             default=True)
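# A minimal sketch of writing a point under the retention policy created
# above. The measurement name, tags and fields are hypothetical; only
# InfluxDBClient.write_points and its retention_policy argument come from the
# influxdb client library.
def _write_sample(influx_client, value):
    point = {
        "measurement": "power",           # hypothetical measurement name
        "tags": { "source": "raspimon" },
        "fields": { "value": float(value) },
    }
    influx_client.write_points([point], retention_policy='raspimon_policy')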
def start():
    """Opens logger connection."""
    global logger
    logger = LoggerClient.open("ElectricityPricesMonitor")
    publish(0)
    if time.time() * 1000 % Scheduler.T1_DAY > 21 * Scheduler.T1_HOUR - 10 * Scheduler.T1_SECOND:
        # Publish next day electricity prices when starting the software at
        # night.
        try:
            publish(1)
        except:
            print "Unexpected error:", traceback.format_exc()
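# A standalone sketch of the night-window condition above. The helper name is
# hypothetical; the constants mirror Scheduler's values. time.time() * 1000 %
# T1_DAY is the number of milliseconds elapsed since UTC midnight, so the
# condition holds from 20:59:50 UTC until the end of the day.
import time

T1_SECOND = 1000
T1_HOUR = 3600 * T1_SECOND
T1_DAY = 24 * T1_HOUR

def _is_night_window(now_ms=None):
    if now_ms is None: now_ms = time.time() * 1000
    return now_ms % T1_DAY > 21 * T1_HOUR - 10 * T1_SECOND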
def start():
    """Opens logger connection, loads its configuration from MongoDB and
    sends the first message."""
    global logger
    global config
    global location_id
    global current_weather_url
    logger = LoggerClient.open("AEMETMonitor")
    config = Utils.getconfig("aemet", logger)
    location_id = config["location_id"]
    current_weather_url = config["current_weather_url"]
    publish()
def start():
    """Opens connections with logger, MongoDB and MQTT broker."""
    global logger
    global mqtt_client
    global house_data
    logger = LoggerClient.open("MongoDBHub")
    mqtt_client = Utils.getpahoclient(logger, __configure_mqtt)
    mongo_client = Utils.getmongoclient(logger)
    db = mongo_client["raspimon"]
    col = db["GVA2015_houses"]
    house_data = col.find_one({"raspi": raspi_mac})
    assert house_data is not None
    mongo_client.close()
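# raspi_mac is a module-level global defined elsewhere in this file. As an
# assumption for illustration only, it could be derived from the machine's
# MAC address like this (the real module may obtain it differently):
import uuid
raspi_mac = "%012x" % uuid.getnode()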
def start():
    """Connects with logging server, loads plugwise network configuration
    and connects with MQTT broker."""
    global logger
    global client
    global config
    global device
    global circles_config
    global circles
    global mac2circle
    logger = LoggerClient.open("PlugwiseMonitor")
    if not verbose:
        logger.config(logger.levels.WARNING, logger.schedules.DAILY)
    config = Utils.getconfig("plugwise", logger)
    assert config is not None
    device = plugwise_api.Stick(logger, DEFAULT_SERIAL_PORT)
    # circles_config is a list of dictionaries with keys: name, mac, desc.
    # A state field is added in the next loop to track each circle's value,
    # so messages are only sent on state transitions. The power1s and
    # power8s fields are used to check the relative difference in power in
    # order to reduce network overhead.
    circles_config = config["circles"]
    circles = []
    mac2circle = {}
    for circle_data in circles_config:
        mac = circle_data["mac"]
        circles.append( plugwise_api.Circle(logger, mac, device, {
            "name": circle_data["name"],
            "location": circle_data["desc"],
            "always_on": "False",
            "production": "True"
        }) )
        mac2circle[mac] = circles[-1]
        circle_data["state"] = "NA"
        for v in OUTPUT_LIST:
            circle_data["power" + v["suffix"]] = -10000.0
            circle_data["when" + v["suffix"]] = 0.0
    client = Utils.getpahoclient(logger, __configure)
    client.loop_start()
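# A minimal sketch of the state-transition filter described above. The
# helper name and the publish topic are hypothetical; only the state field
# initialized in start() is taken from the module.
def _publish_state_if_changed(client, circle_data, new_state):
    # Only send a message when the circle's on/off state actually changes.
    if new_state != circle_data["state"]:
        circle_data["state"] = new_state
        client.publish("plugwise/" + circle_data["mac"] + "/state", new_state)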
def start():
    """Connects with logging server and sends first message."""
    global logger
    logger = LoggerClient.open("CheckIP")
    publish()
    return method

if __name__ == "__main__":
    Utils.ntpcheck()
    Utils.startup_wait()

    T1_MILISECOND  = 1
    T1_CENTISECOND = 10
    T1_DECISECOND  = 100
    T1_SECOND      = 1000
    T1_MINUTE      = 60000
    T1_HOUR        = 3600000
    T1_DAY         = 24 * T1_HOUR

    # Configure logger.
    logger = LoggerClient.open("MainMonitoringSystem")
    logger.info("Initializing main monitoring system")

    # Configure scheduler.
    Scheduler.start()
    logger.info("Scheduler started")

    # Start all modules.
    started_modules = []

    def try_start(module_info):
        logger.info("Starting module %s", module_info["import"])
        module = importlib.import_module(module_info["import"])
        if __try_call(logger, module.start):
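# __try_call is referenced above but not shown in this fragment. A minimal
# sketch of what such a guard might look like, as an assumption rather than
# the module's actual implementation: run the callable and report any
# exception through the logger instead of crashing the main loop.
import traceback

def __try_call(logger, func, *args):
    try:
        func(*args)
        return True
    except:
        logger.error("Exception: %s", traceback.format_exc())
        return False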
def stop():
    """Closes connection with logger."""
    logger.close()

if __name__ == "__main__":
    import raspi_mon_sys.ScreenLoggerServer as ScreenLoggerServer

    transport = "ipc:///tmp/zmq_aemet_server.ipc"
    ScreenLoggerServer.start_thread(transport)

    class MQTTClientFake:
        def publish(self, *args):
            print args

    tz = pytz.timezone("Europe/Madrid")
    logger = LoggerClient.open("AEMETMonitor", transport)
    # logger.config(logger.levels.DEBUG, logger.schedules.INSTANTANEOUSLY)
    current_weather_url = "http://www.aemet.es/es/eltiempo/observacion/ultimosdatos_8416Y_datos-horarios.csv?k=val&l=8416Y&datos=det&w=0&f=temperatura&x=h24"
    location_id = "46250"
    client = MQTTClientFake()
    __publish_daily_forecast(client)
    __publish_hourly_forecast(client)
    __publish_current_weather_status(client)
        for k in keys:
            v = float( res[k].replace(',', '.') )  # replace decimal commas by dots
            message = { 'timestamp': ref_time + hour_offset, 'data': v }
            client.publish(topic.format(k), json.dumps(message))
        logger.info("Electricity price published")
    except:
        logger.info("Unable to publish electricity prices")
        client.disconnect()
        raise
    else:
        client.disconnect()

def publish(day_offset):
    """Publishes the electricity prices for a given day offset.

    If `day_offset=0` prices will be for the current day, if `day_offset=1`
    prices will be for the next day, and so on.
    """
    # Take the date for the requested day offset in the Spanish timezone.
    tz = pytz.timezone("Europe/Madrid")
    dt = datetime.date.today() + datetime.timedelta(days=day_offset)
    dt = datetime.datetime.combine(dt, datetime.datetime.min.time())
    dt = tz.localize(dt)
    ref_time = time.mktime(dt.timetuple())
    __publish_data_of_day(dt.strftime("%Y%m%d"), ref_time)

if __name__ == "__main__":
    logger = LoggerClient.open("ElectricityPricesMonitor")
    publish(0)
    logger.close()
def stop():
    logger.close()

if __name__ == "__main__":
    import raspi_mon_sys.ScreenLoggerServer as ScreenLoggerServer

    transport = "ipc:///tmp/zmq_electricity_prices_server.ipc"
    ScreenLoggerServer.start_thread(transport)
    logger = LoggerClient.open("ElectricityPricesMonitor", transport)
    tz = pytz.timezone("Europe/Madrid")
    dt = datetime.datetime.strptime(sys.argv[1], "%Y%m%d")
    dt = tz.localize(dt)
    ref_time = time.mktime(dt.timetuple())
    __publish_data_of_day(sys.argv[1], ref_time)
    logger.close()