Example #1
def __process(logger, client, iface, nodes, node2keys):
    last_timestamp = time.time()
    topic = Utils.gettopic("rfemon/{0}/{1}/{2}")
    while is_running:
        if iface is None:
            time.sleep(0.1)
            iface = __try_open(logger)
            continue
        try:
            # Execute run method
            iface.run()
            # Read socket
            current_values = iface.read()
            if current_values is None and time.time() - last_timestamp > MAX_TIME_WITHOUT_READ:
                current_values = __build_null_values(node2keys)
                logger.alert(
                    "No values read for a long time, probably something is wrong with RF device"
                )
                iface.close()
                time.sleep(0.1)
                iface = __try_open(logger)
            elif current_values is not None:
                current_values = [current_values]
            # If complete and valid data values were received
            if current_values is not None:
                for values in current_values:
                    logger.debug(str(values))
                    t = values[0]
                    nodeId = values[1]
                    for conf in node2keys[str(nodeId)]:
                        last_data = conf["data"]
                        key = conf["key"]
                        name = conf["name"]
                        mul = conf.get("mul", 1.0)
                        add = conf.get("add", 0.0)
                        v = values[key]
                        if v is not None: v = (v + add) * mul
                        alert_below_th = conf.get("alert_below_threshold", None)
                        if alert_below_th is not None and v is not None and v < alert_below_th:
                            logger.alert(
                                "Value %f for nodeId %d key %d registered with name %s is below threshold %f",
                                float(v), nodeId, key, name,
                                float(alert_below_th))
                        rel_diff = Utils.compute_relative_difference(last_data, v)
                        if (rel_diff > conf.get("tolerance", DEFAULT_POWER_TOLERANCE)
                                or t - conf["when"] > MAX_TIME_BETWEEN_READINGS):
                            message = {'timestamp': t, 'data': v}
                            client.publish(topic.format(name, nodeId, key),
                                           json.dumps(message))
                            conf["data"] = v
                            conf["when"] = t
                    last_timestamp = t
        except:
            print "Unexpected error:", traceback.format_exc()
            logger.error("Unexpected error: %s", traceback.format_exc())
        time.sleep(0.05)
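
A minimal sketch of the throttling rule used in the loop above: a reading is
published only when it differs enough (relatively) from the last published
value, or when too much time has passed. The helper below assumes a plausible
definition of Utils.compute_relative_difference; the real one may differ.

MAX_TIME_BETWEEN_READINGS = 1800
DEFAULT_POWER_TOLERANCE = 0.0

def _relative_difference(last, current):
    # Assumed behavior: change relative to the last published value.
    return abs(current - last) / max(abs(last), 1e-6)

def should_publish(last_data, last_when, value, now,
                   tolerance=DEFAULT_POWER_TOLERANCE):
    return (_relative_difference(last_data, value) > tolerance
            or now - last_when > MAX_TIME_BETWEEN_READINGS)

# The sentinel set in start() (data=-10000.0, when=0.0) guarantees the first
# real reading is always published.
assert should_publish(-10000.0, 0.0, 230.0, 10.0)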
Example #2
def start():
    """Starts a thread which reads from RFM69Pi and publishes using MQTT."""
    global logger
    logger = LoggerClient.open("OpenEnergyMonitor")
    logger.info("Opening connection")

    global client
    client = Utils.getpahoclient(logger)

    global iface
    iface = __try_open(logger)

    # config is a dictionary with:
    # nodes     : [ { id, desc, name } ]
    # node2keys : { nodeId : [ { key, desc, name, ... } ] }
    # (an example of this shape is sketched right after this function)
    config = Utils.getconfig("open_energy_monitor", logger)
    nodes = config["nodes"]
    node2keys = config["node2keys"]
    for n in nodes:
        # We add a data field to check the relative difference between two
        # consecutive readings; if the difference is too small the message is
        # not published, reducing overhead and database size.
        for conf in node2keys[str(n["id"])]:
            conf["data"] = -10000.0
            conf["when"] = 0.0
    global is_running
    is_running = True
    thread = threading.Thread(target=__process,
                              args=(logger, client, iface, nodes, node2keys))
    thread.setDaemon(True)
    thread.start()
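
For reference, a configuration with the shape this start() expects might look
like the sketch below; the field names come from the code above, the concrete
values are invented.

# Hypothetical open_energy_monitor configuration.
example_config = {
    "nodes": [ { "id": 10, "desc": "house meter", "name": "main" } ],
    "node2keys": {
        "10": [ { "key": 2, "name": "grid", "mul": 1.0, "add": 0.0,
                  "tolerance": 0.05, "alert_below_threshold": 0.5 } ]
    }
}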
def start():
    """Opens connections with logger, InfluxDB and MQTT broker."""
    global logger
    global mqtt_client
    global house_data
    global config
    global influx_client
    logger = LoggerClient.open("InfluxDBHub")
    mqtt_client = Utils.getpahoclient(logger, __configure_mqtt)
    config = Utils.getconfig("influxdb", logger)
    influx_client = InfluxDBClient(config["host"], config["port"],
                                   config["user"], config["password"],
                                   config["database"])
    if not {"name": config["database"]} in influx_client.get_list_database():
        influx_client.create_database(config["database"])
    if not any([
            x["name"] == "raspimon_policy"
            for x in influx_client.get_list_retention_policies()
    ]):
        influx_client.create_retention_policy('raspimon_policy',
                                              config["retention_policy"],
                                              1,
                                              default=True)
    else:
        influx_client.alter_retention_policy(
            'raspimon_policy',
            duration=config["retention_policy"],
            replication=1,
            default=True)
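
The two membership tests above depend on the list-of-dicts shapes returned by
the influxdb-python client; a small illustration of the assumed shapes:

# get_list_database() is assumed to return entries like {"name": <db>}, so the
# test compares whole one-key dictionaries; retention policies carry a "name"
# key among other fields.
databases = [{"name": "_internal"}, {"name": "raspimon"}]
assert {"name": "raspimon"} in databases
policies = [{"name": "raspimon_policy", "duration": "520w", "default": True}]
assert any(p["name"] == "raspimon_policy" for p in policies)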
def start(mail_credentials_path=__mail_credentials_path,
          transport_string=__transport):
    """Starts the execution of the server.
    
    The first argument is a path to the `mail_credentials.json` file and the
    second argument is a ZeroMQ transport string to bind the server socket.

    """
    if not Scheduler.is_running():
        Utils.ntpcheck()

        ctx = zmq.Context.instance()
        s = ctx.socket(zmq.PULL)
        s.bind(transport_string)

        Utils.startup_wait()

        __process_message(
            mail_credentials_path, {
                "name": "MailLoggerServer",
                "level": str(__levels.ALERT),
                "schedule": str(__schedules.INSTANTANEOUSLY),
                "text": "Logging service STARTED",
                "datetime": datetime.datetime.now()
            })
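        # Sanity check (result discarded): fail early, before the scheduler
        # starts, if the credentials file is unreadable or not valid JSON.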
        mail_credentials = json.loads(open(mail_credentials_path).read())
        mail_credentials = None

        Scheduler.start()
        Scheduler.repeat_o_clock(3600 * 1000, __queue_handler,
                                 mail_credentials_path, "HOURLY",
                                 __hourly_queue)
        Scheduler.repeat_o_clock(3600 * 24 * 1000, __queue_handler,
                                 mail_credentials_path, "DAILY", __daily_queue)
        Scheduler.repeat_o_clock(3600 * 24 * 7 * 1000, __queue_handler,
                                 mail_credentials_path, "WEEKLY",
                                 __weekly_queue)

        print("Running server at ZMQ transport: " + transport_string)
        try:
            while True:
                msg = s.recv_pyobj()
                __process_message(mail_credentials_path, msg)
            raise Exception("Unexpected error (probably NTP related)")
        except:
            __queue_handler(mail_credentials_path, "HOURLY", __hourly_queue)
            __queue_handler(mail_credentials_path, "DAILY", __daily_queue)
            __queue_handler(mail_credentials_path, "WEEKLY", __weekly_queue)
            __process_message(
                mail_credentials_path, {
                    "name": "MailLoggerServer",
                    "level": "ALERT",
                    "schedule": "INSTANTANEOUSLY",
                    "text": "Logging service STOPPED",
                    "datetime": datetime.datetime.now()
                })
            print("Stopping server")
            Scheduler.stop()
            raise
def start():
    """Opens connections with logger, MongoDB and MQTT broker."""
    global logger
    global mqtt_client
    global house_data
    logger = LoggerClient.open("MongoDBHub")
    mqtt_client = Utils.getpahoclient(logger, __configure_mqtt)
    mongo_client = Utils.getmongoclient(logger)
    db = mongo_client["raspimon"]
    col = db["GVA2015_houses"]
    house_data = col.find_one({"raspi": raspi_mac})
    assert house_data is not None
    mongo_client.close()
def __queue_handler(mail_credentials_path, frequency, queue):
    """Traverses the given queue and concatenates by lines all message texts."""
    if not queue.empty():
        subject = __generate_subject(frequency)
        lines_list = []
        while not queue.empty():
            lines_list.append(queue.get()[1])
        msg = '\n'.join(lines_list)
        try:
            mail_credentials = json.loads(open(mail_credentials_path).read())
            Utils.sendmail(mail_credentials, subject, msg)
            mail_credentials = None
        except:
            print "Unexpected error:", traceback.format_exc()
            print("FATAL ERROR: irrecoverable information loss :(")
Example #9
def main():
    config  = Utils.getconfig("plugwise", None)
    stick   = config["stick"]
    pairing = config["pairing"]
    device  = "/dev/ttyUSB0"

    print
    print "**************************************************************"
    print "*                          Menu :                            *"
    print "*                                                            *"
    print "*  m  : Pair your Circle+ (Master) devices with USB stick    *"
    print "*  s  : Pair your Circle (Slave) devices with Circle+        *"
    print "*  q  : Exit                                                 *"
    print "*                                                            *"
    print "**************************************************************"
    print
    print "Enter a letter from the menu above :"
    arg = raw_input()
    print
    opts, args = getopt.getopt(sys.argv[1:], "m:s:q:",
                               ['master', 'slave', 'quit'])
    
    if arg == "m":
        for circle_plus in pairing.keys():
            PairCirclePlus(device, stick, circle_plus)
    elif arg == "s":
        for master, slaves in pairing.iteritems():
            pair_circles(slaves, device)
    elif arg == "q":
        sys.exit(0)
    else:
        print "Command error! Select a single letter from the menu above."
def __process_message(mail_credentials_path, msg):
    sched = msg["schedule"]
    txt = __generate_message_line(msg)

    if sched != str(__schedules.SILENTLY):
        sys.stderr.write(txt + "\n")

    if sched == str(__schedules.INSTANTANEOUSLY):
        subject = __generate_subject(__schedules.INSTANTANEOUSLY, msg["name"])
        try:
            mail_credentials = json.loads(open(mail_credentials_path).read())
            Utils.sendmail(mail_credentials, subject, txt)
            mail_credentials = None
        except:
            print "Unexpected error:", traceback.format_exc()

    elif sched != str(__schedules.SILENTLY):
        __schedule2queue[sched].put((msg["datetime"], txt))
def start():
    """Connects with logging server, loads plugwise network configuration and
    connects with MQTT broker."""
    global logger
    global client
    global config
    global device
    global circles_config
    global circles
    global mac2circle
    logger = LoggerClient.open("PlugwiseMonitor")
    if not verbose:
        logger.config(logger.levels.WARNING, logger.schedules.DAILY)
    config = Utils.getconfig("plugwise", logger)
    assert config is not None
    device = plugwise_api.Stick(logger, DEFAULT_SERIAL_PORT)

    # circles_config is a list of dictionaries with fields: name, mac, desc.
    # A state field is added in the next loop to track the relay value, so
    # messages are only sent on state transitions. The power1s and power8s
    # fields are used to check the relative difference in power, reducing
    # network overhead.
    circles_config = config["circles"]
    circles = []
    mac2circle = {}
    for circle_data in circles_config:
        mac = circle_data["mac"]
        circles.append(
            plugwise_api.Circle(
                logger, mac, device, {
                    "name": circle_data["name"],
                    "location": circle_data["desc"],
                    "always_on": "False",
                    "production": "True"
                }))
        mac2circle[mac] = circles[-1]
        circle_data["state"] = "NA"
        for v in OUTPUT_LIST:
            circle_data["power" + v["suffix"]] = -10000.0
            circle_data["when" + v["suffix"]] = 0.0

    client = Utils.getpahoclient(logger, __configure)
    client.loop_start()
def start():
    """Opens logger connection and loads its configuration from MongoDB and sends
    first message."""
    global logger
    global config
    global location_id
    global current_weather_url
    logger = LoggerClient.open("AEMETMonitor")
    config = Utils.getconfig("aemet", logger)
    location_id = config["location_id"]
    current_weather_url = config["current_weather_url"]
    publish()
def publish():
    """Publishes circle messages via MQTT."""
    try:
        # All circle messages are generated together, before any data is sent
        # via MQTT. This way the publishing overhead does not skew the
        # readings and all circles get similar timestamps.
        messages = []
        for i, c in enumerate(circles):
            config = circles_config[i]
            t = time.time()
            mac = config["mac"]
            name = config["name"]
            last_powers = [config["power" + x["suffix"]] for x in OUTPUT_LIST]
            last_state = config["state"]
            try:
                reading = c.get_power_usage()
                powers = [reading[x["key"]] for x in OUTPUT_LIST]
                state = c.get_info()['relay_state']
                alert_below_th = config.get("alert_below_threshold", None)
                for j, p in enumerate(powers):  # j avoids shadowing the outer i
                    p = max(0, p)
                    key = OUTPUT_LIST[j]["key"]
                    suffix = OUTPUT_LIST[j]["suffix"]
                    if alert_below_th is not None and p < alert_below_th:
                        logger.alert(
                            "Value %f %s for circle %s registered with name %s is below threshold %f",
                            float(p), suffix, mac, name, float(alert_below_th))
                    rel_diff = Utils.compute_relative_difference(last_powers[j], p)
                    if (rel_diff > config.get("tolerance", DEFAULT_POWER_TOLERANCE)
                            or t - config["when" + suffix] > MAX_TIME_BETWEEN_READINGS):
                        usage_message = {'timestamp': t, 'data': p}
                        messages.append((topic.format(name, "power" + suffix,
                                                      mac), usage_message))
                        config["power" + suffix] = p
                        config["when" + suffix] = t
                # check state transition before message is appended
                if state != last_state:
                    state_message = {'timestamp': t, 'data': state}
                    messages.append((topic.format(name, "state",
                                                  mac), state_message))
                    config["state"] = state  # track current state value
            except:
                print "Unexpected error:", traceback.format_exc()
                logger.info("Error happened while processing circles data: %s",
                            traceback.format_exc())
        for top, message in messages:
            client.publish(top, json.dumps(message))
    except:
        print "Unexpected error:", traceback.format_exc()
        logger.error("Error happened while processing circles data")
        raise
def stop():
    mongo_client = Utils.getmongoclient(logger)
    db = mongo_client["raspimon"]
    # close MQTT broker connection
    mqtt_client.disconnect()
    # force sending data to MongoDB
    __upload_all_data(db, __build_raspimon_documents, raspimon_message_queues)
    __upload_all_data(db, __build_forecast_documents, forecast_message_queues)
    if len(pending_documents) > 0: db.GVA2015_data.insert(pending_documents)
    # close rest of pending connections
    mongo_client.close()
    logger.close()
def publish():
    global tz
    tz = pytz.timezone("Europe/Madrid")
    try:
        client = Utils.getpahoclient(logger)
        __publish_daily_forecast(client)
        __publish_hourly_forecast(client)
        __publish_current_weather_status(client)
        client.disconnect()

    except:
        print "Unexpected error:", traceback.format_exc()
        logger.error("Unexpected error: %s", traceback.format_exc())
def __publish_data_of_day(day_str, ref_time):
    try:
        client = Utils.getpahoclient(logger, __configure)
    except:
        logger.error("Unable to connecto to MQTT broker")
        raise
    tomorrow_url = url.format(day_str)
    try:
        # http request
        response_string = urllib2.urlopen(tomorrow_url)
    except:
        logger.error("Unable to retrieve electricity prices")
        client.disconnect()
        raise
    try:
        response = json.load(response_string)
        pvpc = response['PVPC']
        # PVPC is an array of dictionaries where every dictionary is:
        # {"Dia":"13/11/2015","Hora":"00-01",
        # "GEN":"126,08","NOC":"75,81","VHC":"79,94",
        # "COFGEN":"0,000075326953000000","COFNOC":"0,000158674625000000",
        # "COFVHC":"0,000134974129000000","PMHGEN":"66,35","PMHNOC":"63,98",
        # "PMHVHC":"66,63","SAHGEN":"6,14","SAHNOC":"5,92","SAHVHC":"6,17",
        # "FOMGEN":"0,03","FOMNOC":"0,03","FOMVHC":"0,03","FOSGEN":"0,13",
        # "FOSNOC":"0,12","FOSVHC":"0,13","INTGEN":"2,46","INTNOC":"2,37",
        # "INTVHC":"2,47","PCAPGEN":"6,94","PCAPNOC":"1,16","PCAPVHC":"1,64",
        # "TEUGEN":"44,03","TEUNOC":"2,22","TEUVHC":"2,88"}

        # Looking at http://tarifaluzhora.es/ it seems that GEN is the main
        # electricity price, expressed in thousandths of a euro.

        # On the CEST-to-CET transition day 25 hourly values will be sent.

        keys = ['GEN', 'NOC', 'VHC']
        # for every hour data in pvpc
        for res in pvpc:
            # TODO: check day value
            hour_offset = int(res['Hora'].split('-')[0]) * 3600
            for k in keys:
                v = float(res[k].replace(',', '.'))  # replace commas by dots
                message = {'timestamp': ref_time + hour_offset, 'data': v}
                client.publish(topic.format(k), json.dumps(message))
        logger.info("Electricity price published")
    except:
        logger.info("Unable to publish electricity prices")
        client.disconnect()
        raise
    else:
        client.disconnect()
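
A worked example of the PVPC parsing above, using the record format shown in
the comment (values taken from that sample):

res = {"Dia": "13/11/2015", "Hora": "00-01", "GEN": "126,08"}
hour_offset = int(res["Hora"].split("-")[0]) * 3600  # first hour of the day
value = float(res["GEN"].replace(",", "."))          # comma decimal -> 126.08
assert (hour_offset, value) == (0, 126.08)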
def upload_data():
    try:
        mongo_client = Utils.getmongoclient(logger)
        db = mongo_client["raspimon"]
        t = time.time()

        raspimon_batch = __build_after_deadline_documents(
            __build_raspimon_documents, raspimon_message_queues, t)
        forecast_batch = __build_after_deadline_documents(
            __build_forecast_documents, forecast_message_queues, t)

        global pending_documents
        insert_batch = raspimon_batch + forecast_batch + pending_documents
        pending_documents = []

        try:
            if len(insert_batch) > 0: db.GVA2015_data.insert(insert_batch)
        except:
            pending_documents = insert_batch
            if len(pending_documents) > PENDING_DOCUMENTS_LENGTH_ERROR:
                logger.error(
                    "%d pending messages exceed the data-loss threshold %d; pending list reset to zero :S",
                    len(pending_documents), PENDING_DOCUMENTS_LENGTH_ERROR)
                pending_documents = []  # data loss here :'(
            elif len(pending_documents) > PENDING_DOCUMENTS_LENGTH_WARNING:
                logger.alert(
                    "%d pending messages exceed the warning threshold %d; data loss will occur at %d",
                    len(pending_documents), PENDING_DOCUMENTS_LENGTH_WARNING,
                    PENDING_DOCUMENTS_LENGTH_ERROR)
            else:
                logger.warning("Connection with database is failing")
            raise
        logger.info("Inserted %d documents", len(insert_batch))
        mongo_client.close()
    except:
        print "Unexpected error:", traceback.format_exc()
        logger.error("Unexpected error: %s", traceback.format_exc())
import re
import requests
import time
import traceback
import unicodedata
import urllib2

from xml.etree.ElementTree import parse

import raspi_mon_sys.aemet as aemet
import raspi_mon_sys.LoggerClient as LoggerClient
import raspi_mon_sys.Utils as Utils

NUM_DAYS = 4

weather_topic  = Utils.gettopic("aemet/{0}/{1}")
forecast_topic = Utils.gettopic("aemet/{0}/{1}/{2}", "forecast")

# These variables are loaded from the MongoDB server in the start() function.
tz = None
logger = None
current_weather_url = None
location_id = None
config = None

daily_forecast_info = (
    # more than one value
    ( "get_precipitacion", "period", "rain_prob" ),
    ( "get_cota_nieve", "period", "snow_level" ),
    ( "get_estado_cielo", "period", "sky" ),
    ( "get_viento", "period", "wind_direction", "wind_speed" ),
import datetime
import json
import pytz
import sys
import traceback
import time
import urllib2

sys.path.append("../")
import raspi_mon_sys.LoggerClient as LoggerClient
import raspi_mon_sys.Scheduler as Scheduler
import raspi_mon_sys.Utils as Utils

logger = None
topic = Utils.gettopic("electricity_prices/{0}")
url = 'http://www.esios.ree.es/Solicitar?fileName=PVPC_CURV_DD_{0}&fileType=txt&idioma=es'

def __on_connect(client, userdata, rc):
    logger.info("Connected to MQTT broker")

def __configure(client):
    client.on_connect = __on_connect

def __publish_data_of_day(day_str, ref_time):
    try:
        client = Utils.getpahoclient(logger, __configure)
    except:
        logger.error("Unable to connecto to MQTT broker")
        raise
    tomorrow_url = url.format(day_str)
import json
import Queue
import Scheduler
import socket
import threading
import time
import traceback
import sys
import zmq

import raspi_mon_sys.LoggerClient as LoggerClient
import raspi_mon_sys.Utils as Utils

__transport = LoggerClient.default_transport
__mail_credentials_path = "/etc/mail_credentials.json"
__mac_addr = Utils.getmac()

# Queues of pending messages.
__hourly_queue = Queue.PriorityQueue()
__daily_queue = Queue.PriorityQueue()
__weekly_queue = Queue.PriorityQueue()

# Enums.
__levels = LoggerClient.levels
__schedules = LoggerClient.schedules

# Mapping between schedule options and python queues.
__schedule2queue = {
    str(__schedules.HOURLY): __hourly_queue,
    str(__schedules.DAILY): __daily_queue,
    str(__schedules.WEEKLY): __weekly_queue
}
import Queue
import time
import threading
import traceback

import raspi_mon_sys.LoggerClient as LoggerClient
import raspi_mon_sys.Scheduler as Scheduler
import raspi_mon_sys.Utils as Utils

PENDING_DOCUMENTS_LENGTH_WARNING = 10000  # expected 40MB of messages for warning
PENDING_DOCUMENTS_LENGTH_ERROR = 30000  # expected 120MB of messages for data loss
PERIOD = 3600  # every 3600 seconds (1 hour) we send data to hour server

assert PENDING_DOCUMENTS_LENGTH_ERROR > PENDING_DOCUMENTS_LENGTH_WARNING

raspi_mac = Utils.getmac()
logger = None
mqtt_client = None
house_data = None
lock = threading.RLock()

pending_documents = []
raspimon_message_queues = {}
forecast_message_queues = {}


def __enqueue_raspimon_message(client, userdata, topic, message):
    timestamp = message["timestamp"]
    data = message["data"]
    basetime = int(timestamp // PERIOD * PERIOD)
    lock.acquire()
Example #26
def __try_call(logger, func, *args):
    try:
        func(*args)
        return True
    except:
        print "Unexpected error:",traceback.format_exc()
        logger.error("Unexpected error: %s", traceback.format_exc())
        return False

def __replace_vars(x, module):
    if type(x) is int or type(x) is float or not x.startswith("$this."): return x
    method = getattr(module, x.replace("$this.",""))
    return method
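
A quick usage sketch for __replace_vars: values starting with "$this." are
resolved to attributes of the given module, anything else passes through. The
module below is hypothetical, for illustration only.

import types

mod = types.ModuleType("example")
mod.publish = lambda: "called"

assert __replace_vars(60000, mod) == 60000                 # numbers pass through
assert __replace_vars("$this.publish", mod)() == "called"  # resolved attribute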

if __name__ == "__main__":
    Utils.ntpcheck()
    Utils.startup_wait()
    
    T1_MILISECOND  = 1
    T1_CENTISECOND = 10
    T1_DECISECOND  = 100
    T1_SECOND      = 1000
    T1_MINUTE      = 60000
    T1_HOUR        = 3600000
    T1_DAY         = 24 * T1_HOUR

    # Configure logger.
    logger = LoggerClient.open("MainMonitoringSystem")

    logger.info("Initializing main monitoring system")
Example #28
        # reducing this way the overhead and database size.
        for conf in node2keys[str(n["id"])]:
            conf["data"] = -10000.0
            conf["when"] = 0.0
    global is_running
    is_running = True
    thread = threading.Thread(target=__process,
                              args=(logger, client, iface, nodes, node2keys))
    thread.setDaemon(True)
    thread.start()


def stop():
    global is_running
    is_running = False
    time.sleep(0.1)
    client.disconnect()
    logger.close()
    iface.close()


if __name__ == "__main__":
    Utils.startup_wait()
    start()
    # Infinite loop.
    try:
        while True:
            time.sleep(60)
    except:
        stop()
"""
import datetime
import json
import Queue
import socket
import threading
import traceback
import sys
import zmq

import raspi_mon_sys.LoggerClient as LoggerClient
import raspi_mon_sys.Utils as Utils

__transport = LoggerClient.default_transport
__mac_addr  = Utils.getmac()

# Queues of pending messages.
__hourly_queue = Queue.PriorityQueue()
__daily_queue  = Queue.PriorityQueue()
__weekly_queue = Queue.PriorityQueue()

# Enums.
__levels    = LoggerClient.levels
__schedules = LoggerClient.schedules

def __generate_message_line(msg):
    """Given a message it generates a string to be shown at screen or mail."""
    time_str     = datetime.datetime.strftime(msg["datetime"], "%c")
    host_str     = socket.gethostname()
    name_str     = msg["name"]
import json
import time
import traceback

import raspi_mon_sys.LoggerClient as LoggerClient
import raspi_mon_sys.plugwise.api as plugwise_api
import raspi_mon_sys.Scheduler as Scheduler
import raspi_mon_sys.Utils as Utils

# Plugwise connection configuration.
MAX_TIME_BETWEEN_READINGS = 1800
DEFAULT_POWER_TOLERANCE = 0.0
DEFAULT_SERIAL_PORT = "/dev/ttyUSB0"  # USB port used by Plugwise receiver
OUTPUT_LIST = [{"key": 0, "suffix": "1s"}, {"key": 1, "suffix": "8s"}]

topic = Utils.gettopic("plugwise/{0}/{1}/{2}")
logger = None
client = None
config = None
device = None
circles_config = None
circles = None
mac2circle = None
verbose = False


def __on_connect(client, userdata, rc):
    # We will use this topic to send on/off commands to our circles.
    client.subscribe("plugwise/#")