def cache_tokens(self, content):
    """Cache OAuth tokens from a Netatmo token response and kick off a poll.

    `content` is the raw JSON token payload; expiry is recorded early (by ten
    poll intervals) so a refresh happens before the server-side expiry.
    """
    timer = anode.Log(logging.DEBUG).start()
    tokens = json.loads(content)
    self.token_access = tokens["access_token"]
    self.token_refresh = tokens["refresh_token"]
    self.token_expiry = self.get_time() + tokens["expires_in"] - self.config["poll_seconds"] * 10
    anode.Log(logging.INFO).log(
        "Plugin", "state",
        lambda: "[netatmo] access tokens cached, refresh [{}]".format(
            time.strftime('%Y-%m-%d %H:%M:%S %Z', time.localtime(self.token_expiry))))
    timer.log("Plugin", "timer", lambda: "[{}]".format(self.name), context=self.cache_tokens)
    self.poll()
def http_response(self, response, url, callback):
    """Route a successful HTTP response body to `callback`; log any failure.

    Only a 200 status is treated as success; a non-200 status is logged and
    the callback is not invoked.
    """
    if response.code != 200:
        anode.Log(logging.ERROR).log(
            "Plugin", "error",
            lambda: "[{}] error processing HTTP response [{}] with [{}]".format(
                self.name, url, response.code))
        return
    if callback is not None:
        treq.text_content(response).addCallbacks(callback)
def http_post(self, url, data, callback):
    """POST `data` to `url`, routing the response through http_response.

    Uses a shared connection pool when one is configured; transport errors
    are logged (the callback is never invoked on error).
    """
    connection_pool = self.config["pool"] if "pool" in self.config else None
    treq.post(url, data, timeout=HTTP_TIMEOUT, pool=connection_pool).addCallbacks(
        lambda response, url=url, callback=callback: self.http_response(response, url, callback),
        # Fixed: message previously said "HTTP GET" for a POST request
        errback=lambda error, url=url: anode.Log(logging.ERROR).log(
            "Plugin", "error",
            lambda: "[{}] error processing HTTP POST [{}] with [{}]".format(
                self.name, url, error.getErrorMessage())))
def http_put(self, url, data, callback=None):
    """PUT `data` to `url` via an agent that skips TLS verification.

    The response is routed through http_response; transport errors are logged
    (the callback is never invoked on error).
    """
    connection_pool = self.config["pool"] if "pool" in self.config else None
    HTTPClient(Agent(self.reactor, contextFactory=DoNotVerifySSLContextFactory())) \
        .put(url, data, timeout=HTTP_TIMEOUT, pool=connection_pool).addCallbacks(
            lambda response, url=url, callback=callback: self.http_response(response, url, callback),
            # Fixed: message previously said "HTTP GET" for a PUT request
            errback=lambda error, url=url: anode.Log(logging.ERROR).log(
                "Plugin", "error",
                lambda: "[{}] error processing HTTP PUT [{}] with [{}]".format(
                    self.name, url, error.getErrorMessage())))
def __init__(self, parent, name, config, reactor):
    # Hue bridge plugin: fetch the bridge's light groups once at startup.
    # On any request failure the plugin marks itself disabled and logs,
    # rather than raising out of the constructor.
    super(Hue, self).__init__(parent, name, config, reactor)
    self.disabled = False
    # Target scene name; set later, before lights_adjust uses it
    self.light_state = None
    try:
        # verify=False disables certificate verification (moot over http://,
        # but kept for bridge endpoints that redirect to https)
        self.groups = requests.get("http://" + BRIDGE_IP + "/api/" + BRIDGE_TOKEN + "/groups",
                                   verify=False).json()
    except RequestException as exception:
        self.disabled = True
        anode.Log(logging.ERROR).log("Plugin", "error",
                                     lambda: "[{}] error [{}] processing request, disabling plugin:\n"
                                     .format(self.name, exception), exception)
def datagramRequest(self):
    """Send an encrypted realtime-emeter query datagram to the Kasa device.

    Any failure (encryption or transport write) is logged and swallowed.
    """
    try:
        payload = KasaMeter.encrypt('{"emeter": {"get_realtime": {}}}', KasaMeter.ENCRYPT_KEY)
        self.transport.write(payload)
    except Exception as error:
        anode.Log(logging.ERROR).log(
            "Plugin", "state",
            lambda: "[kasa] write failed to [{}:{}:{}]".format(
                self.name, self.ip, KasaMeter.PORT), error)
def __init__(self, parent, name, config, reactor):
    """Netatmo plugin: load OAuth credentials from the environment.

    If any of the four NETATMO_* variables is missing the plugin marks
    itself disabled and logs instead of raising.
    """
    super(Netatmo, self).__init__(parent, name, config, reactor)
    self.disabled = False
    self.token_access = None
    self.token_refresh = None
    self.token_expiry = None
    try:
        # Same lookup order as before, so a missing later key still leaves
        # the earlier attributes assigned
        for attribute, variable in (
                ("netatmo_username", 'NETATMO_USERNAME'),
                ("netatmo_password", 'NETATMO_PASSWORD'),
                ("netatmo_client_id", 'NETATMO_CLIENT_ID'),
                ("netatmo_client_secret", 'NETATMO_CLIENT_SECRET')):
            setattr(self, attribute, os.environ[variable])
    except KeyError as key_error:
        self.disabled = True
        anode.Log(logging.ERROR).log(
            "Plugin", "error",
            lambda: "[{}] error getting Netatmo connection key [{}] from environment, disabling plugin"
            .format(self.name, key_error))
def connectionRefused(self):
    """Log that the Kasa datagram endpoint refused the connection."""
    message = "[kasa] connection refused to [{}:{}:{}]"
    anode.Log(logging.ERROR).log(
        "Plugin", "state",
        lambda: message.format(self.name, self.ip, KasaMeter.PORT))
def lights_adjust(self, content):
    # Reconcile actual Hue light state (the bridge /lights JSON in `content`)
    # against the desired scene in self.light_state, issuing one group action
    # per group that needs it, then push per-group power/energy datums.
    # Module-level LIGHT_STATE maps light id -> the scene name it was last
    # known to be in; LIGHT_STATES maps scene name -> {"state", "power"}.
    log_timer = anode.Log(logging.DEBUG).start()
    try:
        bin_timestamp = self.get_time()
        lights = json.loads(content)
        for group in self.groups:
            power = 0
            group_on = True
            group_adjust = False
            for light in self.groups[group]["lights"]:
                if lights[light]["state"]["on"] and lights[light]["state"]["reachable"]:
                    # Untracked lights, or lights matching the default scene,
                    # are (re)tracked as "default"
                    if light not in LIGHT_STATE or \
                            {k: lights[light]["state"][k] for k in ("ct", "bri")} == \
                            LIGHT_STATES["default"]["state"]:
                        LIGHT_STATE[light] = "default"
                    # Only mark for adjustment when the light is exactly in its
                    # tracked scene (avoids clobbering manual changes) and that
                    # scene differs from the target scene
                    if {k: lights[light]["state"][k] for k in ("ct", "bri")} == \
                            LIGHT_STATES[LIGHT_STATE[light]]["state"]:
                        if LIGHT_STATES[self.light_state]["state"]["ct"] != \
                                LIGHT_STATES[LIGHT_STATE[light]]["state"]["ct"] or \
                                LIGHT_STATES[self.light_state]["state"]["bri"] != \
                                LIGHT_STATES[LIGHT_STATE[light]]["state"]["bri"]:
                            LIGHT_STATE[light] = self.light_state
                            group_adjust = True
                    # NOTE(review): power is overwritten per light, not summed;
                    # combined with the whole-group multiply below this assumes
                    # uniform draw across the group — confirm intended
                    power = LIGHT_STATES[LIGHT_STATE[light]]["power"][lights[light]["modelid"]]
                else:
                    group_on = False
                    LIGHT_STATE.pop(light, None)
            if group_adjust:
                self.http_put(
                    "http://" + BRIDGE_IP + "/api/" + BRIDGE_TOKEN + "/groups/" + group + "/action",
                    json.dumps(LIGHT_STATES[self.light_state]["state"]), self.light_adjusted)
            power_second_bin = power * len(self.groups[group]["lights"]) if group_on else 0
            self.datum_push(
                "power_Dconsumption__electricity__" + self.groups[group]["name"].lower() + "_Dlights",
                "current", "point", power_second_bin, "W", 1,
                bin_timestamp, bin_timestamp, self.config["poll_seconds"], "second",
                data_bound_lower=0, data_derived_max=True, data_derived_min=True)
            # Roll the day's energy integral forward; discard a stale value
            # carried over from a previous day
            # NOTE(review): "1" is passed as a str here but as int elsewhere —
            # confirm datum_get tolerates both
            energy_consumption_day = self.datum_get(
                DATUM_QUEUE_LAST,
                "energy_Dconsumption__electricity__" + self.groups[group]["name"].lower() + "_Dlights",
                "integral", "mWh", "1", "day")
            if energy_consumption_day is not None and \
                    self.get_time_period(bin_timestamp, Plugin.get_seconds(1, "day")) != \
                    self.get_time_period(energy_consumption_day["bin_timestamp"],
                                         Plugin.get_seconds(1, "day")):
                energy_consumption_day = None
            energy_consumption_day = int((0 if energy_consumption_day is None
                                          else energy_consumption_day["data_value"]) +
                                         (float(power_second_bin) / (60 * 60) * 10000))
            self.datum_push(
                "energy_Dconsumption__electricity__" + self.groups[group]["name"].lower() + "_Dlights",
                "current", "integral", energy_consumption_day, "mWh", 10,
                bin_timestamp, bin_timestamp, 1, "day", data_bound_lower=0)
            energy_consumption_alltime_min = self.datum_get(
                DATUM_QUEUE_MIN,
                "energy_Dconsumption__electricity__" + self.groups[group]["name"].lower() + "_Dlights",
                "integral", "Wh", 1, "all_Dtime", 1, "day")
            energy_consumption_alltime_min = int((0 if energy_consumption_alltime_min is None
                                                  else energy_consumption_alltime_min["data_value"]))
            self.datum_push(
                "energy_Dconsumption__electricity__" + self.groups[group]["name"].lower() + "_Dlights",
                "current", "integral",
                int(energy_consumption_alltime_min + energy_consumption_day / 10000), "Wh", 1,
                bin_timestamp, bin_timestamp, 1, "all_Dtime",
                data_bound_lower=0, data_derived_min=True)
        self.publish()
    except Exception as exception:
        # NOTE(review): this message ends with "\n" but, unlike push_meter,
        # does not append the response content — confirm intentional
        anode.Log(logging.ERROR).log("Plugin", "error",
                                     lambda: "[{}] error [{}] processing response:\n"
                                     .format(self.name, exception), exception)
    log_timer.log("Plugin", "timer", lambda: "[{}]".format(self.name), context=self.lights_adjust)
def light_adjusted(self, content):
    """Debug-log the per-light success payload returned by a group update."""
    def build_message():
        successes = [entry["success"] for entry in json.loads(content)]
        return "[{}] updated light group state {}".format(self.name, json.dumps(successes))
    anode.Log(logging.DEBUG).log("Plugin", "state", build_message)
def _poll(self):
    # Produce solar energy-production forecasts (1-3 days out) by running the
    # pickled inter-day/intra-day models against weather forecast datums from
    # the darksky plugin, sun datums from davis, and production from fronius.
    log_timer = anode.Log(logging.DEBUG).start()
    try:
        bin_timestamp = self.get_time()
        model_day_inter = self.pickled_get(os.path.join(self.config["db_dir"], "amodel"),
                                           name="energyforecastinterday")
        model_day_intra = self.pickled_get(os.path.join(self.config["db_dir"], "amodel"),
                                           name="energyforecastintraday")
        # Only forecast when both models and the required source plugins exist
        if "energyforecastinterday" in model_day_inter and "energyforecastintraday" in model_day_intra and \
                APP_MODEL_ENERGYFORECAST_INTRADAY_PROD_VERSION in model_day_intra["energyforecastintraday"] and \
                self.anode.get_plugin("davis") is not None and \
                self.anode.get_plugin("darksky") is not None:
            # All datum reads below unscale data_value by data_scale, falling
            # back to None (or 0 where noted) when the datum is absent
            energy_production_today = self.anode.get_plugin("fronius").datum_get(
                DATUM_QUEUE_LAST, "energy_Dproduction__electricity__inverter", "integral", "Wh", 1, "day")
            energy_production_today = energy_production_today["data_value"] / energy_production_today["data_scale"] \
                if energy_production_today is not None else None
            sun_rise = self.anode.get_plugin("davis").datum_get(
                DATUM_QUEUE_LAST, "sun__season__rise", "epoch", "scalar", 1, "day")
            sun_rise = sun_rise["data_value"] / sun_rise["data_scale"] \
                if sun_rise is not None else None
            sun_set = self.anode.get_plugin("davis").datum_get(
                DATUM_QUEUE_LAST, "sun__season__set", "epoch", "scalar", 1, "day")
            sun_set = sun_set["data_value"] / sun_set["data_scale"] \
                if sun_set is not None else None
            sun_azimuth = self.anode.get_plugin("davis").datum_get(
                DATUM_QUEUE_LAST, "sun__season__azimuth", "point", "_PC2_PB0", 2, "second")
            sun_azimuth = sun_azimuth["data_value"] / sun_azimuth["data_scale"] \
                if sun_azimuth is not None else None
            sun_altitude = self.anode.get_plugin("davis").datum_get(
                DATUM_QUEUE_LAST, "sun__season__altitude", "point", "_PC2_PB0", 2, "second")
            sun_altitude = sun_altitude["data_value"] / sun_altitude["data_scale"] \
                if sun_altitude is not None else None
            current = int(time.time())
            # Percentage of today's daylight elapsed: 0 before sunrise, 100
            # after sunset, linear in between; 0 if sun times are unavailable
            sun_percentage = (0 if current < sun_rise else
                              (100 if current > sun_set else
                               ((current - sun_rise) / float(sun_set - sun_rise) * 100))) \
                if (sun_set is not None and sun_rise is not None and (sun_set - sun_rise) != 0) else 0
            for day in range(1, 4):
                temperature_forecast = self.anode.get_plugin("darksky").datum_get(
                    DATUM_QUEUE_MAX if day == 1 else DATUM_QUEUE_LAST,
                    "temperature__conditions__forecast", "point", "_PC2_PB0C", day, "day")
                temperature_forecast = temperature_forecast["data_value"] / temperature_forecast["data_scale"] \
                    if temperature_forecast is not None else None
                rain_forecast = self.anode.get_plugin("darksky").datum_get(
                    DATUM_QUEUE_MAX if day == 1 else DATUM_QUEUE_LAST,
                    "rain__water__day_Dforecast", "integral", "mm", day, "day_Dtime")
                rain_forecast = rain_forecast["data_value"] / rain_forecast["data_scale"] \
                    if rain_forecast is not None else 0
                humidity_forecast = self.anode.get_plugin("darksky").datum_get(
                    DATUM_QUEUE_MAX if day == 1 else DATUM_QUEUE_LAST,
                    "humidity__conditions__forecast", "mean", "_P25", day, "day")
                humidity_forecast = humidity_forecast["data_value"] / humidity_forecast["data_scale"] \
                    if humidity_forecast is not None else None
                wind_forecast = self.anode.get_plugin("darksky").datum_get(
                    DATUM_QUEUE_LAST, "wind__conditions__speed_Dforecast", "mean", "km_P2Fh", day, "day")
                wind_forecast = wind_forecast["data_value"] / wind_forecast["data_scale"] \
                    if wind_forecast is not None else None
                conditions_forecast = self.anode.get_plugin("darksky").datum_get(
                    DATUM_QUEUE_LAST, "description__conditions__forecast", "enumeration", "__", day, "day")
                conditions_forecast = conditions_forecast["data_string"] \
                    if conditions_forecast is not None else None
                # Feature row for the inter-day model; keys mirror datum ids
                model_features_dict = {
                    "datum__bin__date":
                        datetime.datetime.fromtimestamp(bin_timestamp + (86400 * (day - 1)))
                        .strftime('%Y/%m/%d'),
                    "energy_Dproduction__electricity__inverter": 0,
                    "temperature__conditions__forecast": temperature_forecast,
                    "rain__water__day_Dforecast": rain_forecast,
                    "humidity__conditions__forecast": humidity_forecast,
                    "wind__conditions__speed_Dforecast": wind_forecast,
                    "sun__season__rise": sun_rise,
                    "sun__season__set": sun_set,
                    "sun__season__azimuth": sun_azimuth,
                    "sun__season__altitude": sun_altitude,
                    "description__conditions__forecast": conditions_forecast
                }
                model_features = pandas.DataFrame([model_features_dict]).apply(
                    pandas.to_numeric, errors='ignore')
                for model_version in model_day_inter["energyforecastinterday"]:
                    if model_version >= APP_MODEL_ENERGYFORECAST_INTERDAY_PROD_VERSION:
                        energy_production_forecast = 0
                        # Non-production model versions get a "_D<version>"
                        # suffix in the published datum id
                        model_classifier = "" if model_version == APP_MODEL_ENERGYFORECAST_INTERDAY_PROD_VERSION \
                            else ("_D" + model_version)
                        # Re-run the inter-day model unless most of today's
                        # daylight (>65%) has passed and a forecast exists
                        if day > 1 or sun_percentage <= 65 or \
                                self.datum_get(DATUM_QUEUE_LAST,
                                               "energy_Dproduction__electricity__forecast" + model_classifier +
                                               "__inverter", "high", "Wh", day, "day") is None:
                            model = model_day_inter["energyforecastinterday"][model_version][1]
                            try:
                                # Two-stage call: feature engineering, then prediction
                                energy_production_forecast = model['execute'](
                                    model=model,
                                    features=model['execute'](features=model_features, engineering=True),
                                    prediction=True)[0]
                            except Exception as exception:
                                anode.Log(logging.ERROR).log(
                                    "Plugin", "error",
                                    lambda: "[{}] error [{}] executing model [{}] with features {}".format(
                                        self.name, exception, model_version, model_features_dict), exception)
                            self.datum_push(
                                "energy_Dproduction__electricity__forecast" + model_classifier + "__inverter",
                                "forecast", "high" if day == 1 else "integral",
                                self.datum_value(energy_production_forecast, factor=10), "Wh", 10,
                                bin_timestamp, bin_timestamp, day, "day",
                                asystem_version=model_day_inter["energyforecastinterday"][model_version][0],
                                data_version=model_version, data_bound_lower=0)
                        if day == 1:
                            # Intra-day model scales today's "high" forecast by
                            # daylight progress
                            model = model_day_intra["energyforecastintraday"][
                                APP_MODEL_ENERGYFORECAST_INTRADAY_PROD_VERSION][1]
                            energy_production_forecast = self.datum_get(
                                DATUM_QUEUE_LAST,
                                "energy_Dproduction__electricity__forecast" + model_classifier + "__inverter",
                                "high", "Wh", day, "day")
                            energy_production_forecast = energy_production_forecast["data_value"] / \
                                energy_production_forecast["data_scale"] \
                                if energy_production_forecast is not None else 0
                            energy_production_forecast_scaled = 0
                            energy_production_forecast_scaled_features_dict = \
                                {"energy_Dproduction__electricity__forecast_Ddaylight_Dinverter":
                                    int(sun_percentage * 10)}
                            try:
                                energy_production_forecast_scaled = energy_production_forecast * model['execute'](
                                    model=model,
                                    features=pandas.DataFrame([energy_production_forecast_scaled_features_dict])
                                    .apply(pandas.to_numeric, errors='ignore'),
                                    prediction=True).iloc[0][0].item()
                            except Exception as exception:
                                anode.Log(logging.ERROR).log(
                                    "Plugin", "error",
                                    lambda: "[{}] error [{}] executing model [{}] with features {}".format(
                                        "energyforecastintraday", exception, model_version,
                                        energy_production_forecast_scaled_features_dict), exception)
                            self.datum_push(
                                "energy_Dproduction__electricity__forecast" + model_classifier + "__inverter",
                                "forecast", "integral",
                                self.datum_value(energy_production_forecast_scaled, factor=10), "Wh", 10,
                                bin_timestamp, bin_timestamp, day, "day",
                                asystem_version=model_day_inter["energyforecastinterday"][model_version][0],
                                data_version=model_version, data_bound_lower=0)
                            if model_classifier == "":
                                # Daylight-progress datum only published for the
                                # production model version
                                self.datum_push(
                                    "energy_Dproduction__electricity__forecast_Ddaylight_Dinverter",
                                    "forecast", "point",
                                    self.datum_value(sun_percentage, factor=10), "_P25", 10,
                                    bin_timestamp, bin_timestamp, day, "day",
                                    asystem_version=model_day_inter["energyforecastinterday"][model_version][0],
                                    data_version=model_version, data_bound_lower=0, data_bound_upper=100)
                            # Forecast-vs-actual percentage; weighted via
                            # norm.cdf(sun_percentage, 40, 15) — presumably to
                            # trust actuals more as the day progresses
                            energy_production_forecast_actual = 100 if energy_production_today is not None else 0
                            if energy_production_today is not None and energy_production_today != 0:
                                energy_production_forecast_actual = int(
                                    ((energy_production_forecast_scaled - energy_production_today) *
                                     norm.cdf(sun_percentage, 40, 15) + energy_production_today) /
                                    energy_production_today * 100)
                            self.datum_push(
                                "energy_Dproduction__electricity__forecast_Dactual" + model_classifier +
                                "__inverter", "forecast", "point",
                                self.datum_value(energy_production_forecast_actual), "_P25", 1,
                                bin_timestamp, bin_timestamp, day, "day",
                                asystem_version=model_day_inter["energyforecastinterday"][model_version][0],
                                data_version=model_version, data_bound_lower=0, data_derived_max=True)
        self.publish()
    except Exception as exception:
        anode.Log(logging.ERROR).log("Plugin", "error",
                                     lambda: "[{}] error [{}] processing model"
                                     .format(self.name, exception), exception)
    log_timer.log("Plugin", "timer", lambda: "[{}]".format(self.name), context=self._poll)
def push_meter(self, content):
    # Fronius smart-meter push handler: derives grid import/export, inverter
    # self-consumption and tariff-window (shoulder/peak/off-peak) energy
    # datums from the meter's cumulative counters, plus daily dollar figures.
    # Day figures are deltas from the day's minimum of the all-time counter;
    # tariff windows snapshot the counter at each window's start hour.
    log_timer = anode.Log(logging.DEBUG).start()
    try:
        dict_content = json.loads(content, parse_float=Decimal)
        bin_timestamp = self.get_time()
        data_timestamp = int(
            calendar.timegm(
                dateutil.parser.parse(
                    dict_content["Head"]["Timestamp"]).timetuple()))
        # Cumulative export counter (EnergyReal_WAC_Minus_Absolute)
        energy_export_grid_alltime = self.datum_value(
            dict_content, ["Body", "Data", "0", "EnergyReal_WAC_Minus_Absolute"], factor=10)
        self.datum_push("energy_Dexport__electricity__grid", "current", "integral",
                        energy_export_grid_alltime, "Wh", 10, data_timestamp, bin_timestamp, 1, "all_Dtime",
                        data_bound_lower=0, data_derived_min=True)
        energy_export_grid_alltime_min = self.datum_get(
            DATUM_QUEUE_MIN, "energy_Dexport__electricity__grid", "integral", "Wh", 1, "all_Dtime", 1, "day")
        energy_export_grid_day = (energy_export_grid_alltime - energy_export_grid_alltime_min["data_value"]) \
            if energy_export_grid_alltime_min is not None else 0
        self.datum_push("energy_Dexport__electricity__grid", "current", "integral",
                        energy_export_grid_day, "Wh", 10, data_timestamp, bin_timestamp, 1, "day",
                        data_bound_lower=0)
        # NOTE(review): "1" is passed as a str here but as int elsewhere —
        # confirm datum_get tolerates both
        energy_production_inverter_alltime_last = self.datum_get(
            DATUM_QUEUE_LAST, "energy_Dproduction__electricity__inverter", "integral", "Wh", "1", "all_Dtime")
        energy_production_inverter_alltime_min = self.datum_get(
            DATUM_QUEUE_MIN, "energy_Dproduction__electricity__inverter", "integral", "Wh", 1,
            "all_Dtime", 1, "day")
        energy_production_inverter_day = (energy_production_inverter_alltime_last["data_value"] -
                                          energy_production_inverter_alltime_min["data_value"]) \
            if (energy_production_inverter_alltime_last is not None and
                energy_production_inverter_alltime_min is not None) else 0
        # Self-consumed inverter energy = produced - exported
        energy_consumption_inverter_day = energy_production_inverter_day - energy_export_grid_day
        self.datum_push("energy_Dconsumption__electricity__inverter", "current", "integral",
                        energy_consumption_inverter_day, "Wh", 10, data_timestamp, bin_timestamp, 1, "day",
                        data_bound_lower=0)
        # Cumulative import counter (EnergyReal_WAC_Plus_Absolute)
        energy_consumption_grid_alltime = self.datum_value(
            dict_content, ["Body", "Data", "0", "EnergyReal_WAC_Plus_Absolute"], factor=10)
        self.datum_push("energy_Dconsumption__electricity__grid", "current", "integral",
                        energy_consumption_grid_alltime, "Wh", 10, data_timestamp, bin_timestamp, 1, "all_Dtime",
                        data_bound_lower=0, data_derived_min=True)
        energy_consumption_grid_min = self.datum_get(
            DATUM_QUEUE_MIN, "energy_Dconsumption__electricity__grid", "integral", "Wh", 1,
            "all_Dtime", 1, "day")
        energy_consumption_grid_day = (energy_consumption_grid_alltime -
                                       energy_consumption_grid_min["data_value"]) \
            if energy_consumption_grid_min is not None else 0
        self.datum_push("energy_Dconsumption__electricity__grid", "current", "integral",
                        energy_consumption_grid_day, "Wh", 10, data_timestamp, bin_timestamp, 1, "day",
                        data_bound_lower=0)
        # --- Shoulder window: snapshot counters once per day at window start;
        # a snapshot from a previous day is treated as absent
        # NOTE(review): the *_inverter LAST datums fetched here and below are
        # never read afterwards — confirm dead code
        energy_consumption_shoulder_inverter = self.datum_get(
            DATUM_QUEUE_LAST, "energy_Dconsumption__electricity__shoulder_Dinverter",
            "integral", "Wh", 1, "all_Dtime")
        energy_consumption_shoulder_grid = self.datum_get(
            DATUM_QUEUE_LAST, "energy_Dconsumption__electricity__shoulder_Dgrid",
            "integral", "Wh", 1, "all_Dtime")
        if energy_consumption_shoulder_grid is not None and \
                self.get_time_period(energy_consumption_shoulder_grid["data_timestamp"],
                                     Plugin.get_seconds(1, "day")) != \
                self.get_time_period(bin_timestamp, Plugin.get_seconds(1, "day")):
            energy_consumption_shoulder_grid = None
        if energy_consumption_shoulder_grid is None and bin_timestamp >= \
                (self.get_time_period(bin_timestamp, Plugin.get_seconds(1, "day")) +
                 HOUR_SHOULDER_START * 60 * 60):
            self.datum_push(
                "energy_Dconsumption__electricity__shoulder_Dgrid", "current", "integral",
                energy_consumption_grid_alltime, "Wh", 10, bin_timestamp, bin_timestamp, 1, "all_Dtime",
                data_bound_lower=0)
            self.datum_push(
                "energy_Dconsumption__electricity__shoulder_Dinverter", "current", "integral",
                energy_production_inverter_day - energy_export_grid_day, "Wh", 10,
                bin_timestamp, bin_timestamp, 1, "all_Dtime", data_bound_lower=0)
        # --- Peak window: same snapshot pattern
        energy_consumption_peak_inverter = self.datum_get(
            DATUM_QUEUE_LAST, "energy_Dconsumption__electricity__peak_Dinverter",
            "integral", "Wh", 1, "all_Dtime")
        energy_consumption_peak_grid = self.datum_get(
            DATUM_QUEUE_LAST, "energy_Dconsumption__electricity__peak_Dgrid",
            "integral", "Wh", 1, "all_Dtime")
        if energy_consumption_peak_grid is not None and \
                self.get_time_period(energy_consumption_peak_grid["data_timestamp"],
                                     Plugin.get_seconds(1, "day")) != \
                self.get_time_period(bin_timestamp, Plugin.get_seconds(1, "day")):
            energy_consumption_peak_grid = None
        if energy_consumption_peak_grid is None and bin_timestamp >= \
                (self.get_time_period(bin_timestamp, Plugin.get_seconds(1, "day")) +
                 HOUR_PEAK_START * 60 * 60):
            self.datum_push("energy_Dconsumption__electricity__peak_Dgrid", "current", "integral",
                            energy_consumption_grid_alltime, "Wh", 10, bin_timestamp, bin_timestamp, 1,
                            "all_Dtime", data_bound_lower=0)
            self.datum_push(
                "energy_Dconsumption__electricity__peak_Dinverter", "current", "integral",
                energy_production_inverter_day - energy_export_grid_day, "Wh", 10,
                bin_timestamp, bin_timestamp, 1, "all_Dtime", data_bound_lower=0)
        # --- Off-peak (evening) window: same snapshot pattern
        energy_consumption_offpeak_inverter = self.datum_get(
            DATUM_QUEUE_LAST, "energy_Dconsumption__electricity__off_Dpeak_Dinverter",
            "integral", "Wh", 1, "all_Dtime")
        energy_consumption_offpeak_grid = self.datum_get(
            DATUM_QUEUE_LAST, "energy_Dconsumption__electricity__off_Dpeak_Dgrid",
            "integral", "Wh", 1, "all_Dtime")
        if energy_consumption_offpeak_grid is not None and \
                self.get_time_period(energy_consumption_offpeak_grid["data_timestamp"],
                                     Plugin.get_seconds(1, "day")) != \
                self.get_time_period(bin_timestamp, Plugin.get_seconds(1, "day")):
            energy_consumption_offpeak_grid = None
        if energy_consumption_offpeak_grid is None and bin_timestamp >= \
                (self.get_time_period(bin_timestamp, Plugin.get_seconds(1, "day")) +
                 HOUR_OFFPEAK_START * 60 * 60):
            self.datum_push(
                "energy_Dconsumption__electricity__off_Dpeak_Dgrid", "current", "integral",
                energy_consumption_grid_alltime, "Wh", 10, bin_timestamp, bin_timestamp, 1, "all_Dtime",
                data_bound_lower=0)
            self.datum_push(
                "energy_Dconsumption__electricity__off_Dpeak_Dinverter", "current", "integral",
                energy_production_inverter_day - energy_export_grid_day, "Wh", 10,
                bin_timestamp, bin_timestamp, 1, "all_Dtime", data_bound_lower=0)
        # --- Per-window day totals = deltas between consecutive window snapshots
        energy_consumption_offpeak_morning_grid_day = 0
        if energy_consumption_shoulder_grid is None:
            energy_consumption_offpeak_morning_grid_day = (energy_consumption_grid_alltime -
                                                           energy_consumption_grid_min["data_value"]) \
                if energy_consumption_grid_min is not None else 0
        else:
            energy_consumption_offpeak_morning_grid_day = \
                (energy_consumption_shoulder_grid["data_value"] - energy_consumption_grid_min["data_value"]) \
                if energy_consumption_grid_min is not None else 0
        self.datum_push(
            "energy_Dconsumption__electricity__off_Dpeak_Dmorning_Dgrid", "current", "integral",
            energy_consumption_offpeak_morning_grid_day, "Wh", 10, data_timestamp, bin_timestamp, 1, "day",
            data_bound_lower=0)
        energy_consumption_shoulder_grid_day = 0
        if energy_consumption_peak_grid is None and energy_consumption_shoulder_grid is not None:
            energy_consumption_shoulder_grid_day = energy_consumption_grid_alltime - \
                energy_consumption_shoulder_grid["data_value"]
        elif energy_consumption_peak_grid is not None and energy_consumption_shoulder_grid is not None:
            energy_consumption_shoulder_grid_day = energy_consumption_peak_grid["data_value"] - \
                energy_consumption_shoulder_grid["data_value"]
        self.datum_push("energy_Dconsumption__electricity__shoulder_Dgrid", "current", "integral",
                        energy_consumption_shoulder_grid_day, "Wh", 10, data_timestamp, bin_timestamp, 1,
                        "day", data_bound_lower=0)
        energy_consumption_peak_grid_day = 0
        if energy_consumption_offpeak_grid is None and energy_consumption_peak_grid is not None:
            energy_consumption_peak_grid_day = energy_consumption_grid_alltime - \
                energy_consumption_peak_grid["data_value"]
        elif energy_consumption_offpeak_grid is not None and energy_consumption_peak_grid is not None:
            energy_consumption_peak_grid_day = energy_consumption_offpeak_grid["data_value"] - \
                energy_consumption_peak_grid["data_value"]
        self.datum_push("energy_Dconsumption__electricity__peak_Dgrid", "current", "integral",
                        energy_consumption_peak_grid_day, "Wh", 10, data_timestamp, bin_timestamp, 1, "day",
                        data_bound_lower=0)
        energy_consumption_offpeak_evening_grid_day = 0
        if energy_consumption_offpeak_grid is not None:
            energy_consumption_offpeak_evening_grid_day = energy_consumption_grid_alltime - \
                energy_consumption_offpeak_grid["data_value"]
        self.datum_push(
            "energy_Dconsumption__electricity__off_Dpeak_Devening_Dgrid", "current", "integral",
            energy_consumption_offpeak_evening_grid_day, "Wh", 10, data_timestamp, bin_timestamp, 1, "day",
            data_bound_lower=0)
        energy_consumption_grid_offpeak_day = energy_consumption_offpeak_morning_grid_day + \
            energy_consumption_offpeak_evening_grid_day
        self.datum_push(
            "energy_Dconsumption__electricity__off_Dpeak_Dgrid", "current", "integral",
            energy_consumption_grid_offpeak_day, "Wh", 10, data_timestamp, bin_timestamp, 1, "day",
            data_bound_lower=0)
        # --- Daily dollar figures (unit token "_P24", scale 100 — presumably
        # dollars stored as cents; confirm against datum unit decoding)
        self.datum_push("energy_Dexport__electricity__yield", "current", "integral",
                        self.datum_value(energy_export_grid_day * Decimal(TARIFF_FEEDIN), factor=100),
                        "_P24", 100, data_timestamp, bin_timestamp, 1, "day", data_bound_lower=0)
        self.datum_push(
            "energy_Dconsumption__electricity__savings", "current", "integral",
            self.datum_value(
                (energy_production_inverter_day - energy_export_grid_day) * Decimal(TARIFF_FLAT),
                factor=100),
            "_P24", 100, data_timestamp, bin_timestamp, 1, "day", data_bound_lower=0)
        self.datum_push(
            "energy_Dconsumption__electricity__cost_Dhome", "current", "integral",
            self.datum_value(
                Decimal(SUPPLY_CHARGE) + energy_consumption_grid_day * Decimal(TARIFF_FLAT), factor=100),
            "_P24", 100, data_timestamp, bin_timestamp, 1, "day", data_bound_lower=0)
        self.datum_push(
            "energy_Dconsumption__electricity__cost_Dsmart_Dhome", "current", "integral",
            self.datum_value(
                Decimal(SUPPLY_CHARGE) +
                energy_consumption_shoulder_grid_day * Decimal(TARIFF_SHOULDER) +
                # Peak rate applies on weekdays only; weekend peak hours are
                # billed at the shoulder rate
                energy_consumption_peak_grid_day * Decimal(TARIFF_PEAK if datetime.datetime.fromtimestamp(
                    bin_timestamp).weekday() < 5 else TARIFF_SHOULDER) +
                energy_consumption_grid_offpeak_day * Decimal(TARIFF_OFFPEAK), factor=100),
            "_P24", 100, data_timestamp, bin_timestamp, 1, "day", data_bound_lower=0)
        self.publish()
    except Exception as exception:
        anode.Log(logging.ERROR).log(
            "Plugin", "error",
            lambda: "[{}] error [{}] processing response:\n{}".format(
                self.name, exception, content), exception)
    log_timer.log("Plugin", "timer", lambda: "[{}]".format(self.name), context=self.push_meter)
def _push(self, content, targets):
    # weewx (Davis station) push handler: maps the LOOP "packet" and archive
    # "record" sections of the JSON payload onto datums, converting the
    # imperial source units (degF, inHg, mph, inches, feet) to metric.
    # NOTE(review): `targets` is unused in this body — confirm required by
    # the caller's interface
    # noinspection PyBroadException
    try:
        dict_content = json.loads(content, parse_float=Decimal)
        bin_timestamp = self.get_time()
        if "packet" in dict_content:
            bin_unit = "second"
            bin_width = dict_content["packet"]["interval"]
            data_timestamp = dict_content["packet"]["dateTime"]
            # degF -> degC, then an extra -1 offset
            # NOTE(review): the -1 looks like a sensor calibration fudge — confirm
            self.datum_push(
                "temperature__conditions__utility", "current", "point",
                None if self.datum_value(dict_content["packet"], ["inTemp"]) is None else self.datum_value(
                    (dict_content["packet"]["inTemp"] - 32) * 5 / 9 - 1, factor=10),
                "_PC2_PB0C", 10, data_timestamp, bin_timestamp, bin_width, bin_unit,
                data_derived_max=True, data_derived_min=True)
            self.datum_push("humidity__conditions__utility", "current", "point",
                            self.datum_value(dict_content["packet"], ["inHumidity"]), "_P25", 1,
                            data_timestamp, bin_timestamp, bin_width, bin_unit,
                            data_bound_upper=100, data_bound_lower=0,
                            data_derived_max=True, data_derived_min=True)
            self.datum_push("sun__season__azimuth", "current", "point",
                            self.datum_value(dict_content["packet"], ["sunaz"]), "_PC2_PB0", 1,
                            data_timestamp, bin_timestamp, bin_width, bin_unit,
                            data_derived_max=True, data_derived_min=True)
            self.datum_push("sun__season__altitude", "current", "point",
                            self.datum_value(dict_content["packet"], ["sunalt"]), "_PC2_PB0", 1,
                            data_timestamp, bin_timestamp, bin_width, bin_unit,
                            data_derived_max=True, data_derived_min=True)
            self.datum_push(
                "sun__season__rise", "current", "epoch",
                self.datum_value(dict_content["packet"], ["sunrise"]), "scalar", 1,
                data_timestamp, bin_timestamp, 1, "day")
            self.datum_push(
                "sun__season__set", "current", "epoch",
                self.datum_value(dict_content["packet"], ["sunset"]), "scalar", 1,
                data_timestamp, bin_timestamp, 1, "day")
            self.datum_push(
                "temperature__conditions__roof", "current", "point",
                None if self.datum_value(dict_content["packet"], ["outTemp"]) is None else self.datum_value(
                    (dict_content["packet"]["outTemp"] - 32) * 5 / 9, factor=10),
                "_PC2_PB0C", 10, data_timestamp, bin_timestamp, bin_width, bin_unit,
                data_derived_max=True, data_derived_min=True)
            # TODO: Disable wind properties since weewx doesnt seem to report them any more,
            # or at the least they are None when 0
            # self.datum_push(
            #     "wind__conditions__speed",
            #     "current", "point",
            #     None if self.datum_value(dict_content["packet"], ["windSpeed"]) is None else self.datum_value(
            #         dict_content["packet"]["windSpeed"] * Decimal(1.60934)),
            #     "km_P2Fh",
            #     1,
            #     data_timestamp,
            #     bin_timestamp,
            #     bin_width,
            #     bin_unit,
            #     data_bound_lower=0,
            #     data_derived_max=True,
            #     data_derived_min=True
            # )
            # self.datum_push(
            #     "wind__conditions__bearing",
            #     "current", "point",
            #     self.datum_value(dict_content["packet"], ["windDir"]),
            #     "_PC2_PB0",
            #     1,
            #     data_timestamp,
            #     bin_timestamp,
            #     bin_width,
            #     bin_unit,
            #     data_bound_lower=0,
            #     data_derived_max=True,
            #     data_derived_min=True
            # )
            # self.datum_push(
            #     "wind__conditions__chill",
            #     "current", "point",
            #     None if self.datum_value(dict_content["packet"], ["windchill"]) is None else self.datum_value(
            #         (dict_content["packet"]["windchill"] - 32) * 5 / 9, factor=10),
            #     "_PC2_PB0C",
            #     10,
            #     data_timestamp,
            #     bin_timestamp,
            #     bin_width,
            #     bin_unit,
            #     data_derived_max=True,
            #     data_derived_min=True
            # )
            # mph -> km/h
            self.datum_push(
                "wind__conditions__gust_Dspeed", "current", "point",
                None if self.datum_value(dict_content["packet"], ["windGust"]) is None else
                self.datum_value(dict_content["packet"]["windGust"] * Decimal(1.60934)),
                "km_P2Fh", 1, data_timestamp, bin_timestamp, bin_width, bin_unit,
                data_bound_lower=0, data_derived_max=True, data_derived_min=True)
            self.datum_push("wind__conditions__gust_Dbearing", "current", "point",
                            self.datum_value(dict_content["packet"], ["windGustDir"]), "_PC2_PB0", 1,
                            data_timestamp, bin_timestamp, bin_width, bin_unit,
                            data_bound_lower=0, data_derived_max=True, data_derived_min=True)
            # inHg -> mbar
            self.datum_push(
                "pressure__conditions__roof", "current", "point",
                None if self.datum_value(dict_content["packet"], ["barometer"]) is None else
                self.datum_value(dict_content["packet"]["barometer"] * Decimal(33.8639)),
                "mbar", 1, data_timestamp, bin_timestamp, bin_width, bin_unit,
                data_bound_lower=0, data_derived_max=True, data_derived_min=True)
            self.datum_push("heat_Dindex__conditions__roof", "current", "point",
                            self.datum_value(dict_content["packet"], ["heatindex"]), "scalar", 1,
                            data_timestamp, bin_timestamp, bin_width, bin_unit,
                            data_bound_lower=0, data_derived_max=True, data_derived_min=True)
            self.datum_push("humidity__conditions__roof", "current", "point",
                            self.datum_value(dict_content["packet"], ["outHumidity"]), "_P25", 1,
                            data_timestamp, bin_timestamp, bin_width, bin_unit,
                            data_bound_upper=100, data_bound_lower=0,
                            data_derived_max=True, data_derived_min=True)
            # feet -> metres
            self.datum_push(
                "cloud__season__base", "current", "point",
                None if self.datum_value(dict_content["packet"], ["cloudbase"]) is None else
                self.datum_value(dict_content["packet"]["cloudbase"] * Decimal(0.3048)),
                "m", 1, data_timestamp, bin_timestamp, bin_width, bin_unit,
                data_derived_max=True, data_derived_min=True)
            self.datum_push(
                "dew_Dpoint__conditions__roof", "current", "point",
                None if self.datum_value(dict_content["packet"], ["dewpoint"]) is None else self.datum_value(
                    (dict_content["packet"]["dewpoint"] - 32) * 5 / 9, factor=10),
                "_PC2_PB0C", 10, data_timestamp, bin_timestamp, bin_width, bin_unit,
                data_derived_max=True, data_derived_min=True)
            self.datum_push(
                "dew_Dpoint__conditions__utility", "current", "point",
                None if self.datum_value(dict_content["packet"], ["inDewpoint"]) is None else self.datum_value(
                    (dict_content["packet"]["inDewpoint"] - 32) * 5 / 9, factor=10),
                "_PC2_PB0C", 10, data_timestamp, bin_timestamp, bin_width, bin_unit,
                data_derived_max=True, data_derived_min=True)
            # inches -> cm; day rain = month rain minus the day's minimum
            rain_conditions_roof_month = None if self.datum_value(dict_content["packet"],
                                                                  ["monthRain"]) is None else \
                self.datum_value(dict_content["packet"]["monthRain"] * Decimal(2.54), factor=100)
            self.datum_push("rain__water__month", "current", "integral",
                            rain_conditions_roof_month, "cm", 100, data_timestamp, bin_timestamp, 1, "month",
                            data_bound_lower=0, data_derived_min=True)
            rain_conditions_roof_month_min = \
                self.datum_get(DATUM_QUEUE_MIN, "rain__water__month", "integral", "cm", 1, "month", 1, "day")
            # NOTE(review): if rain_conditions_roof_month is None while a month
            # minimum exists, this subtraction raises and is swallowed by the
            # outer except — confirm acceptable
            rain_conditions_roof_day = rain_conditions_roof_month - rain_conditions_roof_month_min["data_value"] \
                if rain_conditions_roof_month_min is not None else 0
            self.datum_push("rain__water__day", "current", "integral",
                            rain_conditions_roof_day * 10, "mm", 100, data_timestamp, bin_timestamp, 1, "day",
                            data_bound_lower=0)
            # inches -> metres
            self.datum_push(
                "rain__water__year", "current", "integral",
                None if self.datum_value(dict_content["packet"], ["yearRain"]) is None else
                self.datum_value(dict_content["packet"]["yearRain"] * Decimal(0.0254), factor=10000),
                "m", 10000, data_timestamp, bin_timestamp, 1, "year",
                data_bound_lower=0, data_derived_min=True)
        if "record" in dict_content:
            bin_unit = "minute"
            bin_width = dict_content["record"]["interval"]
            data_timestamp = dict_content["record"]["dateTime"]
            # in/h -> mm/h
            self.datum_push(
                "rain__water__rate", "current", "mean",
                None if self.datum_value(dict_content["record"], ["rainRate"]) is None else
                self.datum_value(dict_content["record"]["rainRate"] * Decimal(25.4), factor=10),
                "mm_P2Fh", 10, data_timestamp, bin_timestamp, bin_width, bin_unit,
                data_bound_lower=0, data_derived_max=True, data_derived_min=True)
            # inches -> mm
            self.datum_push(
                "rain__water__last_D30_Dmin", "current", "integral",
                None if self.datum_value(dict_content["record"], ["rain"]) is None else
                self.datum_value(dict_content["record"]["rain"] * Decimal(25.4), factor=10),
                "mm", 10, data_timestamp, bin_timestamp, bin_width, bin_unit,
                data_bound_lower=0, data_derived_min=True)
        self.publish()
    except Exception as exception:
        anode.Log(logging.ERROR).log(
            "Plugin", "error",
            lambda: "[{}] error [{}] processing response:\n{}".format(
                self.name, exception, content), exception)
def push_getdata(self, content):
    """Parse a Netatmo station-data JSON payload and push one datum per sensor reading.

    content: raw JSON string; walks body.devices (base stations, types
    "NAMain"/"NHC") and each device's "modules" list (add-on modules such as
    "NAModule4"), pushing temperature, humidity, pressure, CO2, noise and
    health-index readings where present. Errors are logged, never raised.
    """
    log_timer = anode.Log(logging.DEBUG).start()
    try:
        dict_content = json.loads(content, parse_float=Decimal)
        # bin_timestamp is the local poll time; data_timestamp below is the
        # device-reported sample time from dashboard_data.time_utc.
        bin_timestamp = self.get_time()
        for device in dict_content["body"]["devices"]:
            # Metric suffix, e.g. "temperature__indoor__<name>": NAMain stations
            # are keyed off module_name, other device types fall back to
            # station_name then name. A module literally named "ignore" is skipped.
            module_name = "__indoor__" + device["module_name" if device["type"] == "NAMain" else (
                "station_name" if "station_name" in device else "name")].lower().encode("UTF-8")
            if module_name != "__indoor__ignore" and "dashboard_data" in device:
                data_timestamp = device["dashboard_data"]["time_utc"]
                # Unit strings ("_PC2_PB0C", "_P25", ...) appear to be escaped
                # unit names (presumably degrees C and percent) — defined elsewhere.
                # Temperature is pushed as fixed-point with factor 10.
                self.datum_push(
                    "temperature" + module_name, "current", "point",
                    self.datum_value(device, ["dashboard_data", "Temperature"], factor=10),
                    "_PC2_PB0C", 10, data_timestamp, bin_timestamp, self.config["poll_seconds"], "second",
                    data_version="1001",
                    data_derived_max=True, data_derived_min=True
                )
                self.datum_push(
                    "humidity" + module_name, "current", "point",
                    self.datum_value(device, ["dashboard_data", "Humidity"]),
                    "_P25", 1, data_timestamp, bin_timestamp, self.config["poll_seconds"], "second",
                    data_version="1001",
                    data_bound_upper=100, data_bound_lower=0, data_derived_max=True, data_derived_min=True
                )
                self.datum_push(
                    "pressure" + module_name, "current", "point",
                    self.datum_value(device, ["dashboard_data", "Pressure"]),
                    "mbar", 1, data_timestamp, bin_timestamp, self.config["poll_seconds"], "second",
                    data_version="1001",
                    data_bound_lower=0, data_derived_max=True, data_derived_min=True
                )
                self.datum_push(
                    "pressure_Dabsolute" + module_name, "current", "point",
                    self.datum_value(device, ["dashboard_data", "AbsolutePressure"]),
                    "mbar", 1, data_timestamp, bin_timestamp, self.config["poll_seconds"], "second",
                    data_version="1001",
                    data_bound_lower=0, data_derived_max=True, data_derived_min=True
                )
                self.datum_push(
                    "carbon_Ddioxide" + module_name, "current", "point",
                    self.datum_value(device, ["dashboard_data", "CO2"]),
                    "ppm", 1, data_timestamp, bin_timestamp, self.config["poll_seconds"], "second",
                    data_version="1001",
                    data_bound_lower=0, data_derived_max=True, data_derived_min=True
                )
                self.datum_push(
                    "noise" + module_name, "current", "point",
                    self.datum_value(device, ["dashboard_data", "Noise"]),
                    "dB", 1, data_timestamp, bin_timestamp, self.config["poll_seconds"], "second",
                    data_version="1001",
                    data_bound_lower=0, data_derived_max=True, data_derived_min=True
                )
                # Health index is only reported by the NHC (health coach) device type.
                if device["type"] == "NHC":
                    self.datum_push(
                        "health_Dindex" + module_name, "current", "point",
                        self.datum_value(device, ["dashboard_data", "health_idx"]),
                        "scalar", 1, data_timestamp, bin_timestamp, self.config["poll_seconds"], "second",
                        data_version="1001",
                        data_bound_lower=0, data_derived_max=True, data_derived_min=True
                    )
            # Add-on modules hang off the base station; NAModule4 is an indoor
            # module, everything else is treated as outdoor.
            if "modules" in device:
                for device_sub in device["modules"]:
                    module_name = (("__indoor__" if device_sub["type"] == "NAModule4" else "__outdoor__") +
                                   device_sub["module_name"].lower()).encode("UTF-8")
                    if module_name != "__outdoor__ignore" and "dashboard_data" in device_sub:
                        data_timestamp = device_sub["dashboard_data"]["time_utc"]
                        # data_version "0" for "__outdoor__parents" vs "1001" otherwise —
                        # version semantics are defined elsewhere; presumably a schema bump.
                        self.datum_push(
                            "temperature" + module_name, "current", "point",
                            self.datum_value(device_sub, ["dashboard_data", "Temperature"], factor=10),
                            "_PC2_PB0C", 10, data_timestamp, bin_timestamp, self.config["poll_seconds"], "second",
                            data_version="0" if module_name == "__outdoor__parents" else "1001",
                            data_derived_max=True, data_derived_min=True
                        )
                        self.datum_push(
                            "humidity" + module_name, "current", "point",
                            self.datum_value(device_sub, ["dashboard_data", "Humidity"]),
                            "_P25", 1, data_timestamp, bin_timestamp, self.config["poll_seconds"], "second",
                            data_version="0" if module_name == "__outdoor__parents" else "1001",
                            data_bound_upper=100, data_bound_lower=0, data_derived_max=True, data_derived_min=True
                        )
                        # Only the indoor module type carries a CO2 sensor.
                        if device_sub["type"] == "NAModule4":
                            self.datum_push(
                                "carbon_Ddioxide" + module_name, "current", "point",
                                self.datum_value(device_sub, ["dashboard_data", "CO2"]),
                                "ppm", 1, data_timestamp, bin_timestamp, self.config["poll_seconds"], "second",
                                data_version="0" if module_name == "__outdoor__parents" else "1001",
                                data_bound_lower=0, data_derived_max=True, data_derived_min=True
                            )
        self.publish()
    except Exception as exception:
        # Never propagate: log the exception with the raw payload for diagnosis.
        anode.Log(logging.ERROR).log("Plugin", "error",
                                     lambda: "[{}] error [{}] processing response:\n{}"
                                     .format(self.name, exception, content), exception)
    log_timer.log("Plugin", "timer", lambda: "[{}]".format(self.name), context=self.push_getdata)
def push_forecast(self, content):
    """Parse a daily weather forecast JSON payload and push 3 days of forecast datums.

    content: raw JSON string with "currently" and "daily" sections (the key
    names — icon, temperatureHigh/Low, windSpeed/windGust, precip* — match a
    Dark Sky-style API; confirm against the caller). All values arrive in
    imperial units and are converted to metric fixed-point on push.
    Errors are logged, never raised.
    """
    log_timer = anode.Log(logging.DEBUG).start()
    # noinspection PyBroadException
    try:
        dict_content = json.loads(content, parse_float=Decimal)
        bin_timestamp = self.get_time()
        data_timestamp = dict_content["currently"]["time"]
        # Only push when the first daily entry falls in the current 24h period,
        # i.e. the forecast actually starts "today".
        if self.get_time_period(dict_content["daily"]["data"][0]["time"], 24 * 60 * 60) == \
                self.get_time_period(bin_timestamp, 24 * 60 * 60):
            # Days 1..3: bin width is forecast_index + 1 days out.
            for forecast_index in range(3):
                forecast = dict_content["daily"]["data"][forecast_index]
                # Human-readable condition, derived from the icon name ("partly-cloudy-day" -> "partly cloudy day").
                self.datum_push(
                    "description__conditions__forecast", "forecast", "enumeration", 0,
                    "__", 1, data_timestamp, bin_timestamp, forecast_index + 1, "day",
                    data_string=self.datum_value(forecast, ["icon"]).lower().replace("-", " ").encode("ascii", "ignore")
                )
                # Temperatures: degrees F -> degrees C, fixed-point x10.
                self.datum_push(
                    "temperature__conditions__forecast", "forecast", "point",
                    None if self.datum_value(forecast, ["temperatureHigh"]) is None else
                    int((self.datum_value(forecast, ["temperatureHigh"]) - 32) * 5 / 9 * 10),
                    "_PC2_PB0C", 10, data_timestamp, bin_timestamp, forecast_index + 1, "day",
                    data_derived_max=forecast_index == 0
                )
                self.datum_push(
                    "temperature__conditions__forecast", "forecast", "low",
                    None if self.datum_value(forecast, ["temperatureLow"]) is None else
                    int((self.datum_value(forecast, ["temperatureLow"]) - 32) * 5 / 9 * 10),
                    "_PC2_PB0C", 10, data_timestamp, bin_timestamp, forecast_index + 1, "day"
                )
                # Wind: mph -> km/h (x1.609344), fixed-point x10.
                self.datum_push(
                    "wind__conditions__speed_Dforecast", "forecast", "mean",
                    None if self.datum_value(forecast, ["windSpeed"]) is None else
                    int(self.datum_value(forecast, ["windSpeed"], factor=10) * 1.609344),
                    "km_P2Fh", 10, data_timestamp, bin_timestamp, forecast_index + 1, "day",
                    data_bound_lower=0, data_derived_min=forecast_index == 0
                )
                self.datum_push(
                    "wind__conditions__speed_Dforecast", "forecast", "high",
                    None if self.datum_value(forecast, ["windGust"]) is None else
                    int(self.datum_value(forecast, ["windGust"], factor=10) * 1.609344),
                    "km_P2Fh", 10, data_timestamp, bin_timestamp, forecast_index + 1, "day",
                    data_bound_lower=0
                )
                # Humidity arrives as a 0-1 fraction; factor=100 turns it into percent.
                self.datum_push(
                    "humidity__conditions__forecast", "forecast", "mean",
                    None if self.datum_value(forecast, ["humidity"]) is None else
                    int(self.datum_value(forecast, ["humidity"], factor=100)),
                    "_P25", 1, data_timestamp, bin_timestamp, forecast_index + 1, "day",
                    data_bound_upper=100, data_bound_lower=0,
                    data_derived_max=forecast_index == 0, data_derived_min=forecast_index == 0
                )
                # Rain fields are only trusted when precipType is explicitly "rain";
                # missing fields default to 0. Intensities: inches/h -> mm/h (x25.4).
                is_rain = "precipType" in forecast and forecast["precipType"] == "rain"
                rain_probability = float(forecast["precipProbability"] if is_rain and "precipProbability" in forecast else 0)
                rain_rate_max_time = forecast["precipIntensityMaxTime"] if is_rain and "precipIntensityMaxTime" in forecast else 0
                rain_rate_max = float(forecast["precipIntensityMax"] if is_rain and "precipIntensityMax" in forecast else 0) * 25.4
                rain_rate = float(forecast["precipIntensity"] if is_rain and "precipIntensity" in forecast else 0) * 25.4
                # Daily total estimated as mean rate x 24h.
                rain = rain_rate * 24
                self.datum_push(
                    "rain__water__probability_Dforecast", "forecast", "point",
                    int(rain_probability * 100),
                    "_P25", 1, data_timestamp, bin_timestamp, forecast_index + 1, "day",
                    data_bound_lower=0, data_derived_max=forecast_index == 0, data_derived_min=forecast_index == 0
                )
                self.datum_push(
                    "rain__water__rate_Dmax_Dtime_Dforecast", "forecast", "epoch",
                    int(rain_rate_max_time),
                    "scalar", 1, data_timestamp, bin_timestamp, forecast_index + 1, "day"
                )
                self.datum_push(
                    "rain__water__rate_Dmax_Dforecast", "forecast", "high",
                    int(rain_rate_max * 10000),
                    "mm_P2Fh", 10000, data_timestamp, bin_timestamp, forecast_index + 1, "day",
                    data_bound_lower=0, data_derived_max=forecast_index == 0, data_derived_min=forecast_index == 0
                )
                self.datum_push(
                    "rain__water__rate_Dforecast", "forecast", "mean",
                    int(rain_rate * 10000),
                    "mm_P2Fh", 10000, data_timestamp, bin_timestamp, forecast_index + 1, "day",
                    data_bound_lower=0, data_derived_max=forecast_index == 0, data_derived_min=forecast_index == 0
                )
                self.datum_push(
                    "rain__water__day_Dforecast", "forecast", "integral",
                    int(rain * 10),
                    "mm", 10, data_timestamp, bin_timestamp, forecast_index + 1, "day",
                    data_bound_lower=0, data_derived_max=forecast_index == 0, data_derived_min=forecast_index == 0
                )
        self.publish()
    except Exception as exception:
        # Never propagate: log the exception with the raw payload for diagnosis.
        anode.Log(logging.ERROR).log("Plugin", "error",
                                     lambda: "[{}] error [{}] processing response:\n{}"
                                     .format(self.name, exception, content), exception)
    log_timer.log("Plugin", "timer", lambda: "[{}]".format(self.name), context=self.push_forecast)
class KasaMeter(DatagramProtocol):
    """UDP client for a TP-Link Kasa smart-plug energy meter.

    Speaks the Kasa XOR "autokey" obfuscation protocol on UDP port 9999:
    datagramRequest() sends an emeter get_realtime query; datagramReceived()
    decodes the reply and pushes power/energy datums through the owning
    plugin. Expects ``self.name``, ``self.ip`` and ``self.plugin`` to be
    assigned by the creator before the protocol is started.
    """

    PORT = 9999
    ENCRYPT_KEY = 0xAB

    @staticmethod
    def encrypt(value, key):
        """Obfuscate request string ``value``: each byte is XORed with the
        previous *ciphertext* byte, seeded with ``key``."""
        values = list(value)
        for i in range(len(values)):
            encoded = ord(values[i])
            # key is always an int here (seed constant, then ord() below),
            # so no int() coercion is needed.
            values[i] = chr(encoded ^ key)
            key = ord(values[i])
        return "".join(values)

    @staticmethod
    def decrypt(value, key):
        """Reverse of encrypt(): ``value`` is the raw datagram bytes; the key
        chains on the incoming ciphertext byte (``encoded``), mirroring the
        encryption side which chains on its output byte."""
        values = list(value.decode("latin_1"))
        for i in range(len(values)):
            encoded = ord(values[i])
            values[i] = chr(encoded ^ key)
            key = encoded
        return "".join(values)

    def startProtocol(self):
        # Bind the transport to the plug so plain write() targets it.
        self.transport.connect(self.ip, KasaMeter.PORT)

    def datagramRequest(self):
        """Send one obfuscated emeter realtime query; failures are logged, never raised."""
        try:
            self.transport.write(KasaMeter.encrypt('{"emeter": {"get_realtime": {}}}', KasaMeter.ENCRYPT_KEY))
        except Exception as exception:
            # Category "error" for consistency with the other error logs in this file.
            anode.Log(logging.ERROR).log("Plugin", "error", lambda: "[kasa] write failed to [{}:{}:{}]"
                                         .format(self.name, self.ip, KasaMeter.PORT), exception)

    def datagramReceived(self, data, addr):
        """Decode an emeter reply and push power/energy datums.

        ``addr`` is the (host, port) pair Twisted supplies; it is unused
        (the plug's identity comes from self.ip/self.name). The former
        Python 2-only tuple-parameter form ``(host, port)`` was replaced
        per PEP 3113.
        """
        try:
            bin_timestamp = self.plugin.get_time()
            decrypted = KasaMeter.decrypt(data, KasaMeter.ENCRYPT_KEY)
            meter = json.loads(decrypted)
            # Instantaneous power: mW -> W; readings of 10 MW or more are
            # pushed as 0 — presumably a glitch filter, confirm threshold.
            self.plugin.datum_push(
                "power_Dconsumption__electricity__" + self.name, "current", "point",
                int(self.plugin.datum_value(meter, ["emeter", "get_realtime", "power_mw"], 0, 1) / 1000)
                if self.plugin.datum_value(meter, ["emeter", "get_realtime", "power_mw"], 0) < 10000000 else 0,
                "W", 1, bin_timestamp, bin_timestamp, self.plugin.config["poll_seconds"], "second",
                data_bound_lower=0, data_derived_max=True, data_derived_min=True
            )
            # All-time cumulative energy as reported by the plug.
            energy_consumption_alltime = self.plugin.datum_value(meter, ["emeter", "get_realtime", "total_wh"], 0, 1)
            self.plugin.datum_push(
                "energy_Dconsumption__electricity__" + self.name, "current", "integral",
                energy_consumption_alltime,
                "Wh", 1, bin_timestamp, bin_timestamp, 1, "all_Dtime",
                data_bound_lower=0, data_derived_min=True
            )
            # Today's consumption = all-time counter minus today's minimum of
            # that counter; 0 until a daily minimum exists.
            energy_consumption_alltime_min = self.plugin.datum_get(
                DATUM_QUEUE_MIN, "energy_Dconsumption__electricity__" + self.name,
                "integral", "Wh", 1, "all_Dtime", 1, "day")
            energy_consumption_day = (energy_consumption_alltime - energy_consumption_alltime_min["data_value"]) \
                if energy_consumption_alltime_min is not None else 0
            self.plugin.datum_push(
                "energy_Dconsumption__electricity__" + self.name, "current", "integral",
                energy_consumption_day,
                "Wh", 1, bin_timestamp, bin_timestamp, 1, "day",
                data_bound_lower=0
            )
            self.plugin.publish()
        except Exception as exception:
            # Never propagate; 'decrypted' may be unassigned if decryption failed.
            anode.Log(logging.ERROR).log(
                "Plugin", "error",
                lambda: "[kasa] error [{}] processing response length [{}] and decrypted as [{}] from [{}:{}:{}]"
                .format(exception, len(data), decrypted if 'decrypted' in vars() else "",
                        self.name, self.ip, KasaMeter.PORT), exception)
def _push(self, content, targets):
    """Parse a speedtest-style JSON result and push latency/throughput datums per target.

    content: raw JSON string; "ping-icmp" is latency in ms, "upload"/"download"
    are rates (divided by 8000 / 8000000, i.e. presumably bits/s converted to
    KB/s and MB/s — confirm against the producer). targets: iterable of ids
    looked up in SPEEDTEST_ID_METRIC; unknown ids and None are skipped.
    """
    # Defaults double as the "could not parse" sentinel values.
    latency_ping = 0
    throughput_upload = 0
    throughput_download = 0
    packet_type = "latency_throughput"
    bin_timestamp = self.get_time()
    try:
        dict_content = json.loads(content, parse_float=Decimal)
        # Classify the payload by which keys are present; if both (or neither)
        # appear, packet_type stays "latency_throughput" and both sets push.
        if "ping-icmp" in dict_content and "ping" not in dict_content:
            packet_type = "latency"
        elif "ping" in dict_content and "ping-icmp" not in dict_content:
            packet_type = "throughput"
        if packet_type != "latency":
            throughput_upload = self.datum_value(dict_content["upload"] / 8000, factor=10)
            # NOTE(review): isinstance(..., int) is the validity gate; under
            # Python 2 a long would fail it — confirm datum_value yields int.
            if not isinstance(throughput_upload, int):
                raise Exception
            throughput_download = self.datum_value(
                dict_content["download"] / 8000000, factor=100)
            if not isinstance(throughput_download, int):
                raise Exception
        if packet_type != "throughput":
            latency_ping = self.datum_value(dict_content["ping-icmp"], factor=100)
            if not isinstance(latency_ping, int):
                raise Exception
    except Exception as exception:
        # Deliberate best-effort fallback: on any parse/validation failure,
        # reset everything and push zeros for both metric families.
        # (The bound exception is unused; nothing is logged here.)
        latency_ping = 0
        throughput_upload = 0
        throughput_download = 0
        packet_type = "latency_throughput"
    try:
        # "targets or []" tolerates targets=None.
        for target in targets or []:
            if target in SPEEDTEST_ID_METRIC:
                if packet_type != "latency":
                    # Throughput binned over 4 hours; latency over 5 minutes.
                    self.datum_push("upload__internet__" + SPEEDTEST_ID_METRIC[target], "current", "point",
                                    throughput_upload,
                                    "KB_P2Fs", 10, bin_timestamp, bin_timestamp, 4, "hour",
                                    data_bound_lower=0, data_derived_max=True, data_derived_min=True)
                    self.datum_push("download__internet__" + SPEEDTEST_ID_METRIC[target], "current", "point",
                                    throughput_download,
                                    "MB_P2Fs", 100, bin_timestamp, bin_timestamp, 4, "hour",
                                    data_bound_lower=0, data_derived_max=True, data_derived_min=True)
                if packet_type != "throughput":
                    self.datum_push("ping__internet__" + SPEEDTEST_ID_METRIC[target], "current", "point",
                                    latency_ping,
                                    "ms", 100, bin_timestamp, bin_timestamp, 5, "minute",
                                    data_bound_lower=0, data_derived_max=True, data_derived_min=True)
        self.publish()
    except Exception as exception:
        # Never propagate: log the exception with the raw payload for diagnosis.
        anode.Log(logging.ERROR).log(
            "Plugin", "error", lambda: "[{}] error [{}] processing response:\n{}".format(
                self.name, exception, content), exception)
def push_flow(self, content):
    """Parse an inverter power-flow JSON payload and push power/energy datums.

    content: raw JSON string with Head.Timestamp and Body.Data.Site /
    Body.Data.Inverters sections (key names P_Grid, P_Load, P_PV,
    rel_SelfConsumption, rel_Autonomy match the Fronius Solar API power-flow
    realtime data — confirm against the caller). Sign convention: P_Grid < 0
    is export to grid, P_Grid >= 0 is import. Errors are logged, never raised.
    """
    log_timer = anode.Log(logging.DEBUG).start()
    try:
        dict_content = json.loads(content, parse_float=Decimal)
        bin_timestamp = self.get_time()
        # Device-reported sample time (ISO 8601) -> epoch seconds, UTC.
        data_timestamp = int(
            calendar.timegm(
                dateutil.parser.parse(
                    dict_content["Head"]["Timestamp"]).timetuple()))
        # Grid export: magnitude of negative P_Grid, else 0.
        self.datum_push(
            "power_Dexport__electricity__grid", "current", "point",
            self.datum_value(dict_content, ["Body", "Data", "Site", "P_Grid"], 0, -1)
            if self.datum_value(dict_content, ["Body", "Data", "Site", "P_Grid"], 0) <= 0 else 0,
            "W", 1, data_timestamp, bin_timestamp, self.config["poll_seconds"], "second",
            data_bound_lower=0, data_derived_max=True, data_derived_min=True)
        self.datum_push(
            "power_Dproduction__electricity__inverter", "current", "point",
            self.datum_value(dict_content, ["Body", "Data", "Inverters", "1", "P"], 0, 1),
            "W", 1, data_timestamp, bin_timestamp, self.config["poll_seconds"], "second",
            data_bound_lower=0, data_derived_max=True, data_derived_min=True)
        # Grid import: positive P_Grid, else 0.
        self.datum_push(
            "power_Dconsumption__electricity__grid", "current", "point",
            self.datum_value(dict_content, ["Body", "Data", "Site", "P_Grid"], 0, 1)
            if self.datum_value(dict_content, ["Body", "Data", "Site", "P_Grid"], 0) >= 0 else 0,
            "W", 1, data_timestamp, bin_timestamp, self.config["poll_seconds"], "second",
            data_bound_lower=0, data_derived_max=True, data_derived_min=True)
        # Self-supplied consumption = total load minus whatever came from the grid.
        self.datum_push(
            "power_Dconsumption__electricity__inverter", "current", "point",
            self.datum_value(dict_content, ["Body", "Data", "Site", "P_Load"], 0, -1) -
            (self.datum_value(dict_content, ["Body", "Data", "Site", "P_Grid"], 0, 1)
             if self.datum_value(dict_content, ["Body", "Data", "Site", "P_Grid"], 0) >= 0 else 0),
            "W", 1, data_timestamp, bin_timestamp, self.config["poll_seconds"], "second",
            data_bound_lower=0, data_derived_max=True, data_derived_min=True)
        self.datum_push(
            "power_Dutilisation__electricity__inverter", "current", "point",
            self.datum_value(dict_content, ["Body", "Data", "Site", "rel_SelfConsumption"], 0),
            "_P25", 1, data_timestamp, bin_timestamp, self.config["poll_seconds"], "second",
            data_bound_upper=100, data_bound_lower=0, data_derived_max=True, data_derived_min=True)
        # Grid dependence = 100% minus autonomy.
        self.datum_push(
            "power_Dutilisation__electricity__grid", "current", "point",
            self.datum_value(100 - self.datum_value(
                dict_content, ["Body", "Data", "Site", "rel_Autonomy"], 0)),
            "_P25", 1, data_timestamp, bin_timestamp, self.config["poll_seconds"], "second",
            data_bound_upper=100, data_bound_lower=0, data_derived_max=True, data_derived_min=True)
        # Array utilisation = inverter output / PV array output, as a percent;
        # guarded against division by zero when P_PV is 0.
        self.datum_push(
            "power_Dutilisation__electricity__array", "current", "point",
            0 if self.datum_value(dict_content, ["Body", "Data", "Site", "P_PV"], 0) == 0 else
            (self.datum_value(
                self.datum_value(dict_content, ["Body", "Data", "Inverters", "1", "P"], 0) /
                self.datum_value(dict_content, ["Body", "Data", "Site", "P_PV"], 0) * 100)),
            "_P25", 1, data_timestamp, bin_timestamp, self.config["poll_seconds"], "second",
            data_bound_upper=100, data_bound_lower=0, data_derived_max=True, data_derived_min=True)
        self.datum_push(
            "energy_Dproduction__electricity__inverter", "current", "integral",
            self.datum_value(dict_content, ["Body", "Data", "Site", "E_Year"], factor=10),
            "Wh", 10, data_timestamp, bin_timestamp, 1, "year",
            data_bound_lower=0, data_derived_min=True)
        energy_production_inverter_alltime = self.datum_value(
            dict_content, ["Body", "Data", "Site", "E_Total"], factor=10)
        self.datum_push(
            "energy_Dproduction__electricity__inverter", "current", "integral",
            energy_production_inverter_alltime,
            "Wh", 10, data_timestamp, bin_timestamp, 1, "all_Dtime",
            data_bound_lower=0, data_derived_min=True)
        # Today's production = all-time counter minus today's minimum of that
        # counter; 0 until a daily minimum exists.
        energy_production_inverter_alltime_min = self.datum_get(
            DATUM_QUEUE_MIN, "energy_Dproduction__electricity__inverter",
            "integral", "Wh", 1, "all_Dtime", 1, "day")
        energy_production_inverter_day = (energy_production_inverter_alltime -
                                          energy_production_inverter_alltime_min["data_value"]) \
            if energy_production_inverter_alltime_min is not None else 0
        self.datum_push(
            "energy_Dproduction__electricity__inverter", "current", "integral",
            energy_production_inverter_day,
            "Wh", 10, data_timestamp, bin_timestamp, 1, "day",
            data_bound_lower=0)
        self.publish()
    except Exception as exception:
        # Include the raw payload in the log (":\n{}" + content), consistent
        # with every other push_* handler in this file — the previous format
        # string ended at the newline and dropped the payload.
        anode.Log(logging.ERROR).log(
            "Plugin", "error",
            lambda: "[{}] error [{}] processing response:\n{}".format(
                self.name, exception, content), exception)
    log_timer.log("Plugin", "timer", lambda: "[{}]".format(self.name), context=self.push_flow)