def __rest(self, type, apiCall, data=None, isBinary=False, extraHeaders=None, majorVersion="", asJSON=True):
    """Perform a REST call against the sprinkler API.

    type: HTTP verb string ("GET"/"POST") forced onto the request;
    apiCall: endpoint path appended after /api/<majorVersion>/;
    data: optional request body (raw when isBinary, otherwise JSON-serialized);
    extraHeaders: optional iterable of extra headers.
    Returns an RMAPIClientErrors code on failure.
    """
    protocol = RMAPIClientProtocol.getAsString(self._protocol)
    apiUrl = protocol + self._host + ":" + self._port + "/api/" + majorVersion + "/"

    # Append the auth token as a query parameter once we have one.
    if self.token is None:
        url = apiUrl + apiCall
    else:
        url = apiUrl + apiCall + "?access_token=" + self.token

    try:
        req = urllib2.Request(url)
        req.get_method = lambda: type  # Force GET/POST depending on type
    except Exception:  # narrowed from bare except
        return RMAPIClientErrors.REQ

    # Only attach a body when the caller supplied one. The original called
    # add_data() unconditionally, serializing None to the JSON text "null";
    # the other __rest variant in this file already guards on data.
    if data is not None:
        if isBinary:
            req.add_data(data=data)
        else:
            req.add_data(data=json.dumps(data))

    req.add_header("Content-type", "text/plain")

    if extraHeaders is not None:
        for header in extraHeaders:
            req.add_header(header)

    try:
        log.info("REST: %s : %s" % (req.get_method(), req.get_full_url()))
        r = urllib2.urlopen(req)
        data = r.read()
    except Exception as e:  # modernized from py2 "except Exception, e"
        log.error("Cannot OPEN URL: %s" % e)
        return RMAPIClientErrors.OPEN
def perform(self):
    """WillyWeather entry point: validate the API key and optionally search for nearby stations."""
    self.apiKey = self.params.get("apiKey", None)
    self.stationID = self.params.get("stationID", None)

    # A usable key must be a non-empty string.
    if self.apiKey is None or not self.apiKey or not isinstance(self.apiKey, str):
        self.lastKnownError = "Error: No API Key. Please register an account at https://www.willyweather.com.au/info/api.html"
        return

    self.params["_nearbyStationsIDList"] = []
    self.noDays = 7

    if self.params.get("stationLookUp"):
        s = self.settings
        llat = s.location.latitude
        llon = s.location.longitude
        searchURL = "https://api.willyweather.com.au/v2/" + self.apiKey + "/search.json"
        searchURLParams = [("lat", llat), ("lng", llon), ("units", "distance:km")]
        try:
            d = self.openURL(searchURL, searchURLParams)
            if d is None:
                return
            search = json.loads(d.read())
            if self.parserDebug:
                log.info(search)
            self.getNearbyStations(search)
        except Exception as e:  # modernized from py2 "except Exception, e"
            log.error("*** Error finding nearby stations")
            log.exception(e)
def resetToDefault(self):
    """Reset the mixer and every parser to defaults, clearing cached parser/forecast data."""
    log.info("**** BEGIN Reset parsers and mixer to default")
    result = False
    try:
        self.mixer.resetToDefault()
        self.parserDataTable.clear(False)
        self.forecastTable.clear(False)
        globalDbManager.parserDatabase.commit()
        for parserConfig in self.parsers:
            # Drop runtime state accumulated by previous runs.
            parserConfig.runtimeLastForecastInfo = None
            parserConfig.failCounter = 0
            parserConfig.lastFailTimestamp = None
            parser = self.parsers[parserConfig]
            enabled = parser.isEnabledForLocation(globalSettings.location.timezone,
                                                 globalSettings.location.latitude,
                                                 globalSettings.location.longitude)
            # Re-register with default params.
            self.parserTable.addParser(parserConfig.fileName, parserConfig.name, enabled, parser.params)
        result = True
    except Exception as e:  # modernized from py2 "except Exception, e"
        log.exception(e)
def resetToDefault(self):
    """Restore mixer and parser state to factory defaults and wipe cached data tables."""
    log.info("**** BEGIN Reset parsers and mixer to default")
    result = False
    try:
        self.mixer.resetToDefault()
        self.parserDataTable.clear(False)
        self.forecastTable.clear(False)
        globalDbManager.parserDatabase.commit()
        for parserConfig in self.parsers:
            # Clear per-parser runtime/failure bookkeeping.
            parserConfig.runtimeLastForecastInfo = None
            parserConfig.failCounter = 0
            parserConfig.lastFailTimestamp = None
            parser = self.parsers[parserConfig]
            enabled = parser.isEnabledForLocation(globalSettings.location.timezone,
                                                 globalSettings.location.latitude,
                                                 globalSettings.location.longitude)
            self.parserTable.addParser(parserConfig.fileName, parserConfig.name, enabled, parser.params)
        result = True
    except Exception as e:  # modernized from py2 "except Exception, e"
        log.exception(e)
def __stopWatchdog(self):
    """Disarm and close the hardware watchdog device, if it is currently open."""
    if self.__watchDogDescriptor is None:
        return
    fd = self.__watchDogDescriptor
    fd.write('V')  # Magic char = expect close stop timer
    fd.flush()
    fd.close()
    self.__watchDogDescriptor = None
    self.__lastWatchdogTimestamp = None
    log.info("Closed system watchdog file %s" % (self.__watchDogFile))
def __stopWatchdog(self):
    """Tell the hardware watchdog to stand down and release its file descriptor."""
    descriptor = self.__watchDogDescriptor
    if descriptor is not None:
        descriptor.write('V')  # Magic char = expect close stop timer
        descriptor.flush()
        descriptor.close()
        self.__watchDogDescriptor = None
        self.__lastWatchdogTimestamp = None
        log.info("Closed system watchdog file %s" % (self.__watchDogFile))
def __getForecastData(self, forecast):
    """Walk an OpenWeatherMap forecast JSON dict and add each entry's values to the parser output."""
    if "list" not in forecast:
        self.lastKnownError = "Error: Missing data cannot parse response JSON."
        log.info(self.lastKnownError)
        return

    for entry in forecast["list"]:
        timestamp = entry["dt"]
        if self.parserDebug:
            log.info("Date: %s" % rmTimestampToDateAsString(timestamp))

        maxtemp = None
        mintemp = None
        temp = None
        humidity = None
        pressure = None
        if "main" in entry:
            maxtemp = entry["main"].get("temp_max")
            mintemp = entry["main"].get("temp_min")
            temp = entry["main"].get("temp")
            humidity = entry["main"].get("humidity")
            pressure = entry["main"].get("grnd_level")
            # Explicit None check replaces a bare try/except; 10.0 avoids
            # py2 integer truncation (e.g. 1013 / 10 == 101, not 101.3).
            if pressure is not None:
                pressure = pressure / 10.0  # hPa to kPa

        self.addValue(RMParser.dataType.MINTEMP, timestamp, mintemp)
        self.addValue(RMParser.dataType.MAXTEMP, timestamp, maxtemp)
        self.addValue(RMParser.dataType.TEMPERATURE, timestamp, temp)
        self.addValue(RMParser.dataType.RH, timestamp, humidity)
        self.addValue(RMParser.dataType.PRESSURE, timestamp, pressure)

        qpf = None
        if "rain" in entry:
            qpf = entry["rain"].get("3h")
        self.addValue(RMParser.dataType.QPF, timestamp, qpf)

        wind = None
        if "wind" in entry:
            wind = entry["wind"].get("speed")
        self.addValue(RMParser.dataType.WIND, timestamp, wind)

        icon = None
        if entry["weather"][0]:
            icon = self.conditionConvert(entry["weather"][0].get("id"))
        self.addValue(RMParser.dataType.CONDITION, timestamp, icon)

    if self.parserDebug:
        log.debug(self.result)
def perform(self):
    """WUnderground entry point: discover nearby stations, fetch forecast and observed data.

    Works with or without an API key; with a key it also pulls PWS/airport
    station lists and forecast data. lastKnownError is set on any failure.
    """
    self.params["_nearbyStationsIDList"] = []
    self.params["_airportStationsIDList"] = []
    self.lastKnownError = ""

    apiKey = self.params.get("apiKey", None)
    useCustomStation = self.params.get("useCustomStation", False)
    stationName = self.params.get("customStationName")

    hasForecastData = False
    hasStationData = False
    noAPIKey = apiKey is None or not apiKey or not isinstance(apiKey, str)

    if noAPIKey:
        self.getNearbyStationsNoKey()
    else:
        self.getNearbyPWSStationsWithKey(apiKey)
        self.getNearbyAirportStationsWithKey(apiKey)
        hasForecastData = self.getForecastWithKey(apiKey)

    noStationName = stationName is None or not stationName or not isinstance(stationName, str)

    if useCustomStation:
        # Use the precomputed flag (the original re-evaluated the same
        # expression inline, leaving noStationName unused).
        if noStationName:
            self.lastKnownError = "Warning: Use Nearby Stations is enabled but no station name specified."
            log.error(self.lastKnownError)
        else:
            self.arrStationNames = stationName.split(",")
            for stationName in self.arrStationNames:
                if noAPIKey:
                    hasStationData = self.getStationDataNoKey(stationName)
                else:
                    hasStationData = self.getStationDataWithKey(apiKey, stationName)
                if hasStationData:
                    # we only get the first one that responds others are for fallback
                    break
            if not hasStationData:
                self.lastKnownError = "Warning: No observed data received from stations."
                if noAPIKey:
                    self.lastKnownError = "Error: No observed data received from stations."
                log.error(self.lastKnownError)
            else:
                log.info("WUnderground: station data retrieved for %s" % stationName)

    if not hasForecastData and not noAPIKey:
        self.lastKnownError = "Warning: No Forecast data received."
        if not hasStationData:
            self.lastKnownError = "Error: No forecast or station data received."
        log.error(self.lastKnownError)
    else:
        log.info("WUnderground: forecast data retrieved.")
def perform(self):
    """Fetch the latest observation for the configured AmbientWeather device and record it."""
    url = 'https://api.ambientweather.net/v1/devices/' + str(self.params["macAddress"])
    parameterList = [
        ("apiKey", str(self.params["apiKey"])),
        ("applicationKey", str(self.params["applicationKey"])),
        ("limit", "1"),
    ]
    log.info('Getting data from {0}'.format(str(url)))

    data = self.openURL(url, parameterList)
    if data is None:
        self.lastKnownError = "Error: No data received from server"
        log.error(self.lastKnownError)
        return

    station = json.loads(data.read())
    for entry in station:
        when = entry["dateutc"] / 1000  # from milliseconds
        if 'tempf' in entry:
            self.addValue(RMParser.dataType.TEMPERATURE, when,
                          convertFahrenheitToCelsius(entry["tempf"]), False)
        if 'humidity' in entry:
            self.addValue(RMParser.dataType.RH, when, entry["humidity"], False)
        if 'windspeedmph' in entry:
            self.addValue(RMParser.dataType.WIND, when,
                          entry["windspeedmph"] * 0.44704, False)  # to meters/sec
        if 'solarradiation' in entry:
            self.addValue(RMParser.dataType.SOLARRADIATION, when,
                          convertRadiationFromWattsToMegaJoules(entry["solarradiation"]), False)
        if 'dailyrainin' in entry:
            self.addValue(RMParser.dataType.RAIN, when,
                          convertInchesToMM(entry["dailyrainin"]), False)
        if 'baromrelin' in entry:
            self.addValue(RMParser.dataType.PRESSURE, when,
                          entry["baromrelin"] * 3.38639, False)  # to kPa
        if 'dewPoint' in entry:
            self.addValue(RMParser.dataType.DEWPOINT, when,
                          convertFahrenheitToCelsius(entry["dewPoint"]), False)
    return True
def performWithDataFeeds(self, station):
    """Pull last-hour summary data from the FAWN JSON feed and record values for `station`."""
    s = self.settings
    URLHourly = "http://fawn.ifas.ufl.edu/controller.php/lastHour/summary/json"
    # NOTE(review): URLDaily is unused in this method — possibly consumed by code
    # outside this view; kept for reference.
    URLDaily = "http://fawn.ifas.ufl.edu/controller.php/lastDay/summary/json"
    URLParams = []
    useHourly = self.params.get("useHourly", False)
    #-----------------------------------------------------------------------------------------------
    #
    # Get hourly data.
    #
    if useHourly:
        try:
            log.info("Retrieving data from: %s" % URLHourly)
            d = self.openURL(URLHourly, URLParams)
            if d is None:
                return
            json_data = d.read()
            # Feed uses single quotes; make it valid JSON before parsing.
            json_data = json_data.replace("'", "\"")
            hourly = json.loads(json_data)
            for entry in hourly:
                # only selected station
                if int(entry.get("StationID")) == station:
                    dateString = entry.get("startTime")
                    #timestamp = rmTimestampFromDateAsStringWithOffset(dateString)
                    timestamp = rmTimestampFromDateAsString(dateString[:-6], '%Y-%m-%dT%H:%M:%S')
                    if timestamp is None:
                        log.debug("Cannot convert hourly data startTime: %s to unix timestamp" % dateString)
                        continue
                    # Add 12h in the future for FAWN timestamp to fix badly reported offset and make it middle of the day UTC (Dragos)
                    timestamp += 12 * 60 * 60
                    self.addValue(RMParser.dataType.TEMPERATURE, timestamp, self.__toFloat(entry.get("t2m_avg")))
                    self.addValue(RMParser.dataType.MINTEMP, timestamp, self.__toFloat(entry.get("t2m_min")))
                    self.addValue(RMParser.dataType.MAXTEMP, timestamp, self.__toFloat(entry.get("t2m_max")))
                    # km/h -> m/s
                    self.addValue(RMParser.dataType.WIND, timestamp, 0.27777777777778 * self.__toFloat(entry.get("ws_avg")))
                    # cm -> mm
                    self.addValue(RMParser.dataType.RAIN, timestamp, 10 * self.__toFloat(entry.get("rain_sum")))
                    self.addValue(RMParser.dataType.DEWPOINT, timestamp, self.__toFloat(entry.get("dp_avg")))
                    self.addValue(RMParser.dataType.RH, timestamp, self.__toFloat(entry.get("rh_avg")))
            if self.parserDebug:
                log.debug(self.result)
        except Exception as e:  # modernized from py2 "except Exception, e"
            self.lastKnownError = "Error retrieving hourly data."
            log.error(self.lastKnownError)
            log.exception(e)
def installParser(self, tempFilePath, fileName):
    """Install a parser file into the parsers directory and (re)register it.

    Returns True on success. On failure the copied file is removed and the
    original error is logged.
    """
    filePath = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "parsers", fileName))
    shutil.move(tempFilePath, filePath)
    try:
        module = imp.load_source(fileName, filePath)
        log.info(" * Parser %s successful loaded from file '%s'" % (fileName, filePath))
        parser = RMParser.parsers[-1]  # Last added parser
        enabled = parser.isEnabledForLocation(globalSettings.location.timezone,
                                              globalSettings.location.latitude,
                                              globalSettings.location.longitude)
        parserConfig, isNew = self.parserTable.addParser(fileName, parser.parserName, enabled, parser.params)
        if not isNew:
            # Keep the previously saved params and drop the duplicate registration.
            params = self.parserTable.getParserParams(parserConfig.dbID)
            if params:
                parser.params = params
            RMParser.parsers.pop()  # delete old entry
            pkeys = self.parsers.keys()
            for pkey in pkeys:
                # equality, not identity: dbID is an int and "is" only works
                # by accident for small interned values
                if parserConfig.dbID == pkey.dbID:
                    del self.parsers[pkey]
        self.parsers[parserConfig] = parser
        parserConfig.userDataTypes = self.userDataTypeTable.addRecords(parser.userDataTypes)
        self.parserUserDataTypeTable.addRecords(parserConfig.dbID, parserConfig.userDataTypes)
        log.debug(parserConfig)
        return True
    except Exception as e:
        try:
            if os.path.exists(filePath):
                os.remove(filePath)
        except Exception as removeError:
            # Separate name: the original rebound "e" here, making the final
            # log.exception report the cleanup error instead of the install error.
            log.exception(removeError)
        log.error(" * Error installing/loading parser %s from file '%s'" % (fileName, filePath))
        log.exception(e)
def perform(self):
    """Parse a BOM (bom.gov.au) city forecast XML feed into min/max temperature and QPF values."""
    # downloading data from a URL convenience function since other python libraries can be used
    URL = self.params["URL"]
    data = self.openURL(URL)
    if data is None:
        self.lastKnownError = "Error: No data received from server"
        return

    #xmldata = e.parse("/tmp/IDQ11295.xml")
    xmldata = e.parse(data)
    for node in xmldata.getroot().getiterator(tag="area"):
        if node.attrib['description'] != self.params["city"]:
            continue
        for subnode in node.getiterator(tag="forecast-period"):
            subnodeDate = subnode.get("start-time-utc")
            subnodeTimestamp = rmTimestampFromDateAsString(subnodeDate, '%Y-%m-%dT%H:%M:%SZ')
            log.info("%s" % subnodeDate)
            for element in subnode.getiterator(tag="element"):
                mint = None
                maxt = None
                qpfMin = None
                qpfMax = None
                qpfAvg = None
                elementType = element.get("type")  # renamed: was shadowing builtin "type"
                if elementType == "air_temperature_minimum":
                    try:
                        mint = self.__toFloat(element.text)
                        log.info("\tMin Temp: %s" % mint)
                        self.addValue(RMParser.dataType.MINTEMP, subnodeTimestamp, mint)
                    except Exception:  # narrowed from bare except
                        log.debug("Cannot get minimum temperature")
                elif elementType == "air_temperature_maximum":
                    try:
                        maxt = self.__toFloat(element.text)
                        self.addValue(RMParser.dataType.MAXTEMP, subnodeTimestamp, maxt)
                        log.info("\tMax Temp: %s" % maxt)
                    except Exception:
                        log.debug("Cannot get max temperature")
                elif elementType == "precipitation_range":
                    try:
                        qpfMin, _, qpfMax, _ = element.text.split()  # will result in ['15', 'to', '35', 'mm']
                        qpfAvg = (self.__toFloat(qpfMin) + self.__toFloat(qpfMax)) / 2
                        log.info("\tQPF Avg: %s" % qpfAvg)
                        self.addValue(RMParser.dataType.QPF, subnodeTimestamp, qpfAvg)
                    except Exception:
                        log.debug("Cannot get precipitation forecast")
    if self.parserDebug:
        log.debug(self.result)
def dump(self):
    """Log a snapshot of process memory usage from two independent sources."""
    log.info("Memory Usage Stats")
    log.info("------------------------------------------------------")
    log.info("As reported by pyresource %d " % self.getFromPyResource())
    procStats = self.getFromProc()
    log.info("As reported by /proc peak: %d rss: %d" % (procStats["peak"], procStats["rss"]))
def monotonicTime(self, asSeconds=True):
    """Return monotonic time read via clock_gettime(CLOCK_MONOTONIC_RAW).

    With asSeconds True only the whole-second count is returned; otherwise
    seconds plus the nanosecond fraction as a float. On syscall failure the
    time.time() fallback is used when enabled, else OSError is raised.
    """
    now = timespec()
    rc = self.clock_gettime(rmMonotonicTime.CLOCK_MONOTONIC_RAW, ctypes.pointer(now))
    if rc != 0:
        errno_ = ctypes.get_errno()
        if not self.fallback:
            raise OSError(errno_, os.strerror(errno_))
        log.info("Monotonic Clock Error ! Reverting to time.time() fallback")
        return self.monotonicFallback(asSeconds)
    if asSeconds:
        return now.tv_sec
    return now.tv_sec + now.tv_nsec * 1e-9
def __refreshWatchDog(self):
    """Ping the hardware watchdog file when the refresh interval elapsed, opening it on first use.

    NOTE(review): __lastWatchdogTimestamp is never updated here, so the
    elapsed-time gate always passes — confirm whether a caller refreshes it.
    """
    timestamp = rmCurrentTimestamp()
    if self.__lastWatchdogTimestamp is None or (timestamp - self.__lastWatchdogTimestamp) >= self.__watchDogTimeout:
        if self.__watchDogDescriptor is None:
            try:
                self.__watchDogDescriptor = open(self.__watchDogFile, 'w')
                log.info("Opened system watchdog file %s with timeout %d" % (self.__watchDogFile, self.__watchDogTimeout))
            except Exception as e:  # modernized from py2 "except Exception, e"
                log.error(e)
        try:
            # repr() replaces the removed python2 backtick syntax.
            self.__watchDogDescriptor.write(repr(timestamp))
            self.__watchDogDescriptor.flush()
            log.debug("PING Hardware Watchdog")
        except Exception as e:
            log.error(e)
def __rest(self, type, apiCall, data=None, isBinary=False, extraHeaders=None, majorVersion="", asJSON=True):
    """Perform a REST call against the sprinkler API (TLS-context-aware variant).

    type: HTTP verb string forced onto the request; apiCall: endpoint path;
    data: optional body (raw when isBinary, else JSON-serialized).
    Returns an RMAPIClientErrors code on failure.
    """
    protocol = RMAPIClientProtocol.getAsString(self._protocol)
    apiUrl = protocol + self._host + ":" + self._port + "/api/" + majorVersion + "/"

    if self.token is None:
        url = apiUrl + apiCall
    else:
        url = apiUrl + apiCall + "?access_token=" + self.token

    try:
        req = urllib2.Request(url)
        req.get_method = lambda: type  # Force GET/POST depending on type
    except Exception:  # narrowed from bare except
        return RMAPIClientErrors.REQ

    if data is not None:
        if isBinary:
            req.add_data(data=data)
        else:
            req.add_data(data=json.dumps(data))

    req.add_header("Content-type", "text/plain")
    req.add_header('User-Agent', "RMAPIClient")

    if extraHeaders is not None:
        for header in extraHeaders:
            req.add_header(header)

    try:
        log.info("REST: %s : %s" % (req.get_method(), req.get_full_url()))
        if self.context is not None:
            # presumably an ssl.SSLContext controlling cert validation — confirm
            r = urllib2.urlopen(req, context=self.context, timeout=5)
        else:
            r = urllib2.urlopen(req, timeout=5)
        data = r.read()
    except Exception as e:  # modernized from py2 "except Exception, e"
        log.error("Cannot OPEN URL: %s" % e)
        return RMAPIClientErrors.OPEN
def sanitize(self, key, value):
    """Range-check value against the configured limits for key.

    Returns value unchanged when key has no limits entry or when value lies
    within [min, max] (a None bound is unbounded); returns None when value
    falls outside the limits.
    """
    interval = self.limits.get(key, None)
    if interval is None:
        log.info("%s key not found in our limits definitions" % key)
        return value

    # Renamed from "min"/"max": the original shadowed the builtins.
    lowerBound = interval["min"]
    upperBound = interval["max"]

    if lowerBound is not None and value < lowerBound:
        log.error("%s value %s less than limits minimum of %s" % (key, value, interval["min"]))
        return None
    if upperBound is not None and value > upperBound:
        log.error("%s value %s more than limits maximum of %s" % (key, value, interval["max"]))
        return None
    return value
def installParser(self, tempFilePath, fileName):
    """Move a parser source file into place, load it and register it with the tables.

    Returns True on success; on failure removes the installed file and logs
    the original error.
    """
    filePath = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "parsers", fileName))
    shutil.move(tempFilePath, filePath)
    try:
        module = imp.load_source(fileName, filePath)
        log.info(" * Parser %s successful loaded from file '%s'" % (fileName, filePath))
        parser = RMParser.parsers[-1]  # Last added parser
        enabled = parser.isEnabledForLocation(globalSettings.location.timezone,
                                              globalSettings.location.latitude,
                                              globalSettings.location.longitude)
        parserConfig, isNew = self.parserTable.addParser(fileName, parser.parserName, enabled, parser.params)
        if not isNew:
            params = self.parserTable.getParserParams(parserConfig.dbID)
            if params:
                parser.params = params
            RMParser.parsers.pop()  # delete old entry
            pkeys = self.parsers.keys()
            for pkey in pkeys:
                # "==" instead of "is": dbID is a plain value; identity only
                # matched by accident for small interned ints
                if parserConfig.dbID == pkey.dbID:
                    del self.parsers[pkey]
        self.parsers[parserConfig] = parser
        parserConfig.userDataTypes = self.userDataTypeTable.addRecords(parser.userDataTypes)
        self.parserUserDataTypeTable.addRecords(parserConfig.dbID, parserConfig.userDataTypes)
        log.debug(parserConfig)
        return True
    except Exception as e:
        try:
            if os.path.exists(filePath):
                os.remove(filePath)
        except Exception as removeError:
            # Distinct name so the outer log.exception(e) below still reports
            # the install failure (the original rebound "e" here).
            log.exception(removeError)
        log.error(" * Error installing/loading parser %s from file '%s'" % (fileName, filePath))
        log.exception(e)
def __refreshWatchDog(self):
    """Write a keep-alive ping to the hardware watchdog file, opening it lazily.

    NOTE(review): __lastWatchdogTimestamp is not refreshed within this view,
    so the timeout gate appears to always pass — verify against the caller.
    """
    timestamp = rmCurrentTimestamp()
    if self.__lastWatchdogTimestamp is None or (timestamp - self.__lastWatchdogTimestamp) >= self.__watchDogTimeout:
        if self.__watchDogDescriptor is None:
            try:
                self.__watchDogDescriptor = open(self.__watchDogFile, 'w')
                log.info("Opened system watchdog file %s with timeout %d" % (self.__watchDogFile, self.__watchDogTimeout))
            except Exception as e:  # modernized from py2 "except Exception, e"
                log.error(e)
        try:
            # repr() replaces the removed python2 backtick-repr syntax.
            self.__watchDogDescriptor.write(repr(timestamp))
            self.__watchDogDescriptor.flush()
            log.debug("PING Hardware Watchdog")
        except Exception as e:
            log.error(e)
def getNearbyStations(self, jsonData):
    """Query WillyWeather for the stations closest to the one in jsonData and publish them in params."""
    try:
        nearestStation = jsonData["location"].get("id")
    except Exception:  # narrowed from bare except
        log.warning("No closest station found!")
        self.lastKnownError = "Warning: No closest station found!"
        return

    closestURL = "https://api.willyweather.com.au/v2/" + self.apiKey + "/search/closest.json"
    closestURLParams = [("id", nearestStation), ("weatherTypes", "general"), ("units", "distance:km")]

    try:
        d = self.openURL(closestURL, closestURLParams)
        if d is None:
            return
        closest = json.loads(d.read())
        if self.parserDebug:
            log.info(closest)
        for i in closest["general"]:
            stationId = i["id"]  # renamed: was shadowing builtin "id"
            name = i["name"]
            region = i["region"]
            postcode = i["postcode"]
            distance = i["distance"]
            infoStr = "Station ID = " + str(stationId) + " (" + name + ", " + region + ", " + str(postcode) + ", " + str(distance) + " kms away)"
            self.params["_nearbyStationsIDList"].append(infoStr)
        if self.parserDebug:
            log.debug(self.params["_nearbyStationsIDList"])
    except Exception as e:  # modernized from py2 "except Exception, e"
        log.error("*** Error running WillyWeather parser")
        log.exception(e)
def __refreshWIFI(self):
    """Periodically re-detect WIFI state and recover from lost-IP / never-connected situations.

    Runs the detection only when the refresh timeout elapsed (or state is
    unknown). Recovery steps, in order: wait a grace period on a missing IP,
    then quick-restart WIFI; if the AP was never successfully joined, fall
    back to AP mode and restart. No-op on Android after logging.
    """
    timestamp = rmCurrentTimestamp()
    lastWIFICheckTimestamp = globalWIFI.wifiInterface.lastWIFICheckTimestamp
    oldIP = globalWIFI.wifiInterface.ipAddress
    # Only re-detect when we have no previous state or the refresh interval elapsed.
    if lastWIFICheckTimestamp is None or oldIP is None or (timestamp - lastWIFICheckTimestamp) >= self.__wifiRefreshTimeout:
        try:
            globalWIFI.detect()
            if oldIP != globalWIFI.wifiInterface.ipAddress:
                log.info("Refreshed WIFI Information. (old: %s new ip: %s)" % (`oldIP`, `globalWIFI.wifiInterface.ipAddress`))
            # Android manages its own connectivity; nothing further to do there.
            if RMOSPlatform().AUTODETECTED == RMOSPlatform.ANDROID:
                return
            # Handle None IP
            if globalWIFI.wifiInterface.ipAddress is None:
                if self.__lastNoneIpTimestamp is None or (timestamp - self.__lastNoneIpTimestamp) < self.__wifiNoneIpTimeout:
                    # First occurrence of None IP OR we can wait some more time.
                    if self.__lastNoneIpTimestamp is None:
                        self.__lastNoneIpTimestamp = timestamp
                    log.debug("Refreshed WIFI Information - no IP detected. Give it some more time: %d seconds!" % (self.__wifiNoneIpTimeout - (timestamp - self.__lastNoneIpTimestamp), ))
                    return
                else:
                    # Grace period exhausted: quick-restart the WIFI stack.
                    globalWIFI.restart()
                    log.warn("Refreshed WIFI Information - WIFI quick reloaded because no IP detected. New IP is %s" % `globalWIFI.wifiInterface.ipAddress`)
                    self.__lastNoneIpTimestamp = None # Reset None IP timestamp.
            # Check if we never connected to this AP, set back AP mode and restart app
            if globalWIFI.wifiInterface.mode == "managed" and not globalWIFI.hasConnectedOnce():
                if globalWIFI.wifiInterface.hasClientLink:
                    globalWIFI.saveHasConnectedOnce(True)
                else:
                    log.warning("WIFI Watcher Client IP (%s) configuration failed, restarting in AP mode." % oldIP)
                    globalWIFI.setDefaultAP()
                    globalWIFI.saveHasConnectedOnce(False)
                    globalWIFI.restart()
                    self.__mainManager.touchWakeMessage()
        except Exception, e:
            log.error(e)
def perform(self):
    """Poll a Meteobridge template endpoint and hand the reply to getstationdata."""
    user = self.params.get("username")
    passwd = self.params.get("password")
    top_level_url = str(self.params.get("IP_address"))
    if str(top_level_url) == "":
        log.error("IP address invalid or missing")
        return

    urlpath = "http://" + user + ":" + passwd + "@" + top_level_url + "/cgi-bin/template.cgi?template="
    values = "[th0temp-act]%20[th0hum-act]%20[thb0press-act]%20[sol0evo-act]%20[mbsystem-latitude]%20" \
             "[mbsystem-longitude]%20[th0temp-dmax]%20[th0temp-dmin]%20[th0hum-dmax]%20" \
             "[th0hum-dmin]%20[wind0avgwind-act]%20[sol0rad-act]%20[rain0total-daysum]%20" \
             "[th0dew-act]%20[UYYYY][UMM][UDD][Uhh][Umm][Uss]%20[epoch]"
    headers = "&contenttype=text/plain;charset=iso-8859-1"
    # log.debug(str(urlpath) + str(values) + str(headers))
    try:
        mburl = urlpath + values + headers
        d = request(str(mburl))
        # "!=" instead of "is not": identity comparison with an int literal
        # only worked by CPython small-int caching accident.
        if d.getcode() != 200:
            log.error("Missing or incorrect username or password")
            return
        mbrdata = d.read()
        # log.debug(mbrdata)
    except AssertionError as error:
        log.error(str(error))
        log.error("Cannot open Meteobridge")
        # Without this return, execution fell through to getstationdata with
        # mbrdata undefined (NameError). The sibling variant already returns here.
        return
    self.getstationdata(mbrdata)
    log.info("Updated data from Meteobridge")
    return
def perform(self):
    """Fetch station observations, then load and register the configured rules."""
    self.initAPIClient()

    # Build WUnderground instant data URL
    URL = self.__buildUrl()
    if URL is None:
        return
    log.info(URL)

    jsonContent = self.__getStationData(URL)
    if jsonContent is None:
        return
    log.info(jsonContent)

    observations = self.__parseStationData(jsonContent)
    #self.addValue(RMParser.dataType.TEMPERATURE, rmCurrentTimestamp(), self.observations["temperature"])

    try:
        jsonRules = json.loads(self.params["rules"])
        for rule in jsonRules:
            self.rules.addRuleSerialized(rule)
    except Exception as e:  # modernized from py2 "except Exception, e"
        log.info("Error: Cannot load rules: %s", e)
        self.lastKnownError = "Error: Cannot load rules"
        return
def __doUpdate(fromVersion, toVersion):
    """Apply database upgrade scripts updateV<n> for each version in (fromVersion, toVersion].

    Prefers a .py script, falling back to the compiled .pyc. Returns False
    when the scripts directory or a required script is missing, or when a
    script fails to load.
    """
    updateScriptsDir = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "dbUpdateScripts"))
    if not os.path.exists(updateScriptsDir):
        return False
    for version in range(fromVersion + 1, toVersion + 1):
        moduleName = "updateV%d" % (version)
        scriptName = moduleName + ".py"
        scriptPath = os.path.join(updateScriptsDir, scriptName)
        compiled = False
        if not os.path.exists(scriptPath):
            # No source script; try the byte-compiled flavor.
            scriptName = moduleName + ".pyc"
            scriptPath = os.path.join(updateScriptsDir, scriptName)
            compiled = True
        log.info("... applying database upgrade: %s" % scriptPath)
        if not os.path.exists(scriptPath):
            return False
        success = False
        try:
            if compiled:
                module = imp.load_compiled(moduleName, scriptPath)
            else:
                module = imp.load_source(moduleName, scriptPath)
            try:
                success = module.performUpdate()
            except Exception as e:  # modernized from py2 "except Exception, e"
                log.error(e)
            # Unload so a later re-run re-imports a fresh module.
            del sys.modules[moduleName]
        except Exception as e:
            log.error(e)
            return False
def perform(self):
    """Request the latest device observation from the AmbientWeather API."""
    url = 'http://api.ambientweather.net/v1/devices/' + str(self.params["macAddress"])
    parameterList = [("apiKey", str(self.params["apiKey"])),
                     ("applicationKey", str(self.params["applicationKey"])),
                     ("limit", "1")]
    log.info('Getting data from {0}'.format(str(url)))

    query_string = urllib.urlencode(parameterList)
    url_query = "?".join([url, query_string])

    try:
        req = urllib2.Request(url=url_query, headers=self.req_headers)
        data = urllib2.urlopen(url=req, timeout=60)
        log.debug("Connected to %s" % (url_query))
    except Exception as e:  # modernized from py2 "except Exception, e"
        self.lastKnownError = "Connection Error"
        # getattr guard: only URLError carries .reason; the original raised
        # AttributeError inside the handler for any other exception type.
        log.error("Error while connecting to %s, error: %s" % (url, getattr(e, "reason", e)))
        return
def __parseWeatherTag(self, tree, tag, type, subtag="value", useStartTimes=True, typeConvert=None):
    """Extract one weather series from a NOAA DWML tree as sorted (timestamp, value) pairs.

    Matches <tag> elements with the requested type attribute, pairs their
    <subtag> values with the referenced time layout, optionally converting
    values to int/float, then filters out partially-covered days and
    out-of-window timestamps.
    """
    values = []
    forecastTimes = []
    timeLayoutKey = None
    dayTimestamp = rmCurrentDayTimestamp()
    maxDayTimestamp = dayTimestamp + globalSettings.parserDataSizeInDays * 86400

    for w in tree.getroot().getiterator(tag=tag):
        if w.attrib['type'] != type:
            continue
        timeLayoutKey = w.attrib['time-layout']
        forecastTimes = self.__parseTimeLayout(tree, timeLayoutKey, useStartTimes=useStartTimes)
        for wval in w.getiterator(tag=subtag):
            try:
                val = wval.text
                if typeConvert == 'int':
                    val = int(val)
                if typeConvert == 'float':
                    val = float(val)
            except Exception:  # unparsable entries become None placeholders
                val = None
            values.append(val)

    result = zip(forecastTimes, values)
    result.sort(key=lambda z: z[0])

    # Skip days that don't have full intervals (that cover a day), otherwise the weather date will start to disapear
    tmpresult = []
    lastDay = None
    skipDay = None
    for z in result:
        day = rmGetStartOfDay(z[0])
        startDate = rmTimestampToDate(z[0])
        startHour = startDate.hour
        if lastDay is None or lastDay < day:
            skipDay = None
            lastDay = day
            log.info("%s %s: found new day: %s - %s" % (tag, type, rmTimestampToUtcDateAsString(day), rmTimestampToUtcDateAsString(lastDay)))
            if startHour > 10:
                skipDay = day
        if day == skipDay:
            log.info("\tday: %s starting with hour %s (local) skipping..." % (rmTimestampToUtcDateAsString(day), startHour))
            continue
        # BUG FIX: the original chained comparison "dayTimestamp > z[0] >= maxDayTimestamp"
        # could never be true (it requires z[0] below dayTimestamp AND at/above the
        # larger maxDayTimestamp), so past/far-future entries were never rejected.
        if z[0] < dayTimestamp or z[0] >= maxDayTimestamp:
            log.info("%s %s: reject date %s/%s as it's in the past" % (tag, type, z[0], dayTimestamp))
            continue
        tmpresult.append(z)
    return tmpresult
def performWithReport(self, station):
    """POST a FAWN daily-report request for `station`. Returns False on download failure."""
    s = self.settings
    self.lastKnownError = ""
    now = time.time()
    URLReport = "https://fawn.ifas.ufl.edu/data/reports/?res"  #+ str(now)
    POSTParams = self.__generatePOSTParams(station)
    #-----------------------------------------------------------------------------------------------
    #
    # Get daily data.
    #
    try:
        log.info("Retrieving data from: %s" % URLReport)
        POSTParams = urllib.urlencode(POSTParams)
        req = urllib2.Request(URLReport, data=POSTParams)
        response = urllib2.urlopen(req)
        data = response.read()
    except Exception as e:  # modernized from py2 "except Exception, e"
        self.lastKnownError = "Cannot download data"
        log.error(self.lastKnownError)
        log.error(e)
        return False
def perform(self):
    """Query the Meteobridge template endpoint and forward its reply to getstationdata."""
    passwd = self.params.get('password')
    # Username is static, can't be changed.
    user = "******"
    top_level_url = str(self.params.get("IP_address"))
    if str(top_level_url) == "":
        log.error("IP address or hostname invalid or missing")
        return

    urlpath = "http://" + user + ":" + passwd + "@" + top_level_url + "/cgi-bin/template.cgi?template="
    values = "[th0temp-act]%20[th0hum-act]%20[thb0press-act]%20[sol0evo-daysum]%20[mbsystem-latitude]%20" \
             "[mbsystem-longitude]%20[th0temp-dmax]%20[th0temp-dmin]%20[th0hum-dmax]%20" \
             "[th0hum-dmin]%20[wind0avgwind-davg]%20[sol0rad-act]%20[rain0total-daysum]%20" \
             "[th0dew-act]%20[UYYYY][UMM][UDD][Uhh][Umm][Uss]%20[epoch]%20" \
             "[mbsystem-station]%20[mbsystem-stationnum]"
    headers = "&contenttype=text/plain;charset=iso-8859-1"

    try:
        templateURL = urlpath + values + headers
        reply = request(str(templateURL))
        payload = reply.read()
        log.debug("Returned data: {}".format(payload))
    except AssertionError as error:
        log.error(str(error))
        log.error("Cannot open Meteobridge")
        return

    self.getstationdata(payload)
    log.info("Updated data from Meteobridge")
    return
def perform(self):
    """Fetch an OpenWeatherMap forecast for the configured location and parse it."""
    s = self.settings
    apiKey = self.params.get("apiKey", None)
    if apiKey is None:
        self.lastKnownError = "Error: No API Key. Please register for a free account on https://openweathermap.org/."
        return

    URL = "https://api.openweathermap.org/data/2.5/forecast"
    URLParams = [
        ("appid", str(apiKey)),
        ("lat", str(s.location.latitude)),
        ("lon", str(s.location.longitude)),
        ("units", "metric"),
    ]

    forecast = None
    try:
        d = self.openURL(URL, URLParams)
        if d is None:
            return
        forecast = json.loads(d.read())
        if self.parserDebug:
            # Keep a raw copy on disk for debugging.
            with open("dump.json", "w") as f:
                json.dump(forecast, f)
            log.info(forecast)
        self.__getForecastData(forecast)
    except Exception as e:  # modernized from py2 "except Exception, e"
        log.error("*** Error running OpenWeatherMap parser")
        log.exception(e)
def perform(self):
    """Connect to a Davis WeatherLink station over TCP and request a LOOP packet, retrying on timeout."""
    s = self.settings
    address = self.getParamAsString(self.params.get("stationAddress"))
    port = self.params.get("stationPort", 22222)
    if address is None:
        self.lastKnownError = "No Station IP address specified"
        log.error(self.lastKnownError)
        return False

    try:
        wlsocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        wlsocket.connect((address, port))
        # NOTE(review): timeout is set after connect, so connect itself can
        # block with the OS default — confirm whether that is intended.
        wlsocket.settimeout(5)
        wlsocket.sendall(b"LOOP 1\n")
        log.info("Sent LOOP command")
    except Exception:
        self.lastKnownError = "Cannot connect to station IP: %s port %s." % (address, port)
        log.error(self.lastKnownError)
        return False

    retries = 5
    while retries > 0:
        try:
            raw_data = wlsocket.recv(1024)
        except socket.timeout as e:  # modernized from py2 "except socket.timeout, e"
            log.info("Recv timeout (%s) retrying." % e)
            time.sleep(2)
            wlsocket.sendall(b"LOOP 1\n")
            log.info("Sent LOOP command")
            retries -= 1
            continue
        except socket.error as e:
            log.info("Recv error (%s)." % e)
            self.lastKnownError = "No response from station"
            break
def monotonicInit(self):
    """Bind clock_gettime from the platform C runtime; fall back to time.time() when unavailable."""
    try:
        from RMOSGlue.rmOSPlatform import RMOSPlatform
        # Pick the shared library that exposes clock_gettime on this platform.
        if RMOSPlatform().AUTODETECTED == RMOSPlatform.ANDROID:
            librt = ctypes.CDLL('libc.so', use_errno=True)
            log.info("Initialised Android monotonic clock")
        elif RMOSPlatform().AUTODETECTED == RMOSPlatform.OPENWRT:
            librt = ctypes.CDLL('librt.so.0', use_errno=True)
            log.info("Initialised OpenWRT monotonic clock")
        else:
            librt = ctypes.CDLL('librt.so.1', use_errno=True)
            log.info("Initialised generic monotonic clock")
        self.clock_gettime = librt.clock_gettime
        self.clock_gettime.argtypes = [ctypes.c_int, ctypes.POINTER(timespec)]
        self.get = self.monotonicTime
    except Exception:  # exception value was bound but never used; dropped the binding
        self.get = self.monotonicFallback
        log.error("Cannot initialise monotonicClock will use fallback time.time() method !")
def __load(self, parserDir):
    """Discover and load all weather-parser modules found under parserDir.

    Prefers a .py source over a .pyc with the same module name, imports
    each module (which appends its parser to RMParser.parsers), then
    records/updates the parser's configuration and parameters in the DB.
    NOTE(review): fragment — the trailing "END Loading parsers" log of
    this method lies outside this chunk.
    """
    log.info("*** BEGIN Loading parsers from '%s'" % parserDir)

    fileMap = OrderedDict()

    #---------------------------------------------------------------------------
    # Index candidate files by module name; a .py entry overrides any .pyc
    # entry with the same name so sources win over compiled leftovers.
    #
    for root, dirs, files in os.walk(parserDir):
        for fname in files:
            tmpsplit = os.path.splitext(fname)
            modname = tmpsplit[0]
            modext = tmpsplit[1]
            modPath = os.path.join(root, fname)

            fileEntry = fileMap.get(modname, None)
            if fileEntry is None:
                fileMap[modname] = {
                    "file": fname,
                    "name": modname,
                    "ext": modext,
                    "path": modPath
                }
            else:
                if modext == ".py":
                    fileEntry["file"] = fname
                    fileEntry["name"] = modname
                    fileEntry["ext"] = modext
                    fileEntry["path"] = modPath

    #---------------------------------------------------------------------------
    # Import each module and register its parser with the manager tables.
    #
    for fileEntry in fileMap.values():
        try:
            if fileEntry["ext"] == ".pyc":
                module = imp.load_compiled(fileEntry["name"], fileEntry["path"])
            elif fileEntry["ext"] == ".py":
                module = imp.load_source(fileEntry["name"], fileEntry["path"])
            else:
                continue
        except Exception as e:
            log.error(" * Error loading parser %s from file '%s'" % (fileEntry["name"], fileEntry["path"]))
            log.exception(e)
            continue

        try:
            log.debug(" * Parser %s successful loaded from file '%s'" % (fileEntry["name"], fileEntry["path"]))

            # Importing the module appended its parser instance to the
            # global registry; pick up that last-added parser.
            parser = RMParser.parsers[-1]
            enabled = parser.isEnabledForLocation(globalSettings.location.timezone, \
                                                  globalSettings.location.latitude, \
                                                  globalSettings.location.longitude)
            parserConfig, isNew = self.parserTable.addParser(fileEntry["file"], parser.parserName, enabled, parser.params)
            parser.defaultParams = parser.params.copy()  # save the default parser params for an eventual params reset

            if not isNew:
                # Existing parser: merge its stored params, dropping keys
                # the current parser version no longer declares.
                params = self.parserTable.getParserParams(parserConfig.dbID)
                unusedKeyList = []
                if params:
                    for key in params:
                        bFound = False
                        for pkey in parser.params:
                            if key == pkey:
                                bFound = True
                        if not bFound:
                            unusedKeyList.append(key)
                    for key in unusedKeyList:
                        params.pop(key, None)
                    parser.params.update(params)
                    self.parserTable.updateParserParams(parserConfig.dbID, parser.params)

            self.parsers[parserConfig] = parser

            parserConfig.userDataTypes = self.userDataTypeTable.addRecords(parser.userDataTypes)
            self.parserUserDataTypeTable.addRecords(parserConfig.dbID, parserConfig.userDataTypes)
            log.debug(parserConfig)
        except Exception, e:
            log.info("Failed to register parser from file : %s. Error: %s" % (fileEntry["name"], e))
            # Registration failed: undo the append that the import performed.
            RMParser.parsers.pop()
def parsePacket(self, raw_data):
    """Decode a Davis LOOP packet and publish its observations.

    Validates the "LOO" header, converts the fixed-offset fields to metric
    units and pushes them through self.addValue(). Returns True on success,
    False (with self.lastKnownError set) on malformed input.
    """
    self.lastKnownError = ""

    # A LOOP packet is 99 bytes; anything shorter cannot be parsed.
    if raw_data is None or len(raw_data) < 99:
        self.lastKnownError = "Invalid data response"
        return False

    timestamp = rmCurrentTimestamp()

    if self.parserDebug:
        hex_string = binascii.hexlify(raw_data).decode('utf-8')
        log.info("Raw Data LOOP %s" % hex_string)

    try:
        ack = struct.unpack('c', raw_data[0:1])[0]
        L = struct.unpack('c', raw_data[1:2])[0]
        O1 = struct.unpack('c', raw_data[2:3])[0]
        O2 = struct.unpack('c', raw_data[3:4])[0]
        pkt_type = struct.unpack('B', raw_data[5:6])[0]
        next_record = struct.unpack('H', raw_data[6:8])[0]
    except Exception:
        self.lastKnownError = "Invalid data format"
        return False

    # Reject the packet if ANY of the three header bytes differs from "LOO".
    # (Fixed: the original used "and", which only failed when all three
    # bytes mismatched at once.)
    if L != 'L' or O1 != 'O' or O2 != 'O':
        self.lastKnownError = "Unknown packet encoding"
        return False

    # Barometer is reported in thousandths of inHg; float divisor so
    # Python 2 integer division doesn't truncate the reading.
    pressure = struct.unpack('H', raw_data[8:10])[0] / 1000.0
    pressure *= 3.386  # inHg to kPa
    log.info("Barometer: %s" % pressure)
    self.addValue(RMParser.dataType.PRESSURE, timestamp, pressure)

    # Outside temperature arrives in tenths of degrees Fahrenheit.
    outside_temp = struct.unpack('h', raw_data[13:15])[0] / 10.0
    outside_temp = convertFahrenheitToCelsius(outside_temp)
    log.info("Outside Temp: %s" % outside_temp)
    self.addValue(RMParser.dataType.TEMPERATURE, timestamp, outside_temp)

    #wind_speed = struct.unpack('B', raw_data[15:16])[0]
    #wind_dir = struct.unpack('H', raw_data[17:19])[0]

    ten_min_avg_wind_spd = struct.unpack('B', raw_data[16:17])[0]
    ten_min_avg_wind_spd /= 2.237  # mph to mps
    log.info("Wind Speed (10min avg): %s" % ten_min_avg_wind_spd)

    out_hum = struct.unpack('B', raw_data[34:35])[0]
    log.info("Humidity: %s" % out_hum)
    self.addValue(RMParser.dataType.RH, timestamp, out_hum)

    #rain_rate = struct.unpack('H', raw_data[42:44])[0] * 0.01

    if self.params["useSolarRadiation"]:
        solar_radiation = struct.unpack('H', raw_data[45:47])[0]
        log.info("Solar Radiation: %s" % solar_radiation)
        self.addValue(RMParser.dataType.SOLARRADIATION, timestamp, solar_radiation)

    # Daily rain counter is in hundredths of an inch.
    day_rain = struct.unpack('H', raw_data[51:53])[0] * 0.01
    day_rain = convertInchesToMM(day_rain)
    log.info("Day Rain: %s" % day_rain)
    self.addValue(RMParser.dataType.RAIN, timestamp, day_rain)

    if self.params["useStationEvapoTranpiration"]:
        # Daily ET is reported in thousandths of an inch; float divisor
        # to avoid Python 2 integer-division truncation.
        day_et = struct.unpack('H', raw_data[57:59])[0] / 1000.0
        day_et = convertInchesToMM(day_et)
        log.info("Day EvapoTranspiration: %s" % day_et)

    #xmtr_battery_status = struct.unpack('?', raw_data[87:88])[0]
    #console_battery_volts = ((struct.unpack('h', raw_data[88:90])[0] * 300) / 512) / 100.0

    forecast_icon = struct.unpack('c', raw_data[90:91])[0]
    rainmachine_icon = self.conditionConvert(ord(forecast_icon))
    log.info("Condition: %s -> %s" % (ord(forecast_icon), rainmachine_icon))
    self.addValue(RMParser.dataType.CONDITION, timestamp, rainmachine_icon)

    #crc = struct.unpack('h', raw_data[98:100])[0]

    return True
# NOTE(review): fragment — receive/retry loop continued from the LOOP
# request earlier in perform(); "retries" and "wlsocket" are bound above.
while retries > 0:
    try:
        raw_data = wlsocket.recv(1024)
    except socket.timeout, e:
        # No reply yet: re-issue the LOOP request and try again.
        log.info("Recv timeout (%s) retrying." % e)
        time.sleep(2)
        wlsocket.sendall(b"LOOP 1\n")
        log.info("Sent LOOP command")
        retries -= 1
        continue
    except socket.error, e:
        log.info("Recv error (%s)." % e)
        self.lastKnownError = "No response from station"
        break
    else:
        # Got a packet: parse it and stop retrying.
        log.info("Parsing Response")
        self.parsePacket(raw_data)
        break

wlsocket.close()

if self.parserDebug:
    log.info(self.result)

#-----------------------------------------------------------------------------------------------
#
# Parse LOOP data.
#
def parsePacket(self, raw_data):
    """Decode a Davis LOOP packet. NOTE(review): fragment — body continues past this chunk."""
    self.lastKnownError = ""

    # A LOOP packet is 99 bytes; anything shorter cannot be parsed.
    if raw_data is None or len(raw_data) < 99:
def mainShutdownHandler(signal, frame):
    """SIGINT handler: log the interrupt and ask the main manager to stop."""
    log.info(">>>>>>> Got interrupt signal shutting down ...")
    RMMainManager.instance.stop()
def perform(self):
    """Log in to the Netatmo API and publish today's per-station averages.

    Aggregates temperature/humidity/rain/wind readings across the selected
    (or all outdoor/rain/wind) modules of the first device, then stores the
    refreshed module list back into the parser parameters.
    """
    if self.username is None:
        self.username = self.params["username"]
        self.password = self.params["password"]

    if self.password is None or self.username is None:
        log.info("Cannot login: no username or password provided")
        self.lastKnownError = "Error: Invalid username or password"
        return

    # Credentials changed since the last run: re-authenticate.
    # (Fixed: the original compared with "is not", i.e. object identity,
    # which is unreliable for strings.)
    if self.username != self.params["username"]:
        self.username = self.params["username"]
        self.password = self.params["password"]
        self.clientOauth()

    if self.accessToken is None:
        self.clientOauth()
    else:
        self.renewAccesTokenIfNeeded()

    if self.accessToken is None:
        log.info("Cannot login: invalid oauth")
        self.lastKnownError = "Error: Invalid username or password"
        return

    self.getData()
    tsStartOfDayUTC = rmCurrentDayTimestamp()

    specifiedModules = []
    if self.params["useSpecifiedModules"]:
        modulesString = self.params["specificModules"]
        specifiedModules = [item.strip() for item in modulesString.split(',')]

    for device in self.jsonData["body"]["devices"][0:1]:  # only the first device
        name = device["station_name"]  # put as output parameter?
        [llat, llon] = device["place"]["location"]  # use for max distance
        modules = device["modules"]

        # Per-day accumulators and sample counters per measurement class.
        rh = 0
        temp = 0
        maxTemp = 0
        minTemp = 0
        rain = 0
        wind = 0
        tsTemp = None
        tsWind = None
        tsRain = None
        idxTemp = 0
        idxWind = 0
        idxRain = 0

        self.params["_availableModules"] = []

        for module in modules:
            moduleName = 'unnamed'
            try:
                moduleName = module["module_name"]
            except Exception:
                pass
            moduleID = module["_id"]
            self.params["_availableModules"].append([moduleName, moduleID])

            moduleDataType = module["data_type"]
            if self.params["useSpecifiedModules"]:
                if moduleID not in specifiedModules:
                    continue
            elif "outdoor" not in moduleName.lower() and ("Rain" not in moduleDataType) and ("Wind" not in moduleDataType):
                continue

            # Each reading is best-effort: a module without the field is
            # simply skipped.
            # NOTE(review): the "continue" on stale data also skips this
            # module's remaining readings — kept as-is.
            try:
                recordedRain = self.__toFloat(module["dashboard_data"]["Rain"])
                tsRecordedRain = self.__toInt(module["dashboard_data"]["time_utc"])
                if tsRecordedRain < tsStartOfDayUTC:
                    continue
                tsRain = max(tsRecordedRain, tsRain)
                rain += recordedRain
                idxRain += 1
            except Exception:
                pass

            try:
                recordedWind = self.__toFloat(module["dashboard_data"]["WindStrength"])
                tsRecordedWind = self.__toInt(module["dashboard_data"]["time_utc"])
                if tsRecordedWind < tsStartOfDayUTC:
                    continue
                # Fixed: the original took max(recordedWind, tsWind), mixing
                # a wind speed into the timestamp.
                tsWind = max(tsRecordedWind, tsWind)
                wind += recordedWind
                idxWind += 1
            except Exception:
                pass

            try:
                recordedTemp = self.__toFloat(module["dashboard_data"]["Temperature"])
                tsRecordedTemp = self.__toInt(module["dashboard_data"]["time_utc"])
                if tsRecordedTemp < tsStartOfDayUTC:
                    continue
                tsTemp = max(tsRecordedTemp, tsTemp)
                maxTemp += self.__toFloat(module["dashboard_data"]["max_temp"])
                minTemp += self.__toFloat(module["dashboard_data"]["min_temp"])
                rh += self.__toFloat(module["dashboard_data"]["Humidity"])  # measured in %
                temp += recordedTemp
                idxTemp += 1
            except Exception:
                pass

        # Publish per-class averages only when at least one fresh sample
        # from today was seen.
        if idxTemp > 0 and tsTemp > tsStartOfDayUTC:
            self.addValue(RMParser.dataType.TEMPERATURE, tsStartOfDayUTC, temp / idxTemp)
            self.addValue(RMParser.dataType.MINTEMP, tsStartOfDayUTC, minTemp / idxTemp)
            self.addValue(RMParser.dataType.MAXTEMP, tsStartOfDayUTC, maxTemp / idxTemp)
            self.addValue(RMParser.dataType.RH, tsStartOfDayUTC, rh / idxTemp)
        if idxWind > 0 and tsWind > tsStartOfDayUTC:
            self.addValue(RMParser.dataType.WIND, tsStartOfDayUTC, wind / idxWind)
        if idxRain > 0 and tsRain > tsStartOfDayUTC:
            self.addValue(RMParser.dataType.RAIN, tsStartOfDayUTC, rain / idxRain)

    # Persist the refreshed "_availableModules" list in the DB.
    # (Fixed: the original compared parser names with "is".)
    for parserCfg in RMParserManager.instance.parsers:
        if self.parserName == parserCfg.name:
            RMParserManager.instance.setParserParams(parserCfg.dbID, self.params)
            break
def raindelay(delay=1):
    """Demo stub: log a rain-delay request of `delay` days (no device call here)."""
    log.info("Set RainDelay to %s" % delay)
def perform(self):  # The function that will be executed must have this name
    """Publish the WeatherFlow UDP listener's accumulated daily reports.

    On the first call it only spawns the background UDP listener thread
    (no data exists yet). On later calls it reports today's running
    summary (index 0) and — once per day rollover — yesterday's
    end-of-day summary (index 1).
    """
    if not self.started:
        self.started = True
        log.debug("Starting UPD Listener thread.")
        # TODO: How do we stop the thread once it's started?
        threading.Thread(target=self.wfUDPData).start()
        return None  # First time, just start the thread, we have no data yet.

    for idx, rawdata in enumerate(self.parserData):
        if 'report' not in rawdata:
            continue

        logMsg = "Interval Summary:"
        if idx == 1:  # looking at yesterday's data (0 = today, 1 = yesterday)
            if self.newDay:  # Check if need to send yesterday's data
                self.newDay = 0  # reset the flag and send yesterday's data
                logMsg = "Yesterday's EoD Summary:"
            else:
                # Yesterday was already reported; only send it one time.
                continue

        report = rawdata['report']
        ts = rawdata['ts']
        self.addValue(RMParser.dataType.TEMPERATURE, ts, report['temperature'])
        self.addValue(RMParser.dataType.RH, ts, report['humidity'])
        self.addValue(RMParser.dataType.PRESSURE, ts, report['pressure'])
        self.addValue(RMParser.dataType.WIND, ts, report['wind'])
        self.addValue(RMParser.dataType.SOLARRADIATION, ts, report['srad'])
        self.addValue(RMParser.dataType.RAIN, ts, report['rain'])
        self.addValue(RMParser.dataType.DEWPOINT, ts, report['dewpoint'])

        # Sanity-filter obviously bogus extremes (sensor glitches).
        # Fixed: the min-temperature guard used to test max_temp and was
        # nested under the max-temperature guard, so MINTEMP/MAXRH/MINRH
        # could be dropped whenever max_temp was out of range.
        if report['max_temp'] < 60:
            self.addValue(RMParser.dataType.MAXTEMP, ts, report['max_temp'])
        if report['min_temp'] > -60:
            self.addValue(RMParser.dataType.MINTEMP, ts, report['min_temp'])
        self.addValue(RMParser.dataType.MAXRH, ts, report['max_humid'])
        self.addValue(RMParser.dataType.MINRH, ts, report['min_humid'])

        log.info(
            "%s temp(C,F): %.2f / %.2f, wind(m/s,mph): %.2f / %.2f, rain_dayTot(mm,in): %.2f / %.2f"
            % (logMsg,
               report["temperature"], ((report["temperature"] * 9 / 5) + 32),
               report["wind"], (report["wind"] * 2.237),
               report["rain"], (report["rain"] / 25.4)))

        log.debug("timestamp = %s" % datetime.fromtimestamp(ts))
        log.debug("temperature = %f" % report["temperature"])
        log.debug("humidity = %f" % report["humidity"])
        log.debug("pressure = %f" % report["pressure"])
        log.debug("wind speed = %f" % report["wind"])
        log.debug("solar radiation = %f" % report["srad"])
        log.debug("rain = %f" % report["rain"])
        log.debug("dewpoint = %f" % report["dewpoint"])
        log.debug("max temperature = %f" % report["max_temp"])
        log.debug("min temperature = %f" % report["min_temp"])
        log.debug("max humidity = %f" % report["max_humid"])
        log.debug("min humidity = %f" % report["min_humid"])
        log.debug("")
# NOTE(review): fragment — tail of the internet-connectivity probe whose
# "try:" opens earlier; a single ping to 8.8.8.8 decides the flag.
        ret = os.system("ping -c 1 -w 2 8.8.8.8 > /dev/null 2>&1")
        self.__internetStatus = True if ret == 0 else False
    except Exception, e:
        pass

    def __diagCloud(self):
        """Read the cloud client's status code from its status file into __cloudStatus (-1 = unknown)."""
        self.__cloudStatus = -1
        try:
            with open(globalSettings.cloud._statusFile, 'r') as f:
                try:
                    status = f.readline().rstrip()
                    self.__cloudStatus = int(status)
                except:
                    pass
        except (IOError, OSError):
            # Status file missing/unreadable: leave -1 (unknown).
            pass

# Module-level singleton used by the rest of the system.
globalMachineDiag = RMMachineDiag()

if __name__ == "__main__":
    #curframe = inspect.currentframe()
    #calframe = inspect.getouterframes(curframe, 2)
    #print 'caller name:', calframe[1][3]
    log.info("Testing MachineDiag")
    globalMachineDiag.checkMachine()
    print globalMachineDiag.getStatus()
# NOTE(review): fragment — tail of __load()'s registration loop: drop
# obsolete stored keys, merge stored params, register the parser instance.
                for key in unusedKeyList:
                    params.pop(key, None)
                parser.params.update(params)
                self.parserTable.updateParserParams(parserConfig.dbID, parser.params)

            self.parsers[parserConfig] = parser

            parserConfig.userDataTypes = self.userDataTypeTable.addRecords(parser.userDataTypes)
            self.parserUserDataTypeTable.addRecords(parserConfig.dbID, parserConfig.userDataTypes)
            log.debug(parserConfig)
        except Exception, e:
            log.info("Failed to register parser from file : %s. Error: %s" % (fileEntry["name"], e))
            # Undo the append that importing the module performed.
            RMParser.parsers.pop()

    log.info("*** END Loading parsers")

    def findParserConfig(self, parserID):
        """Return the parser config with DB id `parserID`, or None if absent."""
        for parserConfig in self.parsers:
            if parserConfig.dbID == parserID:
                return parserConfig
        return None

    def setParserParams(self, parserID, params):
        """Merge `params` into the identified parser's parameters.

        NOTE(review): fragment — the merge/persist logic continues past
        this chunk.
        """
        parserConfig = self.findParserConfig(parserID)
        if parserConfig is None:
            return False

        parser = self.parsers[parserConfig]
        newParams = copy.deepcopy(parser.params)
# NOTE(review): fragment — tail of RMWeatherDataLimits.sanitize():
# "key", "value" and "interval" are bound earlier in the method.
        log.info("%s key not found in our limits definitions" % key)
        return value

    min = interval["min"]
    max = interval["max"]

    # Out-of-range values are rejected (None), not clamped.
    if min is not None and value < min:
        log.error("%s value %s less than limits minimum of %s" % (key, value, interval["min"]))
        return None
    if max is not None and value > max:
        log.error("%s value %s more than limits maximum of %s" % (key, value, interval["max"]))
        return None

    return value

if __name__ == "__main__":
    # Exercise every data type against a sub-zero, a small and a large value.
    l = RMWeatherDataLimits()
    for value in [-55.2, 0.8884, 100.1]:
        log.info("TEMPERATURE Sanitized %s to %s" % (value, l.sanitize(RMWeatherDataType.TEMPERATURE, value)))
        log.info("MINTEMP Sanitized %s to %s" % (value, l.sanitize(RMWeatherDataType.MINTEMP, value)))
        log.info("MAXTEMP Sanitized %s to %s" % (value, l.sanitize(RMWeatherDataType.MAXTEMP, value)))
        log.info("RH Sanitized %s to %s" % (value, l.sanitize(RMWeatherDataType.RH, value)))
        log.info("WIND Sanitized %s to %s" % (value, l.sanitize(RMWeatherDataType.WIND, value)))
        log.info("SOLARRAD Sanitized %s to %s" % (value, l.sanitize(RMWeatherDataType.SOLARRADIATION, value)))
        log.info("SKYCOVER Sanitized %s to %s" % (value, l.sanitize(RMWeatherDataType.SKYCOVER, value)))
        log.info("ET Sanitized %s to %s" % (value, l.sanitize(RMWeatherDataType.ET0, value)))
        log.info("QPF Sanitized %s to %s" % (value, l.sanitize(RMWeatherDataType.QPF, value)))
        log.info("RAIN Sanitized %s to %s" % (value, l.sanitize(RMWeatherDataType.RAIN, value)))
        log.info("PRESSURE Sanitized %s to %s" % (value, l.sanitize(RMWeatherDataType.PRESSURE, value)))
        log.info("-" * 100)
def perform(self):  # The function that will be executed must have this name
    """Download DWD 1-minute precipitation data for the configured station
    and aggregate it into hourly RAIN values.

    Fetches the station's "now" ZIP archive from opendata.dwd.de, parses
    the CSV inside it and sums the per-minute readings of the last 30 days
    into hourly buckets before handing them to self.addValue().
    """
    log.info("Hello History")

    station = self.params.get("StationID", None)
    if station is None or station == "":
        station = "02860"  # fallback: a known-good default station
        log.debug("No station set, using (%s)" % station)

    #url = "https://opendata.dwd.de/climate/observations_germany/climate/hourly/precipitation/recent/stundenwerte_RR_" + str(station) + "_akt.zip"
    url = "https://opendata.dwd.de/climate_environment/CDC/observations_germany/climate/1_minute/precipitation/now/1minutenwerte_nieder_" + str(station) + "_now.zip"
    URLParams = [
        ("User-Agent", "RainMachine v2")
    ]

    try:
        # Fixed: URLParams was built but never used; send it as headers.
        req = urllib2.Request(url, None, dict(URLParams))
        response = urllib2.urlopen(req)
        raw = response.read()

        zipFile = ZipFile(StringIO.StringIO(raw))
        dataFile = None
        for fileInfo in zipFile.filelist:
            if fileInfo.filename.startswith("produkt_ein_now_"):
                dataFile = fileInfo
        if dataFile is None:
            log.error("Unable to find data file.")
            return

        content = zipFile.read(dataFile)
        reader = csv.reader(StringIO.StringIO(content), delimiter=';')
        next(reader)  # skip the CSV header row

        #minDate = datetime.datetime.today() - datetime.timedelta(days=30)
        minDate = rmDeltaDayFromTimestamp(rmCurrentDayTimestamp(), -30)
        epochDate = datetime.datetime(1970, 1, 1)
        log.debug("minDate: %s" % minDate)

        lastHour = None
        currentHour = None
        totalHour = 0

        for row in reader:
            timeStamp = row[1]
            #log.debug("Timestamp: %s" % timeStamp)
            myDate = datetime.datetime.strptime(timeStamp, "%Y%m%d%H%M")
            myEpoch = int((myDate - epochDate).total_seconds())
            #date = rmTimestampFromDateAsString(timeStamp, "%Y%m%d%H")
            if myEpoch is None:
                log.debug("Cannot convert timestamp: %s to unix timestamp" % timeStamp)
                continue
            if myEpoch < minDate:
                continue

            value = parseString(row[3])
            if value is None:
                continue

            # Accumulate per-minute values into the enclosing hour bucket
            # and flush a bucket whenever the hour changes.
            # NOTE(review): the last (possibly incomplete) hour is never
            # flushed — kept as-is.
            currentHour = myEpoch - (myEpoch % 3600)
            if currentHour != lastHour:
                if lastHour is not None:
                    # log.debug("Adding value %s" % value)
                    self.addValue(RMParser.dataType.RAIN, lastHour, totalHour)
                totalHour = value
                lastHour = currentHour
            else:
                totalHour += value

        log.info("Done")
    except Exception as e:
        log.error("*** Error running DWD parser")
        log.exception(e)
class FAWN(RMParser):
    """Florida Automated Weather Network (FAWN) observation parser."""

    parserName = "FAWN Parser"
    parserDescription = "Florida Automated Weather Network observations"
    parserForecast = False
    parserHistorical = True
    parserEnabled = False
    parserDebug = True
    parserInterval = 6 * 3600  # run every 6 hours
    params = {"station": 480, "useHourly": False}

    def isEnabledForLocation(self, timezone, lat, long):
        # FAWN only covers Florida; gate on US/America timezones.
        if FAWN.parserEnabled and timezone:
            return timezone.startswith("US") or timezone.startswith("America")
        return False

    def perform(self):
        """Fetch observations, preferring the JSON feeds and falling back to the report feed."""
        station = self.params.get("station", None)
        if station is None:
            self.lastKnownError = "No station number configured."
            log.error(self.lastKnownError)
            return

        res = self.performWithDataFeeds(station)
        if not res:
            self.performWithReport(station)

    #-----------------------------------------------------------------------------------------------
    #
    # Get hourly and daily data using the JSON API data feeds
    #
    def performWithDataFeeds(self, station):
        """Fetch last-hour (optional) and last-day summaries from the FAWN JSON feeds.

        NOTE(review): fragment — the method's successful-return tail (and
        performWithReport) lie outside this chunk; only the False return
        from the daily except-branch is visible here.
        """
        s = self.settings
        URLHourly = "http://fawn.ifas.ufl.edu/controller.php/lastHour/summary/json"
        URLDaily = "http://fawn.ifas.ufl.edu/controller.php/lastDay/summary/json"
        URLParams = []
        useHourly = self.params.get("useHourly", False)

        #-----------------------------------------------------------------------------------------------
        #
        # Get hourly data.
        #
        if useHourly:
            try:
                log.info("Retrieving data from: %s" % URLHourly)
                d = self.openURL(URLHourly, URLParams)
                if d is None:
                    return

                json_data = d.read()
                # The feed uses single quotes; normalise to valid JSON.
                json_data = json_data.replace("'", "\"")
                hourly = json.loads(json_data)

                for entry in hourly:
                    # only selected station
                    if int(entry.get("StationID")) == station:
                        dateString = entry.get("startTime")
                        #timestamp = rmTimestampFromDateAsStringWithOffset(dateString)
                        timestamp = rmTimestampFromDateAsString(dateString[:-6], '%Y-%m-%dT%H:%M:%S')
                        if timestamp is None:
                            log.debug("Cannot convert hourly data startTime: %s to unix timestamp" % dateString)
                            continue
                        # Add 12h in the future for FAWN timestamp to fix badly reported offset and make it middle of the day UTC (Dragos)
                        timestamp += 12 * 60 * 60
                        self.addValue(RMParser.dataType.TEMPERATURE, timestamp, self.__toFloat(entry.get("t2m_avg")))
                        self.addValue(RMParser.dataType.MINTEMP, timestamp, self.__toFloat(entry.get("t2m_min")))
                        self.addValue(RMParser.dataType.MAXTEMP, timestamp, self.__toFloat(entry.get("t2m_max")))
                        # km/h -> m/s
                        self.addValue(RMParser.dataType.WIND, timestamp, 0.27777777777778 * self.__toFloat(entry.get("ws_avg")))
                        # cm -> mm
                        self.addValue(RMParser.dataType.RAIN, timestamp, 10 * self.__toFloat(entry.get("rain_sum")))
                        self.addValue(RMParser.dataType.DEWPOINT, timestamp, self.__toFloat(entry.get("dp_avg")))
                        self.addValue(RMParser.dataType.RH, timestamp, self.__toFloat(entry.get("rh_avg")))

                if self.parserDebug:
                    log.debug(self.result)
            except Exception, e:
                self.lastKnownError = "Error retrieving hourly data."
                log.error(self.lastKnownError)
                log.exception(e)

        #-----------------------------------------------------------------------------------------------
        #
        # Get daily data.
        #
        try:
            log.info("Retrieving data from: %s" % URLDaily)
            d = self.openURL(URLDaily, URLParams)
            if d is None:
                return

            json_data = d.read()
            # The feed uses single quotes; normalise to valid JSON.
            json_data = json_data.replace("'", "\"")
            daily = json.loads(json_data)

            for entry in daily:
                # only selected station
                if int(entry.get("StationID")) == station:
                    dateString = entry.get("startTime")
                    #timestamp = rmTimestampFromDateAsStringWithOffset(dateString)
                    timestamp = rmTimestampFromDateAsString(dateString[:-6], '%Y-%m-%dT%H:%M:%S')
                    if timestamp is None:
                        log.debug("Cannot convert daily data startTime: %s to unix timestamp" % dateString)
                        continue
                    # Add 12h in the future for FAWN timestamp to fix badly reported offset and make it middle of the day UTC (Dragos)
                    timestamp += 12 * 60 * 60
                    self.addValue(RMParser.dataType.TEMPERATURE, timestamp, self.__toFloat(entry.get("t2m_avg")))
                    self.addValue(RMParser.dataType.MINTEMP, timestamp, self.__toFloat(entry.get("t2m_min")))
                    self.addValue(RMParser.dataType.MAXTEMP, timestamp, self.__toFloat(entry.get("t2m_max")))
                    # km/h -> m/s
                    self.addValue(RMParser.dataType.WIND, timestamp, 0.27777777777778 * self.__toFloat(entry.get("ws_avg")))
                    # cm -> mm
                    self.addValue(RMParser.dataType.RAIN, timestamp, 10 * self.__toFloat(entry.get("rain_sum")))
                    self.addValue(RMParser.dataType.DEWPOINT, timestamp, self.__toFloat(entry.get("dp_avg")))
                    self.addValue(RMParser.dataType.RH, timestamp, self.__toFloat(entry.get("rh_avg")))
                    self.addValue(RMParser.dataType.MINRH, timestamp, self.__toFloat(entry.get("rh_min")))
                    self.addValue(RMParser.dataType.MAXRH, timestamp, self.__toFloat(entry.get("rh_max")))
                    # in -> mm
                    self.addValue(RMParser.dataType.ET0, timestamp, 25.4 * self.__toFloat(entry.get("et")))

            if self.parserDebug:
                log.debug(self.result)
        except Exception, e:
            self.lastKnownError = "Error retrieving daily data, trying report feed."
            log.error(self.lastKnownError)
            log.exception(e)
            return False
def addRuleSerialized(self, data):
    """Create a rule from its serialized dict form.

    `data` must contain "variable", "operator", "value", "action" and a
    "params" mapping of extra keyword arguments. Returns addRule()'s
    result, or -1 when the payload is malformed or the rule is rejected.
    """
    try:
        return self.addRule(data["variable"], data["operator"], data["value"], data["action"], **data["params"])
    except Exception:
        # Narrowed from a bare "except" so KeyboardInterrupt/SystemExit
        # still propagate; any payload/rule failure maps to -1.
        log.info("Can't add rule: %s" % data)
        return -1
def __init__(self):
    # Register with the parser framework first, then mark the background
    # UDP listener as not yet started (perform() spawns it lazily).
    RMParser.__init__(self)
    self.started = False
    log.info("Initializing WeatherFlow local UDP parser (ver 1.2.0)")
class RMRainSensorSoftware:
    """Software rain sensor: answers "is it raining?" from stored per-day QPF."""

    def __init__(self):
        self.__dayQPF = {}  # table with day timestamp and its QPF

    def setDayQPF(self, dayTimestamp, qpf):
        """Record the QPF forecast for a day; a None timestamp is ignored."""
        if dayTimestamp is not None:  # idiomatic form of "not x is None"
            self.__dayQPF[dayTimestamp] = qpf
            log.debug("Setting qpf %f for day: %d(%s)" % (qpf, dayTimestamp, rmTimestampToDateAsString(dayTimestamp)))

    def clearDayQPF(self):
        """Forget all stored per-day QPF values."""
        self.__dayQPF = {}

    def check(self, minQPF, timestamp=None):
        """Return True when the stored QPF for the day exceeds minQPF.

        `timestamp` defaults to the current day; any other value is
        normalized to its day start before the lookup.
        """
        if timestamp is None:
            timestamp = rmCurrentDayTimestamp()
        else:
            timestamp = rmGetStartOfDay(timestamp)

        dayQPF = self.__dayQPF.get(timestamp, None)
        return dayQPF is not None and dayQPF > minQPF

if __name__ == "__main__":
    s = RMRainSensor()
    log.info("Rain Detected: " + ("No", "Yes")[s.get()])
def perform(self):
    """Poll a WIFILogger (Davis) HTTP endpoint and publish its current readings.

    Converts the imperial values in the JSON payload (degF, mph, inches,
    inHg) to metric and maps the Davis forecast-icon value / rain rate to
    a RainMachine condition.
    NOTE(review): fragment — further condition mappings may follow past
    this chunk.
    """
    url = self.getParamAsString(self.params.get("WIFILoggerURL"))
    if url is None:
        self.lastKnownError = "Invalid WIFILogger URL"
        log.error(self.lastKnownError)
        return False

    wifiLoggerData = self.openURL(url)
    if wifiLoggerData is None:
        return

    json_data = wifiLoggerData.read()
    # Payload uses single quotes; normalise to valid JSON before parsing.
    json_data = json_data.replace("'", "\"")
    current_weather_data = json.loads(json_data)

    log.info("Parsing Wifi Logger Data...")

    # TIMESTAMP = "TIMESTAMP" # [Unix timestamp]
    timestamp = current_weather_data["utctime"]
    log.debug("TIMESTAMP: %s" % (timestamp))

    # TEMPERATURE = "TEMPERATURE" # [degC] (station reports degF)
    temperatureF = float(current_weather_data["tempout"])
    log.debug("temperatureF: %s" % (temperatureF))
    TEMPERATURE = self.toCelsius(temperatureF)
    log.debug("TEMPERATURE: %s" % (TEMPERATURE))
    self.addValue(RMParser.dataType.TEMPERATURE, timestamp, TEMPERATURE)

    # "hltempout" holds today's [low, high] outside temperature in degF.
    hltempout = current_weather_data["hltempout"]

    # MINTEMP = "MINTEMP" # [degC]
    log.debug("minTempF: %s" % (hltempout[0]))
    MINTEMP = self.toCelsius(float(hltempout[0]))
    log.debug("MINTEMP: %s" % (MINTEMP))
    self.addValue(RMParser.dataType.MINTEMP, timestamp, MINTEMP)

    # MAXTEMP = "MAXTEMP" # [degC]
    log.debug("maxTempF: %s" % (hltempout[1]))
    MAXTEMP = self.toCelsius(float(hltempout[1]))
    log.debug("MAXTEMP: %s" % (MAXTEMP))
    self.addValue(RMParser.dataType.MAXTEMP, timestamp, MAXTEMP)

    # RH = "RH" # [percent]
    RH = int(current_weather_data["humout"])
    log.debug("RH: %s" % (RH))
    self.addValue(RMParser.dataType.RH, timestamp, RH)

    # NOTE(review): the variable is reused for the humidity low/high pair.
    hltempout = current_weather_data["hlhumout"]

    # MINRH = "MINRH" # [percent]
    MINRH = int(hltempout[0])
    log.debug("MINRH: %s" % (MINRH))
    self.addValue(RMParser.dataType.MINRH, timestamp, MINRH)

    # MAXRH = "MAXRH" # [percent]
    MAXRH = int(hltempout[1])
    log.debug("MAXRH: %s" % (MAXRH))
    self.addValue(RMParser.dataType.MAXRH, timestamp, MAXRH)

    # WIND = "WIND" # [meter/sec]
    # here I will use the avg 10 minute speed, will convert from mph
    # 1 Mile per Hour = 0.44704 Meters per Second
    windMPH = float(current_weather_data["windavg10"])
    log.debug("windMPH: %s" % (windMPH))
    WIND = windMPH * 0.44704
    log.debug("WIND: %s" % (WIND))
    self.addValue(RMParser.dataType.WIND, timestamp, WIND)

    # SOLARRADIATION = "SOLARRADIATION" # [megaJoules / square meter per hour]
    # SKYCOVER = "SKYCOVER" # [percent]

    # RAIN = "RAIN" # [mm]
    # 1 inch = 25.4mm
    rainInch = float(current_weather_data["raind"])
    log.debug("rainInch: %s" % (rainInch))
    RAIN = rainInch * 25.4
    log.debug("RAIN: %s" % (RAIN))
    self.addValue(RMParser.dataType.RAIN, timestamp, RAIN)

    # ET0 = "ET0" # [mm]
    # POP = "POP" # [percent]
    # QPF = "QPF" # [mm]

    # PRESSURE = "PRESSURE" # [kilo Pa atmospheric pressure]
    # 1 inch = 3.3864 kpa
    barInch = float(current_weather_data["bar"])
    log.debug("barInch: %s" % (barInch))
    PRESSURE = barInch * 3.3864
    log.debug("PRESSURE: %s" % (PRESSURE))
    self.addValue(RMParser.dataType.PRESSURE, timestamp, PRESSURE)

    # DEWPOINT = "DEWPOINT" # [degC] (station reports degF)
    dewF = float(current_weather_data["dew"])
    log.debug("dewF: %s" % (dewF))
    DEWPOINT = self.toCelsius(dewF)
    log.debug("DEWPOINT: %s" % (DEWPOINT))
    self.addValue(RMParser.dataType.DEWPOINT, timestamp, DEWPOINT)

    # Rain rate (used below to refine the condition mapping).
    RAINRATE = float(current_weather_data["rainr"])
    log.debug("RAINRATE: %s" % (RAINRATE))

    # CONDITION = "CONDITION" # [string]
    #
    # current conditions ... from Davis
    # Forecast Icon Values
    #
    # Value Decimal Value Hex Segments Shown Forecast
    currentConditionValue = int(current_weather_data["foreico"])

    # 8 0x08 Sun Mostly Clear
    # mapping to "Fair"
    if currentConditionValue == 8:
        self.addValue(RMParser.dataType.CONDITION, timestamp, RMParser.conditionType.Fair)
        log.debug("Current Condition Fair")
    # 6 0x06 Partial Sun + Cloud Partly Cloudy
    # 7 0x07 Partial Sun + Cloud + Rain Partly Cloudy, Rain within 12 hours
    # 22 0x16 Partial Sun + Cloud + Snow Partly Cloudy, Snow within 12 hours
    # 23 0x17 Partial Sun + Cloud + Rain + Snow Partly Cloudy, Rain or Snow within 12 hours
    # mapping to "PartlyCloudy"
    elif ((currentConditionValue == 6) or (currentConditionValue == 7) or (currentConditionValue == 22) or (currentConditionValue == 23)):
        self.addValue(RMParser.dataType.CONDITION, timestamp, RMParser.conditionType.PartlyCloudy)
        log.debug("Current Condition Partly Cloudy")
    # 2 0x02 Cloud Mostly Cloudy
    # 3 0x03 Cloud + Rain Mostly Cloudy, Rain within 12 hours
    # 18 0x12 Cloud + Snow Mostly Cloudy, Snow within 12 hours
    # 19 0x13 Cloud + Rain + Snow Mostly Cloudy, Rain or Snow within 12 hours
    # mapping to "MostlyCloudy"
    elif ((currentConditionValue == 2) or (currentConditionValue == 3) or (currentConditionValue == 18) or (currentConditionValue == 19)):
        self.addValue(RMParser.dataType.CONDITION, timestamp, RMParser.conditionType.MostlyCloudy)
        log.debug("Current Condition Mostly Cloudy")

    # here lets check rain rate (in/h thresholds)
    if (0 < RAINRATE <= 0.098):
        self.addValue(RMParser.dataType.CONDITION, timestamp, RMParser.conditionType.LightRain)
        log.debug("Current Condition Light Rain")
    elif (0.098 < RAINRATE <= 0.39):
        self.addValue(RMParser.dataType.CONDITION, timestamp, RMParser.conditionType.RainShowers)
        log.debug("Current Condition Rain Showers")
    elif (RAINRATE > 0.39):
        self.addValue(RMParser.dataType.CONDITION, timestamp, RMParser.conditionType.HeavyRain)
        log.debug("Current Condition Heavy Rain")
from RMUtilsFramework.rmMemoryUsageStats import RMMemoryUsageStats ##------------------------------------------------------------------------ ## ## globalSettings.parseSysArguments(True) # Default log (persistent) #log.setConsoleLogLevel() # Reduce console output level to ERROR by default log.enableFileLogging(os.path.join(globalSettings.databasePath, "log/rainmachine.log")) #globalGPIO.turnOnOffAllLeds(True) log.info("RainMachine SDK Copyright (c) 2015 Green Electronics LLC") #log.info("---------------------------------------- USAGE ----------------------------------------------") #log.info(" python %s" % (__file__)) #log.info(" python %s name,lat,long,et0avg [httpServerPort]" % (__file__)) #log.info("---------------------------------------------------------------------------------------------") ##------------------------------------------------------------------------ ## Shutdown handler for SIGINT signal ## def mainShutdownHandler(signal, frame): log.info(">>>>>>> Got interrupt signal shutting down ...") RMMainManager.instance.stop() ##------------------------------------------------------------------------ ## Global message queue for threads
def __upload(self, localPath, file):
    """Upload one file to the FTP server, creating its remote subfolder if needed.

    `file` may carry a relative sub-path; leading slashes are stripped so
    the transfer always lands below the FTP working directory.
    """
    with open(os.path.join(localPath, file), "rb") as f:
        log.debug("Uploading file: %s" % file)
        extraRemotePath = os.path.split(file)[0]
        if extraRemotePath:
            if os.path.isabs(extraRemotePath):
                extraRemotePath = extraRemotePath.lstrip("/")
                file = file.lstrip("/")
            try:
                self.__ftp.mkd(extraRemotePath)
            except Exception as e:
                # Usually just means the folder already exists; harmless.
                log.debug("Folder %s/ creation failed: %s" % (extraRemotePath, e))
        self.__ftp.storbinary("STOR " + file, f)

def __getUploadFolderName(self):
    """Build a unique remote folder name from net name, HW version, MAC and the current timestamp."""
    return globalSettings.netName + "-SPK-" + str(globalSettings.hardwareVersion) + "-" \
           + globalWIFI.wifiInterface.macAddress + "-" + str(rmCurrentTimestamp())

# -----------------------------------------------------------------------------------------------------------
# Main Test Unit
#
if __name__ == "__main__":
    globalSettings.parseSysArguments()
    globalWIFI.detect()
    lu = RMDiagUpload()
    lu.uploadDiag()
    log.info("Status is: %s" % lu.status)
def getPercentage(self):
    """Sample the CPU counters twice, one second apart, and return usage in percent.

    Returns 0 when the counters cannot be read or did not advance between
    the two samples.
    """
    usage = 0
    try:
        startUsage = self.get()
        time.sleep(1)  # sampling interval for the delta computation
        finalUsage = self.get()
        try:
            prevActive = startUsage['active']
            prevIdle = startUsage['idle']
            active = finalUsage['active']
            idle = finalUsage['idle']
            deltaActive = active - prevActive
            deltaIdle = idle - prevIdle
            usage = (float(deltaActive) / (deltaActive + deltaIdle)) * 100
        except ZeroDivisionError:
            # No tick advanced between the samples; report 0 usage.
            pass
    except Exception as e:
        log.error("Cannot read cpu stats from %s because %s" % (self.statpath, str(e)))
    return usage

#-----------------------------------------------------------------------------------------------------------
# Main Test Unit
#
if __name__ == "__main__":
    log.info("CPU Utilisation: %.2f" % RMCPUStats().getPercentage())