def resetToDefault(self):
    """Reset the mixer and all parser tables/configs to their defaults.

    Clears cached parser and forecast data, commits the parser database,
    then re-registers every known parser with its location-dependent
    enabled flag. Exceptions are logged, not raised.
    NOTE(review): `result` is computed but not returned in this block -
    confirm against the full source whether a `return result` follows.
    """
    log.info("**** BEGIN Reset parsers and mixer to default")
    result = False
    try:
        self.mixer.resetToDefault()
        self.parserDataTable.clear(False)
        self.forecastTable.clear(False)
        globalDbManager.parserDatabase.commit()
        for parserConfig in self.parsers:
            # Drop runtime state accumulated since the last reset.
            parserConfig.runtimeLastForecastInfo = None
            parserConfig.failCounter = 0
            parserConfig.lastFailTimestamp = None
            parser = self.parsers[parserConfig]
            enabled = parser.isEnabledForLocation(globalSettings.location.timezone,
                                                  globalSettings.location.latitude,
                                                  globalSettings.location.longitude)
            self.parserTable.addParser(parserConfig.fileName, parserConfig.name, enabled, parser.params)
        result = True
    except Exception as e:  # fixed: "except Exception, e" is invalid in Python 3
        log.exception(e)
def perform(self):
    """Fetch CIMIS historic data, chunked into intervals of at most
    `self.maxAllowedDays` days, walking backwards from today.

    Each interval is retrieved independently; a failed interval is logged
    and recorded in `lastKnownError` without aborting the remaining ones.
    """
    days = self.params["historicDays"]
    # Ceil-divide the requested span into API-sized intervals
    # (Python 2 integer division plus a 0/1 remainder flag).
    intervals = days / self.maxAllowedDays + (days % self.maxAllowedDays != 0)
    lastIntervalStartDay = datetime.date.today()
    if intervals > 1:
        days = self.maxAllowedDays
    log.debug("Days: %d Intervals: %d" % (days, intervals))
    for i in range(0, intervals):
        # CIMIS real data starts from yesterday.
        startDay = lastIntervalStartDay - datetime.timedelta(days=1)
        endDay = startDay - datetime.timedelta(days=(days + 1))
        lastIntervalStartDay = endDay
        try:
            # Called with startDay/endDay swapped because CIMIS expects
            # historic intervals (oldest date first).
            self.__retrieveData(endDay, startDay)
        except Exception as e:  # fixed: legacy comma except syntax
            log.error("*** Error running CIMIS parser")
            self.lastKnownError = "Error: Data retrieval failed"
            log.exception(e)
def perform(self):
    """Validate the WillyWeather API key and optionally look up nearby stations.

    Populates `params["_nearbyStationsIDList"]` via `getNearbyStations`
    when station lookup is enabled. Sets `lastKnownError` and returns
    early when no usable API key is configured.
    """
    self.apiKey = self.params.get("apiKey", None)
    self.stationID = self.params.get("stationID", None)
    # NOTE(review): isinstance(..., str) rejects unicode keys on Python 2 -
    # confirm whether basestring was intended.
    if self.apiKey is None or not self.apiKey or not isinstance(self.apiKey, str):
        self.lastKnownError = "Error: No API Key. Please register an account at https://www.willyweather.com.au/info/api.html"
        return
    self.params["_nearbyStationsIDList"] = []
    self.noDays = 7
    if self.params.get("stationLookUp"):
        s = self.settings
        llat = s.location.latitude
        llon = s.location.longitude
        searchURL = "https://api.willyweather.com.au/v2/" + self.apiKey + "/search.json"
        searchURLParams = [("lat", llat), ("lng", llon), ("units", "distance:km")]
        try:
            d = self.openURL(searchURL, searchURLParams)
            if d is None:
                return
            search = json.loads(d.read())
            if self.parserDebug:
                log.info(search)
            self.getNearbyStations(search)
        except Exception as e:  # fixed: legacy comma except syntax
            log.error("*** Error finding nearby stations")
            log.exception(e)
def resetToDefault(self):
    """Reset the mixer and all parser tables/configs to their defaults.

    Clears cached parser and forecast data, commits the parser database,
    then re-registers every known parser with its location-dependent
    enabled flag. Exceptions are logged, not raised.
    NOTE(review): `result` is computed but not returned in this block -
    confirm against the full source whether a `return result` follows.
    """
    log.info("**** BEGIN Reset parsers and mixer to default")
    result = False
    try:
        self.mixer.resetToDefault()
        self.parserDataTable.clear(False)
        self.forecastTable.clear(False)
        globalDbManager.parserDatabase.commit()
        for parserConfig in self.parsers:
            # Drop runtime state accumulated since the last reset.
            parserConfig.runtimeLastForecastInfo = None
            parserConfig.failCounter = 0
            parserConfig.lastFailTimestamp = None
            parser = self.parsers[parserConfig]
            enabled = parser.isEnabledForLocation(globalSettings.location.timezone,
                                                  globalSettings.location.latitude,
                                                  globalSettings.location.longitude)
            self.parserTable.addParser(parserConfig.fileName, parserConfig.name, enabled, parser.params)
        result = True
    except Exception as e:  # fixed: "except Exception, e" is invalid in Python 3
        log.exception(e)
def perform(self):
    """Fetch a Dark Sky daily forecast for the configured location."""
    s = self.settings
    appKey = self.params.get("appKey", None)
    if appKey is None:
        self.lastKnownError = "Error: No Api Key"
        return
    URL = "https://api.darksky.net/forecast/" + str(appKey) + "/" + str(
        s.location.latitude) + "," + str(s.location.longitude)
    # Only the "daily" block is needed, so exclude everything else.
    # Fixed: the exclude list contained a stray space (" hourly"), which the
    # API most likely would not match, so hourly data was still downloaded.
    URLParams = [
        ("units", "si"),
        ("exclude", "currently,minutely,alerts,flags,hourly")
    ]
    try:
        d = self.openURL(URL, URLParams)
        if d is None:
            return
        forecast = json.loads(d.read())
        self.__getDailyData(forecast)
    except Exception as e:  # fixed: legacy comma except syntax
        log.error("*** Error running darksky.net parser")
        log.exception(e)
def __getDailyData(self, forecast):
    """Extract the daily entries list from a Dark Sky forecast response.

    Computes the current/maximum day timestamps used for filtering and
    pulls `forecast["daily"]["data"]`; missing daily data is logged and
    recorded in `lastKnownError` instead of raising.
    """
    dayTimestamp = rmCurrentDayTimestamp()
    # Upper bound of the forecast window, in seconds since epoch.
    maxDayTimestamp = dayTimestamp + globalSettings.parserDataSizeInDays * 86400
    daily = []
    try:
        daily = forecast["daily"]["data"]
    except Exception as e:  # fixed: legacy comma except syntax
        log.error("*** No daily information found in response!")
        self.lastKnownError = "Warning: No daily information"
        log.exception(e)
def perform(self):
    """Fetch FAWN last-hour station summaries and record weather values.

    Only runs the hourly branch when the "useHourly" param is set; values
    are filtered to the configured station and converted to metric units.
    """
    s = self.settings
    URLHourly = "http://fawn.ifas.ufl.edu/controller.php/lastHour/summary/json"
    URLDaily = "http://fawn.ifas.ufl.edu/controller.php/lastDay/summary/json"
    URLParams = []
    useHourly = self.params.get("useHourly", False)
    # -----------------------------------------------------------------------------------------------
    #
    # Get hourly data.
    #
    if useHourly:
        try:
            d = self.openURL(URLHourly, URLParams)
            if d is None:
                return
            json_data = d.read()
            # The feed uses single quotes; normalize them so json can parse it.
            json_data = json_data.replace("'", '"')
            hourly = json.loads(json_data)
            for entry in hourly:
                # only selected station
                if int(entry.get("StationID")) == self.params.get("station"):
                    dateString = entry.get("startTime")
                    timestamp = rmTimestampFromDateAsString(dateString[:-6], "%Y-%m-%dT%H:%M:%S")
                    if timestamp is None:
                        log.debug("Cannot convert hourly data startTime: %s to unix timestamp" % dateString)
                        continue
                    # Add 12h in the future for FAWN timestamp to fix badly reported
                    # offset and make it middle of the day UTC (Dragos)
                    timestamp += 12 * 60 * 60
                    self.addValue(RMParser.dataType.TEMPERATURE, timestamp, self.__toFloat(entry.get("t2m_avg")))
                    self.addValue(RMParser.dataType.MINTEMP, timestamp, self.__toFloat(entry.get("t2m_min")))
                    self.addValue(RMParser.dataType.MAXTEMP, timestamp, self.__toFloat(entry.get("t2m_max")))
                    # km/h -> m/s
                    self.addValue(RMParser.dataType.WIND, timestamp, 0.27777777777778 * self.__toFloat(entry.get("ws_avg")))
                    # cm -> mm
                    self.addValue(RMParser.dataType.RAIN, timestamp, 10 * self.__toFloat(entry.get("rain_sum")))
                    self.addValue(RMParser.dataType.DEWPOINT, timestamp, self.__toFloat(entry.get("dp_avg")))
                    self.addValue(RMParser.dataType.RH, timestamp, self.__toFloat(entry.get("rh_avg")))
            if self.parserDebug:
                log.debug(self.result)
        except Exception as e:  # fixed: legacy comma except syntax
            log.error("*** Error retrieving hourly data from FAWN")
            log.exception(e)
def performWithDataFeeds(self, station):
    """Fetch FAWN last-hour summaries for an explicit station ID.

    Same retrieval as `perform`, but the station is passed in rather than
    read from params; failures are stored in `lastKnownError`.
    """
    s = self.settings
    URLHourly = "http://fawn.ifas.ufl.edu/controller.php/lastHour/summary/json"
    URLDaily = "http://fawn.ifas.ufl.edu/controller.php/lastDay/summary/json"
    URLParams = []
    useHourly = self.params.get("useHourly", False)
    #-----------------------------------------------------------------------------------------------
    #
    # Get hourly data.
    #
    if useHourly:
        try:
            log.info("Retrieving data from: %s" % URLHourly)
            d = self.openURL(URLHourly, URLParams)
            if d is None:
                return
            json_data = d.read()
            # The feed uses single quotes; normalize them so json can parse it.
            json_data = json_data.replace("'", "\"")
            hourly = json.loads(json_data)
            for entry in hourly:
                # only selected station
                if int(entry.get("StationID")) == station:
                    dateString = entry.get("startTime")
                    timestamp = rmTimestampFromDateAsString(dateString[:-6], '%Y-%m-%dT%H:%M:%S')
                    if timestamp is None:
                        log.debug("Cannot convert hourly data startTime: %s to unix timestamp" % dateString)
                        continue
                    # Add 12h in the future for FAWN timestamp to fix badly reported
                    # offset and make it middle of the day UTC (Dragos)
                    timestamp += 12 * 60 * 60
                    self.addValue(RMParser.dataType.TEMPERATURE, timestamp, self.__toFloat(entry.get("t2m_avg")))
                    self.addValue(RMParser.dataType.MINTEMP, timestamp, self.__toFloat(entry.get("t2m_min")))
                    self.addValue(RMParser.dataType.MAXTEMP, timestamp, self.__toFloat(entry.get("t2m_max")))
                    # km/h -> m/s
                    self.addValue(RMParser.dataType.WIND, timestamp, 0.27777777777778 * self.__toFloat(entry.get("ws_avg")))
                    # cm -> mm
                    self.addValue(RMParser.dataType.RAIN, timestamp, 10 * self.__toFloat(entry.get("rain_sum")))
                    self.addValue(RMParser.dataType.DEWPOINT, timestamp, self.__toFloat(entry.get("dp_avg")))
                    self.addValue(RMParser.dataType.RH, timestamp, self.__toFloat(entry.get("rh_avg")))
            if self.parserDebug:
                log.debug(self.result)
        except Exception as e:  # fixed: legacy comma except syntax
            self.lastKnownError = "Error retrieving hourly data."
            log.error(self.lastKnownError)
            log.exception(e)
def importFromSprinklerV1Db(self, filePath):
    """Import watering-log rows from a Sprinkler v1 database file.

    Groups rows by start-of-day and assigns one fresh token per day, then
    replaces the current table contents in a single transaction.
    Returns True on success, False if the v1 DB cannot be opened or the
    import raises; implicitly None when our own database is not open.
    """
    if self.database.isOpen():
        v1DB = RMDatabase(filePath)
        if not v1DB.open():
            return False
        result = False
        try:
            data = OrderedDict()
            valuesToInsert = []
            rows = v1DB.execute(
                "SELECT ts_started, usersch_id, zid, user_sec, machine_sec, real_sec, flag FROM %s ORDER BY ts_started, zid" % self._tableName)
            for row in rows:
                dayTimestamp = rmGetStartOfDay(int(row[0]))
                dayData = data.get(dayTimestamp, None)
                if dayData is None:
                    # One token per distinct day of records.
                    dayData = {"token": uuid.uuid4().hex, "tokenTimestamp": dayTimestamp}
                    data[dayTimestamp] = dayData
                valuesToInsert.append([row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                                       dayData["token"], dayData["tokenTimestamp"]])
            if valuesToInsert:
                # Replace-all semantics: clear, bulk insert, commit.
                self.database.execute("DELETE FROM %s" % self._tableName)
                self.database.executeMany(
                    "INSERT INTO %s(ts_started, usersch_id, zid, user_sec, machine_sec, real_sec, flag, token, tokenTimestamp) "
                    "VALUES(?,?,?,?,?,?,?,?,?)" % self._tableName,
                    valuesToInsert)
                self.database.commit()
            result = True
        except Exception as e:  # fixed: legacy comma except syntax
            log.exception(e)
        v1DB.close()
        return result
def installParser(self, tempFilePath, fileName):
    """Install an uploaded parser file and register it with the manager.

    Moves the file into the parsers directory, imports it (importing
    appends the parser class to RMParser.parsers), computes its
    location-enabled flag and persists its config. On failure the
    installed file is removed. Returns True on success, None on failure.
    """
    filePath = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "parsers", fileName))
    shutil.move(tempFilePath, filePath)
    try:
        module = imp.load_source(fileName, filePath)
        log.info(" * Parser %s successful loaded from file '%s'" % (fileName, filePath))
        parser = RMParser.parsers[-1]  # Last added parser
        enabled = parser.isEnabledForLocation(globalSettings.location.timezone,
                                              globalSettings.location.latitude,
                                              globalSettings.location.longitude)
        parserConfig, isNew = self.parserTable.addParser(fileName, parser.parserName, enabled, parser.params)
        if not isNew:
            # Re-install: keep previously saved params and drop the stale entry.
            params = self.parserTable.getParserParams(parserConfig.dbID)
            if params:
                parser.params = params
            RMParser.parsers.pop()  # delete old entry
            pkeys = list(self.parsers.keys())
            for pkey in pkeys:
                # Fixed: compare ids with "==" - "is" tests object identity,
                # which is unreliable for integers.
                if parserConfig.dbID == pkey.dbID:
                    del self.parsers[pkey]
        self.parsers[parserConfig] = parser
        parserConfig.userDataTypes = self.userDataTypeTable.addRecords(parser.userDataTypes)
        self.parserUserDataTypeTable.addRecords(parserConfig.dbID, parserConfig.userDataTypes)
        log.debug(parserConfig)
        return True
    except Exception as e:
        try:
            # Best-effort cleanup of the half-installed file.
            if os.path.exists(filePath):
                os.remove(filePath)
        except Exception as cleanupError:  # fixed: no longer shadows the install error "e"
            log.exception(cleanupError)
        log.error(" * Error installing/loading parser %s from file '%s'" % (fileName, filePath))
        log.exception(e)
def perform(self): s = self.settings # Direct Forecast.io #URL = "https://api.forecast.io/forecast/d1deb05ce0bf3858054236e0171077e5/" + \ # `s.location.latitude` + "," + `s.location.longitude` appKey = self.params.get("appKey", None) if appKey is None: return if self.params["useProxy"]: # RainMachine Forecast.io proxy URL = s.doyDownloadUrl + "/api/forecast_io/forecast/" + appKey + "/" + \ str(s.location.latitude) + "," + str(s.location.longitude) else: URL = "https://api.forecast.io/forecast/" + appKey + "/" + \ str(s.location.latitude) + "," + str(s.location.longitude) URLParams = \ [ ("units", "si"), ("exclude", "currently,minutely,alerts,flags"), ("extend", "hourly") ] try: d = self.openURL(URL, URLParams) if d is None: return forecast = json.loads(d.read()) dayTimestamp = rmCurrentDayTimestamp() maxDayTimestamp = dayTimestamp + globalSettings.parserDataSizeInDays * 86400 hourly = [] daily = [] try: hourly = forecast["hourly"]["data"] except Exception, e: log.error("*** No hourly information found in response!") log.exception(e) try: daily = forecast["daily"]["data"] except Exception, e: log.error("*** No daily information found in response!") log.exception(e)
def perform(self): s = self.settings appKey = self.params.get("appKey", None) if appKey is None: self.lastKnownError = "Error: No Api Key" return if self.params["useProxy"]: # RainMachine Forecast.io proxy URL = s.doyDownloadUrl + "/api/forecast_io/forecast/" + str(appKey) + "/" + \ str(s.location.latitude) + "," + str(s.location.longitude) else: URL = "https://api.forecast.io/forecast/" + str(appKey) + "/" + \ str(s.location.latitude) + "," + str(s.location.longitude) URLParams = \ [ ("units", "si"), ("exclude", "currently,minutely,alerts,flags"), ("extend", "hourly") ] try: d = self.openURL(URL, URLParams) if d is None: return forecast = json.loads(d.read()) dayTimestamp = rmCurrentDayTimestamp() maxDayTimestamp = dayTimestamp + globalSettings.parserDataSizeInDays * 86400 hourly = [] daily = [] try: hourly = forecast["hourly"]["data"] except Exception, e: log.error("*** No hourly information found in response!") log.exception(e) self.lastKnownError = "Warning: No hourly information" try: daily = forecast["daily"]["data"] except Exception, e: log.error("*** No daily information found in response!") self.lastKnownError = "Warning: No daily information" log.exception(e)
def postRequest(self, url, params):
    """POST url-encoded params and return the raw response body.

    On URL errors, retries once with SSL certificate verification disabled
    (matches the sibling implementation that guards for older Python
    builds lacking ssl._create_unverified_context).
    Returns the response bytes, or None when both attempts fail.
    """
    params = urlencode(params)
    headers = {"Content-Type": "application/x-www-form-urlencoded;charset=utf-8"}
    req = urllib2.Request(url=url, data=params, headers=headers)
    resp = None
    try:
        resp = urllib2.urlopen(req).read()
    except urllib2.URLError as e:  # fixed: legacy comma except syntax
        log.debug(e)
        if hasattr(ssl, '_create_unverified_context'):  # consistency with sibling: guard older Pythons
            try:
                # Retry ignoring invalid certificates.
                context = ssl._create_unverified_context()
                resp = urllib2.urlopen(req, context=context).read()
            except Exception as e:
                log.exception(e)
                return None
    return resp  # fixed: the successful response was never returned
def installParser(self, tempFilePath, fileName):
    """Install an uploaded parser file and register it with the manager.

    Moves the file into the parsers directory, imports it (importing
    appends the parser class to RMParser.parsers), computes its
    location-enabled flag and persists its config. On failure the
    installed file is removed. Returns True on success, None on failure.
    """
    filePath = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "parsers", fileName))
    shutil.move(tempFilePath, filePath)
    try:
        module = imp.load_source(fileName, filePath)
        log.info(" * Parser %s successful loaded from file '%s'" % (fileName, filePath))
        parser = RMParser.parsers[-1]  # Last added parser
        enabled = parser.isEnabledForLocation(globalSettings.location.timezone,
                                              globalSettings.location.latitude,
                                              globalSettings.location.longitude)
        parserConfig, isNew = self.parserTable.addParser(fileName, parser.parserName, enabled, parser.params)
        if not isNew:
            # Re-install: keep previously saved params and drop the stale entry.
            params = self.parserTable.getParserParams(parserConfig.dbID)
            if params:
                parser.params = params
            RMParser.parsers.pop()  # delete old entry
            pkeys = list(self.parsers.keys())
            for pkey in pkeys:
                # Fixed: compare ids with "==" - "is" tests object identity,
                # which is unreliable for integers.
                if parserConfig.dbID == pkey.dbID:
                    del self.parsers[pkey]
        self.parsers[parserConfig] = parser
        parserConfig.userDataTypes = self.userDataTypeTable.addRecords(parser.userDataTypes)
        self.parserUserDataTypeTable.addRecords(parserConfig.dbID, parserConfig.userDataTypes)
        log.debug(parserConfig)
        return True
    except Exception as e:
        try:
            # Best-effort cleanup of the half-installed file.
            if os.path.exists(filePath):
                os.remove(filePath)
        except Exception as cleanupError:  # fixed: no longer shadows the install error "e"
            log.exception(cleanupError)
        log.error(" * Error installing/loading parser %s from file '%s'" % (fileName, filePath))
        log.exception(e)
def getNearbyStations(self, jsonData):
    """Populate params["_nearbyStationsIDList"] with stations near the location.

    Uses the station id from the search response to query the WillyWeather
    "closest" endpoint and formats each result as a human-readable string.
    """
    try:
        nearestStation = jsonData["location"].get("id")
    except Exception:  # fixed: bare "except:" also swallowed SystemExit/KeyboardInterrupt
        log.warning("No closest station found!")
        self.lastKnownError = "Warning: No closest station found!"
        return
    closestURL = "https://api.willyweather.com.au/v2/" + self.apiKey + "/search/closest.json"
    closestURLParams = [("id", nearestStation),
                        ("weatherTypes", "general"),
                        ("units", "distance:km")]
    try:
        d = self.openURL(closestURL, closestURLParams)
        if d is None:
            return
        closest = json.loads(d.read())
        if self.parserDebug:
            log.info(closest)
        for i in closest["general"]:
            stationId = i["id"]  # renamed: "id" shadowed the builtin
            name = i["name"]
            region = i["region"]
            postcode = i["postcode"]
            distance = i["distance"]
            infoStr = "Station ID = " + str(stationId) + " (" + name + ", " + region + ", " + str(postcode) + ", " + str(distance) + " kms away)"
            self.params["_nearbyStationsIDList"].append(infoStr)
        if self.parserDebug:
            log.debug(self.params["_nearbyStationsIDList"])
    except Exception as e:  # fixed: legacy comma except syntax
        log.error("*** Error running WillyWeather parser")
        log.exception(e)
def importFromSprinklerV1Db(self, filePath):
    """Import watering-log rows from a Sprinkler v1 database file.

    Groups rows by start-of-day and assigns one fresh token per day, then
    replaces the current table contents in a single transaction.
    Returns True on success, False if the v1 DB cannot be opened or the
    import raises; implicitly None when our own database is not open.
    """
    if self.database.isOpen():
        v1DB = RMDatabase(filePath)
        if not v1DB.open():
            return False
        result = False
        try:
            data = OrderedDict()
            valuesToInsert = []
            rows = v1DB.execute(
                "SELECT ts_started, usersch_id, zid, user_sec, machine_sec, real_sec, flag FROM %s ORDER BY ts_started, zid" % self._tableName)
            for row in rows:
                dayTimestamp = rmGetStartOfDay(int(row[0]))
                dayData = data.get(dayTimestamp, None)
                if dayData is None:
                    # One token per distinct day of records.
                    dayData = {"token": uuid.uuid4().hex, "tokenTimestamp": dayTimestamp}
                    data[dayTimestamp] = dayData
                valuesToInsert.append([row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                                       dayData["token"], dayData["tokenTimestamp"]])
            if valuesToInsert:
                # Replace-all semantics: clear, bulk insert, commit.
                self.database.execute("DELETE FROM %s" % self._tableName)
                self.database.executeMany("INSERT INTO %s(ts_started, usersch_id, zid, user_sec, machine_sec, real_sec, flag, token, tokenTimestamp) "
                                          "VALUES(?,?,?,?,?,?,?,?,?)" % self._tableName,
                                          valuesToInsert)
                self.database.commit()
            result = True
        except Exception as e:  # fixed: legacy comma except syntax
            log.exception(e)
        v1DB.close()
        return result
def perform(self):
    """Fetch CIMIS historic data, chunked into intervals of at most
    `self.maxAllowedDays` days, walking backwards from today.

    Each interval is retrieved independently; failures are logged without
    aborting the remaining intervals.
    """
    days = self.params["historicDays"]
    # Ceil-divide the requested span into API-sized intervals
    # (Python 2 integer division plus a 0/1 remainder flag).
    intervals = days / self.maxAllowedDays + (days % self.maxAllowedDays != 0)
    lastIntervalStartDay = datetime.date.today()
    if intervals > 1:
        days = self.maxAllowedDays
    log.debug("Days: %d Intervals: %d" % (days, intervals))
    for i in range(0, intervals):
        startDay = lastIntervalStartDay - datetime.timedelta(days=1)  # CIMIS real data starts from yesterday
        endDay = startDay - datetime.timedelta(days=(days + 1))
        lastIntervalStartDay = endDay
        try:
            log.debug("Running CIMIS for startDay: %s endDay: %s" % (startDay, endDay))
            # Called with startDay/endDay swapped because CIMIS expects
            # historic intervals (oldest date first).
            self.__retrieveData(endDay, startDay)
        except Exception as e:  # fixed: legacy comma except syntax
            log.error("*** Error running cimis parser")
            log.exception(e)
def postRequest(self, url, params):
    """POST url-encoded params and return the raw response body.

    On URL errors, retries once with SSL certificate verification disabled
    (guarded for Python builds that lack ssl._create_unverified_context -
    this was needed for mac os only, to ignore invalid certificates).
    Returns the response bytes, or None when the retry fails.
    """
    params = urlencode(params)
    headers = {"Content-Type": "application/x-www-form-urlencoded;charset=utf-8"}
    req = urllib2.Request(url=url, data=params, headers=headers)
    resp = None
    try:
        resp = urllib2.urlopen(req).read()
    except urllib2.URLError as e:  # fixed: legacy comma except syntax
        log.debug(e)
        if hasattr(ssl, '_create_unverified_context'):
            try:
                context = ssl._create_unverified_context()
                resp = urllib2.urlopen(req, context=context).read()
            except Exception as e:
                log.exception(e)
                return None
    return resp  # fixed: the successful response was never returned
def setParserParams(self, parserID, params):
    """Merge user-supplied params into a parser's params, type-checked per key.

    Only keys already present in the parser's params are considered; a new
    value is accepted only when the old value is None or the types match.
    Returns False when the parser is unknown or merging raises.
    NOTE(review): no explicit success-path return in this block - confirm
    the remainder of this method against the full source.
    """
    parserConfig = self.findParserConfig(parserID)
    if parserConfig is None:
        return False
    parser = self.parsers[parserConfig]
    newParams = copy.deepcopy(parser.params)
    hasChanges = False
    try:
        for key, oldValue in parser.params.iteritems():
            newValue = params.get(key, None)
            if newValue is not None:
                if oldValue is None or type(oldValue) == type(newValue):
                    newParams[key] = newValue
                    hasChanges = True
                else:
                    log.warning("Types do not match: oldType=%s, newType=%s" % (type(oldValue), type(newValue)))
    except Exception as e:  # fixed: legacy comma except syntax
        log.exception(e)
        return False
def setParserParams(self, parserID, params):
    """Merge user-supplied params into a parser's params, type-checked per key.

    Only keys already present in the parser's params are considered; a new
    value is accepted only when the old value is None or the types match.
    Returns False when the parser is unknown or merging raises.
    NOTE(review): no explicit success-path return in this block - confirm
    the remainder of this method against the full source.
    """
    parserConfig = self.findParserConfig(parserID)
    if parserConfig is None:
        return False
    parser = self.parsers[parserConfig]
    newParams = copy.deepcopy(parser.params)
    hasChanges = False
    try:
        for key, oldValue in parser.params.iteritems():
            newValue = params.get(key, None)
            if newValue is not None:
                if oldValue is None or type(oldValue) == type(newValue):
                    newParams[key] = newValue
                    hasChanges = True
                else:
                    log.warning("Types do not match: oldType=%s, newType=%s" % (type(oldValue), type(newValue)))
    except Exception as e:  # fixed: legacy comma except syntax
        log.exception(e)
        return False
def perform(self):
    """Fetch the OpenWeatherMap forecast for the configured location."""
    s = self.settings
    apiKey = self.params.get("apiKey", None)
    if apiKey is None:
        self.lastKnownError = "Error: No API Key. Please register for a free account on https://openweathermap.org/."
        return
    URL = "https://api.openweathermap.org/data/2.5/forecast"
    URLParams = [
        ("appid", str(apiKey)),
        ("lat", str(s.location.latitude)),
        ("lon", str(s.location.longitude)),
        ("units", "metric"),
    ]
    forecast = None
    try:
        d = self.openURL(URL, URLParams)
        if d is None:
            return
        forecast = json.loads(d.read())
        if self.parserDebug:
            # Dump the raw response for offline inspection.
            with open("dump.json", "w") as f:
                json.dump(forecast, f)
            log.info(forecast)
        self.__getForecastData(forecast)
    except Exception as e:  # fixed: legacy comma except syntax
        log.error("*** Error running OpenWeatherMap parser")
        log.exception(e)
def __load(self, parserDir):
    """Load all parser modules found under parserDir and register them.

    First builds a modname -> file-info map, preferring a .py source over a
    same-named compiled file; then imports each module (importing appends
    the parser class to RMParser.parsers) and persists its configuration.
    """
    log.info("*** BEGIN Loading parsers from '%s'" % parserDir)
    fileMap = OrderedDict()
    #---------------------------------------------------------------------------
    # Discover candidate parser files.
    #
    for root, dirs, files in os.walk(parserDir):
        for fname in files:
            modname, modext = os.path.splitext(fname)
            modPath = os.path.join(root, fname)
            fileEntry = fileMap.get(modname, None)
            if fileEntry is None:
                fileMap[modname] = {"file": fname, "name": modname, "ext": modext, "path": modPath}
            elif modext == ".py":
                # A .py source wins over a stale compiled file of the same name.
                fileEntry["file"] = fname
                fileEntry["name"] = modname
                fileEntry["ext"] = modext
                fileEntry["path"] = modPath
    #---------------------------------------------------------------------------
    # Import and register each discovered parser.
    #
    for fileEntry in fileMap.values():
        try:
            if fileEntry["ext"] == ".pyc":
                module = imp.load_compiled(fileEntry["name"], fileEntry["path"])
            elif fileEntry["ext"] == ".py":
                module = imp.load_source(fileEntry["name"], fileEntry["path"])
            else:
                continue
        except Exception as e:
            log.error(" * Error loading parser %s from file '%s'" % (fileEntry["name"], fileEntry["path"]))
            log.exception(e)
            continue
        try:
            log.debug(" * Parser %s successful loaded from file '%s'" % (fileEntry["name"], fileEntry["path"]))
            parser = RMParser.parsers[-1]  # Last added parser
            enabled = parser.isEnabledForLocation(globalSettings.location.timezone,
                                                  globalSettings.location.latitude,
                                                  globalSettings.location.longitude)
            parserConfig, isNew = self.parserTable.addParser(fileEntry["file"], parser.parserName, enabled, parser.params)
            # Save the default parser params for an eventual params reset.
            parser.defaultParams = parser.params.copy()
            if not isNew:
                # Merge previously saved params, dropping keys the parser
                # no longer declares.
                params = self.parserTable.getParserParams(parserConfig.dbID)
                unusedKeyList = []
                if params:
                    for key in params:
                        if key not in parser.params:  # simplified from a nested bFound scan
                            unusedKeyList.append(key)
                    for key in unusedKeyList:
                        params.pop(key, None)
                    parser.params.update(params)
                    self.parserTable.updateParserParams(parserConfig.dbID, parser.params)
            self.parsers[parserConfig] = parser
            parserConfig.userDataTypes = self.userDataTypeTable.addRecords(parser.userDataTypes)
            self.parserUserDataTypeTable.addRecords(parserConfig.dbID, parserConfig.userDataTypes)
            log.debug(parserConfig)
        except Exception as e:  # fixed: legacy comma except syntax
            log.info("Failed to register parser from file : %s. Error: %s" % (fileEntry["name"], e))
            # Registration failed: remove the parser the import just appended.
            RMParser.parsers.pop()
def perform(self):  # The function that will be executed must have this name
    """Fetch DWD 1-minute precipitation data for a station and aggregate it
    into hourly RAIN values for the last 30 days.

    Downloads the station's "now" zip from opendata.dwd.de, parses the
    semicolon-separated product file and sums per-minute values per hour.
    NOTE(review): the final partial hour is never flushed and URLParams is
    built but never sent - confirm intent against the full source.
    """
    log.info("Hello History")
    station = self.params.get("StationID", None)
    if station is None or station == "":
        station = "02860"
        log.debug("No station set, using (%s)" % station)
    url = "https://opendata.dwd.de/climate_environment/CDC/observations_germany/climate/1_minute/precipitation/now/1minutenwerte_nieder_" + str(station) + "_now.zip"
    URLParams = [("User-Agent", "RainMachine v2")]
    try:
        req = urllib2.Request(url)
        response = urllib2.urlopen(req)
        raw = response.read()
        zipFile = ZipFile(StringIO.StringIO(raw))
        dataFile = None
        for fileInfo in zipFile.filelist:
            if fileInfo.filename.startswith("produkt_ein_now_"):
                dataFile = fileInfo
        if dataFile is None:  # fixed: identity check instead of "== None"
            log.error("Unable to find data file.")
            return
        content = zipFile.read(dataFile)
        reader = csv.reader(StringIO.StringIO(content), delimiter=';')
        next(reader)  # skip the header row
        # Ignore anything older than 30 days.
        minDate = rmDeltaDayFromTimestamp(rmCurrentDayTimestamp(), -30)
        epochDate = datetime.datetime(1970, 1, 1)
        log.debug("minDate: %s" % minDate)
        lastHour = None
        currentHour = None
        totalHour = 0
        for row in reader:
            timeStamp = row[1]
            myDate = datetime.datetime.strptime(row[1], "%Y%m%d%H%M")
            myEpoch = (int)((myDate - epochDate).total_seconds())
            if myEpoch is None:  # fixed: "== None"; kept as a defensive guard
                log.debug("Cannot convert timestamp: %s to unix timestamp" % timeStamp)
                continue
            if myEpoch < minDate:
                continue
            value = parseString(row[3])
            if value is None:  # fixed: identity check instead of "== None"
                continue
            # Truncate to the start of the hour and accumulate per-hour totals.
            currentHour = myEpoch - (myEpoch % 3600)
            if currentHour != lastHour:
                if lastHour is not None:
                    self.addValue(RMParser.dataType.RAIN, lastHour, totalHour)
                totalHour = value
                lastHour = currentHour
            else:
                totalHour += value
        log.info("Done")
    except Exception as e:  # fixed: legacy comma except syntax
        log.error("*** Error running DWD parser")
        log.exception(e)
if timestamp is None: continue timestamp = int(timestamp) if timestamp < maxDayTimestamp: self.addValue(RMParser.dataType.MINTEMP, timestamp, entry.get("temperatureMin")) self.addValue(RMParser.dataType.MAXTEMP, timestamp, entry.get("temperatureMax")) self.addValue(RMParser.dataType.CONDITION, timestamp, self.conditionConvert(entry.get("icon"))) if self.parserDebug: log.debug(self.result) except Exception, e: log.error("*** Error running forecastio parser") log.exception(e) log.debug("Finished running forecast io parser") def conditionConvert(self, conditionStr): if 'clear-day' in conditionStr: return RMParser.conditionType.Fair elif 'clear-night' in conditionStr: return RMParser.conditionType.Fair elif 'rain' in conditionStr: return RMParser.conditionType.HeavyRain elif 'snow' in conditionStr: return RMParser.conditionType.Snow elif 'sleet' in conditionStr: return RMParser.conditionType.RainSnow elif 'wind' in conditionStr:
def __load(self, parserDir):
    """Load all parser modules found under parserDir and register them.

    First builds a modname -> file-info map, preferring a .py source over a
    same-named compiled file; then imports each module (importing appends
    the parser class to RMParser.parsers) and persists its configuration.
    """
    log.info("*** BEGIN Loading parsers from '%s'" % parserDir)
    fileMap = OrderedDict()
    #---------------------------------------------------------------------------
    # Discover candidate parser files.
    #
    for root, dirs, files in os.walk(parserDir):
        for fname in files:
            modname, modext = os.path.splitext(fname)
            modPath = os.path.join(root, fname)
            fileEntry = fileMap.get(modname, None)
            if fileEntry is None:
                fileMap[modname] = {"file": fname, "name": modname, "ext": modext, "path": modPath}
            elif modext == ".py":
                # A .py source wins over a stale compiled file of the same name.
                fileEntry["file"] = fname
                fileEntry["name"] = modname
                fileEntry["ext"] = modext
                fileEntry["path"] = modPath
    #---------------------------------------------------------------------------
    # Import and register each discovered parser.
    #
    for fileEntry in fileMap.values():
        try:
            if fileEntry["ext"] == ".pyc":
                module = imp.load_compiled(fileEntry["name"], fileEntry["path"])
            elif fileEntry["ext"] == ".py":
                module = imp.load_source(fileEntry["name"], fileEntry["path"])
            else:
                continue
        except Exception as e:
            log.error(" * Error loading parser %s from file '%s'" % (fileEntry["name"], fileEntry["path"]))
            log.exception(e)
            continue
        try:
            log.debug(" * Parser %s successful loaded from file '%s'" % (fileEntry["name"], fileEntry["path"]))
            parser = RMParser.parsers[-1]  # Last added parser
            enabled = parser.isEnabledForLocation(globalSettings.location.timezone,
                                                  globalSettings.location.latitude,
                                                  globalSettings.location.longitude)
            parserConfig, isNew = self.parserTable.addParser(fileEntry["file"], parser.parserName, enabled, parser.params)
            # Save the default parser params for an eventual params reset.
            parser.defaultParams = parser.params.copy()
            if not isNew:
                # Merge previously saved params, dropping keys the parser
                # no longer declares.
                params = self.parserTable.getParserParams(parserConfig.dbID)
                unusedKeyList = []
                if params:
                    for key in params:
                        if key not in parser.params:  # simplified from a nested bFound scan
                            unusedKeyList.append(key)
                    for key in unusedKeyList:
                        params.pop(key, None)
                    parser.params.update(params)
                    self.parserTable.updateParserParams(parserConfig.dbID, parser.params)
            self.parsers[parserConfig] = parser
            parserConfig.userDataTypes = self.userDataTypeTable.addRecords(parser.userDataTypes)
            self.parserUserDataTypeTable.addRecords(parserConfig.dbID, parserConfig.userDataTypes)
            log.debug(parserConfig)
        except Exception as e:  # fixed: legacy comma except syntax
            log.info("Failed to register parser from file : %s. Error: %s" % (fileEntry["name"], e))
            # Registration failed: remove the parser the import just appended.
            RMParser.parsers.pop()
timestamp = int(timestamp) if timestamp < maxDayTimestamp: self.addValue(RMParser.dataType.MINTEMP, timestamp, entry.get("temperatureMin")) self.addValue(RMParser.dataType.MAXTEMP, timestamp, entry.get("temperatureMax")) self.addValue(RMParser.dataType.CONDITION, timestamp, self.conditionConvert(entry.get("icon"))) if self.parserDebug: log.debug(self.result) except Exception, e: log.error("*** Error running forecastio parser") log.exception(e) log.debug("Finished running forecast io parser") def conditionConvert(self, conditionStr): if 'clear-day' in conditionStr: return RMParser.conditionType.Fair elif 'clear-night' in conditionStr: return RMParser.conditionType.Fair elif 'rain' in conditionStr: return RMParser.conditionType.HeavyRain elif 'snow' in conditionStr: return RMParser.conditionType.Snow elif 'sleet' in conditionStr: return RMParser.conditionType.RainSnow elif 'wind' in conditionStr:
class WillyWeather(RMParser):
    """WillyWeather (Australia) forecast/observation parser."""
    parserName = "WillyWeather Australia Parser"
    parserDescription = "Australian weather service from https://www.willyweather.com.au"
    parserForecast = True
    parserHistorical = True
    parserID = "willyweather"
    parserInterval = 6 * 3600
    parserEnabled = True
    parserDebug = False
    params = {"apiKey": None,
              "stationID": None,
              "stationLookUp": False,
              "_nearbyStationsIDList": []}
    defaultParams = {"apiKey": None,
                     "stationID": 13960,
                     "stationLookUp": True,
                     "_nearbyStationsIDList": []}
    forecast = None

    def isEnabledForLocation(self, timezone, lat, long):
        # Enabled globally; the service itself only covers Australia.
        return WillyWeather.parserEnabled

    def perform(self):
        """Optionally look up nearby stations, then fetch the 7-day forecast
        and observations for the configured station."""
        self.apiKey = self.params.get("apiKey", None)
        self.stationID = self.params.get("stationID", None)
        if self.apiKey is None or not self.apiKey or not isinstance(self.apiKey, str):
            self.lastKnownError = "Error: No API Key. Please register an account at https://www.willyweather.com.au/info/api.html"
            return
        self.params["_nearbyStationsIDList"] = []
        self.noDays = 7
        if self.params.get("stationLookUp"):
            s = self.settings
            llat = s.location.latitude
            llon = s.location.longitude
            searchURL = "https://api.willyweather.com.au/v2/" + self.apiKey + "/search.json"
            searchURLParams = [("lat", llat), ("lng", llon), ("units", "distance:km")]
            try:
                d = self.openURL(searchURL, searchURLParams)
                if d is None:
                    return
                search = json.loads(d.read())
                if self.parserDebug:
                    log.info(search)
                self.getNearbyStations(search)
            except Exception as e:  # fixed: legacy comma except syntax
                log.error("*** Error finding nearby stations")
                log.exception(e)
        if self.stationID is None:
            self.lastKnownError = "Error: No Station ID entered."
            return
        URL = "https://api.willyweather.com.au/v2/" + self.apiKey + "/locations/" + str(self.stationID) + "/weather.json"
        URLParams = [("observational", "true"),
                     ("forecasts", "weather,temperature,rainfall,wind"),
                     ("days", self.noDays),
                     # fixed: this entry was a set literal {"units", "speed:m/s"},
                     # i.e. an unordered pair instead of a (key, value) tuple
                     ("units", "speed:m/s")]
        try:
            d = self.openURL(URL, URLParams)
            if d is None:
                return
            forecast = json.loads(d.read())
            if self.parserDebug:
                log.info(forecast)
            self.__getForecastData(forecast)
        except Exception as e:  # fixed: legacy comma except syntax
            log.error("*** Error running WillyWeather parser")
            log.exception(e)
class FAWNReport(RMParser):
    """Historical observations parser for the Florida Automated Weather
    Network (FAWN), using the site's CSV report endpoint ("alternative
    mode" to the JSON-based FAWN parser)."""

    parserName = "FAWN Report Parser"
    parserDescription = "Florida Automated Weather Network observations alternative mode"
    parserForecast = False
    parserHistorical = True
    parserEnabled = False
    parserDebug = True
    parserInterval = 6 * 3600  # run every 6 hours
    # station: FAWN station number to import data for
    params = {
        "station": 480
    }

    def isEnabledForLocation(self, timezone, lat, long):
        # FAWN covers Florida, so only enable for US timezones.
        if FAWNReport.parserEnabled and timezone:
            return timezone.startswith("US") or timezone.startswith("America")
        return False

    def perform(self):
        """Download the daily CSV report for the configured station and add
        each row's observations to the parser's value store."""
        s = self.settings
        now = time.time()
        URLReport = "https://fawn.ifas.ufl.edu/data/reports/?res"  #+ str(now)
        station = self.params.get("station", None)
        if station is None:
            self.lastKnownError = "No station number configured."
            log.error(self.lastKnownError)
            return
        # Build the report-form POST body for this station (project helper).
        POSTParams = self.__generatePOSTParams(station)
        #-----------------------------------------------------------------------------------------------
        #
        # Get daily data.
        #
        try:
            POSTParams = urllib.urlencode(POSTParams)
            req = urllib2.Request(URLReport, data=POSTParams)
            response = urllib2.urlopen(req)
            data = response.read()
        except Exception, e:
            self.lastKnownError = "Cannot download data"
            log.error(self.lastKnownError)
            log.error(e)
            return
        if data is None:
            return
        try:
            # Response is CSV with a header row; sample row fields include:
            #   'Period': '26 Mar 2019', 'FAWN Station': 'North Port',
            #   '10m T avg (F)': '69.24', '10m T min (F)': '60.35',
            #   '10m T max (F)': '79.16', '2m Rain tot (in)': '0.00',
            #   '2m DewPt avg (F)': '62.13', 'RelHum avg 2m (pct)': '81',
            #   'SolRad avg 2m (w/m^2)': '264.22', 'ET (in)': '0.14',
            #   '10m Wind avg (mph)': '3.70', 'N (# obs)': '96'
            parsedData = csv.DictReader(data.splitlines())
            for entry in parsedData:
                timestamp = rmTimestampFromDateAsString(entry['Period'],
                                                        "%d %b %Y")
                self.addValue(RMParser.dataType.TEMPERATURE, timestamp,
                              convertFahrenheitToCelsius(entry['10m T avg (F)']))
                self.addValue(RMParser.dataType.MINTEMP, timestamp,
                              convertFahrenheitToCelsius(entry['10m T min (F)']))
                self.addValue(RMParser.dataType.MAXTEMP, timestamp,
                              convertFahrenheitToCelsius(entry['10m T max (F)']))
                self.addValue(RMParser.dataType.RAIN, timestamp,
                              convertInchesToMM(entry['2m Rain tot (in)']))
                self.addValue(RMParser.dataType.DEWPOINT, timestamp,
                              convertFahrenheitToCelsius(entry['2m DewPt avg (F)']))
                self.addValue(RMParser.dataType.RH, timestamp,
                              self.__toInt(entry['RelHum avg 2m (pct)']))
                self.addValue(RMParser.dataType.ET0, timestamp,
                              convertInchesToMM(entry['ET (in)']))
                self.addValue(RMParser.dataType.SOLARRADIATION, timestamp,
                              convertRadiationFromWattsToMegaJoules(entry['SolRad avg 2m (w/m^2)']))
                wind = self.__toFloat(entry['10m Wind avg (mph)'])
                if wind is not None:
                    wind = 0.44704 * wind  # mph to mps
                self.addValue(RMParser.dataType.WIND, timestamp, wind)
            if self.parserDebug:
                log.debug(self.result)
        except Exception, e:
            self.lastKnownError = "Error parsing last observed data from FAWN"
            log.error(self.lastKnownError)
            log.exception(e)
def run(self, parserId=None, forceRunParser=False, forceRunMixer=False):
    """Run all (or one) enabled weather parsers and persist their values.

    parserId: when not None, only the parser whose config dbID matches runs.
    forceRunParser: bypasses the global running-interval, the max-fail
        backoff and the per-parser interval checks.
    forceRunMixer: accepted for interface compatibility; not referenced in
        this visible portion of the method.

    Returns (None, None) when the global running interval has not elapsed.
    """
    currentTimestamp = rmCurrentTimestamp()
    # NOTE(review): this unconditional override makes the parameter and the
    # interval guard below dead code -- looks like leftover debug; confirm.
    forceRunParser = True
    if not forceRunParser and self.__lastRunningTimestamp is not None and (
            currentTimestamp - self.__lastRunningTimestamp) < self.__runningInterval:
        # We want to run the parser only each N minutes. This condition is not met, try later.
        log.debug(
            "Parser %r not run lastRunning timestamp %s current %s" %
            (parserId, self.__lastRunningTimestamp, currentTimestamp))
        return None, None
    self.__lastRunningTimestamp = currentTimestamp

    newValuesAvailable = False
    # A single forecast record (id assigned lazily on first successful parser)
    # groups all values produced during this run.
    newForecast = RMForecastInfo(None, currentTimestamp)
    log.debug("*** BEGIN Running parsers: %d (%s)" %
              (newForecast.timestamp,
               rmTimestampToDateAsString(newForecast.timestamp)))

    for parserConfig in self.parsers:
        if parserId is not None and parserId != parserConfig.dbID:
            continue
        log.debug(" * Parser: %s -> %s" %
                  (parserConfig, parserConfig.runtimeLastForecastInfo))
        if parserConfig.enabled:
            # --- fail/backoff bookkeeping -------------------------------
            if parserConfig.failCounter >= self.__maxFails:
                # Too many consecutive failures: either reset after the long
                # cool-down (or when forced), or keep skipping this parser.
                if forceRunParser or parserConfig.lastFailTimestamp is None or (
                        abs(newForecast.timestamp -
                            parserConfig.lastFailTimestamp)
                        >= self.__delayAfterMaxFails):
                    parserConfig.failCounter = 0
                    parserConfig.lastFailTimestamp = None
                else:
                    if parserConfig.failCounter == self.__maxFails:
                        log.warning(
                            " * Parser: %s - ignored because of lack of data (failCounter=%s, lastFail=%s)!"
                            % (parserConfig, ` parserConfig.failCounter `,
                               rmTimestampToDateAsString(
                                   parserConfig.lastFailTimestamp)))
                        parserConfig.failCounter += 1  # Increment this to get rid of the above message.
                    continue
            elif parserConfig.failCounter > 0:
                # Below the max-fail threshold: apply an escalating retry delay.
                retryDelay = min(
                    self.__minDelayBetweenFails +
                    (parserConfig.failCounter - 1) *
                    self.__stepDelayBetweenFails, self.__maxDelayBetweenFails)
                nextRetryTimestamp = parserConfig.lastFailTimestamp + retryDelay
                if newForecast.timestamp < nextRetryTimestamp:
                    log.debug(
                        " * Ignored because retry delay %d (sec) was not reached"
                        % retryDelay)
                    continue
                log.debug(" * Parser retry after previous fail")

            parser = self.parsers[parserConfig]
            lastUpdate = None
            if parserConfig.runtimeLastForecastInfo:
                # Check if parser hasn't run with an invalid future date
                if parserConfig.runtimeLastForecastInfo.timestamp <= currentTimestamp:
                    lastUpdate = parserConfig.runtimeLastForecastInfo.timestamp

            # Save the newest parser run
            if lastUpdate is not None and lastUpdate > self.__lastUpdateTimestamp:
                self.__lastUpdateTimestamp = lastUpdate

            # Skip parsers whose own refresh interval has not yet elapsed.
            if not forceRunParser and not self.forceParsersRun and (
                    lastUpdate != None and
                (newForecast.timestamp - lastUpdate) < parser.parserInterval):
                log.debug(
                    " * Ignored because interval %d not expired for timestamp %d lastUpdate: %d"
                    % (parser.parserInterval, newForecast.timestamp,
                       lastUpdate))
                continue

            log.debug(" * Running parser %s with interval %d" %
                      (parser.parserName, parser.parserInterval))
            parser.settings = globalSettings.getSettings()
            parser.runtime[
                RMParser.RuntimeDayTimestamp] = rmCurrentDayTimestamp()
            try:
                parser.lastKnownError = ''
                parser.isRunning = True
                parser.perform()
                parser.isRunning = False
            except Exception, e:
                log.error(" * Cannot execute parser %s" % parser.parserName)
                log.exception(e)
                parser.isRunning = False
                if len(parser.lastKnownError) == 0:
                    parser.lastKnownError = 'Error: Failed to run'

            if not parser.hasValues():
                # No data produced: record the failure for the backoff logic.
                parserConfig.failCounter += 1
                parserConfig.lastFailTimestamp = newForecast.timestamp
                if len(parser.lastKnownError) == 0:
                    parser.lastKnownError = 'Error: parser returned no values'
                parser.isRunning = False
                if parserConfig.failCounter == 1:
                    log.warn(" * Parser %s returned no values" %
                             parser.parserName)
                continue

            # Success: reset fail state and persist the parsed values.
            parserConfig.failCounter = 0
            parserConfig.lastFailTimestamp = None
            if newForecast.id == None:
                self.forecastTable.addRecordEx(newForecast)
            parserConfig.runtimeLastForecastInfo = newForecast
            if not globalSettings.vibration:
                self.parserDataTable.removeEntriesWithParserIdAndTimestamp(
                    parserConfig.dbID, parser.getValues())
            self.parserDataTable.addRecords(newForecast.id,
                                            parserConfig.dbID,
                                            parser.getValues())
            parser.clearValues()
            newValuesAvailable = True
def perform(self):
    """Fetch the DWD MOSMIX CSV point forecast for the configured station
    and add the selected columns to the parser's value store."""
    station = self.params.get("station", None)
    if station is None or station == "":
        station = "10637"
        log.debug("No station set, using Frankfurt am Main (%s)" % station)

    url = "http://opendata.dwd.de/weather/local_forecasts/poi/" + str(
        station) + "-MOSMIX.csv"
    URLParams = [("User-Agent", "RainMachine v2")]
    try:
        file = self.openURL(url, URLParams)
        if file is None:
            self.lastKnownError = "Cannot read data from DWD Service."
            return
        reader = csv.reader(file, delimiter=';')
        # Column indices kept from the MOSMIX CSV layout (date, time and the
        # measured quantities used below; chosen by the original author).
        included_cols = [0, 1, 2, 3, 4, 5, 9, 14, 22, 31, 34]
        # Skip the three header lines of the CSV file.
        next(reader)
        next(reader)
        next(reader)
        for row in reader:
            content = list(row[i] for i in included_cols)
            #print(content)
            # Columns 0 and 1 hold the date and time of the forecast step.
            datestring = content[0] + ':' + content[1]
            timestamp = rmTimestampFromDateAsString(
                datestring, '%d.%m.%y:%H:%M')
            if timestamp is None:
                log.debug(
                    "Cannot convert timestamp: %s to unix timestamp" %
                    datestring)
                continue
            #print timestamp
            # '---' marks a missing value; values use a decimal comma.
            if content[2] != '---':
                self.addValue(RMParser.dataType.TEMPERATURE, timestamp,
                              float(content[2].replace(",", ".")))
            if content[3] != '---':
                self.addValue(RMParser.dataType.DEWPOINT, timestamp,
                              float(content[3].replace(",", ".")))
            if content[4] != '---':
                self.addValue(RMParser.dataType.MAXTEMP, timestamp,
                              float(content[4].replace(",", ".")))
            if content[5] != '---':
                self.addValue(RMParser.dataType.MINTEMP, timestamp,
                              float(content[5].replace(",", ".")))
            ## km/h -> m/s
            if content[6] != '---':
                self.addValue(
                    RMParser.dataType.WIND, timestamp,
                    0.27777777777778 * float(content[6].replace(",", ".")))
            if content[7] != '---':
                self.addValue(RMParser.dataType.QPF, timestamp,
                              float(content[7].replace(",", ".")))
            #if content[8] != '---':
            #    self.addValue(RMParser.dataType.SOLARRADIATION, timestamp, float(content[8].replace(",", ".")))
            # Divided by 10 -- presumably hPa -> kPa; confirm source units.
            if content[9] != '---':
                self.addValue(RMParser.dataType.PRESSURE, timestamp,
                              float(content[9].replace(",", ".")) / 10)
            #if content[10] != '---':
            #    self.addValue(RMParser.dataType.SOLARRADIATION, timestamp, float(content[10].replace(",", ".")))
        #log.info(self.result)
    except Exception, e:
        log.error("*** Error running DWD parser")
        log.exception(e)
def perform(
        self):  # The function that will be executed must have this name
    """Fetch the DWD MOSMIX_L KMZ (zipped KML) forecast for the configured
    station, decode each <dwd:Forecast> series and store the values.

    Location settings are available on self.settings / self (zip, name,
    state, latitude, longitude, address, elevation, gmtOffset, dstOffset,
    stationID, stationName, et0Average).
    """
    # Accessing system location settings
    #lat = self.settings.location.latitude
    log.info("Hello KMZ")

    station = self.params.get("station", None)
    if station is None or station == "":
        station = "K4086"
        log.debug("No station set, using (%s)" % station)

    url = "https://opendata.dwd.de/weather/local_forecasts/mos/MOSMIX_L/single_stations/" + str(
        station) + "/kml/MOSMIX_L_LATEST_" + str(station) + ".kmz"
    URLParams = [("User-Agent", "RainMachine v2")]
    try:
        req = urllib2.Request(url)
        response = urllib2.urlopen(req)
        raw = response.read()
        # The .kmz is a zip archive containing a single KML document.
        zipFile = ZipFile(StringIO.StringIO(raw))
        kml = zipFile.read(zipFile.filelist[0])
        rootNode = ET.fromstring(kml)
        nameSpaces = {
            'dwd': "https://opendata.dwd.de/weather/lib/pointforecast_dwd_extension_V1_0.xsd",
            'gx': "http://www.google.com/kml/ext/2.2",
            'xal': "urn:oasis:names:tc:ciq:xsdschema:xAL:2.0",
            'kml': "http://www.opengis.net/kml/2.2",
            'atom': "http://www.w3.org/2005/Atom"
        }
        timeStampsNode = rootNode.findall(
            "./kml:Document/kml:ExtendedData/dwd:ProductDefinition/dwd:ForecastTimeSteps/",
            nameSpaces)
        extendedDataNode = rootNode.findall(
            "./kml:Document/kml:Placemark/kml:ExtendedData/", nameSpaces)
        nowTimeStamp = rmCurrentTimestamp()
        # Number of leading (already past) timesteps whose columns must be
        # dropped from every value series below.
        skipColumens = 0

        # Parse Timestamps
        timeStampList = []
        for ts in timeStampsNode:
            # Strip fractional seconds + trailing Z so strptime can parse it.
            compatibleString = re.sub(r"\.\d+Z$", '', ts.text)
            unix = rmTimestampFromDateAsString(compatibleString,
                                               "%Y-%m-%dT%H:%M:%S")
            #ts = datetime.datetime.strptime(compatibleString, "%Y-%m-%dT%H:%M:%S")
            if (unix < nowTimeStamp):
                skipColumens += 1
                continue
            timeStampList.append(unix)

        dwdData = []        # NOTE(review): unused local
        parsedData = DWDData()
        for data in extendedDataNode:
            currentCol = 0  # NOTE(review): unused local
            for k, v in data.attrib.items():
                if k.endswith("elementName"):
                    valueNode = data.find("./dwd:value", nameSpaces)
                    if valueNode == None:
                        continue
                    # Values are a whitespace-separated series, one entry
                    # per forecast timestep.
                    allValues = valueNode.text.split()
                    if skipColumens > 0:
                        rawValues = allValues[skipColumens:]
                    else:
                        rawValues = allValues
                    if len(rawValues) != len(timeStampList):
                        continue
                    # Temperature
                    if v.lower() == "TTT".lower():
                        parsedData.Temperature = parseFloats(
                            rawValues, timeStampList,
                            temperatureTransformation)
                        continue
                    # Min Temperature
                    if v.lower() == "TN".lower():
                        parsedData.MinTemp = parseFloats(
                            rawValues, timeStampList,
                            temperatureTransformation)
                        continue
                    # Max Temperature
                    if v.lower() == "TX".lower():
                        parsedData.MaxTemp = parseFloats(
                            rawValues, timeStampList,
                            temperatureTransformation)
                        continue
                    # Probability of precipitation > 0.0mm during the last hour
                    if v.lower() == "wwP".lower():
                        parsedData.POP = parseFloats(
                            rawValues, timeStampList)
                        continue
                    # Wind
                    if v.lower() == "FF".lower():
                        parsedData.Wind = parseFloats(
                            rawValues, timeStampList)
                        continue
                    # Solar Radiation
                    # NOTE(review): uses pressureTransformation -- possibly a
                    # copy/paste slip; confirm against parseFloats semantics.
                    if v.lower() == "Rad1h".lower():
                        parsedData.SolarRadiation = parseFloats(
                            rawValues, timeStampList, pressureTransformation)
                        continue
                    # Cloud
                    if v.lower() == "Neff".lower():
                        parsedData.SkyCover = parseFloats(
                            rawValues, timeStampList, skyCoverTransform)
                        continue
                    # QPF
                    if v.lower() == "RRdc".lower():
                        parsedData.QPF = parseFloats(
                            rawValues, timeStampList, None, yesterday)
                        continue
                    # evapotranspiration
                    if v.lower() == "PEvap".lower():
                        parsedData.ET0 = parseFloats(
                            rawValues, timeStampList, None, yesterday)
                        continue
                    # Pressure
                    if v.lower() == "PPPP".lower():
                        parsedData.Pressure = parseFloats(
                            rawValues, timeStampList, pressureTransformation)
                        continue
                    # Dewpoint
                    if v.lower() == "Td".lower():
                        parsedData.DewPoint = parseFloats(
                            rawValues, timeStampList,
                            temperatureTransformation)
                        continue
                    # Condition
                    if v.lower() == "WPcd1".lower():
                        parsedData.Condition = parseFloats(
                            rawValues, timeStampList, conditionParser,
                            yesterday)
                        continue

        log.info("Adding parsed values to database")
        if parsedData.Temperature != None:
            log.debug("Adding Temparatures values")
            self.addValues(RMParser.dataType.TEMPERATURE,
                           parsedData.Temperature)
        if parsedData.MinTemp != None:
            log.debug("Adding Min-Temparatures values")
            self.addValues(RMParser.dataType.MINTEMP, parsedData.MinTemp)
        if parsedData.MaxTemp != None:
            log.debug("Adding Max-Temparatures values")
            self.addValues(RMParser.dataType.MAXTEMP, parsedData.MaxTemp)
        # NOTE(review): RH is never assigned in the parse loop above, so this
        # branch appears to be dead code; confirm DWDData defaults.
        if parsedData.RH != None:
            log.debug("Adding RH values")
            self.addValues(RMParser.dataType.RH, parsedData.RH)
        if parsedData.Wind != None:
            log.debug("Adding Wind values")
            self.addValues(RMParser.dataType.WIND, parsedData.Wind)
        if parsedData.SolarRadiation != None:
            log.debug("Adding Solar Radiation values")
            self.addValues(RMParser.dataType.SOLARRADIATION,
                           parsedData.SolarRadiation)
        if parsedData.SkyCover != None:
            log.debug("Adding SkyCover values")
            self.addValues(RMParser.dataType.SKYCOVER, parsedData.SkyCover)
        if parsedData.QPF != None:
            log.debug("Adding QPF values")
            self.addValues(RMParser.dataType.QPF, parsedData.QPF)
        if parsedData.ET0 != None:
            log.debug("Adding ET0 values")
            # ET0 insertion intentionally disabled by the original author.
            #self.addValues(RMParser.dataType.ET0, parsedData.ET0)
        if parsedData.POP != None:
            log.debug("Adding POP values")
            self.addValues(RMParser.dataType.POP, parsedData.POP)
        if parsedData.Pressure != None:
            log.debug("Adding Pressure values")
            self.addValues(RMParser.dataType.PRESSURE, parsedData.Pressure)
        if parsedData.DewPoint != None:
            log.debug("Adding DewPoint values")
            self.addValues(RMParser.dataType.DEWPOINT, parsedData.DewPoint)
        if parsedData.Condition != None:
            self.addValues(RMParser.dataType.CONDITION,
                           parsedData.Condition)
    except Exception, e:
        log.error("*** Error running DWD parser")
        log.exception(e)
def run(self, parserId = None, forceRunParser = False, forceRunMixer = False):
    """Run all (or one) enabled weather parsers and persist their values.

    parserId: when not None, only the parser whose config dbID matches runs.
    forceRunParser: bypasses the global running-interval, the max-fail
        backoff and the per-parser interval checks.
    forceRunMixer: accepted for interface compatibility; not referenced in
        this visible portion of the method.

    Returns (None, None) when the global running interval has not elapsed.
    """
    currentTimestamp = rmCurrentTimestamp()
    # NOTE(review): this unconditional override makes the parameter and the
    # interval guard below dead code -- looks like leftover debug; confirm.
    forceRunParser = True
    if not forceRunParser and self.__lastRunningTimestamp is not None and (currentTimestamp - self.__lastRunningTimestamp) < self.__runningInterval:
        # We want to run the parser only each N minutes. This condition is not met, try later.
        log.debug("Parser %r not run lastRunning timestamp %s current %s" % (parserId, self.__lastRunningTimestamp, currentTimestamp))
        return None, None
    self.__lastRunningTimestamp = currentTimestamp

    newValuesAvailable = False
    # A single forecast record (id assigned lazily on first successful parser)
    # groups all values produced during this run.
    newForecast = RMForecastInfo(None, currentTimestamp)
    log.debug("*** BEGIN Running parsers: %d (%s)" % (newForecast.timestamp, rmTimestampToDateAsString(newForecast.timestamp)))

    for parserConfig in self.parsers:
        if parserId is not None and parserId != parserConfig.dbID:
            continue
        log.debug(" * Parser: %s -> %s" % (parserConfig, parserConfig.runtimeLastForecastInfo))
        if parserConfig.enabled:
            # --- fail/backoff bookkeeping -------------------------------
            if parserConfig.failCounter >= self.__maxFails:
                # Too many consecutive failures: either reset after the long
                # cool-down (or when forced), or keep skipping this parser.
                if forceRunParser or parserConfig.lastFailTimestamp is None or (abs(newForecast.timestamp - parserConfig.lastFailTimestamp) >= self.__delayAfterMaxFails):
                    parserConfig.failCounter = 0
                    parserConfig.lastFailTimestamp = None
                else:
                    if parserConfig.failCounter == self.__maxFails:
                        log.warning(" * Parser: %s - ignored because of lack of data (failCounter=%s, lastFail=%s)!" % (parserConfig, `parserConfig.failCounter`, rmTimestampToDateAsString(parserConfig.lastFailTimestamp)))
                        parserConfig.failCounter += 1 # Increment this to get rid of the above message.
                    continue
            elif parserConfig.failCounter > 0:
                # Below the max-fail threshold: apply an escalating retry delay.
                retryDelay = min(self.__minDelayBetweenFails + (parserConfig.failCounter - 1) * self.__stepDelayBetweenFails, self.__maxDelayBetweenFails)
                nextRetryTimestamp = parserConfig.lastFailTimestamp + retryDelay
                if newForecast.timestamp < nextRetryTimestamp:
                    log.debug(" * Ignored because retry delay %d (sec) was not reached" % retryDelay)
                    continue
                log.debug(" * Parser retry after previous fail")

            parser = self.parsers[parserConfig]
            lastUpdate = None
            if parserConfig.runtimeLastForecastInfo:
                # Check if parser hasn't run with an invalid future date
                if parserConfig.runtimeLastForecastInfo.timestamp <= currentTimestamp:
                    lastUpdate = parserConfig.runtimeLastForecastInfo.timestamp

            # Save the newest parser run
            if lastUpdate is not None and lastUpdate > self.__lastUpdateTimestamp:
                self.__lastUpdateTimestamp = lastUpdate

            # Skip parsers whose own refresh interval has not yet elapsed.
            if not forceRunParser and not self.forceParsersRun and (lastUpdate != None and (newForecast.timestamp - lastUpdate) < parser.parserInterval):
                log.debug(" * Ignored because interval %d not expired for timestamp %d lastUpdate: %d" % (parser.parserInterval, newForecast.timestamp, lastUpdate))
                continue

            log.debug(" * Running parser %s with interval %d" % (parser.parserName, parser.parserInterval))
            parser.settings = globalSettings.getSettings()
            parser.runtime[RMParser.RuntimeDayTimestamp] = rmCurrentDayTimestamp()
            try:
                parser.lastKnownError = ''
                parser.isRunning = True
                parser.perform()
                parser.isRunning = False
            except Exception, e:
                log.error(" * Cannot execute parser %s" % parser.parserName)
                log.exception(e)
                parser.isRunning = False
                if len(parser.lastKnownError) == 0:
                    parser.lastKnownError = 'Error: Failed to run'

            if not parser.hasValues():
                # No data produced: record the failure for the backoff logic.
                parserConfig.failCounter += 1
                parserConfig.lastFailTimestamp = newForecast.timestamp
                if len(parser.lastKnownError) == 0:
                    parser.lastKnownError = 'Error: parser returned no values'
                parser.isRunning = False
                if parserConfig.failCounter == 1:
                    log.warn (" * Parser %s returned no values" % parser.parserName)
                continue

            # Success: reset fail state and persist the parsed values.
            parserConfig.failCounter = 0
            parserConfig.lastFailTimestamp = None
            if newForecast.id == None:
                self.forecastTable.addRecordEx(newForecast)
            parserConfig.runtimeLastForecastInfo = newForecast
            if not globalSettings.vibration:
                self.parserDataTable.removeEntriesWithParserIdAndTimestamp(parserConfig.dbID, parser.getValues())
            self.parserDataTable.addRecords(newForecast.id, parserConfig.dbID, parser.getValues())
            parser.clearValues()
            newValuesAvailable = True
def __retrieveData(self, startDate, endDate): useCustomStation = self.params["customStation"] customStation = self.params["station"] appKey = self.params["appKey"] s = self.settings URL = "http://et.water.ca.gov/api/data" # bad req if lat;lon is not in CA. dataItems = "day-asce-eto,day-precip,day-sol-rad-avg,day-vap-pres-avg,day-air-tmp-max," + \ "day-air-tmp-min,day-air-tmp-avg,day-rel-hum-max,day-rel-hum-min,day-rel-hum-avg," + \ "day-dew-pnt,day-wind-spd-avg,day-wind-run,day-soil-tmp-avg" if useCustomStation: URLParams = "appKey={0}&targets={1}&startDate={2}&endDate={3}&unitOfMeasure={4}".format\ ( appKey, customStation, startDate, endDate, "M" ) else: URLParams = "appKey={0}&targets=lat={1},lng={2}&dataItems={3}&startDate={4}&endDate={5}&unitOfMeasure={6}".format\ ( appKey, s.location.latitude, s.location.longitude, dataItems, startDate, endDate, "M" ) #Non-standard URL parameters #URLParams = \ # [ # ("appKey", self.appKey), # ("targets", "lat=" + `s.location.latitude` + ",lng=" + `s.location.longitude`), # multiple locations separated by ";" # ("dataItems", "day-asce-eto,day-precip,day-sol-rad-avg,day-vap-pres-avg,day-air-tmp-max,day-air-tmp-min,day-air-tmp-avg,day-rel-hum-max,day-rel-hum-min,day-rel-hum-avg,day-dew-pnt,day-wind-spd-avg,day-wind-run,day-soil-tmp-avg"), # ("startDate", startDate), # ("endDate", endDate), # ("unitOfMeasure", "M") # ] try: d = self.openURL(URL, URLParams, encodeParameters=False) if d is None: return observation = json.loads(d.read()) daily = [] try: daily = observation["Data"]["Providers"][0]["Records"] except Exception, e: log.error("*** No daily information found in response!") log.exception(e) for entry in daily: timestamp = entry.get("Date") if timestamp is None: continue timestamp = int(time.mktime(datetime.datetime.strptime(timestamp, "%Y-%m-%d").timetuple())) avgTemp = entry.get("DayAirTmpAvg")["Value"] minTemp = entry.get("DayAirTmpMin")["Value"] maxTemp = entry.get("DayAirTmpMax")["Value"] if avgTemp is None or minTemp 
is None or maxTemp is None: continue self.addValue(RMParser.dataType.TEMPERATURE, timestamp, self.__toFloat(avgTemp)) self.addValue(RMParser.dataType.MINTEMP, timestamp, self.__toFloat(minTemp)) self.addValue(RMParser.dataType.MAXTEMP, timestamp, self.__toFloat(maxTemp)) self.addValue(RMParser.dataType.QPF, timestamp, self.__toFloat(entry.get("DayPrecip")["Value"])) self.addValue(RMParser.dataType.RH, timestamp, self.__toFloat(entry.get("DayRelHumAvg")["Value"])) self.addValue(RMParser.dataType.MINRH, timestamp, self.__toFloat(entry.get("DayRelHumMin")["Value"])) self.addValue(RMParser.dataType.MAXRH, timestamp, self.__toFloat(entry.get("DayRelHumMax")["Value"])) self.addValue(RMParser.dataType.WIND, timestamp, self.__toFloat(entry.get("DayWindSpdAvg")["Value"])) self.addValue(RMParser.dataType.RAIN, timestamp, self.__toFloat(entry.get("DayPrecip")["Value"])) self.addValue(RMParser.dataType.DEWPOINT, timestamp, self.__toFloat(entry.get("DayDewPnt")["Value"])) self.addValue(RMParser.dataType.PRESSURE, timestamp, self.__toFloat(entry.get("DayVapPresAvg")["Value"])) self.addValue(RMParser.dataType.ET0, timestamp, self.__toFloat(entry.get("DayAsceEto")["Value"])) # We receive solar radiation in watt/m2 we need in mjoules/m2 solarRadiation = self.__toFloat(entry.get("DaySolRadAvg")["Value"]) if solarRadiation is not None: solarRadiation *= 0.0864; self.addValue(RMParser.dataType.SOLARRADIATION, timestamp, solarRadiation) if self.parserDebug: log.debug(self.result)
def __retrieveData(self, startDate, endDate): useCustomStation = self.params["customStation"] customStation = self.params["station"] appKey = self.params["appKey"] s = self.settings URL = "http://et.water.ca.gov/api/data" # bad req if lat;lon is not in CA. dataItems = "day-asce-eto,day-precip,day-sol-rad-avg,day-vap-pres-avg,day-air-tmp-max," + \ "day-air-tmp-min,day-air-tmp-avg,day-rel-hum-max,day-rel-hum-min,day-rel-hum-avg," + \ "day-dew-pnt,day-wind-spd-avg,day-wind-run,day-soil-tmp-avg" if useCustomStation: URLParams = "appKey={0}&targets={1}&startDate={2}&endDate={3}&unitOfMeasure={4}".format\ ( appKey, customStation, startDate, endDate, "M" ) else: URLParams = "appKey={0}&targets=lat={1},lng={2}&dataItems={3}&startDate={4}&endDate={5}&unitOfMeasure={6}".format\ ( appKey, s.location.latitude, s.location.longitude, dataItems, startDate, endDate, "M" ) #Non-standard URL parameters #URLParams = \ # [ # ("appKey", self.appKey), # ("targets", "lat=" + `s.location.latitude` + ",lng=" + `s.location.longitude`), # multiple locations separated by ";" # ("dataItems", "day-asce-eto,day-precip,day-sol-rad-avg,day-vap-pres-avg,day-air-tmp-max,day-air-tmp-min,day-air-tmp-avg,day-rel-hum-max,day-rel-hum-min,day-rel-hum-avg,day-dew-pnt,day-wind-spd-avg,day-wind-run,day-soil-tmp-avg"), # ("startDate", startDate), # ("endDate", endDate), # ("unitOfMeasure", "M") # ] try: d = self.openURL(URL, URLParams, encodeParameters=False) if d is None: return observation = json.loads(d.read()) daily = [] try: daily = observation["Data"]["Providers"][0]["Records"] except Exception, e: log.error("*** No daily information found in response!") log.exception(e) self.lastKnownError = 'Warning: No daily information' for entry in daily: timestamp = entry.get("Date") if timestamp is None: continue timestamp = int( time.mktime( datetime.datetime.strptime(timestamp, "%Y-%m-%d").timetuple())) avgTemp = entry.get("DayAirTmpAvg")["Value"] minTemp = entry.get("DayAirTmpMin")["Value"] maxTemp = 
entry.get("DayAirTmpMax")["Value"] wind = entry.get("DayWindSpdAvg")["Value"] if minTemp is None or maxTemp is None: continue self.addValue(RMParser.dataType.TEMPERATURE, timestamp, self.__toFloat(avgTemp)) self.addValue(RMParser.dataType.MINTEMP, timestamp, self.__toFloat(minTemp)) self.addValue(RMParser.dataType.MAXTEMP, timestamp, self.__toFloat(maxTemp)) self.addValue( RMParser.dataType.RH, timestamp, self.__toFloat(entry.get("DayRelHumAvg")["Value"])) self.addValue( RMParser.dataType.MINRH, timestamp, self.__toFloat(entry.get("DayRelHumMin")["Value"])) self.addValue( RMParser.dataType.MAXRH, timestamp, self.__toFloat(entry.get("DayRelHumMax")["Value"])) self.addValue(RMParser.dataType.WIND, timestamp, convertWindFrom2mTo10m(wind)) self.addValue(RMParser.dataType.RAIN, timestamp, self.__toFloat(entry.get("DayPrecip")["Value"])) self.addValue(RMParser.dataType.DEWPOINT, timestamp, self.__toFloat(entry.get("DayDewPnt")["Value"])) self.addValue( RMParser.dataType.PRESSURE, timestamp, self.__toFloat(entry.get("DayVapPresAvg")["Value"])) self.addValue(RMParser.dataType.ET0, timestamp, self.__toFloat(entry.get("DayAsceEto")["Value"])) # We receive solar radiation in watt/m2 we need in mjoules/m2 solarRadiation = entry.get("DaySolRadAvg")["Value"] solarRadiation = convertRadiationFromWattsToMegaJoules( solarRadiation) self.addValue(RMParser.dataType.SOLARRADIATION, timestamp, solarRadiation) if self.parserDebug: log.debug(self.result)
class FAWN(RMParser): parserName = "FAWN Parser" parserDescription = "Florida Automated Weather Network observations" parserForecast = False parserHistorical = True parserEnabled = False parserDebug = True parserInterval = 3600 params = {"station": 480, "useHourly": False} def isEnabledForLocation(self, timezone, lat, long): if FAWN.parserEnabled and timezone: return timezone.startswith("Europe") return False def perform(self): s = self.settings URLHourly = "http://fawn.ifas.ufl.edu/controller.php/lastHour/summary/json" URLDaily = "http://fawn.ifas.ufl.edu/controller.php/lastDay/summary/json" URLParams = [] useHourly = self.params.get("useHourly", False) #----------------------------------------------------------------------------------------------- # # Get hourly data. # if useHourly: try: d = self.openURL(URLHourly, URLParams) if d is None: return json_data = d.read() json_data = json_data.replace("'","\"") hourly = json.loads(json_data) for entry in hourly: # only selected station if int(entry.get("StationID")) == self.params.get("station"): dateString = entry.get("startTime") #timestamp = rmTimestampFromDateAsStringWithOffset(dateString) timestamp = rmTimestampFromDateAsString(dateString[:-6], '%Y-%m-%dT%H:%M:%S') if timestamp is None: log.debug("Cannot convert hourly data startTime: %s to unix timestamp" % dateString) continue # Add 12h in the future for FAWN timestamp to fix badly reported offset and make it middle of the day UTC (Dragos) timestamp += 12 * 60 *60 self.addValue(RMParser.dataType.TEMPERATURE, timestamp, self.__toFloat(entry.get("t2m_avg"))) self.addValue(RMParser.dataType.MINTEMP, timestamp, self.__toFloat(entry.get("t2m_min"))) self.addValue(RMParser.dataType.MAXTEMP, timestamp, self.__toFloat(entry.get("t2m_max"))) # km/h -> m/s self.addValue(RMParser.dataType.WIND, timestamp, 0.27777777777778 * self.__toFloat(entry.get("ws_avg"))) # cm -> mm self.addValue(RMParser.dataType.RAIN, timestamp, 10 * self.__toFloat(entry.get("rain_sum"))) 
self.addValue(RMParser.dataType.DEWPOINT, timestamp, self.__toFloat(entry.get("dp_avg"))) self.addValue(RMParser.dataType.RH, timestamp, self.__toFloat(entry.get("rh_avg"))) if self.parserDebug: log.debug(self.result) except Exception, e: log.error("*** Error retrieving hourly data from FAWN") log.exception(e) #----------------------------------------------------------------------------------------------- # # Get daily data. # try: d = self.openURL(URLDaily, URLParams) if d is None: return json_data = d.read() json_data = json_data.replace("'","\"") daily = json.loads(json_data) for entry in daily: # only selected station if int(entry.get("StationID")) == self.params.get("station"): dateString = entry.get("startTime") #timestamp = rmTimestampFromDateAsStringWithOffset(dateString) timestamp = rmTimestampFromDateAsString(dateString[:-6], '%Y-%m-%dT%H:%M:%S') if timestamp is None: log.debug("Cannot convert daily data startTime: %s to unix timestamp" % dateString) continue # Add 12h in the future for FAWN timestamp to fix badly reported offset and make it middle of the day UTC (Dragos) timestamp += 12 * 60 *60 self.addValue(RMParser.dataType.TEMPERATURE, timestamp, self.__toFloat(entry.get("t2m_avg"))) self.addValue(RMParser.dataType.MINTEMP, timestamp, self.__toFloat(entry.get("t2m_min"))) self.addValue(RMParser.dataType.MAXTEMP, timestamp, self.__toFloat(entry.get("t2m_max"))) # km/h -> m/s self.addValue(RMParser.dataType.WIND, timestamp, 0.27777777777778 * self.__toFloat(entry.get("ws_avg"))) # cm -> mm self.addValue(RMParser.dataType.RAIN, timestamp, 10 * self.__toFloat(entry.get("rain_sum"))) self.addValue(RMParser.dataType.DEWPOINT, timestamp, self.__toFloat(entry.get("dp_avg"))) self.addValue(RMParser.dataType.RH, timestamp, self.__toFloat(entry.get("rh_avg"))) self.addValue(RMParser.dataType.MINRH, timestamp, self.__toFloat(entry.get("rh_min"))) self.addValue(RMParser.dataType.MAXRH, timestamp, self.__toFloat(entry.get("rh_max"))) # in -> mm 
self.addValue(RMParser.dataType.ET0, timestamp, 25.4 * self.__toFloat(entry.get("et"))) if self.parserDebug: log.debug(self.result) except Exception, e: log.error("*** Error retrieving daily data from FAWN") log.exception(e)
def perform(self):
    """Fetch the DWD MOSMIX_L KMZ forecast for the configured station,
    re-key the per-element value series by timestep and store them."""
    station = self.params.get("station", None)
    if station is None or station == "":
        station = "10637"
        log.debug("No station set, using Frankfurt am Main (%s)" % station)

    url = "http://opendata.dwd.de/weather/local_forecasts/mos/MOSMIX_L/single_stations/" + str(
        station) + "/kml/MOSMIX_L_LATEST_" + str(station) + ".kmz"
    try:
        datafile = self.openURL(url)
        if datafile is None:
            self.lastKnownError = "Cannot read data from DWD Service."
            return
        else:
            log.debug("Successfully loaded the KML file")

        # The .kmz is a zip archive holding the KML document(s).
        kmz = zipfile.ZipFile(BufferedRandomReader(datafile), 'r')
        for name in kmz.namelist():
            kml = kmz.read(name)
            root = ElementTree.fromstring(kml)
            ns = {
                'xmlns': "http://www.opengis.net/kml/2.2",
                'dwd': 'https://opendata.dwd.de/weather/lib/pointforecast_dwd_extension_V1_0.xsd'
            }

            # temporary storage of forecast values
            tmp = dict()
            forecastDict = dict()
            timestamps = []
            forecasts = dict()

            # Find all forecasts: element name -> per-timestep value list
            for element in root.findall('.//dwd:Forecast', ns):
                forecasts.update({
                    element.attrib['{https://opendata.dwd.de/weather/lib/pointforecast_dwd_extension_V1_0.xsd}elementName']:
                    element[0].text.split()
                })

            # Find all timestamps
            for element in root.findall('.//dwd:TimeStep', namespaces=ns):
                timestamps.append(element.text)

            # Re-key so forecastDict[timestep][element] = value.
            for timestep in timestamps:
                for measure, values in forecasts.iteritems():
                    tmp.update({measure: values.pop(0)})
                forecastDict.update({timestep: dict(tmp)})

            # Add retrieved data to DB. '-' marks a missing value.
            for time, forecast in forecastDict.iteritems():
                timestamp = rmTimestampFromDateAsString(
                    time[:-5], "%Y-%m-%dT%H:%M:%S")
                if timestamp is None:
                    log.debug(
                        "Cannot convert timestamp: %s to unix timestamp" %
                        time)
                    continue
                # Bug fix: compute this only after the None check above --
                # previously the subtraction ran before validating timestamp.
                yesterdayTimestamp = rmGetStartOfDay(timestamp - 12 * 60 * 60)
                # Temperature (Kelvin -> Celsius)
                if forecast['TTT'] != '-':
                    TTT = float(forecast['TTT']) - 273.15
                    self.addValue(RMParser.dataType.TEMPERATURE, timestamp,
                                  TTT)
                # Minimum temperature last 24h
                if forecast['TN'] != '-':
                    TN = float(forecast['TN']) - 273.15
                    self.addValue(RMParser.dataType.MINTEMP,
                                  timestamp - 12 * 60 * 60, TN)
                # Maximum temperature last 24h
                # Bug fix: TX was stored under MINTEMP (copy/paste error);
                # it is the 24h maximum and belongs under MAXTEMP.
                if forecast['TX'] != '-':
                    TX = float(forecast['TX']) - 273.15
                    self.addValue(RMParser.dataType.MAXTEMP,
                                  timestamp - 12 * 60 * 60, TX)
                # Windspeed
                if forecast['FF'] != '-':
                    FF = float(forecast['FF'])
                    self.addValue(RMParser.dataType.WIND, timestamp, FF)
                # Precipitation last 24h
                if forecast['RRdc'] != '-':
                    RRdc = float(forecast['RRdc'])
                    self.addValue(RMParser.dataType.QPF, yesterdayTimestamp,
                                  RRdc)
                # Atmospheric pressure (Pa -> kPa)
                if forecast['PPPP'] != '-':
                    PPPP = float(forecast['PPPP']) / 1000
                    self.addValue(RMParser.dataType.PRESSURE, timestamp,
                                  PPPP)
                # Dewpoint (Kelvin -> Celsius)
                if forecast['Td'] != '-':
                    Td = float(forecast['Td']) - 273.15
                    self.addValue(RMParser.dataType.DEWPOINT, timestamp, Td)
    except Exception as e:
        log.error("*** Error running DWD parser")
        log.exception(e)