def __init__(self, xmlConfigFilename):
    """Initialize the collector from the XML configuration file.

    Runs the base ysiDataCollection initializer, then reads the optional
    debug flag controlling whether a per-station appending CSV file is
    written to keep track of all the data pulled down.

    xmlConfigFilename -- path to the XML configuration file.
    """
    ysiDataCollection.__init__(self, xmlConfigFilename)
    configSettings = xmlConfigFile(xmlConfigFilename)
    #DWR 7/13/2011
    #Added flag to write an appending data file per station to keep track
    #of all the data we pull down.
    self.writeCSVDebugDataFile = configSettings.getEntry("//environment/debug/writeCSVDebugDataFile")
    if self.writeCSVDebugDataFile is not None:
        #getEntry returns the entry text; convert to a numeric flag.
        self.writeCSVDebugDataFile = float(self.writeCSVDebugDataFile)
    else:
        #Entry missing from the config file: debug CSV output disabled.
        self.writeCSVDebugDataFile = 0
def __init__(self, xmlConfigFilename):
    """Load per-site YSI processing settings from the XML configuration file.

    Populates self.siteSettings (a recursivedefaultdict keyed by site name)
    with the location, URLs and output settings for every entry in the
    //environment/ysiSettingsList node, plus the two file-level settings
    for unit conversion and YSI-to-Xenia observation mapping.

    xmlConfigFilename -- path to the XML configuration file.
    """
    configSettings = xmlConfigFile(xmlConfigFilename)
    #Hash of various parameters for each customer site to process.
    self.siteSettings = recursivedefaultdict()
    paramList = configSettings.getListHead("//environment/ysiSettingsList")
    for child in configSettings.getNextInList(paramList):
        siteName = configSettings.getEntry("name", child)
        geoLoc = configSettings.getEntry("geoLoc", child)
        #Default the location, then override it from the "lat,lon" string
        #if one is present.
        self.siteSettings[siteName]['latitude'] = 0.0
        self.siteSettings[siteName]['longitude'] = 0.0
        if geoLoc is not None:
            latLong = geoLoc.split(',')
            #Store the coordinates as floats so they are type-consistent
            #with the 0.0 defaults above; a malformed geoLoc entry falls
            #back to the defaults instead of raising.
            try:
                self.siteSettings[siteName]['latitude'] = float(latLong[0])
                self.siteSettings[siteName]['longitude'] = float(latLong[1])
            except (IndexError, ValueError):
                pass
        self.siteSettings[siteName]['url'] = configSettings.getEntry("platformURL", child)
        self.siteSettings[siteName]['ysiconfigfile'] = configSettings.getEntry("ysiParamFile", child)
        self.siteSettings[siteName]['outputtype'] = configSettings.getEntry("outputs/output/type", child)
        self.siteSettings[siteName]['outputfilename'] = configSettings.getEntry("outputs/output/filename", child)
        self.siteSettings[siteName]['paramScrapeURL'] = configSettings.getEntry("paramScrapeURL", child)
        self.siteSettings[siteName]['dataQueryURL'] = configSettings.getEntry("dataQueryURL", child)
    self.unitsConversionFile = configSettings.getEntry("//environment/unitsConversion/file")
    self.ysiMapToXeniaFile = configSettings.getEntry("//environment/ysiObservationMappingFile/file")
"--XMLConfigFile", dest="xmlConfigFile", help="Configuration file.") parser.add_option("-f", "--CreateFriends", action='store_true', dest="createFriends", help="Friend the other buoys in the xml Config file.") (options, args) = parser.parse_args() if (options.xmlConfigFile == None): parser.print_usage() parser.print_help() sys.exit(-1) try: #Open the XML Config File for processing. configFile = xmlConfigFile(options.xmlConfigFile) #Are we having the buoys friend each other? if (options.createFriends): createFriends(configFile) #We are tweeting the buoy status. else: #Get the various settings we need from out xml config file. dbSettings = configFile.getDatabaseSettings() if (dbSettings['dbHost'] != None and dbSettings['dbName'] != None and dbSettings['dbUser'] != None and dbSettings['dbPwd'] != None): db = xeniaPostGres() if (db.connect(None, dbSettings['dbUser'], dbSettings['dbPwd'], dbSettings['dbHost'], dbSettings['dbName']) == False):
def handler(req): #if __name__ == '__main__': from xeniatools.xenia import dbXenia from xeniatools.xmlConfigFile import xmlConfigFile from xeniatools.xenia import qaqcTestFlags from xeniatools.xenia import uomconversionFunctions from xeniatools.emailAlertSystem import GroupWriteRotatingFileHandler from urllib2 import Request, urlopen, URLError, HTTPError import simplejson import os import stat import logging import logging.handlers if(_USE_HANDLER): configFile = '/home/xeniaprod/config/mobileBuoyConfig.xml' req.log_error('handler') #req.add_common_vars() params = util.FieldStorage(req) else: #configFile = '/home/xeniaprod/config/mobileBuoyConfig.xml' configFile = 'C:\\Documents and Settings\\dramage\\workspace\\SVNSandbox\\carolinasrcoos\\trunk\\website\\mobileBuoyConfigDebug.xml' params = {} params['radius'] = 'nearby' params['latitude'] = 33.65921 params['longitude'] = -78.91754 configSettings = xmlConfigFile( configFile ) logFile = configSettings.getEntry("//environment/logging/logFilename") backupCount = configSettings.getEntry("//environment/logging/backupCount") maxBytes = configSettings.getEntry("//environment/logging/maxBytes") logFileExists = True #If the log file does not exist, we want to make sure when we create it to give everyone write access to it. if(os.path.isfile(logFile) != True): logFileExists = False logger = logging.getLogger("mobilebuoy_handler") logger.setLevel(logging.DEBUG) # create formatter and add it to the handlers formatter = logging.Formatter("%(asctime)s,%(name)s,%(levelname)s,%(lineno)d,%(message)s") #Create the log rotation handler. 
handler = logging.handlers.GroupWriteRotatingFileHandler = GroupWriteRotatingFileHandler handler = logging.handlers.GroupWriteRotatingFileHandler( logFile, "a", maxBytes, backupCount ) handler.setLevel(logging.DEBUG) handler.setFormatter(formatter) logger.addHandler(handler) if(logFileExists != True): currMode = os.stat(logFile).st_mode #Since the email alert module can be used by web service as well as from command line, we want to change #the file permissions to give everyone access to it. Probably would be better to use group permissions #only, but for now we grant all. os.chmod(logFile, currMode | (stat.S_IXUSR|stat.S_IWGRP|stat.S_IXGRP|stat.S_IROTH|stat.S_IWOTH|stat.S_IXOTH)) # add the handlers to the logger logger.info('Log file opened') try: convertFile = configSettings.getEntry( '//environment/unitsCoversion/file' ) uomConverter = None if( convertFile != None ): uomConverter = uomconversionFunctions(convertFile) dbSettings = configSettings.getDatabaseSettings() dbCon = dbXenia() if(dbCon.connect(None, dbSettings['dbUser'], dbSettings['dbPwd'], dbSettings['dbHost'], dbSettings['dbName']) != True): logger.error("Unable to open database connection. Cannot continue.") req.content_type = 'text/plain;' output = "Unable to lookup buoys. Please try again later." 
req.write(output) req.status = apache.HTTP_OK else: logger.info("Opened database connection.") radius = params['radius'] lat = float(params['latitude']) lon = float(params['longitude']) if(radius == 'nearby'): radius = 40.0 else: radius = float(radius) logger.debug("Radius: %s Latitude: %f Longitude: %f" % (radius,lat,lon)) sql = "SELECT platform_handle,\ fixed_longitude,fixed_latitude,\ Distance( GeomFromText('POINT(' || fixed_longitude || ' ' || fixed_latitude ||')'), GeomFromText('POINT(%f %f)')) * 60 as distancenm FROM platform\ WHERE active = 1 AND\ Distance( GeomFromText('POINT(' || fixed_longitude || ' ' || fixed_latitude ||')'), GeomFromText('POINT(%f %f)')) * 60 < %f\ ORDER BY distancenm ASC;" %(lon, lat, lon, lat, radius) dbCursor = dbCon.dbConnection.executeQuery(sql) if(dbCursor != None): logger.debug("Platform query: %s" %(sql)) jsonDict = {} platforms = [] rowCnt = 0 for row in dbCursor: platform = {} platform['platform_handle'] = row['platform_handle'] platform['latitude'] = row['fixed_latitude'] platform['longitude'] = row['fixed_longitude'] distance = float(row['distancenm']) platform['distance'] = ("%4.3f" % (distance)) platform['distanceUnits'] = 'NM' platforms.append(platform) rowCnt += 1 dbCursor.close() if(rowCnt == 0): platform = {} platform['platform_handle'] = "No platforms found" platform['latitude'] = "" platform['longitude'] = "" platform['distance'] = "0" platform['distanceUnits'] = "NM" platforms.append(platform) #Now let's get the latest met data. 
jsonBaseURL = configSettings.getEntry('//environment/jsonFiles/baseURL') for platform in platforms: fullURL = "%s%s_data.json" % (jsonBaseURL, (platform['platform_handle'].replace('.',':')).lower()) logger.debug("Processing json file: %s" %(fullURL)) urlReq = Request(fullURL) try: result = simplejson.load(urlopen(urlReq)) except HTTPError, e: logger.debug("%s Code: %d" %(e.filename,e.code)) continue latestObs = [] features = (result['properties'])['features'] for feature in features: obs = {} properties = feature['properties'] obsName = properties['obsType'] #Check to see if we have an abbreviated name to use. abbrName = uomConverter.getAbbreviatedObservationName(obsName) if(abbrName != None): obsName = abbrName else: obsName = obsName.replace('_', ' ') obs['name'] = obsName obs['time'] = properties['time'][-1] obs['uom'] = properties['uomType'] obs['value'] = float((properties['value'])[-1]) uom = uomConverter.getConversionUnits( obs['uom'], 'en' ) if( len(uom) > 0 ): obs['value'] = uomConverter.measurementConvert( obs['value'], obs['uom'], uom ) displayUnits = uomConverter.getUnits(obs['uom'], uom) if(displayUnits != None): obs['uom'] = displayUnits latestObs.append(obs) platform['latest_obs'] = latestObs if(len(platforms)): jsonDict['platform_list'] = platforms jsonData = simplejson.dumps(jsonDict) if(_USE_HANDLER): req.content_type = 'application/json;' req.set_content_length(len(jsonData)) logger.debug("Json: %s" %(jsonData)) req.write(jsonData) req.status = apache.HTTP_OK else: print(jsonData) else:
action= 'store_true', help="Process the XMRG Radar data." ) parser.add_option("-v", "--Vacuum", dest="vacuum", action= 'store_true', help="Use to vacuum the database to free unused space and shrink filesize." ) parser.add_option("-b", "--BackupPrecipitation", dest="backupPrecip", action= 'store_true', help="Used to roll precipitation data out of the working database and into a backup database." ) parser.add_option("-f", "--CreateXMRGSummaryFiles", dest="createXMRGSummaryFiles", action= 'store_true', help="Specifies creation of the XMRG summary files for DHEC excel sheets." ) parser.add_option("-a", "--ArchiveXMRGFiles", dest="archiveXMRG", action= 'store_true', help="If true, then files in the XMRG download directory are moved to the archival directory." ) (options, args) = parser.parse_args() if( options.xmlConfigFile == None ): parser.print_usage() parser.print_help() sys.exit(-1) configSettings = xmlConfigFile(options.xmlConfigFile) logFile = configSettings.getEntry('//environment/logging/logConfigFile') logging.config.fileConfig(logFile) logger = logging.getLogger("dhec_processing_logger") logger.info("Session started") if( options.vacuum ): vacuum(options.xmlConfigFile ) else: if( options.getXMRGData ): getXMRGData(options.xmlConfigFile) if(options.getModelData): getModelData(options.modelIniFile) if( options.backupPrecip ): backupData( options.xmlConfigFile )
help="XML Alerts message to parse" ) parser.add_option("-s", "--SaveAlert", dest="saveAlert", help="The XML message containing the new alert to parse.", action="store_true" ) parser.add_option("-u", "--Unsubscribe", dest="unsubscribeAlert", help="Flag that specifies we are unsubscribing from an alert", action="store_true" ) parser.add_option("-p", "--UnsubscribeParams", dest="unsubscribeParams", help="Parameters for the unsubscribe request" ) parser.add_option("-a", "--CheckAlerts", dest="checkAlerts", help="Flag that specifies we are checking for alerts", action="store_true" ) (options, args) = parser.parse_args() configSettings = xmlConfigFile( options.xmlConfigFile ) #2011-11-15 DWR #Use python logging config file. #logFile = configSettings.getEntry("//environment/logging/logFilename") logFile = configSettings.getEntry("//environment/logging/configFile") logging.config.fileConfig(logFile) logger = logging.getLogger("emailalert_logger") """ backupCount = configSettings.getEntry("//environment/logging/backupCount") maxBytes = configSettings.getEntry("//environment/logging/maxBytes") logFileExists = True #If the log file does not exist, we want to make sure when we create it to give everyone write access to it. if(os.path.isfile(logFile) != True): logFileExists = False
except Exception, e: traceback.print_exc() else: data = response.read() #Break the HTML page apart using the 'table' string as the matching word. We #end up with a list of table entries. splitData=data.split('table') #Now we search for the string"dgParamHistory" since that seems to be in the table #that contains the parameter data we are interested in. for row in splitData: if( row.find('dgParamHistory') > 0 ): splitData = "<table%stable>" %(row) splitData = splitData.replace(" ", "") break html = StringIO.StringIO(splitData) tableHead = xmlConfigFile(html) rowList = tableHead.getListHead("//table") #We want to clean up the date/time text so we use regexp to retrieve just that. date = re.compile('(\d\d\/\d\d\/\d\d\d\d\s\d\d\:\d\d\s(AM|PM))') val = re.compile('\s') row = 0 for child in tableHead.getNextInList(rowList): tag = child.xpath( 'td' ) #First row is the header, so skip it. if(row > 0): if(len(tag) >=2): col1 = date.findall(tag[0].text) col2 = val.sub('', tag[1].text) #Create date/time, value tuple. obsList.append([col1[0][0],col2]) row += 1
if __name__ == '__main__': parser = optparse.OptionParser() parser.add_option("-c", "--XMLConfigFile", dest="xmlConfigFile", help="Configuration file." ) parser.add_option("-f", "--CreateFriends", action= 'store_true', dest="createFriends", help="Friend the other buoys in the xml Config file." ) (options, args) = parser.parse_args() if( options.xmlConfigFile == None ): parser.print_usage() parser.print_help() sys.exit(-1) try: #Open the XML Config File for processing. configFile = xmlConfigFile( options.xmlConfigFile ) #Are we having the buoys friend each other? if( options.createFriends ): createFriends( configFile ) #We are tweeting the buoy status. else: #Get the various settings we need from out xml config file. dbSettings = configFile.getDatabaseSettings() if( dbSettings['dbHost'] != None and dbSettings['dbName'] != None and dbSettings['dbUser'] != None and dbSettings['dbPwd'] != None ): db = xeniaPostGres() if( db.connect( None, dbSettings['dbUser'], dbSettings['dbPwd'], dbSettings['dbHost'], dbSettings['dbName'] ) == False ): print( "Unable to connect to PostGres." )
def handler(req): try: from xeniatools.emailAlertSystem import emailAlerts #from xeniatools.emailAlertSystem import GroupWriteRotatingFileHandler from xeniatools.xmlConfigFile import xmlConfigFile import os import stat import logging import logging.config configFile = '/home/xeniaprod/config/emailAlertsConfig.xml' req.log_error('handler') req.add_common_vars() req.content_type = 'text/plain;' req.send_http_header() req.status = apache.HTTP_OK params = util.FieldStorage(req) configSettings = xmlConfigFile( configFile ) #2011-11-21 DWR #Use python logging config file. #logFile = configSettings.getEntry("//environment/logging/logHandlerFilename") logFile = configSettings.getEntry("//environment/logging/configFileHandler") logging.config.fileConfig(logFile) logger = logging.getLogger("emailalert_logger") """ logFile = configSettings.getEntry("//environment/logging/modPythonHandlerFilename") backupCount = configSettings.getEntry("//environment/logging/backupCount") maxBytes = configSettings.getEntry("//environment/logging/maxBytes") logFileExists = True #If the log file does not exist, we want to make sure when we create it to give everyone write access to it. #if(os.path.isfile(logFile) != True): # logFileExists = False logger = logging.getLogger("emailalert_logger") logger.setLevel(logging.DEBUG) # create formatter and add it to the handlers formatter = logging.Formatter("%(asctime)s,%(name)s,%(levelname)s,%(lineno)d,%(message)s") #Create the log rotation handler. #handler = logging.handlers.RotatingFileHandler( logFile, "a", maxBytes, backupCount ) #handler = logging.handlers.GroupWriteRotatingFileHandler = GroupWriteRotatingFileHandler #handler = logging.handlers.GroupWriteRotatingFileHandler( logFile, "a", maxBytes, backupCount ) #For now, seperate the web handler log file from the user log handler. The rollover doesn't seem to work #correctly. 
handler = logging.handlers.RotatingFileHandler( logFile, "a", maxBytes, backupCount ) handler.setLevel(logging.DEBUG) handler.setFormatter(formatter) logger.addHandler(handler) """ # add the handlers to the logger logger.info('Log file opened') operation = None if('operation' in params != False): operation = params['operation'] if(operation != None): logger.debug( "Operation: %s" % (operation) ) if(operation == 'add'): alertXML = None if('xml' in params != False): alertXML = params['xml'] procAlerts = emailAlerts( configSettings ) logger.info( "Saving new email alert. Params: %s" % (alertXML) ) if(procAlerts.saveXMLAlerts(alertXML)): output = "Successfully added email alert." else: output = "An error occured while adding the email alert. Please retry later." req.write(output) else: output = "Cannot complete request, there are missing parameters." req.write(output) elif(operation == 'unsubscribe'): procAlerts = emailAlerts( configSettings ) logger.info( "Unsubscribing from email alert. Params: %s" % (params) ) output = "Successfully unsubscribed from email alert." if( procAlerts.unsubscribeAlert(params)): output = "Successfully unsubscribed from email alert." else: output = "An error occured, unable to unsubscribe from the email alert" req.write(output) else: logger.error("Unknown operation parameter: %s" % (operation)) else: output = "Cannot complete request, there are missing parameters." req.write(output) logger.info('Closing log file.') except Exception,e: if(logger != None): logger.exception(e) else: print(e) req.write("An error has occured on the server, please try again later.")