def __init__(self, organizationID, configFile, logger=None):
    """Set up the DIF CSV ingestion object for one organization.

    organizationID -- section name in the ini file for this organization.
    configFile     -- path to the ini file handed to the dataIngestion base.
    logger         -- optional logger; config errors are logged when given.
    Exits the process with status -1 on any configuration error.
    """
    dataIngestion.__init__(self, configFile, logger)
    self.configFilename = configFile
    self.organizationID = organizationID
    #Inventory object carries the platform/observation mappings for this org.
    self.inventory = xeniaFedsInventory(self.organizationID, configFile, logger)
    try:
        #Units conversion XML file translates the source units into xenia uoms.
        conversionFilePath = self.config.get('settings', 'uomconversionfile')
        self.uomConverter = uomconversionFunctions(conversionFilePath)
        orgSection = self.organizationID
        self.lastNHours = float(self.config.get(orgSection, 'lastnhours'))
        self.checkForNewPlatforms = bool(int(self.config.get(orgSection, 'checkfornewplatforms')))
        self.stationoffering = self.config.get(orgSection, 'stationoffering')
        self.url = self.config.get(orgSection, 'difurl')
        #CSV retriever for the DIF observations.
        self.difGetObs = difObservationCSV(self.url,
                                           self.inventory.xeniaDataMappings,
                                           self.uomConverter,
                                           True)
    except ConfigParser.Error as e:
        if self.logger:
            self.logger.exception(e)
        sys.exit(-1)
def initialize(self, **kwargs):
    """Prepare a CSV ingestion run: connect to the xenia database, read the
    required and optional config settings, start the background save worker
    and open the CSV data file.

    kwargs:
        csvReader -- optional reader factory/class; when absent or None the
                     default xeniaCSVReader is used.
    Returns True.
    NOTE(review): True is returned even when connect() fails or a required
    setting is missing (the except branch only logs) -- confirm callers
    expect that.
    """
    self.startFromLastDBEntry = False
    self.lastEntryDate = None
    #COnnect to the xenia database.
    if(self.connect()):
        #Get the required config file settings.
        try:
            filePath = self.config.get('settings', 'uomconversionfile')
            self.uomConverter = uomconversionFunctions(filePath)
            self.csvFilepath = self.config.get(self.organizationId, 'csvFilepath')
            self.jsonMappingFile = self.config.get(self.organizationId, 'jsonconfig')
            #Unbounded queue feeding the background database-writer thread.
            self.dataQueue = Queue.Queue(0)
            dataSaver = dataSaveWorker(self.configFilename, self.dataQueue)
            dataSaver.start()
        except ConfigParser.Error, e:
            if(self.logger):
                self.logger.exception(e)
        else:
            #Get optional parameters.
            try:
                #If this is set, we query the ini for the last date/time of data we put into the database and
                #add data that is from that date forward. Some csv datafiles append for a long time period so we
                #can ignore data we already have.
                #self.startFromLastDBEntry = self.config.getboolean(self.organizationId, 'processfromlatestdbrec')
                #DWR 2013-05-09
                #Use the ini file instead of the database since the platform can go down for a longer period that
                #the data we retain in the real time database. Only last couple of weeks in real time database.
                lastDate = self.config.get(self.organizationId, 'lastentrydate')
                if(len(lastDate)):
                    self.lastEntryDate = datetime.datetime.strptime(lastDate, "%Y-%m-%dT%H:%M:%S")
                    if(self.logger):
                        self.logger.debug("Starting from date/time: %s" % (self.lastEntryDate))
                else:
                    #Empty setting: fall back to "now" so only fresh data gets ingested.
                    self.lastEntryDate = datetime.datetime.now()
                    if(self.logger):
                        self.logger.debug("lastentrydate does not exist, starting from current date/time: %s" % (self.lastEntryDate))
                #DWR 2013-12-13
                #Added ability to pass in a custom csvREader object to use.
                if('csvReader' in kwargs and kwargs['csvReader'] != None):
                    self.csvDataFile = kwargs['csvReader'](fileObj = open(self.csvFilepath, 'r'), xeniaMappingFile = self.jsonMappingFile, uomConverter = self.uomConverter, logger = True)
                else:
                    self.csvDataFile = xeniaCSVReader(fileObj = open(self.csvFilepath, 'r'), xeniaMappingFile = self.jsonMappingFile, uomConverter = self.uomConverter, logger = True)
            except ConfigParser.Error, e:
                #Optional settings may legitimately be absent; just note it and continue.
                if(self.logger):
                    self.logger.debug("Optional parameter: %s: %s does not exist. Using default setting." % (e.section,e.option))
    return(True)
def initialize(self, configFile=None):
    """Connect to the database, read the METAR settings and build the report
    fetcher, unit converter, field mappings and the list of platforms to poll.

    configFile -- unused here; kept for interface compatibility.
    Returns True on success; falls through (returns None) when the connect,
    config lookup or platform query fails.
    """
    if (self.connect()):
        self.platformRecs = None
        self.metarFetcher = None
        #The date we use to time stamp the rows we add.
        self.rowEntryDate = datetime.now().strftime("%Y-%m-%dT%H:%M:%S")
        try:
            #GEt the url to use for fetching the stations.
            metarUrl = self.config.get(self.organizationId, 'metarurl')
            #The METAR to xenia mapping file.
            mappingFile = self.config.get(self.organizationId, 'jsonconfig')
            #Units converion file.
            uomFilePath = self.config.get('settings', 'uomconversionfile')
        except ConfigParser.Error, e:
            if (self.logger):
                self.logger.exception(e)
        else:
            #Create our fetcher object.
            self.metarFetcher = ReportFetcher(baseurl=metarUrl)
            #Units Converter
            self.uomConverter = uomconversionFunctions(uomFilePath)
            #Build the mapping file that allows us to pick apart the METAR data fields and convert them to xenia fields.
            self.xeniaMapping = xeniaMappings(mappingFile)
            self.xeniaMapping.buildMTypeMapping(self.xeniaDb, self.uomConverter)
            #Let's get a list of the platforms we want to retrieve the data for.
            try:
                #Active platforms belonging to this organization, ordered by short name.
                self.platformRecs = self.xeniaDb.session.query(platform).\
                    join((organization,organization.row_id == platform.organization_id)).\
                    filter(organization.short_name == self.organizationId).\
                    filter(platform.active > 0).\
                    order_by(platform.short_name).\
                    all()
                if (self.logger):
                    self.logger.info(
                        "Organization: %s returned: %d platforms to query for data." % (self.organizationId, len(self.platformRecs)))
                #Fire up the saver thread.
                self.dataQueue = Queue.Queue(0)
                dataSaver = dataSaveWorker(self.configFilename, self.dataQueue)
                dataSaver.start()
                return (True)
            except Exception, e:
                if (self.logger):
                    self.logger.exception(e)
def getRemoteData(self, siteName, siteSetting):
    """Scrape the YSI observations for one site and return them packed into a
    nested recursivedefaultdict keyed:
    'platform' -> siteName -> 'date' -> date -> 'obsuom' -> obs.uom
    -> 'elev' -> elev -> 'sorder' -> sorder -> 'value'.

    siteName    -- platform name used as the hash key.
    siteSetting -- dict with 'dataQueryURL', 'url', 'latitude', 'longitude'
                   and either 'ysiconfigfile' or 'paramScrapeURL'.
    """
    print("Getting remote data for: %s" %(siteName))
    uomConvert = uomconversionFunctions(self.unitsConversionFile)
    #Build the YSI settings object either from a config file or by scraping the parameter page.
    if(siteSetting['ysiconfigfile'] != None):
        ysi = ysiObsSettings(siteSetting['dataQueryURL'], None, siteSetting['ysiconfigfile'])
    elif(siteSetting['paramScrapeURL'] != None):
        ysi = ysiObsSettings(siteSetting['dataQueryURL'], siteSetting['paramScrapeURL'], None)
    #NOTE(review): if neither setting is present, ysi is unbound and the next line raises.
    ysi.initList()
    obsDict = ysi.getAllObservations()
    obsHash = recursivedefaultdict()
    obsHash['platform'][siteName]['url'] = siteSetting['url']
    obsHash['platform'][siteName]['latitude'] = siteSetting['latitude']
    obsHash['platform'][siteName]['longitude'] = siteSetting['longitude']
    for param in obsDict:
        #The ysi observation name has the name and units all in one string. This function
        #converts the obs name into our data dictionary name, and breaks out the units as well.
        (obs,fromUOM,sOrder) = self.convertToXeniaObsAndUOM(param)
        #Didn't have a match, so we'll use the source.
        if(len(obs) == 0):
            print("ERROR: Unable to find Xenia observation name for YSI param: %s." %(param))
            #Source names look like "name[units]"; split them apart manually.
            parts = param.split('[')
            obs = parts[0]
            fromUOM = parts[1]
            fromUOM = fromUOM.replace("]", "")
            sOrder = "1"
        elev = "0"
        dataTuple = obsDict[param]
        #Now see if we need to convert into different units.
        toUOM = uomConvert.getConversionUnits(fromUOM, 'metric')
        for entry in dataTuple:
            #Each entry is a (date, value) pair.
            date = entry[0]
            date = self.formDBDate(date)
            value = float(entry[1])
            if(toUOM != None and len(toUOM)):
                convertedVal = uomConvert.measurementConvert(value, fromUOM, toUOM)
                if(convertedVal != None ):
                    value = convertedVal
            else:
                #No conversion available; keep the source units for the label.
                #NOTE(review): reconstructed from a collapsed source line -- this else
                #may instead belong to the convertedVal test above; confirm.
                toUOM = fromUOM
            #Build the obs hash.
            obsUOM = "%s.%s" %(obs,toUOM)
            obsHash['platform'][siteName]['date'][date]['obsuom'][obsUOM]['elev'][elev]['sorder'][sOrder]['value'] = value
            #obsHash[siteName][date][obs][elev][sOrder]['uom'] = toUOM
    return(obsHash)
def initialize(self, configFile=None):
    """Connect to the database and set up the METAR ingestion: report fetcher,
    unit converter, field mappings and the active-platform list.

    configFile -- unused here; kept for interface compatibility.
    Returns True on success; falls through (returns None) on any failure.
    NOTE(review): near-duplicate of another initialize() in this file --
    consider consolidating.
    """
    if(self.connect()):
        self.platformRecs = None
        self.metarFetcher = None
        #The date we use to time stamp the rows we add.
        self.rowEntryDate = datetime.now().strftime("%Y-%m-%dT%H:%M:%S")
        try:
            #GEt the url to use for fetching the stations.
            metarUrl = self.config.get(self.organizationId, 'metarurl')
            #The METAR to xenia mapping file.
            mappingFile = self.config.get(self.organizationId, 'jsonconfig')
            #Units converion file.
            uomFilePath = self.config.get('settings', 'uomconversionfile')
        except ConfigParser.Error, e:
            if(self.logger):
                self.logger.exception(e)
        else:
            #Create our fetcher object.
            self.metarFetcher = ReportFetcher(baseurl=metarUrl)
            #Units Converter
            self.uomConverter = uomconversionFunctions(uomFilePath)
            #Build the mapping file that allows us to pick apart the METAR data fields and convert them to xenia fields.
            self.xeniaMapping = xeniaMappings(mappingFile)
            self.xeniaMapping.buildMTypeMapping(self.xeniaDb, self.uomConverter)
            #Let's get a list of the platforms we want to retrieve the data for.
            try:
                #Active platforms for this organization, ordered by short name.
                self.platformRecs = self.xeniaDb.session.query(platform).\
                    join((organization,organization.row_id == platform.organization_id)).\
                    filter(organization.short_name == self.organizationId).\
                    filter(platform.active > 0).\
                    order_by(platform.short_name).\
                    all()
                if(self.logger):
                    self.logger.info("Organization: %s returned: %d platforms to query for data." % (self.organizationId, len(self.platformRecs)))
                #Fire up the saver thread.
                self.dataQueue = Queue.Queue(0)
                dataSaver = dataSaveWorker(self.configFilename, self.dataQueue)
                dataSaver.start()
                return(True)
            except Exception, e:
                if(self.logger):
                    self.logger.exception(e)
def __init__(self, organizationID, configFile, logger=None):
    """Set up the DIF CSV ingestion object for one organization.

    organizationID -- section name in the ini file for this organization.
    configFile     -- path to the ini file handed to the dataIngestion base.
    logger         -- optional logger; config errors are logged when given.
    Exits the process with status -1 on any configuration error.
    NOTE(review): near-duplicate of another __init__ in this file --
    consider consolidating.
    """
    dataIngestion.__init__(self, configFile, logger)
    self.configFilename = configFile
    self.organizationID = organizationID
    #Inventory object carries the platform/observation mappings for this org.
    self.inventory = xeniaFedsInventory(self.organizationID, configFile, logger)
    try:
        filePath = self.config.get('settings', 'uomconversionfile')
        #Get the units conversion XML file. Use it to translate the NDBC units into xenia uoms.
        self.uomConverter = uomconversionFunctions(filePath)
        self.lastNHours = float(self.config.get(self.organizationID, 'lastnhours'))
        self.checkForNewPlatforms = bool(int(self.config.get(self.organizationID, 'checkfornewplatforms')))
        self.stationoffering = self.config.get(self.organizationID, 'stationoffering')
        self.url = self.config.get(self.organizationID, 'difurl')
        #self.difObj = ioosDif(url)
        self.difGetObs = difObservationCSV(self.url, self.inventory.xeniaDataMappings, self.uomConverter, True)
    except ConfigParser.Error, e:
        if(self.logger):
            self.logger.exception(e)
        sys.exit(-1)
def __init__(self, configSettings):
    """Wire up the email-alert system: URLs, unit converter, alerts database
    and the xenia observation database (SQLite or PostGres per the config).

    configSettings -- XML config wrapper providing getEntry()/getDatabaseSettings().
    Exits the process with status -1 when either database connect fails.
    """
    alertsDBFile = configSettings.getEntry("//environment/database/alertsDB/name")
    xeniaDBSettings = configSettings.getDatabaseSettings()
    conversionXMLFile = configSettings.getEntry("//environment/unitsConversion/file")
    #URLs embedded into the outgoing alert emails.
    self.unsubURL = configSettings.getEntry("//environment/unsubscribeURL")
    self.georssURL = configSettings.getEntry("//environment/geoRSSURL")
    self.surveyURL = configSettings.getEntry("//environment/surveyURL")
    self.uomConverter = uomconversionFunctions( conversionXMLFile )
    self.logger = logging.getLogger("emailalert_logger.alertsDB")
    self.logger.info("creating an instance of emailAlerts")
    #Alerts database holds the subscriptions to process.
    self.alertsDB = alertsDB( alertsDBFile )
    if( self.alertsDB.connectDB() == False ):
        self.logger.error( "Unable to connect to Alerts DB: %s" % ( alertsDBFile ) )
        sys.exit(-1)
    else:
        self.logger.debug( "Connected to Alerts DB: %s" % ( alertsDBFile ) )
    #Pick the observation-database backend from the config.
    if( xeniaDBSettings['type'] == 'SQLite' ):
        self.obsDB = xeniaSQLite()
        if( self.obsDB.connect( xeniaDBSettings['dbName'] ) == False ):
            self.logger.error( "Unable to connect to xenia DB: %s Error: %s" % ( xeniaDBSettings['dbName'], self.obsDB.lastErrorMsg ) )
            sys.exit(-1)
        else:
            self.logger.debug( "Connected to xenia DB: %s" % ( xeniaDBSettings['dbName'] ) )
    else:
        self.obsDB = xeniaPostGres()
        if( self.obsDB.connect( None, xeniaDBSettings['dbUser'], xeniaDBSettings['dbPwd'], xeniaDBSettings['dbHost'], xeniaDBSettings['dbName'] ) == False ):
            self.logger.error( "Unable to connect to xeniaDB: %s Host: %s User: %s\nError: %s" % (xeniaDBSettings['dbName'], xeniaDBSettings['dbHost'], xeniaDBSettings['dbUser'] ,self.obsDB.lastErrorMsg) )
            sys.exit(-1)
        else:
            self.logger.debug( "Connected to Host: %s Name: %s User: %s", xeniaDBSettings['dbHost'],xeniaDBSettings['dbName'],xeniaDBSettings['dbUser'] )
def __init__(self, organizationID, configurationFile, logger=True):
    """Build the DIF-based platform inventory for one organization.

    organizationID    -- section name in the ini file for this organization.
    configurationFile -- ini file handed to the platformInventory base.
    logger            -- forwarded to the base class and the DIF client.
    Logs and exits the process with status -1 on any setup failure.
    """
    platformInventory.__init__(self, organizationID, configurationFile, logger)
    try:
        parser = ConfigParser.RawConfigParser()
        parser.read(self.configurationFile)
        self.config = parser
        #DIF endpoint for this organization.
        difUrl = parser.get(self.organizationID, 'difurl')
        self.difObj = ioosDif(difUrl, logger)
        self.difCap = None
        #Bounding box limiting the platforms of interest.
        self.bbox = parser.get('area', 'bbox')
        #JSON file mapping source observation names onto xenia names.
        mappingFilename = parser.get(self.organizationID, 'jsonconfig')
        self.xeniaDataMappings = xeniaMappings(mappingFilename)
        self.platformType = 'met'
        #Units conversion XML file.
        conversionFilename = parser.get('settings', 'uomconversionfile')
        self.uomConverter = uomconversionFunctions(conversionFilename)
    except Exception as e:
        if self.logger:
            self.logger.exception(e)
        sys.exit(-1)
def __init__(self, organizationID, configurationFile, logger=True):
    """Build the DIF-based platform inventory for one organization.

    organizationID    -- section name in the ini file for this organization.
    configurationFile -- ini file handed to the platformInventory base.
    logger            -- forwarded to the base class and the DIF client.
    Logs and exits the process with status -1 on any setup failure.
    NOTE(review): near-duplicate of another __init__ in this file --
    consider consolidating.
    """
    platformInventory.__init__(self, organizationID, configurationFile, logger)
    try:
        self.config = ConfigParser.RawConfigParser()
        self.config.read(self.configurationFile)
        #DIF endpoint for this organization.
        url = self.config.get(self.organizationID, 'difurl')
        self.difObj = ioosDif(url, logger)
        self.difCap = None
        #Bounding box limiting the platforms of interest.
        self.bbox = self.config.get('area', 'bbox')
        #JSON file mapping source observation names onto xenia names.
        jsonConfig = self.config.get(self.organizationID, 'jsonconfig')
        self.xeniaDataMappings = xeniaMappings(jsonConfig)
        self.platformType = 'met'
        #Units conversion XML file.
        filePath = self.config.get('settings', 'uomconversionfile')
        self.uomConverter = uomconversionFunctions(filePath)
    except Exception, e:
        if (self.logger):
            self.logger.exception(e)
        sys.exit(-1)
#NOTE(review): fragment -- the opening of this condition (a PostGres connect
#call whose user/password arguments precede these host/name arguments) is
#outside this view, as is the outer if this trailing else pairs with.
dbSettings['dbHost'], dbSettings['dbName']) == False):
        print("Unable to connect to PostGres.")
        sys.exit(-1)
    else:
        print("Connect to PostGres: %s %s" % (dbSettings['dbHost'], dbSettings['dbName']))
else:
    print("Missing configuration info for PostGres setup.")
    sys.exit(-1)
#Get the conversion xml file
#NOTE(review): 'unitsCoversion' looks misspelled, but it must match the XML
#config actually deployed -- do not "fix" the string without changing the config.
convertFile = configFile.getEntry(
    '//environment/unitsCoversion/file')
if (convertFile != None):
    uomConverter = uomconversionFunctions(convertFile)
else:
    print(
        "Unable to find XML conversion file given in the configuration file."
    )
#Walk the configured Twitter accounts, one per platform handle.
twitList = configFile.getListHead('//environment/twitterList')
for child in configFile.getNextInList(twitList):
    platform = configFile.getEntry('handle', child)
    twitterAccount = configFile.getEntry('twitterAccount', child)
    #twitterPwd = configFile.getEntry( 'twitterPwd',child )
    accessTK = configFile.getEntry('accessTokenKey', child)
    accessSec = configFile.getEntry('accessTokenSecret', child)
    consumerKey = configFile.getEntry('consumerKey', child)
    consumerSecret = configFile.getEntry('consumerSecret', child)
    #NOTE(review): condition truncated at this point in this view.
    if (accessTK != None and accessSec != None
def handler(req):
    """mod_python request handler: find platforms near a lat/lon, pull each
    platform's latest-observation JSON file, convert units for display and
    write the combined JSON response.

    req -- the mod_python request object (or unused in debug mode, where
           hard-coded params are substituted when _USE_HANDLER is false).
    NOTE(review): function is truncated at the end in this view.
    """
    #if __name__ == '__main__':
    from xeniatools.xenia import dbXenia
    from xeniatools.xmlConfigFile import xmlConfigFile
    from xeniatools.xenia import qaqcTestFlags
    from xeniatools.xenia import uomconversionFunctions
    from xeniatools.emailAlertSystem import GroupWriteRotatingFileHandler
    from urllib2 import Request, urlopen, URLError, HTTPError
    import simplejson
    import os
    import stat
    import logging
    import logging.handlers
    if(_USE_HANDLER):
        configFile = '/home/xeniaprod/config/mobileBuoyConfig.xml'
        req.log_error('handler')
        #req.add_common_vars()
        params = util.FieldStorage(req)
    else:
        #Debug path: fixed config and request parameters, no Apache involved.
        #configFile = '/home/xeniaprod/config/mobileBuoyConfig.xml'
        configFile = 'C:\\Documents and Settings\\dramage\\workspace\\SVNSandbox\\carolinasrcoos\\trunk\\website\\mobileBuoyConfigDebug.xml'
        params = {}
        params['radius'] = 'nearby'
        params['latitude'] = 33.65921
        params['longitude'] = -78.91754
    configSettings = xmlConfigFile( configFile )
    logFile = configSettings.getEntry("//environment/logging/logFilename")
    #NOTE(review): backupCount/maxBytes come straight from getEntry -- if those
    #are strings, RotatingFileHandler expects ints; confirm getEntry converts.
    backupCount = configSettings.getEntry("//environment/logging/backupCount")
    maxBytes = configSettings.getEntry("//environment/logging/maxBytes")
    logFileExists = True
    #If the log file does not exist, we want to make sure when we create it to give everyone write access to it.
    if(os.path.isfile(logFile) != True):
        logFileExists = False
    logger = logging.getLogger("mobilebuoy_handler")
    logger.setLevel(logging.DEBUG)
    # create formatter and add it to the handlers
    formatter = logging.Formatter("%(asctime)s,%(name)s,%(levelname)s,%(lineno)d,%(message)s")
    #Create the log rotation handler.
    #NOTE(review): the first assignment monkeypatches the class into the
    #logging.handlers module, then handler is immediately rebound to an instance.
    handler = logging.handlers.GroupWriteRotatingFileHandler = GroupWriteRotatingFileHandler
    handler = logging.handlers.GroupWriteRotatingFileHandler( logFile, "a", maxBytes, backupCount )
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    if(logFileExists != True):
        currMode = os.stat(logFile).st_mode
        #Since the email alert module can be used by web service as well as from command line, we want to change
        #the file permissions to give everyone access to it. Probably would be better to use group permissions
        #only, but for now we grant all.
        os.chmod(logFile, currMode | (stat.S_IXUSR|stat.S_IWGRP|stat.S_IXGRP|stat.S_IROTH|stat.S_IWOTH|stat.S_IXOTH))
    # add the handlers to the logger
    logger.info('Log file opened')
    try:
        convertFile = configSettings.getEntry( '//environment/unitsCoversion/file' )
        uomConverter = None
        if( convertFile != None ):
            uomConverter = uomconversionFunctions(convertFile)
        dbSettings = configSettings.getDatabaseSettings()
        dbCon = dbXenia()
        if(dbCon.connect(None, dbSettings['dbUser'], dbSettings['dbPwd'], dbSettings['dbHost'], dbSettings['dbName']) != True):
            logger.error("Unable to open database connection. Cannot continue.")
            req.content_type = 'text/plain;'
            output = "Unable to lookup buoys. Please try again later."
            req.write(output)
            req.status = apache.HTTP_OK
        else:
            logger.info("Opened database connection.")
            radius = params['radius']
            lat = float(params['latitude'])
            lon = float(params['longitude'])
            #'nearby' maps to a default 40 nautical-mile search radius.
            if(radius == 'nearby'):
                radius = 40.0
            else:
                radius = float(radius)
            logger.debug("Radius: %s Latitude: %f Longitude: %f" % (radius,lat,lon))
            #Spatialite distance query; Distance() * 60 converts degrees to
            #nautical miles. Values are interpolated as floats via %f, so no
            #free-text reaches the SQL.
            sql = "SELECT platform_handle,\
fixed_longitude,fixed_latitude,\
Distance( GeomFromText('POINT(' || fixed_longitude || ' ' || fixed_latitude ||')'), GeomFromText('POINT(%f %f)')) * 60 as distancenm FROM platform\
 WHERE active = 1 AND\
 Distance( GeomFromText('POINT(' || fixed_longitude || ' ' || fixed_latitude ||')'), GeomFromText('POINT(%f %f)')) * 60 < %f\
 ORDER BY distancenm ASC;" %(lon, lat, lon, lat, radius)
            dbCursor = dbCon.dbConnection.executeQuery(sql)
            if(dbCursor != None):
                logger.debug("Platform query: %s" %(sql))
                jsonDict = {}
                platforms = []
                rowCnt = 0
                for row in dbCursor:
                    platform = {}
                    platform['platform_handle'] = row['platform_handle']
                    platform['latitude'] = row['fixed_latitude']
                    platform['longitude'] = row['fixed_longitude']
                    distance = float(row['distancenm'])
                    platform['distance'] = ("%4.3f" % (distance))
                    platform['distanceUnits'] = 'NM'
                    platforms.append(platform)
                    rowCnt += 1
                dbCursor.close()
                if(rowCnt == 0):
                    #Placeholder entry so the client always gets a list back.
                    platform = {}
                    platform['platform_handle'] = "No platforms found"
                    platform['latitude'] = ""
                    platform['longitude'] = ""
                    platform['distance'] = "0"
                    platform['distanceUnits'] = "NM"
                    platforms.append(platform)
                #Now let's get the latest met data.
                jsonBaseURL = configSettings.getEntry('//environment/jsonFiles/baseURL')
                for platform in platforms:
                    #Per-platform json file name: handle with '.' -> ':' and lowercased.
                    fullURL = "%s%s_data.json" % (jsonBaseURL, (platform['platform_handle'].replace('.',':')).lower())
                    logger.debug("Processing json file: %s" %(fullURL))
                    urlReq = Request(fullURL)
                    try:
                        result = simplejson.load(urlopen(urlReq))
                    except HTTPError, e:
                        #Missing/unreachable json file: skip this platform.
                        logger.debug("%s Code: %d" %(e.filename,e.code))
                        continue
                    latestObs = []
                    features = (result['properties'])['features']
                    for feature in features:
                        obs = {}
                        properties = feature['properties']
                        obsName = properties['obsType']
                        #Check to see if we have an abbreviated name to use.
                        abbrName = uomConverter.getAbbreviatedObservationName(obsName)
                        if(abbrName != None):
                            obsName = abbrName
                        else:
                            obsName = obsName.replace('_', ' ')
                        obs['name'] = obsName
                        #Most recent sample is the last element of each series.
                        obs['time'] = properties['time'][-1]
                        obs['uom'] = properties['uomType']
                        obs['value'] = float((properties['value'])[-1])
                        #Convert to english display units when a conversion exists.
                        uom = uomConverter.getConversionUnits( obs['uom'], 'en' )
                        if( len(uom) > 0 ):
                            obs['value'] = uomConverter.measurementConvert( obs['value'], obs['uom'], uom )
                            displayUnits = uomConverter.getUnits(obs['uom'], uom)
                            if(displayUnits != None):
                                obs['uom'] = displayUnits
                        latestObs.append(obs)
                    platform['latest_obs'] = latestObs
                if(len(platforms)):
                    jsonDict['platform_list'] = platforms
                    jsonData = simplejson.dumps(jsonDict)
                    if(_USE_HANDLER):
                        req.content_type = 'application/json;'
                        req.set_content_length(len(jsonData))
                        logger.debug("Json: %s" %(jsonData))
                        req.write(jsonData)
                        req.status = apache.HTTP_OK
                    else:
                        print(jsonData)
                #NOTE(review): truncated here in this view -- the pairing of
                #the following else cannot be confirmed.
                else:
#NOTE(review): fragment -- the "if(" opening this condition is outside this
#view. Near-duplicate of another twitter-setup fragment in this file.
dbSettings['dbUser'] != None and dbSettings['dbPwd'] != None ):
    db = xeniaPostGres()
    if( db.connect( None, dbSettings['dbUser'], dbSettings['dbPwd'], dbSettings['dbHost'], dbSettings['dbName'] ) == False ):
        print( "Unable to connect to PostGres." )
        sys.exit(-1)
    else:
        print( "Connect to PostGres: %s %s" % (dbSettings['dbHost'],dbSettings['dbName']))
else:
    print( "Missing configuration info for PostGres setup." )
    sys.exit(-1)
#Get the conversion xml file
#NOTE(review): 'unitsCoversion' looks misspelled, but it must match the
#deployed XML config -- do not "fix" the string without changing the config.
convertFile = configFile.getEntry( '//environment/unitsCoversion/file' )
if( convertFile != None ):
    uomConverter = uomconversionFunctions(convertFile)
else:
    print( "Unable to find XML conversion file given in the configuration file.")
#Walk the configured Twitter accounts, one per platform handle.
twitList = configFile.getListHead( '//environment/twitterList' )
for child in configFile.getNextInList(twitList):
    platform = configFile.getEntry( 'handle',child )
    twitterAccount = configFile.getEntry( 'twitterAccount',child )
    #twitterPwd = configFile.getEntry( 'twitterPwd',child )
    accessTK = configFile.getEntry( 'accessTokenKey',child )
    accessSec = configFile.getEntry( 'accessTokenSecret',child )
    consumerKey = configFile.getEntry( 'consumerKey',child )
    consumerSecret = configFile.getEntry( 'consumerSecret',child )
    #Only attempt the API connection when the full OAuth credential set is present.
    if( accessTK != None and accessSec != None and consumerKey != None and consumerSecret != None):
        #Connect to the Twitter api.
#NOTE(review): fragment -- this continues a parser.add_option("-p", ... call
#whose opening line is outside this view.
"--dbPwd", dest="dbPwd",
    help="The xenia database password name to connect with.")
parser.add_option(
    "-b",
    "--BoundingBox",
    dest="bbox",
    help=
    "The bounding box we want to use to select the platforms. Format is: long lat, long lat...."
)
parser.add_option("-c", "--ConversionFile", dest="uomFile",
    help="The XML file with the units conversion formulas.")
(options, args) = parser.parse_args()
xeniaDb = dbXenia()
#(self, dbFilePath=None, user=None, passwd=None, host=None, dbName=None )
if (xeniaDb.connect(None, options.dbUser, options.dbPwd, options.dbHost, options.dbName) == False):
    print("Unable to connect to the database: %s" % (xeniaDb.getErrorInfo()))
    sys.exit(-1)
#Build the latest-observations display content from the xenia database.
obsDb = dbDisplayLatestObs(options.obsTableDB)
uomConverter = uomconversionFunctions(options.uomFile)
obsDb.buildContent(xeniaDb, uomConverter, options.bbox)
#Command-line setup for the latest-observations builder; `parser` (optparse
#OptionParser) and the -t/obsTableDB option are defined outside this view.
#NOTE(review): near-duplicate of another option-parsing fragment in this file.
parser.add_option("-d", "--dbName", dest="dbName",
    help="The name of the xenia database to connect to." )
parser.add_option("-o", "--dbHost", dest="dbHost",
    help="The xenia database host address to connect to." )
parser.add_option("-u", "--dbUser", dest="dbUser",
    help="The xenia database user name to connect with." )
parser.add_option("-p", "--dbPwd", dest="dbPwd",
    help="The xenia database password name to connect with." )
parser.add_option("-b", "--BoundingBox", dest="bbox",
    help="The bounding box we want to use to select the platforms. Format is: long lat, long lat...." )
parser.add_option("-c", "--ConversionFile", dest="uomFile",
    help="The XML file with the units conversion formulas." )
(options, args) = parser.parse_args()
xeniaDb = dbXenia()
#(self, dbFilePath=None, user=None, passwd=None, host=None, dbName=None )
if( xeniaDb.connect(None, options.dbUser, options.dbPwd, options.dbHost, options.dbName) == False ):
    print("Unable to connect to the database: %s" %(xeniaDb.getErrorInfo()))
    sys.exit(-1)
#Build the latest-observations display content from the xenia database.
obsDb = dbDisplayLatestObs(options.obsTableDB)
uomConverter = uomconversionFunctions(options.uomFile)
obsDb.buildContent(xeniaDb, uomConverter, options.bbox)
#NOTE(review): fragment -- the opening try: of this config-read block is
#outside this view.
    #Output settings: area, observation list, KML template and destinations.
    bbox = configFile.get('output', 'bbox')
    observations = configFile.get('output', 'observations').split(',')
    templateFilepath = configFile.get('output', 'kmltemplatefile')
    kmlOutfilename = configFile.get('output', 'kmlfilename')
    uomConversionFilename = configFile.get('output', 'uomconversionfilename')
    #Database connection settings.
    dbUser = configFile.get('Database', 'user')
    dbPwd = configFile.get('Database', 'password')
    dbHost = configFile.get('Database', 'host')
    dbName = configFile.get('Database', 'name')
    dbConnType = configFile.get('Database', 'connectionstring')
except ConfigParser.Error, e:
    if(logger):
        logger.exception(e)
else:
    uomConverter = uomconversionFunctions(uomConversionFilename)
    db = xeniaAlchemy()
    if(db.connectDB(dbConnType, dbUser, dbPwd, dbHost, dbName, False) == True):
        if(logger):
            logger.info("Succesfully connect to DB: %s at %s" %(dbName,dbHost))
    else:
        #NOTE(review): execution continues after this error is logged --
        #confirm the script is meant to keep going without a DB connection.
        logger.error("Unable to connect to DB: %s at %s. Terminating script." %(dbName,dbHost))
    #Icon styles keyed by platform type, referenced from the KML template.
    kmlData = {}
    kmlData['iconStyles'] = [{'id' : 'buoy', 'url' : 'http://129.252.37.86/rcoos/resources/images/legend/buoy-default.png'},
                             {'id' : 'shore_station', 'url' : 'http://129.252.37.86/rcoos/resources/images/legend/shore_station-default.png'},
                             {'id' : 'land_station', 'url' : 'http://129.252.37.86/rcoos/resources/images/legend/land_station-default.png'},
                             {'id' : 'estuary_station', 'url' : 'http://129.252.37.86/rcoos/resources/images/legend/estuary_station_default.png'},
                             {'id' : 'river_gauge', 'url' : 'http://129.252.37.86/rcoos/resources/images/legend/river_gauge-default.png'}
                             ]