Example 1
def createFriends( configFile ):
  from xeniatools.xenia import recursivedefaultdict
  
  twitAccountList = recursivedefaultdict()
  #Grab all the account info out of the config file.
  twitList = configFile.getListHead( '//environment/twitterList' )
  for child in configFile.getNextInList(twitList):
    platform        = configFile.getEntry( 'handle',child )
    twitterAccount  = configFile.getEntry( 'twitterAccount',child )
    twitterPwd      = configFile.getEntry( 'twitterPwd',child )
    twitAccountList[platform]['account'] =  twitterAccount
    twitAccountList[platform]['password'] =  twitterPwd
  #Now let's loop and friend all the buoys to one another
  for platform in twitAccountList:
    try:
      account = twitAccountList[platform]['account']
      pwd = twitAccountList[platform]['password']
      client = twitter.Api(account, pwd)
      for friendPlatform in twitAccountList:
        try:
          if( friendPlatform != platform ):
            account = twitAccountList[friendPlatform]['account']
            user = client.CreateFriendship( account )
            print( "%s friended user: %s" %( platform, user.screen_name ) )
        except twitter.TwitterError,e:
          print("Twitter Error: %s" %(e.message))
          continue
        except Exception, E:
          print( "Error from twitter call: %s" % (str(E)) )
          continue
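
All of these examples rely on recursivedefaultdict from xeniatools.xenia, but its definition is never shown. Below is a minimal sketch of such a structure, assuming it follows the common self-referential defaultdict idiom; it is an illustration to help read the examples, not the actual xeniatools source.

from collections import defaultdict

class recursivedefaultdict(defaultdict):
  #A defaultdict whose missing keys produce another recursivedefaultdict,
  #so arbitrarily deep paths can be assigned without building each level first.
  def __init__(self):
    defaultdict.__init__(self, recursivedefaultdict)

#Usage: deep assignment works directly, as in the examples (values here are placeholders).
twitAccountList = recursivedefaultdict()
twitAccountList['some.platform.handle']['account'] = 'example_account'

This is why the code above can write twitAccountList[platform]['account'] = twitterAccount without first creating twitAccountList[platform].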
Example 2
def createFriends(configFile):
    from xeniatools.xenia import recursivedefaultdict

    twitAccountList = recursivedefaultdict()
    #Grab all the account info out of the config file.
    twitList = configFile.getListHead('//environment/twitterList')
    for child in configFile.getNextInList(twitList):
        platform = configFile.getEntry('handle', child)
        twitterAccount = configFile.getEntry('twitterAccount', child)
        twitterPwd = configFile.getEntry('twitterPwd', child)
        twitAccountList[platform]['account'] = twitterAccount
        twitAccountList[platform]['password'] = twitterPwd
    #Now let's loop and friend all the buoys to one another
    for platform in twitAccountList:
        try:
            account = twitAccountList[platform]['account']
            pwd = twitAccountList[platform]['password']
            client = twitter.Api(account, pwd)
            for friendPlatform in twitAccountList:
                try:
                    if (friendPlatform != platform):
                        account = twitAccountList[friendPlatform]['account']
                        user = client.CreateFriendship(account)
                        print("%s friended user: %s" %
                              (platform, user.screen_name))
                except twitter.TwitterError, e:
                    print("Twitter Error: %s" % (e.message))
                    continue
                except Exception, E:
                    print("Error from twitter call: %s" % (str(E)))
                    continue
Example 3
 def processData(self):
   #DWR 2013-12-17
   #Use the recursive dictionary so we can store the m_type and sensor_ids by: <obs name><uom><sorder>
   #Without doing this, platforms with multiple instances of the same sensor will not get each sensor represented,
   #and we end up using incorrect sensor ids.
   self.sensorMappings = recursivedefaultdict()
   #The date we use to time stamp the rows we add.
   self.rowDate = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S")
   
   try:    
     lineCnt = 0      
     #DWR 2013-05-09
     lastRecDate = None
     dataRecs = self.getData()      
     while(dataRecs):
       #if(self.logger):
       #  self.logger.debug("Line: %d m_date: %s" % (self.csvDataFile.line_num, dataRecs['m_date']))
       saveRec = True
       #DWR 2013-05-09
       #Use the self.lastEntryDate variable for the test.
       if(self.lastEntryDate):
         if(dataRecs['m_date'] < self.lastEntryDate):
           saveRec = False
           
       if(saveRec):
         #DWR 2013-05-09    
         #Save the date so that when we hit the last data record we can write it to the file.
         lastRecDate = dataRecs['m_date']  
         self.saveData(dataRecs)
       dataRecs = self.getData()
       lineCnt += 1
       
   except StopIteration,e:
     if(self.logger):
       self.logger.info("End of file reached.")        
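
The comment at the top of processData describes keying sensorMappings by observation name, unit of measure, and s_order so that duplicated sensors stay distinct. A minimal illustrative sketch of that layout (the field names m_type_id and sensor_id and the lookup values are assumptions, not taken from the snippet):

  #Populate one entry per <obs name><uom><sorder> combination.
  self.sensorMappings[obsName][uom][sOrder]['m_type_id'] = mTypeId
  self.sensorMappings[obsName][uom][sOrder]['sensor_id'] = sensorId
  #Look up the sensor id for the second instance of a duplicated sensor.
  sensorId = self.sensorMappings['water_temperature']['celsius'][2]['sensor_id']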
Example 4
    def __init__(self,
                 dbUser=None,
                 dbName=None,
                 dbHost=None,
                 dbPwd=None,
                 SQLiteDB=None,
                 tablePerPlatform=False,
                 chart=False):
        self.tablePerPlatform = tablePerPlatform  #Flag that specifies whether or not we create a separate HTML page per platform. True: each platform gets a separate page; False: all results on one page.
        self.chart = chart  #Flag that specifies if we add a URL to a google chart for the observation.
        if (SQLiteDB == None):
            self.xeniaDB = xeniaPostGres()  #Database object.
            if (self.xeniaDB.connect(None, dbUser, dbPwd, dbHost, dbName)):
                print("Successfully connected to Xenia DB: Name: %s at %s\n" %
                      (dbName, dbHost))
            else:
                print( "Failed to connect to Xenia DB: Host: %s Name: %s User: %s(%s)\nError: %s"\
                        %( dbHost, dbName, dbUser, dbPwd, self.xeniaDB.getErrorInfo() ) )
                sys.exit(-1)
        else:
            self.xeniaDB = xeniaSQLite()  #Database object.
            if (self.xeniaDB.connect(SQLiteDB)):
                print("Successfully connected to Xenia DB: Name: %s\n" %
                      (SQLiteDB))
            else:
                print( "Failed to connect to Xenia DB: %s\nError: %s"\
                        %( SQLiteDB, self.xeniaDB.getErrorInfo() ) )
                sys.exit(-1)

        self.obsDataPoints = recursivedefaultdict()  #Dictionary keyed on obs and sOrder for each data point. Used to collect the points to graph.
Example 5
 def getRemoteData(self, siteName, siteSetting):    
   print("Getting remote data for: %s" %(siteName))
   uomConvert = uomconversionFunctions(self.unitsConversionFile)
   if(siteSetting['ysiconfigfile'] != None):     
     ysi = ysiObsSettings(siteSetting['dataQueryURL'], None, siteSetting['ysiconfigfile'])
   elif(siteSetting['paramScrapeURL'] != None):
     ysi = ysiObsSettings(siteSetting['dataQueryURL'], siteSetting['paramScrapeURL'], None)
     
   ysi.initList()
   obsDict = ysi.getAllObservations()    
   obsHash = recursivedefaultdict()     
   obsHash['platform'][siteName]['url'] = siteSetting['url']
   obsHash['platform'][siteName]['latitude'] = siteSetting['latitude']
   obsHash['platform'][siteName]['longitude'] = siteSetting['longitude']
   for param in obsDict:
     #The ysi observation name has the name and units all in one string. This function
     #converts the obs name into our data dictionary name, and breaks out the units as well.
     (obs,fromUOM,sOrder) = self.convertToXeniaObsAndUOM(param)
     #Didn't have a match, so we'll use the source.
     if(len(obs) == 0):
       print("ERROR: Unable to find Xenia observation name for YSI param: %s." %(param))
       parts = param.split('[')
       obs = parts[0]
       fromUOM = parts[1]
       fromUOM = fromUOM.replace("]", "")
       sOrder = "1"
     elev = "0"
     dataTuple = obsDict[param]
     #Now see if we need to convert into different units.
     toUOM = uomConvert.getConversionUnits(fromUOM, 'metric')
     for entry in dataTuple:
       date = entry[0]
       date = self.formDBDate(date)
       value = float(entry[1])
       if(toUOM != None and len(toUOM)):
         convertedVal = uomConvert.measurementConvert(value, fromUOM, toUOM)
         if(convertedVal != None ):
           value = convertedVal
       else:
         toUOM = fromUOM
       #Build the obs hash.
       obsUOM = "%s.%s" %(obs,toUOM)
       obsHash['platform'][siteName]['date'][date]['obsuom'][obsUOM]['elev'][elev]['sorder'][sOrder]['value'] = value
       #obsHash[siteName][date][obs][elev][sOrder]['uom'] = toUOM
       
   return(obsHash)
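
The nested hash returned by getRemoteData can be walked with ordinary nested loops. A minimal sketch, assuming the key layout built above (platform -> site name -> date -> obsuom -> elev -> sorder -> value); the call site and loop variables are illustrative:

  obsHash = self.getRemoteData(siteName, siteSetting)
  for site in obsHash['platform']:
    for date in obsHash['platform'][site]['date']:
      dateRec = obsHash['platform'][site]['date'][date]
      for obsUOM in dateRec['obsuom']:
        for elev in dateRec['obsuom'][obsUOM]['elev']:
          for sOrder in dateRec['obsuom'][obsUOM]['elev'][elev]['sorder']:
            value = dateRec['obsuom'][obsUOM]['elev'][elev]['sorder'][sOrder]['value']
            print("%s %s %s elev %s sorder %s: %s" %(site, date, obsUOM, elev, sOrder, value))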
Example 6
 def __init__(self, dbUser=None, dbName=None, dbHost=None, dbPwd=None, SQLiteDB=None, tablePerPlatform=False,chart=False):
   self.tablePerPlatform = tablePerPlatform    #Flag that specifies whether or not we create a separate HTML page per platform. True: each platform gets a separate page; False: all results on one page.
   self.chart = chart  #Flag that specifies if we add a URL to a google chart for the observation.
   if(SQLiteDB == None):
     self.xeniaDB = xeniaPostGres()  #Database object.
     if( self.xeniaDB.connect( None, dbUser, dbPwd, dbHost, dbName ) ):
       print( "Successfully connected to Xenia DB: Name: %s at %s\n" % ( dbName, dbHost) )
     else:
       print( "Failed to connect to Xenia DB: Host: %s Name: %s User: %s(%s)\nError: %s"\
               %( dbHost, dbName, dbUser, dbPwd, self.xeniaDB.getErrorInfo() ) )      
       sys.exit(-1)
   else:
     self.xeniaDB = xeniaSQLite()  #Database object.
     if( self.xeniaDB.connect( SQLiteDB ) ):
       print( "Successfully connected to Xenia DB: Name: %s\n" % ( SQLiteDB) )
     else:
       print( "Failed to connect to Xenia DB: %s\nError: %s"\
               %( SQLiteDB, self.xeniaDB.getErrorInfo() ) )      
       sys.exit(-1)
     
   self.obsDataPoints = recursivedefaultdict() #Dictionary keyed on obs and sOrder for each data point. Used to collect the points to graph.
Example 7
 def __init__(self,htmlFilename,styleSheet):
   self.htmlFilename = htmlFilename
   self.platformResultsTable = platformResultsTable()
   self.platforms = recursivedefaultdict()
   try:
     self.htmlFile = open(self.htmlFilename, "w")
     self.htmlFile.write( "<html>\n" )
     self.htmlFile.write( "<BODY id=\"RGB_BODY_BG\" >\n" )    
     self.htmlFile.write( "<link href=\"%s\" rel=\"stylesheet\" type=\"text/css\" media=\"screen\" />\n"  % ( styleSheet ) )
   except IOError, e:
     import sys
     import traceback
     
     info = sys.exc_info()        
     excNfo = traceback.extract_tb(info[2], 1)
     items = excNfo[0]
     lastErrorFile = items[0]    
     lastErrorLineNo = items[1]    
     lastErrorFunc = items[2]        
     print("%s Function: %s Line: %s File: %s" % (str(e), lastErrorFunc, lastErrorLineNo, lastErrorFile)) 
     sys.exit(-1)
Example 8
 def __init__(self, xmlConfigFilename):
   configSettings = xmlConfigFile(xmlConfigFilename)
   self.siteSettings = recursivedefaultdict() # Hash of various parameters for each customer site to process.
   paramList = configSettings.getListHead("//environment/ysiSettingsList")
   for child in configSettings.getNextInList(paramList):
     siteName = configSettings.getEntry("name",child)
     geoLoc   = configSettings.getEntry("geoLoc",child)
     self.siteSettings[siteName]['latitude']      = 0.0
     self.siteSettings[siteName]['longitude']     = 0.0
     if(geoLoc != None):
       latLong  = geoLoc.split(',')
       self.siteSettings[siteName]['latitude']      = latLong[0]
       self.siteSettings[siteName]['longitude']     = latLong[1]
       
     self.siteSettings[siteName]['url']           = configSettings.getEntry("platformURL",child)  
     self.siteSettings[siteName]['ysiconfigfile'] = configSettings.getEntry("ysiParamFile",child)  
     self.siteSettings[siteName]['outputtype']    = configSettings.getEntry("outputs/output/type",child)
     self.siteSettings[siteName]['outputfilename']= configSettings.getEntry("outputs/output/filename",child)
     self.siteSettings[siteName]['paramScrapeURL']= configSettings.getEntry("paramScrapeURL",child)
     self.siteSettings[siteName]['dataQueryURL']  = configSettings.getEntry("dataQueryURL",child)
   self.unitsConversionFile = configSettings.getEntry("//environment/unitsConversion/file")
   self.ysiMapToXeniaFile = configSettings.getEntry("//environment/ysiObservationMappingFile/file")
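
The per-site hash built in this constructor holds the parameters that getRemoteData in Example 5 receives as siteSetting, and the self.unitsConversionFile set here is read there as well. A minimal sketch of a driver loop over those settings; the method name processSites is hypothetical:

  def processSites(self):
    for siteName in self.siteSettings:
      siteSetting = self.siteSettings[siteName]
      obsHash = self.getRemoteData(siteName, siteSetting)
      #obsHash now holds the platform/date/obsuom/elev/sorder/value hierarchy for this site.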
Example 9
    def __init__(self, htmlFilename, styleSheet):
        self.htmlFilename = htmlFilename
        self.platformResultsTable = platformResultsTable()
        self.platforms = recursivedefaultdict()
        try:
            self.htmlFile = open(self.htmlFilename, "w")
            self.htmlFile.write("<html>\n")
            self.htmlFile.write("<BODY id=\"RGB_BODY_BG\" >\n")
            self.htmlFile.write(
                "<link href=\"%s\" rel=\"stylesheet\" type=\"text/css\" media=\"screen\" />\n"
                % (styleSheet))
        except IOError, e:
            import sys
            import traceback

            info = sys.exc_info()
            excNfo = traceback.extract_tb(info[2], 1)
            items = excNfo[0]
            lastErrorFile = items[0]
            lastErrorLineNo = items[1]
            lastErrorFunc = items[2]
            print("%s Function: %s Line: %s File: %s" %
                  (str(e), lastErrorFunc, lastErrorLineNo, lastErrorFile))
            sys.exit(-1)
Example 10
    def buildContent(self, xeniaDb, uomConverter, boundingBox):

        try:
            GEORSSPATH = 'http://129.252.37.90/xenia/feeds/georss/'
            DATAQUERYPAGEPATH = 'http://carolinasrcoos.org/queryStation.php?station='
            ADCPGRAPHURL = 'http://carocoops.org/~dramage_prod/cgi-bin/rcoos/ADCPGraph.php?PLATFORMID=<ID>&INTERVAL=<INTERVAL>'
            TWITTERURL = 'http://twitter.com/'
            EMAILALERTPAGEPATH = "http://www.secoora.org/pages/alertpage.php?platform="

            sql = "SELECT to_char(timezone('UTC', m_date), 'YYYY-MM-DD HH24:MI:SS') AS local_date \
      ,m_date as m_date\
      ,multi_obs.platform_handle  as multi_obs_platform_handle\
      ,obs_type.standard_name as obs_type_standard_name\
      ,uom_type.standard_name as uom_type_standard_name\
      ,multi_obs.m_type_id as multi_obs_m_type_id\
      ,m_lon\
      ,m_lat\
      ,m_z\
      ,m_value\
      ,qc_level\
      ,sensor.row_id as sensor_row_id\
      ,sensor.s_order as sensor_s_order\
      ,sensor.url as sensor_url\
      ,platform.url as platform_url\
      ,platform.description as platform_description\
      ,organization.short_name as organization_short_name\
      ,organization.url as organization_url\
      ,m_type_display_order.row_id as m_type_display_order_row_id\
      ,extract(epoch from m_date)\
      from multi_obs\
      left join sensor on sensor.row_id=multi_obs.sensor_id\
      left join m_type on m_type.row_id=multi_obs.m_type_id\
      left join m_scalar_type on m_scalar_type.row_id=m_type.m_scalar_type_id\
      left join obs_type on obs_type.row_id=m_scalar_type.obs_type_id\
      left join uom_type on uom_type.row_id=m_scalar_type.uom_type_id\
      left join platform on platform.row_id=sensor.platform_id\
      left join organization on organization.row_id=platform.organization_id\
      left join m_type_display_order on m_type_display_order.m_type_id=multi_obs.m_type_id\
      where\
        m_date>(now()-interval '12 hours') AND\
       Contains( GeomFromText( \'POLYGON((%s))\'), GeomFromText( 'POINT(' || fixed_longitude || ' ' || fixed_latitude ||')' ) )\
      union\
      select\
            null as local_date,\
            null as m_date,\
            platform.platform_handle as multi_obs_platform_handle ,\
            null as obs_type_standard_name,\
            null as uom_type_standard_name,\
            null as multi_obs_m_type_id,\
            platform.fixed_longitude,\
            platform.fixed_latitude,\
            null as m_z,\
            null as m_value ,\
            null as qc_level,\
            null as row_id,\
            null as s_order,\
            null as sensor_url,\
            platform.url as platform_url ,\
            platform.description as platform_description,\
            organization.short_name as organization_short_name,\
            organization.url as organization_url,\
            null as m_type_display_order_row_id,\
            null as epoch\
            from platform\
      left join organization on organization.row_id=platform.organization_id\
       where platform.active=1 AND\
       Contains( GeomFromText( \'POLYGON((%s))\'), GeomFromText( 'POINT(' || fixed_longitude || ' ' || fixed_latitude ||')' ) )\
        order by multi_obs_platform_handle,m_type_display_order_row_id,sensor_s_order,m_date desc;"      \
              % (boundingBox,boundingBox)

            print(sql)
            latestObs = recursivedefaultdict()
            latestDate = None
            currentPlatform = None
            dbCursor = xeniaDb.dbConnection.executeQuery(sql)
            if (dbCursor != None):
                for obsRow in dbCursor:

                    #print("Organization: %s platform: %s" %(obsRow['organization_short_name'], obsRow['multi_obs_platform_handle']))
                    if (currentPlatform == None):
                        currentPlatform = obsRow['multi_obs_platform_handle']

                    if (latestDate == None):
                        latestDate = str(obsRow['m_date'])
                    else:
                        if (obsRow['m_date'] != None):
                            #We only want the most current obs.
                            if (latestDate != str(obsRow['m_date'])
                                    and currentPlatform
                                    == obsRow['multi_obs_platform_handle']):
                                continue

                    currentPlatform = obsRow['multi_obs_platform_handle']
                    latestDate = str(obsRow['m_date'])
                    if (obsRow['m_type_display_order_row_id'] != None):
                        #latestObs[obsRow['organization_short_name']]['platform_list'][obsRow['multi_obs_platform_handle']]['obs_list'][obsRow['m_type_display_order_row_id']]['m_type_id'] = obsRow['multi_obs_m_type_id']
                        latestObs[obsRow['organization_short_name']][
                            'platform_list'][obsRow[
                                'multi_obs_platform_handle']]['obs_list'][
                                    obsRow['m_type_display_order_row_id']][
                                        'obs_name'] = obsRow[
                                            'obs_type_standard_name']
                        latestObs[obsRow['organization_short_name']][
                            'platform_list'][obsRow[
                                'multi_obs_platform_handle']]['obs_list'][
                                    obsRow['m_type_display_order_row_id']][
                                        'uom'] = obsRow[
                                            'uom_type_standard_name']
                        latestObs[obsRow['organization_short_name']][
                            'platform_list'][obsRow[
                                'multi_obs_platform_handle']]['obs_list'][
                                    obsRow['m_type_display_order_row_id']][
                                        'm_value'] = obsRow['m_value']
                        #latestObs[obsRow['organization_short_name']]['platform_list'][obsRow['multi_obs_platform_handle']]['obs_list'][obsRow['m_type_display_order_row_id']]['m_z'] = obsRow['m_z']
                        latestObs[obsRow['organization_short_name']][
                            'platform_list'][obsRow[
                                'multi_obs_platform_handle']]['obs_list'][
                                    obsRow['m_type_display_order_row_id']][
                                        'qc_level'] = obsRow['qc_level']
                        #latestObs[obsRow['organization_short_name']]['platform_list'][obsRow['multi_obs_platform_handle']]['obs_list'][obsRow['m_type_display_order_row_id']]['sensor_url'] = obsRow['sensor_url']
                        latestObs[obsRow['organization_short_name']][
                            'platform_list'][obsRow[
                                'multi_obs_platform_handle']]['obs_list'][
                                    obsRow['m_type_display_order_row_id']][
                                        'sensor_id'] = obsRow['sensor_row_id']
                        latestObs[obsRow['organization_short_name']][
                            'platform_list'][obsRow[
                                'multi_obs_platform_handle']]['obs_list'][
                                    obsRow['m_type_display_order_row_id']][
                                        'local_date'] = obsRow['local_date']
                        latestObs[obsRow['organization_short_name']][
                            'platform_list'][obsRow[
                                'multi_obs_platform_handle']]['obs_list'][
                                    obsRow['m_type_display_order_row_id']][
                                        'm_date'] = latestDate
                    #assuming all observations are basically the same lat/lon as platform
                    latestObs[obsRow['organization_short_name']][
                        'platform_list'][obsRow['multi_obs_platform_handle']][
                            'm_lat'] = obsRow['m_lat']
                    latestObs[obsRow['organization_short_name']][
                        'platform_list'][obsRow['multi_obs_platform_handle']][
                            'm_lon'] = obsRow['m_lon']
                    latestObs[obsRow['organization_short_name']][
                        'platform_list'][obsRow['multi_obs_platform_handle']][
                            'url'] = obsRow['platform_url']
                    latestObs[obsRow['organization_short_name']][
                        'platform_list'][obsRow['multi_obs_platform_handle']][
                            'platform_desc'] = obsRow['platform_description']
                    #latestObs[obsRow['organization_short_name']]['platform_list'][obsRow['multi_obs_platform_handle']][status] = $platform_status
                    latestObs[obsRow['organization_short_name']][
                        'name'] = obsRow['organization_short_name']
                    latestObs[obsRow['organization_short_name']][
                        'url'] = obsRow['organization_url']
            else:
                print(xeniaDb.dbConnection.getErrorInfo())
                sys.exit(-1)

            dbCursor.close()
            operatorKeys = latestObs.keys()
            operatorKeys.sort()
            platformCnt = 0
            operatorCnt = 0
            insertDate = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())
            for operator in operatorKeys:
                for platform in latestObs[operator]['platform_list']:
                    print("Processing platform: %s" % (platform))
                    htmlContent = ''
                    contentHeader = ''
                    platformContent = ''
                    latestDate = None
                    platformParts = platform.split('.')
                    lcPlatform = platformParts[1].lower()
                    operator = platformParts[0]
                    links ='<a href=%s%s_%s_%s_GeoRSS_latest.xml target=new title="RSS Feed"><img src="resources/images/default/rss_small.jpg"/></a>'\
                            %(GEORSSPATH, platformParts[0], lcPlatform, platformParts[2])
                    if (lcPlatform == 'cap2' or lcPlatform == 'sun2'
                            or lcPlatform == 'frp2' or lcPlatform == 'ocp1'
                            or lcPlatform == 'ilm2' or lcPlatform == 'ilm3'):
                        links += '<a href=%s%sRCOOS target=new title="Twitter Feed"><img src="resources/images/default/twitter.png"/></a>'\
                         %(TWITTERURL,lcPlatform)
                    links += '<a href=%s%s target=new title="Data Query"><img src="resources/images/default/data_query.png"/></a>'\
                              %(DATAQUERYPAGEPATH, lcPlatform.upper())

                    links += '<a href=%s%s target=new title="Email Alerts"><img src="resources/images/default/mail.png"/></a>'\
                              %(EMAILALERTPAGEPATH, platform)

                    desc = latestObs[operator]['platform_list'][platform][
                        'platform_desc']
                    #No description in the database, so we'll make one based on the operator and platform
                    if (len(desc) == 0):
                        desc = "%s %s" % (operator, platformParts[1])

                    if (('url'
                         in latestObs[operator]['platform_list'][platform]) !=
                            False):
                        platformUrl = latestObs[operator]['platform_list'][
                            platform]['url']
                    elif (('url' in latestObs[operator]) != False):
                        platformUrl = latestObs[operator]['url']
                    else:
                        platformUrl = ''

                    lat = 0.0
                    lon = 0.0
                    if (('m_lat'
                         in latestObs[operator]['platform_list'][platform]) !=
                            False):
                        lat = latestObs[operator]['platform_list'][platform][
                            'm_lat']
                    else:
                        print("No latitude defined for platform: %s" %
                              (platform))
                    if (('m_lon'
                         in latestObs[operator]['platform_list'][platform]) !=
                            False):
                        lon = latestObs[operator]['platform_list'][platform][
                            'm_lon']
                    else:
                        print("No longitude defined for platform: %s" %
                              (platform))

                    contentHeader = "<div id=\"popupobscontent\" class=\"popupobscontent\"><hr/><a href=\"%s\" target=new onclick=\"\">%s</a><p id=\"popupobsloc\" class=\"popupobsloc\">Latitude: %4.3f Longitude: %4.3f</p><p id=\"popupobslinks\" class=\"popupobslinks\">%s</p>"\
                                    %(platformUrl,
                                      desc,
                                      lat,
                                      lon,
                                      links )

                    displayOrderKeys = latestObs[operator]['platform_list'][
                        platform]['obs_list'].keys()
                    displayOrderKeys.sort()
                    obsDate = ""

                    for displayOrder in displayOrderKeys:
                        if (latestDate == None):
                            if (latestObs[operator]['platform_list'][platform]
                                ['obs_list'][displayOrder]['m_date'] != None):
                                #latestDate = latestObs[operator]['platform_list'][platform]['obs_list'][displayOrder]['m_date']
                                latestDate = latestObs[operator][
                                    'platform_list'][platform]['obs_list'][
                                        displayOrder]['local_date']
                                obsDate = latestDate
                                localDatetime = time.strptime(
                                    latestDate, '%Y-%m-%d %H:%M:%S')
                                obsLocalEpochSecs = time.mktime(localDatetime)

                                datetimeLabel = "<span id=\"popupobsstatus\" class=\"popupobsstatusold\">No data available within the past 6 hours</span>"
                                localNow = time.mktime(time.localtime())
                                if ((localNow - obsLocalEpochSecs) > 21600):
                                    datetimeLabel = "<span id=\"popupobsstatus\" class=\"popupobsstatusold\">No data available within the past 6 hours</span>"
                                else:
                                    tz = 'EST'
                                    if (time.daylight == 1):
                                        tz = 'EDT'
                                    day = time.strftime("%m/%d", localDatetime)
                                    datetimeLabel = time.strftime(
                                        "Surface conditions as of %I:%M %p",
                                        localDatetime)
                                    datetimeLabel = "%s %s on %s" % (
                                        datetimeLabel, tz, day)
                                    if ((localNow - obsLocalEpochSecs) > 7200):
                                        datetimeLabel += "<br><span class=\"popupobsstatusstale\">Note: This report is more than 2 hours old</span>"

                                platformContent = "<div id=\"popupobs\" class=\"popupobs\"><table class=\"popupobsdata\"><caption>%s</caption>"\
                                                % (datetimeLabel)

                        if (latestObs[operator]['platform_list'][platform]
                            ['obs_list'][displayOrder]['m_value'] != None):
                            obsUOM = latestObs[operator]['platform_list'][
                                platform]['obs_list'][displayOrder]['uom']
                            value = latestObs[operator]['platform_list'][
                                platform]['obs_list'][displayOrder]['m_value']
                            #Get the label we want to use for the observation
                            obsLabel = uomConverter.getDisplayObservationName(
                                latestObs[operator]['platform_list'][platform]
                                ['obs_list'][displayOrder]['obs_name'])
                            if (obsLabel == None):
                                obsLabel = latestObs[operator][
                                    'platform_list'][platform]['obs_list'][
                                        displayOrder]['obs_name']
                            #Get the units we want to convert the data to. This is also used as the label for the units in the text display.
                            displayUOM = uomConverter.getConversionUnits(
                                obsUOM, 'en')
                            if (len(displayUOM) == 0):
                                displayUOM = obsUOM
                            value = uomConverter.measurementConvert(
                                value, obsUOM, displayUOM)
                            if (value == None):
                                value = latestObs[operator]['platform_list'][
                                    platform]['obs_list'][displayOrder][
                                        'm_value']
                                displayUOM = obsUOM
                            googURL = self.buildGoogleChartLink(
                                xeniaDb, platform, obsLabel,
                                latestObs[operator]['platform_list'][platform]
                                ['obs_list'][displayOrder]['sensor_id'],
                                obsUOM, displayUOM,
                                latestObs[operator]['platform_list'][platform]
                                ['obs_list'][displayOrder]['m_date'],
                                uomConverter)

                            measureLabel = "%s %s" % (str(value), displayUOM)
                            qcLevel = latestObs[operator]['platform_list'][
                                platform]['obs_list'][displayOrder]['qc_level']
                            #Add a bad or suspect label if the quality control flag is set to 1 or 2.
                            if (qcLevel == 1):
                                measureLabel += "(bad)"
                            elif (qcLevel == 2):
                                measureLabel += "(suspect)"

                            platformContent += "<tr %s><td scope=\"row\">%s</td><td>%s</td></tr>"\
                                           %(googURL, obsLabel, measureLabel)

                    #Finished platform, increment our count.
                    platformCnt += 1
                    #Check to see if we had any platform content; if not, tag it as having no data available before adding the entry into the
                    #database.
                    if (len(platformContent) == 0):
                        platformContent = "<tr><td>No data available</td></tr>"
                        print(
                            "Platform has no data, possible inactive station: %s"
                            % (platform))

                    htmlContent = "%s%s</table><div id=\"popupobsgraph\"></div>" % (
                        contentHeader, platformContent)
                    """
          lat = 0.0
          if( 'm_lat' in latestObs[operator]['platform_list'][platform] != False):
            lat = latestObs[operator]['platform_list'][platform]['m_lat']
          lon = 0.0
          if('m_lon' in latestObs[operator]['platform_list'][platform] != False):
            lon = latestObs[operator]['platform_list'][platform]['m_lon']
          """
                    dbCur = self.addRowToObsTable(insertDate, obsDate, lat,
                                                  lon, operator, htmlContent,
                                                  platform)
                    if (dbCur == False):
                        print(xeniaDb.dbConnection.getErrorInfo())
                    """                  
          sql = "INSERT INTO html_content(wkt_geometry,organization,html,platform_handle)\
                 values ('POINT(%f %f)','%s','%s', '%s');"\
                 %(latestObs[operator]['platform_list'][platform]['m_lon'], latestObs[operator]['platform_list'][platform]['m_lat'], 
                   operator, 
                   htmlContent, 
                   platform)
          #print("Saving content.\n %s" %(sql))
          
          dbCur = self.executeQuery(sql)
          if(dbCur != None):
            self.db.commit()
          else:
            print(xeniaDb.dbConnection.getErrorInfo())
          """
                operatorCnt += 1

            self.db.commit()
            print("Processed %d operators and %d platforms." %
                  (operatorCnt, platformCnt))
            self.copyToWorkingDB()
        except Exception, E:
            print(traceback.format_exc())
Example 11
 def __init__(self):
   self.table = None
   self.platforms = recursivedefaultdict()
Example 12
def checkSensorActivity(xeniaDb, hoursToLookback, emailList, outFilename, emailUser, emailPwd):
  
  try:
    print("Opening file: %s" %(outFilename))
    outFile = open(outFilename, "w")
    if(hoursToLookback != None):
      outFile.write("Sensor activity check for the past %d hours.\n" %(hoursToLookback))
    else:
      outFile.write("Sensor activity check for the entire record set in multi_obs.\n")
    platNfo = recursivedefaultdict()
    
    sql = "SELECT sensor.row_id AS row_id, sensor.active AS active, \
           obs_type.standard_name as obs_type_standard_name,\
           platform.platform_handle as platform_handle\
           FROM sensor\
             LEFT JOIN m_type on sensor.m_type_id = m_type.row_id\
             LEFT JOIN m_scalar_type on m_scalar_type.row_id = m_type.m_scalar_type_id\
             LEFT JOIN obs_type on  obs_type.row_id = m_scalar_type.obs_type_id\
             LEFT JOIN platform on sensor.platform_id = platform.row_id\
          WHERE platform.active=1 ORDER BY sensor.row_id ASC;"
    dbCursor = xeniaDb.dbConnection.executeQuery(sql)
    if(dbCursor != None):        
      for row in dbCursor:
        sensorId = row['row_id']
        platNfo[sensorId]['platform_handle'] = row['platform_handle']
        platNfo[sensorId]['obsName'] = row['obs_type_standard_name']
        platNfo[sensorId]['active'] = row['active']
      dbCursor.close()
    else:
      msg = "Error: %s" %(xeniaDb.dbConnection.getErrorInfo())
      print(msg)
      outFile.write(msg)
      sys.exit(-1)
    
    reActivatedPlatforms = {}
    where = ""
    if(hoursToLookback != None):
      where = "WHERE m_date >= now() - interval '%d hours'" %(hoursToLookback) 
    #Get all the DISTINCT sensor ids, then we'll mark them as active.
    sql = "SELECT DISTINCT(sensor_id) AS sensor_id,platform_handle,\
            obs_type.standard_name as obs_type_standard_name\
            FROM multi_obs\
             LEFT JOIN m_type on multi_obs.m_type_id = m_type.row_id\
             LEFT JOIN m_scalar_type on m_scalar_type.row_id = m_type.m_scalar_type_id\
             LEFT JOIN obs_type on  obs_type.row_id = m_scalar_type.obs_type_id %s;" %(where)
            
    dbCursor = xeniaDb.dbConnection.executeQuery(sql)
    if(dbCursor != None):
      for row in dbCursor:
        sensorId = row['sensor_id']
        if(platNfo.has_key(sensorId)):
          active = platNfo[sensorId]['active']
          if(active != 1):
            platform_handle = platNfo[sensorId]['platform_handle']
            obsName = platNfo[sensorId]['obsName']
            msg = 'Platform: %s Sensor: %s(%d) inactive sensor now reporting.'\
                   %(platform_handle,obsName,sensorId)
            print(msg)
            outFile.write("%s\n" %(msg))
          del platNfo[sensorId]
        else:
          platformHandle = row['platform_handle']
          print("Platform: %s Sensor ID: %d not present in current active platform/sensor list. Added to re-activate list." %(platformHandle,sensorId))
          if(reActivatedPlatforms.has_key(platformHandle) == False):
            reActivatedPlatforms[platformHandle] = []
          info = {}
          info['sensorId'] = sensorId
          info['obsName'] = row['obs_type_standard_name']
          reActivatedPlatforms[platformHandle].append(info)
      dbCursor.close()
      
      #If we had sensors come alive for platforms that were inactive, we re-activate the platforms and the sensors.
      if(len(reActivatedPlatforms)):
        print("Preparing to re-active Platforms and Sensors that have become active.")
        outFile.write("Reactivating Platforms and Sensors that have become active.\n")
        for platform in reActivatedPlatforms:
          sql = "UPDATE platform SET active=1 WHERE platform_handle='%s'" %(platform)
          dbCursor = xeniaDb.dbConnection.executeQuery(sql)
          if(dbCursor != None):
            
            msg = "Platform: %s" %(platform)
            print(msg + " set active to 1.")
            outFile.write("%s\n" %(msg))
            xeniaDb.dbConnection.commit()
            dbCursor.close()
            #sensorList = reActivatedPlatforms[platform]
            for info in reActivatedPlatforms[platform]:
              sql = "UPDATE sensor SET active=1 WHERE row_id=%d" %(info['sensorId'])
              dbCursor = xeniaDb.dbConnection.executeQuery(sql)
              if(dbCursor != None):
                msg = "Platform: %s Sensor: %s(%d)" %(platform, info['obsName'], info['sensorId'])
                print(msg + " set active to 1.")
                outFile.write("%s\n" %(msg))
                xeniaDb.dbConnection.commit()
                dbCursor.close()
              else:
                print("Error: %s" %(xeniaDb.dbConnection.getErrorInfo()))
          else:
            print("Error: %s" %(xeniaDb.dbConnection.getErrorInfo()))
                  
      if(len(platNfo)):
        outFile.write("The following details show platforms and sensors that are marked as active but did not report.\n")
        for sensorId in platNfo:
          msg = "Platform: %s Sensor: %s(%d)" %(platNfo[sensorId]['platform_handle'],platNfo[sensorId]['obsName'],sensorId)
          print(msg + " did not report observations.")
          outFile.write("%s\n" %(msg))
      outFile.close()
      smtp = smtpClass("inlet.geol.sc.edu", emailUser, emailPwd)
      smtp.subject('[secoora_auto_alert] Check Sensor Results')
      smtp.message('Attached are the latest run results for the sensorTableUtils script.')
      smtp.from_addr('*****@*****.**')
      smtp.rcpt_to(emailList)
      smtp.attach(outFilename)
      smtp.send()      
    else:
      print("Error: %s" %(xeniaDb.dbConnection.getErrorInfo()))
      sys.exit(-1)
      
  except Exception, e:
    import traceback
    traceback.print_exc()
Example 13
def checkSensorActivity(xeniaDb, hoursToLookback, emailList, outFilename,
                        emailUser, emailPwd):

    try:
        print("Opening file: %s" % (outFilename))
        outFile = open(outFilename, "w")
        if (hoursToLookback != None):
            outFile.write("Sensor activity check for the past %d hours.\n" %
                          (hoursToLookback))
        else:
            outFile.write(
                "Sensor activity check for the entire record set in multi_obs.\n"
            )
        platNfo = recursivedefaultdict()

        sql = "SELECT sensor.row_id AS row_id, sensor.active AS active, \
           obs_type.standard_name as obs_type_standard_name,\
           platform.platform_handle as platform_handle\
           FROM sensor\
             LEFT JOIN m_type on sensor.m_type_id = m_type.row_id\
             LEFT JOIN m_scalar_type on m_scalar_type.row_id = m_type.m_scalar_type_id\
             LEFT JOIN obs_type on  obs_type.row_id = m_scalar_type.obs_type_id\
             LEFT JOIN platform on sensor.platform_id = platform.row_id\
          WHERE platform.active=1 ORDER BY sensor.row_id ASC;"

        dbCursor = xeniaDb.dbConnection.executeQuery(sql)
        if (dbCursor != None):
            for row in dbCursor:
                sensorId = row['row_id']
                platNfo[sensorId]['platform_handle'] = row['platform_handle']
                platNfo[sensorId]['obsName'] = row['obs_type_standard_name']
                platNfo[sensorId]['active'] = row['active']
            dbCursor.close()
        else:
            msg = "Error: %s" % (xeniaDb.dbConnection.getErrorInfo())
            print(msg)
            outFile.write(msg)
            sys.exit(-1)

        reActivatedPlatforms = {}
        where = ""
        if (hoursToLookback != None):
            where = "WHERE m_date >= now() - interval '%d hours'" % (
                hoursToLookback)
        #Get all the DISTINCT sensor ids, then we'll mark them as active.
        sql = "SELECT DISTINCT(sensor_id) AS sensor_id,platform_handle,\
            obs_type.standard_name as obs_type_standard_name\
            FROM multi_obs\
             LEFT JOIN m_type on multi_obs.m_type_id = m_type.row_id\
             LEFT JOIN m_scalar_type on m_scalar_type.row_id = m_type.m_scalar_type_id\
             LEFT JOIN obs_type on  obs_type.row_id = m_scalar_type.obs_type_id %s;" % (
            where)

        dbCursor = xeniaDb.dbConnection.executeQuery(sql)
        if (dbCursor != None):
            for row in dbCursor:
                sensorId = row['sensor_id']
                if (platNfo.has_key(sensorId)):
                    active = platNfo[sensorId]['active']
                    if (active != 1):
                        platform_handle = platNfo[sensorId]['platform_handle']
                        obsName = platNfo[sensorId]['obsName']
                        msg = 'Platform: %s Sensor: %s(%d) inactive sensor now reporting.'\
                               %(platform_handle,obsName,sensorId)
                        print(msg)
                        outFile.write("%s\n" % (msg))
                    del platNfo[sensorId]
                else:
                    platformHandle = row['platform_handle']
                    print(
                        "Platform: %s Sensor ID: %d not present in current active platform/sensor list. Added to re-activate list."
                        % (platformHandle, sensorId))
                    if (reActivatedPlatforms.has_key(platformHandle) == False):
                        reActivatedPlatforms[platformHandle] = []
                    info = {}
                    info['sensorId'] = sensorId
                    info['obsName'] = row['obs_type_standard_name']
                    reActivatedPlatforms[platformHandle].append(info)
            dbCursor.close()

            #If we had sensors come alive for platforms that were inactive, we re-activate the platforms and the sensors.
            if (len(reActivatedPlatforms)):
                print("Preparing to re-activate Platforms and Sensors that have become active.")
                outFile.write(
                    "Reactivating Platforms and Sensors that have become active.\n"
                )
                for platform in reActivatedPlatforms:
                    sql = "UPDATE platform SET active=1 WHERE platform_handle='%s'" % (
                        platform)
                    dbCursor = xeniaDb.dbConnection.executeQuery(sql)
                    if (dbCursor != None):

                        msg = "Platform: %s" % (platform)
                        print(msg + " set active to 1.")
                        outFile.write("%s\n" % (msg))
                        xeniaDb.dbConnection.commit()
                        dbCursor.close()
                        #sensorList = reActivatedPlatforms[platform]
                        for info in reActivatedPlatforms[platform]:
                            sql = "UPDATE sensor SET active=1 WHERE row_id=%d" % (
                                info['sensorId'])
                            dbCursor = xeniaDb.dbConnection.executeQuery(sql)
                            if (dbCursor != None):
                                msg = "Platform: %s Sensor: %s(%d)" % (
                                    platform, info['obsName'],
                                    info['sensorId'])
                                print(msg + " set active to 1.")
                                outFile.write("%s\n" % (msg))
                                xeniaDb.dbConnection.commit()
                                dbCursor.close()
                            else:
                                print("Error: %s" %
                                      (xeniaDb.dbConnection.getErrorInfo()))
                    else:
                        print("Error: %s" %
                              (xeniaDb.dbConnection.getErrorInfo()))

            if (len(platNfo)):
                outFile.write(
                    "The following details show platforms and sensors that are marked as active but did not report.\n"
                )
                for sensorId in platNfo:
                    msg = "Platform: %s Sensor: %s(%d)" % (
                        platNfo[sensorId]['platform_handle'],
                        platNfo[sensorId]['obsName'], sensorId)
                    print(msg + " did not report observations.")
                    outFile.write("%s\n" % (msg))
            outFile.close()
            smtp = smtpClass("inlet.geol.sc.edu", emailUser, emailPwd)
            smtp.subject('[secoora_auto_alert] Check Sensor Results')
            smtp.message('Attached are the latest run results for the sensorTableUtils script.')
            smtp.from_addr('*****@*****.**')
            smtp.rcpt_to(emailList)
            smtp.attach(outFilename)
            smtp.send()
        else:
            print("Error: %s" % (xeniaDb.dbConnection.getErrorInfo()))
            sys.exit(-1)

    except Exception, e:
        import traceback
        traceback.print_exc()
Example 14
  def computeMonthlyDataPoints(self, platformList, beginYear, endYear, QAQCFlags, outputFilePath,writeRawDataPoints):
    import calendar
    
    #If we want to use the qc_level to determine which data to include, let's build the SQL for this.
    #qaqcWHERE = ''
    #if(len(QAQCFlags)):
    #  for qcLevel in QAQCFlags:
    #    if(len(qaqcWHERE)):
    #      qaqcWHERE += 'OR '
    #    qaqcWHERE += "qc_level=%d " % (qcLevel)
    #  qaqcWHERE = "AND (%s)" %(qaqcWHERE)
    for platformHandle in platformList:
      #Get all the observations on the platform
      platformNfoCur = self.dbConnection.getPlatformInfo(platformHandle)
      #Platform doesn't seem to exist, so move on.
      if(platformNfoCur == None):
        continue
      platformNfo = platformNfoCur.fetchone()
      platformID = int(platformNfo['row_id'])
      platformNfoCur.close()
            
      sql= "SELECT\
            obs_type.standard_name \
            ,uom_type.standard_name as uom \
            ,sensor.row_id as sensor_id\
            ,sensor.m_type_id as m_type_id\
            ,sensor.s_order as s_order\
          FROM sensor \
            left join m_type on m_type.row_id=sensor.m_type_id \
            left join m_scalar_type on m_scalar_type.row_id=m_type.m_scalar_type_id \
            left join obs_type on obs_type.row_id=m_scalar_type.obs_type_id \
            left join uom_type on uom_type.row_id=m_scalar_type.uom_type_id \
            WHERE sensor.platform_id = %d ORDER BY obs_type.standard_name ASC"\
            %(platformID)
      
      sensorCur = self.dbConnection.executeQuery(sql)
      #No sensors available on the platform.
      if(sensorCur == None):
        continue
      sensorNfo = recursivedefaultdict()
      for row in sensorCur:       
        sensorNfo[row['standard_name']]['uom'] = row['uom']
        sensorNfo[row['standard_name']]['sensor_id'] = int(row['sensor_id'])
        sensorNfo[row['standard_name']]['m_type_id'] = int(row['m_type_id'])
        sensorNfo[row['standard_name']]['sorder'] = int(row['s_order'])
      sensorCur.close()
      
      outputFile = "%s/%s-yearly-stats-%s_%s.csv" %(outputFilePath,platformHandle,beginYear,endYear)
      statsFile = open(outputFile,'w')
      statsFile.write('Observation,StartDate,EndDate,Min,Max,Average,StdDev,90thPercentile,TotalRecordCount\n')
      rawDataPoints = None
      if(writeRawDataPoints):
        outputFile = "%s/%s-yearly-raw.csv" %(outputFilePath,platformHandle)
        rawDataPoints = open(outputFile, 'w')
      if(rawDataPoints != None):
        rawDataPoints.write('Observation,StartDate,EndDate,Data\n')
        
      yearList = []
      if(beginYear == None):
        # Get the distinct years
        if(self.dbConnection.dbType == dbTypes.PostGRES):
          sql = "SELECT DISTINCT(EXTRACT(YEAR FROM m_date)) as year FROM multi_obs WHERE platform_handle='%s'" %(platformHandle)
        else:
          sql = "SELECT DISTINCT(strftime('%%Y', m_date)) as year FROM multi_obs WHERE platform_handle='%s'" %(platformHandle)
        dbCursor = self.dbConnection.executeQuery(sql)
        if(dbCursor != None):
          for row in dbCursor:
            yearList.append(int(row['year']))
          dbCursor.close()
      else:
        for i in range(beginYear, endYear+1):
          yearList.append(i)
          
      #This is a dictionary we use to hold all the months of data for the years. We use it as a collection
      #of stats() objects so we can calculate some overall stats for each month over the years.
      #obsOverallMonthStats = recursivedefaultdict()
      print("Processing: %s" % (platformHandle))      
      for year in yearList:
        for obsName in sensorNfo:       
          uom = sensorNfo[obsName]['uom']
          sensorID = sensorNfo[obsName]['sensor_id']
          mTypeID = sensorNfo[obsName]['m_type_id']
          #sOrder = sensorNfo[obsName]['sorder']
          #Now for each month, we calc stats on the data.
          for month in range( 1,13 ):
            print("Obs: %s(%s) Year: %d Month: %d" %(obsName, uom, year, month))       
            monthStats = stats()
            dayCnt = calendar.monthrange(year, month)
            startDate = "%d-%02d-%02dT00:00:00" %(year,month,1)
            endDate = "%d-%02d-%2dT24:00:00" %(year,month,dayCnt[1])
            if(rawDataPoints != None):
              rawDataPoints.write("%s,%s,%s" %(obsName,startDate,endDate))

            
            #mTypeID = self.dbConnection.getMTypeFromObsName(obsName, uom, platformHandle, sOrder)
            #sql = "SELECT m_date,m_value FROM multi_obs WHERE (m_date >= '%s' AND m_date <= '%s')\
            #       AND sensor_id=%d %s;"\
            #      %(startDate,endDate,sensorID,qaqcWHERE)
            sql = "SELECT m_date,m_value,qc_level FROM multi_obs WHERE (m_date >= '%s' AND m_date <= '%s')\
                   AND sensor_id=%d;"\
                  %(startDate,endDate,sensorID)
            dbCursor = self.dbConnection.executeQuery(sql)
            if(dbCursor != None):
              for row in dbCursor:
                goodVal = False
                #Use all data.
                if(len(QAQCFlags) == 0):
                  goodVal = True                
                elif(row['qc_level'] != None):
                  for qaqcFlag in QAQCFlags:
                    if(qaqcFlag == row['qc_level']):
                      goodVal = True
                      break                
                if(goodVal):
                  m_value = row['m_value']
                  if(m_value != None):
                    m_value = float(m_value)
                    monthStats.addValue(m_value)
                    if(rawDataPoints != None):
                      rawDataPoints.write(",%f" %(m_value))
                
              monthStats.doCalculations()
              avg = monthStats.average
              if(avg == None):
                avg = -1.0
              stdDev = monthStats.stdDev
              if(stdDev == None):
                stdDev = -1.0
              popStdDev = monthStats.populationStdDev
              if(popStdDev == None):
                popStdDev = -1.0
                
              UpperPercentile = monthStats.getValueAtPercentile(90)
              if(UpperPercentile == None):
                UpperPercentile = -1.0
              min = monthStats.minVal
              if(min == None):
                min = -1.0
              max = monthStats.maxVal
              if(max == None):
                max = -1.0
                
              statsFile.write('%s,%s,%s,%f,%f,%f,%f,%f,%d\n'\
                              %(obsName,startDate,endDate,min,max,avg,stdDev,UpperPercentile,len(monthStats.items)))
              if(rawDataPoints != None):
                rawDataPoints.write("\n")
            else:
              i = 0
      statsFile.close()    
      if(rawDataPoints != None):
        rawDataPoints.close()
Example 15
  def buildContent(self, xeniaDb, uomConverter, boundingBox):
    
    try:
      GEORSSPATH = 'http://129.252.37.90/xenia/feeds/georss/'
      DATAQUERYPAGEPATH = 'http://carolinasrcoos.org/queryStation.php?station='
      ADCPGRAPHURL = 'http://carocoops.org/~dramage_prod/cgi-bin/rcoos/ADCPGraph.php?PLATFORMID=<ID>&INTERVAL=<INTERVAL>'
      TWITTERURL = 'http://twitter.com/'
      EMAILALERTPAGEPATH = "http://www.secoora.org/pages/alertpage.php?platform="

      sql = "SELECT to_char(timezone('UTC', m_date), 'YYYY-MM-DD HH24:MI:SS') AS local_date \
      ,m_date as m_date\
      ,multi_obs.platform_handle  as multi_obs_platform_handle\
      ,obs_type.standard_name as obs_type_standard_name\
      ,uom_type.standard_name as uom_type_standard_name\
      ,multi_obs.m_type_id as multi_obs_m_type_id\
      ,m_lon\
      ,m_lat\
      ,m_z\
      ,m_value\
      ,qc_level\
      ,sensor.row_id as sensor_row_id\
      ,sensor.s_order as sensor_s_order\
      ,sensor.url as sensor_url\
      ,platform.url as platform_url\
      ,platform.description as platform_description\
      ,organization.short_name as organization_short_name\
      ,organization.url as organization_url\
      ,m_type_display_order.row_id as m_type_display_order_row_id\
      ,extract(epoch from m_date)\
      from multi_obs\
      left join sensor on sensor.row_id=multi_obs.sensor_id\
      left join m_type on m_type.row_id=multi_obs.m_type_id\
      left join m_scalar_type on m_scalar_type.row_id=m_type.m_scalar_type_id\
      left join obs_type on obs_type.row_id=m_scalar_type.obs_type_id\
      left join uom_type on uom_type.row_id=m_scalar_type.uom_type_id\
      left join platform on platform.row_id=sensor.platform_id\
      left join organization on organization.row_id=platform.organization_id\
      left join m_type_display_order on m_type_display_order.m_type_id=multi_obs.m_type_id\
      where\
        m_date>(now()-interval '12 hours') AND\
       Contains( GeomFromText( \'POLYGON((%s))\'), GeomFromText( 'POINT(' || fixed_longitude || ' ' || fixed_latitude ||')' ) )\
      union\
      select\
            null as local_date,\
            null as m_date,\
            platform.platform_handle as multi_obs_platform_handle ,\
            null as obs_type_standard_name,\
            null as uom_type_standard_name,\
            null as multi_obs_m_type_id,\
            platform.fixed_longitude,\
            platform.fixed_latitude,\
            null as m_z,\
            null as m_value ,\
            null as qc_level,\
            null as row_id,\
            null as s_order,\
            null as sensor_url,\
            platform.url as platform_url ,\
            platform.description as platform_description,\
            organization.short_name as organization_short_name,\
            organization.url as organization_url,\
            null as m_type_display_order_row_id,\
            null as epoch\
            from platform\
      left join organization on organization.row_id=platform.organization_id\
       where platform.active=1 AND\
       Contains( GeomFromText( \'POLYGON((%s))\'), GeomFromText( 'POINT(' || fixed_longitude || ' ' || fixed_latitude ||')' ) )\
        order by multi_obs_platform_handle,m_type_display_order_row_id,sensor_s_order,m_date desc;"\
        % (boundingBox,boundingBox)
      
      print(sql)
      latestObs = recursivedefaultdict()
      latestDate = None
      currentPlatform = None
      dbCursor = xeniaDb.dbConnection.executeQuery( sql )    
      if(dbCursor != None):          
        for obsRow in dbCursor:
          
          #print("Organization: %s platform: %s" %(obsRow['organization_short_name'], obsRow['multi_obs_platform_handle']))
          if(currentPlatform == None):
            currentPlatform = obsRow['multi_obs_platform_handle']
            
          if(latestDate == None):
            latestDate = str(obsRow['m_date'])          
          else:
            if(obsRow['m_date'] != None):
              #We only want the most current obs.
              if(latestDate != str(obsRow['m_date']) and currentPlatform == obsRow['multi_obs_platform_handle']):
                continue
                
          currentPlatform = obsRow['multi_obs_platform_handle']
          latestDate = str(obsRow['m_date'])
          if(obsRow['m_type_display_order_row_id'] != None):
            #latestObs[obsRow['organization_short_name']]['platform_list'][obsRow['multi_obs_platform_handle']]['obs_list'][obsRow['m_type_display_order_row_id']]['m_type_id'] = obsRow['multi_obs_m_type_id']
            latestObs[obsRow['organization_short_name']]['platform_list'][obsRow['multi_obs_platform_handle']]['obs_list'][obsRow['m_type_display_order_row_id']]['obs_name'] = obsRow['obs_type_standard_name']
            latestObs[obsRow['organization_short_name']]['platform_list'][obsRow['multi_obs_platform_handle']]['obs_list'][obsRow['m_type_display_order_row_id']]['uom'] = obsRow['uom_type_standard_name']
            latestObs[obsRow['organization_short_name']]['platform_list'][obsRow['multi_obs_platform_handle']]['obs_list'][obsRow['m_type_display_order_row_id']]['m_value'] = obsRow['m_value']
            #latestObs[obsRow['organization_short_name']]['platform_list'][obsRow['multi_obs_platform_handle']]['obs_list'][obsRow['m_type_display_order_row_id']]['m_z'] = obsRow['m_z']
            latestObs[obsRow['organization_short_name']]['platform_list'][obsRow['multi_obs_platform_handle']]['obs_list'][obsRow['m_type_display_order_row_id']]['qc_level'] = obsRow['qc_level']
            #latestObs[obsRow['organization_short_name']]['platform_list'][obsRow['multi_obs_platform_handle']]['obs_list'][obsRow['m_type_display_order_row_id']]['sensor_url'] = obsRow['sensor_url']
            latestObs[obsRow['organization_short_name']]['platform_list'][obsRow['multi_obs_platform_handle']]['obs_list'][obsRow['m_type_display_order_row_id']]['sensor_id'] = obsRow['sensor_row_id']          
            latestObs[obsRow['organization_short_name']]['platform_list'][obsRow['multi_obs_platform_handle']]['obs_list'][obsRow['m_type_display_order_row_id']]['local_date'] = obsRow['local_date']
            latestObs[obsRow['organization_short_name']]['platform_list'][obsRow['multi_obs_platform_handle']]['obs_list'][obsRow['m_type_display_order_row_id']]['m_date'] = latestDate
          #assuming all observations are basically the same lat/lon as platform
          latestObs[obsRow['organization_short_name']]['platform_list'][obsRow['multi_obs_platform_handle']]['m_lat'] = obsRow['m_lat']
          latestObs[obsRow['organization_short_name']]['platform_list'][obsRow['multi_obs_platform_handle']]['m_lon'] = obsRow['m_lon']
          latestObs[obsRow['organization_short_name']]['platform_list'][obsRow['multi_obs_platform_handle']]['url'] = obsRow['platform_url']
          latestObs[obsRow['organization_short_name']]['platform_list'][obsRow['multi_obs_platform_handle']]['platform_desc'] = obsRow['platform_description']
          #latestObs[obsRow['organization_short_name']]['platform_list'][obsRow['multi_obs_platform_handle']][status] = $platform_status
          latestObs[obsRow['organization_short_name']]['name'] = obsRow['organization_short_name']
          latestObs[obsRow['organization_short_name']]['url'] = obsRow['organization_url']
      else:
        print( xeniaDb.dbConnection.getErrorInfo() )
        sys.exit(-1)
                                                                                                  
      dbCursor.close()
      operatorKeys = latestObs.keys()
      operatorKeys.sort()
      platformCnt = 0
      operatorCnt = 0
      insertDate = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())
      for operator in operatorKeys:
        for platform in latestObs[operator]['platform_list']:
          print("Processing platform: %s" %(platform))
          htmlContent = ''        
          contentHeader = ''
          platformContent = ''
          latestDate = None
          platformParts = platform.split('.')    
          lcPlatform = platformParts[1].lower()
          operator = platformParts[0]
          links = '<a href=%s%s_%s_%s_GeoRSS_latest.xml target=new title="RSS Feed"><img src="resources/images/default/rss_small.jpg"/></a>'\
                  %(GEORSSPATH, platformParts[0], lcPlatform, platformParts[2])
          if( lcPlatform == 'cap2' or lcPlatform == 'sun2' or lcPlatform == 'frp2' or
              lcPlatform == 'ocp1' or lcPlatform == 'ilm2' or lcPlatform == 'ilm3' ):
            links += '<a href=%s%sRCOOS target=new title="Twitter Feed"><img src="resources/images/default/twitter.png"/></a>'\
             %(TWITTERURL,lcPlatform)
          links += '<a href=%s%s target=new title="Data Query"><img src="resources/images/default/data_query.png"/></a>'\
                    %(DATAQUERYPAGEPATH, lcPlatform.upper())

          links += '<a href=%s%s target=new title="Email Alerts"><img src="resources/images/default/mail.png"/></a>'\
                    %(EMAILALERTPAGEPATH, platform)
                    
          desc = latestObs[operator]['platform_list'][platform]['platform_desc']
          #No description in the database, so we'll make one based on the operator and platform
          if(len(desc) == 0):
            desc = "%s %s" % (operator, platformParts[1])
          
          #Prefer the platform URL; fall back to the organization URL if none is set.
          if 'url' in latestObs[operator]['platform_list'][platform]:
            platformUrl = latestObs[operator]['platform_list'][platform]['url']
          elif 'url' in latestObs[operator]:
            platformUrl = latestObs[operator]['url']
          else:
            platformUrl = ''
          
          lat = 0.0
          lon = 0.0
          if 'm_lat' in latestObs[operator]['platform_list'][platform]:
            lat = latestObs[operator]['platform_list'][platform]['m_lat']
          else:
            print("No latitude defined for platform: %s" %(platform))
          if 'm_lon' in latestObs[operator]['platform_list'][platform]:
            lon = latestObs[operator]['platform_list'][platform]['m_lon']
          else:
            print("No longitude defined for platform: %s" %(platform))

          contentHeader = "<div id=\"popupobscontent\" class=\"popupobscontent\"><hr/><a href=\"%s\" target=new onclick=\"\">%s</a><p id=\"popupobsloc\" class=\"popupobsloc\">Latitude: %4.3f Longitude: %4.3f</p><p id=\"popupobslinks\" class=\"popupobslinks\">%s</p>"\
                          %(platformUrl, 
                            desc,
                            lat,
                            lon,
                            links )
          
          displayOrderKeys = latestObs[operator]['platform_list'][platform]['obs_list'].keys()
          displayOrderKeys.sort()    
          obsDate = ""     

          for displayOrder in displayOrderKeys:    
            if(latestDate == None):
              if(latestObs[operator]['platform_list'][platform]['obs_list'][displayOrder]['m_date'] != None):
                #latestDate = latestObs[operator]['platform_list'][platform]['obs_list'][displayOrder]['m_date']                     
                latestDate = latestObs[operator]['platform_list'][platform]['obs_list'][displayOrder]['local_date']
                obsDate = latestDate
                localDatetime = time.strptime(latestDate, '%Y-%m-%d %H:%M:%S')                
                obsLocalEpochSecs = time.mktime(localDatetime)
                
                datetimeLabel = "<span id=\"popupobsstatus\" class=\"popupobsstatusold\">No data available within the past 6 hours</span>"
                localNow = time.mktime(time.localtime())
                if((localNow - obsLocalEpochSecs) > 21600):
                  datetimeLabel = "<span id=\"popupobsstatus\" class=\"popupobsstatusold\">No data available within the past 6 hours</span>"         
                else:
                  tz = 'EST'
                  if(time.daylight == 1):
                    tz = 'EDT'
                  day = time.strftime("%m/%d", localDatetime)
                  datetimeLabel = time.strftime("Surface conditions as of %I:%M %p", localDatetime)
                  datetimeLabel = "%s %s on %s" %(datetimeLabel, tz, day)
                  if((localNow - obsLocalEpochSecs) > 7200): 
                    datetimeLabel += "<br><span class=\"popupobsstatusstale\">Note: This report is more than 2 hours old</span>"
                  
                platformContent = "<div id=\"popupobs\" class=\"popupobs\"><table class=\"popupobsdata\"><caption>%s</caption>"\
                                % (datetimeLabel)
                                          
            if(latestObs[operator]['platform_list'][platform]['obs_list'][displayOrder]['m_value'] != None):
              obsUOM = latestObs[operator]['platform_list'][platform]['obs_list'][displayOrder]['uom']
              value = latestObs[operator]['platform_list'][platform]['obs_list'][displayOrder]['m_value']
              #Get the label we want to use for the observation
              obsLabel = uomConverter.getDisplayObservationName(latestObs[operator]['platform_list'][platform]['obs_list'][displayOrder]['obs_name'])
              if(obsLabel == None):
                obsLabel = latestObs[operator]['platform_list'][platform]['obs_list'][displayOrder]['obs_name']
              #Get the units we want to convert the data to. This is also used as the label for the units in the text display.
              displayUOM = uomConverter.getConversionUnits( obsUOM, 'en' )
              if(len(displayUOM) == 0):
                displayUOM = obsUOM            
              value = uomConverter.measurementConvert( value, obsUOM, displayUOM )
              if(value == None):
                value = latestObs[operator]['platform_list'][platform]['obs_list'][displayOrder]['m_value']
                displayUOM = obsUOM
              googURL = self.buildGoogleChartLink(xeniaDb, 
                                                  platform, 
                                                  obsLabel, 
                                                  latestObs[operator]['platform_list'][platform]['obs_list'][displayOrder]['sensor_id'],
                                                  obsUOM, 
                                                  displayUOM, 
                                                  latestObs[operator]['platform_list'][platform]['obs_list'][displayOrder]['m_date'], 
                                                  uomConverter)
              
              measureLabel = "%s %s" %(str(value), displayUOM)
              qcLevel = latestObs[operator]['platform_list'][platform]['obs_list'][displayOrder]['qc_level']
              #Add a bad or suspect label if the quality control flag is set to 1 or 2.
              if(qcLevel == 1):
                measureLabel += "(bad)"
              elif(qcLevel == 2):
                measureLabel += "(suspect)"
              
              platformContent += "<tr %s><td scope=\"row\">%s</td><td>%s</td></tr>"\
                             %(googURL, obsLabel, measureLabel)
                            
          #Finished platform, increment our count.
          platformCnt += 1      
          #If there was no platform content, tag the entry as having no data available
          #before adding it to the database.
          if(len(platformContent) == 0):
            platformContent = "<tr><td>No data available</td></tr>"
            print("Platform has no data, possible inactive station: %s" %(platform))
    
          htmlContent = "%s%s</table><div id=\"popupobsgraph\"></div>" %(contentHeader,platformContent)                          
          
          """
          lat = 0.0
          if( 'm_lat' in latestObs[operator]['platform_list'][platform] != False):
            lat = latestObs[operator]['platform_list'][platform]['m_lat']
          lon = 0.0
          if('m_lon' in latestObs[operator]['platform_list'][platform] != False):
            lon = latestObs[operator]['platform_list'][platform]['m_lon']
          """            
          dbCur = self.addRowToObsTable(insertDate, 
                                        obsDate,                                        
                                        lat, 
                                        lon,
                                        operator, 
                                        htmlContent, 
                                        platform)
          if(dbCur == False):
            print(xeniaDb.dbConnection.getErrorInfo())
          """                  
          sql = "INSERT INTO html_content(wkt_geometry,organization,html,platform_handle)\
                 values ('POINT(%f %f)','%s','%s', '%s');"\
                 %(latestObs[operator]['platform_list'][platform]['m_lon'], latestObs[operator]['platform_list'][platform]['m_lat'], 
                   operator, 
                   htmlContent, 
                   platform)
          #print("Saving content.\n %s" %(sql))
          
          dbCur = self.executeQuery(sql)
          if(dbCur != None):
            self.db.commit()
          else:
            print(xeniaDb.dbConnection.getErrorInfo())
          """
        operatorCnt += 1
                    
      self.db.commit()
      print("Processed %d operators and %d platforms." %(operatorCnt, platformCnt))
      self.copyToWorkingDB()
    except Exception, E:
      print(traceback.format_exc())
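
The examples above and below build nested lookup tables with recursivedefaultdict from xeniatools.xenia, whose definition is not shown in these listings. A minimal sketch of an equivalent helper, assuming it is simply a self-nesting defaultdict:

# Hypothetical stand-in for xeniatools.xenia.recursivedefaultdict; the real
# implementation may differ, but this reproduces the usage seen above.
from collections import defaultdict

def recursivedefaultdict():
    # Every missing key yields another nested dictionary, so deep assignments
    # such as d['org']['platform_list']['org.handle.buoy']['m_lat'] = 32.5
    # never raise a KeyError.
    return defaultdict(recursivedefaultdict)

# Usage matching the pattern in buildContent():
latestObs = recursivedefaultdict()
latestObs['org']['platform_list']['org.handle.buoy']['m_lat'] = 32.5
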
Esempio n. 16
0
    def computeMonthlyDataPoints(self, platformList, beginYear, endYear, QAQCFlags, outputFilePath, writeRawDataPoints):
        import calendar

        # If we want to use the qc_level to determine which data to include, let's build the SQL for this.
        # qaqcWHERE = ''
        # if(len(QAQCFlags)):
        #  for qcLevel in QAQCFlags:
        #    if(len(qaqcWHERE)):
        #      qaqcWHERE += 'OR '
        #    qaqcWHERE += "qc_level=%d " % (qcLevel)
        #  qaqcWHERE = "AND (%s)" %(qaqcWHERE)
        for platformHandle in platformList:
            # Get all the observations on the platform
            platformNfoCur = self.dbConnection.getPlatformInfo(platformHandle)
            # Platform doesn't seem to exist, so move on.
            if platformNfoCur == None:
                continue
            platformNfo = platformNfoCur.fetchone()
            platformID = int(platformNfo["row_id"])
            platformNfoCur.close()

            sql = (
                "SELECT\
            obs_type.standard_name \
            ,uom_type.standard_name as uom \
            ,sensor.row_id as sensor_id\
            ,sensor.m_type_id as m_type_id\
            ,sensor.s_order as s_order\
          FROM sensor \
            left join m_type on m_type.row_id=sensor.m_type_id \
            left join m_scalar_type on m_scalar_type.row_id=m_type.m_scalar_type_id \
            left join obs_type on obs_type.row_id=m_scalar_type.obs_type_id \
            left join uom_type on uom_type.row_id=m_scalar_type.uom_type_id \
            WHERE sensor.platform_id = %d ORDER BY obs_type.standard_name ASC"
                % (platformID)
            )

            sensorCur = self.dbConnection.executeQuery(sql)
            # No sensors available on the platform.
            if sensorCur == None:
                continue
            sensorNfo = recursivedefaultdict()
            for row in sensorCur:
                sensorNfo[row["standard_name"]]["uom"] = row["uom"]
                sensorNfo[row["standard_name"]]["sensor_id"] = int(row["sensor_id"])
                sensorNfo[row["standard_name"]]["m_type_id"] = int(row["m_type_id"])
                sensorNfo[row["standard_name"]]["sorder"] = int(row["s_order"])
            sensorCur.close()

            outputFile = "%s/%s-yearly-stats-%s_%s.csv" % (outputFilePath, platformHandle, beginYear, endYear)
            statsFile = open(outputFile, "w")
            statsFile.write("Observation,StartDate,EndDate,Min,Max,Average,StdDev,90thPercentile,TotalRecordCount\n")
            rawDataPoints = None
            if writeRawDataPoints:
                outputFile = "%s/%s-yearly-raw.csv" % (outputFilePath, platformHandle)
                rawDataPoints = open(outputFile, "w")
            if rawDataPoints != None:
                rawDataPoints.write("Observation,StartDate,EndDate,Data\n")

            yearList = []
            if beginYear == None:
                # Get the distinct years
                if self.dbConnection.dbType == dbTypes.PostGRES:
                    sql = (
                        "SELECT DISTINCT(EXTRACT(YEAR FROM m_date)) as year FROM multi_obs WHERE platform_handle='%s'"
                        % (platformHandle)
                    )
                else:
                    sql = (
                        "SELECT DISTINCT(strftime('%%Y', m_date)) as year FROM multi_obs WHERE platform_handle='%s'"
                        % (platformHandle)
                    )
                dbCursor = self.dbConnection.executeQuery(sql)
                if dbCursor != None:
                    for row in dbCursor:
                        yearList.append(int(row["year"]))
                    dbCursor.close()
            else:
                for i in range(beginYear, endYear + 1):
                    yearList.append(i)

            # This is a dictionary we use to hold all the months of data for the years. We use it as a collection
            # of stats() objects so we can calculate some overall stats for each month over the years.
            # obsOverallMonthStats = recursivedefaultdict()
            print("Processing: %s" % (platformHandle))
            for year in yearList:
                for obsName in sensorNfo:
                    uom = sensorNfo[obsName]["uom"]
                    sensorID = sensorNfo[obsName]["sensor_id"]
                    mTypeID = sensorNfo[obsName]["m_type_id"]
                    # sOrder = sensorNfo[obsName]['sorder']
                    # Now for each month, we calc stats on the data.
                    for month in range(1, 13):
                        print("Obs: %s(%s) Year: %d Month: %d" % (obsName, uom, year, month))
                        monthStats = stats()
                        dayCnt = calendar.monthrange(year, month)
                        startDate = "%d-%02d-%02dT00:00:00" % (year, month, 1)
                        endDate = "%d-%02d-%02dT24:00:00" % (year, month, dayCnt[1])
                        if rawDataPoints != None:
                            rawDataPoints.write("%s,%s,%s" % (obsName, startDate, endDate))

                        # mTypeID = self.dbConnection.getMTypeFromObsName(obsName, uom, platformHandle, sOrder)
                        # sql = "SELECT m_date,m_value FROM multi_obs WHERE (m_date >= '%s' AND m_date <= '%s')\
                        #       AND sensor_id=%d %s;"\
                        #      %(startDate,endDate,sensorID,qaqcWHERE)
                        sql = (
                            "SELECT m_date,m_value,qc_level FROM multi_obs WHERE (m_date >= '%s' AND m_date <= '%s')\
                   AND sensor_id=%d;"
                            % (startDate, endDate, sensorID)
                        )
                        dbCursor = self.dbConnection.executeQuery(sql)
                        if dbCursor != None:
                            for row in dbCursor:
                                goodVal = False
                                # Use all data.
                                if len(QAQCFlags) == 0:
                                    goodVal = True
                                elif row["qc_level"] != None:
                                    for qaqcFlag in QAQCFlags:
                                        if qaqcFlag == row["qc_level"]:
                                            goodVal = True
                                            break
                                if goodVal:
                                    m_value = row["m_value"]
                                    if m_value != None:
                                        m_value = float(m_value)
                                        monthStats.addValue(m_value)
                                        if rawDataPoints != None:
                                            rawDataPoints.write(",%f" % (m_value))

                            monthStats.doCalculations()
                            avg = monthStats.average
                            if avg == None:
                                avg = -1.0
                            stdDev = monthStats.stdDev
                            if stdDev == None:
                                stdDev = -1.0
                            popStdDev = monthStats.populationStdDev
                            if popStdDev == None:
                                popStdDev = -1.0

                            UpperPercentile = monthStats.getValueAtPercentile(90)
                            if UpperPercentile == None:
                                UpperPercentile = -1.0
                            min = monthStats.minVal
                            if min == None:
                                min = -1.0
                            max = monthStats.maxVal
                            if max == None:
                                max = -1.0

                            statsFile.write(
                                "%s,%s,%s,%f,%f,%f,%f,%f,%d\n"
                                % (
                                    obsName,
                                    startDate,
                                    endDate,
                                    min,
                                    max,
                                    avg,
                                    stdDev,
                                    UpperPercentile,
                                    len(monthStats.items),
                                )
                            )
                            if rawDataPoints != None:
                                rawDataPoints.write("\n")
                        else:
                            #Query returned no cursor; nothing to write for this month.
                            pass
            statsFile.close()
            if rawDataPoints != None:
                rawDataPoints.close()
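
computeMonthlyDataPoints leans on a stats() accumulator whose source is not included in these listings. Judging only from the calls made above (addValue, doCalculations, getValueAtPercentile, and the average, stdDev, populationStdDev, minVal, maxVal, items members), a compatible accumulator could be sketched as follows; this is an assumption-driven stand-in, not the original xeniatools class.

# Hypothetical sketch of a stats() accumulator compatible with the calls above.
import math

class stats(object):
    def __init__(self):
        self.items = []
        self.minVal = None
        self.maxVal = None
        self.average = None
        self.stdDev = None
        self.populationStdDev = None

    def addValue(self, value):
        self.items.append(value)

    def doCalculations(self):
        if len(self.items) == 0:
            return
        self.minVal = min(self.items)
        self.maxVal = max(self.items)
        self.average = sum(self.items) / float(len(self.items))
        variance = sum((v - self.average) ** 2 for v in self.items)
        self.populationStdDev = math.sqrt(variance / len(self.items))
        if len(self.items) > 1:
            self.stdDev = math.sqrt(variance / (len(self.items) - 1))

    def getValueAtPercentile(self, percentile):
        # Nearest-rank percentile over the sorted samples.
        if len(self.items) == 0:
            return None
        ordered = sorted(self.items)
        rank = int(math.ceil((percentile / 100.0) * len(ordered)))
        return ordered[max(rank - 1, 0)]
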
Esempio n. 17
0
    def __init__(self):
        self.table = None
        self.platforms = recursivedefaultdict()