Example #1
  def importFilesIntoDB(self, xmrgDir, deleteDataFiles):     
    try:
      outputFile = None
      #Get a list of the files in the import dir.
      fileList = os.listdir(xmrgDir)
      fileList.sort()          
      if(self.outputFilename):     
        #Add the starting and ending dates to the file name.
        xmrg = xmrgFile()
        
        #The collection times in the filenames are UTC; convert them to US Eastern time.
        utc = timezone('UTC')
        utcDate = utc.localize(datetime.datetime.strptime(xmrg.getCollectionDateFromFilename(fileList[0]), "%Y-%m-%dT%H:%M:%S"))
        startDate = utcDate.astimezone(timezone('US/Eastern')).strftime("%Y-%m-%dT%H:%M:%S")
        utcDate = utc.localize(datetime.datetime.strptime(xmrg.getCollectionDateFromFilename(fileList[-1]), "%Y-%m-%dT%H:%M:%S"))
        endDate = utcDate.astimezone(timezone('US/Eastern')).strftime("%Y-%m-%dT%H:%M:%S")
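        #For illustration (assuming EDT is in effect on the date in question): a filename time of
        #2012-10-29T12:00:00 UTC would become 2012-10-29T08:00:00 US/Eastern here.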
               
        startDate = startDate.replace(':', '_')
        endDate = endDate.replace(':', '_')
        nameSubs = {"start" : startDate, "end" : endDate }
        filename = self.outputFilename % (nameSubs)
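        #Hypothetical example of the dict-style substitution above: an outputFilename template of
        #"nexrad_%(start)s_%(end)s.csv" would yield "nexrad_2012-10-29T08_00_00_2012-10-30T08_00_00.csv".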
        outputFile = open(filename, "w")
        if(self.logger != None):
          self.logger.debug("Output file: %s opened" % (filename))
        outputFile.write("Start Time, End Time, Weighted Average\n")
      for fileName in fileList:    
        fullPath = "%s/%s" %(xmrgDir,fileName)  
        #Make sure we are trying to import a file and not a directory.
        if(os.path.isfile(fullPath) != True):
          if(self.logger != None):
            self.logger.debug("%s is not a file, skipping" % (fullPath))
          continue

        xmrg = xmrgFile("nexrad_proc_logger")
        xmrg.openFile(fullPath)
        if( xmrg.readFileHeader() ):     
          if(self.logger != None):
            self.logger.debug( "File Origin: X %d Y: %d Columns: %d Rows: %d" %(xmrg.XOR,xmrg.YOR,xmrg.MAXX,xmrg.MAXY))
          if(xmrg.readAllRows()):          
            if(self.outputFilename):           
              self.processData(xmrg, outputFile)
            if(self.shapefilePath):
              self.writeShapefile(xmrg)
          else:
            if(self.logger != None):
              self.logger.error("Unable to read rows all rows in from file.")
              
          if(deleteDataFiles):
            xmrg.cleanUp(True,True)
          else:
            xmrg.cleanUp(True,False)
          xmrg.xmrgFile.close()
                  
        else:
          self.logger.error("Unable to process: %s" %(fileName))
      if(outputFile):
        outputFile.close()
    except Exception, E:
      if(self.logger != None):
        self.logger.exception(E)
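  # Usage sketch (hypothetical values; assumes these methods live on the wqXMRGProcessing
  # class referenced further below and that the instance has already been configured):
  #   proc = wqXMRGProcessing(logger=False)
  #   proc.importFilesIntoDB("/data/xmrg_incoming", deleteDataFiles=True)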
  def writeLatLonDB(self, fileName, dbFile, minLatLong=None, maxLatLong=None,db=None):
    #DWR 2012-10-29  
    retVal = True
    if(self.logger != None):
      self.logger.debug("writeLatLonDB File: %s BBOX: %f,%f %f,%f"\
                        %(fileName,minLatLong.latitude,minLatLong.longitude,maxLatLong.latitude,maxLatLong.longitude))
    #Database connection not supplied, so create it.
    if(db == None):
      db = dhecDB(dbFile, self.configSettings.loggerName)     
      if(self.logger != None):
        self.logger.debug("Loading spatialite: %s" %(self.configSettings.spatiaLiteLib))
      if(db.loadSpatiaLiteLib(self.configSettings.spatiaLiteLib) == False):
        if(self.logger != None):
          self.logger.debug("Error loading: %s Error: %s" %(self.configSettings.spatiaLiteLib,db.lastErrorMsg))

    xmrg = xmrgFile( self.configSettings.loggerName )
    xmrg.openFile( fileName )
    
    if( xmrg.readFileHeader() ):     
      self.logger.debug( "File Origin: X %d Y: %d Columns: %d Rows: %d" %(xmrg.XOR,xmrg.YOR,xmrg.MAXX,xmrg.MAXY))
      try:
        if( xmrg.readAllRows() ):
          
          #This is the database insert datetime.           
          datetime = time.strftime( "%Y-%m-%dT%H:%M:%S", time.localtime() )
          #Parse the filename to get the data time.
          (directory,filetime) = os.path.split( xmrg.fileName )
          (filetime,ext) = os.path.splitext( filetime )
          filetime = self.getCollectionDateFromFilename(filetime)
          #In the binary file, the data is stored as hundredths of mm. If we want to write the data as
          #inches, we need to divide by 2540 (25.4 * 100).
          dataConvert = 100.0 
          dataConvert = 25.4 * dataConvert 
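          #Worked example: a stored cell value of 2540 (hundredths of mm) / 2540.0 = 1.0 inch;
          #a value of 254 becomes 0.1 inches.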

          #Flag to specify if any non-zero values were found. No need to process the weighted averages 
          #below if nothing was found.
          rainDataFound=False 
          #If we are using a bounding box, let's get the row/col in hrap coords.
          llHrap = None
          urHrap = None
          startCol = 0
          startRow = 0
          if( minLatLong != None and maxLatLong != None ):
            llHrap = xmrg.latLongToHRAP(minLatLong,True,True)
            urHrap = xmrg.latLongToHRAP(maxLatLong,True,True)
            startCol = llHrap.column
            startRow = llHrap.row
          recsAdded = 0
          for row in range(startRow,xmrg.MAXY):
            for col in range(startCol,xmrg.MAXX):
              val = xmrg.grid[row][col]
              #If there is no precipitation value, or the value is erroneous, skip it unless we are saving all values.
              if( val <= 0 ):
                if(self.saveAllPrecipVals):
                  val = 0
                else:
                  continue
              else:
                val /= dataConvert
                
              hrap = hrapCoord( xmrg.XOR + col, xmrg.YOR + row )
              latlon = xmrg.hrapCoordToLatLong( hrap )                                
              latlon.longitude *= -1
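              #hrapCoordToLatLong appears to return west longitudes as positive values,
              #so negate to get the conventional negative (western hemisphere) longitude.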
              insertSQL = False
              if( minLatLong != None and maxLatLong != None ):
                if( xmrg.inBBOX( latlon, minLatLong, maxLatLong ) ):
                  insertSQL = True
              else:
                insertSQL = True
              if( insertSQL ):
                rainDataFound = True
                #Build polygon points. Each grid point represents a 4km square, so build a polygon
                #covering the grid cell whose lower-left corner is this point.
                hrapNewPt = hrapCoord( xmrg.XOR + col, xmrg.YOR + row + 1)
                latlonUL = xmrg.hrapCoordToLatLong( hrapNewPt )
                latlonUL.longitude *= -1
                hrapNewPt = hrapCoord( xmrg.XOR + col + 1, xmrg.YOR + row)
                latlonBR = xmrg.hrapCoordToLatLong( hrapNewPt )
                latlonBR.longitude *= -1
                hrapNewPt = hrapCoord( xmrg.XOR + col + 1, xmrg.YOR + row + 1)
                latlonUR = xmrg.hrapCoordToLatLong( hrapNewPt )
                latlonUR.longitude *= -1
                wkt = "POLYGON((%f %f,%f %f,%f %f,%f %f,%f %f))"\
                      %(latlon.longitude, latlon.latitude,
                        latlonUL.longitude, latlonUL.latitude, 
                        latlonUR.longitude, latlonUR.latitude, 
                        latlonBR.longitude, latlonBR.latitude, 
                        latlon.longitude, latlon.latitude, 
                        )
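                #Note the first vertex is repeated as the last one so the WKT ring is explicitly closed,
                #e.g. "POLYGON((-79.12 33.45,-79.12 33.48,-79.08 33.48,-79.08 33.45,-79.12 33.45))" (illustrative values).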
                #wkt = "POINT(%f %f)" %(latlon.longitude, latlon.latitude)
                sql = "INSERT INTO precipitation_radar \
                      (insert_date,collection_date,latitude,longitude,precipitation,geom) \
                      VALUES('%s','%s',%f,%f,%f,GeomFromText('%s',4326));" \
                      %( datetime,filetime,latlon.latitude,latlon.longitude,val,wkt)
                cursor = db.executeQuery( sql )
                #Problem with the query; since we are working with transactions, we have to rollback.
                if( cursor == None ):
                  self.logger.error( db.lastErrorMsg )
                  db.lastErrorMsg = None
                  db.DB.rollback()
                else:
                  recsAdded += 1
          #Commit the inserts.    
          db.commit()
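          #The commit above flushes all of this file's inserts in one transaction,
          #which is much faster than committing after every row.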
          if( self.logger != None ):
            self.logger.info( "Processed: %d rows. Added: %d records to database." %((row + 1),recsAdded))
          else:
            print( "Processed: %d rows. Added: %d records to database." %((row + 1),recsAdded) )
          #Now calculate the weighted averages for the watersheds and add the measurements to the multi-obs table.
          if(rainDataFound and self.calcWeightedAvg):
            self.calculateWeightedAverages(filetime,filetime,db,True)
      except Exception, E:
        self.lastErrorMsg = str(E)
        if(self.logger != None):
          self.logger.exception(E)
        else:
          traceback.print_exc()
        #DWR 2012-10-29  
        retVal = False
      
      #DWR 2012-10-29
      #Move the deletion of the file to a member function so child classes can control.
      xmrg.xmrgFile.close()
      xmrg.cleanUp(self.deleteSourceFile,self.deleteCompressedSourceFile)
      return(retVal)
  def writeShapefile(self, fileName, minLatLong=None, maxLatLong=None):
    import osgeo.ogr
    import osgeo.osr
    #DWR 2012-10-29  
    retVal = True
    xmrg = xmrgFile( self.configSettings.loggerName )
    xmrg.openFile( fileName )
    if( xmrg.readFileHeader() ):     
      self.logger.debug( "File Origin: X %d Y: %d Columns: %d Rows: %d" %(xmrg.XOR,xmrg.YOR,xmrg.MAXX,xmrg.MAXY))
      try:        
        if( xmrg.readAllRows() ):
          
          spatialReference = osgeo.osr.SpatialReference()
          spatialReference.ImportFromProj4('+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
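          #The proj4 string above describes plain WGS84 geographic coordinates
          #(equivalent to EPSG:4326), matching the lat/lon values computed from the HRAP grid.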
          driver = osgeo.ogr.GetDriverByName('ESRI Shapefile')

          #This is the database insert datetime.           
          datetime = time.strftime( "%Y-%m-%dT%H:%M:%S", time.localtime() )
          #Parse the filename to get the data time.
          (directory,filetime) = os.path.split( xmrg.fileName )
          (filetime,ext) = os.path.splitext( filetime )
          #Check to see if the shapefile already exists; if so, delete it, otherwise
          #we can't create the new shapefile.
          shapeFilename = '%s/%s.shp' %(self.shapefileDir,filetime)
          if(os.path.exists(shapeFilename)):
            os.remove(shapeFilename)
          shapeData = driver.CreateDataSource(shapeFilename)
          if(shapeData == None):
            self.logger.error("Unable to create shapefile: %s" %(shapeFilename))
            return(False)

          layer = shapeData.CreateLayer("xmrg", spatialReference, osgeo.ogr.wkbPolygon)
          # create an integer field for the feature id.
          fieldDefn = osgeo.ogr.FieldDefn('FID', osgeo.ogr.OFTInteger)
          # add the field to the shapefile
          layer.CreateField(fieldDefn)
          # create a field for the precipitation value.
          fieldDefn = osgeo.ogr.FieldDefn('precip', osgeo.ogr.OFTReal)
          # add the field to the shapefile
          layer.CreateField(fieldDefn)

          fieldDefn = osgeo.ogr.FieldDefn('latitude', osgeo.ogr.OFTReal)
          # add the field to the shapefile
          layer.CreateField(fieldDefn)
          fieldDefn = osgeo.ogr.FieldDefn('longitude', osgeo.ogr.OFTReal)
          # add the field to the shapefile
          layer.CreateField(fieldDefn)

          fieldDefn = osgeo.ogr.FieldDefn('HRAPX', osgeo.ogr.OFTInteger)
          # add the field to the shapefile
          layer.CreateField(fieldDefn)
          fieldDefn = osgeo.ogr.FieldDefn('HRAPY', osgeo.ogr.OFTInteger)
          # add the field to the shapefile
          layer.CreateField(fieldDefn)

          layerDefinition = layer.GetLayerDefn()
          """
          #Let's get rid of the xmrg verbiage so we have the time remaining.
          #The format for the time on these files is MMDDYYYY, sometimes with a trailing z for
          #the UTC time zone. 24 hour files don't have the z, or an hour.
          filetime = filetime.replace('24hrxmrg', '')
          filetime = filetime.replace('xmrg', '')
          dateformat = "%m%d%Y%Hz" 
          if( filetime.rfind( 'z' ) == -1 ):
            dateformat = "%m%d%Y"  
          #The XMRG time is UTC, however we want to store all our times as localtimes.
          isdst = time.localtime()[-1]
          offset = 0
          if (isdst):            
            offset = 4 * 3600
          else:
            offset = 5 * 3600
          #Using mktime() and localtime() is a hack. The time package in python doesn't have a way
          #to convert a struct_time in UTC to epoch secs. So I just use the local time functions to do what
          #I want instead of bringing in the calendar package which has the conversion.
          secs = time.mktime(time.strptime( filetime, dateformat ))
          secs -= offset
          filetime = time.strftime( "%Y-%m-%dT%H:00:00", time.localtime(secs) )
          """          
          filetime = self.getCollectionDateFromFilename(filetime)

          #In the binary file, the data is stored as hundredths of mm. If we want to write the data as
          #inches, we need to divide by 2540 (25.4 * 100).
          dataConvert = 100.0 
          dataConvert = 25.4 * dataConvert 

          #If we are using a bounding box, let's get the row/col in hrap coords.
          llHrap = None
          urHrap = None
          startCol = 0
          startRow = 0
          if( minLatLong != None and maxLatLong != None ):
            llHrap = xmrg.latLongToHRAP(minLatLong,True,True)
            urHrap = xmrg.latLongToHRAP(maxLatLong,True,True)
            startCol = llHrap.column
            startRow = llHrap.row
          recsAdded = 0
          featureId = 0
          for row in range(startRow,xmrg.MAXY):
            for col in range(startCol,xmrg.MAXX):
              val = xmrg.grid[row][col]
              #If there is no precipitation value, or the value is erroneous, skip it unless we are saving all values.
              if( val <= 0 ):
                if(self.saveAllPrecipVals):
                  val = 0
                else:
                  continue                                    
              else:
                val /= dataConvert
                
              hrap = hrapCoord( xmrg.XOR + col, xmrg.YOR + row )
              latlon = xmrg.hrapCoordToLatLong( hrap )                                
              latlon.longitude *= -1
              insertSQL = False
              if( minLatLong != None and maxLatLong != None ):
                if( xmrg.inBBOX( latlon, minLatLong, maxLatLong ) ):
                  insertSQL = True
              else:
                insertSQL = True
              if( insertSQL ):
                #Build polygon points. Each grid point represents a 4km square, so build a polygon
                #covering the grid cell whose lower-left corner is this point.
                hrapNewPt = hrapCoord( xmrg.XOR + col, xmrg.YOR + row + 1)
                latlonUL = xmrg.hrapCoordToLatLong( hrapNewPt )
                latlonUL.longitude *= -1
                hrapNewPt = hrapCoord( xmrg.XOR + col + 1, xmrg.YOR + row)
                latlonBR = xmrg.hrapCoordToLatLong( hrapNewPt )
                latlonBR.longitude *= -1
                hrapNewPt = hrapCoord( xmrg.XOR + col + 1, xmrg.YOR + row + 1)
                latlonUR = xmrg.hrapCoordToLatLong( hrapNewPt )
                latlonUR.longitude *= -1
                     
                # Create the ring for the polygon, closing it by repeating the first point.
                ring = osgeo.ogr.Geometry(osgeo.ogr.wkbLinearRing)
                polygon = osgeo.ogr.Geometry(osgeo.ogr.wkbPolygon)
                ring.AddPoint(latlon.longitude, latlon.latitude)
                ring.AddPoint(latlonUL.longitude, latlonUL.latitude)
                ring.AddPoint(latlonUR.longitude, latlonUR.latitude)
                ring.AddPoint(latlonBR.longitude, latlonBR.latitude)
                ring.AddPoint(latlon.longitude, latlon.latitude)
                polygon.AddGeometry(ring)

                # Create feature
                feature = osgeo.ogr.Feature(layerDefinition)
                feature.SetGeometry(polygon)
                #feature.SetFID(featureId)
                feature.SetField('FID', featureId)
                feature.SetField('precip', val)
                feature.SetField('latitude', latlon.latitude)
                feature.SetField('longitude', latlon.longitude)
                feature.SetField('HRAPX', hrap.column)
                feature.SetField('HRAPY', hrap.row)

                # Save feature
                layer.CreateFeature(feature)

                featureId += 1                            
      except Exception, E:
        self.lastErrorMsg = str(E)
        if(self.logger != None):
          self.logger.exception(E)
        else:
          traceback.print_exc()
        #DWR 2012-10-29            
        retVal = False
      
      #DWR 2012-10-29
      #Move the deletion of the file to a member function so child classes can control.
      xmrg.xmrgFile.close()
      xmrg.cleanUp(self.deleteSourceFile,self.deleteCompressedSourceFile)
      return(retVal)
                           )
    #nexrad_db_conn.db_connection.isolation_level = None
    nexrad_db_conn.db_connection.execute("PRAGMA synchronous = OFF")
    nexrad_db_conn.db_connection.execute("PRAGMA journal_mode = MEMORY")
  except Exception,e:
    if logger:
      logger.exception(e)

  else:
    for xmrg_filename in iter(inputQueue.get, 'STOP'):
      tot_file_time_start = time.time()
      if logger:
        logger.debug("ID: %s processing file: %s" % (current_process().name, xmrg_filename))

      xmrg_proc_obj = wqXMRGProcessing(logger=False)
      xmrg = xmrgFile(current_process().name)
      xmrg.openFile(xmrg_filename)

      #Data is stored in hundredths of mm; we want mm, so convert.
      dataConvert = 100.0
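      #Worked example: a stored cell value of 250 (hundredths of mm) / 100.0 = 2.5 mm.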

      if xmrg.readFileHeader():
        if logger:
          logger.debug("ID: %s File Origin: X %d Y: %d Columns: %d Rows: %d" %(current_process().name, xmrg.XOR,xmrg.YOR,xmrg.MAXX,xmrg.MAXY))
        try:
          read_rows_start = time.time()
          if xmrg.readAllRows():
            if logger:
              logger.debug("ID: %s(%f secs) to read all rows in file: %s" % (current_process().name, time.time() - read_rows_start, xmrg_filename))
            #This is the database insert datetime.
            #Parse the filename to get the data time.