Example #1
 def createWatershedSummaries(self):
   if(self.logger != None):
     self.logger.debug("Entering createWatershedSummaries.")
     
   db = dhecDB(self.configSettings.dbSettings['dbName'], None, self.logger)
   if(self.logger != None):
     self.logger.debug("Loading spatialite: %s" %(self.configSettings.spatiaLiteLib))
   if(db.loadSpatiaLiteLib(self.configSettings.spatiaLiteLib) == False):
     if(self.logger != None):
       self.logger.debug("Error loading: %s Error: %s" %(self.configSettings.spatiaLiteLib,db.lastErrorMsg))
     
   #Get the unique dates for the radar data.
   sql = "SELECT DISTINCT(collection_date) as date FROM precipitation_radar ORDER BY collection_date ASC;"
   rainDatesCursor = db.executeQuery(sql)            
   dates = []
   for dateRow in rainDatesCursor:         
     date = dateRow['date']
     dates.append(date)
   #Get all the raingauges.
   where = "WHERE platform_handle LIKE '%raingauge%'"
   rainGauges = db.getPlatforms(where)
   rainGaugeList = []
    #We create a list of the gauges. We don't loop directly over the cursor because, if
    #addSensor is set to True and we add a sensor and commit the changes to the database,
    #our open cursor is no longer valid.
   for rainGauge in rainGauges:
     rainGaugeList.append(rainGauge['short_name'])
     
   rainDatesCursor.close()
    #Calculate the weighted averages for each radar collection date.
   for date in dates:       
     self.calculateWeightedAverages(date,date,db,True,rainGaugeList)   
   db.DB.close()
 def vacuumDB(self):
   db = dhecDB(self.configSettings.dbSettings['dbName'], 'dhec_logger')
   if(db.vacuumDB() != None):
     self.logger.debug("Vacuumed database.")
     return(True)
   else:
     self.logger.error("Database vacuum failed: %s" % (db.lastErrorMsg))
     db.lastErrorMsg = ""
   return(False)
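
A minimal usage sketch for the methods above, assuming they live on a processing class; the class name xmrgProcessor and the config path are placeholders, not names from the source:

if __name__ == '__main__':
  #Hypothetical driver: construct the processor from its XML config and run the summaries.
  processor = xmrgProcessor('./config/dhec.xml')  #placeholder class name and path
  processor.createWatershedSummaries()
  if(not processor.vacuumDB()):
    print('Database vacuum failed; see the log for details.')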
Example #3
 def importFiles(self, importDirectory=None):
   try:
     if(importDirectory == None):
       importDirectory = self.importDirectory
     
     db = dhecDB(self.configSettings.dbSettings['dbName'], None, self.logger)
     if(self.logger != None):
       self.logger.debug("Loading spatialite: %s" %(self.configSettings.spatiaLiteLib))
     if(db.loadSpatiaLiteLib(self.configSettings.spatiaLiteLib) == False):
       if(self.logger != None):
         self.logger.debug("Error loading: %s Error: %s" %(self.configSettings.spatiaLiteLib,db.lastErrorMsg))
     
     startMonth = 'Apr'
     endMonth   = 'Oct'
     #Get a list of the files in the import dir.
     fileList = os.listdir(importDirectory)
     #If we want to skip certain months, let's pull those files out of the list.
     monthList = {'Jan': 1, 'Feb': 2, 'Mar': 3, "Apr": 4, "May": 5, "Jun": 6, "Jul": 7, "Aug": 8, "Sep": 9, "Oct": 10, "Nov": 11, "Dec": 12 }
     startMonth = monthList[startMonth]
     endMonth = monthList[endMonth]
             
     for fileName in fileList:    
       fileTime = self.getCollectionDateFromFilename(fileName)
       #Get the month from the time.
       month = time.strptime(fileTime, "%Y-%m-%dT%H:%M:%S")
       month = int(time.strftime("%m", month))
       #If the file is outside the month range we are interested in, go on to the next file. 
       if(month < startMonth or month > endMonth):
         continue
       fullPath = "%s/%s" %(importDirectory,fileName)  
       #Make sure we are trying to import a file and not a directory.
       if(os.path.isfile(fullPath) != True):
         self.logger.debug("%s is not a file, skipping" % (fullPath))
         continue          
       if( self.processXMRGFile(fullPath, db)):
         self.logger.debug("Successfully processed: %s" %(fileName))
       else:
         self.logger.error("Unable to process: %s" %(fileName))
     db.DB.close()                
   except Exception, E:
     self.lastErrorMsg = str(E) 
     if(self.logger != None):
       self.logger.exception(E)
     else:
        traceback.print_exc()
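
The month filter in importFiles can be pulled into a small standalone helper; a sketch (the helper name inMonthRange is ours, not from the source), assuming the same 'YYYY-mm-ddTHH:MM:SS' timestamps that getCollectionDateFromFilename returns:

import time

def inMonthRange(fileTime, startMonth=4, endMonth=10):
  #Parse the ISO-style timestamp and keep only months in [startMonth, endMonth].
  month = int(time.strftime("%m", time.strptime(fileTime, "%Y-%m-%dT%H:%M:%S")))
  return(startMonth <= month <= endMonth)

#April through October passes the filter, January does not.
print(inMonthRange('2011-07-04T13:00:00'))  #True
print(inMonthRange('2011-01-15T02:00:00'))  #False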
Example #4
 def vacuumDB(self):
   from stat import *
   
   retVal = False
   if(self.logger != None):
     stats = os.stat(self.configSettings.dbSettings['dbName'])
     self.logger.debug("Begin database vacuum. File size: %d" % (stats[ST_SIZE]))      
   db = dhecDB(self.configSettings.dbSettings['dbName'], None, self.logger)
   if(db.vacuumDB() != None):
     if(self.logger != None):
       stats = os.stat(self.configSettings.dbSettings['dbName'])
       self.logger.debug("Database vacuum completed. File size: %d" % (stats[ST_SIZE]))      
     retVal = True
   else:
     self.logger.error("Database vacuum failed: %s" % (db.lastErrorMsg))
     db.lastErrorMsg = ""
   db.DB.close()
   return(retVal)
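
The same vacuum-with-size-logging idea can be sketched standalone with the plain sqlite3 module instead of dhecDB (an assumption; dhecDB.vacuumDB's internals are not shown in the source):

import os
import sqlite3

def vacuumWithStats(dbPath):
  #Log the file size before and after VACUUM, as the method above does.
  before = os.stat(dbPath).st_size
  conn = sqlite3.connect(dbPath)
  conn.execute("VACUUM;")
  conn.close()
  after = os.stat(dbPath).st_size
  print("Vacuum complete. File size: %d -> %d bytes" % (before, after))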
  def backupData(self):
    self.logger.info("Beginning data backup/rollover.")
    
    curYear = time.strftime('%Y', time.localtime())
    backupDB = None
    filePath = "%s%s/" % (self.configSettings.dbBackupFile, curYear)
    backupFilename = "%s%s-dhec.db" % (filePath, curYear)
    ################################################################################
    #Test to see if the database exists.
    if(not os.path.isfile(backupFilename)):    
      #Check to see if the directory exists
      if(not os.path.exists(filePath)):
        os.mkdir(filePath)
        self.logger.info("Directory: %s does not exist, creating." % (filePath))
      
      if(self.configSettings.dbBackupSQLSchemaFile == None):  
        self.logger.info("File: %s does not exist, cannot continue." % (backupFilename))
        return
      #We've got a SQL file to create the schema with.
      else:
        backupDB = dhecDB(backupFilename, "dhec_logger")
        backupDB.DB.close()
        shellCmd = "sqlite3 \"%s\" < \"%s\""%(backupFilename,self.configSettings.dbBackupSQLSchemaFile)
        ret = os.system(shellCmd)
        self.logger.debug("Created database: %s with schema file: %s" %(backupFilename,self.configSettings.dbBackupSQLSchemaFile))
    else:
      backupDB = dhecDB(backupFilename, "dhec_logger")
      self.logger.info("Connecting to database: %s" % (backupFilename))

     ################################################################################
    #On a platform by platform basis, get all data that is not in the current month.    
    dbCursor = self.db.getRainGauges()
    gaugeList = []
    for row in dbCursor:    
      gaugeList.append(row['platform_handle'])
    dbCursor.close()
    #Cutoff date, we want to keep last 30 days.      
    cutoffDate = time.strftime("%Y-%m-%dT00:00:00", time.localtime(time.time() - (30 * 24 * 60 * 60)))
    for platformHandle in gaugeList:
       #Get the m_types for the sensors we will roll over into the backup database.
       #(self, obsName, uom, platform, sOrder=1 ):
       mTypeClauses = []
       precipMType = self.db.getMTypeFromObsName("precipitation", "in", platformHandle, 1)
       if(precipMType != None):
         mTypeClauses.append("m_type_id=%d" % (precipMType))
       precipDailySum = self.db.getMTypeFromObsName("precipitation_accumulated_daily", "in", platformHandle, 1)
       if(precipDailySum != None):
         mTypeClauses.append("m_type_id=%d" % (precipDailySum))
       windSpd = self.db.getMTypeFromObsName("wind_speed", "mph", platformHandle, 1)
       if(windSpd != None):
         mTypeClauses.append("m_type_id=%d" % (windSpd))
       windDir = self.db.getMTypeFromObsName("wind_from_direction", "degrees_true", platformHandle, 1)
       if(windDir != None):
         mTypeClauses.append("m_type_id=%d" % (windDir))

       if(len(mTypeClauses) == 0):
         self.logger.error("No m_type_ids found for platform: %s" % (platformHandle))
         return
       mTypeList = "(" + " OR ".join(mTypeClauses) + ") AND "
      self.logger.info("Processing multi_obs table data for platform: %s" % (platformHandle))
      sql = "SELECT * FROM multi_obs\
             WHERE\
             m_date < '%s' AND\
             %s\
             platform_handle='%s' ORDER BY m_date ASC"\
             %(cutoffDate, mTypeList, platformHandle) 
      resultsCursor = self.db.executeQuery(sql)
      rowCnt = 0
      if(resultsCursor != None):
        for item in resultsCursor:
          obsName = ''
          uom = ''
          mVals = []
          mVals.append(float(item['m_value']))
          if( int(item['m_type_id']) == precipMType ):
            obsName = 'precipitation'
            uom = 'in'
            mVals.append(float(item['m_value_2']))
            mVals.append(float(item['m_value_3']))
          elif( int(item['m_type_id']) == precipDailySum ):
            obsName = 'precipitation_accumulated_daily'
            uom = 'in'
          elif( int(item['m_type_id']) == windSpd ):
            obsName = 'wind_speed'
            uom = 'mph'
          elif( int(item['m_type_id']) == windDir ): 
            obsName = 'wind_from_direction'
            uom = 'degrees_true'
          if(backupDB.addMeasurement(obsName, uom,
                                     platformHandle,
                                     item['m_date'],
                                     item['m_lat'], item['m_lon'],
                                     0,
                                     mVals,
                                     1,
                                     False) != True):
            self.logger.critical( "%s Function: %s Line: %s File: %s"\
                               %(backupDB.lastErrorMsg,backupDB.lastErrorFunc, backupDB.lastErrorLineNo, backupDB.lastErrorFile) )
            return
          
          rowCnt += 1               
        if(not backupDB.commit()):
          self.logger.error(backupDB.lastErrorMsg)
           sys.exit(-1)
        self.logger.info("Successfully processed and committed: %d rows into backup." % (rowCnt))
        resultsCursor.close()
        
        #Now we delete the records from the source DB.
        self.logger.info("Deleting backed up records from source database.")
        sql = "DELETE FROM multi_obs WHERE m_date < '%s' and platform_handle='%s'" % (cutoffDate, platformHandle)
        resultsCursor = self.db.executeQuery(sql)
         if(resultsCursor != None):
           if(not self.db.commit()):
             self.logger.error(self.db.lastErrorMsg)
             sys.exit(-1)
           resultsCursor.close()
         else:
           self.logger.error(self.db.lastErrorMsg)
           sys.exit(-1)
        
    self.logger.info("Finished data backup/rollover.")
  def __init__(self, xmlConfigFile):
    self.lastErrorFile = ''    
    self.lastErrorLineNo = ''    
    self.lastErrorFunc = ''    
    
    self.totalLinesProcd = 0        #Total number of lines processed from all files.
    self.totalLinesUnprocd = 0      #Total number of lines unable to be processed for some reason.
    self.totalTime = 0.0            #Total execution time.
    try:
      #xmlTree = etree.parse(xmlConfigFile)
      self.configSettings = dhecConfigSettings(xmlConfigFile)
      
      #Create our logging object.
      if(self.configSettings.logFile == None):
        print('ERROR: //logging/logDir not defined in config file. Terminating script')
        sys.exit(-1)

      self.logger = logging.getLogger("dhec_logger")
      self.logger.setLevel(logging.DEBUG)
      # create formatter and add it to the handlers
      formatter = logging.Formatter("%(asctime)s,%(name)s,%(levelname)s,%(lineno)d,%(message)s")

      #Create the log rotation handler.
      handler = logging.handlers.RotatingFileHandler(self.configSettings.logFile, "a", self.configSettings.maxBytes, self.configSettings.backupCount)
      handler.setLevel(logging.DEBUG)
      handler.setFormatter(formatter)    
      # add the handler to the logger
      self.logger.addHandler(handler)
      self.logger.info('Log file opened')
      
      if(self.configSettings.dbSettings['dbName'] == None):
        self.logger.error('ERROR: //database/db/name not defined in config file. Terminating script')
        sys.exit(-1)
      self.logger.debug('Database path: %s' % (self.configSettings.dbSettings['dbName']))
      self.db = dhecDB(self.configSettings.dbSettings['dbName'],"dhec_logger")
            
      #Get a file list for the directory.
      if(self.configSettings.rainGaugeFileDir == None):
        self.logger.error('ERROR: //rainGaugeProcessing/rainGaugeFileDir not defined in config file. Terminating script')
        sys.exit(-1)
      
      self.logger.debug('Directory for rain gauge data: %s' % self.configSettings.rainGaugeFileDir)
      
      #Check the settings for ftping the rain gauge data
      if(self.configSettings.rainGaugeFTPAddy == None):
        self.logger.error('ERROR: //rainGaugeProcessing/ftp/ip not defined in config file. Terminating script')
        sys.exit(-1)
      if(self.configSettings.rainGaugeFTPUser == None):
        self.logger.error('ERROR: //rainGaugeProcessing/ftp/user not defined in config file. Terminating script')
        sys.exit(-1)
      if(self.configSettings.rainGaugeFTPPwd == None):
        self.logger.error('ERROR: //rainGaugeProcessing/ftp/passwd not defined in config file. Terminating script')
        sys.exit(-1)
      if(self.configSettings.rainGaugeFTPDir == None):
        self.logger.error('ERROR: //rainGaugeProcessing/ftp/fileDir not defined in config file. Terminating script')
        sys.exit(-1)
      if(self.configSettings.delServerFiles == None):
        self.logger.error('ERROR: //rainGaugeProcessing/ftp/delServerFile not defined in config file. Terminating script')
        sys.exit(-1)
        
      self.logger.debug('Raingauge FTP Info: IP: %s User: %s Pwd: %s Dir: %s Delete Server Files: %d' % 
                         (self.configSettings.rainGaugeFTPAddy, self.configSettings.rainGaugeFTPUser, self.configSettings.rainGaugeFTPPwd, self.configSettings.rainGaugeFTPDir, self.configSettings.delServerFiles))
      
      if(self.configSettings.kmlFilePath == None):
        self.logger.error('ERROR: //rainGaugeProcessing/outputs/kml/filePath not defined in config file, cannot output KML file')
        
      self.emailList = []
      emailList = self.configSettings.getEntry('//rainGaugeProcessing/alert/emailList')
      if(emailList != None):
        #List of email addresses to send the alert to.
        emailList = emailList.split(',')
        for email in emailList:
          self.emailList.append(email) 
        val = self.configSettings.getEntry('//rainGaugeProcessing/alert/lagTimeAlert')
        if(val != None):
          #This is the number of hours we can miss updates for the rain gauge data before we send an email alert.
          #Convert it into seconds.
          self.lagAlertTime = (float(val) * 3600.0)
        else:
          self.logger.error("ERROR: //rainGaugeProcessing/alert/lagTimeAlert missing, cannot send email alert if data is missing")      
      else:
        self.logger.debug("//rainGaugeProcessing/alert/emailList missing, cannot send email alert if data is missing")
                              
    except OSError, e:
      print('ERROR: ' + str(e) + ' Terminating script')
      sys.exit(-1)
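
The logging setup __init__ performs can be reproduced standalone; a sketch with a placeholder file name and rotation limits (the real values come from the XML config):

import logging
import logging.handlers

logger = logging.getLogger("dhec_logger")
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(asctime)s,%(name)s,%(levelname)s,%(lineno)d,%(message)s")
#Placeholder path and limits: rotate at 1 MB, keep 5 backups.
handler = logging.handlers.RotatingFileHandler("./dhec.log", "a", maxBytes=1024 * 1024, backupCount=5)
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.info('Log file opened')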
Example #7
  def writeLatLonDB(self, fileName, dbFile, minLatLong=None, maxLatLong=None, db=None):
    #DWR 2012-10-29  
    retVal = True
    if(self.logger != None):
      self.logger.debug("writeLatLonDB File: %s BBOX: %f,%f %f,%f"\
                        %(fileName,minLatLong.latitude,minLatLong.longitude,maxLatLong.latitude,maxLatLong.longitude))
    #Database connection not supplied, so create it.
    if(db == None):
      db = dhecDB(dbFile, self.configSettings.loggerName)     
      if(self.logger != None):
        self.logger.debug("Loading spatialite: %s" %(self.configSettings.spatiaLiteLib))
      if(db.loadSpatiaLiteLib(self.configSettings.spatiaLiteLib) == False):
        if(self.logger != None):
          self.logger.debug("Error loading: %s Error: %s" %(self.configSettings.spatiaLiteLib,db.lastErrorMsg))

    xmrg = xmrgFile( self.configSettings.loggerName )
    xmrg.openFile( fileName )
    
    if( xmrg.readFileHeader() ):     
      self.logger.debug( "File Origin: X %d Y: %d Columns: %d Rows: %d" %(xmrg.XOR,xmrg.YOR,xmrg.MAXX,xmrg.MAXY))
      try:
        if( xmrg.readAllRows() ):
          
          #This is the database insert datetime.           
          datetime = time.strftime( "%Y-%m-%dT%H:%M:%S", time.localtime() )
          #Parse the filename to get the data time.
          (directory,filetime) = os.path.split( xmrg.fileName )
          (filetime,ext) = os.path.splitext( filetime )
          filetime = self.getCollectionDateFromFilename(filetime)
          #In the binary file, the data is stored as hundredths of mm; to write the data as
          #inches, we need to divide by 2540 (25.4 mm per inch * 100).
          dataConvert = 25.4 * 100.0

          #Flag to specify whether any non-zero values were found. No need to process the weighted
          #averages below if nothing was found.
          rainDataFound=False 
          #If we are using a bounding box, let's get the row/col in hrap coords.
          llHrap = None
          urHrap = None
          startCol = 0
          startRow = 0
          if( minLatLong != None and maxLatLong != None ):
            llHrap = xmrg.latLongToHRAP(minLatLong,True,True)
            urHrap = xmrg.latLongToHRAP(maxLatLong,True,True)
            startCol = llHrap.column
            startRow = llHrap.row
          recsAdded = 0
          for row in range(startRow,xmrg.MAXY):
            for col in range(startCol,xmrg.MAXX):
              val = xmrg.grid[row][col]
              #If there is no precipitation value, or the value is erroneous, record a zero or skip it.
              if( val <= 0 ):
                if(self.saveAllPrecipVals):
                  val = 0
                else:
                  continue
              else:
                val /= dataConvert
                
              hrap = hrapCoord( xmrg.XOR + col, xmrg.YOR + row )
              latlon = xmrg.hrapCoordToLatLong( hrap )                                
              latlon.longitude *= -1
              insertSQL = False
              if( minLatLong != None and maxLatLong != None ):
                if( xmrg.inBBOX( latlon, minLatLong, maxLatLong ) ):
                  insertSQL = True
              else:
                insertSQL = True
              if( insertSQL ):
                rainDataFound = True
                #Build polygon points. Each grid point represents a 4km square, so we create a polygon
                #covering the grid cell anchored at that point.
                hrapNewPt = hrapCoord( xmrg.XOR + col, xmrg.YOR + row + 1)
                latlonUL = xmrg.hrapCoordToLatLong( hrapNewPt )
                latlonUL.longitude *= -1
                hrapNewPt = hrapCoord( xmrg.XOR + col + 1, xmrg.YOR + row)
                latlonBR = xmrg.hrapCoordToLatLong( hrapNewPt )
                latlonBR.longitude *= -1
                hrapNewPt = hrapCoord( xmrg.XOR + col + 1, xmrg.YOR + row + 1)
                latlonUR = xmrg.hrapCoordToLatLong( hrapNewPt )
                latlonUR.longitude *= -1
                wkt = "POLYGON((%f %f,%f %f,%f %f,%f %f,%f %f))"\
                      %(latlon.longitude, latlon.latitude,
                        latlonUL.longitude, latlonUL.latitude, 
                        latlonUR.longitude, latlonUR.latitude, 
                        latlonBR.longitude, latlonBR.latitude, 
                        latlon.longitude, latlon.latitude, 
                        )
                #wkt = "POINT(%f %f)" %(latlon.longitude, latlon.latitude)
                sql = "INSERT INTO precipitation_radar \
                      (insert_date,collection_date,latitude,longitude,precipitation,geom) \
                      VALUES('%s','%s',%f,%f,%f,GeomFromText('%s',4326));" \
                      %( datetime,filetime,latlon.latitude,latlon.longitude,val,wkt)
                cursor = db.executeQuery( sql )
                #Problem with the query; since we are working with transactions, we have to rollback.
                if( cursor == None ):
                  self.logger.error( db.lastErrorMsg )
                  db.lastErrorMsg = None
                  db.DB.rollback()
                else:
                  recsAdded += 1
          #Commit the inserts.    
          db.commit()
          if( self.logger != None ):
            self.logger.info( "Processed: %d rows. Added: %d records to database." %((row + 1),recsAdded))
          else:
            print( 'Processed %d rows. Added: %d records to database.' % (row + 1, recsAdded) )
          #Now calc the weighted averages for the watersheds and add the measurements to the multi_obs table.
          if(rainDataFound and self.calcWeightedAvg):
            self.calculateWeightedAverages(filetime,filetime,db,True)
      except Exception, E:
        self.lastErrorMsg = str(E)
        if(self.logger != None):
          self.logger.exception(E)
        else:
          traceback.print_exc()
        #DWR 2012-10-29  
        retVal = False
      
      #DWR 2012-10-29
      #Move the deletion of the file to a member function so child classes can control.
      xmrg.xmrgFile.close()
      xmrg.cleanUp(self.deleteSourceFile,self.deleteCompressedSourceFile)
      return(retVal)
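
The hundredths-of-mm to inches conversion used above is easy to check in isolation; a quick sketch:

def xmrgToInches(rawVal):
  #XMRG stores precipitation in hundredths of a millimeter, so divide by 25.4 * 100 = 2540.
  return(rawVal / 2540.0)

print(xmrgToInches(254))   #254 hundredths of mm = 2.54 mm = 0.1 inches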
Example #8
  def getLatestHourXMRGData(self):   
    import datetime
    from datetime import tzinfo
    from pytz import timezone

    try: 
      self.remoteFileDL = getRemoteFiles.remoteFileDownload( self.configSettings.baseURL, self.configSettings.xmrgDLDir, 'b', False, None, True, self.logger)

      #Clean out any data older than xmrgKeepLastNDays.
      db = dhecDB(self.configSettings.dbSettings['dbName'], None, self.logger)
      if(self.logger != None):
        self.logger.debug("Loading spatialite: %s" %(self.configSettings.spatiaLiteLib))
      if(db.loadSpatiaLiteLib(self.configSettings.spatiaLiteLib) == False):
        if(self.logger != None):
          self.logger.debug("Error loading: %s Error: %s" %(self.configSettings.spatiaLiteLib,db.lastErrorMsg))
            
      
      #DWR 2011-06-23
      #Since we process on the previous day, make the starting time the beginning of the current day.
      #Use the datetime object to improve the code readability.
      nowTime = datetime.datetime.now(timezone('UTC'))
      curDay = nowTime.replace(hour=0, minute=0, second=0, microsecond=0)
      
      #Clean up data older than xmrgKeepLastNDays
      olderThan = curDay - datetime.timedelta(days=self.xmrgKeepLastNDays)
      #2013-10-29
      #Fix up bug in calling function.
      db.cleanPrecipRadar(olderThan)
      #db.cleanPrecipRadar(olderThan.strftime(nowTime))
      #Current time minus N days worth of seconds.
      #timeNHoursAgo = time.time() - ( self.xmrgKeepLastNDays * 24 * 60 * 60 ) 
      #currentDateTime = time.strftime( "%Y-%m-%dT%H:%M:%S", time.localtime(timeNHoursAgo))
      #db.cleanPrecipRadar(currentDateTime)
            
      dateList=[]
      #The latest completed hour will be current hour - 1.
      #hr = time.time()-3600
      #latestHour = time.strftime( "%Y-%m-%dT%H:00:00", time.localtime(hr))
      latestHour = nowTime - datetime.timedelta(hours=1)
      latestHour = latestHour.strftime("%Y-%m-%dT%H:00:00")
      #add it to our list to process
      dateList.append(latestHour)
      
      #Are we going to try to backfill any gaps in the data?
      if(self.backfillLastNDays):
        #baseTime = time.time()-3600
        baseTime = nowTime - datetime.timedelta(hours=1)
        #Now let's build a list of the last N hours of data we should have to see if we have any holes
        #to fill.
        lastNHours = self.backfillLastNDays * 24
        for x in range(lastNHours):
          hr = x + 1
          dateTime = baseTime - datetime.timedelta(hours=hr)
          dateList.append(dateTime.strftime("%Y-%m-%dT%H:00:00"))
          #DWR 2011-06-23
          #This is incorrect, was building the list with local time where data in the database is in UTC.
          #datetime = time.strftime("%Y-%m-%dT%H:00:00", time.localtime(baseTime - ((x+1) * 3600)))          
          #dateList.append(datetime)
        sql = "SELECT DISTINCT(collection_date) as date FROM precipitation_radar ORDER BY collection_date DESC;"
        dbCursor = db.executeQuery(sql)
        if(dbCursor != None):
          #Now loop through and remove from our list any date that is already in the database;
          #whatever remains are the gaps.
          for row in dbCursor:
            dbDate = row['date'] 
            for x in range(len(dateList)):
              if(dbDate == dateList[x]):
                dateList.pop(x)
                break          
          dbCursor.close()
                    
      if(self.logger != None):
        self.logger.debug( "Date/times missing in XMRG database: %s" % (dateList))
      for date in dateList:
        xmrgFilename = self.buildXMRGFilename(date,False)
        if(self.logger != None):
          self.logger.info( "Date: %s XMRG Filename: %s" %(date,xmrgFilename))
        #Now we try to download the file.
        fileName = self.getXMRGFile(xmrgFilename)
        if( fileName != None ):
          if(self.logger != None):
            self.logger.info( "Processing XMRG File: %s" %(fileName))
          self.processXMRGFile(fileName, db)
        else:
          self.logger.error( "Date: %s Unable to download file: %s" %(date, xmrgFilename))
          
    except Exception, E:
      self.lastErrorMsg = str(E) 
      if(self.logger != None):
        self.logger.exception(E)
      else:
        traceback.print_exc()
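
A standalone sketch of the gap-detection idea above (the helper name findMissingHours is ours; dbDates stands in for the collection_date values pulled from precipitation_radar):

import datetime

def findMissingHours(dbDates, nDays, now=None):
  #Build the expected hourly timestamps for the last nDays and drop the ones already present.
  if(now == None):
    now = datetime.datetime.utcnow().replace(minute=0, second=0, microsecond=0)
  have = set(dbDates)
  expected = [(now - datetime.timedelta(hours=h)).strftime("%Y-%m-%dT%H:00:00")
              for h in range(1, nDays * 24 + 1)]
  return([d for d in expected if d not in have])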