def layerIfReady(self, parms, configFile):
    """Trigger layering when the model state allows it.

    Parameters
    ----------
    parms : Parms
       Parameters
    configFile : string
       name of file with settings

    Returns
    -------
    bool
       True if layering was done now, or had already been done
    """
    if self._layered:
        return True

    haveHrrr = self._hrrr
    haveRap = self._rap
    if not (haveHrrr or haveRap):
        # neither input available yet
        return False

    if haveHrrr and haveRap:
        # both inputs present: layer immediately
        self._layer(parms, configFile)
        self._layered = True
        return True

    if haveRap:
        # RAP only (HRRR/RAP-both was handled above): pass RAP through
        # alone once the configured wait has expired
        waited = (datetime.datetime.utcnow() - self._clockTime).total_seconds()
        if waited > parms._maxWaitSeconds:
            WhfLog.debug("timeout..Should layer, dt=%d", waited)
            self._passthroughRap(parms)
            self._layered = True
            return True
    return False
    def _forecastExists(self, dir):
        """ Check if forecast indicated by local state exists

        Parameters
        ----------
        dir : str
           Full path to the issue time directories

        Returns
        -------
        bool
           True if the forecast does exist on disk
        """
           
        path = dir + "/"
        path += self._issue.strftime("%Y%m%d%H")
        if (os.path.isdir(path)):
            fname = self._valid.strftime("%Y%m%d%H%M") + ".LDASIN_DOMAIN1.nc"
            names = df.getFileNames(path)
            for n in names:
                if (n == fname):
                    WhfLog.debug("Found %s in %s",  fname, path)
                    return True
            return False
        else:
            return False
    def _forecastExists(self, dir):
        """ Check if forecast indicated by local state exists

        Parameters
        ----------
        dir : str
           Full path to the issue time directories

        Returns
        -------
        bool
           True if the forecast does exist on disk
        """

        path = dir + "/"
        path += self._issue.strftime("%Y%m%d%H")
        if (os.path.isdir(path)):
            fname = self._valid.strftime("%Y%m%d%H%M") + ".LDASIN_DOMAIN1.nc"
            names = df.getFileNames(path)
            for n in names:
                if (n == fname):
                    WhfLog.debug("Found %s in %s", fname, path)
                    return True
            return False
        else:
            return False
Exemplo n.º 4
0
   def _allDataFiles(self):
      """Return every data file on disk, ordered oldest to newest.

      Returns
      -------
      list[DataFile]
         The DataFile specs, in ascending time order
      """
      # yyyymmdd subdirectories, ascending
      dayDirs = sorted(getYyyymmddSubdirectories(self._topDir))
      if not dayDirs:
         # nothing on disk at all
         WhfLog.debug("_allDataFiles: No data in %s", self._topDir)
         return []
      # concatenate the per-day listings into one array
      allFiles = []
      for day in dayDirs:
         allFiles.extend(self._allDataFilesInDir(day))
      return allFiles
Exemplo n.º 5
0
def run(fileType, configFile, realtime):
   """ Run the script, process any new data

   Parameters
   ----------
   fileType: str
      'HRRR', 'RAP', 'MRMS', or 'GFS'
   configFile : str
      Name of the file with settings
   realtime : boolean
      True if this is realtime

   Returns
   -------
   int
      1 for error, 0 for success
   """
   regriddable = ['HRRR', 'RAP', 'MRMS', 'GFS']
   if fileType not in regriddable:
      print('ERROR unknown file type command arg  %s' % fileType)
      return 1

   # User must pass the config file into the main driver.
   if not os.path.exists(configFile):
      print('ERROR forcing engine config file not found: %s' % configFile)
      return 1

   # read in fixed main params
   parms = parmRead(configFile, fileType, realtime)

   # if there is not a state file, create one now using newest
   if not os.path.exists(parms._stateFile):
      parms.debugPrint()
      createStateFile(parms, fileType, realtime)

   # read in state
   state = State(parms._stateFile, fileType)

   # query each directory and get newest model run file for each, then
   # get all for that and previous issue time
   data = df.DataFiles(parms._dataDir, parms._maxFcstHour, fileType)
   data.setNewestFiles(parms._hoursBack)

   # Update the state to reflect changes, returning those files to regrid.
   # Regrid 'em; a failure on one file should not stop the rest.
   toProcess = state.lookForNew(data, parms._hoursBack, fileType)
   for f in toProcess:
      try:
         regrid(f, fileType, configFile)
      except Exception:
         # was a bare 'except:', which also swallowed SystemExit and
         # KeyboardInterrupt; Exception keeps the best-effort behavior
         WhfLog.error("Could not regrid/downscale %s", f)
      else:
         WhfLog.debug("Adding new file %s, and writing state file", f)
         if not state.addFileIfNew(f):
            WhfLog.error("File %s was not new", f)
         else:
            state.write(parms._stateFile, fileType)

   # write out state (in case it has not been written yet) and exit
   state.write(parms._stateFile, fileType)
   return 0
Exemplo n.º 6
0
def run(fileType, configFile, realtime):
   """ Run the script, process any new data

   Parameters
   ----------
   fileType: str
      'HRRR', 'RAP', 'MRMS', or 'GFS'
   configFile : str
      Name of the file with settings
   realtime : boolean
      True if this is realtime

   Returns
   -------
   int
      1 for error, 0 for success
   """
   regriddable = ['HRRR', 'RAP', 'MRMS', 'GFS']
   if fileType not in regriddable:
      print('ERROR unknown file type command arg  %s' % fileType)
      return 1

   # User must pass the config file into the main driver.
   if not os.path.exists(configFile):
      print('ERROR forcing engine config file not found: %s' % configFile)
      return 1

   # read in fixed main params
   parms = parmRead(configFile, fileType, realtime)

   # if there is not a state file, create one now using newest
   if not os.path.exists(parms._stateFile):
      parms.debugPrint()
      createStateFile(parms, fileType, realtime)

   # read in state
   state = State(parms._stateFile, fileType)

   # query each directory and get newest model run file for each, then
   # get all for that and previous issue time
   data = df.DataFiles(parms._dataDir, parms._maxFcstHour, fileType)
   data.setNewestFiles(parms._hoursBack)

   # Update the state to reflect changes, returning those files to regrid.
   # A failure on one file should not stop processing of the rest.
   toProcess = state.lookForNew(data, parms._hoursBack, fileType)
   for f in toProcess:
      try:
         regrid(f, fileType, configFile)
      except Exception:
         # narrowed from a bare 'except:' so SystemExit and
         # KeyboardInterrupt are no longer swallowed
         WhfLog.error("Could not regrid/downscale %s", f)
      else:
         WhfLog.debug("Adding new file %s, and writing state file", f)
         if not state.addFileIfNew(f):
            WhfLog.error("File %s was not new", f)
         else:
            state.write(parms._stateFile, fileType)

   # write out state (in case it has not been written yet) and exit
   state.write(parms._stateFile, fileType)
   return 0
Exemplo n.º 7
0
   def lookForNew(self, data, hoursBack, fileType):
      """ Compare incoming data against state, returning what is new.

      If a newer issue time has arrived, older entries are purged from
      state (via _analyzeNewest).

      Parameters
      ----------
      data: DataFiles
         The newest data
      hoursBack: int
         Maximum number of hours back to keep data in state
      fileType : str
         'HRRR', 'RAP', ...

      Returns
      -------
      list[str]
          The data file names that are to be added to state
      """
      fnames = data.getFnames()
      if not fnames:
         return []

      if self.isEmpty():
         WhfLog.debug("Adding to empty list")
      else:
         sname = self.newest()
         if not sname:
            WhfLog.error("Expected file, got none")
            return []
         self._analyzeNewest(fnames[-1], sname, hoursBack, fileType)
      # everything not already known to state is new
      return [fname for fname in fnames if self._isNew(fname)]
def forecastExists(dir, issueTime, fcstHour):
    """ Check whether a forecast file exists on disk.

    Parameters
    ----------
    dir : str
       Full path to the issue time directories
    issueTime : datetime
       The issue time (y,m,d,h)
    fcstHour:  int
       should be 0 or 3

    Returns
    -------
    bool
       True if the forecast does exist on disk
    """
    path = dir + "/" + issueTime.strftime("%Y%m%d%H")
    if not os.path.isdir(path):
        return False
    validTime = issueTime + datetime.timedelta(hours=fcstHour)
    wanted = validTime.strftime("%Y%m%d%H%M") + ".LDASIN_DOMAIN1.nc"
    for name in df.getFileNames(path):
        if name == wanted:
            WhfLog.debug("Found %s in %s",  wanted, path)
            return True
    return False
def obsExists(dir, issueTime):
    """ Check whether the MRMS obs file for an issue time exists on disk.

    Parameters
    ----------
    dir : str
       Full path to the MRMS directories
    issueTime : datetime
       The issue time (y,m,d,h)

    Returns
    -------
    bool
       True if the data does exist on disk
    """
    path = dir + "/" + issueTime.strftime("%Y%m%d%H")
    if not os.path.isdir(path):
        return False
    wanted = issueTime.strftime("%Y%m%d%H%M") + ".LDASIN_DOMAIN1.nc"
    for name in df.getFileNames(path):
        if name == wanted:
            WhfLog.debug("Found %s in %s",  wanted, path)
            return True
    return False
def obsExists(dir, issueTime):
    """ Determine whether the MRMS obs for an issue time is on disk.

    Parameters
    ----------
    dir : str
       Full path to the MRMS directories
    issueTime : datetime
       The issue time (y,m,d,h)

    Returns
    -------
    bool
       True if the data does exist on disk
    """
    issueDir = "%s/%s" % (dir, issueTime.strftime("%Y%m%d%H"))
    if os.path.isdir(issueDir):
        target = issueTime.strftime("%Y%m%d%H%M") + ".LDASIN_DOMAIN1.nc"
        if target in df.getFileNames(issueDir):
            WhfLog.debug("Found %s in %s", target, issueDir)
            return True
    return False
    def layerIfReady(self, parms, configFile):
        """ Layer the model data if the current state allows it.

        Parameters
        ----------
        parms : Parms
           Parameters
        configFile : string
           name of file with settings

        Returns
        -------
        bool
           True if layering was done, or had previously been done
        """
        if self._layered:
            return True
        if not self._hrrr and not self._rap:
            return False
        if self._hrrr and self._rap:
            # both inputs present, layer right away
            self._layer(parms, configFile)
            self._layered = True
            return True
        if self._rap:
            # RAP only (both-present case handled above): after waiting
            # long enough, pass RAP through by itself
            waited = (datetime.datetime.utcnow() - self._clockTime).total_seconds()
            if waited > parms._maxWaitSeconds:
                WhfLog.debug("timeout..Should layer, dt=%d", waited)
                self._passthroughRap(parms)
                self._layered = True
                return True
        return False
def run(configFile, realtime):
    """ Run the script, process any new CFS data

    Parameters
    ----------
    configFile : str
        Name of the file with settings
    realtime : boolean
        True if this is realtime

    Returns
    -------
    int
        1 for error, 0 for success
    """
    # User must pass the config file into the main driver.
    if not os.path.exists(configFile):
        print('ERROR forcing engine config file not found.')
        return 1

    # read in fixed main params
    parms = parmRead(configFile, realtime)

    # if there is not a state file, create one now using newest
    if not os.path.exists(parms._stateFile):
        parms.debugPrint()
        createStateFile(parms, realtime)

    # begin normal processing situation
    WhfLog.debug("....Check for new input data to regid")

    # read in state
    state = State(parms._stateFile)

    # query directory and get newest model run file, then
    # get all for that and previous issue time
    cfs = df.DataFiles(parms._cfsDir, parms._maxFcstHourCfs, "CFS")
    cfs.setNewestFiles(parms._hoursBackCfs)

    # regrid whatever is newly available
    toProcess = state.updateWithNew(cfs, parms._hoursBackCfs)
    for f in toProcess:
        regridCFS(configFile, f)

    # write out state and exit
    state.write(parms._stateFile)
    return 0
def forecastExists(dir, issueTime, fcstHour):
    """ Determine whether the forecast for issueTime + fcstHour is on disk.

    Parameters
    ----------
    dir : str
       Full path to the issue time directories
    issueTime : datetime
       The issue time (y,m,d,h)
    fcstHour:  int
       should be 0 or 3

    Returns
    -------
    bool
       True if the forecast does exist on disk
    """
    issueDir = "%s/%s" % (dir, issueTime.strftime("%Y%m%d%H"))
    if os.path.isdir(issueDir):
        validTime = issueTime + datetime.timedelta(hours=fcstHour)
        target = validTime.strftime("%Y%m%d%H%M") + ".LDASIN_DOMAIN1.nc"
        if target in df.getFileNames(issueDir):
            WhfLog.debug("Found %s in %s", target, issueDir)
            return True
    return False
Exemplo n.º 14
0
   def lookForNew(self, data, hoursBack, fileType):
      """ Report data files that are new relative to the current state.

      A newer issue time triggers purging of old state entries
      (handled by _analyzeNewest).

      Parameters
      ----------
      data: DataFiles
         The newest data
      hoursBack: int
         Maximum number of hours back to keep data in state
      fileType : str
         'HRRR', 'RAP', ...

      Returns
      -------
      list[str]
          The data file names that are to be added to state
      """
      newOnes = []
      fnames = data.getFnames()
      if not fnames:
         return newOnes

      if not self.isEmpty():
         sname = self.newest()
         if not sname:
            WhfLog.error("Expected file, got none")
            return newOnes
         self._analyzeNewest(fnames[-1], sname, hoursBack, fileType)
      else:
         WhfLog.debug("Adding to empty list")
      for fname in fnames:
         if self._isNew(fname):
            newOnes.append(fname)
      return newOnes
Exemplo n.º 15
0
def filterWithinNHours(files, type, ftime, N):
   """Filter file names (format 'yyyymmdd/<file>') to those in a time range.

   Parameters
   ----------
   files: list[str]
      file names, each with parent directory: 'yyyymmdd/<file>'
   type: str
      file type string, 'RAP', 'HRRR', 'MRMS', 'GFS'
   ftime: ForecastTime
      time to compare against, assumed most recent time
   N: int
      Number of hours back from ftime to keep

   Returns
   -------
   list[str]
      subset of input files whose issue time is in the range [ftime-N, ftime]
   """
   kept = []
   for fname in files:
      # local renamed from 'df', which shadowed the df module alias
      dataFile = DataFile(fname[0:8], fname[9:], type)
      if not dataFile._ok:
         continue
      if dataFile._time.withinNHours(ftime, N):
         kept.append(fname)
      else:
         WhfLog.debug("Did not append file, too old compared to %s  file=%s", ftime.debugString(), fname)

   WhfLog.debug("filtering within %d hours, input length %d output %d",
                 N, len(files), len(kept))
   return kept
Exemplo n.º 16
0
   def _allDataFiles(self):
      """Collect the DataFile specs for everything on disk, oldest first.

      Returns
      -------
      list[DataFile]
         The DataFile specs, in order oldest to newest
      """
      subdirs = sorted(getYyyymmddSubdirectories(self._topDir))
      if not subdirs:
         # nothing there
         WhfLog.debug("_allDataFiles: No data in %s", self._topDir)
         return []
      ret = []
      for sub in subdirs:
         ret.extend(self._allDataFilesInDir(sub))
      return ret
Exemplo n.º 17
0
def filterWithinNHours(files, type, ftime, N):
   """Return the subset of files whose issue time is within N hours of ftime.

   Parameters
   ----------
   files: list[str]
      file names, each with parent directory: 'yyyymmdd/<file>'
   type: str
      file type string, 'RAP', 'HRRR', 'MRMS', 'GFS'
   ftime: ForecastTime
      time to compare against, assumed most recent time
   N: int
      Number of hours back from ftime to keep

   Returns
   -------
   list[str]
      subset of input files whose issue time is in the range [ftime-N, ftime]
   """
   inRange = []
   for name in files:
      candidate = DataFile(name[0:8], name[9:], type)
      if candidate._ok:
         if candidate._time.withinNHours(ftime, N):
            inRange.append(name)
         else:
            WhfLog.debug("Did not append file, too old compared to %s  file=%s", ftime.debugString(), name)
   WhfLog.debug("filtering within %d hours, input length %d output %d",
                 N, len(files), len(inRange))
   return inRange
 def debugPrint(self):
     """Write this FcstStep's content to the debug log."""
     WhfLog.debug("FcstStep: empty=%d", self._empty)
     if self._empty:
         return
     WhfLog.debug(
         "FcstStep[%d] hrrr0:%d hrrr3:%d rap0:%d rap3:%d mrms:%d lay:%d",
         self._step, self._hrrr0, self._hrrr3, self._rap0, self._rap3,
         self._mrms, self._layered)
 def debugPrint(self):
     """Write this Model's content to the debug log."""
     WhfLog.debug("Model: empty=%d", df.boolToInt(self._empty))
     if self._empty:
         return
     issueStr = self._issue.strftime("%Y%m%d%H")
     clockStr = self._clockTime.strftime("%Y-%m-%d_%H:%M:%S")
     WhfLog.debug("Model: Issue=%s  clockTime=%s", issueStr, clockStr)
 def debugPrint(self):
     """Log this Model's state at debug level."""
     WhfLog.debug("Model: empty=%d", df.boolToInt(self._empty))
     if not self._empty:
         WhfLog.debug("Model: Issue=%s  clockTime=%s",
                      self._issue.strftime("%Y%m%d%H"),
                      self._clockTime.strftime("%Y-%m-%d_%H:%M:%S"))
 def debugPrint(self):
     """Log this FcstStep's state at debug level."""
     WhfLog.debug("FcstStep: empty=%d", self._empty)
     if self._empty:
         return
     WhfLog.debug("FcstStep[%d] hrrr0:%d hrrr3:%d rap0:%d rap3:%d mrms:%d lay:%d",
                  self._step, self._hrrr0, self._hrrr3, self._rap0,
                  self._rap3, self._mrms, self._layered)
 def debugPrint(self):
     """Log this Fcst's state at debug level."""
     WhfLog.debug("Fcst: empty=%d first=%d", self._empty, self._first)
     if self._empty:
         return
     stepStrs = [s.debugPrintString() for s in self._step[0:3]]
     WhfLog.debug(
         "Fcst: I:%s step[0]:%s step[1]:%s step[2]:%s layered:%d clockTime=%s",
         self._issue.strftime("%Y%m%d%H"), stepStrs[0], stepStrs[1],
         stepStrs[2], self._layered,
         self._clockTime.strftime("%Y-%m-%d_%H:%M:%S"))
def main(argv):
    """ Run the layering driver once.

    Parameters
    ----------
    argv : list[str]
        command line args; argv[0] is the config file name

    Returns
    -------
    int
        1 for error, 0 for success
    """
    # User must pass the config file into the main driver.
    configFile = argv[0]
    if not os.path.exists(configFile):
        print('ERROR forcing engine config file not found.')
        return 1

    # read in fixed main params
    parms = parmRead(configFile)

    # query each directory to get newest thing, and update overall newest
    newestT = df.newestIssueTime(parms._hrrrDir)
    newestT2 = df.newestIssueTime(parms._rapDir)
    if (not newestT) and (not newestT2):
        WhfLog.debug("NO INPUT DATA available")
        return 0

    # pick the newer of the two issue times; either one (but not both)
    # may be None here, and 'newestT2 > newestT' with a None operand is
    # a TypeError in python 3
    if newestT2 and (not newestT or newestT2 > newestT):
        newestT = newestT2

    # if there is not a state file, create one now using newest
    if not os.path.exists(parms._stateFile):
        state = State()
        WhfLog.info("Initializing")
        state.initialSetup(parms)
        state.initialize(parms, newestT)
        state.write(parms._stateFile)

    # read in state
    state2 = State()
    state2.initFromStateFile(parms._stateFile)
    if state2.isEmpty():
        # empty state, nothing to layer
        return 0

    # check for new issue time
    if state2.isNewModelIssueTime(newestT):
        WhfLog.info("Re-Initializing state, new model issue time %s", newestT)
        state2.initialize(parms, newestT)

    # update availability
    state2.setCurrentModelAvailability(parms)

    # layer if appropriate
    state2.layerIfReady(parms, configFile)

    # write out final state
    state2.write(parms._stateFile)
    return 0
 def debugPrint(self):
     """Log this Fcst's availability state at debug level."""
     WhfLog.debug("Fcst: empty=%d", int(self._empty))
     if self._empty:
         return
     WhfLog.debug("Fcst: I:%s V:%s hrrr:%d rap:%d layered:%d clockTime=%s",
                  self._issue.strftime("%Y%m%d%H"),
                  self._valid.strftime("%Y%m%d%H"),
                  df.boolToInt(self._hrrr),
                  df.boolToInt(self._rap),
                  df.boolToInt(self._layered),
                  self._clockTime.strftime("%Y-%m-%d_%H:%M:%S"))
   def updateWithNew(self, data, hoursBack):
      """ Update internal state with new data

      Parameters
      ----------
      data: DataFiles
         The newest data
      hoursBack: int
         Maximum number of hours back to keep data in state

      Returns
      -------
      list[str]
          The data file names that are newly added to state
      """
      ret = []
      fnames = data.getFnames()
      if not fnames:
         return ret

      if self.isEmpty():
         # removed a stray '%s' that had no matching argument
         WhfLog.debug("Adding to empty list")
      else:
         sname = self.newest()
         if not sname:
            WhfLog.error("Expected file, got none")
            return ret
         if fnames[-1] > sname:
            WhfLog.debug("Newer time encountered")
            # see if issue time has increased and if so, purge old stuff;
            # create DataFile objects to compare issue hours
            parsedOk = True
            try:
               df0 = df.DataFile(sname[0:8], sname[9:], 'CFS')
               df1 = df.DataFile(fnames[-1][0:8], fnames[-1][9:], 'CFS')
            except FilenameMatchError as fe:
               WhfLog.debug("Skipping file use due to %s", fe)
               parsedOk = False
            except InvalidArgumentError as ie:
               WhfLog.debug("Skipping file use due to %s", ie)
               parsedOk = False

            # df0/df1 are unbound when construction raised; guarding here
            # fixes a latent NameError on the exception path
            if parsedOk and df0._time.inputIsNewerIssueHour(df1._time):
               WhfLog.debug("Issue hour has increased, purge now")
               self.update(df1._time, hoursBack)

      for f in fnames:
         if self.addFileIfNew(f):
            ret.append(f)

      self.sortFiles()
      return ret
    def updateWithNew(self, data, hoursBack):
        """ Update internal state with new data

        Parameters
        ----------
        data: DataFiles
           The newest data
        hoursBack: int
           Maximum number of hours back to keep data in state

        Returns
        -------
        list[str]
            The data file names that are newly added to state
        """
        ret = []
        fnames = data.getFnames()
        if not fnames:
            return ret

        if self.isEmpty():
            # removed a stray '%s' that had no matching argument
            WhfLog.debug("Adding to empty list")
        else:
            sname = self.newest()
            if not sname:
                WhfLog.error("Expected file, got none")
                return ret
            if fnames[-1] > sname:
                WhfLog.debug("Newer time encountered")
                # see if issue time has increased and if so, purge old
                # stuff; create DataFile objects to compare issue hours
                parsedOk = True
                try:
                    df0 = df.DataFile(sname[0:8], sname[9:], 'CFS')
                    df1 = df.DataFile(fnames[-1][0:8], fnames[-1][9:], 'CFS')
                except FilenameMatchError as fe:
                    WhfLog.debug("Skipping file use due to %s", fe)
                    parsedOk = False
                except InvalidArgumentError as ie:
                    WhfLog.debug("Skipping file use due to %s", ie)
                    parsedOk = False

                # df0/df1 are unbound when construction raised; guarding
                # here fixes a latent NameError on the exception path
                if parsedOk and df0._time.inputIsNewerIssueHour(df1._time):
                    WhfLog.debug("Issue hour has increased, purge now")
                    self.update(df1._time, hoursBack)

        for f in fnames:
            if self.addFileIfNew(f):
                ret.append(f)

        self.sortFiles()
        return ret
 def debugPrint(self):
     """Log this Fcst's step state at debug level."""
     WhfLog.debug("Fcst: empty=%d first=%d", self._empty, self._first)
     if self._empty:
         return
     issueStr = self._issue.strftime("%Y%m%d%H")
     clockStr = self._clockTime.strftime("%Y-%m-%d_%H:%M:%S")
     WhfLog.debug(
         "Fcst: I:%s step[0]:%s step[1]:%s step[2]:%s layered:%d clockTime=%s",
         issueStr, self._step[0].debugPrintString(),
         self._step[1].debugPrintString(), self._step[2].debugPrintString(),
         self._layered, clockStr)
 def debugPrint(self):
    """Log the CFS parameter values at debug level."""
    for fmt, value in (("Parms: CFS_data = %s", self._cfsDir),
                       ("Parms: CFS_num_ensembles = %d", self._cfsNumEnsemble),
                       ("Parms: MaxFcstHourCfs = %d", self._maxFcstHourCfs),
                       ("Parms: StateFile = %s", self._stateFile)):
       WhfLog.debug(fmt, value)
Exemplo n.º 29
0
 def debugPrint(self):
    """Log the data-source parameter values at debug level."""
    dtype = self._dataType
    WhfLog.debug("%s data = %s", dtype, self._dataDir)
    WhfLog.debug("%s source = %s", dtype, self._sourceDataDir)
    WhfLog.debug("%s statefile = %s", dtype, self._stateFile)
    WhfLog.debug("%s format = %s", dtype, self._format)
Exemplo n.º 30
0
   def _analyzeNewest(self, dataNewest, stateNewest, hoursBack, fileType):
      if (dataNewest <= stateNewest):
         return

      # see if issue time has increased and if so, purge old stuff
      # create DataFile objects, which requires breaking the full
      # file into yymmdd/filename
      sind = stateNewest.find('/')
      if (sind < 0):
         raise FileNameMatchError('Cannot parse directory from ' + stateNewest)
      nind = dataNewest.find('/')
      if (nind < 0):
         raise FileNameMatchError('Cannot parse directory from ' + dataNewest)
      symd = stateNewest[:sind]
      sfile = stateNewest[sind+1:]
      nymd = dataNewest[:nind]
      nfile = dataNewest[nind+1:]
      WhfLog.debug("Checking %s / %s  against %s / %s", symd, sfile, nymd, nfile)
      try:
         df0 = df.DataFile(symd, sfile, fileType)
         df1 = df.DataFile(nymd, nfile, fileType)
      except FilenameMatchError as fe:
         WhfLog.debug("Cannot update due to %s", fe)
      except InvalidArgumentError as ie:
         WhfLog.debug("Cannot update due to %s", ie)

      if (df0._time.inputIsNewerIssueHour(df1._time)):
         WhfLog.debug("%s Issue hour has increased, purge now", fileType)
         self._update(df1._time, hoursBack, fileType)
 def debugPrint(self):
     """Log the CFS parameter values at debug level."""
     entries = [("Parms: CFS_data = %s", self._cfsDir),
                ("Parms: CFS_num_ensembles = %d", self._cfsNumEnsemble),
                ("Parms: MaxFcstHourCfs = %d", self._maxFcstHourCfs),
                ("Parms: StateFile = %s", self._stateFile)]
     for fmt, val in entries:
         WhfLog.debug(fmt, val)
Exemplo n.º 32
0
 def debugPrint(self):
     """Log the data-source parameter values at debug level."""
     entries = [("%s data = %s", self._dataDir),
                ("%s source = %s", self._sourceDataDir),
                ("%s statefile = %s", self._stateFile),
                ("%s format = %s", self._format)]
     for fmt, val in entries:
         WhfLog.debug(fmt, self._dataType, val)
Exemplo n.º 33
0
   def _analyzeNewest(self, dataNewest, stateNewest, hoursBack, fileType):
      if (dataNewest <= stateNewest):
         return

      # see if issue time has increased and if so, purge old stuff
      # create DataFile objects, which requires breaking the full
      # file into yymmdd/filename
      sind = stateNewest.find('/')
      if (sind < 0):
         raise FileNameMatchError('Cannot parse directory from ' + stateNewest)
      nind = dataNewest.find('/')
      if (nind < 0):
         raise FileNameMatchError('Cannot parse directory from ' + dataNewest)
      symd = stateNewest[:sind]
      sfile = stateNewest[sind+1:]
      nymd = dataNewest[:nind]
      nfile = dataNewest[nind+1:]
      WhfLog.debug("Checking %s / %s  against %s / %s", symd, sfile, nymd, nfile)
      try:
         df0 = df.DataFile(symd, sfile, fileType)
         df1 = df.DataFile(nymd, nfile, fileType)
      except FilenameMatchError as fe:
         WhfLog.debug("Cannot update due to %s", fe)
      except InvalidArgumentError as ie:
         WhfLog.debug("Cannot update due to %s", ie)

      if (df0._time.inputIsNewerIssueHour(df1._time)):
         WhfLog.debug("%s Issue hour has increased, purge now", fileType)
         self._update(df1._time, hoursBack, fileType)
Exemplo n.º 34
0
   def debugPrint(self, name):
      """Log a description of this DataFile at debug level.

      Parameters
      ----------
      name: string
         name that is included in the description

      Returns
      -------
         none
      """
      WhfLog.debug("%s[%s]=%s, %s, %s",
                   name, self._yyyymmddDir, self._name, self._fileType,
                   self._time.debugString())
Exemplo n.º 35
0
   def debugPrint(self, name):
      """Log a description of this DataFile at debug level.

      Parameters
      ----------
      name: string
         name that is included in the description

      Returns
      -------
         none
      """
      timeStr = self._time.debugString()
      WhfLog.debug("%s[%s]=%s, %s, %s", name, self._yyyymmddDir, self._name,
                   self._fileType, timeStr)
    def setCurrentModelAvailability(self, parms, model):
        """ Change availability status when appropriate by looking at disk

        Re-checks disk for whichever of HRRR/RAP was not yet seen, starts
        the wait clock on the first appearance of either, and warns when
        nothing has appeared for longer than parms._veryLateSeconds.

        Parameters
        ----------
        parms : Parms
            parameters
        model : Model
            overall status for this model run, used for clock time

        Returns
        -------
        none
        """

        if (self._layered):
            # no need to do more, already layered
            return

        # make note if going from nothing to something
        nothing = (not self._hrrr) and (not self._rap)

        if (not self._hrrr):
            # update HRRR status by looking at disk
            self._hrrr = self._forecastExists(parms._hrrrDir)
        if (not self._rap):
            # update RAP status by looking at disk
            self._rap = self._forecastExists(parms._rapDir)
        if (nothing and (self._hrrr or self._rap)):
            # went from nothing to something, so start the clock
            WhfLog.debug("Starting clock now, hrrr=%d, rap=%d",
                         df.boolToInt(self._hrrr), df.boolToInt(self._rap))
            self._clockTime = datetime.datetime.utcnow()
        else:
            if (nothing and ((not self._hrrr) and (not self._rap))):
                # nothing to nothing: compare current time to the clock
                # time from the model input, and warn if too much time
                # has passed with no data
                tnow = datetime.datetime.utcnow()
                diff = tnow - model._clockTime
                idiff = diff.total_seconds()
                if (idiff > parms._veryLateSeconds):
                    WhfLog.warning(
                        "Inputs for short range layering are very late Issue:%s Valid:%s",
                        self._issue.strftime("%Y%m%d%H"),
                        self._valid.strftime("%Y%m%d%H"))
def layer(parms, itime, step, which, config):
    """ Perform layering

    NOTE: here is where returns status will be added and used

    Parameters
    ----------
    parms : Parms
        parameters
    itime : datetime
        issue time of the data being layered
    step : int
        hours back for the analysis/assimilation step
    which : str
        which layering variant to perform
    config : str
        configuration file name
    """

    issueStr = itime.strftime("%Y%m%d%H")
    WhfLog.debug("LAYERING: %s  %d %s", issueStr, step, which)
    aaf.anal_assim_layer(issueStr, '-%01d' % (step), which, config)
    WhfLog.debug("DONE LAYERING: %s  %d %s", issueStr, step, which)
    def setCurrentModelAvailability(self, parms, model):
        """ Poll the disk and update HRRR/RAP availability flags.

        Parameters
        ----------
        parms : Parms
            parameters
        model : Model
            overall status for this model run, used for clock time

        Returns
        -------
        none
        """
        if self._layered:
            # already layered; no need to do more
            return

        wasEmpty = (not self._hrrr) and (not self._rap)

        # re-check disk for any input not yet seen (short-circuit keeps
        # flags that are already True)
        self._hrrr = self._hrrr or self._forecastExists(parms._hrrrDir)
        self._rap = self._rap or self._forecastExists(parms._rapDir)

        if not wasEmpty:
            # already had at least one input; the clock is already running
            return

        if self._hrrr or self._rap:
            # went from nothing to something, so start the clock
            WhfLog.debug("Starting clock now, hrrr=%d, rap=%d",
                         df.boolToInt(self._hrrr), df.boolToInt(self._rap))
            self._clockTime = datetime.datetime.utcnow()
        else:
            # nothing to nothing; squeaky wheel if the wait is too long
            waited = (datetime.datetime.utcnow() -
                      model._clockTime).total_seconds()
            if waited > parms._veryLateSeconds:
                WhfLog.warning("Inputs for short range layering are very late Issue:%s Valid:%s",
                               self._issue.strftime("%Y%m%d%H"),
                               self._valid.strftime("%Y%m%d%H"))
def layer(parms, itime, step, which, config):
    """ Perform layering

    NOTE: here is where returns status will be added and used

    Parameters
    ----------
    parms : Parms
        parameters
    itime : datetime
        issue time of the data being layered
    step : int
        hours back for the analysis/assimilation step
    which : str
        which layering variant to perform
    config : str
        configuration file name
    """

    tstr = itime.strftime("%Y%m%d%H")
    WhfLog.debug("LAYERING: %s  %d %s", tstr, step, which)
    aaf.anal_assim_layer(tstr, '-%01d' % (step), which, config)
    WhfLog.debug("DONE LAYERING: %s  %d %s", tstr, step, which)
def main(argv):
    """ Main driver for CFS regridding.

    Parameters
    ----------
    argv : list
        command args; argv[0] is the configuration file name

    Returns
    -------
    int
        0 for success (or empty state), 1 for missing config file
    """

    # User must pass the config file into the main driver.
    configFile = argv[0]
    if not os.path.exists(configFile):
        # BUG FIX: was a Python-2-only 'print' statement; the parenthesized
        # single-argument form behaves identically under Python 2 and 3
        print('ERROR forcing engine config file not found.')
        return 1

    # read in fixed main params
    parms = parmRead(configFile)

    # if there is not a state file, create one now using newest data
    if not os.path.exists(parms._stateFile):
        parms.debugPrint()
        createStateFile(parms)

    # begin normal processing situation
    WhfLog.debug("....Check for new input data to regid")

    # read in state
    state = State(parms._stateFile)
    if state.isEmpty():
        # error return here
        return 0

    # query directory and get newest model run file, then
    # get all for that and previous issue time
    cfs = df.DataFiles(parms._cfsDir, parms._maxFcstHourCfs, "CFS")
    cfs.setNewestFiles(parms._hoursBackCfs)

    # update state with the new CFS files and regrid each one
    toProcess = state.updateWithNew(cfs, parms._hoursBackCfs)
    for f in toProcess:
        regridCFS(configFile, f)

    # write out state and exit
    state.write(parms._stateFile)
    return 0
Exemplo n.º 41
0
   def olderIssueHour(self, hoursBack):
      """ Rewind this ForecastTime's issue time by hoursBack hours, in place.

      Sets _fcstTime to midnight of the older day, _issueHour to the older
      hour, and _forecastHour to 0. Logs an error and leaves the object
      untouched when it is empty.

      Parameters
      ----------
      hoursBack: int
         Number of hours back to look
      """
      if self.isEmpty():
         WhfLog.debug("ERROR empty input to olderIssueHour")
         return
      older = self.ymdh() - datetime.timedelta(hours=hoursBack)
      self._fcstTime = datetime.datetime(older.year, older.month, older.day,
                                         0, 0, 0)
      self._issueHour = older.hour
      self._forecastHour = 0
Exemplo n.º 42
0
   def olderIssueHour(self, hoursBack):
      """ Shift this ForecastTime back by hoursBack issue hours (in place).

      _fcstTime becomes midnight of the resulting day, _issueHour the
      resulting hour, and _forecastHour 0.

      Parameters
      ----------
      hoursBack: int
         Number of hours back to look
      """
      if (self.isEmpty()):
         WhfLog.debug("ERROR empty input to olderIssueHour")
      else:
         shifted = self.ymdh() - datetime.timedelta(hours=hoursBack)
         self._fcstTime = datetime.datetime(shifted.year, shifted.month,
                                            shifted.day, 0, 0, 0)
         self._issueHour = shifted.hour
         self._forecastHour = 0
Exemplo n.º 43
0
def regridIfZeroHr(configFile, fileType, fname):
   """If it is a 0 hour forecast (RAP or HRRR) regrid in a special way

   Parameters
   ----------
   configFile : str
      configuration file with all settings
   fileType: str
      HRRR, RAP, ... string
   fname: str
      name of file to regrid and downscale, with yyyymmdd parent dir

   Returns
   -------
   None
   """
   # check for 0 hour by creating a DataFile and checking forecast hour
   try:
      f = df.DataFile(fname[0:8], fname[9:], fileType)
   except FilenameMatchError as fe:
      WhfLog.debug("Cannot check for 0 hour data due to %s", fe)
      raise
   except InvalidArgumentError as ie:
      WhfLog.debug("Cannot check for 0 hour data due to %s", ie)
      raise
   if f._time._forecastHour == 0:
      WhfLog.setConfigType('AA')
      # BUG FIX: was fname[9:0], an always-empty slice; log the actual name
      WhfLog.debug("SPECIAL 0 hour case %s", fname[9:])
      aaf.forcing(configFile, 'regrid', fileType, fname[9:])
      WhfLog.setConfigType('Short')
Exemplo n.º 44
0
def regridIfZeroHr(configFile, fileType, fname):
   """If it is a 0 hour forecast (RAP or HRRR) regrid in a special way

   Parameters
   ----------
   configFile : str
      configuration file with all settings
   fileType: str
      HRRR, RAP, ... string
   fname: str
      name of file to regrid and downscale, with yyyymmdd parent dir

   Returns
   -------
   None
   """
   # check for 0 hour by creating a DataFile and checking forecast hour
   try:
      f = df.DataFile(fname[0:8], fname[9:], fileType)
   except FilenameMatchError as fe:
      WhfLog.debug("Cannot check for 0 hour data due to %s", fe)
      raise
   except InvalidArgumentError as ie:
      WhfLog.debug("Cannot check for 0 hour data due to %s", ie)
      raise
   if f._time._forecastHour == 0:
      WhfLog.setConfigType('AA')
      # BUG FIX: was fname[9:0], an always-empty slice; log the actual name
      WhfLog.debug("SPECIAL 0 hour case %s", fname[9:])
      aaf.forcing(configFile, 'regrid', fileType, fname[9:])
      WhfLog.setConfigType('Short')
Exemplo n.º 45
0
   def setNewestFiles(self, hoursBack):
      """Fill _content with the most recent files on disk.

      Parameters
      ----------
      hoursBack: int
         Maximum number of hours back compared to newest issue time to
         consider a file new enough to be a newest file

      Returns
      -------
      bool
         True if successful and there is at least one file in _content
      """
      self._content = []

      # locate the single newest file; bail out if the dir is empty
      newest = self._newestDataFile()
      if not newest._ok:
         WhfLog.debug("setNewestFiles:No data in %s", self._topDir)
         return False

      # oldest acceptable time is hoursBack before the newest issue time
      floor = ForecastTime()
      floor.copyFields(newest._time)
      floor.olderIssueHour(hoursBack)

      # keep every file on disk whose time falls within [floor, newest]
      self._content = [d for d in self._allDataFiles()
                       if d.inRange(floor, newest._time)]
      return True
Exemplo n.º 46
0
   def setNewestFiles(self, hoursBack):
      """Rebuild _content with only the most recent files.

      Parameters
      ----------
      hoursBack: int
         Maximum number of hours back compared to newest issue time to
         consider a file new enough to be a newest file

      Returns
      -------
      bool
         True if successful and there is at least one file in _content
      """
      # start from scratch
      self._content = []

      newestFile = self._newestDataFile()
      if (not newestFile._ok):
         WhfLog.debug("setNewestFiles:No data in %s", self._topDir)
         return False

      # the oldest acceptable forecast is hoursBack before the newest
      lowerBound = ForecastTime()
      lowerBound.copyFields(newestFile._time)
      lowerBound.olderIssueHour(hoursBack)

      # keep all files on disk whose time is within the window
      for candidate in self._allDataFiles():
         if (candidate.inRange(lowerBound, newestFile._time)):
            self._content.append(candidate)
      return True
Exemplo n.º 47
0
   def _newestDataFile(self):
      """Return the newest (biggest issuetime in newest day, max fcst hour) file

      Returns
      -------
      DataFile
         An empty DataFile if there is no data, else the newest one
      """
      # directories are named yyyymmdd, so lexical sort is chronological
      dirs = sorted(getYyyymmddSubdirectories(self._topDir))
      if not dirs:
         # nothing there
         # BUG FIX: log message said "_newestDatFile" (typo)
         WhfLog.debug("_newestDataFile:No data in %s", self._topDir)
         return DataFile()
      # The last directory will be newest, look there for our newest
      return self._newestDataFileInDir(dirs[-1])
Exemplo n.º 48
0
   def _newestDataFile(self):
      """Return the newest (biggest issuetime in newest day, max fcst hour) file

      Returns
      -------
      DataFile
         An empty DataFile if there is no data, else the newest one
      """
      # directories are named yyyymmdd, so lexical sort is chronological
      dirs = sorted(getYyyymmddSubdirectories(self._topDir))
      if not dirs:
         # nothing there
         # BUG FIX: log message said "_newestDatFile" (typo)
         WhfLog.debug("_newestDataFile:No data in %s", self._topDir)
         return DataFile()
      # The last directory will be newest, look there for our newest
      return self._newestDataFileInDir(dirs[-1])
Exemplo n.º 49
0
 def debugPrint(self):
    """ Log the parameter settings at debug level
    """
    for fmt, val in (("Parms: data = %s", self._dataDir),
                     ("Parms: MaxFcstHour = %d", self._maxFcstHour),
                     ("Parms: StateFile = %s", self._stateFile)):
       WhfLog.debug(fmt, val)
 def debugPrint(self):
    """ Log each CFS file currently held in state
    """
    for fileName in self._cfs:
       WhfLog.debug("State:CFS:%s", fileName)
Exemplo n.º 51
0
   def lookForNew(self, data, hoursBack, fileType):
      """ See if new data has arrived compared to state.
      If a new issue time, purge older stuff from state.

      Parameters
      ----------
      data: DataFiles
         The newest data
      hoursBack: int
         Maximum number of hours back to keep data in state
      fileType : str
         'HRRR', 'RAP', ...

      Returns
      -------
      list[str]
          The data file names that are to be added to state
      """
      ret = []
      fnames = data.getFnames()
      if not fnames:
         return ret

      if self.isEmpty():
         WhfLog.debug("Adding to empty list")
      else:
         sname = self.newest()
         if not sname:
            WhfLog.error("Expected file, got none")
            return ret
         if fnames[-1] > sname:
            WhfLog.debug("Newer time encountered")
            # see if issue time has increased and if so, purge old stuff
            # create DataFile objects, which requires breaking the full
            # file into yymmdd/filename
            sind = sname.find('/')
            if sind < 0:
               # BUG FIX: was FileNameMatchError; the exception class used
               # everywhere else in this file is FilenameMatchError
               raise FilenameMatchError('Cannot parse directory from ' + sname)
            nind = fnames[-1].find('/')
            # BUG FIX: was re-testing sind instead of nind
            if nind < 0:
               raise FilenameMatchError('Cannot parse directory from ' +
                                        fnames[-1])

            symd = sname[:sind]
            sfile = sname[sind+1:]
            nymd = fnames[-1][:nind]
            nfile = fnames[-1][nind+1:]
            WhfLog.debug("Checking %s / %s  against %s / %s", symd, sfile,
                         nymd, nfile)
            try:
               df0 = df.DataFile(symd, sfile, fileType)
               df1 = df.DataFile(nymd, nfile, fileType)
            except FilenameMatchError as fe:
               WhfLog.debug("Cannot update due to %s", fe)
               # BUG FIX: df0/df1 would be unbound below; give up on purge
               return ret
            except InvalidArgumentError as ie:
               WhfLog.debug("Cannot update due to %s", ie)
               return ret

            if df0._time.inputIsNewerIssueHour(df1._time):
               WhfLog.debug("%s Issue hour has increased, purge now",
                            fileType)
               self.update(df1._time, hoursBack, fileType)

      # everything not already in state is new
      for f in fnames:
         if self.isNew(f):
            ret.append(f)
      return ret
 def debugPrint(self):
     """ WhfLog debug of parameter content
     """
     for fmt, val in (("Parms: HRRR_data = %s", self._hrrrDir),
                      ("Parms: RAP_data = %s", self._rapDir),
                      ("Parms: Layer_data = %s", self._layerDir),
                      ("Parms: MaxFcstHour = %d", self._maxFcstHour),
                      ("Parms: HoursBack = %d", self._hoursBack),
                      ("Parms: maxWaitSeconds = %d", self._maxWaitSeconds),
                      ("Parms: veryLateSeconds = %d", self._veryLateSeconds),
                      ("Parms: StateFile = %s", self._stateFile)):
         WhfLog.debug(fmt, val)
Exemplo n.º 53
0
 def debugPrint(self):
    """ Log the forecast time fields at debug level
    """
    WhfLog.debug("%s,i[%s],f[%s]", self._fcstTime, self._issueHour,
                 self._forecastHour)
def forcing(configFile, file_in):
    """ Bias-correct, regrid, and downscale one CFSv2 file for long range.

    Parameters
    ----------
    configFile : str
        The config file with all the settings.
    file_in : str
        The file name. The full path is not necessary as full paths will be
        derived from parameter directory paths and datetime information.

    Returns
    -------
    None
        Performs indicated bias correction, regridding, and downscaling of
        CFSv2 data. Any errors are trapped and passed back to the driver.

    Notes
    -----
    FIX: the original mixed tabs and spaces on three lines (a TabError under
    Python 3); indentation is normalized here with no logic change. The
    unused local 'dateCurrent' was removed.
    """

    WhfLog.debug("file_in = %s", file_in)

    # Obtain CFSv2 forcing engine parameters.
    parser = SafeConfigParser()
    parser.read(configFile)

    # Set up logging environments, etc.
    forcing_config_label = "Long_Range"
    try:
        Whf.initial_setup(parser, forcing_config_label)
    except:
        raise

    out_dir = parser.get('layering', 'long_range_output')
    tmp_dir = parser.get('bias_correction', 'CFS_tmp_dir')

    if not df.makeDirIfNeeded(out_dir):
        raise MissingDirectoryError('Dir %s cannot be created', out_dir)
    if not df.makeDirIfNeeded(tmp_dir):
        raise MissingDirectoryError('Dir %s cannot be created', tmp_dir)

    # Define CFSv2 cycle date and valid time based on file name.
    (cycleYYYYMMDD, cycleHH, fcsthr, em) = Whf.extract_file_info_cfs(file_in)
    em_str = str(em)

    # Pull path to NCL bias correction module file. Export this as an
    # environmental variable NCL refers to later.
    nclBiasMod = parser.get('exe', 'CFS_bias_correct_mod')
    os.environ["CFS_NCL_BIAS_MOD"] = nclBiasMod

    # Establish datetime objects
    dateCycleYYYYMMDDHH = datetime.datetime(year=int(cycleYYYYMMDD[0:4]),
                                            month=int(cycleYYYYMMDD[4:6]),
                                            day=int(cycleYYYYMMDD[6:8]),
                                            hour=cycleHH)
    dateFcstYYYYMMDDHH = dateCycleYYYYMMDDHH + \
        datetime.timedelta(seconds=fcsthr * 3600)

    # Determine if this is a 0hr forecast file or not.
    if dateFcstYYYYMMDDHH == dateCycleYYYYMMDDHH:
        fFlag = 1
    else:
        fFlag = 0

    # Establish final output directories to hold 'LDASIN' files used for
    # WRF-Hydro long-range forecasting. If the directory does not exist,
    # create it.
    out_path = out_dir + "/Member_" + em_str.zfill(2) + "/" + \
               dateCycleYYYYMMDDHH.strftime("%Y%m%d%H")

    try:
        Whf.mkdir_p(out_path)
    except:
        raise

    in_fcst_range = Whf.is_in_fcst_range("CFSv2", fcsthr, parser)

    if in_fcst_range:
        # First, bias-correct CFSv2 data and generate hourly files
        # from six-hour forecast
        WhfLog.info("Bias correcting for CFSv2 cycle: " +
                    dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') +
                    " CFSv2 forecast time: " +
                    dateFcstYYYYMMDDHH.strftime('%Y%m%d%H'))
        try:
            Whf.bias_correction('CFSV2', file_in, dateCycleYYYYMMDDHH,
                                dateFcstYYYYMMDDHH, parser, em=em)
        except (MissingFileError, NCLError):
            raise

        # Second, regrid to the conus IOC domain. Loop through each hour in
        # a six-hour CFSv2 forecast time step, compose the temporary
        # filename generated by bias-correction, and regrid to the conus
        # domain. A 0-hr file only yields the final (6th) hour.
        if fFlag == 1:
            begCt = 6
            endCt = 7
        else:
            begCt = 1
            endCt = 7
        for hour in range(begCt, endCt):
            dateTempYYYYMMDDHH = dateFcstYYYYMMDDHH - \
                datetime.timedelta(seconds=(6 - hour) * 3600)

            fileBiasCorrected = tmp_dir + "/CFSv2_bias_corrected_TMP_" + \
                dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') + "_" + \
                dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + ".M" + \
                em_str.zfill(2) + ".nc"
            WhfLog.info("Regridding CFSv2 to conus domain for cycle: " +
                        dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') +
                        " forecast time: " +
                        dateTempYYYYMMDDHH.strftime('%Y%m%d%H'))
            try:
                fileRegridded = Whf.regrid_data("CFSV2", fileBiasCorrected,
                                                parser)
            except (MissingFileError, NCLError):
                raise

            # Double check to make sure file was created, delete temporary
            # bias-corrected file
            try:
                Whf.file_exists(fileRegridded)
            except MissingFileError:
                raise
            cmd = "rm -rf " + fileBiasCorrected
            status = os.system(cmd)
            if status != 0:
                raise SystemCommandError('Command %s failed.' % cmd)

        # Third, perform topography downscaling to generate final output.
        # Loop through each hour in a six-hour CFSv2 forecast time step,
        # compose the temporary filename generated by regridding, and call
        # the downscaling function.
        for hour in range(begCt, endCt):
            dateTempYYYYMMDDHH = dateFcstYYYYMMDDHH - \
                datetime.timedelta(seconds=(6 - hour) * 3600)

            WhfLog.info("Downscaling CFSv2 for cycle: " +
                        dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') +
                        " forecast time: " +
                        dateTempYYYYMMDDHH.strftime('%Y%m%d%H'))
            fileRegridded = tmp_dir + "/CFSv2_bias_corrected_TMP_" + \
                dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') + "_" + \
                dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + \
                "_regridded.M" + em_str.zfill(2) + ".nc"
            LDASIN_path_tmp = tmp_dir + "/" + \
                dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + \
                "00.LDASIN_DOMAIN1.nc"
            LDASIN_path_final = out_path + "/" + \
                dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + \
                "00.LDASIN_DOMAIN1"
            try:
                Whf.downscale_data("CFSv2", fileRegridded, parser,
                                   out_path=LDASIN_path_tmp,
                                   verYYYYMMDDHH=dateTempYYYYMMDDHH)
            except (MissingFileError, FilenameMatchError, NCLError,
                    SystemCommandError):
                raise
            # Double check to make sure file was created, delete temporary
            # regridded file
            try:
                Whf.file_exists(LDASIN_path_tmp)
            except MissingFileError:
                raise
            # Rename file to conform to WRF-Hydro expectations
            cmd = "mv " + LDASIN_path_tmp + " " + LDASIN_path_final
            status = os.system(cmd)
            if status != 0:
                raise SystemCommandError('Command %s failed.' % cmd)
            try:
                Whf.file_exists(LDASIN_path_final)
            except MissingFileError:
                raise
            cmd = "rm -rf " + fileRegridded
            status = os.system(cmd)
            if status != 0:
                raise SystemCommandError('Command %s failed.' % cmd)

        WhfLog.info("Long_Range processing for %s%d Forecast Hour: %d Ensemble: %s",
                    cycleYYYYMMDD, cycleHH, fcsthr, em_str)
    else:
        # Skip processing this file. Exit gracefully with a 0 exit status.
        WhfLog.info("Requested file is outside max fcst for CFSv2")
Exemplo n.º 55
0
 def debugPrint(self):
    """ Log forecast time content at debug level
    """
    WhfLog.debug("%s,i[%s],f[%s]", self._fcstTime, self._issueHour,
                 self._forecastHour)
Exemplo n.º 56
0
 def debugPrint(self):
    """ Log the parameter settings at debug level
    """
    for fmt, val in (("Parms: data = %s", self._dataDir),
                     ("Parms: MaxFcstHour = %d", self._maxFcstHour),
                     ("Parms: StateFile = %s", self._stateFile)):
       WhfLog.debug(fmt, val)
Exemplo n.º 57
0
 def debugPrint(self):
    """ Log each file currently held in state
    """
    for item in self._data:
       WhfLog.debug("State:%s", item)
Exemplo n.º 58
0
def main(argv):
    """ Main driver: regrid/downscale any newly arrived files of one type.

    Parameters
    ----------
    argv : list
        argv[0] is the file type ('HRRR', 'RAP', 'MRMS', or 'GFS'),
        argv[1] is the configuration file name

    Returns
    -------
    int
        0 for success (or empty state), 1 for bad arguments
    """

    fileType = argv[0]
    # only these file types are supported
    if fileType not in ('HRRR', 'RAP', 'MRMS', 'GFS'):
        # BUG FIX: was a Python-2-only 'print' statement; the parenthesized
        # form behaves the same under Python 2 and 3
        print('ERROR unknown file type command arg %s' % fileType)
        return 1

    # User must pass the config file into the main driver.
    configFile = argv[1]
    if not os.path.exists(configFile):
        print('ERROR forcing engine config file not found: %s' % configFile)
        return 1

    # read in fixed main params
    parms = parmRead(configFile, fileType)

    # if there is not a state file, create one now using newest data
    if not os.path.exists(parms._stateFile):
        parms.debugPrint()
        createStateFile(parms, fileType)

    # read in state
    state = State(parms._stateFile, fileType)
    if state.isEmpty():
        # error return here
        return 0

    # query each directory and get newest model run file for each, then
    # get all for that and previous issue time
    data = df.DataFiles(parms._dataDir, parms._maxFcstHour, fileType)
    data.setNewestFiles(parms._hoursBack)

    # Update the state to reflect changes, returning those files to regrid
    toProcess = state.lookForNew(data, parms._hoursBack, fileType)
    for f in toProcess:
        try:
            regrid(f, fileType, configFile)
        except:
            # best-effort: log the failure and continue with remaining files
            WhfLog.error("Could not regrid/downscale %s", f)
        else:
            WhfLog.debug("Adding new file %s, and writing state file", f)
            if not state.addFileIfNew(f):
                WhfLog.error("File %s was not new", f)
            else:
                state.write(parms._stateFile, fileType)

    # write out state (in case it has not been written yet) and exit
    state.write(parms._stateFile, fileType)
    return 0