Example #1
0
def initialize(source_dir, destination_dir):
   '''Initialize by looking at source_dir, and creating destination_dir
   Args:
      source_dir (string) : Full path to source data, subdir yyyymmdd
      destination_dir(string) : Full path to destination
   Returns:
      list of source file paths, ordered
   '''
   try:
      #Source directory
      dirExists(source_dir)
   except MissingDirectoryError:
      print "Source directory missing. Check directory path ", source_dir
      files = []
   else:
      # Get a directory listing and save all files with the specified
      # extension.
      files = whf.get_filepaths(source_dir)
      files = sorted(files)
      print "Numfiles in ", source_dir, " = ", len(files)

   try:
      #Destination directory
      dirExists(destination_dir)
   except MissingDirectoryError:
      print "Destination directory does not exist, creating it now ", destination_dir
      whf.mkdir_p(destination_dir)

   return files
Example #2
0
def moveFile(fname, destination_dir):
   '''Moves one file to the destination directory.
   Args:
      fname (string):      File to move
      destination_dir (string): Full path to the destination directory

   Returns:
      None
   '''

   #separate the filename from the directory and the
   #date directory
   date_match = re.match(r'.*/([0-9]{8})',fname)
   if date_match:
      date_dir = date_match.group(1)
   else:
      print "No date directory found, exiting"
      raise MissingDirectoryError("No date directory")
            
   # Just the filename, no path
   exp = re.compile(r'.*/[0-9]{8}/(.*.grib2|.*.grb2)')
   file_match = exp.match(fname)
   if file_match:
      filename_only = file_match.group(1)
   else: 
      print "No file name match, exiting"
      raise MissingFileError("No file matching the expected pattern") 

   dest_path = destination_dir + "/" + date_dir
   whf.mkdir_p(dest_path)
   dest = dest_path + "/" + filename_only

   print "Move ", fname, " to ", dest
   shutil.move(fname, dest)
def do_layering(rap_downscale_dir, hrrr_downscale_dir, is_yellowstone=False):
    """Layer each downscaled RAP file with its matching HRRR file.

    Args:
        rap_downscale_dir (string): Directory of downscaled RAP files.
        hrrr_downscale_dir (string): Directory of downscaled HRRR files.
        is_yellowstone (bool): True when running on yellowstone, where
            the paths contain explicit /RAP/ or /HRRR/ components.

    Returns:
        None
    """
    # Go through the RAP downscaled directories and find
    # the corresponding HRRR downscaled file for each RAP file.
    rap_file_paths = whf.get_filepaths(rap_downscale_dir)
    hrrr_file_paths = whf.get_filepaths(hrrr_downscale_dir)

    # Compare the YYYYMMDD_iHH_fHHH portions of the paths.  On
    # yellowstone the product name appears in the path, so anchor on it.
    if is_yellowstone:
        rap_pattern = r'.*/RAP/[0-9]{8}/([0-9]{8}_i[0-9]{2}_f[0-9]{2,3}.*)'
        hrrr_pattern = r'.*/HRRR/[0-9]{8}/([0-9]{8}_i[0-9]{2}_f[0-9]{2,3}.*)'
    else:
        rap_pattern = r'.*/[0-9]{8}/([0-9]{8}_i[0-9]{2}_f[0-9]{2,3}.*)'
        hrrr_pattern = rap_pattern

    # BUG FIX: skip paths that do not match the expected naming pattern;
    # the original called .group(1) on a None match (AttributeError).
    rap_files = [m.group(1) for m in
                 (re.match(rap_pattern, p) for p in rap_file_paths) if m]
    hrrr_files = [m.group(1) for m in
                  (re.match(hrrr_pattern, p) for p in hrrr_file_paths) if m]

    # Find the matching files from each list and layer each pair.
    files_to_layer = set(rap_files) & set(hrrr_files)
    for common_file in files_to_layer:
        srf.forcing("layer","RAP", common_file, "HRRR", common_file)
def do_layering(rap_downscale_dir, hrrr_downscale_dir, is_yellowstone=False):
    """Layer matching downscaled RAP and HRRR files.

    For every RAP file with a corresponding HRRR file (same
    YYYYMMDD_iHH_fHHH-style relative name), a 'layer' action is issued.

    Args:
        rap_downscale_dir (string): Directory of downscaled RAP files.
        hrrr_downscale_dir (string): Directory of downscaled HRRR files.
        is_yellowstone (bool): True when paths contain /RAP/ or /HRRR/
            components (yellowstone layout).

    Returns:
        None
    """
    def _relative_names(paths, pattern):
        # Extract the comparable portion of each path.  BUG FIX: paths
        # that do not match are skipped; the original called .group(1)
        # on a None match and crashed with AttributeError.
        names = []
        for path in paths:
            match = re.match(pattern, path)
            if match:
                names.append(match.group(1))
        return names

    rap_paths = whf.get_filepaths(rap_downscale_dir)
    hrrr_paths = whf.get_filepaths(hrrr_downscale_dir)

    # Compare the YYYYMMDD_iHH_fHHH portions of the paths.
    suffix = r'/[0-9]{8}/([0-9]{8}_i[0-9]{2}_f[0-9]{2,3}.*)'
    if is_yellowstone:
        rap_files = _relative_names(rap_paths, r'.*/RAP' + suffix)
        hrrr_files = _relative_names(hrrr_paths, r'.*/HRRR' + suffix)
    else:
        rap_files = _relative_names(rap_paths, r'.*' + suffix)
        hrrr_files = _relative_names(hrrr_paths, r'.*' + suffix)

    # Layer each file present in both products.
    for common in set(rap_files) & set(hrrr_files):
        srf.forcing("layer","RAP", common, "HRRR", common)
Example #5
0
def moveFile(fname, destination_dir):
    '''Moves one file to the destination directory.
   Args:
      fname (string):      File to move
      destination_dir (string): Full path to the destination directory

   Returns:
      None
   '''

    #separate the filename from the directory and the
    #date directory
    date_match = re.match(r'.*/([0-9]{8})', fname)
    if date_match:
        date_dir = date_match.group(1)
    else:
        print "No date directory found, exiting"
        raise MissingDirectoryError("No date directory")

    # Just the filename, no path
    exp = re.compile(r'.*/[0-9]{8}/(.*.grib2|.*.grb2)')
    file_match = exp.match(fname)
    if file_match:
        filename_only = file_match.group(1)
    else:
        print "No file name match, exiting"
        raise MissingFileError("No file matching the expected pattern")

    dest_path = destination_dir + "/" + date_dir
    whf.mkdir_p(dest_path)
    dest = dest_path + "/" + filename_only

    print "Move ", fname, " to ", dest
    shutil.move(fname, dest)
Example #6
0
def initialize(source_dir, destination_dir):
    '''Initialize by looking at source_dir, and creating destination_dir
   Args:
      source_dir (string) : Full path to source data, subdir yyyymmdd
      destination_dir(string) : Full path to destination
   Returns:
      list of source file paths, ordered
   '''
    try:
        #Source directory
        dirExists(source_dir)
    except MissingDirectoryError:
        print "Source directory missing. Check directory path ", source_dir
        files = []
    else:
        # Get a directory listing and save all files with the specified
        # extension.
        files = whf.get_filepaths(source_dir)
        files = sorted(files)
        print "Numfiles in ", source_dir, " = ", len(files)

    try:
        #Destination directory
        dirExists(destination_dir)
    except MissingDirectoryError:
        print "Destination directory does not exist, creating it now ", destination_dir
        whf.mkdir_p(destination_dir)

    return files
Example #7
0
def moveFiles(source_dir, destination_dir, delay=0):
    '''Moves all the files from the source directory to the
       destination directory.
  
       Args:
           source_dir (string):      Full path to the source directory
           destination_dir (string): Full path to the destination directory
           extension (string):       File extension of files to be moved

       Returns:
           None

    '''

    try:
        #Source directory
        dir_exists(source_dir)
    
    except MissingDirectoryError:
        print "Source directory missing. Check directory path"
        
    else:
        # Get a directory listing and save all files with the specified
        # extension.
        files = whf.get_filepaths(source_dir)
        
    try:
        #Destination directory
        dir_exists(destination_dir)
    except MissingDirectoryError:
        print "Destination directory does not exist, creating it now"
        whf.mkdir_p(destination_dir)
    else:
        #move the files
        for file in files:
            #separate the filename from the directory and the
            #date directory
            date_match = re.match(r'.*/([0-9]{8})',file)
            if date_match:
                date_dir = date_match.group(1)
            else:
                print "No date directory found, exiting"
                raise MissingDirectoryError("No date directory")
            
            # Just the filename, no path
            exp = re.compile(r'.*/[0-9]{8}/(.*.grib2|.*.grb2)')
            file_match = exp.match(file)
            if file_match:
                filename_only = file_match.group(1)
            else: 
                print "No file name match, exiting"
                raise MissingFileError("No file matching the expected pattern") 

            dest = (destination_dir,date_dir,"/" ) 
            dest_path = "".join(dest)
            whf.mkdir_p(dest_path)
            dest = dest_path +  filename_only
            shutil.move(file, dest)
            time.sleep(delay)
Example #8
0
def moveFiles(source_dir, destination_dir, delay=0):
    '''Moves all the files from the source directory to the
       destination directory.
  
       Args:
           source_dir (string):      Full path to the source directory
           destination_dir (string): Full path to the destination directory
           extension (string):       File extension of files to be moved

       Returns:
           None

    '''

    try:
        #Source directory
        dir_exists(source_dir)
    
    except MissingDirectoryError:
        print "Source directory missing. Check directory path"
        
    else:
        # Get a directory listing and save all files with the specified
        # extension.
        files = whf.get_filepaths(source_dir)
        
    try:
        #Destination directory
        dir_exists(destination_dir)
    except MissingDirectoryError:
        print "Destination directory does not exist, creating it now"
        whf.mkdir_p(destination_dir)
    else:
        #move the files
        for file in files:
            #separate the filename from the directory and the
            #date directory
            date_match = re.match(r'.*/([0-9]{8})',file)
            if date_match:
                date_dir = date_match.group(1)
            else:
                print "No date directory found, exiting"
                raise MissingDirectoryError("No date directory")
            
            # Just the filename, no path
            exp = re.compile(r'.*/[0-9]{8}/(.*.grib2|.*.grb2)')
            file_match = exp.match(file)
            if file_match:
                filename_only = file_match.group(1)
            else: 
                print "No file name match, exiting"
                raise MissingFileError("No file matching the expected pattern") 

            dest = (destination_dir,date_dir,"/" ) 
            dest_path = "".join(dest)
            whf.mkdir_p(dest_path)
            dest = dest_path +  filename_only
            shutil.move(file, dest)
            time.sleep(delay)
def main():
    """Tests the regridding and downscaling of RAP and HRRR
       data for the Short Range Forcing Configuration.
    """
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # CHANGE THIS TO REFLECT WHICH RUN ENVIRONMENT:
    # YELLOWSTONE OR HYDRO-C!
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # Set flag for testing host
    #is_yellowstone = True
    is_yellowstone = False
    parser = SafeConfigParser()

    try:
        parser.read('../../parm/wrf_hydro_forcing.parm')
    except:
        print "d'oh!"

    

    # Start and end dates 
    if is_yellowstone:
         start_dt = datetime.datetime.strptime("20150930","%Y%m%d")
         end_dt = datetime.datetime.strptime("20151001","%Y%m%d")
    else:
         start_dt = datetime.datetime.strptime("20160126","%Y%m%d")
         end_dt = datetime.datetime.strptime("20160127","%Y%m%d")

    # Set the directory where the input data resides.
    # For running on yellowstone:
    # RAP_dir_base = "/glade/scratch/lpan/IOC/data/RAP"
    # HRRR_dir_base = "/glade/scratch/lpan/IOC/data/HRRR"
    # For running on hydro-c1:
    # RAP_downscale_dir =
    # "/glade/scratch/gochis/IOC_evaluation_datasets/
    # Forcing_Engine/workspace/downscaled/RAP"
    # HRRR_downscale_dir = "/glade/scratch/gochis/
    # IOC_evaluation_datasets/Forcing_Engine/workspace/downscaled/HRRR"
    RAP_dir_base = parser.get('data_dir','RAP_data')
    HRRR_dir_base = parser.get('data_dir', 'HRRR_data')
    RAP_downscale_dir = parser.get('downscaling', 'RAP_downscale_output_dir')
    HRRR_downscale_dir = parser.get('downscaling', 'HRRR_downscale_output_dir')

    all_RAP_files_with_path = whf.get_filepaths(RAP_dir_base) 
    all_HRRR_files_with_path = whf.get_filepaths(HRRR_dir_base) 

    # We are only interested in the RAP and HRRR files that are
    # within the start and end forecast times.
    HRRR_files_with_path = [x for x in all_HRRR_files_with_path if is_within_time_range(start_dt,end_dt,x,"HRRR",is_yellowstone)]
        
    RAP_files_with_path = [x for x in all_RAP_files_with_path if is_within_time_range(start_dt,end_dt,x,"RAP",is_yellowstone)]

    #for hrrr in HRRR_files_with_path:
    #    print ("process %s")%(hrrr)
    # do the processing on only the input grib files 
    #do_regrid(RAP_dir_base,'RAP', RAP_files_with_path, is_yellowstone)
    do_regrid(HRRR_dir_base, 'HRRR', HRRR_files_with_path, is_yellowstone)
def main():
    """Tests the regridding and downscaling of GFS
       data for the Medium Range Forcing Configuration.
    """
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # CHANGE THIS TO REFLECT WHICH RUN ENVIRONMENT,
    # YELLOWSTONE OR HYDRO-C1
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # Set flag for testing host
    #is_yellowstone = True
    is_yellowstone = False
    config_file = "../../parm/wrf_hydro_forcing2.parm"
    parser = SafeConfigParser()
    parser.read(config_file)

    # Pick the forecast window (start/end dates) for the current host.
    if is_yellowstone:
        window = ("20150929", "20150930")
    else:
        window = ("20160201", "20160202")
    start_dt = datetime.datetime.strptime(window[0], "%Y%m%d")
    end_dt = datetime.datetime.strptime(window[1], "%Y%m%d")

    # Input data location comes from the config/param file; examples:
    #   yellowstone: /glade/scratch/lpan/IOC/data/gfs5
    #   hydro-c1:    /var/autofs/mnt/gfsdmg1/data/grib/GFS_0.25-pgrb2
    GFS_dir_base = parser.get('data_dir', 'GFS_data')
    #GFS_downscale_dir = parser.get('downscaling', 'GFS_downscale_output_dir')
    GFS_downscale_dir = parser.get('layering', 'medium_range_output')

    # Keep only the GFS files whose time falls inside the window.
    candidates = whf.get_filepaths(GFS_dir_base)
    GFS_files_with_path = [path for path in candidates
                           if is_within_time_range(start_dt, end_dt,
                                                   path, is_yellowstone)]
    print("INFO: GFS files within time range:")
    for gfs_path in GFS_files_with_path:
        print(gfs_path)

    # Regrid/downscale only the selected input grib files.
    do_regrid(config_file, GFS_dir_base, 'GFS', GFS_files_with_path,
              is_yellowstone)
def main():
    """Tests the regridding and downscaling of GFS
       data for the Medium Range Forcing Configuration.
    """
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # CHANGE THIS TO REFLECT WHICH RUN ENVIRONMENT,
    # YELLOWSTONE OR HYDRO-C1
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # Set flag for testing host
    #is_yellowstone = True
    is_yellowstone = False
    config_file = "../../parm/wrf_hydro_forcing2.parm"
    parser = SafeConfigParser()
    parser.read(config_file)

    # Host-dependent forecast window (start and end dates).
    start_str, end_str = (("20150929", "20150930") if is_yellowstone
                          else ("20160201", "20160202"))
    start_dt = datetime.datetime.strptime(start_str, "%Y%m%d")
    end_dt = datetime.datetime.strptime(end_str, "%Y%m%d")

    # Where the input data resides (host-specific, from the config file;
    # e.g. yellowstone: /glade/scratch/lpan/IOC/data/gfs5,
    # hydro-c1: /var/autofs/mnt/gfsdmg1/data/grib/GFS_0.25-pgrb2).
    GFS_dir_base = parser.get('data_dir', 'GFS_data')
    #GFS_downscale_dir = parser.get('downscaling', 'GFS_downscale_output_dir')
    GFS_downscale_dir = parser.get('layering', 'medium_range_output')

    # Restrict processing to the GFS files inside the forecast window.
    GFS_files_with_path = [
        gfs for gfs in whf.get_filepaths(GFS_dir_base)
        if is_within_time_range(start_dt, end_dt, gfs, is_yellowstone)
    ]
    print("INFO: GFS files within time range:")
    for selected in GFS_files_with_path:
        print(selected)

    # Regrid/downscale only the selected input grib files.
    do_regrid(config_file, GFS_dir_base, 'GFS', GFS_files_with_path,
              is_yellowstone)
def forcing(configFile, action, prod, file, prod2=None, file2=None):
    """Performs the action on the given data
       product and corresponding input file.

       Args:
           configFile (string): The config file with all the settings
           action (string):  Supported actions are:
                             'regrid' - regrid and downscale
                             'bias'   - bias correction
                                        (requires two
                                        products and two files)
                             'layer'  - layer (requires two
                                        products and two files)
           prod (string):  The first product [mandatory option]:
                            (HRRR or RAP)
           file (string):  The file name (full path not necessary,
                            this is derived from the Python config/
                            param file and the YYYMMDD portion of
                            the file name).
           prod2 (string): The second product (RAP or HRRR), default
                            is None. Required for layering.
           file2 (string): The second file name, required for
                            layering, default is None.

       Returns:
           None           Performs the indicated action on the
                          files based on the type of product and
                          any other relevant information provided
                          by the Python config/param file,
                          wrf_hydro_forcing.parm
    """

    # Read the parameters from the config/param file.
    parser = SafeConfigParser()
    parser.read(configFile)
    forcing_config_label = "Short Range"

    # Setup failures propagate to the caller (the original wrapped this
    # in a try/except that only re-raised).
    whf.initial_setup(parser, forcing_config_label)

    # Convert the action to lower case
    # and the product name to upper case
    # for consistent checking
    action_requested = action.lower()
    product_data_name = prod.upper()

    # BUG FIX: compare the normalized action; the original tested the raw
    # 'action' here but 'action_requested' in the 'layer'/'bias' branches.
    if action_requested == 'regrid':
        # Get the finished directory locations for the relevant product.
        # NOTE(review): 'prod' is compared un-normalized, so only the exact
        # strings 'RAP'/'HRRR' select a product (any other value leaves
        # the *_dir variables unbound) -- confirm callers always pass
        # upper-case product names.
        if prod == 'RAP':
            regridded_dir = parser.get('regridding', 'RAP_output_dir')
            downscale_dir = parser.get('downscaling', 'RAP_downscale_output_dir')
            finished_downscale_dir = parser.get('downscaling', 'RAP_finished_output_dir')
            downscale_input_dir = parser.get('downscaling',  'RAP_data_to_downscale')

        elif prod == 'HRRR':
            regridded_dir = parser.get('regridding', 'HRRR_output_dir')
            downscale_dir = parser.get('downscaling', 'HRRR_downscale_output_dir')
            finished_downscale_dir = parser.get('downscaling', 'HRRR_finished_output_dir')
            downscale_input_dir = parser.get('downscaling',  'HRRR_data_to_downscale')

        # Extract the date, model run time, and forecast hour from the
        # file name.
        (date,modelrun,fcsthr) = whf.extract_file_info(file)
        # Determine whether this current file lies within the forecast range
        # for the data product (e.g. if processing RAP, use only the 0hr-18hr forecasts).
        # Skip if this file has a forecast hour greater than the max indicated in the
        # parm/config file.
        in_fcst_range = whf.is_in_fcst_range(prod, fcsthr, parser)

        if in_fcst_range:
            # Check for RAP or GFS data products.  If this file is
            # a 0 hr fcst and is RAP or GFS, substitute each 0hr forecast
            # with the file from the previous model run and the same valid
            # time.  This is necessary because there are missing variables
            # in the 0hr forecasts (e.g. precip rate for RAP and radiation
            # in GFS).

            WhfLog.info("Regridding and Downscaling for: "+ product_data_name)
            # Determine if this is a 0hr forecast for RAP data (GFS is also missing
            # some variables for 0hr forecast, but GFS is not used for Short Range
            # forcing). We will need to substitute this file for the downscaled
            # file from a previous model run with the same valid time.
            # We only need to do this for downscaled files, as the Short Range
            # forcing files that are regridded always get downscaled and we don't want
            # to do this for both the regridding and downscaling.
            if fcsthr == 0 and prod == 'RAP':
                WhfLog.info("Regridding, ignoring f0 RAP files " )
                try:
                    regridded_file = whf.regrid_data(product_data_name, file, parser, True)
                except FilenameMatchError:
                    WhfLog.error('file name format is unexpected')
                    raise
                except NCLError:
                    WhfLog.error("FAIL could not regrid RAP file: " + file)
                    raise

                # Downscaling...
                try:
                    whf.downscale_data(product_data_name,regridded_file, parser, True, True)
                except (NCLError, ZeroHourReplacementError):
                    WhfLog.error("FAIL could not downscale data for hour 0 RAP")
                    # Ignore, and check the next file in the regridded directory.
                    pass

                else:
                    # Move the finished downscaled file to the "finished" area so the triggering
                    # script can determine when to layer with other data.
                    match = re.match(r'.*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.nc)',regridded_file)
                    if match:
                        downscaled_dir = finished_downscale_dir + "/" + match.group(1)
                        input_dir = downscale_dir + "/" + match.group(1)
                        if not os.path.exists(downscaled_dir):
                            whf.mkdir_p(downscaled_dir)
                            downscaled_file = downscaled_dir + "/" + match.group(2)
                            input_file = input_dir + "/" + match.group(2)
                            try:
                               whf.move_to_finished_area(parser, prod, input_file)
                            except UnrecognizedCommandError:
                               WhfLog.error('Unsupported/unrecognized command')
                               raise
                            except FilenameMatchError:
                               WhfLog.error('File move failed, name format unexpected for file %s'%input_file)
                               raise

                    else:
                        WhfLog.error("FAIL- cannot move finished file: %s", regridded_file)
                        raise FilenameMatchError('File move failed, name format unexpected for file %s'%regridded_file)

                    # Remove empty 0hr regridded file if it still exists
                    if os.path.exists(regridded_file):
                        cmd = 'rm -rf ' + regridded_file
                        status = os.system(cmd)
                        if status != 0:
                            WhfLog.error("Failure to remove empty file: " + regridded_file)
                            raise SystemCommandError('Cleaning regridded files, failed to remove file %s'%regridded_file)

            else:
                try:
                    regridded_file = whf.regrid_data(product_data_name, file, parser, False)
                except FilenameMatchError:
                    WhfLog.error("Regridding failed")
                    raise
                except NCLError:
                    WhfLog.error("Regridding failed")
                    raise

                # Downscaling...
                try:
                    whf.downscale_data(product_data_name,regridded_file, parser,True, False)
                except (NCLError, ZeroHourReplacementError):
                    WhfLog.error("FAIL could not downscale data (not a 0hr file)" )
                    raise

                # Move the downscaled file to the finished location
                match = re.match(r'.*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.nc)',regridded_file)
                if match:
                    full_dir = finished_downscale_dir + "/" + match.group(1)
                    input_dir = downscale_dir + "/" + match.group(1)
                    full_input_file = input_dir + "/" + match.group(2)
                    full_finished_file = full_dir + "/" + match.group(2)
                    if not os.path.exists(full_dir):
                        WhfLog.info("finished dir doesn't exist, creating it now...")
                        whf.mkdir_p(full_dir)
                    WhfLog.info("Moving now, source = %s", full_input_file)
                    try:
                        whf.move_to_finished_area(parser, prod, full_input_file)
                        #whf.move_to_finished_area(parser, prod, full_finished_file)
                    except UnrecognizedCommandError:
                        raise
                    except FilenameMatchError:
                        raise
                else:
                    WhfLog.error("FAIL- cannot move finished file: %s", full_finished_file)
                    raise FilenameMatchError('Cannot move finished file, file %s has unexpected filename format'%full_finished_file)

        else:
            # Skip processing this file, exiting...
            WhfLog.info("Skip processing, requested file is outside max fcst")
    elif action_requested == 'layer':
        WhfLog.info("Layering requested for %s and %s", prod, prod2)
        # Do some checking to make sure that there are two data products
        # and two files indicated.
        # BUG FIX: these error paths called an undefined name 'logger'
        # (NameError); use WhfLog like the rest of this function.
        if prod2 is None:
            WhfLog.error("ERROR [Short_Range_Forcing]: layering requires two products")
            raise MissingInputError('Layering requires two products')
        elif file2 is None:
            WhfLog.error("ERROR [Short_Range_Forcing]: layering requires two input files")
            raise MissingInputError('Layering requires two input files')
        else:
            # We have everything we need, request layering
            try:
                whf.layer_data(parser,file, file2, prod, prod2, 'Short_Range')
            except FilenameMatchError:
                raise
            except NCLError:
                raise

            try:
                whf.rename_final_files(parser,'Short_Range')
            except FilenameMatchError:
                raise
            except UnrecognizedCommandError:
                raise

    elif action_requested == 'bias':
        WhfLog.info("Bias correction requested for %s", file)
        WhfLog.info("Bias correction not suppoted for Short Range Forcing")
def forcing(config, action, prod, file):
    """Peforms the action on the given data
       product and corresponding input file.

       Args:
           config (string) : Config file name
           action (string):  Supported actions are:
                             'regrid' - regrid and downscale
           prod (string):  The first product [mandatory option]:
                            (MRMS, HRRR or RAP)
           file (string):  The file name (full path not necessary,
                            this is derived from the Python config/
                            param file and the YYYMMDD portion of 
                            the file name.

       Returns:
           None           Performs the indicated action on the
                          files based on the type of product and
                          any other relevant information provided
                          by the Python config/param file,
                          wrf_hydro_forcing.parm
 
 
    """

    # Read the parameters from the config/param file.
    parser = SafeConfigParser()
    parser.read(config)

    # Set up logging, environments, etc.
    forcing_config_label = "Anal_Assim"
    whf.initial_setup(parser, forcing_config_label)

    # Convert the action to lower case
    # and the product name to upper case
    # for consistent checking
    action_requested = action.lower()
    product_data_name = prod.upper()

    # For analysis and assimilation, only 0hr, 3hr forecast fields from HRRR/RAP
    # are necessary. 3hr forecast files are already regridded and downscaled
    # from the short-range configuration, so only 0hr forecast files are regridded/downscaled
    # here. In addition, MRMS data will be regridded, when available.
    if action == "regrid":
        (date, modelrun, fcsthr) = whf.extract_file_info(file)
        # Usually check for forecast range, but only 0, 3 hr forecast/analysis data used

        # Check for HRRR, RAP, MRMS products.
        WhfLog.info("Regridding and Downscaling for %s", product_data_name)

        if fcsthr == 0 and prod == "HRRR":
            downscale_dir = parser.get("downscaling", "HRRR_downscale_output_dir_0hr")
            try:
                regridded_file = whf.regrid_data(product_data_name, file, parser, False, zero_process=True)
            except (FilenameMatchError, NCLError) as e:
                WhfLog.error("Unexpected filename format encountered while regridding 0hr HRRR")
                raise
            except NCLError:
                WhfLog.error("NCL error encountered while regridding 0hr HRRR")
                raise
            try:
                whf.downscale_data(product_data_name, regridded_file, parser, False, False, zero_process=True)

            except (FilenameMatchError, NCLError) as e:
                WhfLog.error("Unexpected filename format encountered while downscaling 0hr HRRR")
                raise
            except NCLError:
                WhfLog.error("NCL error encountered while downscaling 0hr HRRR")
                raise

            # Move downscaled file to staging area where triggering will monitor
            match = re.match(r".*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.nc)", regridded_file)
            if match:
                full_dir = downscale_dir + "/" + match.group(1)
                full_finished_file = full_dir + "/" + match.group(2)
                # File should have been created in downscale_data step.
                try:
                    whf.file_exists(full_finished_file)
                except UnrecognizedCommandError:
                    WhfLog.error("File move failed for regridded/downscaled 0hr HRRR , filename format unexpected")
                    raise
                try:
                    whf.move_to_finished_area(parser, prod, full_finished_file, zero_move=True)
                except:
                    WhfLog.error("Unsupported/unrecognized command encountered while moving file to finished area.")
                    raise
            else:
                WhfLog.error("File name format is unexpected")
                raise FilenameMatchError("File name format is unexpected")
        elif fcsthr == 0 and prod == "RAP":
            downscale_dir = parser.get("downscaling", "RAP_downscale_output_dir_0hr")
            try:
                regridded_file = whf.regrid_data(product_data_name, file, parser, False, zero_process=True)
            except NCLError:
                WhfLog.error("NCL error while regridding 0hr RAP")
                raise
            except FilenameMatchError:
                WhfLog.error("Unexpected filename format encountered, cannot regrid 0hr RAP")
                raise

            try:
                whf.downscale_data(product_data_name, regridded_file, parser, False, False, zero_process=True)
            except (NCLError) as e:
                WhfLog.error("NCL error encountered while regridding 0hr RAP")
                raise

            # Move downscaled file to staging area where triggering will monitor
            match = re.match(r".*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.nc)", regridded_file)
            if match:
                full_dir = downscale_dir + "/" + match.group(1)
                full_finished_file = full_dir + "/" + match.group(2)
                # File should have been created in downscale_data step.
                try:
                    whf.file_exists(full_finished_file)
                except MissingFileError as mfe:
                    WhfLog.error("Missing file encountered while moving 0hr RAP file to staging area.")
                    raise
                try:
                    whf.move_to_finished_area(parser, prod, full_finished_file, zero_move=True)
                except UnrecognizedCommandError:
                    WhfLog.error("Unrecognized command error while trying to move 0hr RAP file to finished area")
                    raise
                except FilenameMatchError:
                    WhfLog.error("File name's format is unexpected.  Cannot move file to finished area")
                    raise
            else:
                WhfLog.error("File name's format is unexpected")
                raise FilenameMatchError("File name format is unexpected")

        elif prod == "MRMS":
            try:
                regridded_file = whf.regrid_data(product_data_name, file, parser, False)
            except NCLError:
                WhfLog.error("NCL error encountered while regridding MRMS")
                raise
            except FilenameMatchError:
                WhfLog.error("File name's format is unexpected, cannot regrid MRMS")
                raise
            # Move regridded file to staging area where triggering will monitor
            # First make sure file exists
            try:
                whf.file_exists(regridded_file)
            except MissingFileError as mfe:
                WhfLog.error("Missing file encountered while moving regridded MRMS file")
                raise

            try:
                whf.move_to_finished_area(parser, prod, regridded_file, zero_move=False)
            except UnrecognizedCommandError:
                WhfLog.error("Unrecognized command error while trying to move MRMS file to finished area")
                raise
            except FilenameMatchError:
                WhfLog.error("File name's format is unexpecte.  Cannot move file to finished area")
                raise
        else:
            WhfLog.error("Either invalid forecast hour or invalid product chosen")
            WhfLog.error("Only 00hr forecast files, and RAP or HRRR or MRMS are valid choices")
            raise InvalidArgumentError(
                "Either invalid forecast hour %s or invalid product requested %s" % (fcsthr, prod)
            )
    else:  # Invalid action selected
        WhfLog.error("ERROR [Anal_Assim_Forcing]- Invalid action selected")
        raise UnrecognizedCommandError(
            "Invalid action selection within Analysis and Assimilation regridding and downscaling"
        )
def anal_assim_layer(cycleYYYYMMDDHH, fhr, action, config):
    """ Analysis and Assimilation layering
        Performs layering/combination of RAP/HRRR/MRMS
        data for a particular analysis and assimilation
        model cycle and forecast hour, by running an NCL
        layering program and staging the resulting LDASIN
        file in the final output directory.

        Args:
            cycleYYYYMMDDHH (string): Analysis and assimilation
                                      model cycle date.
            fhr (string): Forecast hour of analysis and assimilation 
                          model cycle. Possible values are -2, -1, 0.
            action (string): Specifying which layering to do, given
                             possible available model data. Possible 
                             values are "RAP", "RAP_HRRR", and
                             "RAP_HRRR_MRMS".
            config (string) : Config file name
        Returns: 
            None: Performs specified layering to final input directory
                  used for WRF-Hydro.
        Raises:
            MissingDirectoryError: A required directory is absent.
            MissingFileError: A required parameter/input/output file is absent.
            UnrecognizedCommandError: 'action' is not a supported value.
            NCLError: The NCL layering program exited non-zero.
            SystemCommandError: Cleanup of the temporary output file failed.
    """

    # The number of '_'-separated tokens in 'action' selects the layering
    # route: 1 => RAP only, 2 => RAP+HRRR, 3 => RAP+HRRR+MRMS.
    str_split = action.split("_")
    process = len(str_split)

    # Determine specific date/time information used for composing regridded
    # file paths.
    yearCycle = int(cycleYYYYMMDDHH[0:4])
    monthCycle = int(cycleYYYYMMDDHH[4:6])
    dayCycle = int(cycleYYYYMMDDHH[6:8])
    hourCycle = int(cycleYYYYMMDDHH[8:10])
    fhr = int(fhr)

    cycleDate = datetime.datetime(year=yearCycle, month=monthCycle, day=dayCycle, hour=hourCycle)
    validDate = cycleDate + datetime.timedelta(hours=fhr)
    # 3-hr forecast files are stored under the model run that produced them,
    # i.e. three hours before the valid time.
    fcstWindowDate = validDate - datetime.timedelta(hours=3)

    cycleStr = cycleDate.strftime("%Y%m%d%H")
    validStr = validDate.strftime("%Y%m%d%H")

    # HRRR/RAP files necessary for fluxes and precipitation data.
    # Obtain analysis and assimilation configuration parameters.
    parser = SafeConfigParser()
    parser.read(config)
    out_dir = parser.get("layering", "analysis_assimilation_output")
    tmp_dir = parser.get("layering", "analysis_assimilation_tmp")
    qpe_parm_dir = parser.get("layering", "qpe_combine_parm_dir")
    hrrr_ds_dir_3hr = parser.get("downscaling", "HRRR_finished_output_dir")
    hrrr_ds_dir_0hr = parser.get("downscaling", "HRRR_finished_output_dir_0hr")
    rap_ds_dir_3hr = parser.get("downscaling", "RAP_finished_output_dir")
    rap_ds_dir_0hr = parser.get("downscaling", "RAP_finished_output_dir_0hr")
    mrms_ds_dir = parser.get("regridding", "MRMS_finished_output_dir")
    layer_exe = parser.get("exe", "Analysis_Assimilation_layering")
    ncl_exec = parser.get("exe", "ncl_exe")

    # In case it is the first time through, create the output dirs.
    df.makeDirIfNeeded(out_dir)
    df.makeDirIfNeeded(tmp_dir)

    # Sanity checking
    try:
        whf.dir_exists(out_dir)
        whf.dir_exists(tmp_dir)
        whf.dir_exists(qpe_parm_dir)
        whf.dir_exists(hrrr_ds_dir_3hr)
        whf.dir_exists(hrrr_ds_dir_0hr)
        whf.dir_exists(rap_ds_dir_3hr)
        whf.dir_exists(rap_ds_dir_0hr)
        whf.dir_exists(mrms_ds_dir)
        whf.file_exists(layer_exe)
    except MissingDirectoryError:
        WhfLog.error("Missing directory during preliminary checking of Analysis Assimilation layering")
        raise

    # Establish final output directories to hold 'LDASIN' files used for
    # WRF-Hydro long-range forecasting. If the directory does not exist,
    # create it.
    out_path = out_dir + "/" + cycleStr
    whf.mkdir_p(out_path)

    def _ldasin_path(base_dir, run_date):
        # <base>/<YYYYMMDDHH model-run dir>/<YYYYMMDDHH valid>00.LDASIN_DOMAIN1.nc
        return (base_dir + "/" + run_date.strftime("%Y%m%d%H") + "/"
                + validStr + "00.LDASIN_DOMAIN1.nc")

    # Compose necessary input file paths. 0-hr files live under the valid
    # date's model run; 3-hr files under the run three hours earlier.
    hrrr0Path = _ldasin_path(hrrr_ds_dir_0hr, validDate)
    hrrr3Path = _ldasin_path(hrrr_ds_dir_3hr, fcstWindowDate)
    rap0Path = _ldasin_path(rap_ds_dir_0hr, validDate)
    rap3Path = _ldasin_path(rap_ds_dir_3hr, fcstWindowDate)
    mrmsPath = _ldasin_path(mrms_ds_dir, validDate)

    # Monthly bias-correction and weighting parameter files.
    month = validDate.strftime("%m")
    hrrrBiasPath = qpe_parm_dir + "/HRRR_NLDAS-CPC_bias-corr_m" + month + "_v9_wrf1km.grb2"
    hrrrWgtPath = qpe_parm_dir + "/HRRR_wgt_m" + month + "_v8_wrf1km.grb2"
    mrmsBiasPath = qpe_parm_dir + "/MRMS_radonly_NLDAS-CPC_bias-corr_m" + month + "_v9_wrf1km-sm60.grb2"
    mrmsWgtPath = qpe_parm_dir + "/MRMS_radonly_wgt_m" + month + "_v8_wrf1km.grb2"
    rapBiasPath = qpe_parm_dir + "/RAPD_NLDAS-CPC_bias-corr_m" + month + "_v9_wrf1km.grb2"
    rapWgtPath = qpe_parm_dir + "/RAPD_wgt_m" + month + "_v8_wrf1km.grb2"

    # Sanity checking on parameter data
    try:
        whf.file_exists(hrrrBiasPath)
        whf.file_exists(hrrrWgtPath)
        whf.file_exists(mrmsBiasPath)
        whf.file_exists(mrmsWgtPath)
        whf.file_exists(rapBiasPath)
        whf.file_exists(rapWgtPath)
    except MissingFileError:
        WhfLog.error("Missing file encountered while checking parameter data for AA")
        raise

    # Compose output file paths
    LDASIN_path_tmp = tmp_dir + "/" + validStr + "00.LDASIN_DOMAIN1_TMP.nc"
    LDASIN_path_final = out_path + "/" + validStr + "00.LDASIN_DOMAIN1"

    # Select the input files required by the chosen processing path.
    if process == 1:  # RAP only
        WhfLog.info("Layering and Combining RAP only for cycle date: " + cycleStr + " valid date: " + validStr)
        required_inputs = [rap0Path, rap3Path]
        missing_msg = "Missing RAP files for layering"
    elif process == 2:  # HRRR and RAP only
        WhfLog.info("Layering and Combining RAP and HRRR for cycle date: " + cycleStr + " valid date: " + validStr)
        required_inputs = [rap0Path, rap3Path, hrrr0Path, hrrr3Path]
        missing_msg = "Missing RAP or HRRR files for layering"
    elif process == 3:  # HRRR, RAP, and MRMS
        WhfLog.info("Layering and Combining RAP/HRRR/MRMS for cycle date: " + cycleStr + " valid date: " + validStr)
        required_inputs = [rap0Path, rap3Path, hrrr0Path, hrrr3Path, mrmsPath]
        missing_msg = "Missing RAP or HRRR or MRMS files for layering"
    else:  # Error out
        WhfLog.error("Invalid input action selected, invalid layer combination provided in AA.")
        raise UnrecognizedCommandError

    # Check for existence of the required input files.
    try:
        for required_file in required_inputs:
            whf.file_exists(required_file)
    except MissingFileError:
        WhfLog.error(missing_msg)
        raise

    def _ncl_param(key, value):
        # One NCL command-line assignment of a quoted string, with the
        # trailing separator space, e.g.: 'key="value"'
        return "'" + key + '="' + value + '"' + "' "

    cmd_params = (
        _ncl_param("hrrrBFile", hrrrBiasPath)
        + _ncl_param("mrmsBFile", mrmsBiasPath)
        + _ncl_param("rapBFile", rapBiasPath)
        + _ncl_param("hrrrWFile", hrrrWgtPath)
        + _ncl_param("mrmsWFile", mrmsWgtPath)
        + _ncl_param("rapWFile", rapWgtPath)
        + _ncl_param("hrrr0File", hrrr0Path)
        + _ncl_param("hrrr3File", hrrr3Path)
        + _ncl_param("rap0File", rap0Path)
        + _ncl_param("rap3File", rap3Path)
        + _ncl_param("mrmsFile", mrmsPath)
        + _ncl_param("process", str(process))
        + _ncl_param("outPath", LDASIN_path_tmp)
    )
    cmd = ncl_exec + " -Q " + cmd_params + " " + layer_exe
    status = os.system(cmd)

    if status != 0:
        WhfLog.error("Error in combining NCL program")
        raise NCLError("NCL error encountered while combining in AA")

    # Double check to make sure the file was created; raises MissingFileError
    # if the NCL program silently produced nothing.
    whf.file_exists(LDASIN_path_tmp)
    # Rename file to conform to WRF-Hydro expectations
    cmd = "mv " + LDASIN_path_tmp + " " + LDASIN_path_final
    status = os.system(cmd)
    if status != 0:
        # Logged only; the file_exists check just below raises if the
        # rename really failed.
        WhfLog.error("Failure to rename " + LDASIN_path_tmp)
    try:
        whf.file_exists(LDASIN_path_final)
    except MissingFileError:
        WhfLog.error("Missing LDASIN_path_final file")
        raise
    # Remove the temporary file (a no-op when the mv succeeded).
    cmd = "rm -rf " + LDASIN_path_tmp
    status = os.system(cmd)
    if status != 0:
        WhfLog.error("Failure to remove " + LDASIN_path_tmp)
        raise SystemCommandError
def do_layering(config_file, parser, rap_downscale_dir, hrrr_downscale_dir, mrms_downscale_dir, fcst_hr, is_yellowstone=False):
    """Layer downscaled RAP with optional HRRR and MRMS analysis data.

    For every downscaled RAP file found under rap_downscale_dir, extract
    the YYYYMMDDHH model-run directory from its path and invoke the
    analysis & assimilation layering for that model run.

    Args:
        config_file (string): Forcing-engine config file name, passed
            through to aaf.anal_assim_layer().
        parser (SafeConfigParser): Parsed config (unused here; kept for
            interface compatibility).
        rap_downscale_dir (string): Directory of downscaled RAP files;
            RAP is always present when do_layering is called.
        hrrr_downscale_dir (string or None): Directory of downscaled HRRR
            files, or None when HRRR should not be layered.
        mrms_downscale_dir (string or None): Directory of regridded MRMS
            files, or None when MRMS should not be layered.
        fcst_hr: Forecast hour passed to the layering.
        is_yellowstone (bool): True when running on the yellowstone host.

    Returns:
        None
    """
    # Set flags to be used to determine which products get layered.
    request_hrrr = hrrr_downscale_dir is not None
    # BUG FIX: this flag was previously assigned to a misspelled name
    # (request_mrmrs), so the MRMS layering path could never be taken.
    request_mrms = mrms_downscale_dir is not None

    # We will always have RAP when calling do_layering.
    print("RAP downscale dir: %s" % rap_downscale_dir)
    rap_file_paths = whf.get_filepaths(rap_downscale_dir)
    print("number of RAP files in %s, %s" % (rap_downscale_dir, len(rap_file_paths)))

    # Choose the layering action from the available products. When all
    # three are available, a single RAP_HRRR_MRMS pass does the work
    # (previously RAP_HRRR was run and then repeated with MRMS).
    if request_hrrr and request_mrms:
        action = "RAP_HRRR_MRMS"
    elif request_hrrr:
        print("HRRR true, layer RAP and HRRR")
        action = "RAP_HRRR"
    else:
        print("Only RAP requested, layering called with just RAP")
        action = "RAP"

    for rap in rap_file_paths:
        match = re.match(r'.*/RAP.*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.*)', rap)
        if match is None:
            # Skip unexpected paths instead of dying with AttributeError
            # on match.group(1).
            print("Skipping RAP file with unexpected path: %s" % rap)
            continue
        model_run = match.group(1)
        print("model run: %s, fcst hr %s" % (model_run, fcst_hr))
        # Consistency fix: always layer via aaf (the MRMS branch previously
        # called whf.anal_assim_layer, which does not match the other calls).
        aaf.anal_assim_layer(model_run, fcst_hr, action, config_file)
def main():
    """Exercises the regridding and downscaling of RAP and HRRR
       data for the Short Range Forcing Configuration.
    """
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # CHANGE THIS TO REFLECT WHICH RUN ENVIRONMENT:
    # YELLOWSTONE OR HYDRO-C!
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # Host flag for the test environment.
    #is_yellowstone = True
    is_yellowstone = False

    config_file = "../../parm/wrf_hydro_forcing.parm"
    parser = SafeConfigParser()
    parser.read(config_file)

    # Set up the logger.
    wlog.init(parser, "testAA", "AA", "Regrid", "MRMS")

    # Start and end dates match the test data present on each host.
    if is_yellowstone:
        date_range = ("20151004", "20151005")
    else:
        date_range = ("20160202", "20160204")
    start_dt = datetime.datetime.strptime(date_range[0], "%Y%m%d")
    end_dt = datetime.datetime.strptime(date_range[1], "%Y%m%d")

    # Input data locations come from the parm file; for reference:
    # yellowstone keeps raw data under /glade/scratch/lpan/IOC/data/{RAP,HRRR,MRMS},
    # hydro-c1 keeps downscaled data under
    # /glade/scratch/gochis/IOC_evaluation_datasets/Forcing_Engine/workspace/downscaled/{RAP,HRRR}.
    RAP_dir_base = parser.get('data_dir', 'RAP_data')
    HRRR_dir_base = parser.get('data_dir', 'HRRR_data')
    MRMS_dir_base = parser.get('data_dir', 'MRMS_data')
    RAP_downscale_dir = parser.get('downscaling', 'RAP_finished_output_dir')
    RAP_0hr_downscale_dir = parser.get('downscaling', 'RAP_finished_output_dir_0hr')
    HRRR_downscale_dir = parser.get('downscaling', 'HRRR_finished_output_dir')
    HRRR_0hr_downscale_dir = parser.get('downscaling', 'HRRR_finished_output_dir_0hr')
    MRMS_downscale_dir = parser.get('regridding', 'MRMS_finished_output_dir')

    # Keep only files inside [start_dt, end_dt]; the input directories are
    # continually accumulating new dates.
    def select(base_dir, product):
        candidates = whf.get_filepaths(base_dir)
        return [f for f in candidates
                if is_within_time_range(start_dt, end_dt, f, product, is_yellowstone)]

    RAP_files_with_path = select(RAP_dir_base, "RAP")
    HRRR_files_with_path = select(HRRR_dir_base, "HRRR")
    MRMS_files_with_path = select(MRMS_dir_base, "MRMS")

    #do_regrid(config_file, RAP_dir_base,'RAP', RAP_files_with_path, is_yellowstone)
    #do_regrid(config_file, HRRR_dir_base, 'HRRR', HRRR_files_with_path, is_yellowstone)
    do_regrid(config_file, MRMS_dir_base, 'MRMS', MRMS_files_with_path, is_yellowstone)
def forcing(configFile, action, prod, file, prod2=None, file2=None):
    """Performs the action on the given data
       product and corresponding input file.

       Args:
           configFile (string): name of file with settings
           action (string):  Supported actions are:
                             'regrid' - regrid and downscale
                             'bias'   - bias correction 
                                        (requires two 
                                        products and two files)
                             'layer'  - layer (requires two
                                        products and two files)
           prod (string):  The first product [mandatory option]:
                            (GFS)
           file (string):  The file name (full path not necessary,
                            this is derived from the Python config/
                            param file and the YYYMMDD portion of 
                            the file name.

          prod2 (string):   The second product (????), default
                            is None. Required for layering.
          file2 (string):   The second file name, required for 
                            layering, default is None.
       Returns:
           None           Performs the indicated action on the
                          files based on the type of product and
                          any other relevant information provided
                          by the Python config/param file,
                          wrf_hydro_forcing.parm
    """

    # Read the parameters from the config/param file.
    parser = SafeConfigParser()
    try:
        parser.read(configFile)
    except (NoSectionErrorException, DuplicateSectionErrorException,
            DuplicateOptionErrorException, MissingSectionHeaderErrorException,
            ParsingErrorException):
        # Nothing to recover from here; let the caller see the parse error.
        raise

    # Set up logging, environments, etc.
    forcing_config_label = 'Medium_Range'
    whf.initial_setup(parser, forcing_config_label)

    # Normalize the action and the product name for case-insensitive
    # checking. BUG FIX: the raw 'action'/'prod' values were previously
    # compared below, which defeated this normalization.
    action_requested = action.lower()
    product_data_name = prod.upper()

    # Read these up front so a misconfigured param file fails early.
    regridded_dir = parser.get('regridding', 'GFS_output_dir')
    downscale_dir = parser.get('downscaling', 'GFS_downscale_output_dir')
    finished_downscale_dir = parser.get('downscaling', 'GFS_finished_output_dir')
    final_dir = parser.get('layering', 'medium_range_output')

    if action_requested == 'regrid':
        # Extract the date, model run time, and forecast hour from the file
        # name; skip files whose forecast hour exceeds the max indicated in
        # the parm/config file.
        (date, modelrun, fcsthr) = whf.extract_file_info(file)
        in_fcst_range = whf.is_in_fcst_range(prod, fcsthr, parser)

        if in_fcst_range:
            WhfLog.info("Regridding and Downscaling for %s", product_data_name)
            # 0hr forecasts are missing some variables (e.g. radiation in
            # GFS), so each 0hr downscaled file is substituted with the file
            # from the previous model run at the same valid time. This is
            # only needed for downscaled files, as regridded Medium Range
            # files always get downscaled.
            if fcsthr == 0 and product_data_name == 'GFS':
                WhfLog.info("Regridding (ignoring f0 GFS files) %s: ", file)
                try:
                    regridded_file = whf.regrid_data(product_data_name, file, parser, True)
                except (FilenameMatchError, NCLError, MissingFileError) as e:
                    WhfLog.error('Failure:regridding of GFS (ignore 0hr fcst) file: ' + file)
                    WhfLog.error(e)
                    raise
                try:
                    whf.downscale_data(product_data_name, regridded_file, parser, True, True)
                except (MissingFileError, SystemCommandError, NCLError) as e:
                    # BUG FIX: str(e) -- concatenating the exception object
                    # itself raised a TypeError, masking the real failure.
                    WhfLog.error('Downscaling GFS failed: ' + str(e))
                    raise

                match = re.match(r'.*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.nc)', regridded_file)
                if match:
                    ymd_dir = match.group(1)
                    file_only = match.group(2)
                    downscaled_dir = downscale_dir + "/" + ymd_dir
                    downscaled_file = downscaled_dir + "/" + file_only
                    # Check to make sure the downscaled file was created.
                    try:
                        whf.file_exists(downscaled_file)
                    except MissingFileError as mfe:
                        # Logged but deliberately not re-raised (best effort).
                        WhfLog.error('Downscaling, non-existent downscaled file: ' + downscaled_file)
                        WhfLog.error(mfe)
                    try:
                        whf.rename_final_files(parser, "Medium_Range")
                    except FilenameMatchError as fme:
                        # BUG FIX: str(fme)/str(uce) -- string + exception
                        # concatenation raised a TypeError.
                        WhfLog.error('Failed to rename final files due to unexpected filename format: ' + str(fme))
                    except UnrecognizedCommandError as uce:
                        WhfLog.error('Failed to rename final files due to unrecognized/unsupported request: ' + str(uce))
                else:
                    # BUG FIX: this previously raised a misspelled
                    # 'FilneameMatchError', producing a NameError instead.
                    raise FilenameMatchError('MediumRangeForcing regridded_file %s has unexpected filename format' % regridded_file)
                # Remove the empty 0hr regridded file if it still exists.
                if os.path.exists(regridded_file):
                    cmd = 'rm -rf ' + regridded_file
                    status = os.system(cmd)
                    if status != 0:
                        WhfLog.error("Failure to remove empty file: " + regridded_file)
                        raise SystemCommandError('MediumRangeForcing failed to clean up regridded file %s' % (regridded_file))
            else:
                WhfLog.info("Regridding non-zero hour fcst%s: ", file)
                try:
                    regridded_file = whf.regrid_data(product_data_name, file, parser, False)
                except (FilenameMatchError, NCLError):
                    WhfLog.error('Regridding failed for GFS non-zero fcst regrid file: ' + file)
                    raise

                try:
                    whf.downscale_data(product_data_name, regridded_file, parser, True, False)
                except (MissingFileError, SystemCommandError, NCLError):
                    raise

                match = re.match(r'.*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.nc)', regridded_file)
                if match:
                    ymd_dir = match.group(1)
                    file_only = match.group(2)
                    downscaled_dir = downscale_dir + "/" + ymd_dir
                    downscaled_file = downscaled_dir + "/" + file_only
                    # Check to make sure the downscaled file was created.
                    try:
                        whf.file_exists(downscaled_file)
                    except MissingFileError:
                        raise
                    try:
                        whf.rename_final_files(parser, "Medium_Range")
                    except (FilenameMatchError, UnrecognizedCommandError):
                        raise
                else:
                    raise FilenameMatchError('MediumRangeForcing renaming finished file failed, unexpected filename format for %s' % (regridded_file))
        else:
            # Skip processing this file, exiting...
            WhfLog.info("Skip processing, requested file is outside max fcst")
    else:
        WhfLog.info("Unsupported action requested. Only regridding (and downscaling) performed for Medium Range")
Example #18
0
def forcing(configFile, action, prod, file, prod2=None, file2=None):
    """Performs the action on the given data
       product and corresponding input file.

       Args:
           configFile (string): The config file with all the settings
           action (string):  Supported actions are:
                             'regrid' - regrid and downscale
                             'bias'   - bias correction
                                        (requires two
                                        products and two files;
                                        not currently supported for
                                        Short Range forcing)
                             'layer'  - layer (requires two
                                        products and two files)
           prod (string):  The first product [mandatory option]:
                            (HRRR or RAP)
           file (string):  The file name (full path not necessary,
                            this is derived from the Python config/
                            param file and the YYYMMDD portion of
                            the file name.

          prod2 (string):   The second product (RAP or HRRR), default
                            is None. Required for layering.
          file2 (string):   The second file name, required for
                            layering, default is None.
       Returns:
           None           Performs the indicated action on the
                          files based on the type of product and
                          any other relevant information provided
                          by the Python config/param file,
                          wrf_hydro_forcing.parm

       Raises:
           MissingInputError, FilenameMatchError, NCLError,
           UnrecognizedCommandError, SystemCommandError
           (propagated from the whf helper functions).
    """

    # Read the parameters from the config/param file.
    parser = SafeConfigParser()
    parser.read(configFile)
    forcing_config_label = "Short Range"

    # Set up logging, working directories, etc.
    try:
        whf.initial_setup(parser, forcing_config_label)
    except Exception:
        raise

    # Extract the date, model run time, and forecast hour from the file name
    # Use the fcsthr to process only the files that have a fcst hour less than
    # the max fcst hr defined in the param/config file.

    # Convert the action to lower case
    # and the product name to upper case
    # for consistent checking
    action_requested = action.lower()
    product_data_name = prod.upper()

    # BUGFIX: compare against the normalized values computed above; the
    # original compared the raw 'action'/'prod' arguments, defeating the
    # case-insensitive intent.
    if action_requested == 'regrid':
        # Get the finished directory locations for the relevant product.
        if product_data_name == 'RAP':
            regridded_dir = parser.get('regridding', 'RAP_output_dir')
            downscale_dir = parser.get('downscaling',
                                       'RAP_downscale_output_dir')
            finished_downscale_dir = parser.get('downscaling',
                                                'RAP_finished_output_dir')
            downscale_input_dir = parser.get('downscaling',
                                             'RAP_data_to_downscale')

        elif product_data_name == 'HRRR':
            regridded_dir = parser.get('regridding', 'HRRR_output_dir')
            downscale_dir = parser.get('downscaling',
                                       'HRRR_downscale_output_dir')
            finished_downscale_dir = parser.get('downscaling',
                                                'HRRR_finished_output_dir')
            downscale_input_dir = parser.get('downscaling',
                                             'HRRR_data_to_downscale')
        else:
            # BUGFIX: an unsupported product previously fell through and
            # caused an UnboundLocalError below; fail fast instead.
            WhfLog.error("Unsupported product for Short Range regridding, "
                         "only RAP and HRRR are supported")
            raise MissingInputError(
                'Regridding supports only RAP and HRRR products')

        (date, modelrun, fcsthr) = whf.extract_file_info(file)
        # Determine whether this current file lies within the forecast range
        # for the data product (e.g. if processing RAP, use only the 0hr-18hr forecasts).
        # Skip if this file has a forecast hour greater than the max indicated in the
        # parm/config file.
        in_fcst_range = whf.is_in_fcst_range(prod, fcsthr, parser)

        if in_fcst_range:
            # Check for RAP or GFS data products.  If this file is
            # a 0 hr fcst and is RAP or GFS, substitute each 0hr forecast
            # with the file from the previous model run and the same valid
            # time.  This is necessary because there are missing variables
            # in the 0hr forecasts (e.g. precip rate for RAP and radiation
            # in GFS).

            WhfLog.info("Regridding and Downscaling for: " + product_data_name)
            # Determine if this is a 0hr forecast for RAP data (GFS is also missing
            # some variables for 0hr forecast, but GFS is not used for Short Range
            # forcing). We will need to substitute this file for the downscaled
            # file from a previous model run with the same valid time.
            # We only need to do this for downscaled files, as the Short Range
            # forcing files that are regridded always get downscaled and we don't want
            # to do this for both the regridding and downscaling.
            if fcsthr == 0 and product_data_name == 'RAP':
                WhfLog.info("Regridding, ignoring f0 RAP files ")
                try:
                    regridded_file = whf.regrid_data(product_data_name, file,
                                                     parser, True)
                except FilenameMatchError:
                    WhfLog.error('file name format is unexpected')
                    raise
                except NCLError:
                    WhfLog.error("FAIL could not regrid RAP file: " + file)
                    raise

                # Downscaling...
                try:
                    whf.downscale_data(product_data_name, regridded_file,
                                       parser, True, True)
                except (NCLError, ZeroHourReplacementError):
                    WhfLog.error(
                        "FAIL could not downscale data for hour 0 RAP")
                    # Ignore, and check the next file in the regridded directory.
                    pass

                else:
                    # Move the finished downscaled file to the "finished" area so the triggering
                    # script can determine when to layer with other data.
                    match = re.match(
                        r'.*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.nc)',
                        regridded_file)
                    if match:
                        downscaled_dir = finished_downscale_dir + "/" + match.group(
                            1)
                        input_dir = downscale_dir + "/" + match.group(1)
                        # NOTE(review): the move below only runs when
                        # downscaled_dir did not already exist; the non-0hr
                        # branch further down moves unconditionally after
                        # creating the directory — confirm this asymmetry
                        # is intended.
                        if not os.path.exists(downscaled_dir):
                            whf.mkdir_p(downscaled_dir)
                            input_file = input_dir + "/" + match.group(2)
                            try:
                                whf.move_to_finished_area(
                                    parser, prod, input_file)
                            except UnrecognizedCommandError:
                                WhfLog.error(
                                    'Unsupported/unrecognized command')
                                raise
                            except FilenameMatchError:
                                WhfLog.error(
                                    'File move failed, name format unexpected for file %s'
                                    % input_file)
                                raise

                    else:
                        WhfLog.error("FAIL- cannot move finished file: %s",
                                     regridded_file)
                        raise FilenameMatchError(
                            'File move failed, name format unexpected for file %s'
                            % regridded_file)

                    # Remove empty 0hr regridded file if it still exists
                    if os.path.exists(regridded_file):
                        cmd = 'rm -rf ' + regridded_file
                        status = os.system(cmd)
                        if status != 0:
                            WhfLog.error("Failure to remove empty file: " +
                                         regridded_file)
                            raise SystemCommandError(
                                'Cleaning regridded files, failed to remove file %s'
                                % regridded_file)

            else:
                # Non-zero forecast hour (or non-RAP product): regrid then
                # downscale without the 0hr substitution logic.
                try:
                    regridded_file = whf.regrid_data(product_data_name, file,
                                                     parser, False)
                except FilenameMatchError:
                    WhfLog.error("Regridding failed")
                    raise
                except NCLError:
                    WhfLog.error("Regridding failed")
                    raise

                # Downscaling...
                try:
                    whf.downscale_data(product_data_name, regridded_file,
                                       parser, True, False)
                except (NCLError, ZeroHourReplacementError):
                    WhfLog.error(
                        "FAIL could not downscale data (not a 0hr file)")
                    raise

                # Move the downscaled file to the finished location
                match = re.match(
                    r'.*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.nc)',
                    regridded_file)
                if match:
                    full_dir = finished_downscale_dir + "/" + match.group(1)
                    input_dir = downscale_dir + "/" + match.group(1)
                    full_input_file = input_dir + "/" + match.group(2)
                    full_finished_file = full_dir + "/" + match.group(2)
                    if not os.path.exists(full_dir):
                        WhfLog.info(
                            "finished dir doesn't exist, creating it now...")
                        whf.mkdir_p(full_dir)
                    WhfLog.info("Moving now, source = %s", full_input_file)
                    try:
                        whf.move_to_finished_area(parser, prod,
                                                  full_input_file)
                    except UnrecognizedCommandError:
                        raise
                    except FilenameMatchError:
                        raise
                else:
                    WhfLog.error("FAIL- cannot move finished file: %s",
                                 regridded_file)
                    raise FilenameMatchError(
                        'Cannot move finished file, file %s has unexpected filename format'
                        % regridded_file)

        else:
            # Skip processing this file, exiting...
            WhfLog.info("Skip processing, requested file is outside max fcst")
    elif action_requested == 'layer':
        WhfLog.info("Layering requested for %s and %s", prod, prod2)
        # Do some checking to make sure that there are two data products
        # and two files indicated.
        # BUGFIX: the original called an undefined 'logger' here (NameError);
        # use WhfLog like the rest of the module.
        if prod2 is None:
            WhfLog.error(
                "ERROR [Short_Range_Forcing]: layering requires two products")
            raise MissingInputError('Layering requires two products')
        elif file2 is None:
            WhfLog.error(
                "ERROR [Short_Range_Forcing]: layering requires two input files"
            )
            raise MissingInputError('Layering requires two input files')
        else:
            # We have everything we need, request layering
            try:
                whf.layer_data(parser, file, file2, prod, prod2, 'Short_Range')
            except FilenameMatchError:
                raise
            except NCLError:
                raise

            try:
                whf.rename_final_files(parser, 'Short_Range')
            except FilenameMatchError:
                raise
            except UnrecognizedCommandError:
                raise

    elif action_requested == 'bias':
        WhfLog.info("Bias correction requested for %s", file)
        WhfLog.info("Bias correction not supported for Short Range Forcing")
    else:
        # Unknown action: log it instead of returning silently.
        WhfLog.info("Unsupported action requested. Only regridding "
                    "(and downscaling) and layering are performed for Short Range")
def forcing(configFile, file_in):
    """Bias-correct, regrid, and downscale one CFSv2 file for the
       Long Range forcing configuration.

       Args:
           configFile (string): The config file with all
               the settings.
           file_in (string): The file name. The full path is
               not necessary as full paths will be derived from
               parameter directory paths and datetime information.
       Returns:
           None - Performs indicated bias correction, regridding,
                  and downscaling of CFSv2 data. Any errors are
                  trapped and passed back to the driver.
       Raises:
           MissingDirectoryError, MissingFileError, NCLError,
           FilenameMatchError, SystemCommandError (propagated from
           the Whf helper functions and shell commands).
    """

    WhfLog.debug("file_in = %s", file_in)

    # Obtain CFSv2 forcing engine parameters.
    parser = SafeConfigParser()
    parser.read(configFile)

    # Set up logging environments, etc.
    forcing_config_label = "Long_Range"
    try:
        Whf.initial_setup(parser, forcing_config_label)
    except Exception:
        raise

    out_dir = parser.get('layering', 'long_range_output')
    tmp_dir = parser.get('bias_correction', 'CFS_tmp_dir')

    # BUGFIX: the messages were previously built by passing the raw '%s'
    # template plus a second positional argument to the exception
    # constructor; format them explicitly instead.
    if not df.makeDirIfNeeded(out_dir):
        raise MissingDirectoryError('Dir %s cannot be created' % out_dir)
    if not df.makeDirIfNeeded(tmp_dir):
        raise MissingDirectoryError('Dir %s cannot be created' % tmp_dir)

    # Define CFSv2 cycle date and valid time based on file name.
    (cycleYYYYMMDD, cycleHH, fcsthr, em) = Whf.extract_file_info_cfs(file_in)
    em_str = str(em)

    # Pull path to NCL bias correction module file. Export this as an
    # environmental variable NCL refers to later.
    nclBiasMod = parser.get('exe', 'CFS_bias_correct_mod')
    os.environ["CFS_NCL_BIAS_MOD"] = nclBiasMod

    # Establish datetime objects for the model cycle and forecast valid time.
    dateCycleYYYYMMDDHH = datetime.datetime(year=int(cycleYYYYMMDD[0:4]),
                                            month=int(cycleYYYYMMDD[4:6]),
                                            day=int(cycleYYYYMMDD[6:8]),
                                            hour=cycleHH)
    dateFcstYYYYMMDDHH = dateCycleYYYYMMDDHH + \
                         datetime.timedelta(seconds=fcsthr*3600)

    # Determine if this is a 0hr forecast file or not.
    if dateFcstYYYYMMDDHH == dateCycleYYYYMMDDHH:
        fFlag = 1
    else:
        fFlag = 0
    # Establish final output directories to hold 'LDASIN' files used for
    # WRF-Hydro long-range forecasting. If the directory does not exist,
    # create it.
    out_path = out_dir + "/Member_" + em_str.zfill(2) + "/" + \
               dateCycleYYYYMMDDHH.strftime("%Y%m%d%H")

    try:
        Whf.mkdir_p(out_path)
    except Exception:
        raise

    in_fcst_range = Whf.is_in_fcst_range("CFSv2", fcsthr, parser)

    if in_fcst_range:
        # First, bias-correct CFSv2 data and generate hourly files
        # from six-hour forecast
        WhfLog.info("Bias correcting for CFSv2 cycle: " + \
                    dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') + \
                    " CFSv2 forecast time: " + dateFcstYYYYMMDDHH.strftime('%Y%m%d%H'))
        try:
            Whf.bias_correction('CFSV2', file_in, dateCycleYYYYMMDDHH,
                                dateFcstYYYYMMDDHH, parser, em=em)
        except (MissingFileError, NCLError):
            raise

        # Second, regrid to the conus IOC domain
        # Loop through each hour in a six-hour CFSv2 forecast time step, compose temporary filename
        # generated from bias-correction and call the regridding to go to the conus domain.
        if fFlag == 1:
            # 0hr file: only the final hour of the window is produced.
            begCt = 6
            endCt = 7
        else:
            begCt = 1
            endCt = 7
        # BUGFIX: this loop body previously used mixed tab/space
        # indentation; normalized to spaces.
        for hour in range(begCt, endCt):
            dateTempYYYYMMDDHH = dateFcstYYYYMMDDHH - datetime.timedelta(seconds=(6-hour)*3600)

            fileBiasCorrected = tmp_dir + "/CFSv2_bias_corrected_TMP_" + \
                                dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') + "_" + \
                                dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + ".M" + \
                                em_str.zfill(2) + ".nc"
            WhfLog.info("Regridding CFSv2 to conus domain for cycle: " + \
                        dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') + \
                        " forecast time: " + dateTempYYYYMMDDHH.strftime('%Y%m%d%H'))
            try:
                fileRegridded = Whf.regrid_data("CFSV2", fileBiasCorrected, parser)
            except (MissingFileError, NCLError):
                raise

            # Double check to make sure file was created, delete temporary bias-corrected file
            try:
                Whf.file_exists(fileRegridded)
            except MissingFileError:
                raise
            cmd = "rm -rf " + fileBiasCorrected
            status = os.system(cmd)
            if status != 0:
                raise SystemCommandError('Command %s failed.' % cmd)

        # Third, perform topography downscaling to generate final
        # Loop through each hour in a six-hour CFSv2 forecast time step, compose temporary filename
        # generated from regridding and call the downscaling function.
        for hour in range(begCt, endCt):
            dateTempYYYYMMDDHH = dateFcstYYYYMMDDHH - datetime.timedelta(seconds=(6-hour)*3600)

            WhfLog.info("Downscaling CFSv2 for cycle: " +
                        dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') +
                        " forecast time: " + dateTempYYYYMMDDHH.strftime('%Y%m%d%H'))
            fileRegridded = tmp_dir + "/CFSv2_bias_corrected_TMP_" + \
                            dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') + "_" + \
                            dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + \
                            "_regridded.M" + em_str.zfill(2) + ".nc"
            LDASIN_path_tmp = tmp_dir + "/" + dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + "00.LDASIN_DOMAIN1.nc"
            LDASIN_path_final = out_path + "/" + dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + "00.LDASIN_DOMAIN1"
            # NOTE(review): downscale_data gets product name "CFSv2" while
            # bias_correction/regrid_data above use "CFSV2" — confirm the
            # Whf helpers treat product names case-insensitively.
            try:
                Whf.downscale_data("CFSv2", fileRegridded, parser, out_path=LDASIN_path_tmp, \
                                   verYYYYMMDDHH=dateTempYYYYMMDDHH)
            except (MissingFileError, FilenameMatchError, NCLError, SystemCommandError):
                raise
            # Double check to make sure file was created, delete temporary regridded file
            try:
                Whf.file_exists(LDASIN_path_tmp)
            except MissingFileError:
                raise
            # Rename file to conform to WRF-Hydro expectations
            cmd = "mv " + LDASIN_path_tmp + " " + LDASIN_path_final
            status = os.system(cmd)
            if status != 0:
                raise SystemCommandError('Command %s failed.' % cmd)
            try:
                Whf.file_exists(LDASIN_path_final)
            except MissingFileError:
                raise
            cmd = "rm -rf " + fileRegridded
            status = os.system(cmd)
            if status != 0:
                raise SystemCommandError('Command %s failed.' % cmd)

        WhfLog.info("Long_Range processing for %s%d Forecast Hour: %d Ensemble: %s",
                    cycleYYYYMMDD, cycleHH, fcsthr, em_str)
    else:
        # Skip processing this file. Exit gracefully with a 0 exit status.
        WhfLog.info("Requested file is outside max fcst for CFSv2")
def main():
    """Tests the regridding and downscaling of RAP and HRRR
       data for the Short Range Forcing Configuration.
    """
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # CHANGE THIS TO REFLECT WHICH RUN ENVIRONMENT:
    # YELLOWSTONE OR HYDRO-C!
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # Set flag for testing host
    #is_yellowstone = True
    is_yellowstone = False
    parser = SafeConfigParser()
    config_file = "../../parm/b_wrf_hydro_forcing.parm" 

    try:
        parser.read(config_file)
    except:
        print "d'oh!"

   # Set up logger
    #wlog.init(parser, "testShort", "Short","Regrid","HRRR")
 

    # Start and end dates 
    if is_yellowstone:
         start_dt = datetime.datetime.strptime("20150930","%Y%m%d")
         end_dt = datetime.datetime.strptime("20151001","%Y%m%d")
    else:
         start_dt = datetime.datetime.strptime("20160215","%Y%m%d")
         end_dt = datetime.datetime.strptime("20160216","%Y%m%d")

    # Set the directory where the input data resides.
    # For running on yellowstone:
    # RAP_dir_base = "/glade/scratch/lpan/IOC/data/RAP"
    # HRRR_dir_base = "/glade/scratch/lpan/IOC/data/HRRR"
    # For running on hydro-c1:
    # RAP_downscale_dir =
    # "/glade/scratch/gochis/IOC_evaluation_datasets/
    # Forcing_Engine/workspace/downscaled/RAP"
    # HRRR_downscale_dir = "/glade/scratch/gochis/
    # IOC_evaluation_datasets/Forcing_Engine/workspace/downscaled/HRRR"
    RAP_dir_base = parser.get('data_dir','RAP_data')
    HRRR_dir_base = parser.get('data_dir', 'HRRR_data')
    RAP_downscale_dir = parser.get('downscaling', 'RAP_downscale_output_dir')
    HRRR_downscale_dir = parser.get('downscaling', 'HRRR_downscale_output_dir')

    all_RAP_files_with_path = whf.get_filepaths(RAP_dir_base) 
    all_HRRR_files_with_path = whf.get_filepaths(HRRR_dir_base) 

    # We are only interested in the RAP and HRRR files that are
    # within the start and end forecast times.
    HRRR_files_with_path = [x for x in all_HRRR_files_with_path if is_within_time_range(start_dt,end_dt,x,"HRRR",is_yellowstone)]
        
    RAP_files_with_path = [x for x in all_RAP_files_with_path if is_within_time_range(start_dt,end_dt,x,"RAP",is_yellowstone)]

    #for hrrr in HRRR_files_with_path:
    #    print ("process %s")%(hrrr)
    # do the processing on only the input grib files 
    wlog.init(parser, "testShort", "Short","Regrid","RAP")
    do_regrid(config_file,RAP_dir_base,'RAP', RAP_files_with_path, is_yellowstone)
    wlog.init(parser, "testShort", "Short","Regrid","HRRR")
    do_regrid(HRRR_dir_base, 'HRRR', HRRR_files_with_path, is_yellowstone)
def forcing(config, action, prod, file):
    """Performs the action on the given data
       product and corresponding input file.

       Args:
           config (string) : Config file name
           action (string):  Supported actions are:
                             'regrid' - regrid and downscale
           prod (string):  The first product [mandatory option]:
                            (MRMS, HRRR or RAP)
           file (string):  The file name (full path not necessary,
                            this is derived from the Python config/
                            param file and the YYYMMDD portion of
                            the file name.

       Returns:
           None           Performs the indicated action on the
                          files based on the type of product and
                          any other relevant information provided
                          by the Python config/param file,
                          wrf_hydro_forcing.parm

       Raises:
           FilenameMatchError, NCLError, MissingFileError,
           UnrecognizedCommandError, InvalidArgumentError
           (propagated from the whf helper functions).
    """

    # Read the parameters from the config/param file.
    parser = SafeConfigParser()
    parser.read(config)

    # Set up logging, environments, etc.
    forcing_config_label = "Anal_Assim"
    whf.initial_setup(parser, forcing_config_label)

    # Convert the action to lower case
    # and the product name to upper case
    # for consistent checking
    action_requested = action.lower()
    product_data_name = prod.upper()

    # For analysis and assimilation, only 0hr, 3hr forecast fields from HRRR/RAP
    # are necessary. 3hr forecast files are already regridded and downscaled
    # from the short-range configuration, so only 0hr forecast files are regridded/downscaled
    # here. In addition, MRMS data will be regridded, when available.
    # BUGFIX: compare the normalized action computed above; the original
    # compared the raw 'action' argument, defeating the lower-casing.
    if action_requested == 'regrid':
        (date, modelrun, fcsthr) = whf.extract_file_info(file)
        # Usually check for forecast range, but only 0, 3 hr forecast/analysis data used

        # Check for HRRR, RAP, MRMS products.
        WhfLog.info("Regridding and Downscaling for %s", product_data_name)

        if fcsthr == 0 and product_data_name == "HRRR":
            downscale_dir = parser.get('downscaling',
                                       'HRRR_downscale_output_dir_0hr')
            # BUGFIX: the original caught (FilenameMatchError, NCLError)
            # together with a filename-format message, making the second
            # NCLError handler unreachable; split them so each error is
            # reported accurately.
            try:
                regridded_file = whf.regrid_data(product_data_name, file, parser, False,
                                                 zero_process=True)
            except FilenameMatchError:
                WhfLog.error(
                    "Unexpected filename format encountered while regridding 0hr HRRR"
                )
                raise
            except NCLError:
                WhfLog.error("NCL error encountered while regridding 0hr HRRR")
                raise
            try:
                whf.downscale_data(product_data_name, regridded_file, parser, False, False,
                                   zero_process=True)
            except FilenameMatchError:
                WhfLog.error(
                    "Unexpected filename format encountered while downscaling 0hr HRRR"
                )
                raise
            except NCLError:
                WhfLog.error(
                    "NCL error encountered while downscaling 0hr HRRR")
                raise

            # Move downscaled file to staging area where triggering will monitor
            match = re.match(r'.*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.nc)',
                             regridded_file)
            if match:
                full_dir = downscale_dir + "/" + match.group(1)
                full_finished_file = full_dir + "/" + match.group(2)
                # File should have been created in downscale_data step.
                # BUGFIX: file_exists raises MissingFileError (see the RAP
                # branch below); the original caught UnrecognizedCommandError,
                # which file_exists does not raise.
                try:
                    whf.file_exists(full_finished_file)
                except MissingFileError:
                    WhfLog.error(
                        "Missing file encountered while moving 0hr HRRR file to staging area."
                    )
                    raise
                # BUGFIX: replaced a bare 'except:' with the specific
                # exceptions move_to_finished_area raises (mirrors the
                # RAP branch below).
                try:
                    whf.move_to_finished_area(parser,
                                              prod,
                                              full_finished_file,
                                              zero_move=True)
                except UnrecognizedCommandError:
                    WhfLog.error(
                        "Unrecognized command error while trying to move 0hr HRRR file to finished area"
                    )
                    raise
                except FilenameMatchError:
                    WhfLog.error(
                        "File name's format is unexpected.  Cannot move file to finished area"
                    )
                    raise
            else:
                WhfLog.error("File name format is unexpected")
                raise FilenameMatchError("File name format is unexpected")
        elif fcsthr == 0 and product_data_name == "RAP":
            downscale_dir = parser.get('downscaling',
                                       'RAP_downscale_output_dir_0hr')
            try:
                regridded_file = whf.regrid_data(product_data_name, file, parser, False,
                                                 zero_process=True)
            except NCLError:
                WhfLog.error("NCL error while regridding 0hr RAP")
                raise
            except FilenameMatchError:
                WhfLog.error(
                    "Unexpected filename format encountered, cannot regrid 0hr RAP"
                )
                raise

            try:
                whf.downscale_data(product_data_name, regridded_file, parser, False, False,
                                   zero_process=True)
            except NCLError:
                WhfLog.error("NCL error encountered while regridding 0hr RAP")
                raise

            # Move downscaled file to staging area where triggering will monitor
            match = re.match(r'.*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.nc)',
                             regridded_file)
            if match:
                full_dir = downscale_dir + "/" + match.group(1)
                full_finished_file = full_dir + "/" + match.group(2)
                # File should have been created in downscale_data step.
                try:
                    whf.file_exists(full_finished_file)
                except MissingFileError:
                    WhfLog.error(
                        "Missing file encountered while moving 0hr RAP file to staging area."
                    )
                    raise
                try:
                    whf.move_to_finished_area(parser,
                                              prod,
                                              full_finished_file,
                                              zero_move=True)
                except UnrecognizedCommandError:
                    WhfLog.error(
                        "Unrecognized command error while trying to move 0hr RAP file to finished area"
                    )
                    raise
                except FilenameMatchError:
                    WhfLog.error(
                        "File name's format is unexpected.  Cannot move file to finished area"
                    )
                    raise
            else:
                WhfLog.error("File name's format is unexpected")
                raise FilenameMatchError('File name format is unexpected')

        elif product_data_name == "MRMS":
            try:
                regridded_file = whf.regrid_data(product_data_name, file,
                                                 parser, False)
            except NCLError:
                WhfLog.error("NCL error encountered while regridding MRMS")
                raise
            except FilenameMatchError:
                WhfLog.error(
                    "File name's format is unexpected, cannot regrid MRMS")
                raise
            # Move regridded file to staging area where triggering will monitor
            # First make sure file exists
            try:
                whf.file_exists(regridded_file)
            except MissingFileError:
                WhfLog.error(
                    "Missing file encountered while moving regridded MRMS file"
                )
                raise

            try:
                whf.move_to_finished_area(parser,
                                          prod,
                                          regridded_file,
                                          zero_move=False)
            except UnrecognizedCommandError:
                WhfLog.error(
                    "Unrecognized command error while trying to move MRMS file to finished area"
                )
                raise
            except FilenameMatchError:
                WhfLog.error(
                    "File name's format is unexpected.  Cannot move file to finished area"
                )
                raise
        else:
            WhfLog.error(
                "Either invalid forecast hour or invalid product chosen")
            WhfLog.error(
                "Only 00hr forecast files, and RAP or HRRR or MRMS are valid choices"
            )
            raise InvalidArgumentError(
                "Either invalid forecast hour %s or invalid product requested %s"
                % (fcsthr, prod))
    else:  # Invalid action selected
        WhfLog.error("ERROR [Anal_Assim_Forcing]- Invalid action selected")
        raise UnrecognizedCommandError(
            "Invalid action selection within Analysis and Assimilation regridding and downscaling"
        )
def anal_assim_layer(cycleYYYYMMDDHH, fhr, action, config):
    """ Analysis and Assimilation layering
        Performs layering/combination of RAP/HRRR/MRMS
        data for a particular analysis and assimilation
        model cycle and forecast hour.

        Args:
            cycleYYYYMMDDHH (string): Analysis and assimilation
                                      model cycle date.
            fhr (string): Forecast hour of analysis and assimilation 
                          model cycle. Possible values are -2, -1, 0.
            action (string): Specifying which layering to do, given
                             possible available model data. Possible 
                             values are "RAP", "RAP_HRRR", and
                             "RAP_HRRR_MRMS".
            config (string) : Config file name
        Returns: 
            None: Performs specified layering to final input directory
                  used for WRF-Hydro.
        Raises:
            MissingDirectoryError: A required input/output directory is absent.
            MissingFileError: A required parameter or model input file is absent.
            UnrecognizedCommandError: 'action' is not a supported combination.
            NCLError: The NCL layering program exited with non-zero status.
            SystemCommandError: A shell command (mv/rm) failed.
    """

    # Determine specific layering route to take.  The number of
    # '_'-separated tokens in 'action' selects the combination:
    # 1 = RAP only, 2 = RAP+HRRR, 3 = RAP+HRRR+MRMS.
    str_split = action.split("_")
    process = len(str_split)

    # Determine specific date/time information used for composing regridded
    # file paths.
    yearCycle = int(cycleYYYYMMDDHH[0:4])
    monthCycle = int(cycleYYYYMMDDHH[4:6])
    dayCycle = int(cycleYYYYMMDDHH[6:8])
    hourCycle = int(cycleYYYYMMDDHH[8:10])
    fhr = int(fhr)

    dateCurrent = datetime.datetime.today()
    cycleDate = datetime.datetime(year=yearCycle,month=monthCycle,day=dayCycle, \
                hour=hourCycle)
    # Valid time is the cycle time shifted by the (possibly negative) fcst hour.
    validDate = cycleDate + datetime.timedelta(seconds=fhr * 3600)
    fcstWindowDate = validDate + datetime.timedelta(
        seconds=-3 * 3600)  # Used for 3-hr forecast

    # HRRR/RAP files necessary for fluxes and precipitation data.
    # Obtain analysis and assimiltation configuration parameters.
    parser = SafeConfigParser()
    parser.read(config)
    out_dir = parser.get('layering', 'analysis_assimilation_output')
    tmp_dir = parser.get('layering', 'analysis_assimilation_tmp')
    qpe_parm_dir = parser.get('layering', 'qpe_combine_parm_dir')
    hrrr_ds_dir_3hr = parser.get('downscaling', 'HRRR_finished_output_dir')
    hrrr_ds_dir_0hr = parser.get('downscaling', 'HRRR_finished_output_dir_0hr')
    rap_ds_dir_3hr = parser.get('downscaling', 'RAP_finished_output_dir')
    rap_ds_dir_0hr = parser.get('downscaling', 'RAP_finished_output_dir_0hr')
    mrms_ds_dir = parser.get('regridding', 'MRMS_finished_output_dir')
    layer_exe = parser.get('exe', 'Analysis_Assimilation_layering')
    ncl_exec = parser.get('exe', 'ncl_exe')

    # in case it is first time, create the output dirs
    df.makeDirIfNeeded(out_dir)
    df.makeDirIfNeeded(tmp_dir)

    # Sanity checking
    try:
        whf.dir_exists(out_dir)
        whf.dir_exists(tmp_dir)
        whf.dir_exists(qpe_parm_dir)
        whf.dir_exists(hrrr_ds_dir_3hr)
        whf.dir_exists(hrrr_ds_dir_0hr)
        whf.dir_exists(rap_ds_dir_3hr)
        whf.dir_exists(rap_ds_dir_0hr)
        whf.dir_exists(mrms_ds_dir)
        whf.file_exists(layer_exe)
    except MissingDirectoryError:
        WhfLog.error(
            "Missing directory during preliminary checking of Analysis Assimilation layering"
        )
        raise

    # Establish final output directories to hold 'LDASIN' files used for
    # WRF-Hydro long-range forecasting. If the directory does not exist,
    # create it.
    out_path = out_dir + "/" + cycleDate.strftime("%Y%m%d%H")

    whf.mkdir_p(out_path)

    # Compose necessary file paths.  0-hr products are filed under the valid
    # date; 3-hr products live under the model run 3 hours earlier but still
    # carry the valid date in the file name.
    hrrr0Path = hrrr_ds_dir_0hr + "/" + validDate.strftime("%Y%m%d%H") + \
                "/" + validDate.strftime("%Y%m%d%H") + "00.LDASIN_DOMAIN1.nc"
    hrrr3Path = hrrr_ds_dir_3hr + "/" + fcstWindowDate.strftime("%Y%m%d%H") + \
                "/" + validDate.strftime("%Y%m%d%H") + "00.LDASIN_DOMAIN1.nc"
    rap0Path = rap_ds_dir_0hr + "/" + validDate.strftime("%Y%m%d%H") + \
                "/" + validDate.strftime("%Y%m%d%H") + "00.LDASIN_DOMAIN1.nc"
    rap3Path = rap_ds_dir_3hr + "/" + fcstWindowDate.strftime("%Y%m%d%H") + \
                "/" + validDate.strftime("%Y%m%d%H") + "00.LDASIN_DOMAIN1.nc"
    mrmsPath = mrms_ds_dir + "/" + validDate.strftime("%Y%m%d%H") + \
                "/" + validDate.strftime("%Y%m%d%H") + "00.LDASIN_DOMAIN1.nc"
    # Monthly bias-correction and weight parameter grids, selected by the
    # valid month.
    hrrrBiasPath = qpe_parm_dir + "/HRRR_NLDAS-CPC_bias-corr_m" + \
                   validDate.strftime("%m") + "_v9_wrf1km.grb2"
    hrrrWgtPath = qpe_parm_dir + "/HRRR_wgt_m" + \
                  validDate.strftime("%m") + "_v8_wrf1km.grb2"
    mrmsBiasPath = qpe_parm_dir + "/MRMS_radonly_NLDAS-CPC_bias-corr_m" + \
                   validDate.strftime("%m") + "_v9_wrf1km-sm60.grb2"
    mrmsWgtPath = qpe_parm_dir + "/MRMS_radonly_wgt_m" + \
                  validDate.strftime("%m") + "_v8_wrf1km.grb2"
    rapBiasPath = qpe_parm_dir + "/RAPD_NLDAS-CPC_bias-corr_m" + \
                  validDate.strftime("%m") + "_v9_wrf1km.grb2"
    rapWgtPath = qpe_parm_dir + "/RAPD_wgt_m" + \
                 validDate.strftime("%m") + "_v8_wrf1km.grb2"

    # Sanity checking on parameter data
    try:
        whf.file_exists(hrrrBiasPath)
        whf.file_exists(hrrrWgtPath)
        whf.file_exists(mrmsBiasPath)
        whf.file_exists(mrmsWgtPath)
        whf.file_exists(rapBiasPath)
        whf.file_exists(rapWgtPath)
    except MissingFileError:
        WhfLog.error(
            "Missing file encountered while checking parameter data for AA")
        raise

    # Compose output file paths
    LDASIN_path_tmp = tmp_dir + "/" + validDate.strftime(
        '%Y%m%d%H') + "00.LDASIN_DOMAIN1_TMP.nc"
    LDASIN_path_final = out_path + "/" + validDate.strftime(
        '%Y%m%d%H') + "00.LDASIN_DOMAIN1"
    # Perform layering/combining depending on processing path.
    if process == 1:  # RAP only
        WhfLog.info("Layering and Combining RAP only for cycle date: " + \
                     cycleDate.strftime("%Y%m%d%H") + " valid date: " + \
                     validDate.strftime("%Y%m%d%H"))
        # Check for existence of input files
        try:
            whf.file_exists(rap0Path)
            whf.file_exists(rap3Path)
        except MissingFileError:
            WhfLog.error("Missing RAP files for layering")
            raise

    elif process == 2:  # HRRR and RAP only
        WhfLog.info("Layering and Combining RAP and HRRR for cycle date: " + \
                     cycleDate.strftime("%Y%m%d%H") + " valid date: " + \
                     validDate.strftime("%Y%m%d%H"))
        # Check for existence of input files
        try:
            whf.file_exists(rap0Path)
            whf.file_exists(rap3Path)
            whf.file_exists(hrrr0Path)
            whf.file_exists(hrrr3Path)
        except MissingFileError:
            WhfLog.error("Missing RAP or HRRR files for layering")
            raise
    elif process == 3:  # HRRR, RAP, and MRMS
        WhfLog.info("Layering and Combining RAP/HRRR/MRMS for cycle date: " + \
                     cycleDate.strftime("%Y%m%d%H") + " valid date: " + \
                     validDate.strftime("%Y%m%d%H"))
        # Check for existence of input files
        try:
            whf.file_exists(rap0Path)
            whf.file_exists(rap3Path)
            whf.file_exists(hrrr0Path)
            whf.file_exists(hrrr3Path)
            whf.file_exists(mrmsPath)
        except MissingFileError:
            WhfLog.error("Missing RAP or HRRR or MRMS files for layering")
            raise

    else:  # Error out
        WhfLog.error(
            "Invalid input action selected, invalid layer combination provided in AA."
        )
        raise UnrecognizedCommandError(
            "Invalid layering combination requested in AA: " + action)

    # Build NCL command-line variable assignments.  All paths are passed
    # regardless of 'process'; the NCL program uses 'process' to decide
    # which ones to read.
    hrrrB_param = "'hrrrBFile=" + '"' + hrrrBiasPath + '"' + "' "
    mrmsB_param = "'mrmsBFile=" + '"' + mrmsBiasPath + '"' + "' "
    rapB_param = "'rapBFile=" + '"' + rapBiasPath + '"' + "' "
    hrrrW_param = "'hrrrWFile=" + '"' + hrrrWgtPath + '"' + "' "
    mrmsW_param = "'mrmsWFile=" + '"' + mrmsWgtPath + '"' + "' "
    rapW_param = "'rapWFile=" + '"' + rapWgtPath + '"' + "' "
    hrrr0_param = "'hrrr0File=" + '"' + hrrr0Path + '"' + "' "
    hrrr3_param = "'hrrr3File=" + '"' + hrrr3Path + '"' + "' "
    rap0_param = "'rap0File=" + '"' + rap0Path + '"' + "' "
    rap3_param = "'rap3File=" + '"' + rap3Path + '"' + "' "
    mrms_param = "'mrmsFile=" + '"' + mrmsPath + '"' + "' "
    process_param = "'process=" + '"' + str(process) + '"' + "' "
    out_param = "'outPath=" + '"' + LDASIN_path_tmp + '"' + "' "

    cmd_params = hrrrB_param + mrmsB_param + rapB_param + \
                 hrrrW_param + mrmsW_param + rapW_param + \
                 hrrr0_param + hrrr3_param + rap0_param + rap3_param + \
                 mrms_param + process_param + out_param
    cmd = ncl_exec + " -Q " + cmd_params + " " + layer_exe
    status = os.system(cmd)

    if status != 0:
        WhfLog.error("Error in combining NCL program")
        raise NCLError("NCL error encountered while combining in AA")

    # Double check to make sure file was created, delete temporary regridded file
    whf.file_exists(LDASIN_path_tmp)
    # Rename file to conform to WRF-Hydro expectations
    cmd = "mv " + LDASIN_path_tmp + " " + LDASIN_path_final
    status = os.system(cmd)
    if status != 0:
        WhfLog.error("Failure to rename " + LDASIN_path_tmp)
        # Raise here for consistency with the 'rm' failure handling below;
        # otherwise the problem would surface later as a less-specific
        # MissingFileError from the existence check.
        raise SystemCommandError("Failure to rename " + LDASIN_path_tmp)
    try:
        whf.file_exists(LDASIN_path_final)
    except MissingFileError:
        WhfLog.error("Missing LDASIN_path_final file")
        raise
    cmd = "rm -rf " + LDASIN_path_tmp
    status = os.system(cmd)
    if status != 0:
        WhfLog.error("Failure to remove " + LDASIN_path_tmp)
        raise SystemCommandError
def forcing(configFile, action, prod, file, prod2=None, file2=None):
    """Performs the action on the given data
       product and corresponding input file.

       Args:
           configFile (string): name of file with settings
           action (string):  Supported actions are:
                             'regrid' - regrid and downscale
                             'bias'   - bias correction 
                                        (requires two 
                                        products and two files)
                             'layer'  - layer (requires two
                                        products and two files)
           prod (string):  The first product [mandatory option]:
                            (GFS)
           file (string):  The file name (full path not necessary,
                            this is derived from the Python config/
                            param file and the YYYMMDD portion of 
                            the file name.

          prod2 (string):   The second product (????), default
                            is None. Required for layering.
          file2 (string):   The second file name, required for 
                            layering, default is None.
       Returns:
           None           Performs the indicated action on the
                          files based on the type of product and
                          any other relevant information provided
                          by the Python config/param file,
                          wrf_hydro_forcing.parm
 
 
    """


    # Read the parameters from the config/param file.
    parser = SafeConfigParser()
    try:
        parser.read(configFile)
    except (NoSectionErrorException, DuplicateSectionErrorException,\
            DuplicateOptionErrorException,MissingSectionHeaderErrorException,\
            ParsingErrorException) as e:
        raise

    # Set up logging, environments, etc.
    forcing_config_label = 'Medium_Range'
    whf.initial_setup(parser,forcing_config_label)


    # Extract the date, model run time, and forecast hour from the file name
    # Use the fcsthr to process only the files that have a fcst hour less than
    # the max fcst hr defined in the param/config file.
    
    
    # Convert the action to lower case 
    # and the product name to upper case
    # for consistent checking
    action_requested = action.lower()
    product_data_name = prod.upper()
    regridded_dir = parser.get('regridding','GFS_output_dir')
    downscale_dir = parser.get('downscaling','GFS_downscale_output_dir')
    finished_downscale_dir = parser.get('downscaling','GFS_finished_output_dir')
    final_dir = parser.get('layering','medium_range_output')
    # Use the normalized action/product so that mixed-case requests
    # (e.g. 'Regrid', 'gfs') are handled consistently.
    if action_requested == 'regrid': 
        (date,modelrun,fcsthr) = whf.extract_file_info(file)
        # Determine whether this current file lies within the forecast range
        # for the data product (e.g. if processing RAP, use only the 0hr-18hr forecasts).
        # Skip if this file has a forecast hour greater than the max indicated in the 
        # parm/config file.
        in_fcst_range = whf.is_in_fcst_range(product_data_name, fcsthr, parser)

        if in_fcst_range:
            # Check for RAP or GFS data products.  If this file is
            # a 0 hr fcst and is RAP or GFS, substitute each 0hr forecast
            # with the file from the previous model run and the same valid
            # time.  This is necessary because there are missing variables
            # in the 0hr forecasts (e.g. precip rate for RAP and radiation
            # in GFS).
 
            WhfLog.info("Regridding and Downscaling for %s", product_data_name)
            # Determine if this is a 0hr forecast for RAP data (GFS is also missing
            # some variables for 0hr forecast, but GFS is not used for Medium Range
            # forcing). We will need to substitute this file for the downscaled
            # file from a previous model run with the same valid time.  
            # We only need to do this for downscaled files, as the Medium Range 
            # forcing files that are regridded always get downscaled and we don't want
            # to do this for both the regridding and downscaling.
            if fcsthr == 0 and product_data_name == 'GFS':
                WhfLog.info("Regridding (ignoring f0 GFS files) %s: ", file )
                try:
                    regridded_file = whf.regrid_data(product_data_name, file, parser, True)
                except (FilenameMatchError,NCLError,MissingFileError) as e:
                    WhfLog.error('Failure:regridding of GFS (ignore 0hr fcst) file: ' + file)
                    WhfLog.error(e) 
                    raise
                try:
                    whf.downscale_data(product_data_name,regridded_file, parser, True, True)                
                except (MissingFileError, SystemCommandError,\
                        NCLError) as e:
                    # str(e): concatenating the exception object itself
                    # would raise a TypeError while reporting the error.
                    WhfLog.error('Downscaling GFS failed: ' + str(e))
                    raise

                match = re.match(r'.*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.nc)',regridded_file)
                if match:
                    ymd_dir = match.group(1)
                    file_only = match.group(2)
                    downscaled_dir = downscale_dir + "/" + ymd_dir
                    downscaled_file = downscaled_dir + "/" + file_only
                    # Check to make sure downscaled file was created
                    try:
                        whf.file_exists(downscaled_file)
                    except MissingFileError as mfe:
                        WhfLog.error('Downscaling, non-existent downscaled file: ' + downscaled_file)
                        WhfLog.error(mfe)
                    try:
                        whf.rename_final_files(parser,"Medium_Range")
                    except FilenameMatchError as fme:
                        WhfLog.error('Failed to rename final files due to unexpected filename format: ' + str(fme)) 
              
                    except UnrecognizedCommandError as uce:
                        WhfLog.error('Failed to rename final files due to unrecognized/unsupported request: ' + str(uce))
                else:
                    # Fixed misspelled exception name (was 'FilneameMatchError',
                    # which raised a NameError instead of the intended error).
                    raise FilenameMatchError('MediumRangeForcing regridded_file %s has unexpected filename format'%regridded_file)
                # Remove empty 0hr regridded file if it still exists
                if os.path.exists(regridded_file):
                    cmd = 'rm -rf ' + regridded_file
                    status = os.system(cmd)
                    if status != 0:
                        WhfLog.error("Failure to remove empty file: " + regridded_file)
                        raise SystemCommandError('MediumRangeForcing failed to clean up regridded file %s'%(regridded_file))
            else:
                WhfLog.info("Regridding non-zero hour fcst%s: ", file )
                try:
                    regridded_file = whf.regrid_data(product_data_name, file, parser, False)
                except (FilenameMatchError, NCLError) as e:
                    WhfLog.error('Regridding failed for GFS non-zero fcst regrid file: ' + file) 
                    raise
          
                try:
                    whf.downscale_data(product_data_name,regridded_file, parser,True, False)                
                except (MissingFileError, SystemCommandError, NCLError):
                    raise

                match = re.match(r'.*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.nc)',regridded_file)
                if match:
                    ymd_dir = match.group(1)
                    file_only = match.group(2)
                    downscaled_dir = downscale_dir + "/" + ymd_dir
                    downscaled_file = downscaled_dir + "/" + file_only
                    # Check to make sure downscaled file was created
                    try:
                        whf.file_exists(downscaled_file)
                    except MissingFileError:
                        raise
                    try:
                        whf.rename_final_files(parser,"Medium_Range")
                    except (FilenameMatchError, UnrecognizedCommandError) as e:
                        raise

                else:
                    raise FilenameMatchError('MediumRangeForcing renaming finished file failed, unexpected filename format for %s'%(regridded_file)) 
        else:
            # Skip processing this file, exiting...
            WhfLog.info("Skip processing, requested file is outside max fcst")
    else:
        WhfLog.info("Unsupported action requested. Only regridding (and downscaling) performed for Medium Range")