Example #1
def init(parser, logFileName, configType, action, data):
    """Initialize log file using configFile content, and a log file name

    Parameters
    ----------
    parser : SafeConfigParser
        parser that has parsed the file on entry
    logFileName : str
        Name of the log file, without a .log suffix
    configType : str
        Short, Medium, Long, AA
    action : str
        Regrid, Layer
    data : str
        HRRR, RAP, MRMS, GFS, CFS
    """

    logging_level = parser.get('log_level', 'forcing_engine_log_level')
    # Set the logging level based on what was defined in the parm/config file
    if logging_level == 'DEBUG':
        set_level = logging.DEBUG
    elif logging_level == 'INFO':
        set_level = logging.INFO
    elif logging_level == 'WARNING':
        set_level = logging.WARNING
    elif logging_level == 'ERROR':
        set_level = logging.ERROR
    else:
        set_level = logging.CRITICAL

    # log files written to configured place with yyyymmdd subdirectory
    logging_path = parser.get('log_level', 'forcing_engine_log_dir')
    if (not df.makeDirIfNeeded(logging_path)):
        raise SystemCommandError("Cannot create " + logging_path)
    logging_path += "/"
    now = datetime.datetime.utcnow()
    logging_path += now.strftime("%Y%m%d")
    if (not df.makeDirIfNeeded(logging_path)):
        raise SystemCommandError("Cannot create " + logging_path)

    # we have two log files, one for python, one for ncl
    logging_filename = logging_path + "/" + logFileName + ".log"
    ncl_logging_filename = logging_path + "/" + logFileName + ".ncl.log"
    setup_logger('main', logging_filename, set_level)
    setup_logger('ncl', ncl_logging_filename, set_level)

    # Set the global vars to the inputs, padded to a fixed length
    # (so logging lines up nicely)
    global WhfConfigType
    WhfConfigType = configType
    WhfConfigType = WhfConfigType.ljust(WhfConfigTypeLen)

    global WhfAction
    WhfAction = action
    WhfAction = WhfAction.ljust(WhfActionLen)

    global WhfData
    WhfData = data
    WhfData = WhfData.ljust(WhfDataLen)
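
A minimal usage sketch for this variant, assuming the function lives in a module named WhfLog (inferred from the WhfLog.* calls and Whf* globals used elsewhere on this page) and that the config file provides the [log_level] options read above; the log file name is illustrative:

from ConfigParser import SafeConfigParser  # Python 2; use configparser on Python 3
import WhfLog  # assumed module name for the code above

parser = SafeConfigParser()
parser.read('wrf_hydro_forcing.parm')  # must provide the [log_level] options used by init()
# Creates <forcing_engine_log_dir>/<yyyymmdd>/Regrid_HRRR.log and .ncl.log,
# then pads the WhfConfigType/WhfAction/WhfData globals for aligned log lines.
WhfLog.init(parser, 'Regrid_HRRR', 'Short', 'Regrid', 'HRRR')
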
Example #2
def init(parser, which, initAll):
    """Initialize log file using configFile content, and a log file name

    Parameters
    ----------
    parser : SafeConfigParser
        parser that has parsed the file on entry
    logFileName : str
        Name of the log file, without a .log suffix
    configType : str
        Short, Medium, Long, AA
    action : str
        Regrid, Layer
    data : str
        HRRR, RAP, MRMS, GFS, CFS
    """

    logging_level = parser.get('log_level', 'forcing_engine_log_level')
    # Set the logging level based on what was defined in the parm/config file
    if logging_level == 'DEBUG':
        set_level = logging.DEBUG
    elif logging_level == 'INFO':
        set_level = logging.INFO
    elif logging_level == 'WARNING':
        set_level = logging.WARNING
    elif logging_level == 'ERROR':
        set_level = logging.ERROR
    else:
        set_level = logging.CRITICAL

    # log files written to configured place with yyyymmdd subdirectory
    logging_path = parser.get('log_level', 'forcing_engine_log_dir')
    if (not df.makeDirIfNeeded(logging_path)):
        raise SystemCommandError("Cannot create " + logging_path)
    logging_path += "/"
    now = datetime.datetime.utcnow()
    logging_path += now.strftime("%Y%m%d")
    if (not df.makeDirIfNeeded(logging_path)):
        raise SystemCommandError("Cannot create " + logging_path)

    # we have two log files, one for python, one for ncl, for each of the cases
    # string 'RegridHRRR', 'RegridRAP', 'RegridMRMS', 'RegridGFS', 'ShortLayer', 'AaLayer', 'LongRegrid'
    global WhfWhichChoices
    for choice in WhfWhichChoices:
        if (initAll):
            logging_filename =  logging_path + "/" + choice + ".log" 
            ncl_logging_filename =  logging_path + "/" + choice + ".ncl.log" 
            setup_logger(choice + 'main',  logging_filename, set_level)
            setup_logger(choice + 'ncl',  ncl_logging_filename, set_level)
        else:
            if (choice == which):
                logging_filename =  logging_path + "/" + choice + ".log" 
                ncl_logging_filename =  logging_path + "/" + choice + ".ncl.log" 
                setup_logger(choice + 'main',  logging_filename, set_level)
                setup_logger(choice + 'ncl',  ncl_logging_filename, set_level)
    set(which)
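
The same call pattern applies to this variant; a brief sketch under the same assumed WhfLog module name:

from ConfigParser import SafeConfigParser  # Python 2; use configparser on Python 3
import WhfLog  # assumed module name for the code above

parser = SafeConfigParser()
parser.read('wrf_hydro_forcing.parm')
# Set up the logger pair for the 'RegridHRRR' choice only;
# pass initAll=True to set up loggers for every choice in WhfWhichChoices.
WhfLog.init(parser, 'RegridHRRR', False)
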
Example #3
def forcing(configFile, action, prod, file, prod2=None, file2=None):
    """Peforms the action on the given data
       product and corresponding input file.

       Args:
           configFile (string): The config file with all the settings
           action (string):  Supported actions are:
                             'regrid' - regrid and downscale
                             'bias'   - bias correction 
                                        (requires two 
                                        products and two files)
                             'layer'  - layer (requires two
                                        products and two files)
           prod (string):  The first product [mandatory option]:
                            (HRRR or RAP)
           file (string):  The file name (full path not necessary;
                            this is derived from the Python config/
                            param file and the YYYYMMDD portion of
                            the file name).

           prod2 (string): The second product (RAP or HRRR), default
                            is None. Required for layering.
           file2 (string): The second file name, required for
                            layering, default is None.
       Returns:
           None           Performs the indicated action on the
                          files based on the type of product and
                          any other relevant information provided
                          by the Python config/param file,
                          wrf_hydro_forcing.parm
 
 
    """

    # Read the parameters from the config/param file.
    parser = SafeConfigParser()
    parser.read(configFile)
    forcing_config_label = "Short Range"

    try:
        whf.initial_setup(parser, forcing_config_label)
    except Exception as e:
        raise

    # Extract the date, model run time, and forecast hour from the file name
    # Use the fcsthr to process only the files that have a fcst hour less than
    # the max fcst hr defined in the param/config file.

    # Convert the action to lower case
    # and the product name to upper case
    # for consistent checking
    action_requested = action.lower()
    product_data_name = prod.upper()
    if action_requested == 'regrid':
        # Get the finished directory locations for the relevant product.
        if prod == 'RAP':
            regridded_dir = parser.get('regridding', 'RAP_output_dir')
            downscale_dir = parser.get('downscaling',
                                       'RAP_downscale_output_dir')
            finished_downscale_dir = parser.get('downscaling',
                                                'RAP_finished_output_dir')
            downscale_input_dir = parser.get('downscaling',
                                             'RAP_data_to_downscale')

        elif prod == 'HRRR':
            regridded_dir = parser.get('regridding', 'HRRR_output_dir')
            downscale_dir = parser.get('downscaling',
                                       'HRRR_downscale_output_dir')
            finished_downscale_dir = parser.get('downscaling',
                                                'HRRR_finished_output_dir')
            downscale_input_dir = parser.get('downscaling',
                                             'HRRR_data_to_downscale')

        (date, modelrun, fcsthr) = whf.extract_file_info(file)
        # Determine whether this current file lies within the forecast range
        # for the data product (e.g. if processing RAP, use only the 0hr-18hr forecasts).
        # Skip if this file has a forecast hour greater than the max indicated in the
        # parm/config file.
        in_fcst_range = whf.is_in_fcst_range(prod, fcsthr, parser)

        if in_fcst_range:
            # Check for RAP or GFS data products.  If this file is
            # a 0 hr fcst and is RAP or GFS, substitute each 0hr forecast
            # with the file from the previous model run and the same valid
            # time.  This is necessary because there are missing variables
            # in the 0hr forecasts (e.g. precip rate for RAP and radiation
            # in GFS).

            WhfLog.info("Regridding and Downscaling for: " + product_data_name)
            # Determine if this is a 0hr forecast for RAP data (GFS is also missing
            # some variables for 0hr forecast, but GFS is not used for Short Range
            # forcing). We will need to substitute this file for the downscaled
            # file from a previous model run with the same valid time.
            # We only need to do this for downscaled files, as the Short Range
            # forcing files that are regridded always get downscaled and we don't want
            # to do this for both the regridding and downscaling.
            if fcsthr == 0 and prod == 'RAP':
                WhfLog.info("Regridding, ignoring f0 RAP files ")
                try:
                    regridded_file = whf.regrid_data(product_data_name, file,
                                                     parser, True)
                except FilenameMatchError:
                    WhfLog.error('file name format is unexpected')
                    raise
                except NCLError:
                    WhfLog.error("FAIL could not regrid RAP file: " + file)
                    raise

                # Downscaling...
                try:
                    whf.downscale_data(product_data_name, regridded_file,
                                       parser, True, True)
                except (NCLError, ZeroHourReplacementError) as e:
                    WhfLog.error(
                        "FAIL could not downscale data for hour 0 RAP")
                    # Ignore, and check the next file in the regridded directory.
                    pass

                else:
                    # Move the finished downscaled file to the "finished" area so the triggering
                    # script can determine when to layer with other data.
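                    # Expected path layout: .../YYYYMMDDHH/YYYYMMDDHHMM.LDASIN_DOMAIN1.nc;
                    # group(1) is the 10-digit date directory, group(2) the file name.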
                    match = re.match(
                        r'.*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.nc)',
                        regridded_file)
                    if match:
                        downscaled_dir = finished_downscale_dir + "/" + match.group(
                            1)
                        input_dir = downscale_dir + "/" + match.group(1)
                        if not os.path.exists(downscaled_dir):
                            whf.mkdir_p(downscaled_dir)
                        downscaled_file = downscaled_dir + "/" + match.group(2)
                        input_file = input_dir + "/" + match.group(2)
                        try:
                            whf.move_to_finished_area(
                                parser, prod, input_file)
                        except UnrecognizedCommandError:
                            WhfLog.error(
                                'Unsupported/unrecognized command')
                            raise
                        except FilenameMatchError:
                            WhfLog.error(
                                'File move failed, name format unexpected for file %s'
                                % input_file)
                            raise

                    else:
                        WhfLog.error("FAIL- cannot move finished file: %s",
                                     regridded_file)
                        raise FilenameMatchError(
                            'File move failed, name format unexpected for file %s'
                            % regridded_file)

                    # Remove empty 0hr regridded file if it still exists
                    if os.path.exists(regridded_file):
                        cmd = 'rm -rf ' + regridded_file
                        status = os.system(cmd)
                        if status != 0:
                            WhfLog.error("Failure to remove empty file: " +
                                         regridded_file)
                            raise SystemCommandError(
                                'Cleaning regridded files, failed to remove file %s'
                                % regridded_file)

            else:
                try:
                    regridded_file = whf.regrid_data(product_data_name, file,
                                                     parser, False)
                except FilenameMatchError:
                    WhfLog.error("Regridding failed")
                    raise
                except NCLError:
                    WhfLog.error("Regridding failed")
                    raise

                # Downscaling...
                try:
                    whf.downscale_data(product_data_name, regridded_file,
                                       parser, True, False)
                except (NCLError, ZeroHourReplacementError) as e:
                    WhfLog.error(
                        "FAIL could not downscale data (not a 0hr file)")
                    raise

                # Move the downscaled file to the finished location
                match = re.match(
                    r'.*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.nc)',
                    regridded_file)
                if match:
                    full_dir = finished_downscale_dir + "/" + match.group(1)
                    input_dir = downscale_dir + "/" + match.group(1)
                    full_input_file = input_dir + "/" + match.group(2)
                    full_finished_file = full_dir + "/" + match.group(2)
                    if not os.path.exists(full_dir):
                        WhfLog.info(
                            "finished dir doesn't exist, creating it now...")
                        whf.mkdir_p(full_dir)
                    WhfLog.info("Moving now, source = %s", full_input_file)
                    try:
                        whf.move_to_finished_area(parser, prod,
                                                  full_input_file)
                        #whf.move_to_finished_area(parser, prod, full_finished_file)
                    except UnrecognizedCommandError:
                        raise
                    except FilenameMatchError:
                        raise
                else:
                    WhfLog.error("FAIL- cannot move finished file: %s",
                                 full_finished_file)
                    raise FilenameMatchError(
                        'Cannot move finished file, file %s has unexpected filename format'
                        % full_finished_file)

        else:
            # Skip processing this file, exiting...
            WhfLog.info("Skip processing, requested file is outside max fcst")
    elif action_requested == 'layer':
        WhfLog.info("Layering requested for %s and %s", prod, prod2)
        # Do some checking to make sure that there are two data products
        # and two files indicated.
        if prod2 is None:
            WhfLog.error(
                "ERROR [Short_Range_Forcing]: layering requires two products")
            raise MissingInputError('Layering requires two products')
        elif file2 is None:
            WhfLog.error(
                "ERROR [Short_Range_Forcing]: layering requires two input files"
            )
            raise MissingInputError('Layering requires two input files')
        else:
            # We have everything we need, request layering
            try:
                whf.layer_data(parser, file, file2, prod, prod2, 'Short_Range')
            except FilenameMatchError:
                raise
            except NCLError:
                raise

            try:
                whf.rename_final_files(parser, 'Short_Range')
            except FilenameMatchError:
                raise
            except UnrecognizedCommandError:
                raise

    elif action_requested == 'bias':
        WhfLog.info("Bias correction requested for %s", file)
        WhfLog.info("Bias correction not suppoted for Short Range Forcing")
Example #4
def forcing(configFile, file_in):
    """ Args:
	1.) configFile (string): The config file with all 
	    the settings.
        2.) file (string): The file name. The full path is 
            not necessary as full paths will be derived from
            parameter directory paths and datetime information.
        Returns:
	None - Performs indicated bias correction, regridding,
               and downscaling of CFSv2 data. Any errors are
               trapped and passed back to the driver.
    """

    WhfLog.debug("file_in = %s", file_in)

    # Obtain CFSv2 forcing engine parameters.
    parser = SafeConfigParser()
    parser.read(configFile)

    # Set up logging environments, etc.
    forcing_config_label = "Long_Range"
    try:
        Whf.initial_setup(parser,forcing_config_label)
    except:
        raise

    out_dir = parser.get('layering','long_range_output') 
    tmp_dir = parser.get('bias_correction','CFS_tmp_dir')

    if (not df.makeDirIfNeeded(out_dir)):
        raise MissingDirectoryError('Dir %s cannot be created' % out_dir)
    if (not df.makeDirIfNeeded(tmp_dir)):
        raise MissingDirectoryError('Dir %s cannot be created' % tmp_dir)

    # Define CFSv2 cycle date and valid time based on file name.
    (cycleYYYYMMDD,cycleHH,fcsthr,em) = Whf.extract_file_info_cfs(file_in)
    em_str = str(em)

    # Pull path to NCL bias correction module file. Export this as an 
    # environmental variable NCL refers to later. 
    nclBiasMod = parser.get('exe','CFS_bias_correct_mod')
    os.environ["CFS_NCL_BIAS_MOD"] = nclBiasMod

    # Establish datetime objects
    dateCurrent = datetime.datetime.today()
    dateCycleYYYYMMDDHH = datetime.datetime(year=int(cycleYYYYMMDD[0:4]),
                          month=int(cycleYYYYMMDD[4:6]),
                          day=int(cycleYYYYMMDD[6:8]),
                          hour=cycleHH)
    dateFcstYYYYMMDDHH = dateCycleYYYYMMDDHH + \
                         datetime.timedelta(seconds=fcsthr*3600)

    # Determine if this is a 0hr forecast file or not.
    if dateFcstYYYYMMDDHH == dateCycleYYYYMMDDHH:
        fFlag = 1 
    else:
        fFlag = 0 
    # Establish final output directories to hold 'LDASIN' files used for
    # WRF-Hydro long-range forecasting. If the directory does not exist,
    # create it.
    out_path = out_dir + "/Member_" + em_str.zfill(2) + "/" + \
               dateCycleYYYYMMDDHH.strftime("%Y%m%d%H")

    try:
        Whf.mkdir_p(out_path)
    except:
        raise

    in_fcst_range = Whf.is_in_fcst_range("CFSv2",fcsthr,parser)

    if in_fcst_range:
        # First, bias-correct CFSv2 data and generate hourly files 
        # from six-hour forecast
        WhfLog.info("Bias correcting for CFSv2 cycle: " + \
                     dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') + \
                     " CFSv2 forecast time: " + dateFcstYYYYMMDDHH.strftime('%Y%m%d%H'))
        try:
            Whf.bias_correction('CFSV2',file_in,dateCycleYYYYMMDDHH,
                                dateFcstYYYYMMDDHH,parser, em = em)
        except (MissingFileError,NCLError):
            raise

        # Second, regrid to the conus IOC domain
        # Loop through each hour in a six-hour CFSv2 forecast time step, compose temporary filename 
        # generated from bias-correction and call the regridding to go to the conus domain.
        if fFlag == 1:
            begCt = 6 
            endCt = 7
        else:
            begCt = 1
            endCt = 7
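        # Each CFSv2 file spans a six-hour window; step back from the forecast
        # valid time to build hourly timestamps (e.g. a file valid at 18Z yields
        # 13Z-18Z, while a 0hr forecast yields only its own valid time).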
        for hour in range(begCt,endCt):
            dateTempYYYYMMDDHH = dateFcstYYYYMMDDHH - datetime.timedelta(seconds=(6-hour)*3600)

            fileBiasCorrected = tmp_dir + "/CFSv2_bias_corrected_TMP_" + \
                                dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') + "_" + \
                                dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + ".M" + \
                                em_str.zfill(2) + ".nc"
            WhfLog.info("Regridding CFSv2 to conus domain for cycle: " + \
                         dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') + \
                         " forecast time: " + dateTempYYYYMMDDHH.strftime('%Y%m%d%H'))
            try:
                fileRegridded = Whf.regrid_data("CFSV2",fileBiasCorrected,parser)
            except (MissingFileError,NCLError):
                raise

            # Double check to make sure file was created, delete temporary bias-corrected file
            try:
                Whf.file_exists(fileRegridded)
            except MissingFileError:
                raise	
            cmd = "rm -rf " + fileBiasCorrected
            status = os.system(cmd)
            if status != 0:
                raise SystemCommandError('Command %s failed.' % cmd)

  
        # Third, perform topography downscaling to generate final
        # Loop through each hour in a six-hour CFSv2 forecast time step, compose temporary filename
        # generated from regridding and call the downscaling function.
        for hour in range(begCt,endCt):
            dateTempYYYYMMDDHH = dateFcstYYYYMMDDHH - datetime.timedelta(seconds=(6-hour)*3600)

            WhfLog.info("Downscaling CFSv2 for cycle: " +
                         dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') +
                         " forecast time: " + dateTempYYYYMMDDHH.strftime('%Y%m%d%H'))
            fileRegridded = tmp_dir + "/CFSv2_bias_corrected_TMP_" + \
                            dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') + "_" + \
                                dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + \
                                "_regridded.M" + em_str.zfill(2) + ".nc"
            LDASIN_path_tmp = tmp_dir + "/" + dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + "00.LDASIN_DOMAIN1.nc"
            LDASIN_path_final = out_path + "/" + dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + "00.LDASIN_DOMAIN1"
            try:
                Whf.downscale_data("CFSv2",fileRegridded,parser, out_path=LDASIN_path_tmp, \
                                   verYYYYMMDDHH=dateTempYYYYMMDDHH)
            except (MissingFileError,FilenameMatchError,NCLError,SystemCommandError):
                raise
            # Double check to make sure file was created, delete temporary regridded file
            try:
                Whf.file_exists(LDASIN_path_tmp)
            except MissingFileError:
                raise
            # Rename file to conform to WRF-Hydro expectations
            cmd = "mv " + LDASIN_path_tmp + " " + LDASIN_path_final
            status = os.system(cmd)
            if status != 0:
                raise SystemCommandError('Command %s failed.'%cmd)
            try:
                Whf.file_exists(LDASIN_path_final)
            except MissingFileError:
                raise
            cmd = "rm -rf " + fileRegridded
            status = os.system(cmd)
            if status != 0:
                raise SystemCommandError('Command %s failed.'%cmd)
       
	WhfLog.info("Long_Range processing for %s%d Forecast Hour: %d Ensemble: %s",
                cycleYYYYMMDD, cycleHH, fcsthr, em_str)
    else:
        # Skip processing this file. Exit gracefully with a 0 exit status.
        WhfLog.info("Requested file is outside max fcst for CFSv2")
Example #5
def forcing(configFile, action, prod, file, prod2=None, file2=None):
    """Peforms the action on the given data
       product and corresponding input file.

       Args:
           configFile (string): name of file with settings
           action (string):  Supported actions are:
                             'regrid' - regrid and downscale
                             'bias'   - bias correction 
                                        (requires two 
                                        products and two files)
                             'layer'  - layer (requires two
                                        products and two files)
           prod (string):  The first product [mandatory option]:
                            (GFS)
           file (string):  The file name (full path not necessary;
                            this is derived from the Python config/
                            param file and the YYYYMMDD portion of
                            the file name).

           prod2 (string): The second product (????), default
                            is None. Required for layering.
           file2 (string): The second file name, required for
                            layering, default is None.
       Returns:
           None           Performs the indicated action on the
                          files based on the type of product and
                          any other relevant information provided
                          by the Python config/param file,
                          wrf_hydro_forcing.parm
 
 
    """


    # Read the parameters from the config/param file.
    parser = SafeConfigParser()
    try:
        parser.read(configFile)
    except (NoSectionErrorException, DuplicateSectionErrorException,\
            DuplicateOptionErrorException,MissingSectionHeaderErrorException,\
            ParsingErrorException) as e:
        raise

    # Set up logging, environments, etc.
    forcing_config_label = 'Medium_Range'
    whf.initial_setup(parser,forcing_config_label)


    # Extract the date, model run time, and forecast hour from the file name
    # Use the fcsthr to process only the files that have a fcst hour less than
    # the max fcst hr defined in the param/config file.
    
    
    # Convert the action to lower case 
    # and the product name to upper case
    # for consistent checking
    action_requested = action.lower()
    product_data_name = prod.upper()
    regridded_dir = parser.get('regridding','GFS_output_dir')
    downscale_dir = parser.get('downscaling','GFS_downscale_output_dir')
    finished_downscale_dir = parser.get('downscaling','GFS_finished_output_dir')
    final_dir = parser.get('layering','medium_range_output')
    if action_requested == 'regrid':
        (date,modelrun,fcsthr) = whf.extract_file_info(file)
        # Determine whether this current file lies within the forecast range
        # for the data product (e.g. if processing RAP, use only the 0hr-18hr forecasts).
        # Skip if this file has a forecast hour greater than the max indicated in the 
        # parm/config file.
        in_fcst_range = whf.is_in_fcst_range(prod, fcsthr, parser)

        if in_fcst_range:
            # Check for RAP or GFS data products.  If this file is
            # a 0 hr fcst and is RAP or GFS, substitute each 0hr forecast
            # with the file from the previous model run and the same valid
            # time.  This is necessary because there are missing variables
            # in the 0hr forecasts (e.g. precip rate for RAP and radiation
            # in GFS).
 
            WhfLog.info("Regridding and Downscaling for %s", product_data_name)
            # Determine if this is a 0hr forecast for RAP data (GFS is also missing
            # some variables for 0hr forecast, but GFS is not used for Medium Range
            # forcing). We will need to substitute this file for the downscaled
            # file from a previous model run with the same valid time.  
            # We only need to do this for downscaled files, as the Medium Range 
            # forcing files that are regridded always get downscaled and we don't want
            # to do this for both the regridding and downscaling.
            if fcsthr == 0 and prod == 'GFS':
                WhfLog.info("Regridding (ignoring f0 GFS files) %s: ", file )
                try:
                    regridded_file = whf.regrid_data(product_data_name, file, parser, True)
                except (FilenameMatchError,NCLError,MissingFileError) as e:
                    WhfLog.error('Failure:regridding of GFS (ignore 0hr fcst) file: ' + file)
                    WhfLog.error(e) 
                    raise
                try:
                    whf.downscale_data(product_data_name,regridded_file, parser, True, True)                
                except (MissingFileError, SystemCommandError,\
                        NCLError) as e:
                    WhfLog.error('Downscaling GFS failed: %s', e)
                    raise

                match = re.match(r'.*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.nc)',regridded_file)
                if match:
                    ymd_dir = match.group(1)
                    file_only = match.group(2)
                    downscaled_dir = downscale_dir + "/" + ymd_dir
                    downscaled_file = downscaled_dir + "/" + file_only
                    # Check to make sure downscaled file was created
                    try:
                        whf.file_exists(downscaled_file)
                    except MissingFileError as mfe:
                        WhfLog.error('Downscaling, non-existent downscaled file: ' + downscaled_file)
                        WhfLog.error(mfe)
                    try:
                        whf.rename_final_files(parser,"Medium_Range")
                    except FilenameMatchError as fme:
                        WhfLog.error('Failed to rename final files due to unexpected filename format: %s', fme)
                    except UnrecognizedCommandError as uce:
                        WhfLog.error('Failed to rename final files due to unrecognized/unsupported request: %s', uce)
                else:
                    raise FilenameMatchError('MediumRangeForcing regridded_file %s has unexpected filename format' % regridded_file)
                # Remove empty 0hr regridded file if it still exists
                if os.path.exists(regridded_file):
                    cmd = 'rm -rf ' + regridded_file
                    status = os.system(cmd)
                    if status != 0:
                        WhfLog.error("Failure to remove empty file: " + regridded_file)
                        raise SystemCommandError('MediumRangeForcing failed to clean up regridded file %s'%(regridded_file))
            else:
                WhfLog.info("Regridding non-zero hour fcst%s: ", file )
                try:
                    regridded_file = whf.regrid_data(product_data_name, file, parser, False)
                except (FilenameMatchError, NCLError) as e:
                    WhfLog.error('Regridding failed for GFS non-zero fcst regrid file: ' + file) 
                    raise
          
                try:
                    whf.downscale_data(product_data_name,regridded_file, parser,True, False)                
                except (MissingFileError, SystemCommandError, NCLError):
                    raise

                match = re.match(r'.*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.nc)',regridded_file)
                if match:
                    ymd_dir = match.group(1)
                    file_only = match.group(2)
                    downscaled_dir = downscale_dir + "/" + ymd_dir
                    downscaled_file = downscaled_dir + "/" + file_only
                    # Check to make sure downscaled file was created
                    try:
                        whf.file_exists(downscaled_file)
                    except MissingFileError:
                        raise
                    try:
                        whf.rename_final_files(parser,"Medium_Range")
                    except (FilenameMatchError, UnrecognizedCommandError) as e:
                        raise

                else:
                    raise FilenameMatchError('MediumRangeForcing renaming finished file failed, unexpected filename format for %s'%(regridded_file)) 
        else:
            # Skip processing this file, exiting...
            WhfLog.info("Skip processing, requested file is outside max fcst")
    else:
        WhfLog.info("Unsupported action requested. Only regridding (and downscaling) performed for Medium Range")