def anal_assim_layer(cycleYYYYMMDDHH, fhr, action, config):
    """ Analysis and Assimilation layering
        Performs layering/combination of RAP/HRRR/MRMS
        data for a particular analysis and assimilation
        model cycle and forecast hour.

        Args:
            cycleYYYYMMDDHH (string): Analysis and assimilation
                                      model cycle date.
            fhr (string): Forecast hour of analysis and assimilation 
                          model cycle. Possible values are -2, -1, 0.
            action (string): Specifying which layering to do, given
                             possible available model data. Possible 
                             values are "RAP", "RAP_HRRR", and
                             "RAP_HRRR_MRMS".
            config (string) : Config file name
        Returns: 
            None: Performs specified layering to final input directory
                  used for WRF-Hydro.
    """

    # Determine specific layering route to take
    str_split = action.split("_")
    process = len(str_split)

    # Determine specific date/time information used for composing regridded
    # file paths.
    yearCycle = int(cycleYYYYMMDDHH[0:4])
    monthCycle = int(cycleYYYYMMDDHH[4:6])
    dayCycle = int(cycleYYYYMMDDHH[6:8])
    hourCycle = int(cycleYYYYMMDDHH[8:10])
    fhr = int(fhr)

    dateCurrent = datetime.datetime.today()
    cycleDate = datetime.datetime(year=yearCycle, month=monthCycle, day=dayCycle, hour=hourCycle)
    validDate = cycleDate + datetime.timedelta(seconds=fhr * 3600)
    fcstWindowDate = validDate + datetime.timedelta(seconds=-3 * 3600)  # Used for 3-hr forecast
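    # Worked example (hypothetical cycle, for illustration only):
    # cycleYYYYMMDDHH="2016050112" with fhr="-1" gives cycleDate 2016-05-01 12Z,
    # validDate 2016-05-01 11Z, and fcstWindowDate 2016-05-01 08Z (the
    # initialization time of the 3-hr forecast used below).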

    # HRRR/RAP files necessary for fluxes and precipitation data.
    # Obtain analysis and assimilation configuration parameters.
    parser = SafeConfigParser()
    parser.read(config)
    out_dir = parser.get("layering", "analysis_assimilation_output")
    tmp_dir = parser.get("layering", "analysis_assimilation_tmp")
    qpe_parm_dir = parser.get("layering", "qpe_combine_parm_dir")
    hrrr_ds_dir_3hr = parser.get("downscaling", "HRRR_finished_output_dir")
    hrrr_ds_dir_0hr = parser.get("downscaling", "HRRR_finished_output_dir_0hr")
    rap_ds_dir_3hr = parser.get("downscaling", "RAP_finished_output_dir")
    rap_ds_dir_0hr = parser.get("downscaling", "RAP_finished_output_dir_0hr")
    mrms_ds_dir = parser.get("regridding", "MRMS_finished_output_dir")
    layer_exe = parser.get("exe", "Analysis_Assimilation_layering")
    ncl_exec = parser.get("exe", "ncl_exe")

    # In case this is the first run, create the output directories
    df.makeDirIfNeeded(out_dir)
    df.makeDirIfNeeded(tmp_dir)

    # Sanity checking
    try:
        whf.dir_exists(out_dir)
        whf.dir_exists(tmp_dir)
        whf.dir_exists(qpe_parm_dir)
        whf.dir_exists(hrrr_ds_dir_3hr)
        whf.dir_exists(hrrr_ds_dir_0hr)
        whf.dir_exists(rap_ds_dir_3hr)
        whf.dir_exists(rap_ds_dir_0hr)
        whf.dir_exists(mrms_ds_dir)
        whf.file_exists(layer_exe)
    except MissingDirectoryError:
        WhfLog.error("Missing directory during preliminary checking of Analysis Assimilation layering")
        raise

    # Establish the final output directory to hold 'LDASIN' files used for
    # WRF-Hydro Analysis and Assimilation. If the directory does not exist,
    # create it.
    out_path = out_dir + "/" + cycleDate.strftime("%Y%m%d%H")

    whf.mkdir_p(out_path)

    # Compose necessary file paths
    hrrr0Path = (
        hrrr_ds_dir_0hr
        + "/"
        + validDate.strftime("%Y%m%d%H")
        + "/"
        + validDate.strftime("%Y%m%d%H")
        + "00.LDASIN_DOMAIN1.nc"
    )
    hrrr3Path = (
        hrrr_ds_dir_3hr
        + "/"
        + fcstWindowDate.strftime("%Y%m%d%H")
        + "/"
        + validDate.strftime("%Y%m%d%H")
        + "00.LDASIN_DOMAIN1.nc"
    )
    rap0Path = (
        rap_ds_dir_0hr
        + "/"
        + validDate.strftime("%Y%m%d%H")
        + "/"
        + validDate.strftime("%Y%m%d%H")
        + "00.LDASIN_DOMAIN1.nc"
    )
    rap3Path = (
        rap_ds_dir_3hr
        + "/"
        + fcstWindowDate.strftime("%Y%m%d%H")
        + "/"
        + validDate.strftime("%Y%m%d%H")
        + "00.LDASIN_DOMAIN1.nc"
    )
    mrmsPath = (
        mrms_ds_dir
        + "/"
        + validDate.strftime("%Y%m%d%H")
        + "/"
        + validDate.strftime("%Y%m%d%H")
        + "00.LDASIN_DOMAIN1.nc"
    )
    hrrrBiasPath = qpe_parm_dir + "/HRRR_NLDAS-CPC_bias-corr_m" + validDate.strftime("%m") + "_v9_wrf1km.grb2"
    hrrrWgtPath = qpe_parm_dir + "/HRRR_wgt_m" + validDate.strftime("%m") + "_v8_wrf1km.grb2"
    mrmsBiasPath = (
        qpe_parm_dir + "/MRMS_radonly_NLDAS-CPC_bias-corr_m" + validDate.strftime("%m") + "_v9_wrf1km-sm60.grb2"
    )
    mrmsWgtPath = qpe_parm_dir + "/MRMS_radonly_wgt_m" + validDate.strftime("%m") + "_v8_wrf1km.grb2"
    rapBiasPath = qpe_parm_dir + "/RAPD_NLDAS-CPC_bias-corr_m" + validDate.strftime("%m") + "_v9_wrf1km.grb2"
    rapWgtPath = qpe_parm_dir + "/RAPD_wgt_m" + validDate.strftime("%m") + "_v8_wrf1km.grb2"
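    # For illustration (hypothetical directories, valid date 2016-05-01 11Z as above):
    #   rap0Path    -> <RAP_finished_output_dir_0hr>/2016050111/201605011100.LDASIN_DOMAIN1.nc
    #   rap3Path    -> <RAP_finished_output_dir>/2016050108/201605011100.LDASIN_DOMAIN1.nc
    #   rapBiasPath -> <qpe_combine_parm_dir>/RAPD_NLDAS-CPC_bias-corr_m05_v9_wrf1km.grb2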

    # Sanity checking on parameter data
    try:
        whf.file_exists(hrrrBiasPath)
        whf.file_exists(hrrrWgtPath)
        whf.file_exists(mrmsBiasPath)
        whf.file_exists(mrmsWgtPath)
        whf.file_exists(rapBiasPath)
        whf.file_exists(rapWgtPath)
    except MissingFileError:
        WhfLog.error("Missing file encountered while checking parameter data for AA")
        raise

    # Compose output file paths
    LDASIN_path_tmp = tmp_dir + "/" + validDate.strftime("%Y%m%d%H") + "00.LDASIN_DOMAIN1_TMP.nc"
    LDASIN_path_final = out_path + "/" + validDate.strftime("%Y%m%d%H") + "00.LDASIN_DOMAIN1"
    # Perform layering/combining depending on processing path.
    if process == 1:  # RAP only
        WhfLog.info(
            "Layering and Combining RAP only for cycle date: "
            + cycleDate.strftime("%Y%m%d%H")
            + " valid date: "
            + validDate.strftime("%Y%m%d%H")
        )
        # Check for existence of input files
        try:
            whf.file_exists(rap0Path)
            whf.file_exists(rap3Path)
        except MissingFileError:
            WhfLog.error("Missing RAP files for layering")
            raise

    elif process == 2:  # HRRR and RAP only
        WhfLog.info(
            "Layering and Combining RAP and HRRR for cycle date: "
            + cycleDate.strftime("%Y%m%d%H")
            + " valid date: "
            + validDate.strftime("%Y%m%d%H")
        )
        # Check for existence of input files
        try:
            whf.file_exists(rap0Path)
            whf.file_exists(rap3Path)
            whf.file_exists(hrrr0Path)
            whf.file_exists(hrrr3Path)
        except MissingFileError:
            WhfLog.error("Missing RAP or HRRR files for layering")
            raise
    elif process == 3:  # HRRR, RAP, and MRMS
        WhfLog.info(
            "Layering and Combining RAP/HRRR/MRMS for cycle date: "
            + cycleDate.strftime("%Y%m%d%H")
            + " valid date: "
            + validDate.strftime("%Y%m%d%H")
        )
        # Check for existence of input files
        try:
            whf.file_exists(rap0Path)
            whf.file_exists(rap3Path)
            whf.file_exists(hrrr0Path)
            whf.file_exists(hrrr3Path)
            whf.file_exists(mrmsPath)
        except MissingFileError:
            WhfLog.error("Missing RAP or HRRR or MRMS files for layering")
            raise

    else:  # Error out
        WhfLog.error("Invalid input action selected, invalid layer combination provided in AA.")
        raise UnrecognizedCommandError

    hrrrB_param = "'hrrrBFile=" + '"' + hrrrBiasPath + '"' + "' "
    mrmsB_param = "'mrmsBFile=" + '"' + mrmsBiasPath + '"' + "' "
    rapB_param = "'rapBFile=" + '"' + rapBiasPath + '"' + "' "
    hrrrW_param = "'hrrrWFile=" + '"' + hrrrWgtPath + '"' + "' "
    mrmsW_param = "'mrmsWFile=" + '"' + mrmsWgtPath + '"' + "' "
    rapW_param = "'rapWFile=" + '"' + rapWgtPath + '"' + "' "
    hrrr0_param = "'hrrr0File=" + '"' + hrrr0Path + '"' + "' "
    hrrr3_param = "'hrrr3File=" + '"' + hrrr3Path + '"' + "' "
    rap0_param = "'rap0File=" + '"' + rap0Path + '"' + "' "
    rap3_param = "'rap3File=" + '"' + rap3Path + '"' + "' "
    mrms_param = "'mrmsFile=" + '"' + mrmsPath + '"' + "' "
    process_param = "'process=" + '"' + str(process) + '"' + "' "
    out_param = "'outPath=" + '"' + LDASIN_path_tmp + '"' + "' "

    cmd_params = (
        hrrrB_param
        + mrmsB_param
        + rapB_param
        + hrrrW_param
        + mrmsW_param
        + rapW_param
        + hrrr0_param
        + hrrr3_param
        + rap0_param
        + rap3_param
        + mrms_param
        + process_param
        + out_param
    )
    cmd = ncl_exec + " -Q " + cmd_params + " " + layer_exe
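    # For illustration, the assembled command takes the general form (hypothetical paths):
    #   <ncl_exe> -Q 'hrrrBFile="..."' 'mrmsBFile="..."' ... 'process="3"' 'outPath="...TMP.nc"' <layer_exe>
    # i.e. each NCL command-line variable is passed to the layering script as 'name="value"'.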
    status = os.system(cmd)

    if status != 0:
        WhfLog.error("Error in combinining NCL program")
        raise NCLError("NCL error encountered while combining in AA")

    # Double check to make sure the layered output file was created (the
    # temporary file is removed after the rename below).
    whf.file_exists(LDASIN_path_tmp)
    # Rename file to conform to WRF-Hydro expectations
    cmd = "mv " + LDASIN_path_tmp + " " + LDASIN_path_final
    status = os.system(cmd)
    if status != 0:
        WhfLog.error("Failure to rename " + LDASIN_path_tmp)
    try:
        whf.file_exists(LDASIN_path_final)
    except MissingFileError:
        WhfLog.error("Missing LDASIN_path_final file")
        raise
    cmd = "rm -rf " + LDASIN_path_tmp
    status = os.system(cmd)
    if status != 0:
        WhfLog.error("Failure to remove " + LDASIN_path_tmp)
        raise SystemCommandError
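

# Minimal invocation sketch for anal_assim_layer (hypothetical cycle and forecast
# hour; "wrf_hydro_forcing.parm" is the config/param file referenced elsewhere in
# this module's docstrings):
#
#   anal_assim_layer("2016050112", "-1", "RAP_HRRR_MRMS", "wrf_hydro_forcing.parm")
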
def forcing(config, action, prod, file):
    """Peforms the action on the given data
       product and corresponding input file.

       Args:
           config (string) : Config file name
           action (string):  Supported actions are:
                             'regrid' - regrid and downscale
           prod (string):  The first product [mandatory option]:
                            (MRMS, HRRR or RAP)
           file (string):  The file name (full path not necessary;
                            this is derived from the Python config/
                            param file and the YYYYMMDD portion of
                            the file name).

       Returns:
           None           Performs the indicated action on the
                          files based on the type of product and
                          any other relevant information provided
                          by the Python config/param file,
                          wrf_hydro_forcing.parm
 
 
    """

    # Read the parameters from the config/param file.
    parser = SafeConfigParser()
    parser.read(config)

    # Set up logging, environments, etc.
    forcing_config_label = "Anal_Assim"
    whf.initial_setup(parser, forcing_config_label)

    # Convert the action to lower case
    # and the product name to upper case
    # for consistent checking
    action_requested = action.lower()
    product_data_name = prod.upper()

    # For analysis and assimilation, only 0hr, 3hr forecast fields from HRRR/RAP
    # are necessary. 3hr forecast files are already regridded and downscaled
    # from the short-range configuration, so only 0hr forecast files are regridded/downscaled
    # here. In addition, MRMS data will be regridded, when available.
    if action == "regrid":
        (date, modelrun, fcsthr) = whf.extract_file_info(file)
        # Usually check for forecast range, but only 0, 3 hr forecast/analysis data used

        # Check for HRRR, RAP, MRMS products.
        WhfLog.info("Regridding and Downscaling for %s", product_data_name)

        if fcsthr == 0 and prod == "HRRR":
            downscale_dir = parser.get("downscaling", "HRRR_downscale_output_dir_0hr")
            try:
                regridded_file = whf.regrid_data(product_data_name, file, parser, False, zero_process=True)
            except FilenameMatchError:
                WhfLog.error("Unexpected filename format encountered while regridding 0hr HRRR")
                raise
            except NCLError:
                WhfLog.error("NCL error encountered while regridding 0hr HRRR")
                raise
            try:
                whf.downscale_data(product_data_name, regridded_file, parser, False, False, zero_process=True)

            except FilenameMatchError:
                WhfLog.error("Unexpected filename format encountered while downscaling 0hr HRRR")
                raise
            except NCLError:
                WhfLog.error("NCL error encountered while downscaling 0hr HRRR")
                raise

            # Move downscaled file to staging area where triggering will monitor
            match = re.match(r".*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.nc)", regridded_file)
            if match:
                full_dir = downscale_dir + "/" + match.group(1)
                full_finished_file = full_dir + "/" + match.group(2)
                # File should have been created in downscale_data step.
                try:
                    whf.file_exists(full_finished_file)
                except MissingFileError:
                    WhfLog.error("Missing regridded/downscaled 0hr HRRR file; cannot move to staging area")
                    raise
                try:
                    whf.move_to_finished_area(parser, prod, full_finished_file, zero_move=True)
                except (UnrecognizedCommandError, FilenameMatchError):
                    WhfLog.error("Unsupported/unrecognized command or unexpected filename encountered while moving file to finished area.")
                    raise
            else:
                WhfLog.error("File name format is unexpected")
                raise FilenameMatchError("File name format is unexpected")
        elif fcsthr == 0 and prod == "RAP":
            downscale_dir = parser.get("downscaling", "RAP_downscale_output_dir_0hr")
            try:
                regridded_file = whf.regrid_data(product_data_name, file, parser, False, zero_process=True)
            except NCLError:
                WhfLog.error("NCL error while regridding 0hr RAP")
                raise
            except FilenameMatchError:
                WhfLog.error("Unexpected filename format encountered, cannot regrid 0hr RAP")
                raise

            try:
                whf.downscale_data(product_data_name, regridded_file, parser, False, False, zero_process=True)
            except NCLError:
                WhfLog.error("NCL error encountered while downscaling 0hr RAP")
                raise

            # Move downscaled file to staging area where triggering will monitor
            match = re.match(r".*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.nc)", regridded_file)
            if match:
                full_dir = downscale_dir + "/" + match.group(1)
                full_finished_file = full_dir + "/" + match.group(2)
                # File should have been created in downscale_data step.
                try:
                    whf.file_exists(full_finished_file)
                except MissingFileError as mfe:
                    WhfLog.error("Missing file encountered while moving 0hr RAP file to staging area.")
                    raise
                try:
                    whf.move_to_finished_area(parser, prod, full_finished_file, zero_move=True)
                except UnrecognizedCommandError:
                    WhfLog.error("Unrecognized command error while trying to move 0hr RAP file to finished area")
                    raise
                except FilenameMatchError:
                    WhfLog.error("File name's format is unexpected.  Cannot move file to finished area")
                    raise
            else:
                WhfLog.error("File name's format is unexpected")
                raise FilenameMatchError("File name format is unexpected")

        elif prod == "MRMS":
            try:
                regridded_file = whf.regrid_data(product_data_name, file, parser, False)
            except NCLError:
                WhfLog.error("NCL error encountered while regridding MRMS")
                raise
            except FilenameMatchError:
                WhfLog.error("File name's format is unexpected, cannot regrid MRMS")
                raise
            # Move regridded file to staging area where triggering will monitor
            # First make sure file exists
            try:
                whf.file_exists(regridded_file)
            except MissingFileError as mfe:
                WhfLog.error("Missing file encountered while moving regridded MRMS file")
                raise

            try:
                whf.move_to_finished_area(parser, prod, regridded_file, zero_move=False)
            except UnrecognizedCommandError:
                WhfLog.error("Unrecognized command error while trying to move MRMS file to finished area")
                raise
            except FilenameMatchError:
                WhfLog.error("File name's format is unexpecte.  Cannot move file to finished area")
                raise
        else:
            WhfLog.error("Either invalid forecast hour or invalid product chosen")
            WhfLog.error("Only 00hr forecast files, and RAP or HRRR or MRMS are valid choices")
            raise InvalidArgumentError(
                "Either invalid forecast hour %s or invalid product requested %s" % (fcsthr, prod)
            )
    else:  # Invalid action selected
        WhfLog.error("ERROR [Anal_Assim_Forcing]- Invalid action selected")
        raise UnrecognizedCommandError(
            "Invalid action selection within Analysis and Assimilation regridding and downscaling"
        )
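

# Minimal invocation sketch for the Analysis and Assimilation forcing() above
# (the HRRR file name here is hypothetical; the expected naming convention is
# defined by the config/param file, not by this sketch):
#
#   forcing("wrf_hydro_forcing.parm", "regrid", "HRRR", "hrrr.t12z.wrfprsf00.grib2")
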
def forcing(configFile, action, prod, file, prod2=None, file2=None):
    """Peforms the action on the given data
       product and corresponding input file.

       Args:
           configFile (string): name of file with settings
           action (string):  Supported actions are:
                             'regrid' - regrid and downscale
                             ('bias' and 'layer' are not
                             implemented for Medium Range)
           prod (string):  The first product [mandatory option]:
                            (GFS)
           file (string):  The file name (full path not necessary;
                            this is derived from the Python config/
                            param file and the YYYYMMDD portion of
                            the file name).

           prod2 (string):  The second product (????), default
                            is None. Required for layering.
           file2 (string):  The second file name, required for
                            layering, default is None.
       Returns:
           None           Performs the indicated action on the
                          files based on the type of product and
                          any other relevant information provided
                          by the Python config/param file,
                          wrf_hydro_forcing.parm
 
 
    """


    # Read the parameters from the config/param file.
    parser = SafeConfigParser()
    try:
        parser.read(configFile)
    except (NoSectionErrorException, DuplicateSectionErrorException,\
            DuplicateOptionErrorException,MissingSectionHeaderErrorException,\
            ParsingErrorException) as e:
        raise

    # Set up logging, environments, etc.
    forcing_config_label = 'Medium_Range'
    whf.initial_setup(parser,forcing_config_label)


    # Extract the date, model run time, and forecast hour from the file name
    # Use the fcsthr to process only the files that have a fcst hour less than
    # the max fcst hr defined in the param/config file.
    
    
    # Convert the action to lower case 
    # and the product name to upper case
    # for consistent checking
    action_requested = action.lower()
    product_data_name = prod.upper()
    regridded_dir = parser.get('regridding','GFS_output_dir')
    downscale_dir = parser.get('downscaling','GFS_downscale_output_dir')
    finished_downscale_dir = parser.get('downscaling','GFS_finished_output_dir')
    final_dir = parser.get('layering','medium_range_output')
    if action == 'regrid': 
        (date,modelrun,fcsthr) = whf.extract_file_info(file)
        # Determine whether this current file lies within the forecast range
        # for the data product (e.g. if processing RAP, use only the 0hr-18hr forecasts).
        # Skip if this file has a forecast hour greater than the max indicated in the 
        # parm/config file.
        in_fcst_range = whf.is_in_fcst_range(prod, fcsthr, parser)

        if in_fcst_range:
            # Check for RAP or GFS data products.  If this file is
            # a 0 hr fcst and is RAP or GFS, substitute each 0hr forecast
            # with the file from the previous model run and the same valid
            # time.  This is necessary because there are missing variables
            # in the 0hr forecasts (e.g. precip rate for RAP and radiation
            # in GFS).
 
            WhfLog.info("Regridding and Downscaling for %s", product_data_name)
            # Determine if this is a 0hr GFS forecast (the 0hr GFS files are
            # missing some variables, e.g. radiation). If so, substitute this
            # file with the downscaled file from a previous model run with the
            # same valid time.
            # We only need to do this for downscaled files, as the Medium Range
            # forcing files that are regridded always get downscaled and we don't
            # want to do this for both the regridding and downscaling.
            if fcsthr == 0 and prod == 'GFS':
                WhfLog.info("Regridding (ignoring f0 GFS files) %s: ", file )
                try:
                    regridded_file = whf.regrid_data(product_data_name, file, parser, True)
                except (FilenameMatchError,NCLError,MissingFileError) as e:
                    WhfLog.error('Failure: regridding of GFS (ignoring 0hr fcst) file: ' + file)
                    WhfLog.error(e) 
                    raise
                try:
                    whf.downscale_data(product_data_name,regridded_file, parser, True, True)                
                except (MissingFileError, SystemCommandError,\
                        NCLError) as e:
                    WhfLog.error('Downscaling GFS failed: ' + str(e))
                    raise

                match = re.match(r'.*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.nc)',regridded_file)
                if match:
                    ymd_dir = match.group(1)
                    file_only = match.group(2)
                    downscaled_dir = downscale_dir + "/" + ymd_dir
                    downscaled_file = downscaled_dir + "/" + file_only
                    # Check to make sure downscaled file was created
                    try:
                        whf.file_exists(downscaled_file)
                    except MissingFileError as mfe:
                        WhfLog.error('Downscaling, non-existent downscaled file: ' + downscaled_file)
                        WhfLog.error(mfe)
                    try:
                        whf.rename_final_files(parser,"Medium_Range")
                    except FilenameMatchError as fme:
                        WhfLog.error('Failed to rename final files due to unexpected filename format: ' + str(fme))
                    except UnrecognizedCommandError as uce:
                        WhfLog.error('Failed to rename final files due to unrecognized/unsupported request: ' + str(uce))
                else:
                    raise FilenameMatchError('MediumRangeForcing regridded_file %s has unexpected filename format'%regridded_file)
                # Remove empty 0hr regridded file if it still exists
                if os.path.exists(regridded_file):
                    cmd = 'rm -rf ' + regridded_file
                    status = os.system(cmd)
                    if status != 0:
                        WhfLog.error("Failure to remove empty file: " + regridded_file)
                        raise SystemCommandError('MediumRangeForcing failed to clean up regridded file %s'%(regridded_file))
            else:
                WhfLog.info("Regridding non-zero hour fcst%s: ", file )
                try:
                    regridded_file = whf.regrid_data(product_data_name, file, parser, False)
                except (FilenameMatchError, NCLError) as e:
                    WhfLog.error('Regridding failed for GFS non-zero fcst regrid file: ' + file) 
                    raise
          
                try:
                    whf.downscale_data(product_data_name,regridded_file, parser,True, False)                
                except (MissingFileError, SystemCommandError, NCLError):
                    raise

                match = re.match(r'.*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.nc)',regridded_file)
                if match:
                    ymd_dir = match.group(1)
                    file_only = match.group(2)
                    downscaled_dir = downscale_dir + "/" + ymd_dir
                    downscaled_file = downscaled_dir + "/" + file_only
                    # Check to make sure downscaled file was created
                    try:
                        whf.file_exists(downscaled_file)
                    except MissingFileError:
                        raise
                    try:
                        whf.rename_final_files(parser,"Medium_Range")
                    except (FilenameMatchError, UnrecognizedCommandError) as e:
                        raise

                else:
                    raise FilenameMatchError('MediumRangeForcing renaming finished file failed, unexpected filename format for %s'%(regridded_file)) 
        else:
            # Skip processing this file, exiting...
            WhfLog.info("Skip processing, requested file is outside max fcst")
    else:
        WhfLog.info("Unsupported action requested. Only regridding (and downscaling) performed for Medium Range")
def forcing(configFile,file_in):
    """ Args:
	1.) configFile (string): The config file with all 
	    the settings.
        2.) file (string): The file name. The full path is 
            not necessary as full paths will be derived from
            parameter directory paths and datetime information.
        Returns:
	None - Performs indicated bias correction, regridding,
               and downscaling of CFSv2 data. Any errors are
               trapped and passed back to the driver.
    """

    WhfLog.debug("file_in = %s", file_in)

    # Obtain CFSv2 forcing engine parameters.
    parser = SafeConfigParser()
    parser.read(configFile)

    # Set up logging environments, etc.
    forcing_config_label = "Long_Range"
    Whf.initial_setup(parser, forcing_config_label)

    out_dir = parser.get('layering','long_range_output') 
    tmp_dir = parser.get('bias_correction','CFS_tmp_dir')

    if not df.makeDirIfNeeded(out_dir):
        raise MissingDirectoryError('Dir %s cannot be created' % out_dir)
    if not df.makeDirIfNeeded(tmp_dir):
        raise MissingDirectoryError('Dir %s cannot be created' % tmp_dir)

    # Define CFSv2 cycle date and valid time based on file name.
    (cycleYYYYMMDD,cycleHH,fcsthr,em) = Whf.extract_file_info_cfs(file_in)
    em_str = str(em)

    # Pull path to NCL bias correction module file. Export this as an 
    # environmental variable NCL refers to later. 
    nclBiasMod = parser.get('exe','CFS_bias_correct_mod')
    os.environ["CFS_NCL_BIAS_MOD"] = nclBiasMod

    # Establish datetime objects
    dateCurrent = datetime.datetime.today()
    dateCycleYYYYMMDDHH = datetime.datetime(year=int(cycleYYYYMMDD[0:4]),
                          month=int(cycleYYYYMMDD[4:6]),
                          day=int(cycleYYYYMMDD[6:8]),
                          hour=cycleHH)
    dateFcstYYYYMMDDHH = dateCycleYYYYMMDDHH + \
                         datetime.timedelta(seconds=fcsthr*3600)

    # Determine if this is a 0hr forecast file or not.
    if dateFcstYYYYMMDDHH == dateCycleYYYYMMDDHH:
        fFlag = 1 
    else:
        fFlag = 0 
    # Establish final output directories to hold 'LDASIN' files used for
    # WRF-Hydro long-range forecasting. If the directory does not exist,
    # create it.
    out_path = out_dir + "/Member_" + em_str.zfill(2) + "/" + \
               dateCycleYYYYMMDDHH.strftime("%Y%m%d%H")

    Whf.mkdir_p(out_path)

    in_fcst_range = Whf.is_in_fcst_range("CFSv2",fcsthr,parser)

    if in_fcst_range:
        # First, bias-correct CFSv2 data and generate hourly files 
        # from six-hour forecast
        WhfLog.info("Bias correcting for CFSv2 cycle: " + \
                     dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') + \
                     " CFSv2 forecast time: " + dateFcstYYYYMMDDHH.strftime('%Y%m%d%H'))
        try:
            Whf.bias_correction('CFSV2',file_in,dateCycleYYYYMMDDHH,
                                dateFcstYYYYMMDDHH,parser, em = em)
        except (MissingFileError,NCLError):
            raise

        # Second, regrid to the conus IOC domain
        # Loop through each hour in a six-hour CFSv2 forecast time step, compose temporary filename 
        # generated from bias-correction and call the regridding to go to the conus domain.
        if fFlag == 1:
            begCt = 6 
            endCt = 7
        else:
            begCt = 1
            endCt = 7
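        # Note: "hour" below indexes the hourly slices within this six-hour CFSv2
        # step; hour=6 is the forecast valid time itself, so a 0hr file
        # (fFlag == 1) produces only that single slice, while later files
        # produce hours 1 through 6.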
        for hour in range(begCt,endCt):
            dateTempYYYYMMDDHH = dateFcstYYYYMMDDHH - datetime.timedelta(seconds=(6-hour)*3600)

            fileBiasCorrected = tmp_dir + "/CFSv2_bias_corrected_TMP_" + \
                                dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') + "_" + \
                                dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + ".M" + \
                                em_str.zfill(2) + ".nc"
            WhfLog.info("Regridding CFSv2 to conus domain for cycle: " + \
                         dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') + \
                         " forecast time: " + dateTempYYYYMMDDHH.strftime('%Y%m%d%H'))
            try:
                fileRegridded = Whf.regrid_data("CFSV2",fileBiasCorrected,parser)
            except (MissingFileError,NCLError):
                raise

            # Double check to make sure file was created, delete temporary bias-corrected file
            try:
                Whf.file_exists(fileRegridded)
            except MissingFileError:
                raise	
            cmd = "rm -rf " + fileBiasCorrected
            status = os.system(cmd)
            if status != 0:
                raise SystemCommandError('Command %s failed.' % cmd)

  
        # Third, perform topography downscaling to generate the final LDASIN files.
        # Loop through each hour in a six-hour CFSv2 forecast time step, compose temporary filename
        # generated from regridding and call the downscaling function.
        for hour in range(begCt,endCt):
            dateTempYYYYMMDDHH = dateFcstYYYYMMDDHH - datetime.timedelta(seconds=(6-hour)*3600)

            WhfLog.info("Downscaling CFSv2 for cycle: " +
                         dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') +
                         " forecast time: " + dateTempYYYYMMDDHH.strftime('%Y%m%d%H'))
            fileRegridded = tmp_dir + "/CFSv2_bias_corrected_TMP_" + \
                            dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') + "_" + \
                                dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + \
                                "_regridded.M" + em_str.zfill(2) + ".nc"
            LDASIN_path_tmp = tmp_dir + "/" + dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + "00.LDASIN_DOMAIN1.nc"
            LDASIN_path_final = out_path + "/" + dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + "00.LDASIN_DOMAIN1"
            try:
                Whf.downscale_data("CFSv2",fileRegridded,parser, out_path=LDASIN_path_tmp, \
                                   verYYYYMMDDHH=dateTempYYYYMMDDHH)
            except (MissingFileError,FilenameMatchError,NCLError,SystemCommandError):
                raise
            # Double check to make sure file was created, delete temporary regridded file
            try:
                Whf.file_exists(LDASIN_path_tmp)
            except MissingFileError:
                raise
            # Rename file to conform to WRF-Hydro expectations
            cmd = "mv " + LDASIN_path_tmp + " " + LDASIN_path_final
            status = os.system(cmd)
            if status != 0:
                raise SystemCommandError('Command %s failed.'%cmd)
            try:
                Whf.file_exists(LDASIN_path_final)
            except MissingFileError:
                raise
            cmd = "rm -rf " + fileRegridded
            status = os.system(cmd)
            if status != 0:
                raise SystemCommandError('Command %s failed.'%cmd)
       
	WhfLog.info("Long_Range processing for %s%d Forecast Hour: %d Ensemble: %s",
                cycleYYYYMMDD, cycleHH, fcsthr, em_str)
    else:
        # Skip processing this file. Exit gracefully with a 0 exit status.
        WhfLog.info("Requested file is outside max fcst for CFSv2")