def do_layering(rap_downscale_dir, hrrr_downscale_dir, is_yellowstone=False):
    """Layer each RAP downscaled file with its matching HRRR file.

       Walks both downscale directories, reduces each path to its
       YYYYMMDD_iHH_fHH(H)... basename portion, and invokes the layering
       step once for every basename present in BOTH trees.

       Args:
           rap_downscale_dir (string):  Directory tree of downscaled RAP files.
           hrrr_downscale_dir (string): Directory tree of downscaled HRRR files.
           is_yellowstone (bool):       True on yellowstone, whose paths carry
                                        explicit /RAP/ and /HRRR/ components.
       Returns:
           None
    """
    # Go through the RAP downscaled directories and find
    # the corresponding HRRR downscaled file for each RAP
    # file.
    rap_file_paths = whf.get_filepaths(rap_downscale_dir)
    hrrr_file_paths = whf.get_filepaths(hrrr_downscale_dir)

    # Compare the YYYYMMDDHH/YYYYMMDDhh00.LDASIN_DOMAIN1.nc portions
    rap_files = []
    hrrr_files = []
    if is_yellowstone:
        for rap in rap_file_paths:
            match = re.match(r'.*/RAP/[0-9]{8}/([0-9]{8}_i[0-9]{2}_f[0-9]{2,3}.*)',rap)
            # BUG FIX: skip stray paths that do not follow the expected
            # layout; the original called match.group(1) unconditionally
            # and raised AttributeError on the first non-matching file.
            if match:
                rap_files.append(match.group(1))

        for hrrr in hrrr_file_paths:
            match = re.match(r'.*/HRRR/[0-9]{8}/([0-9]{8}_i[0-9]{2}_f[0-9]{2,3}.*)',hrrr)
            if match:
                hrrr_files.append(match.group(1))
    else:
        for rap in rap_file_paths:
            match = re.match(r'.*/[0-9]{8}/([0-9]{8}_i[0-9]{2}_f[0-9]{2,3}.*)',rap)
            if match:
                rap_files.append(match.group(1))
        for hrrr in hrrr_file_paths:
            match = re.match(r'.*/[0-9]{8}/([0-9]{8}_i[0-9]{2}_f[0-9]{2,3}.*)',hrrr)
            if match:
                hrrr_files.append(match.group(1))

    # Find the matching files from each list
    files_to_layer = set(rap_files) & set(hrrr_files)
    for file in files_to_layer:
        srf.forcing("layer","RAP", file, "HRRR", file)
Exemplo n.º 2
0
def do_layering(rap_downscale_dir, hrrr_downscale_dir, is_yellowstone=False):
    """Pair up RAP and HRRR downscaled output and layer each pair.

       Reduces every file under the two downscale trees to its
       YYYYMMDD_iHH_fHH(H)... basename and calls the layering step for
       each basename that occurs in both trees.

       Args:
           rap_downscale_dir (string):  Root of the downscaled RAP tree.
           hrrr_downscale_dir (string): Root of the downscaled HRRR tree.
           is_yellowstone (bool):       True on yellowstone, whose paths
                                        embed /RAP/ and /HRRR/ components.
       Returns:
           None
    """
    rap_file_paths = whf.get_filepaths(rap_downscale_dir)
    hrrr_file_paths = whf.get_filepaths(hrrr_downscale_dir)

    # Select the per-host patterns that strip the path down to the
    # YYYYMMDDHH/YYYYMMDDhh00.LDASIN_DOMAIN1.nc portion.
    if is_yellowstone:
        rap_pattern = r'.*/RAP/[0-9]{8}/([0-9]{8}_i[0-9]{2}_f[0-9]{2,3}.*)'
        hrrr_pattern = r'.*/HRRR/[0-9]{8}/([0-9]{8}_i[0-9]{2}_f[0-9]{2,3}.*)'
    else:
        rap_pattern = r'.*/[0-9]{8}/([0-9]{8}_i[0-9]{2}_f[0-9]{2,3}.*)'
        hrrr_pattern = rap_pattern

    rap_files = [re.match(rap_pattern, p).group(1) for p in rap_file_paths]
    hrrr_files = [re.match(hrrr_pattern, p).group(1) for p in hrrr_file_paths]

    # Layer only the basenames common to both products.
    for common in set(rap_files) & set(hrrr_files):
        srf.forcing("layer","RAP", common, "HRRR", common)
def main():
    """Tests the regridding and downscaling of RAP and HRRR
       data for the Short Range Forcing Configuration.
    """
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # CHANGE THIS TO REFLECT WHICH RUN ENVIRONMENT:
    # YELLOWSTONE OR HYDRO-C!
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # Set flag for testing host
    #is_yellowstone = True
    is_yellowstone = False
    parser = SafeConfigParser()

    try:
        parser.read('../../parm/wrf_hydro_forcing.parm')
    except:
        print "d'oh!"

    

    # Start and end dates 
    if is_yellowstone:
         start_dt = datetime.datetime.strptime("20150930","%Y%m%d")
         end_dt = datetime.datetime.strptime("20151001","%Y%m%d")
    else:
         start_dt = datetime.datetime.strptime("20160126","%Y%m%d")
         end_dt = datetime.datetime.strptime("20160127","%Y%m%d")

    # Set the directory where the input data resides.
    # For running on yellowstone:
    # RAP_dir_base = "/glade/scratch/lpan/IOC/data/RAP"
    # HRRR_dir_base = "/glade/scratch/lpan/IOC/data/HRRR"
    # For running on hydro-c1:
    # RAP_downscale_dir =
    # "/glade/scratch/gochis/IOC_evaluation_datasets/
    # Forcing_Engine/workspace/downscaled/RAP"
    # HRRR_downscale_dir = "/glade/scratch/gochis/
    # IOC_evaluation_datasets/Forcing_Engine/workspace/downscaled/HRRR"
    RAP_dir_base = parser.get('data_dir','RAP_data')
    HRRR_dir_base = parser.get('data_dir', 'HRRR_data')
    RAP_downscale_dir = parser.get('downscaling', 'RAP_downscale_output_dir')
    HRRR_downscale_dir = parser.get('downscaling', 'HRRR_downscale_output_dir')

    all_RAP_files_with_path = whf.get_filepaths(RAP_dir_base) 
    all_HRRR_files_with_path = whf.get_filepaths(HRRR_dir_base) 

    # We are only interested in the RAP and HRRR files that are
    # within the start and end forecast times.
    HRRR_files_with_path = [x for x in all_HRRR_files_with_path if is_within_time_range(start_dt,end_dt,x,"HRRR",is_yellowstone)]
        
    RAP_files_with_path = [x for x in all_RAP_files_with_path if is_within_time_range(start_dt,end_dt,x,"RAP",is_yellowstone)]

    #for hrrr in HRRR_files_with_path:
    #    print ("process %s")%(hrrr)
    # do the processing on only the input grib files 
    #do_regrid(RAP_dir_base,'RAP', RAP_files_with_path, is_yellowstone)
    do_regrid(HRRR_dir_base, 'HRRR', HRRR_files_with_path, is_yellowstone)
Exemplo n.º 4
0
def initialize(source_dir, destination_dir):
    '''Initialize by looking at source_dir, and creating destination_dir
   Args:
      source_dir (string) : Full path to source data, subdir yyyymmdd
      destination_dir(string) : Full path to destination
   Returns:
      list of source file paths, ordered
   '''
    try:
        #Source directory
        dirExists(source_dir)
    except MissingDirectoryError:
        print "Source directory missing. Check directory path ", source_dir
        files = []
    else:
        # Get a directory listing and save all files with the specified
        # extension.
        files = whf.get_filepaths(source_dir)
        files = sorted(files)
        print "Numfiles in ", source_dir, " = ", len(files)

    try:
        #Destination directory
        dirExists(destination_dir)
    except MissingDirectoryError:
        print "Destination directory does not exist, creating it now ", destination_dir
        whf.mkdir_p(destination_dir)

    return files
Exemplo n.º 5
0
def initialize(source_dir, destination_dir):
   '''Initialize by looking at source_dir, and creating destination_dir
   Args:
      source_dir (string) : Full path to source data, subdir yyyymmdd
      destination_dir(string) : Full path to destination
   Returns:
      list of source file paths, ordered; empty when source_dir
      does not exist
   '''
   try:
      #Source directory
      # dirExists is a project helper; the except clause below implies
      # it raises MissingDirectoryError for an absent directory.
      dirExists(source_dir)
   except MissingDirectoryError:
      # Degrade gracefully: report the problem and return no files.
      print "Source directory missing. Check directory path ", source_dir
      files = []
   else:
      # Get a directory listing and save all files with the specified
      # extension.
      files = whf.get_filepaths(source_dir)
      files = sorted(files)
      print "Numfiles in ", source_dir, " = ", len(files)

   try:
      #Destination directory
      dirExists(destination_dir)
   except MissingDirectoryError:
      # Create the destination on first use rather than failing.
      print "Destination directory does not exist, creating it now ", destination_dir
      whf.mkdir_p(destination_dir)

   return files
Exemplo n.º 6
0
def moveFiles(source_dir, destination_dir, delay=0):
    '''Moves all the files from the source directory to the
       destination directory.
  
       Args:
           source_dir (string):      Full path to the source directory
           destination_dir (string): Full path to the destination directory
           extension (string):       File extension of files to be moved

       Returns:
           None

    '''

    try:
        #Source directory
        dir_exists(source_dir)
    
    except MissingDirectoryError:
        print "Source directory missing. Check directory path"
        
    else:
        # Get a directory listing and save all files with the specified
        # extension.
        files = whf.get_filepaths(source_dir)
        
    try:
        #Destination directory
        dir_exists(destination_dir)
    except MissingDirectoryError:
        print "Destination directory does not exist, creating it now"
        whf.mkdir_p(destination_dir)
    else:
        #move the files
        for file in files:
            #separate the filename from the directory and the
            #date directory
            date_match = re.match(r'.*/([0-9]{8})',file)
            if date_match:
                date_dir = date_match.group(1)
            else:
                print "No date directory found, exiting"
                raise MissingDirectoryError("No date directory")
            
            # Just the filename, no path
            exp = re.compile(r'.*/[0-9]{8}/(.*.grib2|.*.grb2)')
            file_match = exp.match(file)
            if file_match:
                filename_only = file_match.group(1)
            else: 
                print "No file name match, exiting"
                raise MissingFileError("No file matching the expected pattern") 

            dest = (destination_dir,date_dir,"/" ) 
            dest_path = "".join(dest)
            whf.mkdir_p(dest_path)
            dest = dest_path +  filename_only
            shutil.move(file, dest)
            time.sleep(delay)
Exemplo n.º 7
0
def moveFiles(source_dir, destination_dir, delay=0):
    '''Moves all the files from the source directory to the
       destination directory.
  
       Args:
           source_dir (string):      Full path to the source directory
           destination_dir (string): Full path to the destination directory
           extension (string):       File extension of files to be moved

       Returns:
           None

    '''

    try:
        #Source directory
        dir_exists(source_dir)
    
    except MissingDirectoryError:
        print "Source directory missing. Check directory path"
        
    else:
        # Get a directory listing and save all files with the specified
        # extension.
        files = whf.get_filepaths(source_dir)
        
    try:
        #Destination directory
        dir_exists(destination_dir)
    except MissingDirectoryError:
        print "Destination directory does not exist, creating it now"
        whf.mkdir_p(destination_dir)
    else:
        #move the files
        for file in files:
            #separate the filename from the directory and the
            #date directory
            date_match = re.match(r'.*/([0-9]{8})',file)
            if date_match:
                date_dir = date_match.group(1)
            else:
                print "No date directory found, exiting"
                raise MissingDirectoryError("No date directory")
            
            # Just the filename, no path
            exp = re.compile(r'.*/[0-9]{8}/(.*.grib2|.*.grb2)')
            file_match = exp.match(file)
            if file_match:
                filename_only = file_match.group(1)
            else: 
                print "No file name match, exiting"
                raise MissingFileError("No file matching the expected pattern") 

            dest = (destination_dir,date_dir,"/" ) 
            dest_path = "".join(dest)
            whf.mkdir_p(dest_path)
            dest = dest_path +  filename_only
            shutil.move(file, dest)
            time.sleep(delay)
Exemplo n.º 8
0
def main():
    """Tests the regridding and downscaling of GFS
       data for the Medium Range Forcing Configuration.

       Reads directories from wrf_hydro_forcing2.parm, keeps only the
       GFS files inside the test window, and regrids them.
    """
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # CHANGE THIS TO REFLECT WHICH RUN ENVIRONMENT,
    # YELLOWSTONE OR HYDRO-C1
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # Set flag for testing host
    #is_yellowstone = True
    is_yellowstone = False
    parser = SafeConfigParser()
    config_file = "../../parm/wrf_hydro_forcing2.parm"
    parser.read(config_file)

    # Pick the test window for the current host.
    if is_yellowstone:
        window = ("20150929", "20150930")
    else:
        window = ("20160201", "20160202")
    start_dt = datetime.datetime.strptime(window[0], "%Y%m%d")
    end_dt = datetime.datetime.strptime(window[1], "%Y%m%d")

    # Set the directory where the input data resides.
    # For running on yellowstone:
    # GFS_dir_base = "/glade/scratch/lpan/IOC/data/gfs5"
    # For running on hydro-c1:
    # /var/autofs/mnt/gfsdmg1/data/grib/GFS_0.25-pgrb2
    GFS_dir_base = parser.get('data_dir', 'GFS_data')
    #GFS_downscale_dir = parser.get('downscaling', 'GFS_downscale_output_dir')
    # Read but unused below; the lookup still validates the config option.
    GFS_downscale_dir = parser.get('layering', 'medium_range_output')

    candidates = whf.get_filepaths(GFS_dir_base)

    # Keep only the GFS files that fall inside the forecast window.
    in_range = [
        path for path in candidates
        if is_within_time_range(start_dt, end_dt, path, is_yellowstone)
    ]
    print("INFO: GFS files within time range:")
    for path in in_range:
        print(path)

    # do the processing on only the input grib files
    do_regrid(config_file, GFS_dir_base, 'GFS', in_range, is_yellowstone)
def main():
    """Tests the regridding and downscaling of GFS
       data for the Medium Range Forcing Configuration.

       Reads directories from wrf_hydro_forcing2.parm, keeps only the
       GFS files inside the test window, and regrids them.  Returns None.
    """
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # CHANGE THIS TO REFLECT WHICH RUN ENVIRONMENT,
    # YELLOWSTONE OR HYDRO-C1
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # Set flag for testing host
    #is_yellowstone = True
    is_yellowstone = False
    parser = SafeConfigParser()
    config_file = "../../parm/wrf_hydro_forcing2.parm"
    parser.read(config_file)

    # Start and end dates (exclusive upper bound semantics depend on
    # is_within_time_range -- defined elsewhere in this file).
    if is_yellowstone:
         start_dt = datetime.datetime.strptime("20150929","%Y%m%d")
         end_dt = datetime.datetime.strptime("20150930","%Y%m%d")
    else:
         start_dt = datetime.datetime.strptime("20160201","%Y%m%d")
         end_dt = datetime.datetime.strptime("20160202","%Y%m%d")

    # Set the directory where the input data resides.
    # For running on yellowstone:
    # GFS_dir_base = "/glade/scratch/lpan/IOC/data/gfs5"
    # For running on hydro-c1:
    # /var/autofs/mnt/gfsdmg1/data/grib/GFS_0.25-pgrb2
    # GFS_downscale_dir =
    # "/glade/scratch/gochis/IOC_evaluation_datasets/Forcing_Engine/workspace/downscaled/GFS"
    GFS_dir_base = parser.get('data_dir','GFS_data')
    #GFS_downscale_dir = parser.get('downscaling', 'GFS_downscale_output_dir')
    # NOTE(review): GFS_downscale_dir is read but never used below; the
    # parser.get call is kept for its missing-option validation.
    GFS_downscale_dir = parser.get('layering', 'medium_range_output')

    all_GFS_files_with_path = whf.get_filepaths(GFS_dir_base)

    # We are only interested in the GFS files that are
    # within the start and end forecast times.

    GFS_files_with_path = [x for x in all_GFS_files_with_path if is_within_time_range(start_dt,end_dt,x,is_yellowstone)]
    print("INFO: GFS files within time range:")
    for gfs_files in GFS_files_with_path:
        print(gfs_files)

    # do the processing on only the input grib files
    do_regrid(config_file, GFS_dir_base,'GFS', GFS_files_with_path, is_yellowstone)
def do_layering(config_file,parser,rap_downscale_dir, hrrr_downscale_dir, mrms_downscale_dir, fcst_hr, is_yellowstone=False):
    # Initialize some flags and lists,
    # assume that we only have RAP for now.
    request_hrrr = False
    request_mrms = False 
    rap_files = []
    hrrr_files = []
    mrms_files = []
    
    # Set flags to be used to determine which layers need to be layered.
    if hrrr_downscale_dir is not None:
        request_hrrr = True
    if mrms_downscale_dir is not None:
        request_mrmrs = True
  
    # We will always have RAP when calling do_layering.
    print "RAP downscale dir: %s"%rap_downscale_dir
    rap_file_paths = whf.get_filepaths(rap_downscale_dir)
    size = len(rap_file_paths)
    print("number of RAP files in %s, %s")%(rap_downscale_dir, size)
    if request_hrrr == True:
        print "HRRR true, layer RAP and HRRR"
        # Layer only RAP and HRRR
        hrrr_file_paths = whf.get_filepaths(hrrr_downscale_dir)
        if is_yellowstone:
            for rap in rap_file_paths:
                match = re.match(r'.*/RAP.*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.*)',rap)
                model_run = match.group(1)
                aaf.anal_assim_layer(model_run,fcst_hr,"RAP_HRRR",config_file)

        else:
            for rap in rap_file_paths:
                match = re.match(r'.*/RAP.*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.*)',rap)
                model_run = match.group(1)
                aaf.anal_assim_layer(model_run,fcst_hr,"RAP_HRRR", config_file)
 
        if request_mrms == True:
            # Layer all three: RAP, HRRR, and MRMS
            # Compare the YYYYMMDDHH/YYYYMMDDhh00.LDASIN_DOMAIN1.nc portions
            mrms_file_paths = whf.get_filepaths(mrms_downscale_dir)
            if is_yellowstone:    
                for rap in rap_file_paths:
                    match = re.match(r'.*/RAP.*([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.*)',rap)
                    model_run = match.group(1)
                    whf.anal_assim_layer(model_run, fcst_hr, 'RAP_HRRR_MRMS',config_file) 
            else:
                # Testing on development/test host
                for rap in rap_file_paths:
                    match = re.match(r'.*/RAP.*([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.*)',rap)
                    model_run = match.group(1)
                    whf.anal_assim_layer(model_run, fcst_hr, 'RAP_HRRR_MRMS',config_file) 
    else:
        # Only RAP requested, call layering with just RAP.     
        print ("Only RAP requested, layering called with just RAP")
        if is_yellowstone:    
            for rap in rap_file_paths:
                print("layering rap file: %s")%rap
                match = re.match(r'.*/RAP/.*([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.*)',rap)
                model_run = match.group(1)
                print("model run: %s, fcst hr %s")%(model_run,fcst_hr)
                aaf.anal_assim_layer(model_run, fcst_hr, "RAP",config_file)
         
        else:
            for rap in rap_file_paths:
                match = re.match(r'.*/RAP/.*([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.*)',rap)
                model_run = match.group(1)
                aaf.anal_assim_layer(model_run, fcst_hr, "RAP",config_file)
def main():
    """Tests the regridding and downscaling of RAP and HRRR
       data for the Short Range Forcing Configuration.

       NOTE(review): despite the summary above, the only active
       do_regrid call below processes MRMS; the RAP and HRRR passes
       are commented out.
    """
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # CHANGE THIS TO REFLECT WHICH RUN ENVIRONMENT:
    # YELLOWSTONE OR HYDRO-C!
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # Set flag for testing host
    #is_yellowstone = True
    is_yellowstone = False
    parser = SafeConfigParser()
    config_file = "../../parm/wrf_hydro_forcing.parm"
    parser.read(config_file)

    # Set up logger
    wlog.init(parser, "testAA", "AA","Regrid","MRMS")

    # Start and end dates
    if is_yellowstone:
         start_dt = datetime.datetime.strptime("20151004","%Y%m%d")
         end_dt = datetime.datetime.strptime("20151005","%Y%m%d")
    else:
         start_dt = datetime.datetime.strptime("20160202","%Y%m%d")
         end_dt = datetime.datetime.strptime("20160204","%Y%m%d")

    # Set the directory where the input data resides.
    # For running on yellowstone:
    # RAP_dir_base = "/glade/scratch/lpan/IOC/data/RAP"
    # HRRR_dir_base = "/glade/scratch/lpan/IOC/data/HRRR"
    # MRMS_dir_base = "/glade/scratch/lpan/IOC/data/MRMS"
    # For running on hydro-c1:
    # RAP_downscale_dir =
    # "/glade/scratch/gochis/IOC_evaluation_datasets/
    # Forcing_Engine/workspace/downscaled/RAP"
    # HRRR_downscale_dir = "/glade/scratch/gochis/
    # IOC_evaluation_datasets/Forcing_Engine/workspace/downscaled/HRRR"
    RAP_dir_base = parser.get('data_dir','RAP_data')
    HRRR_dir_base = parser.get('data_dir', 'HRRR_data')
    MRMS_dir_base = parser.get('data_dir', 'MRMS_data')
    # NOTE(review): the finished/downscale dirs below are read but never
    # used in this function; the parser.get calls still validate that
    # the options exist in the config.
    RAP_downscale_dir = parser.get('downscaling', 'RAP_finished_output_dir')
    RAP_0hr_downscale_dir = parser.get('downscaling', 'RAP_finished_output_dir_0hr')
    HRRR_downscale_dir = parser.get('downscaling', 'HRRR_finished_output_dir')
    HRRR_0hr_downscale_dir = parser.get('downscaling', 'HRRR_finished_output_dir_0hr')
    MRMS_downscale_dir = parser.get('regridding','MRMS_finished_output_dir')

    all_RAP_files_with_path = whf.get_filepaths(RAP_dir_base)
    all_HRRR_files_with_path = whf.get_filepaths(HRRR_dir_base)
    all_MRMS_files_with_path = whf.get_filepaths(MRMS_dir_base)

    # We are only interested in the MRMS, RAP and HRRR files that are
    # within the start and end forecast times, since the /glade/scratch/lpan/IOC/data
    # directory is continually adding more dates.
    RAP_files_with_path = [x for x in all_RAP_files_with_path if is_within_time_range(start_dt,end_dt,x,"RAP",is_yellowstone)]
    HRRR_files_with_path = [x for x in all_HRRR_files_with_path if is_within_time_range(start_dt,end_dt,x,"HRRR",is_yellowstone)]
    #
    MRMS_files_with_path = [x for x in all_MRMS_files_with_path if is_within_time_range(start_dt,end_dt,x,"MRMS",is_yellowstone)]

    #do_regrid(config_file, RAP_dir_base,'RAP', RAP_files_with_path, is_yellowstone)
    #do_regrid(config_file, HRRR_dir_base, 'HRRR', HRRR_files_with_path, is_yellowstone)
    do_regrid(config_file,MRMS_dir_base, 'MRMS', MRMS_files_with_path, is_yellowstone)
Exemplo n.º 12
0
def main():
    """Tests the regridding and downscaling of RAP and HRRR
       data for the Short Range Forcing Configuration.
    """
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # CHANGE THIS TO REFLECT WHICH RUN ENVIRONMENT:
    # YELLOWSTONE OR HYDRO-C!
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # Set flag for testing host
    #is_yellowstone = True
    is_yellowstone = False
    parser = SafeConfigParser()
    config_file = "../../parm/b_wrf_hydro_forcing.parm" 

    try:
        parser.read(config_file)
    except:
        print "d'oh!"

   # Set up logger
    #wlog.init(parser, "testShort", "Short","Regrid","HRRR")
 

    # Start and end dates 
    if is_yellowstone:
         start_dt = datetime.datetime.strptime("20150930","%Y%m%d")
         end_dt = datetime.datetime.strptime("20151001","%Y%m%d")
    else:
         start_dt = datetime.datetime.strptime("20160215","%Y%m%d")
         end_dt = datetime.datetime.strptime("20160216","%Y%m%d")

    # Set the directory where the input data resides.
    # For running on yellowstone:
    # RAP_dir_base = "/glade/scratch/lpan/IOC/data/RAP"
    # HRRR_dir_base = "/glade/scratch/lpan/IOC/data/HRRR"
    # For running on hydro-c1:
    # RAP_downscale_dir =
    # "/glade/scratch/gochis/IOC_evaluation_datasets/
    # Forcing_Engine/workspace/downscaled/RAP"
    # HRRR_downscale_dir = "/glade/scratch/gochis/
    # IOC_evaluation_datasets/Forcing_Engine/workspace/downscaled/HRRR"
    RAP_dir_base = parser.get('data_dir','RAP_data')
    HRRR_dir_base = parser.get('data_dir', 'HRRR_data')
    RAP_downscale_dir = parser.get('downscaling', 'RAP_downscale_output_dir')
    HRRR_downscale_dir = parser.get('downscaling', 'HRRR_downscale_output_dir')

    all_RAP_files_with_path = whf.get_filepaths(RAP_dir_base) 
    all_HRRR_files_with_path = whf.get_filepaths(HRRR_dir_base) 

    # We are only interested in the RAP and HRRR files that are
    # within the start and end forecast times.
    HRRR_files_with_path = [x for x in all_HRRR_files_with_path if is_within_time_range(start_dt,end_dt,x,"HRRR",is_yellowstone)]
        
    RAP_files_with_path = [x for x in all_RAP_files_with_path if is_within_time_range(start_dt,end_dt,x,"RAP",is_yellowstone)]

    #for hrrr in HRRR_files_with_path:
    #    print ("process %s")%(hrrr)
    # do the processing on only the input grib files 
    wlog.init(parser, "testShort", "Short","Regrid","RAP")
    do_regrid(config_file,RAP_dir_base,'RAP', RAP_files_with_path, is_yellowstone)
    wlog.init(parser, "testShort", "Short","Regrid","HRRR")
    do_regrid(HRRR_dir_base, 'HRRR', HRRR_files_with_path, is_yellowstone)