def moveFile(fname, destination_dir):
    '''Moves one file to the destination directory, preserving the
       YYYYMMDD date directory component of its path.

       Args:
           fname (string): File to move
           destination_dir (string): Full path to the destination directory
       Returns:
           None
       Raises:
           MissingDirectoryError: If fname's path contains no YYYYMMDD
               date directory.
           MissingFileError: If fname does not match the expected
               grib2/grb2 naming pattern.
    '''
    # Separate the filename from the directory and the date directory.
    date_match = re.match(r'.*/([0-9]{8})', fname)
    if date_match:
        date_dir = date_match.group(1)
    else:
        print("No date directory found, exiting")
        raise MissingDirectoryError("No date directory")

    # Just the filename, no path.  The dots before the extensions are
    # escaped; the original pattern ".*.grib2|.*.grb2" let "." match any
    # character and so also accepted names like "foo_grb2".
    exp = re.compile(r'.*/[0-9]{8}/(.*\.grib2|.*\.grb2)')
    file_match = exp.match(fname)
    if file_match:
        filename_only = file_match.group(1)
    else:
        print("No file name match, exiting")
        raise MissingFileError("No file matching the expected pattern")

    # Recreate the date directory under the destination, then move.
    dest_path = destination_dir + "/" + date_dir
    whf.mkdir_p(dest_path)
    dest = dest_path + "/" + filename_only
    print("Move %s to %s" % (fname, dest))
    shutil.move(fname, dest)
def moveFiles(source_dir, destination_dir, delay=0):
    '''Moves all the files from the source directory to the destination
       directory, preserving each file's YYYYMMDD date directory.

       Args:
           source_dir (string): Full path to the source directory
           destination_dir (string): Full path to the destination directory
           delay (int): Seconds to pause after each file is moved
               (default 0, i.e. no pause).
       Returns:
           None
       Raises:
           MissingDirectoryError: If a file's path has no YYYYMMDD date
               directory (raised by moveFile).
           MissingFileError: If a file does not match the expected
               grib2/grb2 pattern (raised by moveFile).
    '''
    try:
        # Source directory must exist; nothing to move otherwise.
        dir_exists(source_dir)
    except MissingDirectoryError:
        print("Source directory missing. Check directory path")
        return

    # Get a directory listing of every file under the source directory.
    files = whf.get_filepaths(source_dir)

    try:
        # Destination directory
        dir_exists(destination_dir)
    except MissingDirectoryError:
        # Fix: previously the move loop lived in this try's else clause,
        # so when the destination was freshly created the loop was
        # skipped and no files were ever moved.
        print("Destination directory does not exist, creating it now")
        whf.mkdir_p(destination_dir)

    # Move the files.  Delegating to moveFile keeps the date-directory
    # parsing and destination-path construction consistent; the previous
    # inline version omitted the "/" between destination_dir and the
    # date directory.
    for fname in files:
        moveFile(fname, destination_dir)
        time.sleep(delay)
def dirExists(dir):
    """ Verify that a directory exists on disk.

        Args:
            dir (string): Path of the directory in question.
        Raises:
            MissingDirectoryError: If dir is not an existing directory.
    """
    if os.path.isdir(dir):
        return
    raise MissingDirectoryError('Directory %s not found' % dir)
def forcing(configFile, file_in):
    """ Performs bias correction, regridding, and downscaling of one
        CFSv2 forecast file for WRF-Hydro long-range forcing.

        Args:
           1.) configFile (string): The config file with all the
               settings.
           2.) file (string): The file name. The full path is not
               necessary as full paths will be derived from
               parameter directory paths and datetime information.
        Returns:
           None - Performs indicated bias correction, regridding,
                  and downscaling of CFSv2 data. Any errors are
                  trapped and passed back to the driver.
    """
    WhfLog.debug("file_in = %s", file_in)

    # Obtain CFSv2 forcing engine parameters.
    parser = SafeConfigParser()
    parser.read(configFile)

    # Set up logging environments, etc.  (The original wrapped this in a
    # bare "try/except: raise", which is a no-op and has been removed.)
    forcing_config_label = "Long_Range"
    Whf.initial_setup(parser, forcing_config_label)

    out_dir = parser.get('layering', 'long_range_output')
    tmp_dir = parser.get('bias_correction', 'CFS_tmp_dir')

    # Fix: these errors were previously raised as
    # MissingDirectoryError('Dir %s cannot be created', out_dir), which
    # never interpolated the directory name into the message.
    if not df.makeDirIfNeeded(out_dir):
        raise MissingDirectoryError('Dir %s cannot be created' % out_dir)
    if not df.makeDirIfNeeded(tmp_dir):
        raise MissingDirectoryError('Dir %s cannot be created' % tmp_dir)

    # Define CFSv2 cycle date and valid time based on file name.
    (cycleYYYYMMDD, cycleHH, fcsthr, em) = Whf.extract_file_info_cfs(file_in)
    em_str = str(em)

    # Pull path to NCL bias correction module file. Export this as an
    # environmental variable NCL refers to later.
    nclBiasMod = parser.get('exe', 'CFS_bias_correct_mod')
    os.environ["CFS_NCL_BIAS_MOD"] = nclBiasMod

    # Establish datetime objects for the model cycle and the forecast
    # valid time.  (Unused local dateCurrent removed.)
    dateCycleYYYYMMDDHH = datetime.datetime(year=int(cycleYYYYMMDD[0:4]),
                                            month=int(cycleYYYYMMDD[4:6]),
                                            day=int(cycleYYYYMMDD[6:8]),
                                            hour=cycleHH)
    dateFcstYYYYMMDDHH = dateCycleYYYYMMDDHH + \
        datetime.timedelta(seconds=fcsthr*3600)

    # Determine if this is a 0hr forecast file or not.
    if dateFcstYYYYMMDDHH == dateCycleYYYYMMDDHH:
        fFlag = 1
    else:
        fFlag = 0

    # Establish final output directories to hold 'LDASIN' files used for
    # WRF-Hydro long-range forecasting. If the directory does not exist,
    # create it.
    out_path = out_dir + "/Member_" + em_str.zfill(2) + "/" + \
        dateCycleYYYYMMDDHH.strftime("%Y%m%d%H")
    Whf.mkdir_p(out_path)

    in_fcst_range = Whf.is_in_fcst_range("CFSv2", fcsthr, parser)

    if in_fcst_range:
        # First, bias-correct CFSv2 data and generate hourly files
        # from six-hour forecast
        WhfLog.info("Bias correcting for CFSv2 cycle: " +
                    dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') +
                    " CFSv2 forecast time: " +
                    dateFcstYYYYMMDDHH.strftime('%Y%m%d%H'))
        try:
            Whf.bias_correction('CFSV2', file_in, dateCycleYYYYMMDDHH,
                                dateFcstYYYYMMDDHH, parser, em=em)
        except (MissingFileError, NCLError):
            raise

        # Second, regrid to the conus IOC domain
        # Loop through each hour in a six-hour CFSv2 forecast time step,
        # compose temporary filename generated from bias-correction and
        # call the regridding to go to the conus domain.
        if fFlag == 1:
            # 0-hr file: process only the single final hour of the window.
            begCt = 6
            endCt = 7
        else:
            begCt = 1
            endCt = 7
        for hour in range(begCt, endCt):
            dateTempYYYYMMDDHH = dateFcstYYYYMMDDHH - \
                datetime.timedelta(seconds=(6-hour)*3600)
            fileBiasCorrected = tmp_dir + "/CFSv2_bias_corrected_TMP_" + \
                dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') + "_" + \
                dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + ".M" + \
                em_str.zfill(2) + ".nc"
            WhfLog.info("Regridding CFSv2 to conus domain for cycle: " +
                        dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') +
                        " forecast time: " +
                        dateTempYYYYMMDDHH.strftime('%Y%m%d%H'))
            try:
                fileRegridded = Whf.regrid_data("CFSV2", fileBiasCorrected,
                                                parser)
            except (MissingFileError, NCLError):
                raise

            # Double check to make sure file was created, delete temporary
            # bias-corrected file
            try:
                Whf.file_exists(fileRegridded)
            except MissingFileError:
                raise
            cmd = "rm -rf " + fileBiasCorrected
            status = os.system(cmd)
            if status != 0:
                raise SystemCommandError('Command %s failed.' % cmd)

        # Third, perform topography downscaling to generate final
        # Loop through each hour in a six-hour CFSv2 forecast time step,
        # compose temporary filename generated from regridding and call
        # the downscaling function.
        for hour in range(begCt, endCt):
            dateTempYYYYMMDDHH = dateFcstYYYYMMDDHH - \
                datetime.timedelta(seconds=(6-hour)*3600)
            WhfLog.info("Downscaling CFSv2 for cycle: " +
                        dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') +
                        " forecast time: " +
                        dateTempYYYYMMDDHH.strftime('%Y%m%d%H'))
            fileRegridded = tmp_dir + "/CFSv2_bias_corrected_TMP_" + \
                dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') + "_" + \
                dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + \
                "_regridded.M" + em_str.zfill(2) + ".nc"
            LDASIN_path_tmp = tmp_dir + "/" + \
                dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + \
                "00.LDASIN_DOMAIN1.nc"
            LDASIN_path_final = out_path + "/" + \
                dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + \
                "00.LDASIN_DOMAIN1"
            try:
                Whf.downscale_data("CFSv2", fileRegridded, parser,
                                   out_path=LDASIN_path_tmp,
                                   verYYYYMMDDHH=dateTempYYYYMMDDHH)
            except (MissingFileError, FilenameMatchError, NCLError,
                    SystemCommandError):
                raise
            # Double check to make sure file was created, delete temporary
            # regridded file
            try:
                Whf.file_exists(LDASIN_path_tmp)
            except MissingFileError:
                raise
            # Rename file to conform to WRF-Hydro expectations
            cmd = "mv " + LDASIN_path_tmp + " " + LDASIN_path_final
            status = os.system(cmd)
            if status != 0:
                raise SystemCommandError('Command %s failed.' % cmd)
            try:
                Whf.file_exists(LDASIN_path_final)
            except MissingFileError:
                raise
            cmd = "rm -rf " + fileRegridded
            status = os.system(cmd)
            if status != 0:
                raise SystemCommandError('Command %s failed.' % cmd)
        WhfLog.info("Long_Range processing for %s%d Forecast Hour: %d Ensemble: %s",
                    cycleYYYYMMDD, cycleHH, fcsthr, em_str)
    else:
        # Skip processing this file. Exit gracefully with a 0 exit status.
        WhfLog.info("Requested file is outside max fcst for CFSv2")
def setup_for_realtime(product, source_dir, destination_dir):
    '''Sets up archived data to behave like real-time data.
       Move the necessary data from one directory to another (to avoid
       filling disk space).  Update the corresponding state file with a
       sorted list of files.

       Args:
           product (string): Data product ("rap", "hrrr", "mrms", or
               "cfs"; case-insensitive).
           source_dir (string): Full path to the source directory.
           destination_dir (string): Full path to the destination
               directory.
       Returns:
           None
       Raises:
           ValueError: If product is not a recognized data product.
           MissingDirectoryError / MissingFileError: If a source file
               does not match the expected date-dir/grib2 layout.
    '''
    prod = product.lower()

    # Assign the state file according to the data product.  Fix: the
    # original compared the raw `product`, so a non-lowercase spelling
    # (e.g. "RAP") matched nothing and left state_file unbound; unknown
    # products now fail with an explicit error instead of a NameError.
    if prod == "rap":
        state_file = "State.RapRegrid.txt"
    elif prod == "hrrr":
        state_file = "State.HrrrRegrid.txt"
    elif prod == "mrms":
        state_file = "State.MrmsRegrid.txt"
    elif prod == "cfs":
        state_file = "State.LongRangeRegrid.txt"
    else:
        raise ValueError("Unrecognized product: %s" % product)

    # Open the state file. If it already exists, overwrite it
    # otherwise create one.  Closed explicitly (the original leaked the
    # handle).
    sf = open(state_file, 'w+')
    try:
        sf.write('[latest]\n')
        sf.write(product + "= ")

        try:
            # Source directory
            dir_exists(source_dir)
        except MissingDirectoryError:
            print("Source directory missing. Check directory path")
            return

        # Get a directory listing, sorted so the state file lists files
        # in order.
        files = get_filepaths(source_dir)
        files.sort()

        try:
            # Destination directory
            dir_exists(destination_dir)
        except MissingDirectoryError:
            # Fix: previously the per-file loop lived in this try's else
            # clause, so a freshly created destination got no entries.
            print("Destination directory does not exist, creating it now")
            mkdir_p(destination_dir)

        for fname in files:
            # Separate the filename from the directory and the date
            # directory.
            date_match = re.match(r'.*/([0-9]{8})', fname)
            if date_match:
                date_dir = date_match.group(1)
            else:
                print("No date directory found, exiting")
                raise MissingDirectoryError("No date directory")

            # Just the filename, no path
            exp = re.compile(r'.*/[0-9]{8}/(.*.grib2|.*.grb2)')
            file_match = exp.match(fname)
            if file_match:
                filename_only = file_match.group(1)
            else:
                print("No file name match, exiting")
                raise MissingFileError("No file matching the expected pattern")

            # Fix: os.path.join supplies the separator the original
            # "".join((destination_dir, date_dir, "/")) omitted.
            dest_path = os.path.join(destination_dir, date_dir)
            mkdir_p(dest_path)
            dest = os.path.join(dest_path, filename_only)
            # NOTE(review): dest is computed but nothing is ever moved
            # here (unlike moveFiles), despite the docstring -- confirm
            # whether the move happens elsewhere before relying on this.

            date_and_file = " %s/%s" % (date_dir, filename_only)
            print("date and file: %s" % date_and_file)

            # Update the state file corresponding to this data product
            sf.write(date_and_file)
            sf.write("\n")
    finally:
        sf.close()