def unzip_file(GzFileIn, FileOut, ConfigOptions, MpiConfig): """ Generic I/O function to unzip a .gz file to a new location. :param GzFileIn: :param FileOut: :param ConfigOptions: :param MpiConfig: :return: """ # Ensure all processors are synced up before outputting. MpiConfig.comm.barrier() if MpiConfig.rank == 0: # Unzip the file in place. try: with gzip.open(GzFileIn, 'rb') as fTmpGz: with open(FileOut, 'wb') as fTmp: shutil.copyfileobj(fTmpGz, fTmp) except: ConfigOptions.errMsg = "Unable to unzip: " + GzFileIn errMod.log_critical(ConfigOptions, MpiConfig) return if not os.path.isfile(FileOut): ConfigOptions.errMsg = "Unable to locate expected unzipped file: " + \ FileOut errMod.log_critical(ConfigOptions, MpiConfig) return else: return
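# --- Illustrative example (not part of the original module) ---
# A minimal, standalone sketch of the gzip/shutil streaming pattern used by
# unzip_file() above. The file paths are hypothetical.
import gzip
import shutil

def example_unzip(gz_path, out_path):
    """Stream-decompress a .gz file to a new location without loading it all into memory."""
    with gzip.open(gz_path, 'rb') as f_in, open(out_path, 'wb') as f_out:
        shutil.copyfileobj(f_in, f_out)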
def scatter_array(self, geoMeta, array_broadcast, ConfigOptions):
    """
    Generic function for calling scatter functions based on the input dataset type.
    :param geoMeta:
    :param array_broadcast:
    :param ConfigOptions:
    :return:
    """
    # Determine which type of input array we have based on the
    # type of numpy array.
    data_type_flag = -1
    if self.rank == 0:
        if array_broadcast.dtype == np.float32:
            data_type_flag = 1
        if array_broadcast.dtype == np.float64:
            data_type_flag = 2

    # Broadcast the numpy datatype to the other processors.
    if self.rank == 0:
        tmpDict = {'varTmp': data_type_flag}
    else:
        tmpDict = None
    try:
        tmpDict = self.comm.bcast(tmpDict, root=0)
    except:
        ConfigOptions.errMsg = "Unable to broadcast numpy datatype value from rank 0"
        errMod.log_critical(ConfigOptions, self)
        return None
    data_type_flag = tmpDict['varTmp']

    # Broadcast the global array to the child processors, then slice out the
    # local slab owned by this processor.
    if self.rank == 0:
        arrayGlobalTmp = array_broadcast
    else:
        if data_type_flag == 1:
            arrayGlobalTmp = np.empty([geoMeta.ny_global, geoMeta.nx_global], np.float32)
        if data_type_flag == 2:
            arrayGlobalTmp = np.empty([geoMeta.ny_global, geoMeta.nx_global], np.float64)
    try:
        self.comm.Bcast(arrayGlobalTmp, root=0)
    except:
        ConfigOptions.errMsg = "Unable to broadcast a global numpy array from rank 0"
        errMod.log_critical(ConfigOptions, self)
        return None
    arraySub = arrayGlobalTmp[geoMeta.y_lower_bound:geoMeta.y_upper_bound,
                              geoMeta.x_lower_bound:geoMeta.x_upper_bound]
    return arraySub
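# --- Illustrative example (not part of the original module) ---
# A minimal sketch of how the y/x bound attributes on the geospatial metadata object
# translate into a local slab after the broadcast in scatter_array(). The bounds below
# are hypothetical values for a two-rank decomposition of the domain by rows.
import numpy as np

global_grid = np.arange(8 * 4, dtype=np.float32).reshape(8, 4)   # ny_global=8, nx_global=4

# Rank 0 would own rows 0-3 and rank 1 rows 4-7 (upper bounds are exclusive).
y_lower_bound, y_upper_bound = 4, 8
x_lower_bound, x_upper_bound = 0, 4

local_slab = global_grid[y_lower_bound:y_upper_bound, x_lower_bound:x_upper_bound]
assert local_slab.shape == (4, 4)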
def ncar_topo_adj(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig): """ Topographic adjustment of incoming shortwave radiation fluxes, given input parameters. :param input_forcings: :param ConfigOptions: :return: """ if MpiConfig.rank == 0: ConfigOptions.statusMsg = "Performing topographic adjustment to incoming " \ "shortwave radiation flux." errMod.log_msg(ConfigOptions, MpiConfig) # Establish where we have missing values. try: indNdv = np.where( input_forcings.final_forcings == ConfigOptions.globalNdv) except: ConfigOptions.errMsg = "Unable to perform NDV search on input forcings" errMod.log_critical(ConfigOptions, MpiConfig) return # By the time this function has been called, necessary input static grids (height, slope, etc), # should have been calculated for each local slab of data. DEGRAD = math.pi / 180.0 DPD = 360.0 / 365.0 try: DECLIN, SOLCON = radconst(ConfigOptions) except: ConfigOptions.errMsg = "Unable to calculate solar constants based on datetime information." errMod.log_critical(ConfigOptions, MpiConfig) return try: coszen_loc, hrang_loc = calc_coszen(ConfigOptions, DECLIN, GeoMetaWrfHydro) except: ConfigOptions.errMsg = "Unable to calculate COSZEN or HRANG variables for topographic adjustment " \ "of incoming shortwave radiation" errMod.log_critical(ConfigOptions, MpiConfig) return try: TOPO_RAD_ADJ_DRVR(GeoMetaWrfHydro, input_forcings, coszen_loc, DECLIN, SOLCON, hrang_loc) except: ConfigOptions.errMsg = "Unable to perform final topographic adjustment of incoming " \ "shortwave radiation fluxes." errMod.log_critical(ConfigOptions, MpiConfig) return # Assign missing values based on our mask. input_forcings.final_forcings[indNdv] = ConfigOptions.globalNdv # Reset variables to free up memory DECLIN = None SOLCON = None coszen_loc = None hrang_loc = None indNdv = None
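# --- Illustrative example (not part of the original module) ---
# A minimal sketch of the recurring missing-value pattern used in ncar_topo_adj() and
# the downscaling routines below: record where the no-data value sits before an
# adjustment, apply the adjustment everywhere, then restore the no-data flag so the
# math never manufactures data in masked cells. Values here are made up.
import numpy as np

global_ndv = -9999.0
forcings = np.array([[300.0, global_ndv], [295.0, 290.0]])

ind_ndv = np.where(forcings == global_ndv)   # remember where the gaps are
forcings = forcings + 1.5                    # any adjustment, applied everywhere
forcings[ind_ndv] = global_ndv               # put the no-data flag back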
def open_netcdf_forcing(NetCdfFileIn, ConfigOptions, MpiConfig):
    """
    Generic function to open a NetCDF forcing file and ensure the expected
    coordinate variables are present.
    :param NetCdfFileIn:
    :param ConfigOptions:
    :param MpiConfig:
    :return:
    """
    # Ensure all processors are synced up before outputting.
    MpiConfig.comm.barrier()

    # Open the NetCDF file on the master processor and read in data.
    if MpiConfig.rank == 0:
        # Ensure file exists.
        if not os.path.isfile(NetCdfFileIn):
            ConfigOptions.errMsg = "Expected NetCDF file: " + NetCdfFileIn + " not found."
            errMod.log_critical(ConfigOptions, MpiConfig)
            idTmp = None
            pass

        # Open the NetCDF file.
        try:
            idTmp = Dataset(NetCdfFileIn, 'r')
        except:
            ConfigOptions.errMsg = "Unable to open input NetCDF file: " + NetCdfFileIn
            errMod.log_critical(ConfigOptions, MpiConfig)
            idTmp = None
            pass

        if idTmp is not None:
            # Check for expected lat/lon variables.
            if 'latitude' not in idTmp.variables.keys():
                ConfigOptions.errMsg = "Unable to locate latitude from: " + NetCdfFileIn
                errMod.log_critical(ConfigOptions, MpiConfig)
                idTmp = None
                pass

        if idTmp is not None:
            if 'longitude' not in idTmp.variables.keys():
                ConfigOptions.errMsg = "Unable to locate longitude from: " + NetCdfFileIn
                errMod.log_critical(ConfigOptions, MpiConfig)
                idTmp = None
                pass
        pass
    else:
        idTmp = None

    errMod.check_program_status(ConfigOptions, MpiConfig)

    # Ensure all processors are synced up before outputting.
    MpiConfig.comm.barrier()
    errMod.check_program_status(ConfigOptions, MpiConfig)

    # Return the NetCDF file handle back to the user.
    return idTmp
def no_bias_correct(input_forcings, GeoMetaWrfHydro, ConfigOptions, MpiConfig, force_num): """ Generic routine to simply pass forcing states through without any bias correction. :param input_forcings: :param ConfigOptions: :param force_num: :return: """ try: input_forcings.final_forcings[force_num, :, :] = input_forcings.final_forcings[force_num, :, :] except: ConfigOptions.errMsg = "Unable to set final forcings during bias correction routine." errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig)
def broadcast_parameter(self, value_broadcast, ConfigOptions):
    """
    Generic function for sending a parameter value out to the processors.
    :param value_broadcast:
    :param ConfigOptions:
    :return:
    """
    # Create dictionary to hold value.
    if self.rank == 0:
        tmpDict = {'varTmp': value_broadcast}
    else:
        tmpDict = None
    try:
        tmpDict = self.comm.bcast(tmpDict, root=0)
    except:
        ConfigOptions.errMsg = "Unable to broadcast single value from rank 0."
        errMod.log_critical(ConfigOptions, self)
        return None
    return tmpDict['varTmp']
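# --- Illustrative example (not part of the original module) ---
# A minimal sketch of broadcasting a single Python value with mpi4py, mirroring
# broadcast_parameter() above. Requires mpi4py and an MPI launcher, e.g.
# "mpirun -n 4 python this_script.py".
from mpi4py import MPI

comm = MPI.COMM_WORLD
value = {'varTmp': 42.0} if comm.Get_rank() == 0 else None
value = comm.bcast(value, root=0)           # every rank now holds {'varTmp': 42.0}
print(comm.Get_rank(), value['varTmp'])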
def simple_lapse(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig):
    """
    Function that applies a single lapse rate adjustment to modeled
    2-meter temperature by taking the difference of the native
    input elevation and the WRF-Hydro elevation.
    :param input_forcings:
    :param ConfigOptions:
    :param GeoMetaWrfHydro:
    :return:
    """
    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Applying simple lapse rate to temperature downscaling"
        errMod.log_msg(ConfigOptions, MpiConfig)

    # Calculate the elevation difference.
    elevDiff = input_forcings.height - GeoMetaWrfHydro.height

    # Assign existing, un-downscaled temperatures to a temporary placeholder, which
    # will be used for specific humidity downscaling.
    if input_forcings.q2dDownscaleOpt > 0:
        input_forcings.t2dTmp[:, :] = input_forcings.final_forcings[4, :, :]

    # Apply single lapse rate value to the input 2-meter
    # temperature values.
    try:
        indNdv = np.where(input_forcings.final_forcings == ConfigOptions.globalNdv)
    except:
        ConfigOptions.errMsg = "Unable to perform NDV search on input forcings"
        errMod.log_critical(ConfigOptions, MpiConfig)
        return
    try:
        input_forcings.final_forcings[4, :, :] = input_forcings.final_forcings[4, :, :] + \
            (6.49 / 1000.0) * elevDiff
    except:
        ConfigOptions.errMsg = "Unable to apply lapse rate to input 2-meter temperatures."
        errMod.log_critical(ConfigOptions, MpiConfig)
        return

    input_forcings.final_forcings[indNdv] = ConfigOptions.globalNdv

    # Reset for memory efficiency
    indNdv = None
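# --- Illustrative example (not part of the original module) ---
# A minimal sketch of the fixed-lapse-rate adjustment above, which amounts to
# T_adj = T + (6.49 K/km) * (z_input - z_wrf_hydro). Array names and values are
# hypothetical stand-ins for the regridded fields.
import numpy as np

def example_simple_lapse(t2d_k, elev_input_m, elev_output_m):
    """Apply a fixed 6.49 K/km lapse rate to a 2-m temperature slab."""
    elev_diff = elev_input_m - elev_output_m          # positive when the source grid is higher
    return t2d_k + (6.49 / 1000.0) * elev_diff        # warm the air when moving downhill

# A cell sitting 500 m above the WRF-Hydro elevation is warmed by about 3.2 K:
# example_simple_lapse(np.array([280.0]), np.array([1500.0]), np.array([1000.0])) -> ~283.2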
def pressure_down_classic(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig): """ Generic function to downscale surface pressure to the WRF-Hydro domain. :param input_forcings: :param ConfigOptions: :param GeoMetaWrfHydro: :return: """ if MpiConfig.rank == 0: ConfigOptions.statusMsg = "Performing topographic adjustment to surface pressure." errMod.log_msg(ConfigOptions, MpiConfig) # Calculate the elevation difference. elevDiff = input_forcings.height - GeoMetaWrfHydro.height # Assign existing, un-downscaled pressure values to a temporary placeholder, which # will be used for specific humidity downscaling. if input_forcings.q2dDownscaleOpt > 0: input_forcings.psfcTmp[:, :] = input_forcings.final_forcings[6, :, :] try: indNdv = np.where( input_forcings.final_forcings == ConfigOptions.globalNdv) except: ConfigOptions.errMsg = "Unable to perform NDV search on input forcings" errMod.log_critical(ConfigOptions, MpiConfig) return try: input_forcings.final_forcings[6,:,:] = input_forcings.final_forcings[6,:,:] +\ (input_forcings.final_forcings[6,:,:]*elevDiff*9.8)/\ (input_forcings.final_forcings[4,:,:]*287.05) except: ConfigOptions.errMsg = "Unable to downscale surface pressure to input forcings." errMod.log_critical(ConfigOptions, MpiConfig) return input_forcings.final_forcings[indNdv] = ConfigOptions.globalNdv # Reset for memory efficiency indNdv = None
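# --- Illustrative example (not part of the original module) ---
# A minimal worked example of the first-order pressure adjustment used in
# pressure_down_classic() above: P_adj = P + P * g * dz / (R_d * T), where dz is the
# drop from the native forcing elevation to the WRF-Hydro elevation. Values are made up.
import numpy as np

g, r_d = 9.8, 287.05                 # gravity (m s-2), dry-air gas constant (J kg-1 K-1)
psfc = np.array([85000.0])           # surface pressure on the source grid (Pa)
t2d = np.array([280.0])              # 2-m air temperature (K)
elev_diff = np.array([300.0])        # source elevation minus WRF-Hydro elevation (m)

psfc_adj = psfc + (psfc * elev_diff * g) / (t2d * r_d)
# ~88,100 Pa: pressure rises when the target grid cell sits ~300 m lower than the source.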
def ncar_blanket_adjustment_lw(input_forcings, GeoMetaWrfHydro, ConfigOptions, MpiConfig, force_num): """ Generic NCAR bias correction for incoming longwave radiation fluxes. NOTE!!! - This is based off HRRRv3 analysis and should be used with extreme caution..... :param input_forcings: :param GeoMetaWrfHydro: :param ConfigOptions: :param MpiConfig: :param force_num: :return: """ # Establish blanket adjustment to apply across the board in W/m^2 adj_lw = 9.0 # Perform adjustment. try: lwTmp = input_forcings.final_forcings[input_forcings.input_map_output[force_num], :, :] except: ConfigOptions.errMsg = "Unable to extract incoming LW from forcing object for: " + input_forcings.productName errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) try: indValid = np.where(lwTmp != ConfigOptions.globalNdv) except: ConfigOptions.errMsg = "Unable to calculate valid index in incoming LW for: " + input_forcings.productName errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) try: lwTmp[indValid] = lwTmp[indValid] + adj_lw except: ConfigOptions.errMsg = "Unable to perform LW bias correction for: " + input_forcings.productName errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) try: input_forcings.final_forcings[input_forcings.input_map_output[force_num], :, :] = lwTmp[:, :] except: ConfigOptions.errMsg = "Unable to place temporary LW array back into forcing object for: " + \ input_forcings.productName errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) # Reset temporary variables to keep low memory footprint. lwTmp = None indValid = None
def q2_down_classic(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig): """ NCAR function for downscaling 2-meter specific humidity using already downscaled 2-meter temperature, unadjusted surface pressure, and downscaled surface pressure. :param input_forcings: :param ConfigOptions: :param GeoMetaWrfHydro: :return: """ if MpiConfig.rank == 0: ConfigOptions.statusMsg = "Performing topographic adjustment to specific humidity." errMod.log_msg(ConfigOptions, MpiConfig) # Establish where we have missing values. try: indNdv = np.where( input_forcings.final_forcings == ConfigOptions.globalNdv) except: ConfigOptions.errMsg = "Unable to perform NDV search on input forcings" errMod.log_critical(ConfigOptions, MpiConfig) return # First calculate relative humidity given original surface pressure and 2-meter # temperature try: relHum = rel_hum(input_forcings, ConfigOptions) except: ConfigOptions.errMsg = "Unable to perform topographic downscaling of incoming " \ "specific humidity to relative humidity" errMod.log_critical(ConfigOptions, MpiConfig) return # Downscale 2-meter specific humidity try: q2Tmp = mixhum_ptrh(input_forcings, relHum, 2, ConfigOptions) except: ConfigOptions.errMsg = "Unable to perform topographic downscaling of " \ "incoming specific humidity" errMod.log_critical(ConfigOptions, MpiConfig) return input_forcings.final_forcings[5, :, :] = q2Tmp input_forcings.final_forcings[indNdv] = ConfigOptions.globalNdv q2Tmp = None indNdv = None
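# --- Illustrative example (not part of the original module) ---
# A minimal sketch of the round trip performed by rel_hum()/mixhum_ptrh() above: hold
# relative humidity fixed while temperature and pressure are replaced by their downscaled
# values, then recover specific humidity. The Magnus-type saturation vapor pressure
# formula below is an assumption for illustration only; the engine's own helper
# functions may differ in detail.
import numpy as np

def sat_vapor_pressure_pa(t_k):
    """Saturation vapor pressure (Pa), Magnus-type approximation over water."""
    t_c = t_k - 273.15
    return 611.2 * np.exp(17.67 * t_c / (t_c + 243.5))

def q_from_rh(rh, t_k, p_pa):
    """Specific humidity (kg/kg) from relative humidity (0-1), temperature (K), pressure (Pa)."""
    e = rh * sat_vapor_pressure_pa(t_k)
    return 0.622 * e / (p_pa - 0.378 * e)

# Keep RH fixed at 60% while T/P take their downscaled values: roughly 0.0052 kg/kg here.
q2_downscaled = q_from_rh(0.60, t_k=283.0, p_pa=88000.0)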
def nwm_monthly_PRISM_downscale(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig): """ NCAR/OWP function for downscaling precipitation using monthly PRISM climatology in a mountain-mapper like fashion. :param input_forcings: :param ConfigOptions: :param GeoMetaWrfHydro: :return: """ if MpiConfig.rank == 0: ConfigOptions.statusMsg = "Performing NWM Monthly PRISM Mountain Mapper " \ "Downscaling of Precipitation" errMod.log_msg(ConfigOptions, MpiConfig) print(ConfigOptions.statusMsg) # Establish whether or not we need to read in new PRISM monthly climatology: # 1.) This is the first output timestep, and no grids have been initialized. # 2.) We have switched months from the last timestep. In this case, we need # to re-initialize the grids for the current month. initializeFlag = False if input_forcings.nwmPRISM_denGrid is None and input_forcings.nwmPRISM_numGrid is None: # We are on situation 1 - This is the first output step. initializeFlag = True print('WE NEED TO READ IN PRISM GRIDS') if ConfigOptions.current_output_date.month != ConfigOptions.prev_output_date.month: # We are on situation #2 - The month has changed so we need to reinitialize the # PRISM grids. initializeFlag = True print('MONTH CHANGE.... NEED TO READ IN NEW PRISM GRIDS.') if initializeFlag == True: while (True): # First reset the local PRISM grids to be safe. input_forcings.nwmPRISM_numGrid = None input_forcings.nwmPRISM_denGrid = None # Compose paths to the expected files. numeratorPath = input_forcings.paramDir + "/PRISM_Precip_Clim_" + \ ConfigOptions.current_output_date.strftime('%h') + '_NWM_Mtn_Mapper_Numer.nc' denominatorPath = input_forcings.paramDir + "/PRISM_Precip_Clim_" + \ ConfigOptions.current_output_date.strftime('%h') + '_NWM_Mtn_Mapper_Denom.nc' print(numeratorPath) print(denominatorPath) # Make sure files exist. if not os.path.isfile(numeratorPath): ConfigOptions.errMsg = "Expected parameter file: " + numeratorPath + \ " for mountain mapper downscaling of precipitation not found." errMod.log_critical(ConfigOptions, MpiConfig) break if not os.path.isfile(denominatorPath): ConfigOptions.errMsg = "Expected parameter file: " + denominatorPath + \ " for mountain mapper downscaling of precipitation not found." errMod.log_critical(ConfigOptions, MpiConfig) break if MpiConfig.rank == 0: # Open the NetCDF parameter files. Check to make sure expected dimension # sizes are in place, along with variable names, etc. try: idNum = Dataset(numeratorPath, 'r') except: ConfigOptions.errMsg = "Unable to open parameter file: " + numeratorPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idDenom = Dataset(denominatorPath, 'r') except: ConfigOptions.errMsg = "Unable to open parameter file: " + denominatorPath errMod.log_critical(ConfigOptions, MpiConfig) break # Check to make sure expected names, dimension sizes are present. 
if 'x' not in idNum.variables.keys(): ConfigOptions.errMsg = "Expected 'x' variable not found in parameter file: " + numeratorPath errMod.log_critical(ConfigOptions, MpiConfig) break if 'x' not in idDenom.variables.keys(): ConfigOptions.errMsg = "Expected 'x' variable not found in parameter file: " + denominatorPath errMod.log_critical(ConfigOptions, MpiConfig) break if 'y' not in idNum.variables.keys(): ConfigOptions.errMsg = "Expected 'y' variable not found in parameter file: " + numeratorPath errMod.log_critical(ConfigOptions, MpiConfig) break if 'y' not in idDenom.variables.keys(): ConfigOptions.errMsg = "Expected 'y' variable not found in parameter file: " + denominatorPath errMod.log_critical(ConfigOptions, MpiConfig) break if 'Data' not in idNum.variables.keys(): ConfigOptions.errMsg = "Expected 'Data' variable not found in parameter file: " + numeratorPath errMod.log_critical(ConfigOptions, MpiConfig) break if 'Data' not in idDenom.variables.keys(): ConfigOptions.errMsg = "Expected 'Data' variable not found in parameter file: " + denominatorPath errMod.log_critical(ConfigOptions, MpiConfig) break if idNum.variables['Data'].shape[ 0] != GeoMetaWrfHydro.ny_global: ConfigOptions.errMsg = "Input Y dimension for: " + numeratorPath + \ " does not match the output WRF-Hydro Y dimension size." errMod.log_critical(ConfigOptions, MpiConfig) break if idDenom.variables['Data'].shape[ 0] != GeoMetaWrfHydro.ny_global: ConfigOptions.errMsg = "Input Y dimension for: " + denominatorPath + \ " does not match the output WRF-Hydro Y dimension size." errMod.log_critical(ConfigOptions, MpiConfig) break if idNum.variables['Data'].shape[ 1] != GeoMetaWrfHydro.nx_global: ConfigOptions.errMsg = "Input X dimension for: " + numeratorPath + \ " does not match the output WRF-Hydro X dimension size." errMod.log_critical(ConfigOptions, MpiConfig) break if idDenom.variables['Data'].shape[ 1] != GeoMetaWrfHydro.nx_global: ConfigOptions.errMsg = "Input X dimension for: " + denominatorPath + \ " does not match the output WRF-Hydro X dimension size." errMod.log_critical(ConfigOptions, MpiConfig) break # Read in the PRISM grid on the output grid. Then scatter the array out to the processors. try: numDataTmp = idNum.variables['Data'][:, :] except: ConfigOptions.errMsg = "Unable to extract 'Data' from parameter file: " + numeratorPath errMod.log_critical(ConfigOptions, MpiConfig) break try: denDataTmp = idDenom.variables['Data'][:, :] except: ConfigOptions.errMsg = "Unable to extract 'Data' from parameter file: " + denominatorPath errMod.log_critical(ConfigOptions, MpiConfig) break # Close the parameter files. try: idNum.close() except: ConfigOptions.errMsg = "Unable to close parameter file: " + numeratorPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idDenom.close() except: ConfigOptions.errMsg = "Unable to close parameter file: " + denominatorPath errMod.log_critical(ConfigOptions, MpiConfig) break else: numDataTmp = None denDataTmp = None break errMod.check_program_status(ConfigOptions, MpiConfig) # Scatter the array out to the local processors input_forcings.nwmPRISM_numGrid = MpiConfig.scatter_array( GeoMetaWrfHydro, numDataTmp, ConfigOptions) errMod.check_program_status(ConfigOptions, MpiConfig) input_forcings.nwmPRISM_denGrid = MpiConfig.scatter_array( GeoMetaWrfHydro, denDataTmp, ConfigOptions) errMod.check_program_status(ConfigOptions, MpiConfig) # Create temporary grids from the local slabs of params/precip forcings. 
localRainRate = input_forcings.final_forcings[3, :, :] numLocal = input_forcings.nwmPRISM_numGrid[:, :] denLocal = input_forcings.nwmPRISM_denGrid[:, :] # Establish index of where we have valid data. try: indValid = np.where((localRainRate > 0.0) & (denLocal > 0.0) & (numLocal > 0.0)) except: ConfigOptions.errMsg = "Unable to run numpy search for valid values on precip and " \ "param grid in mountain mapper downscaling" errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) # Convert precipitation rate, which is mm/s to mm, which is needed to run the PRISM downscaling. try: localRainRate[indValid] = localRainRate[indValid] * 3600.0 except: ConfigOptions.errMsg = "Unable to convert temporary precip rate from mm/s to mm." errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) try: localRainRate[indValid] = localRainRate[indValid] * numLocal[indValid] except: ConfigOptions.errMsg = "Unable to multiply precip by numerator in mountain mapper downscaling" errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) try: localRainRate[indValid] = localRainRate[indValid] / denLocal[indValid] except: ConfigOptions.errMsg = "Unable to divide precip by denominator in mountain mapper downscaling" errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) # Convert local precip back to a rate (mm/s) try: localRainRate[indValid] = localRainRate[indValid] / 3600.0 except: ConfigOptions.errMsg = "Unable to convert temporary precip rate from mm to mm/s." errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) input_forcings.final_forcings[3, :, :] = localRainRate # Reset variables for memory efficiency idDenom = None idNum = None localRainRate = None numLocal = None denLocal = None
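# --- Illustrative example (not part of the original module) ---
# A minimal worked example of the mountain-mapper scaling above, on a single hypothetical
# grid cell: convert the rate to an hourly accumulation, rescale by the PRISM
# numerator/denominator climatology weights, and convert back to a rate.
rain_rate_mm_s = 0.0005          # incoming precipitation rate (mm/s)
num_weight = 1.8                 # hypothetical PRISM numerator weight for this cell
den_weight = 1.2                 # hypothetical PRISM denominator weight for this cell

accum_mm = rain_rate_mm_s * 3600.0               # mm/s -> mm over the hour (1.8 mm)
accum_mm = accum_mm * num_weight / den_weight    # apply the PRISM ratio (2.7 mm)
rain_rate_mm_s = accum_mm / 3600.0               # back to mm/s (0.00075 mm/s)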
def ncar_tbl_correction(input_forcings, GeoMetaWrfHydro, ConfigOptions, MpiConfig, force_num): """ Generic NCAR bias correction for forcings based on the forecast hour. A lookup table is used for each different forcing variable. NOTE!!!! - This is based on HRRRv3 analysis and should be used with extreme caution. :param input_forcings: :param GeoMetaWrfHydro: :param ConfigOptions: :param MpiConfig: :param force_num: :return: """ # Establish lookup tables for each forcing, for each forecast hour. adj_tbl = { 0: [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], 1: [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], 2: [0.35, 0.18, 0.15, 0.13, 0.12, 0.11, 0.10, 0.08, 0.07, 0.06, 0.05, 0.03, 0.02, 0.01, -0.01, -0.02, -0.03, -0.4, -0.05], 3: [0.35, 0.18, 0.15, 0.13, 0.12, 0.11, 0.10, 0.08, 0.07, 0.06, 0.05, 0.03, 0.02, 0.01, -0.01, -0.02, -0.03, -0.4, -0.05] } # First check to make sure we are within the accepted forecast range per the above table. For now, this # bias correction only applies to the first 18 forecast hours. if int(input_forcings.fcst_hour2) > 18: ConfigOptions.statusMsg = "Current forecast hour for: " + input_forcings.productName + \ " is greater than allowed forecast range of 18 for table lookup bias correction." errMod.log_warning(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) # Extract local array of values to perform adjustment on. try: force_tmp = input_forcings.final_forcings[input_forcings.input_map_output[force_num], :, :] except: ConfigOptions.errMsg = "Unable to extract: " + input_forcings.netcdf_var_names[force_num] + \ " from local forcing object for: " + input_forcings.productName errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) try: indValid = np.where(force_tmp != ConfigOptions.globalNdv) except: ConfigOptions.errMsg = "Unable to perform valid search for: " + input_forcings.netcdf_var_names[force_num] + \ " from local forcing object for: " + input_forcings.productName errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) # Apply the bias correction adjustment based on the current forecast hour. try: force_tmp[indValid] = force_tmp[indValid] + adj_tbl[force_num][int(input_forcings.fcst_hour2)] except: ConfigOptions.errMsg = "Unable to apply table bias correction for: " + \ input_forcings.netcdf_var_names[force_num] + \ " from local forcing object for: " + input_forcings.productName errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) try: input_forcings.final_forcings[input_forcings.input_map_output[force_num], :, :] = force_tmp[:, :] except: ConfigOptions.errMsg = "Unable to place temporary LW array back into forcing object for: " + \ input_forcings.productName errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) # Reset temporary variables to keep low memory footprint. force_tmp = None indValid = None
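# --- Illustrative example (not part of the original module) ---
# A minimal sketch of the per-forecast-hour table lookup above: the adjustment for a
# given forcing index and forecast hour is read straight out of the table and added to
# every valid grid cell. The field values are made up.
import numpy as np

adj_tbl = {2: [0.35, 0.18, 0.15, 0.13, 0.12, 0.11, 0.10, 0.08, 0.07, 0.06,
               0.05, 0.03, 0.02, 0.01, -0.01, -0.02, -0.03, -0.4, -0.05]}
force_num, fcst_hour = 2, 5
global_ndv = -9999.0

field = np.array([[270.0, global_ndv], [268.5, 271.2]])
valid = np.where(field != global_ndv)
field[valid] = field[valid] + adj_tbl[force_num][fcst_hour]   # adds 0.11 to valid cells here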
def read_rqi_monthly_climo(ConfigOptions, MpiConfig, supplemental_precip, GeoMetaWrfHydro): """ Function to read in monthly RQI grids on the NWM grid. This is an NWM ONLY option. Please do not activate if not executing on the NWM conus grid. :param ConfigOptions: :param MpiConfig: :param supplemental_precip: :return: """ # Ensure all processors are synced up before proceeding. MpiConfig.comm.barrier() # First check to see if the RQI grids have valid values in them. There should # be NO NDV values if the grids have properly been read in. indTmp = np.where( supplemental_precip.regridded_rqi2 != ConfigOptions.globalNdv) rqiPath = ConfigOptions.supp_precip_param_dir + "/MRMS_WGT_RQI0.9_m" + \ supplemental_precip.pcp_date2.strftime('%m') + '_v1.1_geosmth.nc' if len(indTmp[0]) == 0: # We haven't initialized the RQI fields. We need to do this..... if MpiConfig.rank == 0: ConfigOptions.statusMsg = "Reading in RQI Parameter File: " + rqiPath errMod.log_msg(ConfigOptions, MpiConfig) # First make sure the RQI file exists. if not os.path.isfile(rqiPath): ConfigOptions.errMsg = "Expected RQI parameter file: " + rqiPath + " not found." errMod.log_critical(ConfigOptions, MpiConfig) pass # Open the Parameter file. try: idTmp = Dataset(rqiPath, 'r') except: ConfigOptions.errMsg = "Unable to open parameter file: " + rqiPath pass # Extract out the RQI grid. try: varTmp = idTmp.variables['POP_0mabovemeansealevel'][0, :, :] except: ConfigOptions.errMsg = "Unable to extract POP_0mabovemeansealevel from parameter file: " + rqiPath errMod.log_critical(ConfigOptions, MpiConfig) pass # Sanity checking on grid size. if varTmp.shape[0] != GeoMetaWrfHydro.ny_global or varTmp.shape[ 1] != GeoMetaWrfHydro.nx_global: ConfigOptions.errMsg = "Improper dimension sizes for POP_0mabovemeansealevel " \ "in parameter file: " + rqiPath errMod.log_critical(ConfigOptions, MpiConfig) pass else: idTmp = None varTmp = None errMod.check_program_status(ConfigOptions, MpiConfig) # Scatter the array out to the local processors varSubTmp = MpiConfig.scatter_array(GeoMetaWrfHydro, varTmp, ConfigOptions) errMod.check_program_status(ConfigOptions, MpiConfig) supplemental_precip.regridded_rqi2[:, :] = varSubTmp # Reset variables for memory purposes varSubTmp = None varTmp = None # Close the RQI NetCDF file if MpiConfig.rank == 0: try: idTmp.close() except: ConfigOptions.errMsg = "Unable to close parameter file: " + rqiPath errMod.log_critical(ConfigOptions, MpiConfig) pass errMod.check_program_status(ConfigOptions, MpiConfig) # Also check to see if we have switched to a new month based on the previous # MRMS step and the current one. if supplemental_precip.pcp_date2.month != supplemental_precip.pcp_date1.month: # We need to read in a new RQI monthly grid. if MpiConfig.rank == 0: ConfigOptions.statusMsg = "Reading in RQI Parameter File: " + rqiPath errMod.log_msg(ConfigOptions, MpiConfig) # First make sure the RQI file exists. if not os.path.isfile(rqiPath): ConfigOptions.errMsg = "Expected RQI parameter file: " + rqiPath + " not found." errMod.log_critical(ConfigOptions, MpiConfig) pass # Open the Parameter file. try: idTmp = Dataset(rqiPath, 'r') except: ConfigOptions.errMsg = "Unable to open parameter file: " + rqiPath pass # Extract out the RQI grid. try: varTmp = idTmp.variables['POP_0mabovemeansealevel'][0, :, :] except: ConfigOptions.errMsg = "Unable to extract POP_0mabovemeansealevel from parameter file: " + rqiPath errMod.log_critical(ConfigOptions, MpiConfig) pass # Sanity checking on grid size. 
if varTmp.shape[0] != GeoMetaWrfHydro.ny_global or varTmp.shape[ 1] != GeoMetaWrfHydro.nx_global: ConfigOptions.errMsg = "Improper dimension sizes for POP_0mabovemeansealevel " \ "in parameter file: " + rqiPath errMod.log_critical(ConfigOptions, MpiConfig) pass else: idTmp = None varTmp = None errMod.check_program_status(ConfigOptions, MpiConfig) # Scatter the array out to the local processors varSubTmp = MpiConfig.scatter_array(GeoMetaWrfHydro, varTmp, ConfigOptions) errMod.check_program_status(ConfigOptions, MpiConfig) supplemental_precip.regridded_rqi2[:, :] = varSubTmp # Reset variables for memory purposes varSubTmp = None varTmp = None # Close the RQI NetCDF file if MpiConfig.rank == 0: try: idTmp.close() except: ConfigOptions.errMsg = "Unable to close parameter file: " + rqiPath errMod.log_critical(ConfigOptions, MpiConfig) pass errMod.check_program_status(ConfigOptions, MpiConfig)
def process_forecasts(ConfigOptions, wrfHydroGeoMeta, inputForcingMod, suppPcpMod, MpiConfig, OutputObj): """ Main calling module for running realtime forecasts and re-forecasts. :param jobMeta: :return: """ # Loop through each WRF-Hydro forecast cycle being processed. Within # each cycle, perform the following tasks: # 1.) Loop over each output frequency # 2.) Determine the input forcing cycle dates (both before and after) # for temporal interpolation, downscaling, and bias correction reasons. # 3.) If the input forcings haven't been opened and read into memory, # open them. # 4.) Check to see if the ESMF objects for input forcings have been # created. If not, create them, including the regridding object. # 5.) Regrid forcing grids for input cycle dates surrounding the # current output timestep if they haven't been regridded. # 6.) Perform bias correction and/or downscaling. # 7.) Output final grids to LDASIN NetCDF files with associated # WRF-Hydro geospatial metadata to the final output directories. # Throughout this entire process, log progress being made into LOG # files. Once a forecast cycle is complete, we will touch an empty # 'WrfHydroForcing.COMPLETE' flag in the directory. This will be # checked upon the beginning of this program to see if we # need to process any files. for fcstCycleNum in range(ConfigOptions.nFcsts): ConfigOptions.current_fcst_cycle = ConfigOptions.b_date_proc + \ datetime.timedelta( seconds=ConfigOptions.fcst_freq*60*fcstCycleNum ) fcstCycleOutDir = ConfigOptions.output_dir + "/" + \ ConfigOptions.current_fcst_cycle.strftime('%Y%m%d%H%M') completeFlag = fcstCycleOutDir + "/WrfHydroForcing.COMPLETE" if os.path.isfile(completeFlag): ConfigOptions.statusMsg = "Forecast Cycle: " + \ ConfigOptions.current_fcst_cycle.strftime('%Y-%m-%d %H:%M') + \ " has already completed." errMod.log_msg(ConfigOptions, MpiConfig) # We have already completed processing this cycle, # move on. continue if MpiConfig.rank == 0: # If the cycle directory doesn't exist, create it. if not os.path.isdir(fcstCycleOutDir): try: os.mkdir(fcstCycleOutDir) except: ConfigOptions.errMsg = "Unable to create output " \ "directory: " + fcstCycleOutDir errMod.err_out_screen_para(ConfigOptions.errMsg, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) # Compose a path to a log file, which will contain information # about this forecast cycle. ConfigOptions.logFile = fcstCycleOutDir + "/LOG_" + \ ConfigOptions.d_program_init.strftime('%Y%m%d%H%M') + \ "_" + ConfigOptions.current_fcst_cycle.strftime('%Y%m%d%H%M') # Initialize the log file. try: errMod.init_log(ConfigOptions, MpiConfig) except: errMod.err_out_screen_para(ConfigOptions.errMsg, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) # Log information about this forecast cycle if MpiConfig.rank == 0: ConfigOptions.statusMsg = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX' errMod.log_msg(ConfigOptions, MpiConfig) ConfigOptions.statusMsg = 'Processing Forecast Cycle: ' + \ ConfigOptions.current_fcst_cycle.strftime('%Y-%m-%d %H:%M') errMod.log_msg(ConfigOptions, MpiConfig) ConfigOptions.statusMsg = 'Forecast Cycle Length is: ' + \ str(ConfigOptions.cycle_length_minutes) + " minutes" errMod.log_msg(ConfigOptions, MpiConfig) MpiConfig.comm.barrier() # Loop through each output timestep. Perform the following functions: # 1.) Calculate all necessary input files per user options. # 2.) Read in input forcings from GRIB/NetCDF files. # 3.) Regrid the forcings, and temporally interpolate. # 4.) Downscale. # 5.) 
Layer, and output as necessary. for outStep in range(1, ConfigOptions.num_output_steps + 1): # Reset out final grids to missing values. OutputObj.output_local[:, :, :] = -9999.0 ConfigOptions.current_output_step = outStep OutputObj.outDate = ConfigOptions.current_fcst_cycle + datetime.timedelta( seconds=ConfigOptions.output_freq * 60 * outStep) ConfigOptions.current_output_date = OutputObj.outDate # Calculate the previous output timestep. This is used in potential downscaling routines. if outStep == 1: ConfigOptions.prev_output_date = ConfigOptions.current_output_date else: ConfigOptions.prev_output_date = ConfigOptions.current_output_date - datetime.timedelta( seconds=ConfigOptions.output_freq * 60) if MpiConfig.rank == 0: ConfigOptions.statusMsg = '=========================================' errMod.log_msg(ConfigOptions, MpiConfig) ConfigOptions.statusMsg = "Processing for output timestep: " + \ OutputObj.outDate.strftime('%Y-%m-%d %H:%M') errMod.log_msg(ConfigOptions, MpiConfig) MpiConfig.comm.barrier() # Compose the expected path to the output file. Check to see if the file exists, # if so, continue to the next time step. Also initialize our output arrays if necessary. OutputObj.outPath = fcstCycleOutDir + "/" + OutputObj.outDate.strftime('%Y%m%d%H%M') + \ ".LDASIN_DOMAIN1" MpiConfig.comm.barrier() if os.path.isfile(OutputObj.outPath): if MpiConfig.rank == 0: ConfigOptions.statusMsg = "Output file: " + OutputObj.outPath + " exists. Moving " + \ " to the next output timestep." errMod.log_msg(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) continue else: ConfigOptions.currentForceNum = 0 ConfigOptions.currentCustomForceNum = 0 # Loop over each of the input forcings specifed. for forceKey in ConfigOptions.input_forcings: # Calculate the previous and next input cycle files from the inputs. inputForcingMod[forceKey].calc_neighbor_files( ConfigOptions, OutputObj.outDate, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) # Regrid forcings. inputForcingMod[forceKey].regrid_inputs( ConfigOptions, wrfHydroGeoMeta, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) # Run check on regridded fields for reasonable values that are not missing values. errMod.check_forcing_bounds(ConfigOptions, inputForcingMod[forceKey], MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) # If we are restarting a forecast cycle, re-calculate the neighboring files, and regrid the # next set of forcings as the previous step just regridded the previous forcing. if inputForcingMod[forceKey].rstFlag == 1: if inputForcingMod[forceKey].regridded_forcings1 is not None and \ inputForcingMod[forceKey].regridded_forcings2 is not None: # Set the forcings back to reflect we just regridded the previous set of inputs, not the next. inputForcingMod[forceKey].regridded_forcings1[:, :, :] = \ inputForcingMod[forceKey].regridded_forcings2[:, :, :] # Re-calculate the neighbor files. inputForcingMod[forceKey].calc_neighbor_files( ConfigOptions, OutputObj.outDate, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) # Regrid the forcings for the end of the window. inputForcingMod[forceKey].regrid_inputs( ConfigOptions, wrfHydroGeoMeta, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) inputForcingMod[forceKey].rstFlag = 0 # Run temporal interpolation on the grids. inputForcingMod[forceKey].temporal_interpolate_inputs( ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) # Run bias correction. 
biasCorrectMod.run_bias_correction( inputForcingMod[forceKey], ConfigOptions, wrfHydroGeoMeta, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) # Run downscaling on grids for this output timestep. downscaleMod.run_downscaling(inputForcingMod[forceKey], ConfigOptions, wrfHydroGeoMeta, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) # Layer in forcings from this product. layeringMod.layer_final_forcings(OutputObj, inputForcingMod[forceKey], ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) ConfigOptions.currentForceNum = ConfigOptions.currentForceNum + 1 if forceKey == 10: ConfigOptions.currentCustomForceNum = ConfigOptions.currentCustomForceNum + 1 # Process supplemental precipitation if we specified in the configuration file. if ConfigOptions.number_supp_pcp > 0: for suppPcpKey in ConfigOptions.supp_precip_forcings: # Like with input forcings, calculate the neighboring files to use. suppPcpMod[suppPcpKey].calc_neighbor_files( ConfigOptions, OutputObj.outDate, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) # Regrid the supplemental precipitation. suppPcpMod[suppPcpKey].regrid_inputs( ConfigOptions, wrfHydroGeoMeta, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) if suppPcpMod[suppPcpKey].regridded_precip1 is not None \ and suppPcpMod[suppPcpKey].regridded_precip2 is not None: #if np.any(suppPcpMod[suppPcpKey].regridded_precip1) and \ # np.any(suppPcpMod[suppPcpKey].regridded_precip2): # Run check on regridded fields for reasonable values that are not missing values. errMod.check_supp_pcp_bounds( ConfigOptions, suppPcpMod[suppPcpKey], MpiConfig) errMod.check_program_status( ConfigOptions, MpiConfig) # Run temporal interpolation on the grids. suppPcpMod[suppPcpKey].temporal_interpolate_inputs( ConfigOptions, MpiConfig) errMod.check_program_status( ConfigOptions, MpiConfig) # Layer in the supplemental precipitation into the current output object. layeringMod.layer_supplemental_precipitation( OutputObj, suppPcpMod[suppPcpKey], ConfigOptions, MpiConfig) errMod.check_program_status( ConfigOptions, MpiConfig) # Call the output routines OutputObj.output_final_ldasin(ConfigOptions, wrfHydroGeoMeta, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) if MpiConfig.rank == 0: ConfigOptions.statusMsg = "Forcings complete for forecast cycle: " + \ ConfigOptions.current_fcst_cycle.strftime('%Y-%m-%d %H:%M') errMod.log_msg(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) if MpiConfig.rank == 0: # Close the log file. try: errMod.close_log(ConfigOptions, MpiConfig) except: errMod.err_out_screen_para(ConfigOptions.errMsg, MpiConfig) # Success.... Now touch an empty complete file for this forecast cycle to indicate # completion in case the code is re-ran. try: open(completeFlag, 'a').close() except: ConfigOptions.errMsg = "Unable to create completion file: " + completeFlag errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig)
def open_grib2(GribFileIn, NetCdfFileOut, Wgrib2Cmd, ConfigOptions, MpiConfig, inputVar):
    """
    Generic function to convert a GRIB2 file into a NetCDF file. Function will also
    open the NetCDF file, and ensure all necessary inputs are in the file.
    :param GribFileIn:
    :param NetCdfFileOut:
    :param Wgrib2Cmd:
    :param ConfigOptions:
    :param MpiConfig:
    :param inputVar:
    :return:
    """
    # Run wgrib2 command to convert GRIB2 file to NetCDF.
    if MpiConfig.rank == 0:
        while (True):
            # Check to see if the output file already exists. If so, log a warning
            # before it is overridden by the conversion below.
            ConfigOptions.statusMsg = "Reading in GRIB2 file: " + GribFileIn
            errMod.log_msg(ConfigOptions, MpiConfig)
            if os.path.isfile(NetCdfFileOut):
                ConfigOptions.statusMsg = "Overriding temporary NetCDF file: " + NetCdfFileOut
                errMod.log_warning(ConfigOptions, MpiConfig)
            try:
                subprocess.run([Wgrib2Cmd], shell=True)
            except:
                ConfigOptions.errMsg = "Unable to convert: " + GribFileIn + " to " + NetCdfFileOut
                errMod.log_critical(ConfigOptions, MpiConfig)
                idTmp = None
                break

            # Ensure file exists.
            if not os.path.isfile(NetCdfFileOut):
                ConfigOptions.errMsg = "Expected NetCDF file: " + NetCdfFileOut + \
                    " not found. It's possible the GRIB2 variable was not found."
                errMod.log_critical(ConfigOptions, MpiConfig)
                idTmp = None
                break

            # Open the NetCDF file.
            try:
                idTmp = Dataset(NetCdfFileOut, 'r')
            except:
                ConfigOptions.errMsg = "Unable to open input NetCDF file: " + NetCdfFileOut
                errMod.log_critical(ConfigOptions, MpiConfig)
                idTmp = None
                break

            if idTmp is not None:
                # Check for expected lat/lon variables.
                if 'latitude' not in idTmp.variables.keys():
                    ConfigOptions.errMsg = "Unable to locate latitude from: " + GribFileIn
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    idTmp = None
                    break
            if idTmp is not None:
                if 'longitude' not in idTmp.variables.keys():
                    ConfigOptions.errMsg = "Unable to locate longitude from: " + GribFileIn
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    idTmp = None
                    break

            if idTmp is not None:
                # Loop through all the expected variables.
                if inputVar not in idTmp.variables.keys():
                    ConfigOptions.errMsg = "Unable to locate expected variable: " + \
                        inputVar + " in: " + NetCdfFileOut
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    idTmp = None
                    break
            break
    else:
        idTmp = None

    # Return the NetCDF file handle back to the user.
    return idTmp
def output_final_ldasin(self, ConfigOptions, geoMetaWrfHydro, MpiConfig):
    """
    Output routine to produce final LDASIN files for the WRF-Hydro modeling system.
    This function assumes all regridding, interpolation, downscaling, and bias
    correction has occurred on the necessary input forcings to generate a final set
    of outputs on the output grid. Since this program is run in parallel, all work
    is done on local "slabs" of data for each processor to make the code more
    efficient. To that end, this function will collect the "slabs" into final
    output grids that go into the output files. In addition, detailed geospatial
    metadata is translated from the input geogrid file to the final output files.
    :param ConfigOptions:
    :param geoMetaWrfHydro:
    :param MpiConfig:
    :return:
    """
    output_variable_attribute_dict = {
        'U2D': [0, 'm s-1', 'x_wind', '10-m U-component of wind', 'time: point'],
        'V2D': [1, 'm s-1', 'y_wind', '10-m V-component of wind', 'time: point'],
        'LWDOWN': [2, 'W m-2', 'surface_downward_longwave_flux',
                   'Surface downward long-wave radiation flux', 'time: point'],
        'RAINRATE': [3, 'mm s^-1', 'precipitation_flux',
                     'Surface Precipitation Rate', 'time: mean'],
        'T2D': [4, 'K', 'air_temperature', '2-m Air Temperature', 'time: point'],
        'Q2D': [5, 'kg kg-1', 'surface_specific_humidity',
                '2-m Specific Humidity', 'time: point'],
        'PSFC': [6, 'Pa', 'air_pressure', 'Surface Pressure', 'time: point'],
        'SWDOWN': [7, 'W m-2', 'surface_downward_shortwave_flux',
                   'Surface downward short-wave radiation flux', 'time: point']
    }

    # Compose the ESMF remapped string attribute based on the regridding option chosen by the user.
    # We will default to the regridding method chosen for the first input forcing selected.
    if ConfigOptions.regrid_opt[0] == 1:
        regrid_att = "remapped via ESMF regrid_with_weights: Bilinear"
    elif ConfigOptions.regrid_opt[0] == 2:
        regrid_att = "remapped via ESMF regrid_with_weights: Nearest Neighbor"
    elif ConfigOptions.regrid_opt[0] == 3:
        regrid_att = "remapped via ESMF regrid_with_weights: Conservative Bilinear"

    # Ensure all processors are synced up before outputting.
    MpiConfig.comm.barrier()

    if MpiConfig.rank == 0:
        while (True):
            # Only output on the master processor.
            try:
                idOut = Dataset(self.outPath, 'w')
            except:
                ConfigOptions.errMsg = "Unable to create output file: " + self.outPath
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            # Create dimensions.
try: idOut.createDimension("time", 1) except: ConfigOptions.errMsg = "Unable to create time dimension in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.createDimension("y", geoMetaWrfHydro.ny_global) except: ConfigOptions.errMsg = "Unable to create y dimension in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.createDimension("x", geoMetaWrfHydro.nx_global) except: ConfigOptions.errMsg = "Unable to create x dimension in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.createDimension("reference_time", 1) except: ConfigOptions.errMsg = "Unable to create reference_time dimension in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break # Set global attributes try: idOut.model_output_valid_time = self.outDate.strftime( "%Y-%m-%d_%H:%M:00") except: ConfigOptions.errMsg = "Unable to set the model_output_valid_time attribute in :" + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.model_initialization_time = ConfigOptions.current_fcst_cycle.strftime( "%Y-%m-%d_%H:%M:00") except: ConfigOptions.errMsg = "Unable to set the model_initialization_time global " \ "attribute in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break # Create variables. try: idOut.createVariable('time', 'i4', ('time')) except: ConfigOptions.errMsg = "Unable to create time variable in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.createVariable('reference_time', 'i4', ('reference_time')) except: ConfigOptions.errMsg = "Unable to create reference_time variable in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break # Create geospatial metadata coordinate variables if data was read in from an optional # spatial metadata file. if ConfigOptions.spatial_meta is not None: # Create coordinate variables and populate with attributes read in. 
try: idOut.createVariable('x', 'f8', ('x')) except: ConfigOptions.errMsg = "Unable to create x variable in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables['x'].setncatts( geoMetaWrfHydro.x_coord_atts) except: ConfigOptions.errMsg = "Unable to establish x coordinate attributes in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables['x'][:] = geoMetaWrfHydro.x_coords except: ConfigOptions.errMsg = "Unable to place x coordinate values into output variable " \ "for output file: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.createVariable('y', 'f8', ('y')) except: ConfigOptions.errMsg = "Unable to create y variable in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables['y'].setncatts( geoMetaWrfHydro.y_coord_atts) except: ConfigOptions.errMsg = "Unable to establish y coordinate attributes in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables['y'][:] = geoMetaWrfHydro.y_coords except: ConfigOptions.errMsg = "Unable to place y coordinate values into output variable " \ "for output file: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.createVariable('crs', 'S1') except: ConfigOptions.errMsg = "Unable to create crs in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables['crs'].setncatts( geoMetaWrfHydro.crs_atts) except: ConfigOptions.errMsg = "Unable to establish crs attributes in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break # Loop through and create each variable, along with expected attributes. for varTmp in output_variable_attribute_dict: try: idOut.createVariable( varTmp, 'f4', ('time', 'y', 'x'), fill_value=ConfigOptions.globalNdv) except: ConfigOptions.errMsg = "Unable to create " + varTmp + " variable in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables[ varTmp].cell_methods = output_variable_attribute_dict[ varTmp][4] except: ConfigOptions.errMsg = "Unable to create cell_methods attribute for: " + varTmp + \ " in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables[varTmp].remap = regrid_att except: ConfigOptions.errMsg = "Unable to create remap attribute for: " + varTmp + \ " in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break # Place geospatial metadata attributes in if we have them. 
if ConfigOptions.spatial_meta is not None: try: idOut.variables[varTmp].grid_mapping = 'crs' except: ConfigOptions.errMsg = "Unable to create grid_mapping attribute for: " + \ varTmp + " in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break if 'esri_pe_string' in geoMetaWrfHydro.crs_atts.keys(): try: idOut.variables[ varTmp].esri_pe_string = geoMetaWrfHydro.crs_atts[ 'esri_pe_string'] except: ConfigOptions.errMsg = "Unable to create esri_pe_string attribute for: " + \ varTmp + " in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break if 'proj4' in geoMetaWrfHydro.spatial_global_atts.keys( ): try: idOut.variables[ varTmp].proj4 = geoMetaWrfHydro.spatial_global_atts[ 'proj4'] except: ConfigOptions.errMsg = "Unable to create proj4 attribute for: " + varTmp + \ " in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables[ varTmp].units = output_variable_attribute_dict[ varTmp][1] except: ConfigOptions.errMsg = "Unable to create units attribute for: " + varTmp + " in: " + \ self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables[ varTmp].standard_name = output_variable_attribute_dict[ varTmp][2] except: ConfigOptions.errMsg = "Unable to create standard_name attribute for: " + varTmp + \ " in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables[ varTmp].long_name = output_variable_attribute_dict[ varTmp][3] except: ConfigOptions.errMsg = "Unable to create long_name attribute for: " + varTmp + \ " in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break break errMod.check_program_status(ConfigOptions, MpiConfig) # Now loop through each variable, collect the data (call on each processor), assemble into the final # output grid, and place into the output file (if on processor 0). for varTmp in output_variable_attribute_dict: # Collect data from the various processors, and place into the output file. try: final = MpiConfig.comm.gather(self.output_local[ output_variable_attribute_dict[varTmp][0], :, :], root=0) except: ConfigOptions.errMsg = "Unable to gather final grids for: " + varTmp errMod.log_critical(ConfigOptions, MpiConfig) continue MpiConfig.comm.barrier() if MpiConfig.rank == 0: while (True): try: dataOutTmp = np.concatenate( [final[i] for i in range(MpiConfig.size)], axis=0) except: ConfigOptions.errMsg = "Unable to finalize collection of output grids for: " + varTmp errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables[varTmp][0, :, :] = dataOutTmp except: ConfigOptions.errMsg = "Unable to place final output grid for: " + varTmp errMod.log_critical(ConfigOptions, MpiConfig) break # Reset temporary data objects to keep memory usage down. dataOutTmp = None break # Reset temporary data objects to keep memory usage down. final = None errMod.check_program_status(ConfigOptions, MpiConfig) if MpiConfig.rank == 0: while (True): # Close the NetCDF file try: idOut.close() except: ConfigOptions.errMsg = "Unable to close output file: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break break errMod.check_program_status(ConfigOptions, MpiConfig)
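# --- Illustrative example (not part of the original module) ---
# A minimal sketch of the gather/concatenate step above, which reassembles row slabs
# into the global output grid on rank 0. Requires mpi4py and assumes the domain was
# decomposed into contiguous rows in rank order; the slab shape here is made up.
from mpi4py import MPI
import numpy as np

comm = MPI.COMM_WORLD
rank, size = comm.Get_rank(), comm.Get_size()

local_slab = np.full((2, 4), float(rank))          # each rank owns 2 rows of a (2*size, 4) grid
slabs = comm.gather(local_slab, root=0)            # list of slabs on rank 0, None elsewhere

if rank == 0:
    global_grid = np.concatenate([slabs[i] for i in range(size)], axis=0)
    print(global_grid.shape)                       # (2*size, 4)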
def param_lapse(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig):
    """
    Function that applies an a priori lapse rate adjustment to modeled
    2-meter temperature by taking the difference of the native input
    elevation and the WRF-Hydro elevation. It's assumed this lapse rate
    grid has already been regridded to the final output WRF-Hydro grid.
    :param input_forcings:
    :param ConfigOptions:
    :param GeoMetaWrfHydro:
    :return:
    """
    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Applying a priori lapse rate grid to temperature downscaling"
        errMod.log_msg(ConfigOptions, MpiConfig)

    # Calculate the elevation difference.
    elevDiff = input_forcings.height - GeoMetaWrfHydro.height

    if not np.any(input_forcings.lapseGrid):
        # We have not read in our lapse rate file. Read it in, do extensive checks,
        # scatter the lapse rate grid out to individual processors, then apply the
        # lapse rate to the 2-meter temperature grid.
        if MpiConfig.rank == 0:
            while (True):
                # First ensure we have a parameter directory
                if input_forcings.paramDir == "NONE":
                    ConfigOptions.errMsg = "User has specified spatial temperature lapse rate " \
                                           "downscaling while no downscaling parameter directory " \
                                           "exists."
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break

                # Compose the path to the lapse rate grid file.
                lapsePath = input_forcings.paramDir + "/lapse_param.nc"
                if not os.path.isfile(lapsePath):
                    ConfigOptions.errMsg = "Expected lapse rate parameter file: " + \
                                           lapsePath + " does not exist."
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break

                # Open the lapse rate file. Check for the expected variable, along with
                # the dimension size to make sure everything matches up.
                try:
                    idTmp = Dataset(lapsePath, 'r')
                except:
                    ConfigOptions.errMsg = "Unable to open parameter file: " + lapsePath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break

                if 'lapse' not in idTmp.variables.keys():
                    ConfigOptions.errMsg = "Expected 'lapse' variable not located in parameter " \
                                           "file: " + lapsePath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break

                try:
                    lapseTmp = idTmp.variables['lapse'][:, :]
                except:
                    ConfigOptions.errMsg = "Unable to extract 'lapse' variable from parameter " \
                                           "file: " + lapsePath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break

                # Check dimensions to ensure they match up to the output grid.
                if lapseTmp.shape[1] != GeoMetaWrfHydro.nx_global:
                    ConfigOptions.errMsg = "X-Dimension size mismatch between output grid and lapse " \
                                           "rate from parameter file: " + lapsePath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break
                if lapseTmp.shape[0] != GeoMetaWrfHydro.ny_global:
                    ConfigOptions.errMsg = "Y-Dimension size mismatch between output grid and lapse " \
                                           "rate from parameter file: " + lapsePath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break

                # Perform a quick search to ensure we don't have radical values.
                indTmp = np.where(lapseTmp < -10.0)
                if len(indTmp[0]) > 0:
                    ConfigOptions.errMsg = "Found anomalous negative values in the lapse rate grid from " \
                                           "parameter file: " + lapsePath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break
                indTmp = np.where(lapseTmp > 100.0)
                if len(indTmp[0]) > 0:
                    ConfigOptions.errMsg = "Found excessively high values in the lapse rate grid from " \
                                           "parameter file: " + lapsePath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break

                # Close the parameter lapse rate file.
try: idTmp.close() except: ConfigOptions.errMsg = "Unable to close parameter file: " + lapsePath errMod.log_critical(ConfigOptions, MpiConfig) break break else: lapseTmp = None errMod.check_program_status(ConfigOptions, MpiConfig) # Scatter the lapse rate grid to the other processors. input_forcings.lapseGrid = MpiConfig.scatter_array( GeoMetaWrfHydro, lapseTmp, ConfigOptions) errMod.check_program_status(ConfigOptions, MpiConfig) # Apply the local lapse rate grid to our local slab of 2-meter temperature data. temperature_grid_tmp = input_forcings.final_forcings[4, :, :] try: indNdv = np.where( input_forcings.final_forcings == ConfigOptions.globalNdv) except: ConfigOptions.errMsg = "Unable to perform NDV search on input " + \ input_forcings.productName + " regridded forcings." errMod.log_critical(ConfigOptions, MpiConfig) return try: indValid = np.where(temperature_grid_tmp != ConfigOptions.globalNdv) except: ConfigOptions.errMsg = "Unable to perform search for valid values on input " + \ input_forcings.productName + " regridded temperature forcings." errMod.log_critical(ConfigOptions, MpiConfig) return try: temperature_grid_tmp[indValid] = temperature_grid_tmp[indValid] + \ ((input_forcings.lapseGrid[indValid]/1000.0) * elevDiff[indValid]) except: ConfigOptions.errMsg = "Unable to apply spatial lapse rate values to input " + \ input_forcings.productName + " regridded temperature forcings." errMod.log_critical(ConfigOptions, MpiConfig) return input_forcings.final_forcings[4, :, :] = temperature_grid_tmp input_forcings.final_forcings[indNdv] = ConfigOptions.globalNdv # Reset for memory efficiency indTmp = None indNdv = None indValid = None elevDiff = None temperature_grid_tmp = None
def ncar_sw_hrrr_bias_correct(input_forcings, GeoMetaWrfHydro, ConfigOptions, MpiConfig, force_num): """ Function to implement a bias correction to the forecast incoming shortwave radiation fluxes. NOTE!!!! - This bias correction is based on in-situ analysis performed against HRRRv3 fields. It's high discouraged to use this for any other NWP products, or even with the HRRR changes versions in the future. :param input_forcings: :param GeoMetaWrfHydro: :param ConfigOptions: :param MpiConfig: :param force_num: :return: """ # Establish constant parameters. NOTE!!! - These will change with future HRRR upgrades. c1 = -0.159 c2 = -0.077 # Establish current datetime information, along wth solar constants. fHr = input_forcings.fcst_hour2 # For now, hard-coding the total number of forecast hours to be 18, since we # are assuming this is HRRR nFcstHr = 18 # Trig params d2r = math.pi/180.0 r2d = 180.0/math.pi dCurrent = ConfigOptions.current_output_date hh = float(dCurrent.hour) mm = float(dCurrent.minute) ss = float(dCurrent.second) doy = float(time.strptime(dCurrent.strftime('%Y.%m.%d'), '%Y.%m.%d').tm_yday) frac_year = 2.0*math.pi/365.0*(doy - 1.0 + (hh/24.0) + (mm/1440.0) + (ss/86400.0)) # eqtime is the difference in minutes between true solar time and that if solar noon was at actual noon. # This difference is due to Earth's eliptical orbit around the sun. eqtime = 229.18 * (0.000075 + 0.001868 * math.cos(frac_year) - 0.032077 * math.sin(frac_year) - 0.014615 * math.cos(2.0*frac_year) - 0.040849 * math.sin(2.0*frac_year)) # decl is the solar declination angle in radians: how much the Earth is tilted toward or away from the sun decl = 0.006918 - 0.399912 * math.cos(frac_year) + 0.070257 * math.sin(frac_year) - \ 0.006758 * math.cos(2.0 * frac_year) + 0.000907 * math.sin(2.0 * frac_year) - \ 0.002697 * math.cos(3.0 * frac_year) + 0.001480 * math.sin(3.0 * frac_year) # Create temporary grids for calculating the solar zenith angle, which will be used in the bias correction. # time offset in minutes from the prime meridian time_offset = eqtime + 4.0 * GeoMetaWrfHydro.longitude_grid # tst is the true solar time: the number of minutes since solar midnight tst = hh*60.0 + mm + ss/60.0 + time_offset # solar hour angle in radians: the amount the sun is off from due south ha = d2r*((tst/4.0) - 180.0) # solar zenith angle is the angle between straight up and the center of the sun's disc # the cosine of the sol_zen_ang is proportional to the solar intensity # (not accounting for humidity or cloud cover) sol_zen_ang = r2d * np.arccos(np.sin(GeoMetaWrfHydro.latitude_grid * d2r) * math.sin(decl) + np.cos(GeoMetaWrfHydro.latitude_grid * d2r) * math.cos(decl) * np.cos(ha)) # Check for any values greater than 90 degrees. sol_zen_ang[np.where(sol_zen_ang > 90.0)] = 90.0 # Extract the current incoming shortwave field from the forcing object and set it to # a local grid. We will perform the bias correction on this grid, based on forecast # hour and datetime information. Once a correction has taken place, we will place # the corrected field back into the forcing object. try: swTmp = input_forcings.final_forcings[input_forcings.input_map_output[force_num],:,:] except: ConfigOptions.errMsg = "Unable to extract incoming shortwave forcing from object for: " + \ input_forcings.productName errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) # Calculate where we have valid values. 
try: indValid = np.where(swTmp != ConfigOptions.globalNdv) except: ConfigOptions.errMsg = "Unable to run a search for valid SW values for: " + input_forcings.productName errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) # Perform the bias correction. try: # The second term in the calculation below is the incoming SW bias, which is then # added (or subtracted if negative) to the original values. swTmp[indValid] = swTmp[indValid] + \ (c1 + (c2 * ( (fHr - 1) / (nFcstHr - 1)))) * np.cos(sol_zen_ang[indValid] * d2r) * \ swTmp[indValid] except: ConfigOptions.errMsg = "Unable to apply NCAR HRRR bias correction to incoming shortwave radiation." errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) # Place updated states back into the forcing object. try: input_forcings.final_forcings[7, :, :] = swTmp[:, :] except: ConfigOptions.errMsg = "Unable to place bias-corrected incoming SW radiation fluxes back into the forcing " \ "object." errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) # Reset variables to keep memory footprints low. swTmp = None time_offset = None tst = None ha = None sol_zen_ang = None indValid = None
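# Illustrative sketch (assumption: standalone scalar helpers, not the engine's API): the routine above first
# derives the solar zenith angle from NOAA-style solar geometry formulas, then adds a bias term of
# (c1 + c2*(fHr-1)/(nFcstHr-1)) * cos(zenith) * SW to each valid shortwave value. A condensed version:
import math
import datetime

def solar_zenith_angle_deg(lat_deg, lon_deg, dt_utc):
    """Solar zenith angle (degrees) from fractional year, equation of time, declination, and hour angle."""
    d2r = math.pi / 180.0
    doy = float(dt_utc.timetuple().tm_yday)
    hh, mm, ss = float(dt_utc.hour), float(dt_utc.minute), float(dt_utc.second)
    frac_year = 2.0 * math.pi / 365.0 * (doy - 1.0 + hh / 24.0 + mm / 1440.0 + ss / 86400.0)
    eqtime = 229.18 * (0.000075 + 0.001868 * math.cos(frac_year) - 0.032077 * math.sin(frac_year) -
                       0.014615 * math.cos(2.0 * frac_year) - 0.040849 * math.sin(2.0 * frac_year))
    decl = (0.006918 - 0.399912 * math.cos(frac_year) + 0.070257 * math.sin(frac_year) -
            0.006758 * math.cos(2.0 * frac_year) + 0.000907 * math.sin(2.0 * frac_year) -
            0.002697 * math.cos(3.0 * frac_year) + 0.001480 * math.sin(3.0 * frac_year))
    time_offset = eqtime + 4.0 * lon_deg             # minutes from the prime meridian
    tst = hh * 60.0 + mm + ss / 60.0 + time_offset   # true solar time in minutes
    ha = d2r * ((tst / 4.0) - 180.0)                 # solar hour angle in radians
    cos_zen = (math.sin(lat_deg * d2r) * math.sin(decl) +
               math.cos(lat_deg * d2r) * math.cos(decl) * math.cos(ha))
    return math.degrees(math.acos(max(-1.0, min(1.0, cos_zen))))

def hrrr_sw_bias_correct(sw, fhr, zen_deg, c1=-0.159, c2=-0.077, n_fcst_hr=18):
    """Scalar form of the empirical HRRR shortwave correction applied above."""
    return sw + (c1 + c2 * ((fhr - 1) / (n_fcst_hr - 1))) * math.cos(math.radians(min(zen_deg, 90.0))) * sw

# Example (hypothetical point and time):
# zen = solar_zenith_angle_deg(40.0, -105.0, datetime.datetime(2023, 6, 21, 18, 0, 0))
# sw_corrected = hrrr_sw_bias_correct(600.0, fhr=12, zen_deg=zen)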
def cfsv2_nldas_nwm_bias_correct(input_forcings, GeoMetaWrfHydro, ConfigOptions, MpiConfig, force_num): """ Routine to run CDF/PDF bias correction parametric corrections SPECIFIC to the NWM long-range configuration. :param input_forcings: :param ConfigOptions: :param force_num: :return: """ # Create a dictionary that maps forcing numbers to the expected NetCDF variable names, etc. nldasParam1Vars = { 2: 'UGRD10M_PARAM_1', 3: 'VGRD10M_PARAM_1', 6: 'LW_PARAM_1', 4: 'PRATE_PARAM_1', 0: 'T2M_PARAM_1', 1: 'Q2M_PARAM_1', 7: 'PSFC_PARAM_1', 5: 'SW_PARAM_1' } nldasParam2Vars = { 2: 'UGRD10M_PARAM_2', 3: 'VGRD10M_PARAM_2', 6: 'LW_PARAM_2', 4: 'PRATE_PARAM_2', 0: 'T2M_PARAM_2', 1: 'Q2M_PARAM_2', 7: 'PSFC_PARAM_2', 5: 'SW_PARAM_2' } cfsParamPathVars = { 2: 'ugrd', 3: 'vgrd', 6: 'dlwsfc', 4: 'prate', 0: 'tmp2m', 1: 'q2m', 7: 'pressfc', 5: 'dswsfc' } # Specify the min/max ranges on CDF/PDF values for each variable valRange1 = { 2: -50.0, 3: -50.0, 6: 1.0, 4: 0.01, 0: 200.0, 1: 0.01, 7: 50000.0, 5: 0.0 } valRange2 = { 2: 50.0, 3: 50.0, 6: 800.0, 4: 100.0, 0: 330.0, 1: 40.0, 7: 1100000.0, 5: 1000.0 } valStep = { 2: 0.1, 3: 0.1, 6: 0.19975, 4: 0.049995, 0: 0.1, 1: 3.9999, 7: 350.0, 5: 10.0 } if MpiConfig.rank == 0: ConfigOptions.statusMsg = "Running NLDAS-CFSv2 CDF/PDF bias correction on variable: " + \ input_forcings.netcdf_var_names[force_num] errMod.log_msg(ConfigOptions, MpiConfig) # Check to ensure we are running with CFSv2 here.... if input_forcings.productName != "CFSv2_6Hr_Global_GRIB2": ConfigOptions.errMsg = "Attempting to run CFSv2-NLDAS bias correction on: " + \ input_forcings.productName errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) # Open the necessary parameter grids, which are on the global CFSv2 grid, then scatter them out # to the various processors. if MpiConfig.rank == 0: while (True): nldas_param_file = input_forcings.paramDir + "/NLDAS_Climo/nldas2_" + \ ConfigOptions.current_output_date.strftime('%m%d%H') + \ "_dist_params.nc" if not os.path.isfile(nldas_param_file): ConfigOptions.errMsg = "Unable to locate necessary bias correction parameter file: " + \ nldas_param_file errMod.log_critical(ConfigOptions, MpiConfig) break # Open the NetCDF file. try: idNldasParam = Dataset(nldas_param_file, 'r') except: ConfigOptions.errMsg = "Unable to open parameter file: " + nldas_param_file errMod.log_critical(ConfigOptions, MpiConfig) break # Ensure dimensions/variables are as expected. 
if 'lat_0' not in idNldasParam.dimensions.keys(): ConfigOptions.errMsg = "Expected to find lat_0 dimension in: " + nldas_param_file errMod.log_critical(ConfigOptions, MpiConfig) break if 'lon_0' not in idNldasParam.dimensions.keys(): ConfigOptions.errMsg = "Expected to find lon_0 dimension in: " + nldas_param_file errMod.log_critical(ConfigOptions, MpiConfig) break if idNldasParam.dimensions['lat_0'].size != 190: ConfigOptions.errMsg = "Expected lat_0 size is 190 - found size of: " + \ str(idNldasParam.dimensions['lat_0'].size) + " in: " + nldas_param_file errMod.log_critical(ConfigOptions, MpiConfig) break if idNldasParam.dimensions['lon_0'].size != 384: ConfigOptions.errMsg = "Expected lon_0 size is 384 - found size of: " + \ str(idNldasParam.dimensions['lon_0'].size) + " in: " + nldas_param_file errMod.log_critical(ConfigOptions, MpiConfig) break if nldasParam1Vars[force_num] not in idNldasParam.variables.keys(): ConfigOptions.errMsg = "Expected variable: " + nldasParam1Vars[force_num] + " not found " + \ "in: " + nldas_param_file errMod.log_critical(ConfigOptions, MpiConfig) break if nldasParam2Vars[force_num] not in idNldasParam.variables.keys(): ConfigOptions.errMsg = "Expected variable: " + nldasParam2Vars[force_num] + " not found " + \ "in: " + nldas_param_file errMod.log_critical(ConfigOptions, MpiConfig) break if force_num == 4: if 'ZERO_PRECIP_PROB' not in idNldasParam.variables.keys(): ConfigOptions.errMsg = "Expected variable: ZERO_PRECIP_PROB not found in: " + \ nldas_param_file errMod.log_critical(ConfigOptions, MpiConfig) break try: nldas_param_1 = idNldasParam.variables[nldasParam1Vars[force_num]][:,:] except: ConfigOptions.errMsg = "Unable to extract: " + nldasParam1Vars[force_num] + \ " from: " + nldas_param_file errMod.log_critical(ConfigOptions, MpiConfig) break try: nldas_param_2 = idNldasParam.variables[nldasParam2Vars[force_num]][:,:] except: ConfigOptions.errMsg = "Unable to extract: " + nldasParam2Vars[force_num] + \ " from: " + nldas_param_file errMod.log_critical(ConfigOptions, MpiConfig) break if nldas_param_1.shape[0] != 190 or nldas_param_1.shape[1] != 384: ConfigOptions.errMsg = "Parameter variable: " + nldasParam1Vars[force_num] + " from: " + \ nldas_param_file + " not of shape [190,384]." errMod.log_critical(ConfigOptions, MpiConfig) break if nldas_param_2.shape[0] != 190 or nldas_param_2.shape[1] != 384: ConfigOptions.errMsg = "Parameter variable: " + nldasParam2Vars[force_num] + " from: " + \ nldas_param_file + " not of shape [190,384]." errMod.log_critical(ConfigOptions, MpiConfig) break # Extract the fill value try: fillTmp = idNldasParam.variables[nldasParam1Vars[force_num]]._FillValue except: ConfigOptions.errMsg = "Unable to extract Fill_Value from: " + nldas_param_file errMod.log_critical(ConfigOptions, MpiConfig) break # Read in the zero precip prob grids if we are bias correcting precipitation. if force_num == 4: try: nldas_zero_pcp = idNldasParam.variables['ZERO_PRECIP_PROB'][:,:] except: ConfigOptions.errMsg = "Unable to extract ZERO_PRECIP_PROB from: " + nldas_param_file errMod.log_critical(ConfigOptions, MpiConfig) break if nldas_zero_pcp.shape[0] != 190 or nldas_zero_pcp.shape[1] != 384: ConfigOptions.errMsg = "Parameter variable: ZERO_PRECIP_PROB from: " + nldas_param_file + \ " not of shape [190,384]." errMod.log_critical(ConfigOptions, MpiConfig) break # Set missing values accordingly. 
nldas_param_1[np.where(nldas_param_1 == fillTmp)] = ConfigOptions.globalNdv nldas_param_2[np.where(nldas_param_2 == fillTmp)] = ConfigOptions.globalNdv if force_num == 4: nldas_zero_pcp[np.where(nldas_zero_pcp == fillTmp)] = ConfigOptions.globalNdv break else: nldas_param_1 = None nldas_param_2 = None nldas_zero_pcp = None errMod.check_program_status(ConfigOptions, MpiConfig) # Reset the temporary fill value fillTmp = None # Scatter NLDAS parameters nldas_param_1_sub = MpiConfig.scatter_array(input_forcings, nldas_param_1, ConfigOptions) errMod.check_program_status(ConfigOptions, MpiConfig) nldas_param_2_sub = MpiConfig.scatter_array(input_forcings, nldas_param_2, ConfigOptions) errMod.check_program_status(ConfigOptions, MpiConfig) if force_num == 4: nldas_zero_pcp_sub = MpiConfig.scatter_array(input_forcings, nldas_zero_pcp, ConfigOptions) errMod.check_program_status(ConfigOptions, MpiConfig) if MpiConfig.rank == 0: while (True): # Read in the CFSv2 parameter files, based on the previous CFSv2 dates cfs_param_path1 = input_forcings.paramDir + "/CFSv2_Climo/cfs_" + \ cfsParamPathVars[force_num] + "_" + \ input_forcings.fcst_date1.strftime('%m%d') + "_" + \ input_forcings.fcst_date1.strftime('%H') + '_dist_params.nc' cfs_param_path2 = input_forcings.paramDir + "/CFSv2_Climo/cfs_" + cfsParamPathVars[force_num] + "_" + \ input_forcings.fcst_date2.strftime('%m%d') + "_" + \ input_forcings.fcst_date2.strftime('%H') + \ '_dist_params.nc' if not os.path.isfile(cfs_param_path1): ConfigOptions.errMsg = "Unable to locate necessary parameter file: " + cfs_param_path1 errMod.log_critical(ConfigOptions, MpiConfig) break if not os.path.isfile(cfs_param_path2): ConfigOptions.errMsg = "Unable to locate necessary parameter file: " + cfs_param_path2 errMod.log_critical(ConfigOptions, MpiConfig) break # Open the files and ensure they contain the correct information. try: idCfsParam1 = Dataset(cfs_param_path1, 'r') except: ConfigOptions.errMsg = "Unable to open parameter file: " + cfs_param_path1 errMod.log_critical(ConfigOptions, MpiConfig) break try: idCfsParam2 = Dataset(cfs_param_path2, 'r') except: ConfigOptions.errMsg = "Unable to open parameter file: " + cfs_param_path2 errMod.log_critical(ConfigOptions, MpiConfig) break ConfigOptions.statusMsg = "Checking CFS parameter files."
errMod.log_msg(ConfigOptions, MpiConfig) if 'DISTRIBUTION_PARAM_1' not in idCfsParam1.variables.keys(): ConfigOptions.errMsg = "Expected DISTRIBUTION_PARAM_1 variable not found in: " + cfs_param_path1 errMod.log_critical(ConfigOptions, MpiConfig) break if 'DISTRIBUTION_PARAM_2' not in idCfsParam1.variables.keys(): ConfigOptions.errMsg = "Expected DISTRIBUTION_PARAM_2 variable not found in: " + cfs_param_path1 errMod.log_critical(ConfigOptions, MpiConfig) break if 'DISTRIBUTION_PARAM_1' not in idCfsParam2.variables.keys(): ConfigOptions.errMsg = "Expected DISTRIBUTION_PARAM_1 variable not found in: " + cfs_param_path2 errMod.log_critical(ConfigOptions, MpiConfig) break if 'DISTRIBUTION_PARAM_2' not in idCfsParam2.variables.keys(): ConfigOptions.errMsg = "Expected DISTRIBUTION_PARAM_2 variable not found in: " + cfs_param_path2 errMod.log_critical(ConfigOptions, MpiConfig) break try: param_1 = idCfsParam2.variables['DISTRIBUTION_PARAM_1'][:,:] except: ConfigOptions.errMsg = "Unable to extract DISTRIBUTION_PARAM_1 from: " + cfs_param_path2 errMod.log_critical(ConfigOptions, MpiConfig) break try: param_2 = idCfsParam2.variables['DISTRIBUTION_PARAM_2'][:,:] except: ConfigOptions.errMsg = "Unable to extract DISTRIBUTION_PARAM_2 from: " + cfs_param_path2 errMod.log_critical(ConfigOptions, MpiConfig) break try: lat_0 = idCfsParam2.variables['lat_0'][:] except: ConfigOptions.errMsg = "Unable to extract lat_0 from: " + cfs_param_path2 errMod.log_critical(ConfigOptions, MpiConfig) break try: lon_0 = idCfsParam2.variables['lon_0'][:] except: ConfigOptions.errMsg = "Unable to extract lon_0 from: " + cfs_param_path2 errMod.log_critical(ConfigOptions, MpiConfig) break try: prev_param_1 = idCfsParam1.variables['DISTRIBUTION_PARAM_1'][:,:] except: ConfigOptions.errMsg = "Unable to extract DISTRIBUTION_PARAM_1 from: " + cfs_param_path1 errMod.log_critical(ConfigOptions, MpiConfig) break try: prev_param_2 = idCfsParam1.variables['DISTRIBUTION_PARAM_2'][:,:] except: ConfigOptions.errMsg = "Unable to extract DISTRIBUTION_PARAM_2 from: " + cfs_param_path1 errMod.log_critical(ConfigOptions, MpiConfig) break if param_1.shape[0] != 190 or param_1.shape[1] != 384: ConfigOptions.errMsg = "Unexpected DISTRIBUTION_PARAM_1 found in: " + cfs_param_path2 errMod.log_critical(ConfigOptions, MpiConfig) break if param_2.shape[0] != 190 or param_2.shape[1] != 384: ConfigOptions.errMsg = "Unexpected DISTRIBUTION_PARAM_2 found in: " + cfs_param_path2 errMod.log_critical(ConfigOptions, MpiConfig) break if prev_param_1.shape[0] != 190 or prev_param_1.shape[1] != 384: ConfigOptions.errMsg = "Unexpected DISTRIBUTION_PARAM_1 found in: " + cfs_param_path1 errMod.log_critical(ConfigOptions, MpiConfig) break if prev_param_2.shape[0] != 190 or prev_param_2.shape[1] != 384: ConfigOptions.errMsg = "Unexpected DISTRIBUTION_PARAM_2 found in: " + cfs_param_path1 errMod.log_critical(ConfigOptions, MpiConfig) break ConfigOptions.statusMsg = "Reading in zero precip probs." errMod.log_msg(ConfigOptions, MpiConfig) # Read in the zero precip prob grids if we are bias correcting precipitation.
if force_num == 4: try: zero_pcp = idCfsParam2.variables['ZERO_PRECIP_PROB'][:, :] except: ConfigOptions.errMsg = "Unable to locate ZERO_PRECIP_PROB in: " + cfs_param_path2 errMod.log_critical(ConfigOptions, MpiConfig) break try: prev_zero_pcp = idCfsParam1.variables['ZERO_PRECIP_PROB'][:, :] except: ConfigOptions.errMsg = "Unable to locate ZERO_PRECIP_PROB in: " + cfs_param_path1 errMod.log_critical(ConfigOptions, MpiConfig) break if zero_pcp.shape[0] != 190 or zero_pcp.shape[1] != 384: ConfigOptions.errMsg = "Unexpected ZERO_PRECIP_PROB found in: " + cfs_param_path2 errMod.log_critical(ConfigOptions, MpiConfig) break if prev_zero_pcp.shape[0] != 190 or prev_zero_pcp.shape[1] != 384: ConfigOptions.errMsg = "Unexpected ZERO_PRECIP_PROB found in: " + cfs_param_path1 errMod.log_critical(ConfigOptions, MpiConfig) break # Reset any missing values. Because the fill values for these files are all over the map, we # will just do a gross check here. For the most part, there shouldn't be missing values. param_1[np.where(param_1 > 500000.0)] = ConfigOptions.globalNdv param_2[np.where(param_2 > 500000.0)] = ConfigOptions.globalNdv prev_param_1[np.where(prev_param_1 > 500000.0)] = ConfigOptions.globalNdv prev_param_2[np.where(prev_param_2 > 500000.0)] = ConfigOptions.globalNdv if force_num == 4: zero_pcp[np.where(zero_pcp > 500000.0)] = ConfigOptions.globalNdv prev_zero_pcp[np.where(prev_zero_pcp > 500000.0)] = ConfigOptions.globalNdv break else: param_1 = None param_2 = None prev_param_1 = None prev_param_2 = None zero_pcp = None prev_zero_pcp = None errMod.check_program_status(ConfigOptions, MpiConfig) if MpiConfig.rank == 0: ConfigOptions.statusMsg = "Scattering CFS parameter grids" errMod.log_msg(ConfigOptions, MpiConfig) # Scatter CFS parameters cfs_param_1_sub = MpiConfig.scatter_array(input_forcings, param_1, ConfigOptions) errMod.check_program_status(ConfigOptions, MpiConfig) cfs_param_2_sub = MpiConfig.scatter_array(input_forcings, param_2, ConfigOptions) errMod.check_program_status(ConfigOptions, MpiConfig) cfs_prev_param_1_sub = MpiConfig.scatter_array(input_forcings, prev_param_1, ConfigOptions) errMod.check_program_status(ConfigOptions, MpiConfig) cfs_prev_param_2_sub = MpiConfig.scatter_array(input_forcings, prev_param_2, ConfigOptions) errMod.check_program_status(ConfigOptions, MpiConfig) if force_num == 4: cfs_zero_pcp_sub = MpiConfig.scatter_array(input_forcings, zero_pcp, ConfigOptions) errMod.check_program_status(ConfigOptions, MpiConfig) cfs_prev_zero_pcp_sub = MpiConfig.scatter_array(input_forcings, prev_zero_pcp, ConfigOptions) errMod.check_program_status(ConfigOptions, MpiConfig) if MpiConfig.rank == 0: ConfigOptions.statusMsg = "Closing CFS bias correction parameter files." errMod.log_msg(ConfigOptions, MpiConfig) while (True): # Close the parameter files. try: idNldasParam.close() except: ConfigOptions.errMsg = "Unable to close parameter file: " + nldas_param_file errMod.log_critical(ConfigOptions, MpiConfig) break try: idCfsParam1.close() except: ConfigOptions.errMsg = "Unable to close parameter file: " + cfs_param_path1 errMod.log_critical(ConfigOptions, MpiConfig) break try: idCfsParam2.close() except: ConfigOptions.errMsg = "Unable to close parameter file: " + cfs_param_path2 errMod.log_critical(ConfigOptions, MpiConfig) break break else: idNldasParam = None idCfsParam1 = None idCfsParam2 = None idGridCorr = None errMod.check_program_status(ConfigOptions, MpiConfig) # Now loop through the local CFSv2 grid cells and perform the following steps: # 1.)
Interpolate the six-hour values to the current output timestep. # 2.) Calculate the CFSv2 cdf/pdf # 3.) Calculate the NLDAS cdf/pdf # 4.) Adjust CFSv2 values based on the method of pdf matching. # 5.) Regrid the CFSv2 values to the WRF-Hydro domain using the pre-calculated ESMF # regridding object. # 6.) Place the data into the final output arrays for further processing (downscaling). # 7.) Reset variables for memory efficiency and exit the routine. if MpiConfig.rank == 0: ConfigOptions.statusMsg = "Creating local CFS CDF arrays." errMod.log_msg(ConfigOptions, MpiConfig) # Establish local arrays of data. cfs_data = np.empty([input_forcings.ny_local, input_forcings.nx_local], np.float64) # Establish parameters of the CDF matching. vals = np.arange(valRange1[force_num], valRange2[force_num], valStep[force_num]) if MpiConfig.rank == 0: ConfigOptions.statusMsg = "Looping over local arrays to calculate bias corrections." errMod.log_msg(ConfigOptions, MpiConfig) # Process each of the pixel cells for this local processor on the CFS grid. for x_local in range(0,input_forcings.nx_local): for y_local in range(0,input_forcings.ny_local): cfs_prev_tmp = input_forcings.coarse_input_forcings1[input_forcings.input_map_output[force_num], y_local, x_local] cfs_next_tmp = input_forcings.coarse_input_forcings2[input_forcings.input_map_output[force_num], y_local, x_local] # Check for any missing parameter values. If any missing values exist, # set this flag to False. Further down, if it's False, we will simply # set the local CFS adjusted value to the interpolated value. correctFlag = True if cfs_param_1_sub[y_local,x_local] == ConfigOptions.globalNdv: correctFlag = False if cfs_param_2_sub[y_local,x_local] == ConfigOptions.globalNdv: correctFlag = False if cfs_prev_param_1_sub[y_local,x_local] == ConfigOptions.globalNdv: correctFlag = False if cfs_prev_param_2_sub[y_local,x_local] == ConfigOptions.globalNdv: correctFlag = False if nldas_param_1_sub[y_local,x_local] == ConfigOptions.globalNdv: correctFlag = False if nldas_param_2_sub[y_local,x_local] == ConfigOptions.globalNdv: correctFlag = False if force_num == 4: if cfs_prev_zero_pcp_sub[y_local,x_local] == ConfigOptions.globalNdv: correctFlag = False if cfs_zero_pcp_sub[y_local,x_local] == ConfigOptions.globalNdv: correctFlag = False if nldas_zero_pcp_sub[y_local,x_local] == ConfigOptions.globalNdv: correctFlag = False # Interpolate the two CFS values (and parameters) in time. dtFromPrevious = ConfigOptions.current_output_date - input_forcings.fcst_date1 hrFromPrevious = dtFromPrevious.total_seconds()/3600.0 interpFactor1 = float(1 - (hrFromPrevious / 6.0)) interpFactor2 = float(hrFromPrevious / 6.0) # Since this is only for CFSv2 6-hour data, we will assume 6-hour intervals. # This is already checked at the beginning of this routine for the product name. 
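# Illustrative sketch (assumption: a scalar quantile-mapping helper with hypothetical argument names): for
# variables other than precipitation, incoming shortwave, and specific humidity, the loop below builds discrete
# normal CDFs for the CFSv2 and NLDAS climatologies over a shared value range, finds the quantile of the
# time-interpolated CFSv2 value in the CFSv2 CDF, and replaces it with the NLDAS value at that same quantile:
import numpy as np

def quantile_map_normal(fcst_val, fcst_mean, fcst_std, obs_mean, obs_std,
                        val_min, val_max, val_step):
    """Map fcst_val through its normal CDF onto the observed normal CDF (CDF matching)."""
    vals = np.arange(val_min, val_max, val_step)
    spacing = vals[2] - vals[1]
    # Discrete PDFs integrated into CDFs over the shared value range.
    fcst_pdf = np.exp(-0.5 * ((vals - fcst_mean) / fcst_std) ** 2) / (fcst_std * np.sqrt(2.0 * np.pi)) * spacing
    obs_pdf = np.exp(-0.5 * ((vals - obs_mean) / obs_std) ** 2) / (obs_std * np.sqrt(2.0 * np.pi)) * spacing
    fcst_cdf = np.cumsum(fcst_pdf)
    obs_cdf = np.cumsum(obs_pdf)
    # Quantile of the forecast value, then the observed value at the same quantile.
    quantile = fcst_cdf[np.argmin(np.abs(vals - fcst_val))]
    return vals[np.argmin(np.abs(obs_cdf - quantile))]

# e.g. quantile_map_normal(285.0, 284.0, 3.0, 286.0, 2.5, 200.0, 330.0, 0.1) maps a 2-m temperature value
# using the T2M value range and step from the dictionaries above.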
cfs_param_1_interp = cfs_prev_param_1_sub[y_local, x_local] * interpFactor1 + \ cfs_param_1_sub[y_local, x_local] * interpFactor2 cfs_param_2_interp = cfs_prev_param_2_sub[y_local, x_local] * interpFactor1 + \ cfs_param_2_sub[y_local, x_local] * interpFactor2 cfs_interp_fcst = cfs_prev_tmp * interpFactor1 + cfs_next_tmp * interpFactor2 nldas_nearest_1 = nldas_param_1_sub[y_local, x_local] nldas_nearest_2 = nldas_param_2_sub[y_local, x_local] if correctFlag: if force_num != 4 and force_num != 5 and force_num != 1: # Not incoming shortwave or precip or specific humidity pts = (vals - cfs_param_1_interp) / cfs_param_2_interp spacing = (vals[2] - vals[1]) / cfs_param_2_interp cfs_pdf = (np.exp(-0.5 * (np.power(pts, 2))) / math.sqrt(2 * 3.141592)) * spacing cfs_cdf = np.cumsum(cfs_pdf) pts = (vals - nldas_nearest_1) / nldas_nearest_2 spacing = (vals[2] - vals[1]) / nldas_nearest_2 nldas_pdf = (np.exp(-0.5 * (np.power(pts, 2))) / math.sqrt(2 * 3.141592)) * spacing nldas_cdf = np.cumsum(nldas_pdf) # compute adjusted value now using the CFSv2 forecast value and the two CDFs # find index in vals array diffTmp = np.absolute(vals - cfs_interp_fcst) cfs_ind = np.where(diffTmp == diffTmp.min())[0][0] cfs_cdf_val = cfs_cdf[cfs_ind] # now whats the index of the closest cdf value in the nldas array? diffTmp = np.absolute(cfs_cdf_val - nldas_cdf) cfs_nldas_ind = np.where(diffTmp == diffTmp.min())[0][0] # Adjust the CFS data cfs_data[y_local, x_local] = vals[cfs_nldas_ind] if force_num == 5: # Incoming shortwave radiation flux. # find nearest nldas grid point and then calculate nldas cdf nldas_nearest_1 = nldas_param_1_sub[y_local, x_local] if cfs_interp_fcst > 2.0 and cfs_param_1_interp > 2.0: factor = nldas_nearest_1 / cfs_param_1_interp cfs_data[y_local, x_local] = cfs_interp_fcst * factor else: cfs_data[y_local, x_local] = 0.0 if force_num == 1: # Specific humidity spacing = vals[2]-vals[1] cfs_interp_fcst = cfs_interp_fcst * 1000.0 # units are now g/kg cfs_cdf = 1 - np.exp(-(np.power((vals / cfs_param_1_interp), cfs_param_2_interp))) nldas_cdf = 1 - np.exp(-(np.power((vals / nldas_nearest_1), nldas_nearest_2))) # compute adjusted value now using the CFSv2 forecast value and the two CDFs # find index in vals array diffTmp = np.absolute(vals - cfs_interp_fcst) cfs_ind = np.where(diffTmp == diffTmp.min())[0][0] cfs_cdf_val = cfs_cdf[cfs_ind] # now whats the index of the closest cdf value in the nldas array? 
diffTmp = np.absolute(cfs_cdf_val - nldas_cdf) cfs_nldas_ind = np.where(diffTmp == diffTmp.min())[0][0] # Adjust the CFS data cfs_data[y_local, x_local] = vals[cfs_nldas_ind]/1000.0 # convert back to kg/kg if force_num == 4: # Precipitation # precipitation is estimated using a weibull distribution # valid values range from 3e-6 mm/s (0.01 mm/hr) up to 100 mm/hr spacing = vals[2] - vals[1] cfs_zero_pcp_interp = cfs_prev_zero_pcp_sub[y_local, x_local] * interpFactor1 + \ cfs_zero_pcp_sub[y_local, x_local] * interpFactor2 cfs_cdf = 1 - np.exp(-(np.power((vals / cfs_param_1_interp), cfs_param_2_interp))) cfs_cdf_scaled = ((1 - cfs_zero_pcp_interp) + cfs_cdf) / \ (cfs_cdf.max() + (1 - cfs_zero_pcp_interp)) nldas_nearest_zero_pcp = nldas_zero_pcp_sub[y_local, x_local] if nldas_nearest_2 == 0.0: # if second weibul parameter is zero, the # distribution has no width, no precipitation outside first bin nldas_cdf = np.empty([2000], np.float64) nldas_cdf[:] = 1.0 nldas_nearest_zero_pcp = 1.0 else: # valid point, see if we need to adjust cfsv2 precip nldas_cdf = 1 - np.exp(-(np.power((vals / nldas_nearest_1), nldas_nearest_2))) # compute adjusted value now using the CFSv2 forecast value and the two CDFs # find index in vals array diffTmp = np.absolute(vals - (cfs_interp_fcst*3600.0)) cfs_ind = np.where(diffTmp == diffTmp.min())[0][0] cfs_cdf_val = cfs_cdf[cfs_ind] # now whats the index of the closest cdf value in the nldas array? diffTmp = np.absolute(cfs_cdf_val - nldas_cdf) cfs_nldas_ind = np.where(diffTmp == diffTmp.min())[0][0] if cfs_interp_fcst == 0.0 and nldas_nearest_zero_pcp == 1.0: # if no rain in cfsv2, no rain in bias corrected field cfs_data[y_local, x_local] = 0.0 else: # else there is rain in cfs forecast, so adjust it in some manner pcp_pop_diff = nldas_nearest_zero_pcp - cfs_zero_pcp_interp if cfs_zero_pcp_interp <= nldas_nearest_zero_pcp: # if cfsv2 zero precip probability is less than nldas, # then do one adjustment if cfs_cdf_val <= pcp_pop_diff: # if cfsv2 precip cdf is still less than pop # difference, set precip to zero cfs_data[y_local, x_local] = 0.0 else: # cfsv2 precip cdf > nldas zero precip probability, # so adjust cfsv2 to nldas2 precip cfs_data[y_local, x_local] = vals[cfs_nldas_ind] / 3600.0 # convert back to mm/s # check for unreasonable corrections of cfs rainfall # ad-hoc setting that cfsv2 precipitation should not be corrected by more than 3x # if it is, this indicated nldas2 distribution is unrealistic # and default back to cfsv2 forecast value if (cfs_data[y_local, x_local] / cfs_interp_fcst) >= 3.0: cfs_data[y_local, x_local] = cfs_interp_fcst else: if cfs_cdf_val <= abs(pcp_pop_diff): # if cfsv2 cdf value less than pop difference, need to randomly # generate precip, since we're in the zero portion of the nldas # zero precip prob still randn = random.uniform(0.0, abs(pcp_pop_diff)) diffTmp = np.absolute(randn - nldas_cdf) new_nldas_ind = np.where(diffTmp == diffTmp.min())[0][0] cfs_data[y_local, x_local] = vals[new_nldas_ind] / 3600.0 # ad-hoc setting that cfsv2 precipitation should not be corrected by more than 3x # if it is, this indicated nldas2 distribution is unrealistic # and default back to cfsv2 forecast value if (cfs_data[y_local, x_local] / cfs_interp_fcst) >= 3.0: cfs_data[y_local, x_local] = cfs_interp_fcst else: cfs_data[y_local, x_local] = vals[cfs_nldas_ind] / 3600.0 # convert back to mm/s # ad-hoc setting that cfsv2 precipitation should not be corrected by more than 3x # if it is, this indicated nldas2 distribution is unrealistic # and default back 
to cfsv2 forecast value if (cfs_data[y_local, x_local] / cfs_interp_fcst) >= 3.0: cfs_data[y_local, x_local] = cfs_interp_fcst else: # No adjustment for this CFS pixel cell as we have missing parameter values. cfs_data[y_local, x_local] = cfs_interp_fcst # Regrid the local CFS slab to the output array try: input_forcings.esmf_field_in.data[:, :] = cfs_data except: ConfigOptions.errMsg = "Unable to place CFSv2 forcing data into temporary ESMF field." errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) try: input_forcings.esmf_field_out = input_forcings.regridObj(input_forcings.esmf_field_in, input_forcings.esmf_field_out) except: ConfigOptions.errMsg = "Unable to regrid CFSv2 variable: " + input_forcings.netcdf_var_names[force_num] errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) # Set any pixel cells outside the input domain to the global missing value. try: input_forcings.esmf_field_out.data[np.where(input_forcings.regridded_mask == 0)] = \ ConfigOptions.globalNdv except: ConfigOptions.errMsg = "Unable to run mask calculation on CFSv2 variable: " + \ input_forcings.netcdf_var_names[force_num] errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig) try: input_forcings.final_forcings[input_forcings.input_map_output[force_num], :, :] = \ input_forcings.esmf_field_out.data except: ConfigOptions.errMsg = "Unable to extract ESMF field data for CFSv2." errMod.log_critical(ConfigOptions, MpiConfig) errMod.check_program_status(ConfigOptions, MpiConfig)
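# Illustrative sketch (assumption: hypothetical scalar helpers): the precipitation branch of the loop above uses
# a Weibull CDF for both climatologies and rescales the CFSv2 CDF by the time-interpolated zero-precipitation
# probability before looking up quantiles. A simplified version of that CDF construction:
import numpy as np

def weibull_cdf(vals, scale, shape):
    """Weibull CDF, as used for the CFSv2 and NLDAS precipitation climatologies above."""
    return 1.0 - np.exp(-np.power(vals / scale, shape))

def cfs_precip_cdfs(scale, shape, zero_precip_prob,
                    val_min=0.01, val_max=100.0, val_step=0.049995):
    """Return the value bins, the raw Weibull CDF, and the CDF rescaled by P(zero precip)."""
    vals = np.arange(val_min, val_max, val_step)
    cdf = weibull_cdf(vals, scale, shape)
    # Shift/renormalize so the distribution accounts for the probability of no precipitation,
    # mirroring the cfs_cdf_scaled construction in the loop above.
    cdf_scaled = ((1.0 - zero_precip_prob) + cdf) / (cdf.max() + (1.0 - zero_precip_prob))
    return vals, cdf, cdf_scaled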
def output_final_ldasin(self, ConfigOptions, geoMetaWrfHydro, MpiConfig): """ Output routine to produce final LDASIN files for the WRF-Hydro modeling system. This function assumes all regridding, interpolation, downscaling, and bias correction has occurred on the necessary input forcings to generate a final set of outputs on the output grid. Since this program is run in parallel, all work is done on local "slabs" of data for each processor to make the code more efficient. To this end, this function will collect the "slabs" into final output grids that go into the output files. In addition, detailed geospatial metadata is translated from the input geogrid file to the final output files. :param ConfigOptions: :param geoMetaWrfHydro: :param MpiConfig: :return: """ output_variable_attribute_dict = { 'U2D': [ 0, 'm s-1', 'x_wind', '10-m U-component of wind', 'time: point', 0.001, 0.0, 3 ], 'V2D': [ 1, 'm s-1', 'y_wind', '10-m V-component of wind', 'time: point', 0.001, 0.0, 3 ], 'LWDOWN': [ 2, 'W m-2', 'surface_downward_longwave_flux', 'Surface downward long-wave radiation flux', 'time: point', 0.001, 0.0, 3 ], 'RAINRATE': [ 3, 'mm s^-1', 'precipitation_flux', 'Surface Precipitation Rate', 'time: mean', 1.0, 0.0, 0 ], 'T2D': [ 4, 'K', 'air_temperature', '2-m Air Temperature', 'time: point', 0.01, 100.0, 2 ], 'Q2D': [ 5, 'kg kg-1', 'surface_specific_humidity', '2-m Specific Humidity', 'time: point', 0.000001, 0.0, 6 ], 'PSFC': [ 6, 'Pa', 'air_pressure', 'Surface Pressure', 'time: point', 0.1, 0.0, 1 ], 'SWDOWN': [ 7, 'W m-2', 'surface_downward_shortwave_flux', 'Surface downward short-wave radiation flux', 'time: point', 0.001, 0.0, 3 ] } # Compose the ESMF remapped string attribute based on the regridding option chosen by the user. # We will default to the regridding method chosen for the first input forcing selected. if ConfigOptions.regrid_opt[0] == 1: regrid_att = "remapped via ESMF regrid_with_weights: Bilinear" elif ConfigOptions.regrid_opt[0] == 2: regrid_att = "remapped via ESMF regrid_with_weights: Nearest Neighbor" elif ConfigOptions.regrid_opt[0] == 3: regrid_att = "remapped via ESMF regrid_with_weights: Conservative Bilinear" # Ensure all processors are synced up before outputting. MpiConfig.comm.barrier() if MpiConfig.rank == 0: while (True): # Only output on the master processor. try: idOut = Dataset(self.outPath, 'w') except: ConfigOptions.errMsg = "Unable to create output file: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break # Create dimensions.
try: idOut.createDimension("time", 1) except: ConfigOptions.errMsg = "Unable to create time dimension in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.createDimension("y", geoMetaWrfHydro.ny_global) except: ConfigOptions.errMsg = "Unable to create y dimension in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.createDimension("x", geoMetaWrfHydro.nx_global) except: ConfigOptions.errMsg = "Unable to create x dimension in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.createDimension("reference_time", 1) except: ConfigOptions.errMsg = "Unable to create reference_time dimension in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break # Set global attributes try: idOut.model_output_valid_time = self.outDate.strftime( "%Y-%m-%d_%H:%M:00") except: ConfigOptions.errMsg = "Unable to set the model_output_valid_time attribute in :" + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.model_initialization_time = ConfigOptions.current_fcst_cycle.strftime( "%Y-%m-%d_%H:%M:00") except: ConfigOptions.errMsg = "Unable to set the model_initialization_time global " \ "attribute in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break if ConfigOptions.nwmVersion is not None: try: idOut.NWM_version_number = "v" + str( ConfigOptions.nwmVersion) except: ConfigOptions.errMsg = "Unable to set the NWM_version_number global attribute in: " \ + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break if ConfigOptions.nwmConfig is not None: try: idOut.model_configuration = ConfigOptions.nwmConfig except: ConfigOptions.errMsg = "Unable to set the model_configuration global attribute in: " + \ self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.model_output_type = "forcing" except: ConfigOptions.errMsg = "Unable to put model_output_type global attribute in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.total_valid_times = float( ConfigOptions.num_output_steps) except: ConfigOptions.errMsg = "Unable to create total_valid_times global attribute in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break # Create variables. try: idOut.createVariable('time', 'i4', ('time')) except: ConfigOptions.errMsg = "Unable to create time variable in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.createVariable('reference_time', 'i4', ('reference_time')) except: ConfigOptions.errMsg = "Unable to create reference_time variable in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break # Populate time and reference time variables with appropriate attributes and time values. 
try: idOut.variables[ 'time'].units = "minutes since 1970-01-01 00:00:00 UTC" except: ConfigOptions.errMsg = "Unable to create time units attribute in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables['time'].standard_name = "time" except: ConfigOptions.errMsg = "Unable to create time standard_name attribute in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables['time'].long_name = "valid output time" except: ConfigOptions.errMsg = "Unable to create time long_name attribute in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables[ 'reference_time'].units = "minutes since 1970-01-01 00:00:00 UTC" except: ConfigOptions.errMsg = "Unable to create reference_time units attribute in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables[ 'reference_time'].standard_name = "forecast_reference_time" except: ConfigOptions.errMsg = "Unable to create reference_time standard_name attribute in: " + \ self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables[ 'reference_time'].long_name = "model initialization time" except: ConfigOptions.errMsg = "Unable to create reference_time long_name attribute in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break # Populate time variables dEpoch = datetime.datetime(1970, 1, 1) dtValid = self.outDate - dEpoch dtRef = ConfigOptions.current_fcst_cycle - dEpoch try: idOut.variables['time'][0] = int(dtValid.days * 24.0 * 60.0) + \ int(math.floor(dtValid.seconds / 60.0)) except: ConfigOptions.errMsg = "Unable to populate the time variable in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables['reference_time'][0] = int(dtRef.days * 24.0 * 60.0) + \ int(math.floor(dtRef.seconds / 60.0)) except: ConfigOptions.errMsg = "Unable to populate the time variable in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break # Create geospatial metadata coordinate variables if data was read in from an optional # spatial metadata file. if ConfigOptions.spatial_meta is not None: # Create coordinate variables and populate with attributes read in. 
try: if ConfigOptions.useCompression == 1: idOut.createVariable('x', 'f8', ('x'), zlib=True, complevel=2) else: idOut.createVariable('x', 'f8', ('x')) except: ConfigOptions.errMsg = "Unable to create x variable in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables['x'].setncatts( geoMetaWrfHydro.x_coord_atts) except: ConfigOptions.errMsg = "Unable to establish x coordinate attributes in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables['x'][:] = geoMetaWrfHydro.x_coords except: ConfigOptions.errMsg = "Unable to place x coordinate values into output variable " \ "for output file: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: if ConfigOptions.useCompression == 1: idOut.createVariable('y', 'f8', ('y'), zlib=True, complevel=2) else: idOut.createVariable('y', 'f8', ('y')) except: ConfigOptions.errMsg = "Unable to create y variable in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables['y'].setncatts( geoMetaWrfHydro.y_coord_atts) except: ConfigOptions.errMsg = "Unable to establish y coordinate attributes in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables['y'][:] = geoMetaWrfHydro.y_coords except: ConfigOptions.errMsg = "Unable to place y coordinate values into output variable " \ "for output file: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.createVariable('crs', 'S1') except: ConfigOptions.errMsg = "Unable to create crs in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables['crs'].setncatts( geoMetaWrfHydro.crs_atts) except: ConfigOptions.errMsg = "Unable to establish crs attributes in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break # Loop through and create each variable, along with expected attributes. for varTmp in output_variable_attribute_dict: try: if ConfigOptions.useCompression == 1: if varTmp != 'RAINRATE': idOut.createVariable( varTmp, 'f4', ('time', 'y', 'x'), fill_value=ConfigOptions.globalNdv, zlib=True, complevel=2, least_significant_digit= output_variable_attribute_dict[varTmp][7]) #idOut.createVariable(varTmp, 'i4', ('time', 'y', 'x'), # fill_value=int(ConfigOptions.globalNdv/ # output_variable_attribute_dict[varTmp][5]), # zlib=True, complevel=2) else: idOut.createVariable( varTmp, 'f4', ('time', 'y', 'x'), fill_value=ConfigOptions.globalNdv, zlib=True, complevel=2) else: idOut.createVariable( varTmp, 'f4', ('time', 'y', 'x'), fill_value=ConfigOptions.globalNdv) except: ConfigOptions.errMsg = "Unable to create " + varTmp + " variable in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables[ varTmp].cell_methods = output_variable_attribute_dict[ varTmp][4] except: ConfigOptions.errMsg = "Unable to create cell_methods attribute for: " + varTmp + \ " in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables[varTmp].remap = regrid_att except: ConfigOptions.errMsg = "Unable to create remap attribute for: " + varTmp + \ " in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break # Place geospatial metadata attributes in if we have them. 
if ConfigOptions.spatial_meta is not None: try: idOut.variables[varTmp].grid_mapping = 'crs' except: ConfigOptions.errMsg = "Unable to create grid_mapping attribute for: " + \ varTmp + " in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break if 'esri_pe_string' in geoMetaWrfHydro.crs_atts.keys(): try: idOut.variables[ varTmp].esri_pe_string = geoMetaWrfHydro.crs_atts[ 'esri_pe_string'] except: ConfigOptions.errMsg = "Unable to create esri_pe_string attribute for: " + \ varTmp + " in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break if 'proj4' in geoMetaWrfHydro.spatial_global_atts.keys( ): try: idOut.variables[ varTmp].proj4 = geoMetaWrfHydro.spatial_global_atts[ 'proj4'] except: ConfigOptions.errMsg = "Unable to create proj4 attribute for: " + varTmp + \ " in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables[ varTmp].units = output_variable_attribute_dict[ varTmp][1] except: ConfigOptions.errMsg = "Unable to create units attribute for: " + varTmp + " in: " + \ self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables[ varTmp].standard_name = output_variable_attribute_dict[ varTmp][2] except: ConfigOptions.errMsg = "Unable to create standard_name attribute for: " + varTmp + \ " in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break try: idOut.variables[ varTmp].long_name = output_variable_attribute_dict[ varTmp][3] except: ConfigOptions.errMsg = "Unable to create long_name attribute for: " + varTmp + \ " in: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break # If we are using scale_factor / add_offset, create here. #if ConfigOptions.useCompression == 1: # if varTmp != 'RAINRATE': # try: # idOut.variables[varTmp].scale_factor = output_variable_attribute_dict[varTmp][5] # except: # ConfigOptions.errMsg = "Unable to create scale_factor attribute for: " + varTmp + \ # " in: " + self.outPath # errMod.log_critical(ConfigOptions, MpiConfig) # break # try: # idOut.variables[varTmp].add_offset = output_variable_attribute_dict[varTmp][6] # except: # ConfigOptions.errMsg = "Unable to create add_offset attribute for: " + varTmp + \ # " in: " + self.outPath # errMod.log_critical(ConfigOptions, MpiConfig) # break break errMod.check_program_status(ConfigOptions, MpiConfig) # Now loop through each variable, collect the data (call on each processor), assemble into the final # output grid, and place into the output file (if on processor 0). for varTmp in output_variable_attribute_dict: # First run a check for missing values. There should be none at this point. errMod.check_missing_final( self.outPath, ConfigOptions, self.output_local[ output_variable_attribute_dict[varTmp][0], :, :], varTmp, MpiConfig) if ConfigOptions.errFlag == 1: continue # Collect data from the various processors, and place into the output file. try: final = MpiConfig.comm.gather(self.output_local[ output_variable_attribute_dict[varTmp][0], :, :], root=0) except: ConfigOptions.errMsg = "Unable to gather final grids for: " + varTmp errMod.log_critical(ConfigOptions, MpiConfig) continue MpiConfig.comm.barrier() if MpiConfig.rank == 0: while (True): try: dataOutTmp = np.concatenate( [final[i] for i in range(MpiConfig.size)], axis=0) except: ConfigOptions.errMsg = "Unable to finalize collection of output grids for: " + varTmp errMod.log_critical(ConfigOptions, MpiConfig) break # If we are using scale_factor/add_offset, create the integer values here. 
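# Illustrative sketch (assumption: generic CF packing arithmetic; the engine currently leaves this path disabled):
# if the commented-out scale_factor/add_offset code above and below were enabled, packed integer values and
# physical values would be related by value = packed * scale_factor + add_offset, using the scale/offset columns
# from the attribute dictionary above.
import numpy as np

def pack_values(data, scale_factor, add_offset):
    """Pack physical values to integers per CF conventions (value = packed*scale_factor + add_offset)."""
    return np.round((data - add_offset) / scale_factor).astype(np.int32)

def unpack_values(packed, scale_factor, add_offset):
    """Recover physical values from packed integers."""
    return packed.astype(np.float64) * scale_factor + add_offset

# e.g. T2D uses scale_factor=0.01 and add_offset=100.0 in the dictionary above:
# pack_values(np.array([285.37]), 0.01, 100.0) -> array([18537], dtype=int32)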
#if ConfigOptions.useCompression == 1: # if varTmp != 'RAINRATE': # try: # dataOutTmp = dataOutTmp - output_variable_attribute_dict[varTmp][6] # dataOutTmp[:,:] = dataOutTmp[:,:] / output_variable_attribute_dict[varTmp][5] # dataOutTmp = dataOutTmp.astype(int) # except: # ConfigOptions.errMsg = "Unable to convert final output grid to integer type for: " + varTmp # errMod.log_critical(ConfigOptions, MpiConfig) # break try: idOut.variables[varTmp][0, :, :] = dataOutTmp except: ConfigOptions.errMsg = "Unable to place final output grid for: " + varTmp errMod.log_critical(ConfigOptions, MpiConfig) break # Reset temporary data objects to keep memory usage down. dataOutTmp = None break # Reset temporary data objects to keep memory usage down. final = None errMod.check_program_status(ConfigOptions, MpiConfig) if MpiConfig.rank == 0: while (True): # Close the NetCDF file try: idOut.close() except: ConfigOptions.errMsg = "Unable to close output file: " + self.outPath errMod.log_critical(ConfigOptions, MpiConfig) break break errMod.check_program_status(ConfigOptions, MpiConfig)
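# Illustrative sketch (assumption: mpi4py with the row-wise domain decomposition used above; names are
# hypothetical): each rank owns a (ny_local, nx_global) slab of the output grid; rank 0 gathers the slabs and
# stacks them along the y axis before writing, which is what the gather/concatenate calls above do per variable.
import numpy as np
from mpi4py import MPI

def gather_global_grid(local_slab, comm=MPI.COMM_WORLD):
    """Gather per-rank slabs to rank 0 and concatenate them into the full output grid."""
    slabs = comm.gather(local_slab, root=0)
    if comm.Get_rank() == 0:
        return np.concatenate([slabs[i] for i in range(comm.Get_size())], axis=0)
    return None

# Usage (hypothetical): full_t2d = gather_global_grid(output_local[4, :, :])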