def nearest_neighbor(input_forcings, ConfigOptions, MpiConfig):
    """
    Set the current output regridded forcings to whichever bounding input
    forecast step (previous or next) is closest in time to the current
    output date.

    :param input_forcings: input forcings object holding the regridded states.
    :param ConfigOptions: configuration object with the current output date.
    :param MpiConfig: MPI configuration object (rank 0 performs logging).
    :return: None - input_forcings.final_forcings is modified in place.
    """
    # CFSv2 with NWM bias correction performs its own temporal
    # interpolation internally, so skip the nearest-neighbor selection.
    bias_bypass = ConfigOptions.runCfsNldasBiasCorrect and \
                  input_forcings.productName == "CFSv2_6Hr_Global_GRIB2"
    if bias_bypass:
        if MpiConfig.rank == 0:
            ConfigOptions.statusMsg = "Bypassing temporal interpolation routine due to " \
                                      "NWM bias correction for CFSv2"
            errMod.log_msg(ConfigOptions, MpiConfig)
        return

    # Time offsets of the current output date from the two bounding
    # forecast output steps.
    offset_prev = ConfigOptions.current_output_date - input_forcings.fcst_date1
    offset_next = ConfigOptions.current_output_date - input_forcings.fcst_date2

    if abs(offset_next.total_seconds()) <= abs(offset_prev.total_seconds()):
        # The next forecast step is at least as close - use its states.
        input_forcings.final_forcings[:, :, :] = input_forcings.regridded_forcings2[:, :, :]
    else:
        # Otherwise fall back on the previous forecast step's states.
        input_forcings.final_forcings[:, :, :] = input_forcings.regridded_forcings1[:, :, :]
def ncar_topo_adj(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig):
    """
    Topographically adjust the incoming shortwave radiation flux. By the
    time this is called, the necessary static input grids (height, slope,
    etc.) should already exist for each local slab of data.

    :param input_forcings: input forcings object (final_forcings adjusted in place).
    :param ConfigOptions: configuration options object.
    :param GeoMetaWrfHydro: geospatial metadata for the WRF-Hydro domain.
    :param MpiConfig: MPI configuration object (rank 0 performs logging).
    :return: None
    """
    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Performing topographic adjustment to incoming " \
                                  "shortwave radiation flux."
        errMod.log_msg(ConfigOptions, MpiConfig)

    # Record missing cells up front so they can be restored after the
    # adjustment runs.
    try:
        missing_cells = np.where(input_forcings.final_forcings == ConfigOptions.globalNdv)
    except:
        ConfigOptions.errMsg = "Unable to perform NDV search on input forcings"
        errMod.log_critical(ConfigOptions, MpiConfig)
        return

    # Unit-conversion constants (degrees -> radians; degrees per day).
    DEGRAD = math.pi / 180.0
    DPD = 360.0 / 365.0

    # Solar declination and solar constant for the current datetime.
    try:
        declin, solcon = radconst(ConfigOptions)
    except:
        ConfigOptions.errMsg = "Unable to calculate solar constants based on datetime information."
        errMod.log_critical(ConfigOptions, MpiConfig)
        return

    # Cosine of the solar zenith angle and the hour angle.
    try:
        coszen_loc, hrang_loc = calc_coszen(ConfigOptions, declin, GeoMetaWrfHydro)
    except:
        ConfigOptions.errMsg = "Unable to calculate COSZEN or HRANG variables for topographic adjustment " \
                               "of incoming shortwave radiation"
        errMod.log_critical(ConfigOptions, MpiConfig)
        return

    # Apply the actual topographic adjustment of the shortwave fluxes.
    try:
        TOPO_RAD_ADJ_DRVR(GeoMetaWrfHydro, input_forcings, coszen_loc, declin, solcon, hrang_loc)
    except:
        ConfigOptions.errMsg = "Unable to perform final topographic adjustment of incoming " \
                               "shortwave radiation fluxes."
        errMod.log_critical(ConfigOptions, MpiConfig)
        return

    # Restore the missing values flagged earlier.
    input_forcings.final_forcings[missing_cells] = ConfigOptions.globalNdv

    # Release temporaries for memory efficiency.
    declin = None
    solcon = None
    coszen_loc = None
    hrang_loc = None
    missing_cells = None
def weighted_average(input_forcings, ConfigOptions, MpiConfig):
    """
    Set the current output regridded fields as a weighted average between
    the previous forecast output step and the next forecast output step.

    :param input_forcings: input forcings object holding the regridded states.
    :param ConfigOptions: configuration object with the current output date.
    :param MpiConfig: MPI configuration object (rank 0 performs logging).
    :return: None - input_forcings.final_forcings is modified in place.
    """
    # Without both bounding grids a weighted blend is impossible; flag the
    # entire output as missing and bail out.
    if input_forcings.regridded_forcings2 is None or \
            input_forcings.regridded_forcings1 is None:
        input_forcings.final_forcings[:, :, :] = ConfigOptions.globalNdv
        return

    # CFSv2 with NWM bias correction performs its own temporal
    # interpolation internally, so skip the blend.
    if ConfigOptions.runCfsNldasBiasCorrect and \
            input_forcings.productName == "CFSv2_6Hr_Global_GRIB2":
        if MpiConfig.rank == 0:
            ConfigOptions.statusMsg = "Bypassing temporal interpolation routine due to " \
                                      "NWM bias correction for CFSv2"
            errMod.log_msg(ConfigOptions, MpiConfig)
        return

    # Fractional weights derived from the time offsets of the current
    # output date against each bounding forecast step. outFreq is in
    # minutes, hence the conversion to seconds.
    window_seconds = input_forcings.outFreq * 60.0
    offset_prev = ConfigOptions.current_output_date - input_forcings.fcst_date1
    weight_prev = 1 - (abs(offset_prev.total_seconds()) / window_seconds)
    offset_next = ConfigOptions.current_output_date - input_forcings.fcst_date2
    weight_next = 1 - (abs(offset_next.total_seconds()) / window_seconds)

    # Flag cells missing in either bounding dataset before blending.
    missing_prev = np.where(input_forcings.regridded_forcings1 == ConfigOptions.globalNdv)
    missing_next = np.where(input_forcings.regridded_forcings2 == ConfigOptions.globalNdv)

    input_forcings.final_forcings[:, :, :] = \
        input_forcings.regridded_forcings1[:, :, :] * weight_prev + \
        input_forcings.regridded_forcings2[:, :, :] * weight_next

    # Any cell missing in either window is missing in the output.
    input_forcings.final_forcings[missing_prev] = ConfigOptions.globalNdv
    input_forcings.final_forcings[missing_next] = ConfigOptions.globalNdv

    # Reset for memory efficiency.
    missing_prev = None
    missing_next = None
def q2_down_classic(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig):
    """
    NCAR downscaling of 2-meter specific humidity using the already
    downscaled 2-meter temperature, the unadjusted surface pressure, and
    the downscaled surface pressure.

    :param input_forcings: input forcings object (final_forcings adjusted in place).
    :param ConfigOptions: configuration options object.
    :param GeoMetaWrfHydro: geospatial metadata for the WRF-Hydro domain.
    :param MpiConfig: MPI configuration object (rank 0 performs logging).
    :return: None
    """
    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Performing topographic adjustment to specific humidity."
        errMod.log_msg(ConfigOptions, MpiConfig)

    # Record missing cells up front so they can be restored afterwards.
    try:
        missing_cells = np.where(input_forcings.final_forcings == ConfigOptions.globalNdv)
    except:
        ConfigOptions.errMsg = "Unable to perform NDV search on input forcings"
        errMod.log_critical(ConfigOptions, MpiConfig)
        return

    # Step 1: relative humidity from the original surface pressure and
    # 2-meter temperature.
    try:
        rel_hum_grid = rel_hum(input_forcings, ConfigOptions)
    except:
        ConfigOptions.errMsg = "Unable to perform topographic downscaling of incoming " \
                               "specific humidity to relative humidity"
        errMod.log_critical(ConfigOptions, MpiConfig)
        return

    # Step 2: back to specific humidity at the downscaled pressure and
    # temperature.
    try:
        q2_downscaled = mixhum_ptrh(input_forcings, rel_hum_grid, 2, ConfigOptions)
    except:
        ConfigOptions.errMsg = "Unable to perform topographic downscaling of " \
                               "incoming specific humidity"
        errMod.log_critical(ConfigOptions, MpiConfig)
        return

    # Index 5 of the forcing stack is 2-meter specific humidity.
    input_forcings.final_forcings[5, :, :] = q2_downscaled
    input_forcings.final_forcings[missing_cells] = ConfigOptions.globalNdv

    # Reset for memory efficiency.
    q2_downscaled = None
    missing_cells = None
def simple_lapse(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig):
    """
    Apply a single lapse-rate adjustment to modeled 2-meter temperature,
    driven by the elevation difference between the native input grid and
    the WRF-Hydro grid.

    :param input_forcings: input forcings object (final_forcings adjusted in place).
    :param ConfigOptions: configuration options object.
    :param GeoMetaWrfHydro: geospatial metadata for the WRF-Hydro domain.
    :param MpiConfig: MPI configuration object (rank 0 performs logging).
    :return: None
    """
    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Applying simple lapse rate to temperature downscaling"
        errMod.log_msg(ConfigOptions, MpiConfig)

    # Elevation difference: native forcing grid minus WRF-Hydro grid.
    height_delta = input_forcings.height - GeoMetaWrfHydro.height

    # Preserve the un-downscaled temperatures for later use by the
    # specific-humidity downscaling option.
    if input_forcings.q2dDownscaleOpt > 0:
        input_forcings.t2dTmp[:, :] = input_forcings.final_forcings[4, :, :]

    # Record missing cells before adjusting so they can be restored.
    try:
        missing_cells = np.where(input_forcings.final_forcings == ConfigOptions.globalNdv)
    except:
        ConfigOptions.errMsg = "Unable to perform NDV search on input forcings"
        errMod.log_critical(ConfigOptions, MpiConfig)
        return

    # Lapse rate of 6.49 K/km applied to index 4 (2-meter temperature)
    # of the forcing stack.
    try:
        input_forcings.final_forcings[4, :, :] += (6.49 / 1000.0) * height_delta
    except:
        ConfigOptions.errMsg = "Unable to apply lapse rate to input 2-meter temperatures."
        errMod.log_critical(ConfigOptions, MpiConfig)
        return

    input_forcings.final_forcings[missing_cells] = ConfigOptions.globalNdv

    # Reset for memory efficiency.
    missing_cells = None
def pressure_down_classic(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig):
    """
    Downscale surface pressure to the WRF-Hydro domain using a hypsometric
    adjustment driven by the elevation difference between the native input
    grid and the WRF-Hydro grid.

    :param input_forcings: input forcings object (final_forcings adjusted in place).
    :param ConfigOptions: configuration options object.
    :param GeoMetaWrfHydro: geospatial metadata for the WRF-Hydro domain.
    :param MpiConfig: MPI configuration object (rank 0 performs logging).
    :return: None
    """
    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Performing topographic adjustment to surface pressure."
        errMod.log_msg(ConfigOptions, MpiConfig)

    # Elevation difference: native forcing grid minus WRF-Hydro grid.
    height_delta = input_forcings.height - GeoMetaWrfHydro.height

    # Preserve the un-downscaled pressure for later use by the
    # specific-humidity downscaling option.
    if input_forcings.q2dDownscaleOpt > 0:
        input_forcings.psfcTmp[:, :] = input_forcings.final_forcings[6, :, :]

    # Record missing cells before adjusting so they can be restored.
    try:
        missing_cells = np.where(input_forcings.final_forcings == ConfigOptions.globalNdv)
    except:
        ConfigOptions.errMsg = "Unable to perform NDV search on input forcings"
        errMod.log_critical(ConfigOptions, MpiConfig)
        return

    # Hypsometric-style adjustment: dP = P * g * dz / (R_d * T), with
    # g = 9.8 m/s^2 and R_d = 287.05 J/(kg K). Index 6 is surface
    # pressure, index 4 is 2-meter temperature.
    try:
        psfc = input_forcings.final_forcings[6, :, :]
        t2d = input_forcings.final_forcings[4, :, :]
        input_forcings.final_forcings[6, :, :] = psfc + (psfc * height_delta * 9.8) / (t2d * 287.05)
    except:
        ConfigOptions.errMsg = "Unable to downscale surface pressure to input forcings."
        errMod.log_critical(ConfigOptions, MpiConfig)
        return

    input_forcings.final_forcings[missing_cells] = ConfigOptions.globalNdv

    # Reset for memory efficiency.
    missing_cells = None
def _nearest_index(grid, value):
    """Return the index of the element of *grid* closest in value to *value*."""
    return int(np.argmin(np.absolute(grid - value)))


def _cap_precip_adjustment(adjusted, raw):
    """
    Ad-hoc sanity cap on precipitation bias correction: CFSv2 precipitation
    should not be corrected by 3x or more. When it is, the NLDAS2
    distribution is considered unrealistic and the raw CFSv2 forecast value
    is used instead. NOTE(review): the raw == 0.0 case previously divided by
    zero; it is now returned uncapped since the ratio is undefined there.
    """
    if raw > 0.0 and (adjusted / raw) >= 3.0:
        return raw
    return adjusted


def cfsv2_nldas_nwm_bias_correct(input_forcings, GeoMetaWrfHydro, ConfigOptions, MpiConfig, force_num):
    """
    Run CDF/PDF parametric bias correction of CFSv2 forecasts against NLDAS2
    climatology, SPECIFIC to the NWM long-range configuration.

    Workflow:
        1.) Rank 0 reads the NLDAS and CFSv2 distribution parameter grids
            (on the global CFSv2 grid) and scatters them to all processors.
        2.) The six-hour CFSv2 values and parameters are interpolated in
            time to the current output timestep.
        3.) CFSv2 and NLDAS CDFs are computed per pixel and the CFSv2 value
            is adjusted by CDF (quantile) matching.
        4.) The adjusted values are regridded to the WRF-Hydro domain using
            the pre-calculated ESMF regridding object and placed into the
            final output arrays for further processing (downscaling).

    :param input_forcings: input forcings object for the CFSv2 product.
    :param GeoMetaWrfHydro: geospatial metadata for the WRF-Hydro domain.
    :param ConfigOptions: configuration options object.
    :param MpiConfig: MPI configuration object.
    :param force_num: index of the forcing variable being bias-corrected.
    :return: None - output is placed into input_forcings.final_forcings.
    """
    # Map each forcing index to the expected NLDAS NetCDF parameter
    # variable names.
    nldasParam1Vars = {
        2: 'UGRD10M_PARAM_1',
        3: 'VGRD10M_PARAM_1',
        6: 'LW_PARAM_1',
        4: 'PRATE_PARAM_1',
        0: 'T2M_PARAM_1',
        1: 'Q2M_PARAM_1',
        7: 'PSFC_PARAM_1',
        5: 'SW_PARAM_1'
    }
    nldasParam2Vars = {
        2: 'UGRD10M_PARAM_2',
        3: 'VGRD10M_PARAM_2',
        6: 'LW_PARAM_2',
        4: 'PRATE_PARAM_2',
        0: 'T2M_PARAM_2',
        1: 'Q2M_PARAM_2',
        7: 'PSFC_PARAM_2',
        5: 'SW_PARAM_2'
    }
    # Map each forcing index to the CFSv2 climatology file-name component.
    cfsParamPathVars = {
        2: 'ugrd',
        3: 'vgrd',
        6: 'dlwsfc',
        4: 'prate',
        0: 'tmp2m',
        1: 'q2m',
        7: 'pressfc',
        5: 'dswsfc'
    }
    # Min/max range and bin step of the discretized CDF/PDF axis for each
    # variable (in each variable's native or working units).
    valRange1 = {
        2: -50.0,
        3: -50.0,
        6: 1.0,
        4: 0.01,
        0: 200.0,
        1: 0.01,
        7: 50000.0,
        5: 0.0
    }
    valRange2 = {
        2: 50.0,
        3: 50.0,
        6: 800.0,
        4: 100.0,
        0: 330.0,
        1: 40.0,
        7: 1100000.0,
        5: 1000.0
    }
    valStep = {
        2: 0.1,
        3: 0.1,
        6: 0.19975,
        4: 0.049995,
        0: 0.1,
        1: 3.9999,
        7: 350.0,
        5: 10.0
    }

    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Running NLDAS-CFSv2 CDF/PDF bias correction on variable: " + \
                                  input_forcings.netcdf_var_names[force_num]
        errMod.log_msg(ConfigOptions, MpiConfig)

    # This correction is only valid for the CFSv2 6-hour global product.
    if input_forcings.productName != "CFSv2_6Hr_Global_GRIB2":
        ConfigOptions.errMsg = "Attempting to run CFSv2-NLDAS bias correction on: " + \
                               input_forcings.productName
        errMod.log_critical(ConfigOptions, MpiConfig)
    errMod.check_program_status(ConfigOptions, MpiConfig)

    # Open the necessary parameter grids, which are on the global CFSv2 grid,
    # then scatter them out to the various processors. The while(True)/break
    # pattern lets rank 0 abort at the first error while every rank still
    # reaches the collective check_program_status call below.
    if MpiConfig.rank == 0:
        while (True):
            nldas_param_file = input_forcings.paramDir + "/NLDAS_Climo/nldas2_" + \
                               ConfigOptions.current_output_date.strftime('%m%d%H') + \
                               "_dist_params.nc"
            if not os.path.isfile(nldas_param_file):
                ConfigOptions.errMsg = "Unable to locate necessary bias correction parameter file: " + \
                                       nldas_param_file
                errMod.log_critical(ConfigOptions, MpiConfig)
                break

            # Open the NetCDF file.
            try:
                idNldasParam = Dataset(nldas_param_file, 'r')
            except:
                ConfigOptions.errMsg = "Unable to open parameter file: " + nldas_param_file
                errMod.log_critical(ConfigOptions, MpiConfig)
                break

            # Ensure dimensions/variables are as expected (190 x 384 is the
            # global CFSv2 grid these parameters were generated on).
            if 'lat_0' not in idNldasParam.dimensions.keys():
                ConfigOptions.errMsg = "Expected to find lat_0 dimension in: " + nldas_param_file
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            if 'lon_0' not in idNldasParam.dimensions.keys():
                ConfigOptions.errMsg = "Expected to find lon_0 dimension in: " + nldas_param_file
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            if idNldasParam.dimensions['lat_0'].size != 190:
                ConfigOptions.errMsg = "Expected lat_0 size is 190 - found size of: " + \
                                       str(idNldasParam.dimensions['lat_0'].size) + " in: " + nldas_param_file
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            if idNldasParam.dimensions['lon_0'].size != 384:
                ConfigOptions.errMsg = "Expected lon_0 size is 384 - found size of: " + \
                                       str(idNldasParam.dimensions['lon_0'].size) + " in: " + nldas_param_file
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            if nldasParam1Vars[force_num] not in idNldasParam.variables.keys():
                ConfigOptions.errMsg = "Expected variable: " + nldasParam1Vars[force_num] + " not found " + \
                                       "in: " + nldas_param_file
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            if nldasParam2Vars[force_num] not in idNldasParam.variables.keys():
                ConfigOptions.errMsg = "Expected variable: " + nldasParam2Vars[force_num] + " not found " + \
                                       "in: " + nldas_param_file
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            if force_num == 4:
                if 'ZERO_PRECIP_PROB' not in idNldasParam.variables.keys():
                    ConfigOptions.errMsg = "Expected variable: ZERO_PRECIP_PROB not found in: " + \
                                           nldas_param_file
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break

            # Extract the two NLDAS distribution parameter grids.
            try:
                nldas_param_1 = idNldasParam.variables[nldasParam1Vars[force_num]][:, :]
            except:
                ConfigOptions.errMsg = "Unable to extract: " + nldasParam1Vars[force_num] + \
                                       " from: " + nldas_param_file
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            try:
                nldas_param_2 = idNldasParam.variables[nldasParam2Vars[force_num]][:, :]
            except:
                ConfigOptions.errMsg = "Unable to extract: " + nldasParam2Vars[force_num] + \
                                       " from: " + nldas_param_file
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            if nldas_param_1.shape[0] != 190 or nldas_param_1.shape[1] != 384:
                ConfigOptions.errMsg = "Parameter variable: " + nldasParam1Vars[force_num] + " from: " + \
                                       nldas_param_file + " not of shape [190,384]."
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            if nldas_param_2.shape[0] != 190 or nldas_param_2.shape[1] != 384:
                ConfigOptions.errMsg = "Parameter variable: " + nldasParam2Vars[force_num] + " from: " + \
                                       nldas_param_file + " not of shape [190,384]."
                errMod.log_critical(ConfigOptions, MpiConfig)
                break

            # Extract the fill value.
            try:
                fillTmp = idNldasParam.variables[nldasParam1Vars[force_num]]._FillValue
            except:
                ConfigOptions.errMsg = "Unable to extract Fill_Value from: " + nldas_param_file
                errMod.log_critical(ConfigOptions, MpiConfig)
                break

            # Read in the zero precip prob grid if we are bias correcting
            # precipitation.
            if force_num == 4:
                try:
                    nldas_zero_pcp = idNldasParam.variables['ZERO_PRECIP_PROB'][:, :]
                except:
                    ConfigOptions.errMsg = "Unable to extract ZERO_PRECIP_PROB from: " + nldas_param_file
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break
                if nldas_zero_pcp.shape[0] != 190 or nldas_zero_pcp.shape[1] != 384:
                    ConfigOptions.errMsg = "Parameter variable: ZERO_PRECIP_PROB from: " + nldas_param_file + \
                                           " not of shape [190,384]."
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break

            # Set missing values accordingly. Each grid is masked against its
            # OWN fill values (previously nldas_param_2 was masked using the
            # nldas_param_1 mask).
            nldas_param_1[np.where(nldas_param_1 == fillTmp)] = ConfigOptions.globalNdv
            nldas_param_2[np.where(nldas_param_2 == fillTmp)] = ConfigOptions.globalNdv
            if force_num == 4:
                nldas_zero_pcp[np.where(nldas_zero_pcp == fillTmp)] = ConfigOptions.globalNdv

            break
    else:
        # Non-root ranks receive their slabs through scatter below.
        nldas_param_1 = None
        nldas_param_2 = None
        nldas_zero_pcp = None
    errMod.check_program_status(ConfigOptions, MpiConfig)

    # Reset the temporary fill value.
    fillTmp = None

    # Scatter NLDAS parameters to all processors.
    nldas_param_1_sub = MpiConfig.scatter_array(input_forcings, nldas_param_1, ConfigOptions)
    errMod.check_program_status(ConfigOptions, MpiConfig)
    nldas_param_2_sub = MpiConfig.scatter_array(input_forcings, nldas_param_2, ConfigOptions)
    errMod.check_program_status(ConfigOptions, MpiConfig)
    if force_num == 4:
        nldas_zero_pcp_sub = MpiConfig.scatter_array(input_forcings, nldas_zero_pcp, ConfigOptions)
        errMod.check_program_status(ConfigOptions, MpiConfig)

    if MpiConfig.rank == 0:
        while (True):
            # Compose the CFSv2 climatology parameter paths for the two
            # CFSv2 dates bounding the current output time.
            cfs_param_path1 = input_forcings.paramDir + "/CFSv2_Climo/cfs_" + \
                              cfsParamPathVars[force_num] + "_" + \
                              input_forcings.fcst_date1.strftime('%m%d') + "_" + \
                              input_forcings.fcst_date1.strftime('%H') + '_dist_params.nc'
            cfs_param_path2 = input_forcings.paramDir + "/CFSv2_Climo/cfs_" + \
                              cfsParamPathVars[force_num] + "_" + \
                              input_forcings.fcst_date2.strftime('%m%d') + "_" + \
                              input_forcings.fcst_date2.strftime('%H') + '_dist_params.nc'
            if not os.path.isfile(cfs_param_path1):
                ConfigOptions.errMsg = "Unable to locate necessary parameter file: " + cfs_param_path1
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            if not os.path.isfile(cfs_param_path2):
                ConfigOptions.errMsg = "Unable to locate necessary parameter file: " + cfs_param_path2
                errMod.log_critical(ConfigOptions, MpiConfig)
                break

            # Open the files and ensure they contain the correct information.
            try:
                idCfsParam1 = Dataset(cfs_param_path1, 'r')
            except:
                ConfigOptions.errMsg = "Unable to open parameter file: " + cfs_param_path1
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            try:
                idCfsParam2 = Dataset(cfs_param_path2, 'r')
            except:
                ConfigOptions.errMsg = "Unable to open parameter file: " + cfs_param_path2
                errMod.log_critical(ConfigOptions, MpiConfig)
                break

            ConfigOptions.statusMsg = "Checking CFS parameter files."
            errMod.log_msg(ConfigOptions, MpiConfig)
            if 'DISTRIBUTION_PARAM_1' not in idCfsParam1.variables.keys():
                ConfigOptions.errMsg = "Expected DISTRIBUTION_PARAM_1 variable not found in: " + cfs_param_path1
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            # NOTE: error messages below now correctly reference PARAM_2.
            if 'DISTRIBUTION_PARAM_2' not in idCfsParam1.variables.keys():
                ConfigOptions.errMsg = "Expected DISTRIBUTION_PARAM_2 variable not found in: " + cfs_param_path1
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            if 'DISTRIBUTION_PARAM_1' not in idCfsParam2.variables.keys():
                ConfigOptions.errMsg = "Expected DISTRIBUTION_PARAM_1 variable not found in: " + cfs_param_path2
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            if 'DISTRIBUTION_PARAM_2' not in idCfsParam2.variables.keys():
                ConfigOptions.errMsg = "Expected DISTRIBUTION_PARAM_2 variable not found in: " + cfs_param_path2
                errMod.log_critical(ConfigOptions, MpiConfig)
                break

            # Parameters from the NEXT (date2) CFSv2 climatology file.
            try:
                param_1 = idCfsParam2.variables['DISTRIBUTION_PARAM_1'][:, :]
            except:
                ConfigOptions.errMsg = "Unable to extract DISTRIBUTION_PARAM_1 from: " + cfs_param_path2
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            try:
                param_2 = idCfsParam2.variables['DISTRIBUTION_PARAM_2'][:, :]
            except:
                ConfigOptions.errMsg = "Unable to extract DISTRIBUTION_PARAM_2 from: " + cfs_param_path2
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            # lat_0/lon_0 are read purely to validate the file contents.
            try:
                lat_0 = idCfsParam2.variables['lat_0'][:]
            except:
                ConfigOptions.errMsg = "Unable to extract lat_0 from: " + cfs_param_path2
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            try:
                lon_0 = idCfsParam2.variables['lon_0'][:]
            except:
                ConfigOptions.errMsg = "Unable to extract lon_0 from: " + cfs_param_path2
                errMod.log_critical(ConfigOptions, MpiConfig)
                break

            # Parameters from the PREVIOUS (date1) CFSv2 climatology file.
            try:
                prev_param_1 = idCfsParam1.variables['DISTRIBUTION_PARAM_1'][:, :]
            except:
                ConfigOptions.errMsg = "Unable to extract DISTRIBUTION_PARAM_1 from: " + cfs_param_path1
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            try:
                prev_param_2 = idCfsParam1.variables['DISTRIBUTION_PARAM_2'][:, :]
            except:
                ConfigOptions.errMsg = "Unable to extract DISTRIBUTION_PARAM_2 from: " + cfs_param_path1
                errMod.log_critical(ConfigOptions, MpiConfig)
                break

            # Shape checks now use 'or' so a mismatch in EITHER dimension is
            # fatal (previously 'and' let malformed grids through).
            if param_1.shape[0] != 190 or param_1.shape[1] != 384:
                ConfigOptions.errMsg = "Unexpected DISTRIBUTION_PARAM_1 found in: " + cfs_param_path2
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            if param_2.shape[0] != 190 or param_2.shape[1] != 384:
                ConfigOptions.errMsg = "Unexpected DISTRIBUTION_PARAM_2 found in: " + cfs_param_path2
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            if prev_param_1.shape[0] != 190 or prev_param_1.shape[1] != 384:
                ConfigOptions.errMsg = "Unexpected DISTRIBUTION_PARAM_1 found in: " + cfs_param_path1
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            if prev_param_2.shape[0] != 190 or prev_param_2.shape[1] != 384:
                ConfigOptions.errMsg = "Unexpected DISTRIBUTION_PARAM_2 found in: " + cfs_param_path1
                errMod.log_critical(ConfigOptions, MpiConfig)
                break

            ConfigOptions.statusMsg = "Reading in zero precip probs."
            errMod.log_msg(ConfigOptions, MpiConfig)
            # Read in the zero precip prob grids if we are bias correcting
            # precipitation.
            if force_num == 4:
                try:
                    zero_pcp = idCfsParam2.variables['ZERO_PRECIP_PROB'][:, :]
                except:
                    ConfigOptions.errMsg = "Unable to locate ZERO_PRECIP_PROB in: " + cfs_param_path2
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break
                # Previous-cycle grid now correctly read from idCfsParam1
                # (previously read again from idCfsParam2).
                try:
                    prev_zero_pcp = idCfsParam1.variables['ZERO_PRECIP_PROB'][:, :]
                except:
                    ConfigOptions.errMsg = "Unable to locate ZERO_PRECIP_PROB in: " + cfs_param_path1
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break
                if zero_pcp.shape[0] != 190 or zero_pcp.shape[1] != 384:
                    ConfigOptions.errMsg = "Unexpected ZERO_PRECIP_PROB found in: " + cfs_param_path2
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break
                if prev_zero_pcp.shape[0] != 190 or prev_zero_pcp.shape[1] != 384:
                    ConfigOptions.errMsg = "Unexpected ZERO_PRECIP_PROB found in: " + cfs_param_path1
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break

            # Reset any missing values. Because the fill values for these
            # files are all over the map, we just do a gross check here. For
            # the most part, there shouldn't be missing values.
            param_1[np.where(param_1 > 500000.0)] = ConfigOptions.globalNdv
            param_2[np.where(param_2 > 500000.0)] = ConfigOptions.globalNdv
            prev_param_1[np.where(prev_param_1 > 500000.0)] = ConfigOptions.globalNdv
            prev_param_2[np.where(prev_param_2 > 500000.0)] = ConfigOptions.globalNdv
            if force_num == 4:
                zero_pcp[np.where(zero_pcp > 500000.0)] = ConfigOptions.globalNdv
                prev_zero_pcp[np.where(prev_zero_pcp > 500000.0)] = ConfigOptions.globalNdv

            break
    else:
        param_1 = None
        param_2 = None
        prev_param_1 = None
        prev_param_2 = None
        zero_pcp = None
        prev_zero_pcp = None
    errMod.check_program_status(ConfigOptions, MpiConfig)

    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Scattering CFS parameter grids"
        errMod.log_msg(ConfigOptions, MpiConfig)
    # Scatter CFS parameters to all processors.
    cfs_param_1_sub = MpiConfig.scatter_array(input_forcings, param_1, ConfigOptions)
    errMod.check_program_status(ConfigOptions, MpiConfig)
    cfs_param_2_sub = MpiConfig.scatter_array(input_forcings, param_2, ConfigOptions)
    errMod.check_program_status(ConfigOptions, MpiConfig)
    cfs_prev_param_1_sub = MpiConfig.scatter_array(input_forcings, prev_param_1, ConfigOptions)
    errMod.check_program_status(ConfigOptions, MpiConfig)
    cfs_prev_param_2_sub = MpiConfig.scatter_array(input_forcings, prev_param_2, ConfigOptions)
    errMod.check_program_status(ConfigOptions, MpiConfig)
    if force_num == 4:
        cfs_zero_pcp_sub = MpiConfig.scatter_array(input_forcings, zero_pcp, ConfigOptions)
        errMod.check_program_status(ConfigOptions, MpiConfig)
        cfs_prev_zero_pcp_sub = MpiConfig.scatter_array(input_forcings, prev_zero_pcp, ConfigOptions)
        errMod.check_program_status(ConfigOptions, MpiConfig)

    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Closing CFS bias correction parameter files."
        errMod.log_msg(ConfigOptions, MpiConfig)
        while (True):
            # Close the parameter files.
            try:
                idNldasParam.close()
            except:
                ConfigOptions.errMsg = "Unable to close parameter file: " + nldas_param_file
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            try:
                idCfsParam1.close()
            except:
                ConfigOptions.errMsg = "Unable to close parameter file: " + cfs_param_path1
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            try:
                idCfsParam2.close()
            except:
                ConfigOptions.errMsg = "Unable to close parameter file: " + cfs_param_path2
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            break
    else:
        idNldasParam = None
        idCfsParam1 = None
        idCfsParam2 = None
    errMod.check_program_status(ConfigOptions, MpiConfig)

    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Creating local CFS CDF arrays."
        errMod.log_msg(ConfigOptions, MpiConfig)
    # Local array holding the bias-corrected CFS data for this processor.
    cfs_data = np.empty([input_forcings.ny_local, input_forcings.nx_local], np.float64)

    # Discretized value axis over which the CDFs/PDFs are evaluated.
    vals = np.arange(valRange1[force_num], valRange2[force_num], valStep[force_num])

    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Looping over local arrays to calculate bias corrections."
        errMod.log_msg(ConfigOptions, MpiConfig)

    # Time-interpolation factors are identical for every pixel, so compute
    # them once outside the loop. Six-hour intervals are assumed - this is
    # guaranteed by the product-name check at the top of this routine.
    dtFromPrevious = ConfigOptions.current_output_date - input_forcings.fcst_date1
    hrFromPrevious = dtFromPrevious.total_seconds() / 3600.0
    interpFactor1 = float(1 - (hrFromPrevious / 6.0))
    interpFactor2 = float(hrFromPrevious / 6.0)

    # Process each of the pixel cells for this local processor on the CFS grid.
    for x_local in range(0, input_forcings.nx_local):
        for y_local in range(0, input_forcings.ny_local):
            cfs_prev_tmp = input_forcings.coarse_input_forcings1[
                input_forcings.input_map_output[force_num], y_local, x_local]
            cfs_next_tmp = input_forcings.coarse_input_forcings2[
                input_forcings.input_map_output[force_num], y_local, x_local]

            # If ANY required parameter is missing, fall back to plain time
            # interpolation for this pixel.
            paramsTmp = [cfs_param_1_sub[y_local, x_local],
                         cfs_param_2_sub[y_local, x_local],
                         cfs_prev_param_1_sub[y_local, x_local],
                         cfs_prev_param_2_sub[y_local, x_local],
                         nldas_param_1_sub[y_local, x_local],
                         nldas_param_2_sub[y_local, x_local]]
            if force_num == 4:
                paramsTmp.extend([cfs_prev_zero_pcp_sub[y_local, x_local],
                                  cfs_zero_pcp_sub[y_local, x_local],
                                  nldas_zero_pcp_sub[y_local, x_local]])
            correctFlag = all(p != ConfigOptions.globalNdv for p in paramsTmp)

            # Interpolate the two CFS values (and parameters) in time.
            cfs_param_1_interp = cfs_prev_param_1_sub[y_local, x_local] * interpFactor1 + \
                                 cfs_param_1_sub[y_local, x_local] * interpFactor2
            cfs_param_2_interp = cfs_prev_param_2_sub[y_local, x_local] * interpFactor1 + \
                                 cfs_param_2_sub[y_local, x_local] * interpFactor2
            cfs_interp_fcst = cfs_prev_tmp * interpFactor1 + cfs_next_tmp * interpFactor2
            nldas_nearest_1 = nldas_param_1_sub[y_local, x_local]
            nldas_nearest_2 = nldas_param_2_sub[y_local, x_local]

            if not correctFlag:
                # No adjustment for this CFS pixel cell - missing parameters.
                cfs_data[y_local, x_local] = cfs_interp_fcst
                continue

            if force_num != 4 and force_num != 5 and force_num != 1:
                # Normally-distributed variables (not shortwave, precip, or
                # specific humidity): Gaussian PDF -> CDF matching.
                # 3.141592 retained from the original for reproducibility.
                pts = (vals - cfs_param_1_interp) / cfs_param_2_interp
                spacing = (vals[2] - vals[1]) / cfs_param_2_interp
                cfs_pdf = (np.exp(-0.5 * (np.power(pts, 2))) / math.sqrt(2 * 3.141592)) * spacing
                cfs_cdf = np.cumsum(cfs_pdf)

                pts = (vals - nldas_nearest_1) / nldas_nearest_2
                spacing = (vals[2] - vals[1]) / nldas_nearest_2
                nldas_pdf = (np.exp(-0.5 * (np.power(pts, 2))) / math.sqrt(2 * 3.141592)) * spacing
                nldas_cdf = np.cumsum(nldas_pdf)

                # Quantile-match: CFS CDF value at the forecast, then the
                # NLDAS value at that same CDF level.
                cfs_cdf_val = cfs_cdf[_nearest_index(vals, cfs_interp_fcst)]
                cfs_data[y_local, x_local] = vals[_nearest_index(nldas_cdf, cfs_cdf_val)]

            if force_num == 5:
                # Incoming shortwave radiation flux: simple ratio scaling of
                # the distribution means; near-zero fluxes are set to zero.
                if cfs_interp_fcst > 2.0 and cfs_param_1_interp > 2.0:
                    factor = nldas_nearest_1 / cfs_param_1_interp
                    cfs_data[y_local, x_local] = cfs_interp_fcst * factor
                else:
                    cfs_data[y_local, x_local] = 0.0

            if force_num == 1:
                # Specific humidity: Weibull CDF matching performed in g/kg.
                cfs_interp_fcst = cfs_interp_fcst * 1000.0  # kg/kg -> g/kg
                cfs_cdf = 1 - np.exp(-(np.power((vals / cfs_param_1_interp), cfs_param_2_interp)))
                nldas_cdf = 1 - np.exp(-(np.power((vals / nldas_nearest_1), nldas_nearest_2)))
                cfs_cdf_val = cfs_cdf[_nearest_index(vals, cfs_interp_fcst)]
                # Convert the matched value back to kg/kg.
                cfs_data[y_local, x_local] = vals[_nearest_index(nldas_cdf, cfs_cdf_val)] / 1000.0

            if force_num == 4:
                # Precipitation: Weibull distribution with a point mass at
                # zero (probability of zero precipitation). Valid values
                # range from 3e-6 mm/s (0.01 mm/hr) up to 100 mm/hr.
                cfs_zero_pcp_interp = cfs_prev_zero_pcp_sub[y_local, x_local] * interpFactor1 + \
                                      cfs_zero_pcp_sub[y_local, x_local] * interpFactor2
                cfs_cdf = 1 - np.exp(-(np.power((vals / cfs_param_1_interp), cfs_param_2_interp)))
                nldas_nearest_zero_pcp = nldas_zero_pcp_sub[y_local, x_local]
                if nldas_nearest_2 == 0.0:
                    # A zero second Weibull parameter means the distribution
                    # has no width - no precipitation outside the first bin.
                    nldas_cdf = np.full([len(vals)], 1.0, np.float64)
                    nldas_nearest_zero_pcp = 1.0
                else:
                    # Valid point; see if we need to adjust CFSv2 precip.
                    nldas_cdf = 1 - np.exp(-(np.power((vals / nldas_nearest_1), nldas_nearest_2)))

                # CFS CDF at the forecast value (mm/s -> mm/hr on the axis).
                cfs_cdf_val = cfs_cdf[_nearest_index(vals, cfs_interp_fcst * 3600.0)]
                cfs_nldas_ind = _nearest_index(nldas_cdf, cfs_cdf_val)

                if cfs_interp_fcst == 0.0 and nldas_nearest_zero_pcp == 1.0:
                    # No rain in CFSv2 -> no rain in the corrected field.
                    cfs_data[y_local, x_local] = 0.0
                else:
                    # There is rain in the CFS forecast; adjust it.
                    pcp_pop_diff = nldas_nearest_zero_pcp - cfs_zero_pcp_interp
                    if cfs_zero_pcp_interp <= nldas_nearest_zero_pcp:
                        if cfs_cdf_val <= pcp_pop_diff:
                            # Still within the NLDAS zero-precip
                            # probability: force to zero.
                            cfs_data[y_local, x_local] = 0.0
                        else:
                            # Match to the NLDAS2 precip (mm/hr -> mm/s).
                            adjusted = vals[cfs_nldas_ind] / 3600.0
                            cfs_data[y_local, x_local] = _cap_precip_adjustment(adjusted, cfs_interp_fcst)
                    else:
                        if cfs_cdf_val <= abs(pcp_pop_diff):
                            # In the zero portion of the NLDAS distribution:
                            # randomly generate precip from the tail.
                            randn = random.uniform(0.0, abs(pcp_pop_diff))
                            adjusted = vals[_nearest_index(nldas_cdf, randn)] / 3600.0
                        else:
                            adjusted = vals[cfs_nldas_ind] / 3600.0
                        cfs_data[y_local, x_local] = _cap_precip_adjustment(adjusted, cfs_interp_fcst)

    # Regrid the local bias-corrected CFS slab to the WRF-Hydro domain.
    try:
        input_forcings.esmf_field_in.data[:, :] = cfs_data
    except:
        ConfigOptions.errMsg = "Unable to place CFSv2 forcing data into temporary ESMF field."
        errMod.log_critical(ConfigOptions, MpiConfig)
    errMod.check_program_status(ConfigOptions, MpiConfig)

    try:
        input_forcings.esmf_field_out = input_forcings.regridObj(input_forcings.esmf_field_in,
                                                                 input_forcings.esmf_field_out)
    except:
        ConfigOptions.errMsg = "Unable to regrid CFSv2 variable: " + input_forcings.netcdf_var_names[force_num]
        errMod.log_critical(ConfigOptions, MpiConfig)
    errMod.check_program_status(ConfigOptions, MpiConfig)

    # Set any pixel cells outside the input domain to the global missing value.
    try:
        input_forcings.esmf_field_out.data[np.where(input_forcings.regridded_mask == 0)] = \
            ConfigOptions.globalNdv
    except:
        ConfigOptions.errMsg = "Unable to run mask calculation on CFSv2 variable: " + \
                               input_forcings.netcdf_var_names[force_num]
        errMod.log_critical(ConfigOptions, MpiConfig)
    errMod.check_program_status(ConfigOptions, MpiConfig)

    # Place the regridded data into the final output arrays.
    try:
        input_forcings.final_forcings[input_forcings.input_map_output[force_num], :, :] = \
            input_forcings.esmf_field_out.data
    except:
        ConfigOptions.errMsg = "Unable to extract ESMF field data for CFSv2."
        errMod.log_critical(ConfigOptions, MpiConfig)
    errMod.check_program_status(ConfigOptions, MpiConfig)
def process_forecasts(ConfigOptions, wrfHydroGeoMeta, inputForcingMod, suppPcpMod, MpiConfig, OutputObj):
    """
    Main calling module for running realtime forecasts and re-forecasts.

    NOTE(review): all ranks execute this function; errMod.check_program_status
    and MpiConfig.comm.barrier() calls are collective, so the statement order
    here must be identical on every rank.

    :param ConfigOptions: configuration/state object (cycle dates, paths, error/status messages).
    :param wrfHydroGeoMeta: WRF-Hydro geospatial metadata describing the output grid.
    :param inputForcingMod: dict of input-forcing objects, keyed by forcing key.
    :param suppPcpMod: dict of supplemental-precipitation objects, keyed by forcing key.
    :param MpiConfig: MPI state (rank, communicator, scatter helpers).
    :param OutputObj: output handler holding local output slabs and output paths.
    :return: None
    """
    # Loop through each WRF-Hydro forecast cycle being processed. Within
    # each cycle, perform the following tasks:
    # 1.) Loop over each output frequency
    # 2.) Determine the input forcing cycle dates (both before and after)
    #     for temporal interpolation, downscaling, and bias correction reasons.
    # 3.) If the input forcings haven't been opened and read into memory,
    #     open them.
    # 4.) Check to see if the ESMF objects for input forcings have been
    #     created. If not, create them, including the regridding object.
    # 5.) Regrid forcing grids for input cycle dates surrounding the
    #     current output timestep if they haven't been regridded.
    # 6.) Perform bias correction and/or downscaling.
    # 7.) Output final grids to LDASIN NetCDF files with associated
    #     WRF-Hydro geospatial metadata to the final output directories.
    # Throughout this entire process, log progress being made into LOG
    # files. Once a forecast cycle is complete, we will touch an empty
    # 'WrfHydroForcing.COMPLETE' flag in the directory. This will be
    # checked upon the beginning of this program to see if we
    # need to process any files.
    for fcstCycleNum in range(ConfigOptions.nFcsts):
        # Start time of this forecast cycle, offset from the beginning date by
        # the cycle frequency (fcst_freq is in minutes).
        ConfigOptions.current_fcst_cycle = ConfigOptions.b_date_proc + \
            datetime.timedelta(
                seconds=ConfigOptions.fcst_freq*60*fcstCycleNum
            )
        fcstCycleOutDir = ConfigOptions.output_dir + "/" + \
            ConfigOptions.current_fcst_cycle.strftime('%Y%m%d%H%M')
        completeFlag = fcstCycleOutDir + "/WrfHydroForcing.COMPLETE"
        if os.path.isfile(completeFlag):
            ConfigOptions.statusMsg = "Forecast Cycle: " + \
                ConfigOptions.current_fcst_cycle.strftime('%Y-%m-%d %H:%M') + \
                " has already completed."
            errMod.log_msg(ConfigOptions, MpiConfig)
            # We have already completed processing this cycle,
            # move on.
            continue

        if MpiConfig.rank == 0:
            # If the cycle directory doesn't exist, create it.
            if not os.path.isdir(fcstCycleOutDir):
                try:
                    os.mkdir(fcstCycleOutDir)
                except:
                    ConfigOptions.errMsg = "Unable to create output " \
                                           "directory: " + fcstCycleOutDir
                    errMod.err_out_screen_para(ConfigOptions.errMsg, MpiConfig)
        # Collective check: aborts all ranks if rank 0 hit an error above.
        errMod.check_program_status(ConfigOptions, MpiConfig)

        # Compose a path to a log file, which will contain information
        # about this forecast cycle.
        ConfigOptions.logFile = fcstCycleOutDir + "/LOG_" + \
            ConfigOptions.d_program_init.strftime('%Y%m%d%H%M') + \
            "_" + ConfigOptions.current_fcst_cycle.strftime('%Y%m%d%H%M')

        # Initialize the log file.
        try:
            errMod.init_log(ConfigOptions, MpiConfig)
        except:
            errMod.err_out_screen_para(ConfigOptions.errMsg, MpiConfig)
        errMod.check_program_status(ConfigOptions, MpiConfig)

        # Log information about this forecast cycle
        if MpiConfig.rank == 0:
            ConfigOptions.statusMsg = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
            errMod.log_msg(ConfigOptions, MpiConfig)
            ConfigOptions.statusMsg = 'Processing Forecast Cycle: ' + \
                ConfigOptions.current_fcst_cycle.strftime('%Y-%m-%d %H:%M')
            errMod.log_msg(ConfigOptions, MpiConfig)
            ConfigOptions.statusMsg = 'Forecast Cycle Length is: ' + \
                str(ConfigOptions.cycle_length_minutes) + " minutes"
            errMod.log_msg(ConfigOptions, MpiConfig)
        MpiConfig.comm.barrier()

        # Loop through each output timestep. Perform the following functions:
        # 1.) Calculate all necessary input files per user options.
        # 2.) Read in input forcings from GRIB/NetCDF files.
        # 3.) Regrid the forcings, and temporally interpolate.
        # 4.) Downscale.
        # 5.) Layer, and output as necessary.
        for outStep in range(1, ConfigOptions.num_output_steps + 1):
            # Reset out final grids to missing values.
            OutputObj.output_local[:, :, :] = -9999.0

            ConfigOptions.current_output_step = outStep
            OutputObj.outDate = ConfigOptions.current_fcst_cycle + datetime.timedelta(
                seconds=ConfigOptions.output_freq * 60 * outStep)
            ConfigOptions.current_output_date = OutputObj.outDate

            # Calculate the previous output timestep. This is used in potential downscaling routines.
            # On the first step there is no previous step, so it is pinned to the current date.
            if outStep == 1:
                ConfigOptions.prev_output_date = ConfigOptions.current_output_date
            else:
                ConfigOptions.prev_output_date = ConfigOptions.current_output_date - datetime.timedelta(
                    seconds=ConfigOptions.output_freq * 60)

            if MpiConfig.rank == 0:
                ConfigOptions.statusMsg = '========================================='
                errMod.log_msg(ConfigOptions, MpiConfig)
                ConfigOptions.statusMsg = "Processing for output timestep: " + \
                    OutputObj.outDate.strftime('%Y-%m-%d %H:%M')
                errMod.log_msg(ConfigOptions, MpiConfig)
            MpiConfig.comm.barrier()

            # Compose the expected path to the output file. Check to see if the file exists,
            # if so, continue to the next time step. Also initialize our output arrays if necessary.
            OutputObj.outPath = fcstCycleOutDir + "/" + OutputObj.outDate.strftime('%Y%m%d%H%M') + \
                ".LDASIN_DOMAIN1"
            MpiConfig.comm.barrier()

            if os.path.isfile(OutputObj.outPath):
                if MpiConfig.rank == 0:
                    ConfigOptions.statusMsg = "Output file: " + OutputObj.outPath + " exists. Moving " + \
                        " to the next output timestep."
                    errMod.log_msg(ConfigOptions, MpiConfig)
                errMod.check_program_status(ConfigOptions, MpiConfig)
                continue
            else:
                ConfigOptions.currentForceNum = 0
                ConfigOptions.currentCustomForceNum = 0

                # Loop over each of the input forcings specifed.
                for forceKey in ConfigOptions.input_forcings:
                    # Calculate the previous and next input cycle files from the inputs.
                    inputForcingMod[forceKey].calc_neighbor_files(
                        ConfigOptions, OutputObj.outDate, MpiConfig)
                    errMod.check_program_status(ConfigOptions, MpiConfig)

                    # Regrid forcings.
                    inputForcingMod[forceKey].regrid_inputs(
                        ConfigOptions, wrfHydroGeoMeta, MpiConfig)
                    errMod.check_program_status(ConfigOptions, MpiConfig)

                    # Run check on regridded fields for reasonable values that are not missing values.
                    errMod.check_forcing_bounds(ConfigOptions, inputForcingMod[forceKey], MpiConfig)
                    errMod.check_program_status(ConfigOptions, MpiConfig)

                    # If we are restarting a forecast cycle, re-calculate the neighboring files, and regrid the
                    # next set of forcings as the previous step just regridded the previous forcing.
                    if inputForcingMod[forceKey].rstFlag == 1:
                        if inputForcingMod[forceKey].regridded_forcings1 is not None and \
                                inputForcingMod[forceKey].regridded_forcings2 is not None:
                            # Set the forcings back to reflect we just regridded the previous set of inputs, not the next.
                            inputForcingMod[forceKey].regridded_forcings1[:, :, :] = \
                                inputForcingMod[forceKey].regridded_forcings2[:, :, :]

                        # Re-calculate the neighbor files.
                        inputForcingMod[forceKey].calc_neighbor_files(
                            ConfigOptions, OutputObj.outDate, MpiConfig)
                        errMod.check_program_status(ConfigOptions, MpiConfig)

                        # Regrid the forcings for the end of the window.
                        inputForcingMod[forceKey].regrid_inputs(
                            ConfigOptions, wrfHydroGeoMeta, MpiConfig)
                        errMod.check_program_status(ConfigOptions, MpiConfig)

                        inputForcingMod[forceKey].rstFlag = 0

                    # Run temporal interpolation on the grids.
                    inputForcingMod[forceKey].temporal_interpolate_inputs(
                        ConfigOptions, MpiConfig)
                    errMod.check_program_status(ConfigOptions, MpiConfig)

                    # Run bias correction.
                    biasCorrectMod.run_bias_correction(
                        inputForcingMod[forceKey], ConfigOptions, wrfHydroGeoMeta, MpiConfig)
                    errMod.check_program_status(ConfigOptions, MpiConfig)

                    # Run downscaling on grids for this output timestep.
                    downscaleMod.run_downscaling(inputForcingMod[forceKey], ConfigOptions,
                                                 wrfHydroGeoMeta, MpiConfig)
                    errMod.check_program_status(ConfigOptions, MpiConfig)

                    # Layer in forcings from this product.
                    layeringMod.layer_final_forcings(OutputObj, inputForcingMod[forceKey],
                                                     ConfigOptions, MpiConfig)
                    errMod.check_program_status(ConfigOptions, MpiConfig)

                    ConfigOptions.currentForceNum = ConfigOptions.currentForceNum + 1

                    # NOTE(review): forceKey appears to be an integer product key here;
                    # 10 presumably identifies the "custom" forcing product — confirm
                    # against the input-forcing key table before relying on this.
                    if forceKey == 10:
                        ConfigOptions.currentCustomForceNum = ConfigOptions.currentCustomForceNum + 1

                # Process supplemental precipitation if we specified in the configuration file.
                if ConfigOptions.number_supp_pcp > 0:
                    for suppPcpKey in ConfigOptions.supp_precip_forcings:
                        # Like with input forcings, calculate the neighboring files to use.
                        suppPcpMod[suppPcpKey].calc_neighbor_files(
                            ConfigOptions, OutputObj.outDate, MpiConfig)
                        errMod.check_program_status(ConfigOptions, MpiConfig)

                        # Regrid the supplemental precipitation.
                        suppPcpMod[suppPcpKey].regrid_inputs(
                            ConfigOptions, wrfHydroGeoMeta, MpiConfig)
                        errMod.check_program_status(ConfigOptions, MpiConfig)

                        if suppPcpMod[suppPcpKey].regridded_precip1 is not None \
                                and suppPcpMod[suppPcpKey].regridded_precip2 is not None:
                            #if np.any(suppPcpMod[suppPcpKey].regridded_precip1) and \
                            #        np.any(suppPcpMod[suppPcpKey].regridded_precip2):
                            # Run check on regridded fields for reasonable values that are not missing values.
                            errMod.check_supp_pcp_bounds(
                                ConfigOptions, suppPcpMod[suppPcpKey], MpiConfig)
                            errMod.check_program_status(
                                ConfigOptions, MpiConfig)

                            # Run temporal interpolation on the grids.
                            suppPcpMod[suppPcpKey].temporal_interpolate_inputs(
                                ConfigOptions, MpiConfig)
                            errMod.check_program_status(
                                ConfigOptions, MpiConfig)

                            # Layer in the supplemental precipitation into the current output object.
                            layeringMod.layer_supplemental_precipitation(
                                OutputObj, suppPcpMod[suppPcpKey], ConfigOptions, MpiConfig)
                            errMod.check_program_status(
                                ConfigOptions, MpiConfig)

            # Call the output routines
            OutputObj.output_final_ldasin(ConfigOptions, wrfHydroGeoMeta, MpiConfig)
            errMod.check_program_status(ConfigOptions, MpiConfig)

        if MpiConfig.rank == 0:
            ConfigOptions.statusMsg = "Forcings complete for forecast cycle: " + \
                ConfigOptions.current_fcst_cycle.strftime('%Y-%m-%d %H:%M')
            errMod.log_msg(ConfigOptions, MpiConfig)
        errMod.check_program_status(ConfigOptions, MpiConfig)

        if MpiConfig.rank == 0:
            # Close the log file.
            try:
                errMod.close_log(ConfigOptions, MpiConfig)
            except:
                errMod.err_out_screen_para(ConfigOptions.errMsg, MpiConfig)

            # Success.... Now touch an empty complete file for this forecast cycle to indicate
            # completion in case the code is re-ran.
            try:
                open(completeFlag, 'a').close()
            except:
                ConfigOptions.errMsg = "Unable to create completion file: " + completeFlag
                errMod.log_critical(ConfigOptions, MpiConfig)
        errMod.check_program_status(ConfigOptions, MpiConfig)
def open_grib2(GribFileIn, NetCdfFileOut, Wgrib2Cmd, ConfigOptions, MpiConfig, inputVar):
    """
    Generic function to convert a GRIB2 file into a NetCDF file. Function
    will also open the NetCDF file, and ensure all necessary inputs are
    in file.

    Only rank 0 performs the conversion and open; all other ranks return None.

    :param GribFileIn: path to the source GRIB2 file (used in log messages only).
    :param NetCdfFileOut: path of the temporary NetCDF file wgrib2 will write.
    :param Wgrib2Cmd: complete wgrib2 shell command string to execute.
    :param ConfigOptions: configuration/state object used for logging and error messages.
    :param MpiConfig: MPI state; conversion happens on rank 0 only.
    :param inputVar: NetCDF variable name expected to exist in the converted file.
    :return: an open netCDF4 Dataset handle on rank 0 success; None otherwise.
    """
    # Run wgrib2 command to convert GRIB2 file to NetCDF.
    if MpiConfig.rank == 0:
        # Single-pass while(True): every failure path logs a critical error,
        # nulls the handle, and breaks out — matching the file-wide convention.
        while (True):
            ConfigOptions.statusMsg = "Reading in GRIB2 file: " + GribFileIn
            errMod.log_msg(ConfigOptions, MpiConfig)
            # If the temporary NetCDF file already exists, warn — wgrib2 is
            # expected to replace its contents (the file is NOT deleted here).
            if os.path.isfile(NetCdfFileOut):
                ConfigOptions.statusMsg = "Overriding temporary NetCDF file: " + NetCdfFileOut
                errMod.log_warning(ConfigOptions, MpiConfig)
            try:
                # SECURITY NOTE(review): shell=True executes Wgrib2Cmd through the
                # shell. The command string is built internally, but if any path
                # component can come from untrusted input this is injectable —
                # consider an argument list with shell=False.
                subprocess.run([Wgrib2Cmd], shell=True)
            except:
                ConfigOptions.errMsg = "Unable to convert: " + GribFileIn + " to " + \
                                       NetCdfFileOut
                errMod.log_critical(ConfigOptions, MpiConfig)
                idTmp = None
                break

            # Ensure file exists.
            if not os.path.isfile(NetCdfFileOut):
                ConfigOptions.errMsg = "Expected NetCDF file: " + NetCdfFileOut + \
                                       " not found. It's possible the GRIB2 variable was not found."
                errMod.log_critical(ConfigOptions, MpiConfig)
                idTmp = None
                break

            # Open the NetCDF file.
            try:
                idTmp = Dataset(NetCdfFileOut, 'r')
            except:
                ConfigOptions.errMsg = "Unable to open input NetCDF file: " + \
                                       NetCdfFileOut
                errMod.log_critical(ConfigOptions, MpiConfig)
                idTmp = None
                break

            if idTmp is not None:
                # Check for expected lat/lon variables.
                if 'latitude' not in idTmp.variables.keys():
                    ConfigOptions.errMsg = "Unable to locate latitude from: " + \
                                           GribFileIn
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    idTmp = None
                    break
            if idTmp is not None:
                # Fixed typo in error message ("Unable t locate" -> "Unable to locate").
                if 'longitude' not in idTmp.variables.keys():
                    ConfigOptions.errMsg = "Unable to locate longitude from: " + \
                                           GribFileIn
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    idTmp = None
                    break

            if idTmp is not None:
                # Verify the expected data variable made it through the conversion.
                if inputVar not in idTmp.variables.keys():
                    ConfigOptions.errMsg = "Unable to locate expected variable: " + \
                                           inputVar + " in: " + NetCdfFileOut
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    idTmp = None
                    break
            break
    else:
        idTmp = None

    # Return the NetCDF file handle back to the user.
    return idTmp
def nwm_monthly_PRISM_downscale(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig):
    """
    NCAR/OWP function for downscaling precipitation using monthly PRISM
    climatology in a mountain-mapper like fashion.

    Removed leftover debug print() statements that executed on every MPI rank
    and polluted stdout; all status reporting now goes through errMod logging.

    :param input_forcings: input-forcing object holding final_forcings and the
                           cached PRISM numerator/denominator grids.
    :param ConfigOptions: configuration/state object (dates, paths, error messages).
    :param GeoMetaWrfHydro: output-grid metadata (global nx/ny, scatter info).
    :param MpiConfig: MPI state; rank 0 reads parameter files, then scatters.
    :return: None; final_forcings[3, :, :] (precip rate, mm/s) is updated in place.
    """
    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Performing NWM Monthly PRISM Mountain Mapper " \
                                  "Downscaling of Precipitation"
        errMod.log_msg(ConfigOptions, MpiConfig)

    # Establish whether or not we need to read in new PRISM monthly climatology:
    # 1.) This is the first output timestep, and no grids have been initialized.
    # 2.) We have switched months from the last timestep. In this case, we need
    #     to re-initialize the grids for the current month.
    initializeFlag = False
    if input_forcings.nwmPRISM_denGrid is None and input_forcings.nwmPRISM_numGrid is None:
        # We are on situation 1 - This is the first output step.
        initializeFlag = True
    if ConfigOptions.current_output_date.month != ConfigOptions.prev_output_date.month:
        # We are on situation #2 - The month has changed so we need to reinitialize the
        # PRISM grids.
        initializeFlag = True

    if initializeFlag:
        # Single-pass while(True): failures log a critical error and break;
        # the collective check_program_status below aborts all ranks.
        while (True):
            # First reset the local PRISM grids to be safe.
            input_forcings.nwmPRISM_numGrid = None
            input_forcings.nwmPRISM_denGrid = None

            # Compose paths to the expected files ('%h' gives the abbreviated month name).
            numeratorPath = input_forcings.paramDir + "/PRISM_Precip_Clim_" + \
                ConfigOptions.current_output_date.strftime('%h') + '_NWM_Mtn_Mapper_Numer.nc'
            denominatorPath = input_forcings.paramDir + "/PRISM_Precip_Clim_" + \
                ConfigOptions.current_output_date.strftime('%h') + '_NWM_Mtn_Mapper_Denom.nc'

            # Make sure files exist.
            if not os.path.isfile(numeratorPath):
                ConfigOptions.errMsg = "Expected parameter file: " + numeratorPath + \
                                       " for mountain mapper downscaling of precipitation not found."
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            if not os.path.isfile(denominatorPath):
                ConfigOptions.errMsg = "Expected parameter file: " + denominatorPath + \
                                       " for mountain mapper downscaling of precipitation not found."
                errMod.log_critical(ConfigOptions, MpiConfig)
                break

            if MpiConfig.rank == 0:
                # Open the NetCDF parameter files. Check to make sure expected dimension
                # sizes are in place, along with variable names, etc.
                try:
                    idNum = Dataset(numeratorPath, 'r')
                except:
                    ConfigOptions.errMsg = "Unable to open parameter file: " + numeratorPath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break
                try:
                    idDenom = Dataset(denominatorPath, 'r')
                except:
                    ConfigOptions.errMsg = "Unable to open parameter file: " + denominatorPath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break

                # Check to make sure expected names, dimension sizes are present.
                if 'x' not in idNum.variables.keys():
                    ConfigOptions.errMsg = "Expected 'x' variable not found in parameter file: " + numeratorPath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break
                if 'x' not in idDenom.variables.keys():
                    ConfigOptions.errMsg = "Expected 'x' variable not found in parameter file: " + denominatorPath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break
                if 'y' not in idNum.variables.keys():
                    ConfigOptions.errMsg = "Expected 'y' variable not found in parameter file: " + numeratorPath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break
                if 'y' not in idDenom.variables.keys():
                    ConfigOptions.errMsg = "Expected 'y' variable not found in parameter file: " + denominatorPath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break
                if 'Data' not in idNum.variables.keys():
                    ConfigOptions.errMsg = "Expected 'Data' variable not found in parameter file: " + numeratorPath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break
                if 'Data' not in idDenom.variables.keys():
                    ConfigOptions.errMsg = "Expected 'Data' variable not found in parameter file: " + denominatorPath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break

                # Parameter grids must already be on the output WRF-Hydro grid.
                if idNum.variables['Data'].shape[0] != GeoMetaWrfHydro.ny_global:
                    ConfigOptions.errMsg = "Input Y dimension for: " + numeratorPath + \
                                           " does not match the output WRF-Hydro Y dimension size."
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break
                if idDenom.variables['Data'].shape[0] != GeoMetaWrfHydro.ny_global:
                    ConfigOptions.errMsg = "Input Y dimension for: " + denominatorPath + \
                                           " does not match the output WRF-Hydro Y dimension size."
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break
                if idNum.variables['Data'].shape[1] != GeoMetaWrfHydro.nx_global:
                    ConfigOptions.errMsg = "Input X dimension for: " + numeratorPath + \
                                           " does not match the output WRF-Hydro X dimension size."
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break
                if idDenom.variables['Data'].shape[1] != GeoMetaWrfHydro.nx_global:
                    ConfigOptions.errMsg = "Input X dimension for: " + denominatorPath + \
                                           " does not match the output WRF-Hydro X dimension size."
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break

                # Read in the PRISM grid on the output grid. Then scatter the array out to the processors.
                try:
                    numDataTmp = idNum.variables['Data'][:, :]
                except:
                    ConfigOptions.errMsg = "Unable to extract 'Data' from parameter file: " + numeratorPath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break
                try:
                    denDataTmp = idDenom.variables['Data'][:, :]
                except:
                    ConfigOptions.errMsg = "Unable to extract 'Data' from parameter file: " + denominatorPath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break

                # Close the parameter files.
                try:
                    idNum.close()
                except:
                    ConfigOptions.errMsg = "Unable to close parameter file: " + numeratorPath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break
                try:
                    idDenom.close()
                except:
                    ConfigOptions.errMsg = "Unable to close parameter file: " + denominatorPath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break
            else:
                numDataTmp = None
                denDataTmp = None

            break
        errMod.check_program_status(ConfigOptions, MpiConfig)

        # Scatter the array out to the local processors
        input_forcings.nwmPRISM_numGrid = MpiConfig.scatter_array(
            GeoMetaWrfHydro, numDataTmp, ConfigOptions)
        errMod.check_program_status(ConfigOptions, MpiConfig)
        input_forcings.nwmPRISM_denGrid = MpiConfig.scatter_array(
            GeoMetaWrfHydro, denDataTmp, ConfigOptions)
        errMod.check_program_status(ConfigOptions, MpiConfig)

    # Create temporary grids from the local slabs of params/precip forcings.
    # NOTE(review): index 3 of final_forcings is presumably the precip-rate slot — confirm
    # against the forcing layout table.
    localRainRate = input_forcings.final_forcings[3, :, :]
    numLocal = input_forcings.nwmPRISM_numGrid[:, :]
    denLocal = input_forcings.nwmPRISM_denGrid[:, :]

    # Establish index of where we have valid data.
    try:
        indValid = np.where((localRainRate > 0.0) & (denLocal > 0.0) & (numLocal > 0.0))
    except:
        ConfigOptions.errMsg = "Unable to run numpy search for valid values on precip and " \
                               "param grid in mountain mapper downscaling"
        errMod.log_critical(ConfigOptions, MpiConfig)
    errMod.check_program_status(ConfigOptions, MpiConfig)

    # Convert precipitation rate, which is mm/s to mm, which is needed to run the PRISM downscaling.
    try:
        localRainRate[indValid] = localRainRate[indValid] * 3600.0
    except:
        ConfigOptions.errMsg = "Unable to convert temporary precip rate from mm/s to mm."
        errMod.log_critical(ConfigOptions, MpiConfig)
    errMod.check_program_status(ConfigOptions, MpiConfig)

    try:
        localRainRate[indValid] = localRainRate[indValid] * numLocal[indValid]
    except:
        ConfigOptions.errMsg = "Unable to multiply precip by numerator in mountain mapper downscaling"
        errMod.log_critical(ConfigOptions, MpiConfig)
    errMod.check_program_status(ConfigOptions, MpiConfig)

    try:
        localRainRate[indValid] = localRainRate[indValid] / denLocal[indValid]
    except:
        ConfigOptions.errMsg = "Unable to divide precip by denominator in mountain mapper downscaling"
        errMod.log_critical(ConfigOptions, MpiConfig)
    errMod.check_program_status(ConfigOptions, MpiConfig)

    # Convert local precip back to a rate (mm/s)
    try:
        localRainRate[indValid] = localRainRate[indValid] / 3600.0
    except:
        ConfigOptions.errMsg = "Unable to convert temporary precip rate from mm to mm/s."
        errMod.log_critical(ConfigOptions, MpiConfig)
    errMod.check_program_status(ConfigOptions, MpiConfig)

    input_forcings.final_forcings[3, :, :] = localRainRate

    # Reset variables for memory efficiency
    idDenom = None
    idNum = None
    localRainRate = None
    numLocal = None
    denLocal = None
def param_lapse(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig):
    """
    Function that applies an a priori lapse rate adjustment to modeled 2-meter
    temperature by taking the difference of the native input elevation and the
    WRF-Hydro elevation. It is assumed this lapse rate grid has already been
    regridded to the final output WRF-Hydro grid.

    Fixed typos in log/error messages ("aprior", "extracte ... parameter: file:",
    "anomolous") and in the docstring parameter name.

    :param input_forcings: input-forcing object (height grid, lapseGrid cache,
                           final_forcings slab).
    :param ConfigOptions: configuration/state object (globalNdv, error messages).
    :param GeoMetaWrfHydro: output-grid metadata (height, global nx/ny).
    :param MpiConfig: MPI state; rank 0 reads the parameter file, then scatters.
    :return: None; final_forcings[4, :, :] (2-m temperature) is updated in place.
    """
    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Applying a priori lapse rate grid to temperature downscaling"
        errMod.log_msg(ConfigOptions, MpiConfig)

    # Calculate the elevation difference.
    elevDiff = input_forcings.height - GeoMetaWrfHydro.height

    if not np.any(input_forcings.lapseGrid):
        # We have not read in our lapse rate file. Read it in, do extensive checks,
        # scatter the lapse rate grid out to individual processors, then apply the
        # lapse rate to the 2-meter temperature grid.
        if MpiConfig.rank == 0:
            # Single-pass while(True): failures log a critical error and break;
            # the collective check below aborts all ranks.
            while (True):
                # First ensure we have a parameter directory
                if input_forcings.paramDir == "NONE":
                    ConfigOptions.errMsg = "User has specified spatial temperature lapse rate " \
                                           "downscaling while no downscaling parameter directory " \
                                           "exists."
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break

                # Compose the path to the lapse rate grid file.
                lapsePath = input_forcings.paramDir + "/lapse_param.nc"
                if not os.path.isfile(lapsePath):
                    ConfigOptions.errMsg = "Expected lapse rate parameter file: " + \
                                           lapsePath + " does not exist."
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break

                # Open the lapse rate file. Check for the expected variable, along with
                # the dimension size to make sure everything matches up.
                try:
                    idTmp = Dataset(lapsePath, 'r')
                except:
                    ConfigOptions.errMsg = "Unable to open parameter file: " + lapsePath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break

                if 'lapse' not in idTmp.variables.keys():
                    ConfigOptions.errMsg = "Expected 'lapse' variable not located in parameter " \
                                           "file: " + lapsePath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break

                try:
                    lapseTmp = idTmp.variables['lapse'][:, :]
                except:
                    ConfigOptions.errMsg = "Unable to extract 'lapse' variable from parameter " \
                                           "file: " + lapsePath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break

                # Check dimensions to ensure they match up to the output grid.
                if lapseTmp.shape[1] != GeoMetaWrfHydro.nx_global:
                    ConfigOptions.errMsg = "X-Dimension size mismatch between output grid and lapse " \
                                           "rate from parameter file: " + lapsePath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break
                if lapseTmp.shape[0] != GeoMetaWrfHydro.ny_global:
                    ConfigOptions.errMsg = "Y-Dimension size mismatch between output grid and lapse " \
                                           "rate from parameter file: " + lapsePath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break

                # Perform a quick search to ensure we don't have radical values.
                indTmp = np.where(lapseTmp < -10.0)
                if len(indTmp[0]) > 0:
                    ConfigOptions.errMsg = "Found anomalous negative values in the lapse rate grid from " \
                                           "parameter file: " + lapsePath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break
                indTmp = np.where(lapseTmp > 100.0)
                if len(indTmp[0]) > 0:
                    ConfigOptions.errMsg = "Found excessively high values in the lapse rate grid from " \
                                           "parameter file: " + lapsePath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break

                # Close the parameter lapse rate file.
                try:
                    idTmp.close()
                except:
                    ConfigOptions.errMsg = "Unable to close parameter file: " + lapsePath
                    errMod.log_critical(ConfigOptions, MpiConfig)
                    break

                break
        else:
            lapseTmp = None
        errMod.check_program_status(ConfigOptions, MpiConfig)

        # Scatter the lapse rate grid to the other processors.
        input_forcings.lapseGrid = MpiConfig.scatter_array(
            GeoMetaWrfHydro, lapseTmp, ConfigOptions)
        errMod.check_program_status(ConfigOptions, MpiConfig)

    # Apply the local lapse rate grid to our local slab of 2-meter temperature data.
    # NOTE(review): index 4 of final_forcings is presumably the 2-m temperature
    # slot — confirm against the forcing layout table.
    temperature_grid_tmp = input_forcings.final_forcings[4, :, :]
    try:
        indNdv = np.where(
            input_forcings.final_forcings == ConfigOptions.globalNdv)
    except:
        ConfigOptions.errMsg = "Unable to perform NDV search on input " + \
                               input_forcings.productName + " regridded forcings."
        errMod.log_critical(ConfigOptions, MpiConfig)
        return
    try:
        indValid = np.where(temperature_grid_tmp != ConfigOptions.globalNdv)
    except:
        ConfigOptions.errMsg = "Unable to perform search for valid values on input " + \
                               input_forcings.productName + " regridded temperature forcings."
        errMod.log_critical(ConfigOptions, MpiConfig)
        return
    try:
        # lapseGrid is in units per km; elevDiff is in meters, hence /1000.0.
        temperature_grid_tmp[indValid] = temperature_grid_tmp[indValid] + \
            ((input_forcings.lapseGrid[indValid]/1000.0) * elevDiff[indValid])
    except:
        ConfigOptions.errMsg = "Unable to apply spatial lapse rate values to input " + \
                               input_forcings.productName + " regridded temperature forcings."
        errMod.log_critical(ConfigOptions, MpiConfig)
        return

    input_forcings.final_forcings[4, :, :] = temperature_grid_tmp
    # Restore missing values wherever the forcings were NDV before adjustment.
    input_forcings.final_forcings[indNdv] = ConfigOptions.globalNdv

    # Reset for memory efficiency
    indTmp = None
    indNdv = None
    indValid = None
    elevDiff = None
    temperature_grid_tmp = None
def _read_rqi_param_grid(ConfigOptions, MpiConfig, supplemental_precip, GeoMetaWrfHydro, rqiPath):
    """
    Read one monthly RQI climatology grid on rank 0, scatter it to all
    processors, and store it into supplemental_precip.regridded_rqi2.

    Replaces the previous copy-pasted logic in which failure branches ended in
    `pass`: execution fell through to the next statement (e.g. Dataset() on a
    missing file, or idTmp.close() on an undefined handle) and crashed before
    the collective error check. Failures now log a critical error and break,
    matching the file-wide while(True)/break convention, and the collective
    errMod.check_program_status calls abort all ranks cleanly.

    :param ConfigOptions: configuration/state object (error/status messages).
    :param MpiConfig: MPI state; only rank 0 reads the file.
    :param supplemental_precip: supplemental precip object; regridded_rqi2 is updated in place.
    :param GeoMetaWrfHydro: output-grid metadata (global nx/ny, scatter info).
    :param rqiPath: path to the monthly RQI NetCDF parameter file.
    :return: None
    """
    idTmp = None
    varTmp = None
    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Reading in RQI Parameter File: " + rqiPath
        errMod.log_msg(ConfigOptions, MpiConfig)
        while (True):
            # First make sure the RQI file exists.
            if not os.path.isfile(rqiPath):
                ConfigOptions.errMsg = "Expected RQI parameter file: " + rqiPath + " not found."
                errMod.log_critical(ConfigOptions, MpiConfig)
                break

            # Open the Parameter file.
            try:
                idTmp = Dataset(rqiPath, 'r')
            except:
                ConfigOptions.errMsg = "Unable to open parameter file: " + rqiPath
                # Previously this failure was never logged; log it like the others.
                errMod.log_critical(ConfigOptions, MpiConfig)
                break

            # Extract out the RQI grid.
            try:
                varTmp = idTmp.variables['POP_0mabovemeansealevel'][0, :, :]
            except:
                ConfigOptions.errMsg = "Unable to extract POP_0mabovemeansealevel from parameter file: " + rqiPath
                errMod.log_critical(ConfigOptions, MpiConfig)
                break

            # Sanity checking on grid size.
            if varTmp.shape[0] != GeoMetaWrfHydro.ny_global or \
                    varTmp.shape[1] != GeoMetaWrfHydro.nx_global:
                ConfigOptions.errMsg = "Improper dimension sizes for POP_0mabovemeansealevel " \
                                       "in parameter file: " + rqiPath
                errMod.log_critical(ConfigOptions, MpiConfig)
                break
            break
    errMod.check_program_status(ConfigOptions, MpiConfig)

    # Scatter the array out to the local processors (collective call; non-zero
    # ranks contribute varTmp = None).
    varSubTmp = MpiConfig.scatter_array(GeoMetaWrfHydro, varTmp, ConfigOptions)
    errMod.check_program_status(ConfigOptions, MpiConfig)

    supplemental_precip.regridded_rqi2[:, :] = varSubTmp

    # Reset variables for memory purposes
    varSubTmp = None
    varTmp = None

    # Close the RQI NetCDF file (rank 0 only; guard against a failed open).
    if MpiConfig.rank == 0 and idTmp is not None:
        try:
            idTmp.close()
        except:
            ConfigOptions.errMsg = "Unable to close parameter file: " + rqiPath
            errMod.log_critical(ConfigOptions, MpiConfig)
    errMod.check_program_status(ConfigOptions, MpiConfig)


def read_rqi_monthly_climo(ConfigOptions, MpiConfig, supplemental_precip, GeoMetaWrfHydro):
    """
    Function to read in monthly RQI grids on the NWM grid. This is an NWM ONLY
    option. Please do not activate if not executing on the NWM conus grid.

    :param ConfigOptions: configuration/state object.
    :param MpiConfig: MPI state.
    :param supplemental_precip: supplemental precip object holding regridded_rqi2
                                and the previous/next MRMS timestamps.
    :param GeoMetaWrfHydro: output-grid metadata.
    :return: None
    """
    # Ensure all processors are synced up before proceeding.
    MpiConfig.comm.barrier()

    # First check to see if the RQI grids have valid values in them. There should
    # be NO NDV values if the grids have properly been read in.
    indTmp = np.where(
        supplemental_precip.regridded_rqi2 != ConfigOptions.globalNdv)

    rqiPath = ConfigOptions.supp_precip_param_dir + "/MRMS_WGT_RQI0.9_m" + \
        supplemental_precip.pcp_date2.strftime('%m') + '_v1.1_geosmth.nc'

    if len(indTmp[0]) == 0:
        # We haven't initialized the RQI fields. We need to do this.....
        _read_rqi_param_grid(ConfigOptions, MpiConfig, supplemental_precip,
                             GeoMetaWrfHydro, rqiPath)

    # Also check to see if we have switched to a new month based on the previous
    # MRMS step and the current one.
    if supplemental_precip.pcp_date2.month != supplemental_precip.pcp_date1.month:
        # We need to read in a new RQI monthly grid.
        _read_rqi_param_grid(ConfigOptions, MpiConfig, supplemental_precip,
                             GeoMetaWrfHydro, rqiPath)