Example #1
0
def unzip_file(GzFileIn, FileOut, ConfigOptions, MpiConfig):
    """
    Generic I/O function to unzip a .gz file to a new location.
    Only MPI rank 0 performs the unzip; all other ranks return immediately.
    :param GzFileIn: Path to the input gzip-compressed file.
    :param FileOut: Path where the uncompressed file will be written.
    :param ConfigOptions: Configuration object used for status/error logging.
    :param MpiConfig: MPI configuration object (provides the process rank).
    :return: None
    """
    # Ensure all processors are synced up before outputting.
    #MpiConfig.comm.barrier()

    # Non-root ranks have no work to do here.
    if MpiConfig.rank != 0:
        return

    # Unzip the file in place.
    try:
        ConfigOptions.statusMsg = "Unzipping file: {}".format(GzFileIn)
        err_handler.log_msg(ConfigOptions, MpiConfig)
        # Stream the decompressed bytes so the whole file is never held in memory.
        with gzip.open(GzFileIn, 'rb') as fTmpGz:
            with open(FileOut, 'wb') as fTmp:
                shutil.copyfileobj(fTmpGz, fTmp)
    except Exception:
        # Narrowed from a bare except, which would also swallow
        # KeyboardInterrupt/SystemExit and block clean shutdown.
        ConfigOptions.errMsg = "Unable to unzip: " + GzFileIn
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return

    # Sanity check that the expected output file actually materialized.
    if not os.path.isfile(FileOut):
        ConfigOptions.errMsg = "Unable to locate expected unzipped file: " + \
                               FileOut
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return
def ncar_topo_adj(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig):
    """
    Topographic adjustment of incoming shortwave radiation fluxes,
    given input parameters.
    :param input_forcings: Input forcings object holding the final forcing grids.
    :param ConfigOptions: Configuration object with datetime info and missing value.
    :param GeoMetaWrfHydro: WRF-Hydro geospatial metadata (terrain slope/aspect inputs).
    :param MpiConfig: MPI configuration object (provides the process rank).
    :return: None
    """
    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Performing topographic adjustment to incoming " \
                                  "shortwave radiation flux."
        err_handler.log_msg(ConfigOptions, MpiConfig)

    # Establish where we have missing values so they can be restored after adjustment.
    try:
        indNdv = np.where(
            input_forcings.final_forcings == ConfigOptions.globalNdv)
    except Exception:
        # Narrowed from a bare except (would also swallow KeyboardInterrupt/SystemExit).
        ConfigOptions.errMsg = "Unable to perform NDV search on input forcings"
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return

    # By the time this function has been called, necessary input static grids (height, slope, etc),
    # should have been calculated for each local slab of data.
    DEGRAD = math.pi / 180.0   # degrees -> radians conversion factor
    DPD = 360.0 / 365.0        # degrees of solar orbit per day
    try:
        # Solar declination and solar constant for the current datetime.
        DECLIN, SOLCON = radconst(ConfigOptions)
    except Exception:
        ConfigOptions.errMsg = "Unable to calculate solar constants based on datetime information."
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return

    try:
        # Cosine of the solar zenith angle and hour angle on the local slab.
        coszen_loc, hrang_loc = calc_coszen(ConfigOptions, DECLIN,
                                            GeoMetaWrfHydro)
    except Exception:
        ConfigOptions.errMsg = "Unable to calculate COSZEN or HRANG variables for topographic adjustment " \
                               "of incoming shortwave radiation"
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return

    try:
        # Adjusts input_forcings.final_forcings in place.
        TOPO_RAD_ADJ_DRVR(GeoMetaWrfHydro, input_forcings, coszen_loc, DECLIN,
                          SOLCON, hrang_loc)
    except Exception:
        ConfigOptions.errMsg = "Unable to perform final topographic adjustment of incoming " \
                               "shortwave radiation fluxes."
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return

    # Assign missing values based on our mask.
    input_forcings.final_forcings[indNdv] = ConfigOptions.globalNdv

    # Reset variables to free up memory
    DECLIN = None
    SOLCON = None
    coszen_loc = None
    hrang_loc = None
    indNdv = None
def weighted_average(input_forcings, ConfigOptions, MpiConfig):
    """
    Set the current output regridded fields to a weighted average of the
    previous and next input forecast output steps.
    :param input_forcings: Input forcings object holding the bounding regridded grids.
    :param ConfigOptions: Configuration object (current output date, missing value).
    :param MpiConfig: MPI configuration object (provides the process rank).
    :return: None
    """
    # Without both bounding grids we cannot interpolate - flag all output missing.
    for bounding in (input_forcings.regridded_forcings2, input_forcings.regridded_forcings1):
        if bounding is None:
            input_forcings.final_forcings[:, :, :] = ConfigOptions.globalNdv
            return

    # CFSv2 with NWM bias correction performs temporal interpolation internally,
    # so skip this routine entirely in that configuration.
    if ConfigOptions.runCfsNldasBiasCorrect and input_forcings.productName == "CFSv2_6Hr_Global_GRIB2":
        if MpiConfig.rank == 0:
            ConfigOptions.statusMsg = "Bypassing temporal interpolation routine due to NWM bias correction for CFSv2"
            err_handler.log_msg(ConfigOptions, MpiConfig)
        return

    # Interpolation weights: each weight is one minus the fraction of the
    # input forecast window separating the current output time from that
    # bounding forecast step.
    windowSeconds = input_forcings.outFreq * 60.0
    offsetPrev = ConfigOptions.current_output_date - input_forcings.fcst_date1
    offsetNext = ConfigOptions.current_output_date - input_forcings.fcst_date2
    wPrev = 1 - abs(offsetPrev.total_seconds()) / windowSeconds
    wNext = 1 - abs(offsetNext.total_seconds()) / windowSeconds

    # Record missing-data locations in each bounding grid before blending.
    missingPrev = np.where(input_forcings.regridded_forcings1 == ConfigOptions.globalNdv)
    missingNext = np.where(input_forcings.regridded_forcings2 == ConfigOptions.globalNdv)

    # Blend the bounding grids into the final output.
    input_forcings.final_forcings[:, :, :] = \
        input_forcings.regridded_forcings1[:, :, :] * wPrev + \
        input_forcings.regridded_forcings2[:, :, :] * wNext

    # A cell missing in either window is missing in the blended output.
    input_forcings.final_forcings[missingPrev] = ConfigOptions.globalNdv
    input_forcings.final_forcings[missingNext] = ConfigOptions.globalNdv

    # Drop the index arrays for memory efficiency.
    missingPrev = None
    missingNext = None
def simple_lapse(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig):
    """
    Function that applies a single lapse rate adjustment to modeled
    2-meter temperature by taking the difference of the native
    input elevation and the WRF-hydro elevation.
    :param input_forcings: Input forcings object holding terrain height and forcing grids.
    :param ConfigOptions: Configuration object used for logging and missing value.
    :param GeoMetaWrfHydro: WRF-Hydro geospatial metadata (provides the target height grid).
    :param MpiConfig: MPI configuration object (provides the process rank).
    :return: None
    """
    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Applying simple lapse rate to temperature downscaling"
        err_handler.log_msg(ConfigOptions, MpiConfig)

    # Downscaling requires the native terrain height of the input product.
    if input_forcings.height is None:
        ConfigOptions.errMsg = "Unable to perform downscaling without terrain height input"
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return

    # Calculate the elevation difference (input grid minus WRF-Hydro grid).
    elevDiff = input_forcings.height - GeoMetaWrfHydro.height

    # Assign existing, un-downscaled temperatures to a temporary placeholder, which
    # will be used for specific humidity downscaling.
    if input_forcings.q2dDownscaleOpt > 0:
        input_forcings.t2dTmp[:, :] = input_forcings.final_forcings[4, :, :]

    # Apply single lapse rate value to the input 2-meter
    # temperature values (slab index 4).
    try:
        indNdv = np.where(
            input_forcings.final_forcings == ConfigOptions.globalNdv)
    except Exception:
        # Narrowed from a bare except (would also swallow KeyboardInterrupt/SystemExit).
        ConfigOptions.errMsg = "Unable to perform NDV search on input forcings"
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return
    try:
        # 6.49 K per 1000 m: standard-atmosphere environmental lapse rate.
        input_forcings.final_forcings[4,:,:] = input_forcings.final_forcings[4,:,:] + \
                                               (6.49/1000.0)*elevDiff
    except Exception:
        ConfigOptions.errMsg = "Unable to apply lapse rate to input 2-meter temperatures."
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return

    # Restore missing values at the previously identified NDV locations.
    input_forcings.final_forcings[indNdv] = ConfigOptions.globalNdv

    # Reset for memory efficiency
    indNdv = None
def q2_down_classic(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig):
    """
    NCAR function for downscaling 2-meter specific humidity using already downscaled
    2-meter temperature, unadjusted surface pressure, and downscaled surface
    pressure.
    :param input_forcings: Input forcings object holding the forcing grids.
    :param ConfigOptions: Configuration object used for logging and missing value.
    :param GeoMetaWrfHydro: WRF-Hydro geospatial metadata (unused here; kept for a
                            consistent downscaling-function signature).
    :param MpiConfig: MPI configuration object (provides the process rank).
    :return: None
    """
    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Performing topographic adjustment to specific humidity."
        err_handler.log_msg(ConfigOptions, MpiConfig)

    # Establish where we have missing values so they can be restored afterwards.
    try:
        indNdv = np.where(
            input_forcings.final_forcings == ConfigOptions.globalNdv)
    except Exception:
        # Narrowed from a bare except (would also swallow KeyboardInterrupt/SystemExit).
        ConfigOptions.errMsg = "Unable to perform NDV search on input forcings"
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return

    # First calculate relative humidity given original surface pressure and 2-meter
    # temperature
    try:
        relHum = rel_hum(input_forcings, ConfigOptions)
    except Exception:
        ConfigOptions.errMsg = "Unable to perform topographic downscaling of incoming " \
                               "specific humidity to relative humidity"
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return

    # Downscale 2-meter specific humidity
    try:
        q2Tmp = mixhum_ptrh(input_forcings, relHum, 2, ConfigOptions)
    except Exception:
        ConfigOptions.errMsg = "Unable to perform topographic downscaling of " \
                               "incoming specific humidity"
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return
    # Slab index 5 holds the 2-meter specific humidity field.
    input_forcings.final_forcings[5, :, :] = q2Tmp
    input_forcings.final_forcings[indNdv] = ConfigOptions.globalNdv
    # Reset for memory efficiency.
    q2Tmp = None
    indNdv = None
def pressure_down_classic(input_forcings, ConfigOptions, GeoMetaWrfHydro,
                          MpiConfig):
    """
    Generic function to downscale surface pressure to the WRF-Hydro domain.
    :param input_forcings: Input forcings object holding terrain height and forcing grids.
    :param ConfigOptions: Configuration object used for logging and missing value.
    :param GeoMetaWrfHydro: WRF-Hydro geospatial metadata (provides the target height grid).
    :param MpiConfig: MPI configuration object (provides the process rank).
    :return: None
    """
    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Performing topographic adjustment to surface pressure."
        err_handler.log_msg(ConfigOptions, MpiConfig)

    # Downscaling requires the native terrain height of the input product.
    if input_forcings.height is None:
        ConfigOptions.errMsg = "Unable to perform downscaling without terrain height input"
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return
    # Calculate the elevation difference (input grid minus WRF-Hydro grid).
    elevDiff = input_forcings.height - GeoMetaWrfHydro.height

    # Assign existing, un-downscaled pressure values to a temporary placeholder, which
    # will be used for specific humidity downscaling.
    if input_forcings.q2dDownscaleOpt > 0:
        input_forcings.psfcTmp[:, :] = input_forcings.final_forcings[6, :, :]

    try:
        indNdv = np.where(
            input_forcings.final_forcings == ConfigOptions.globalNdv)
    except Exception:
        # Narrowed from a bare except (would also swallow KeyboardInterrupt/SystemExit).
        ConfigOptions.errMsg = "Unable to perform NDV search on input forcings"
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return
    try:
        # Hydrostatic adjustment: slab 6 is surface pressure, slab 4 is 2-m
        # temperature; 9.8 m/s^2 gravity, 287.05 J/(kg K) dry-air gas constant.
        input_forcings.final_forcings[6,:,:] = input_forcings.final_forcings[6,:,:] +\
                                               (input_forcings.final_forcings[6,:,:]*elevDiff*9.8)/\
                                               (input_forcings.final_forcings[4,:,:]*287.05)
    except Exception:
        ConfigOptions.errMsg = "Unable to downscale surface pressure to input forcings."
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return

    # Restore missing values at the previously identified NDV locations.
    input_forcings.final_forcings[indNdv] = ConfigOptions.globalNdv

    # Reset for memory efficiency
    indNdv = None
def nearest_neighbor(input_forcings, ConfigOptions, MpiConfig):
    """
    Set the current output regridded forcings to whichever input forecast
    step (previous or next) is nearest in time to the output timestep.
    :param input_forcings: Input forcings object holding the bounding regridded grids.
    :param ConfigOptions: Configuration object (current output date, missing value).
    :param MpiConfig: MPI configuration object (provides the process rank).
    :return: None
    """
    # CFSv2 with NWM bias correction performs temporal interpolation internally,
    # so skip this routine entirely in that configuration.
    if ConfigOptions.runCfsNldasBiasCorrect and input_forcings.productName == "CFSv2_6Hr_Global_GRIB2":
        if MpiConfig.rank == 0:
            ConfigOptions.statusMsg = "Bypassing temporal interpolation routine due to NWM bias correction for CFSv2"
            err_handler.log_msg(ConfigOptions, MpiConfig)
        return

    # Absolute time offsets from the current output timestep to the previous
    # and next input forecast output steps.
    secsFromPrevious = abs((ConfigOptions.current_output_date -
                            input_forcings.fcst_date1).total_seconds())
    secsFromNext = abs((ConfigOptions.current_output_date -
                        input_forcings.fcst_date2).total_seconds())

    # Pick the closer bounding step; ties go to the next forecast step.
    if secsFromNext <= secsFromPrevious:
        chosen = input_forcings.regridded_forcings2
    else:
        chosen = input_forcings.regridded_forcings1

    if chosen is None:
        # No regridded data available for the chosen step - flag output as missing.
        input_forcings.final_forcings[:, :, :] = ConfigOptions.globalNdv
    else:
        input_forcings.final_forcings[:, :, :] = chosen[:, :, :]
Example #8
0
def process_forecasts(ConfigOptions, wrfHydroGeoMeta, inputForcingMod, suppPcpMod, MpiConfig, OutputObj):
    """
    Main calling module for running realtime forecasts and re-forecasts.

    Loops over every forecast cycle and, within each cycle, every output
    timestep: regrids / temporally interpolates / bias-corrects / downscales
    each configured input forcing, layers in supplemental precipitation, and
    writes the final LDASIN output file. A 'WrfHydroForcing.COMPLETE' flag
    file marks finished cycles so re-runs can skip them.

    :param ConfigOptions: Configuration object (cycle dates, frequencies,
                          directories, logging state).
    :param wrfHydroGeoMeta: WRF-Hydro geospatial metadata for the output domain.
    :param suppPcpMod: Mapping of supplemental-precipitation keys to their
                       processing objects.
    :param MpiConfig: MPI configuration object (provides the process rank).
    :param OutputObj: Output object holding the local output grids and paths.
    :param inputForcingMod: Mapping of forcing keys to input-forcing objects.
    :return: None
    """
    # Loop through each WRF-Hydro forecast cycle being processed. Within
    # each cycle, perform the following tasks:
    # 1.) Loop over each output frequency
    # 2.) Determine the input forcing cycle dates (both before and after)
    #     for temporal interpolation, downscaling, and bias correction reasons.
    # 3.) If the input forcings haven't been opened and read into memory,
    #     open them.
    # 4.) Check to see if the ESMF objects for input forcings have been
    #     created. If not, create them, including the regridding object.
    # 5.) Regrid forcing grids for input cycle dates surrounding the
    #     current output timestep if they haven't been regridded.
    # 6.) Perform bias correction and/or downscaling.
    # 7.) Output final grids to LDASIN NetCDF files with associated
    #     WRF-Hydro geospatial metadata to the final output directories.
    # Throughout this entire process, log progress being made into LOG
    # files. Once a forecast cycle is complete, we will touch an empty
    # 'WrfHydroForcing.COMPLETE' flag in the directory. This will be
    # checked upon the beginning of this program to see if we
    # need to process any files.

    for fcstCycleNum in range(ConfigOptions.nFcsts):
        # Cycle start time: beginning of processing window plus N cycle frequencies.
        ConfigOptions.current_fcst_cycle = ConfigOptions.b_date_proc + datetime.timedelta(
            seconds=ConfigOptions.fcst_freq * 60 * fcstCycleNum)
        if ConfigOptions.first_fcst_cycle is None:
            ConfigOptions.first_fcst_cycle = ConfigOptions.current_fcst_cycle

        # AnA (analysis) runs are keyed on the end date; forecasts on the cycle date.
        if ConfigOptions.ana_flag:
            fcstCycleOutDir = ConfigOptions.output_dir + "/" + ConfigOptions.e_date_proc.strftime('%Y%m%d%H')
        else:
            fcstCycleOutDir = ConfigOptions.output_dir + "/" + ConfigOptions.current_fcst_cycle.strftime('%Y%m%d%H')

        # put all AnA output in the same directory
        if ConfigOptions.ana_flag:
            if ConfigOptions.ana_out_dir is None:
                ConfigOptions.ana_out_dir = fcstCycleOutDir
            fcstCycleOutDir = ConfigOptions.ana_out_dir

        # completeFlag = ConfigOptions.scratch_dir + "/WrfHydroForcing.COMPLETE"
        completeFlag = fcstCycleOutDir + "/WrfHydroForcing.COMPLETE"
        if os.path.isfile(completeFlag):
            ConfigOptions.statusMsg = "Forecast Cycle: " + \
                                      ConfigOptions.current_fcst_cycle.strftime('%Y-%m-%d %H:%M') + \
                                      " has already completed."
            err_handler.log_msg(ConfigOptions, MpiConfig)
            # We have already completed processing this cycle,
            # move on.
            continue

        # AnA runs reuse one log file across cycles; forecasts open one per cycle.
        if (not ConfigOptions.ana_flag) or (ConfigOptions.logFile is None):
            if MpiConfig.rank == 0:
                # If the cycle directory doesn't exist, create it.
                if not os.path.isdir(fcstCycleOutDir):
                    try:
                        os.mkdir(fcstCycleOutDir)
                    except:
                        ConfigOptions.errMsg = "Unable to create output " \
                                               "directory: " + fcstCycleOutDir
                        err_handler.err_out_screen_para(ConfigOptions.errMsg, MpiConfig)
            err_handler.check_program_status(ConfigOptions, MpiConfig)

            # Compose a path to a log file, which will contain information
            # about this forecast cycle.
            # ConfigOptions.logFile = ConfigOptions.output_dir + "/LOG_" + \

            if ConfigOptions.ana_flag:
                log_time = ConfigOptions.e_date_proc
            else:
                log_time = ConfigOptions.current_fcst_cycle

            ConfigOptions.logFile = ConfigOptions.scratch_dir + "/LOG_" + ConfigOptions.nwmConfig + \
                                    ConfigOptions.d_program_init.strftime('%Y%m%d%H%M') + \
                                    "_" + log_time.strftime('%Y%m%d%H%M')

            # Initialize the log file.
            try:
                err_handler.init_log(ConfigOptions, MpiConfig)
            except:
                err_handler.err_out_screen_para(ConfigOptions.errMsg, MpiConfig)
            err_handler.check_program_status(ConfigOptions, MpiConfig)

        # Log information about this forecast cycle
        if MpiConfig.rank == 0:
            ConfigOptions.statusMsg = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
            err_handler.log_msg(ConfigOptions, MpiConfig)
            ConfigOptions.statusMsg = 'Processing Forecast Cycle: ' + \
                                      ConfigOptions.current_fcst_cycle.strftime('%Y-%m-%d %H:%M')
            err_handler.log_msg(ConfigOptions, MpiConfig)
            ConfigOptions.statusMsg = 'Forecast Cycle Length is: ' + \
                                      str(ConfigOptions.cycle_length_minutes) + " minutes"
            err_handler.log_msg(ConfigOptions, MpiConfig)
        # MpiConfig.comm.barrier()

        # Loop through each output timestep. Perform the following functions:
        # 1.) Calculate all necessary input files per user options.
        # 2.) Read in input forcings from GRIB/NetCDF files.
        # 3.) Regrid the forcings, and temporally interpolate.
        # 4.) Downscale.
        # 5.) Layer, and output as necessary.
        # NOTE(review): ana_factor shifts the "first output step" test below —
        # 0 for AnA runs, 1 otherwise.
        ana_factor = 1 if ConfigOptions.ana_flag is False else 0
        for outStep in range(1, ConfigOptions.num_output_steps + 1):
            # Reset out final grids to missing values.
            OutputObj.output_local[:, :, :] = -9999.0

            ConfigOptions.current_output_step = outStep
            OutputObj.outDate = ConfigOptions.current_fcst_cycle + datetime.timedelta(
                    seconds=ConfigOptions.output_freq * 60 * outStep
            )
            ConfigOptions.current_output_date = OutputObj.outDate

            # if AnA, adjust file date for analysis vs forecast
            if ConfigOptions.ana_flag:
                file_date = OutputObj.outDate - datetime.timedelta(seconds=ConfigOptions.output_freq * 60)
            else:
                file_date = OutputObj.outDate

            # Calculate the previous output timestep. This is used in potential downscaling routines.
            if outStep == ana_factor:
                # First step of the run has no prior step to reference.
                ConfigOptions.prev_output_date = ConfigOptions.current_output_date
            else:
                ConfigOptions.prev_output_date = ConfigOptions.current_output_date - datetime.timedelta(
                        seconds=ConfigOptions.output_freq * 60
                )
            if MpiConfig.rank == 0:
                ConfigOptions.statusMsg = '========================================='
                err_handler.log_msg(ConfigOptions, MpiConfig)
                ConfigOptions.statusMsg = "Processing for output timestep: " + \
                                          file_date.strftime('%Y-%m-%d %H:%M')
                err_handler.log_msg(ConfigOptions, MpiConfig)
            # MpiConfig.comm.barrier()

            # Compose the expected path to the output file. Check to see if the file exists,
            # if so, continue to the next time step. Also initialize our output arrays if necessary.
            OutputObj.outPath = fcstCycleOutDir + "/" + file_date.strftime('%Y%m%d%H%M') + \
                                ".LDASIN_DOMAIN1"
            # MpiConfig.comm.barrier()

            if os.path.isfile(OutputObj.outPath):
                if MpiConfig.rank == 0:
                    ConfigOptions.statusMsg = "Output file: " + OutputObj.outPath + " exists. Moving " + \
                                              " to the next output timestep."
                    err_handler.log_msg(ConfigOptions, MpiConfig)
                err_handler.check_program_status(ConfigOptions, MpiConfig)
                continue
            else:
                ConfigOptions.currentForceNum = 0
                ConfigOptions.currentCustomForceNum = 0
                # Loop over each of the input forcings specifed.
                for forceKey in ConfigOptions.input_forcings:
                    input_forcings = inputForcingMod[forceKey]
                    # Calculate the previous and next input cycle files from the inputs.
                    input_forcings.calc_neighbor_files(ConfigOptions, OutputObj.outDate, MpiConfig)
                    err_handler.check_program_status(ConfigOptions, MpiConfig)

                    # Regrid forcings.
                    input_forcings.regrid_inputs(ConfigOptions, wrfHydroGeoMeta, MpiConfig)
                    err_handler.check_program_status(ConfigOptions, MpiConfig)

                    # Run check on regridded fields for reasonable values that are not missing values.
                    err_handler.check_forcing_bounds(ConfigOptions, input_forcings, MpiConfig)
                    err_handler.check_program_status(ConfigOptions, MpiConfig)

                    # If we are restarting a forecast cycle, re-calculate the neighboring files, and regrid the
                    # next set of forcings as the previous step just regridded the previous forcing.
                    if input_forcings.rstFlag == 1:
                        if input_forcings.regridded_forcings1 is not None and \
                                input_forcings.regridded_forcings2 is not None:
                            # Set the forcings back to reflect we just regridded the previous set of inputs, not the next.
                            input_forcings.regridded_forcings1[:, :, :] = \
                                input_forcings.regridded_forcings2[:, :, :]

                        # Re-calculate the neighbor files.
                        input_forcings.calc_neighbor_files(ConfigOptions, OutputObj.outDate, MpiConfig)
                        err_handler.check_program_status(ConfigOptions, MpiConfig)

                        # Regrid the forcings for the end of the window.
                        input_forcings.regrid_inputs(ConfigOptions, wrfHydroGeoMeta, MpiConfig)
                        err_handler.check_program_status(ConfigOptions, MpiConfig)

                        # Restart handling is a one-shot operation; clear the flag.
                        input_forcings.rstFlag = 0

                    # Run temporal interpolation on the grids.
                    input_forcings.temporal_interpolate_inputs(ConfigOptions, MpiConfig)
                    err_handler.check_program_status(ConfigOptions, MpiConfig)

                    # Run bias correction.
                    bias_correction.run_bias_correction(input_forcings, ConfigOptions,
                                                        wrfHydroGeoMeta, MpiConfig)
                    err_handler.check_program_status(ConfigOptions, MpiConfig)

                    # Run downscaling on grids for this output timestep.
                    downscale.run_downscaling(input_forcings, ConfigOptions,
                                              wrfHydroGeoMeta, MpiConfig)
                    err_handler.check_program_status(ConfigOptions, MpiConfig)

                    # Layer in forcings from this product.
                    layeringMod.layer_final_forcings(OutputObj, input_forcings, ConfigOptions, MpiConfig)
                    err_handler.check_program_status(ConfigOptions, MpiConfig)

                    ConfigOptions.currentForceNum = ConfigOptions.currentForceNum + 1

                    # NOTE(review): forceKey 10 appears to denote a "custom" forcing
                    # product — confirm against the forcing-key definitions.
                    if forceKey == 10:
                        ConfigOptions.currentCustomForceNum = ConfigOptions.currentCustomForceNum + 1

                # Process supplemental precipitation if we specified in the configuration file.
                if ConfigOptions.number_supp_pcp > 0:
                    for suppPcpKey in ConfigOptions.supp_precip_forcings:
                        # Like with input forcings, calculate the neighboring files to use.
                        suppPcpMod[suppPcpKey].calc_neighbor_files(ConfigOptions, OutputObj.outDate, MpiConfig)
                        err_handler.check_program_status(ConfigOptions, MpiConfig)

                        # Regrid the supplemental precipitation.
                        suppPcpMod[suppPcpKey].regrid_inputs(ConfigOptions, wrfHydroGeoMeta, MpiConfig)
                        err_handler.check_program_status(ConfigOptions, MpiConfig)

                        # Only layer supplemental precip when both bounding grids exist.
                        if suppPcpMod[suppPcpKey].regridded_precip1 is not None \
                                and suppPcpMod[suppPcpKey].regridded_precip2 is not None:
                            # if np.any(suppPcpMod[suppPcpKey].regridded_precip1) and \
                            #        np.any(suppPcpMod[suppPcpKey].regridded_precip2):
                            # Run check on regridded fields for reasonable values that are not missing values.
                            err_handler.check_supp_pcp_bounds(ConfigOptions, suppPcpMod[suppPcpKey], MpiConfig)
                            err_handler.check_program_status(ConfigOptions, MpiConfig)

                            # Run temporal interpolation on the grids.
                            suppPcpMod[suppPcpKey].temporal_interpolate_inputs(ConfigOptions, MpiConfig)
                            err_handler.check_program_status(ConfigOptions, MpiConfig)

                            # Layer in the supplemental precipitation into the current output object.
                            layeringMod.layer_supplemental_forcing(OutputObj, suppPcpMod[suppPcpKey],
                                                                   ConfigOptions, MpiConfig)
                            err_handler.check_program_status(ConfigOptions, MpiConfig)

                # Call the output routines
                #   adjust date for AnA if necessary
                if ConfigOptions.ana_flag:
                    OutputObj.outDate = file_date

                OutputObj.output_final_ldasin(ConfigOptions, wrfHydroGeoMeta, MpiConfig)
                err_handler.check_program_status(ConfigOptions, MpiConfig)

        # For AnA runs, only finalize (close log, touch COMPLETE) after the last cycle.
        if (not ConfigOptions.ana_flag) or (fcstCycleNum == (ConfigOptions.nFcsts - 1)):
            if MpiConfig.rank == 0:
                ConfigOptions.statusMsg = "Forcings complete for forecast cycle: " + \
                                          ConfigOptions.current_fcst_cycle.strftime('%Y-%m-%d %H:%M')
                err_handler.log_msg(ConfigOptions, MpiConfig)
            err_handler.check_program_status(ConfigOptions, MpiConfig)

            if MpiConfig.rank == 0:
                # Close the log file.
                try:
                    err_handler.close_log(ConfigOptions, MpiConfig)
                except:
                    err_handler.err_out_screen_para(ConfigOptions.errMsg, MpiConfig)

            # Success.... Now touch an empty complete file for this forecast cycle to indicate
            # completion in case the code is re-ran.
            try:
                open(completeFlag, 'a').close()
            except:
                ConfigOptions.errMsg = "Unable to create completion file: " + completeFlag
                err_handler.log_critical(ConfigOptions, MpiConfig)
            err_handler.check_program_status(ConfigOptions, MpiConfig)
def nwm_monthly_PRISM_downscale(input_forcings,ConfigOptions,GeoMetaWrfHydro,MpiConfig):
    """
    NCAR/OWP function for downscaling precipitation using monthly PRISM climatology in a
    mountain-mapper like fashion.

    On the first output timestep, or when the output month changes, rank 0 reads
    the monthly PRISM numerator/denominator parameter grids from NetCDF and
    scatters them to all processors. The local precipitation slab
    (final_forcings[3,:,:], treated by this code as a rate in mm/s) is then
    converted to an hourly accumulation, multiplied by the numerator grid,
    divided by the denominator grid, and converted back to a rate - all in
    place on the local processor slab.

    :param input_forcings: forcing object holding final_forcings plus the
                           cached nwmPRISM_numGrid / nwmPRISM_denGrid slabs.
    :param ConfigOptions: configuration object (output dates, parameter paths,
                          status/error message slots).
    :param GeoMetaWrfHydro: geospatial metadata describing the output WRF-Hydro grid.
    :param MpiConfig: MPI configuration object (rank, scatter_array helper).
    :return: None - input_forcings.final_forcings is modified in place.
    """
    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Performing NWM Monthly PRISM Mountain Mapper " \
                                  "Downscaling of Precipitation"
        err_handler.log_msg(ConfigOptions, MpiConfig)

    # Establish whether or not we need to read in new PRISM monthly climatology:
    # 1.) This is the first output timestep, and no grids have been initialized.
    # 2.) We have switched months from the last timestep. In this case, we need
    #     to re-initialize the grids for the current month.
    initialize_flag = False
    if input_forcings.nwmPRISM_denGrid is None and input_forcings.nwmPRISM_numGrid is None:
        # We are on situation 1 - This is the first output step.
        initialize_flag = True
        # print('WE NEED TO READ IN PRISM GRIDS')
    if ConfigOptions.current_output_date.month != ConfigOptions.prev_output_date.month:
        # We are on situation #2 - The month has changed so we need to reinitialize the
        # PRISM grids.
        initialize_flag = True
        # print('MONTH CHANGE.... NEED TO READ IN NEW PRISM GRIDS.')

    if initialize_flag is True:
        # while/break construct: on the first failed check rank 0 (or all
        # ranks, for the file-existence checks) breaks out of the loop, and
        # the logged critical error is picked up by check_program_status
        # below. The unconditional break at the bottom runs the body once.
        while (True):
            # First reset the local PRISM grids to be safe.
            input_forcings.nwmPRISM_numGrid = None
            input_forcings.nwmPRISM_denGrid = None

            # Compose paths to the expected files.
            # NOTE(review): '%h' is a platform-dependent strftime code for the
            # abbreviated month name (equivalent to '%b') - confirm portability.
            numeratorPath = input_forcings.paramDir + "/PRISM_Precip_Clim_" + \
                            ConfigOptions.current_output_date.strftime('%h') + '_NWM_Mtn_Mapper_Numer.nc'
            denominatorPath = input_forcings.paramDir + "/PRISM_Precip_Clim_" + \
                              ConfigOptions.current_output_date.strftime('%h') + '_NWM_Mtn_Mapper_Denom.nc'
            #print(numeratorPath)
            #print(denominatorPath)

            # Make sure files exist. These checks run on every rank (not just
            # rank 0), so all ranks break together if a file is missing.
            if not os.path.isfile(numeratorPath):
                ConfigOptions.errMsg = "Expected parameter file: " + numeratorPath + \
                                       " for mountain mapper downscaling of precipitation not found."
                err_handler.log_critical(ConfigOptions, MpiConfig)
                break

            if not os.path.isfile(denominatorPath):
                ConfigOptions.errMsg = "Expected parameter file: " + denominatorPath + \
                                       " for mountain mapper downscaling of precipitation not found."
                err_handler.log_critical(ConfigOptions, MpiConfig)
                break

            if MpiConfig.rank == 0:
                # Open the NetCDF parameter files. Check to make sure expected dimension
                # sizes are in place, along with variable names, etc.
                try:
                    idNum = Dataset(numeratorPath,'r')
                except:
                    ConfigOptions.errMsg = "Unable to open parameter file: " + numeratorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                try:
                    idDenom = Dataset(denominatorPath,'r')
                except:
                    ConfigOptions.errMsg = "Unable to open parameter file: " + denominatorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                # Check to make sure expected names, dimension sizes are present.
                if 'x' not in idNum.variables.keys():
                    ConfigOptions.errMsg = "Expected 'x' variable not found in parameter file: " + numeratorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                if 'x' not in idDenom.variables.keys():
                    ConfigOptions.errMsg = "Expected 'x' variable not found in parameter file: " + denominatorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                if 'y' not in idNum.variables.keys():
                    ConfigOptions.errMsg = "Expected 'y' variable not found in parameter file: " + numeratorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                if 'y' not in idDenom.variables.keys():
                    ConfigOptions.errMsg = "Expected 'y' variable not found in parameter file: " + denominatorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                if 'Data' not in idNum.variables.keys():
                    ConfigOptions.errMsg = "Expected 'Data' variable not found in parameter file: " + numeratorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                if 'Data' not in idDenom.variables.keys():
                    ConfigOptions.errMsg = "Expected 'Data' variable not found in parameter file: " + denominatorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                # The 'Data' variables must exactly match the global output
                # grid dimensions (rows = y, columns = x).
                if idNum.variables['Data'].shape[0] != GeoMetaWrfHydro.ny_global:
                    ConfigOptions.errMsg = "Input Y dimension for: " + numeratorPath + \
                                           " does not match the output WRF-Hydro Y dimension size."
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                if idDenom.variables['Data'].shape[0] != GeoMetaWrfHydro.ny_global:
                    ConfigOptions.errMsg = "Input Y dimension for: " + denominatorPath + \
                                           " does not match the output WRF-Hydro Y dimension size."
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                if idNum.variables['Data'].shape[1] != GeoMetaWrfHydro.nx_global:
                    ConfigOptions.errMsg = "Input X dimension for: " + numeratorPath + \
                                           " does not match the output WRF-Hydro X dimension size."
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                if idDenom.variables['Data'].shape[1] != GeoMetaWrfHydro.nx_global:
                    ConfigOptions.errMsg = "Input X dimension for: " + denominatorPath + \
                                           " does not match the output WRF-Hydro X dimension size."
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                # Read in the PRISM grid on the output grid. Then scatter the array out to the processors.
                try:
                    numDataTmp = idNum.variables['Data'][:,:]
                except:
                    ConfigOptions.errMsg = "Unable to extract 'Data' from parameter file: " + numeratorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                try:
                    denDataTmp = idDenom.variables['Data'][:,:]
                except:
                    ConfigOptions.errMsg = "Unable to extract 'Data' from parameter file: " + denominatorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                # Close the parameter files.
                try:
                    idNum.close()
                except:
                    ConfigOptions.errMsg = "Unable to close parameter file: " + numeratorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                try:
                    idDenom.close()
                except:
                    ConfigOptions.errMsg = "Unable to close parameter file: " + denominatorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
            else:
                # Non-root ranks contribute None to scatter_array below.
                numDataTmp = None
                denDataTmp = None

            break
        err_handler.check_program_status(ConfigOptions, MpiConfig)

        # Scatter the array out to the local processors
        input_forcings.nwmPRISM_numGrid = MpiConfig.scatter_array(GeoMetaWrfHydro, numDataTmp, ConfigOptions)
        err_handler.check_program_status(ConfigOptions, MpiConfig)

        input_forcings.nwmPRISM_denGrid = MpiConfig.scatter_array(GeoMetaWrfHydro, denDataTmp, ConfigOptions)
        err_handler.check_program_status(ConfigOptions, MpiConfig)

    # Create temporary grids from the local slabs of params/precip forcings.
    # NOTE: basic slicing yields a numpy view, so the masked in-place updates
    # below already write through to final_forcings; the explicit assignment
    # at the end is redundant-but-safe.
    localRainRate = input_forcings.final_forcings[3,:,:]
    numLocal = input_forcings.nwmPRISM_numGrid[:,:]
    denLocal = input_forcings.nwmPRISM_denGrid[:,:]

    # Establish index of where we have valid data (positive precip and
    # positive numerator/denominator climatology values).
    try:
        indValid = np.where((localRainRate > 0.0) & (denLocal > 0.0) & (numLocal > 0.0))
    except:
        ConfigOptions.errMsg = "Unable to run numpy search for valid values on precip and " \
                               "param grid in mountain mapper downscaling"
        err_handler.log_critical(ConfigOptions, MpiConfig)
    err_handler.check_program_status(ConfigOptions, MpiConfig)

    # Convert precipitation rate, which is mm/s to mm, which is needed to run the PRISM downscaling.
    try:
        localRainRate[indValid] = localRainRate[indValid]*3600.0
    except:
        ConfigOptions.errMsg = "Unable to convert temporary precip rate from mm/s to mm."
        err_handler.log_critical(ConfigOptions, MpiConfig)
    err_handler.check_program_status(ConfigOptions, MpiConfig)

    # Apply the mountain-mapper ratio: multiply by the numerator grid...
    try:
        localRainRate[indValid] = localRainRate[indValid] * numLocal[indValid]
    except:
        ConfigOptions.errMsg = "Unable to multiply precip by numerator in mountain mapper downscaling"
        err_handler.log_critical(ConfigOptions, MpiConfig)
    err_handler.check_program_status(ConfigOptions, MpiConfig)

    # ... then divide by the denominator grid (indValid guarantees denLocal > 0).
    try:
        localRainRate[indValid] = localRainRate[indValid] / denLocal[indValid]
    except:
        ConfigOptions.errMsg = "Unable to divide precip by denominator in mountain mapper downscaling"
        err_handler.log_critical(ConfigOptions, MpiConfig)
    err_handler.check_program_status(ConfigOptions, MpiConfig)

    # Convert local precip back to a rate (mm/s)
    try:
        localRainRate[indValid] = localRainRate[indValid]/3600.0
    except:
        ConfigOptions.errMsg = "Unable to convert temporary precip rate from mm to mm/s."
        err_handler.log_critical(ConfigOptions, MpiConfig)
    err_handler.check_program_status(ConfigOptions, MpiConfig)

    input_forcings.final_forcings[3, :, :] = localRainRate

    # Reset variables for memory efficiency. (idDenom/idNum are only bound on
    # rank 0 during initialization; assigning None here is harmless elsewhere.)
    idDenom = None
    idNum = None
    localRainRate = None
    numLocal = None
    denLocal = None
def param_lapse(input_forcings,ConfigOptions,GeoMetaWrfHydro,MpiConfig):
    """
    Function that applies an a priori lapse rate adjustment to modeled
    2-meter temperature by taking the difference of the native
    input elevation and the WRF-hydro elevation. It's assumed this lapse
    rate grid has already been regridded to the final output WRF-Hydro
    grid.

    On first use, rank 0 reads the lapse rate grid from
    <paramDir>/lapse_param.nc, validates it, and scatters it to all
    processors; the local slab is cached on input_forcings.lapseGrid.

    :param input_forcings: forcing object holding final_forcings, the native
                           input height grid, and the cached lapseGrid slab.
    :param ConfigOptions: configuration object (NDV value, status/error
                          message slots).
    :param GeoMetaWrfHydro: geospatial metadata for the output WRF-Hydro grid.
    :param MpiConfig: MPI configuration object (rank, scatter_array helper).
    :return: None - input_forcings.final_forcings is modified in place.
    """
    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Applying a priori lapse rate grid to temperature downscaling"
        err_handler.log_msg(ConfigOptions, MpiConfig)

    # Calculate the elevation difference (native input height minus output grid height).
    elevDiff = input_forcings.height - GeoMetaWrfHydro.height

    if input_forcings.lapseGrid is None:
        # We have not read in our lapse rate file. Read it in, do extensive checks,
        # scatter the lapse rate grid out to individual processors, then apply the
        # lapse rate to the 2-meter temperature grid.
        if MpiConfig.rank == 0:
            # while/break construct: the first failed check breaks out, and
            # the logged critical error is picked up by check_program_status
            # below. The unconditional break at the bottom runs the body once.
            while (True):
                # First ensure we have a parameter directory
                if input_forcings.paramDir == "NONE":
                    ConfigOptions.errMsg = "User has specified spatial temperature lapse rate " \
                                           "downscaling while no downscaling parameter directory " \
                                           "exists."
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                # Compose the path to the lapse rate grid file.
                lapsePath = input_forcings.paramDir + "/lapse_param.nc"
                if not os.path.isfile(lapsePath):
                    ConfigOptions.errMsg = "Expected lapse rate parameter file: " + \
                                           lapsePath + " does not exist."
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                # Open the lapse rate file. Check for the expected variable, along with
                # the dimension size to make sure everything matches up.
                try:
                    idTmp = Dataset(lapsePath,'r')
                except:
                    ConfigOptions.errMsg = "Unable to open parameter file: " + lapsePath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                if 'lapse' not in idTmp.variables.keys():
                    ConfigOptions.errMsg = "Expected 'lapse' variable not located in parameter " \
                                           "file: " + lapsePath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                try:
                    lapseTmp = idTmp.variables['lapse'][:,:]
                except:
                    ConfigOptions.errMsg = "Unable to extract 'lapse' variable from parameter " \
                                           "file: " + lapsePath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                # Check dimensions to ensure they match up to the output grid.
                if lapseTmp.shape[1] != GeoMetaWrfHydro.nx_global:
                    ConfigOptions.errMsg = "X-Dimension size mismatch between output grid and lapse " \
                                           "rate from parameter file: " + lapsePath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                if lapseTmp.shape[0] != GeoMetaWrfHydro.ny_global:
                    ConfigOptions.errMsg = "Y-Dimension size mismatch between output grid and lapse " \
                                           "rate from parameter file: " + lapsePath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                # Perform a quick search to ensure we don't have radical values.
                indTmp = np.where(lapseTmp < -10.0)
                if len(indTmp[0]) > 0:
                    ConfigOptions.errMsg = "Found anomalous negative values in the lapse rate grid from " \
                                           "parameter file: " + lapsePath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                indTmp = np.where(lapseTmp > 100.0)
                if len(indTmp[0]) > 0:
                    ConfigOptions.errMsg = "Found excessively high values in the lapse rate grid from " \
                                           "parameter file: " + lapsePath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                # Close the parameter lapse rate file.
                try:
                    idTmp.close()
                except:
                    ConfigOptions.errMsg = "Unable to close parameter file: " + lapsePath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                break
        else:
            # Non-root ranks contribute None to scatter_array below.
            lapseTmp = None
        err_handler.check_program_status(ConfigOptions, MpiConfig)

        # Scatter the lapse rate grid to the other processors.
        input_forcings.lapseGrid = MpiConfig.scatter_array(GeoMetaWrfHydro,lapseTmp,ConfigOptions)
        err_handler.check_program_status(ConfigOptions, MpiConfig)

    # Apply the local lapse rate grid to our local slab of 2-meter temperature data.
    # Slot 4 of final_forcings holds the 2-meter temperature field.
    temperature_grid_tmp = input_forcings.final_forcings[4, :, :]
    try:
        indNdv = np.where(input_forcings.final_forcings == ConfigOptions.globalNdv)
    except:
        ConfigOptions.errMsg = "Unable to perform NDV search on input " + \
                               input_forcings.productName + " regridded forcings."
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return
    try:
        indValid = np.where(temperature_grid_tmp != ConfigOptions.globalNdv)
    except:
        ConfigOptions.errMsg = "Unable to perform search for valid values on input " + \
                               input_forcings.productName + " regridded temperature forcings."
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return
    try:
        # Lapse rate is divided by 1000.0 before applying to the elevation
        # difference (presumably units per km applied to meters of elevation
        # difference - TODO confirm against the parameter file metadata).
        temperature_grid_tmp[indValid] = temperature_grid_tmp[indValid] + \
                                         ((input_forcings.lapseGrid[indValid]/1000.0) * elevDiff[indValid])
    except:
        ConfigOptions.errMsg = "Unable to apply spatial lapse rate values to input " + \
                               input_forcings.productName + " regridded temperature forcings."
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return

    # Store the adjusted temperature, then re-stamp NDV cells so the adjustment
    # never overwrites missing-value markers.
    input_forcings.final_forcings[4,:,:] = temperature_grid_tmp
    input_forcings.final_forcings[indNdv] = ConfigOptions.globalNdv

    # Reset for memory efficiency
    indTmp = None
    indNdv = None
    indValid = None
    elevDiff = None
    temperature_grid_tmp = None
Exemple #11
0
def _read_rqi_grid(ConfigOptions, MpiConfig, supplemental_precip,
                   GeoMetaWrfHydro, rqiPath):
    """
    Read a monthly RQI climatology grid on rank 0, sanity-check it against the
    output grid dimensions, scatter it to all processors, and store the local
    slab into supplemental_precip.regridded_rqi2.

    :param ConfigOptions: configuration object (status/error message slots).
    :param MpiConfig: MPI configuration object (rank, scatter_array helper).
    :param supplemental_precip: supplemental precip object receiving the grid.
    :param GeoMetaWrfHydro: geospatial metadata for the output WRF-Hydro grid.
    :param rqiPath: path to the monthly RQI NetCDF parameter file.
    :return: None - supplemental_precip.regridded_rqi2 is modified in place.
    """
    if MpiConfig.rank == 0:
        # Initialize so error paths fall through cleanly to
        # check_program_status instead of raising NameError.
        idTmp = None
        varTmp = None
        ConfigOptions.statusMsg = "Reading in RQI Parameter File: " + rqiPath
        err_handler.log_msg(ConfigOptions, MpiConfig)
        # First make sure the RQI file exists.
        if not os.path.isfile(rqiPath):
            ConfigOptions.errMsg = "Expected RQI parameter file: " + rqiPath + " not found."
            err_handler.log_critical(ConfigOptions, MpiConfig)
        else:
            # Open the Parameter file.
            try:
                idTmp = Dataset(rqiPath, 'r')
            except:
                ConfigOptions.errMsg = "Unable to open parameter file: " + rqiPath
                err_handler.log_critical(ConfigOptions, MpiConfig)

        if idTmp is not None:
            # Extract out the RQI grid.
            try:
                varTmp = idTmp.variables['POP_0mabovemeansealevel'][0, :, :]
            except:
                ConfigOptions.errMsg = "Unable to extract POP_0mabovemeansealevel from parameter file: " + rqiPath
                err_handler.log_critical(ConfigOptions, MpiConfig)

        if varTmp is not None:
            # Sanity checking on grid size.
            if varTmp.shape[0] != GeoMetaWrfHydro.ny_global or \
                    varTmp.shape[1] != GeoMetaWrfHydro.nx_global:
                ConfigOptions.errMsg = "Improper dimension sizes for POP_0mabovemeansealevel " \
                                       "in parameter file: " + rqiPath
                err_handler.log_critical(ConfigOptions, MpiConfig)
    else:
        # Non-root ranks contribute None to scatter_array below.
        idTmp = None
        varTmp = None
    err_handler.check_program_status(ConfigOptions, MpiConfig)

    # Scatter the array out to the local processors.
    varSubTmp = MpiConfig.scatter_array(GeoMetaWrfHydro, varTmp, ConfigOptions)
    err_handler.check_program_status(ConfigOptions, MpiConfig)

    supplemental_precip.regridded_rqi2[:, :] = varSubTmp

    # Reset variables for memory purposes.
    varSubTmp = None
    varTmp = None

    # Close the RQI NetCDF file (only rank 0 holds an open handle).
    if MpiConfig.rank == 0:
        try:
            idTmp.close()
        except:
            ConfigOptions.errMsg = "Unable to close parameter file: " + rqiPath
            err_handler.log_critical(ConfigOptions, MpiConfig)
    err_handler.check_program_status(ConfigOptions, MpiConfig)


def read_rqi_monthly_climo(ConfigOptions, MpiConfig, supplemental_precip,
                           GeoMetaWrfHydro):
    """
    Function to read in monthly RQI grids on the NWM grid. This is an NWM ONLY
    option. Please do not activate if not executing on the NWM conus grid.

    The grid is (re-)read in two cases: the RQI field has never been
    initialized (it is entirely NDV), or the supplemental precipitation dates
    have rolled over into a new month.

    :param ConfigOptions: configuration object (NDV value, parameter dir,
                          status/error message slots).
    :param MpiConfig: MPI configuration object (rank, scatter_array helper).
    :param supplemental_precip: supplemental precip object holding pcp dates
                                and the regridded_rqi2 grid.
    :param GeoMetaWrfHydro: geospatial metadata for the output WRF-Hydro grid.
    :return: None - supplemental_precip.regridded_rqi2 is modified in place.
    """
    # First check to see if the RQI grids have valid values in them. There should
    # be NO NDV values if the grids have properly been read in.
    indTmp = np.where(
        supplemental_precip.regridded_rqi2 != ConfigOptions.globalNdv)

    rqiPath = ConfigOptions.supp_precip_param_dir + "/MRMS_WGT_RQI0.9_m" + \
              supplemental_precip.pcp_date2.strftime('%m') + '_v1.1_geosmth.nc'

    if len(indTmp[0]) == 0:
        # We haven't initialized the RQI fields. We need to do this.....
        _read_rqi_grid(ConfigOptions, MpiConfig, supplemental_precip,
                       GeoMetaWrfHydro, rqiPath)

    # Also check to see if we have switched to a new month based on the previous
    # MRMS step and the current one.
    if supplemental_precip.pcp_date2.month != supplemental_precip.pcp_date1.month:
        # We need to read in a new RQI monthly grid.
        _read_rqi_grid(ConfigOptions, MpiConfig, supplemental_precip,
                       GeoMetaWrfHydro, rqiPath)
Exemple #12
0
def open_grib2(GribFileIn, NetCdfFileOut, Wgrib2Cmd, ConfigOptions, MpiConfig,
               inputVar):
    """
    Generic function to convert a GRIB2 file into a NetCDF file. Function
    will also open the NetCDF file, and ensure all necessary inputs are
    in file.

    Only rank 0 runs the wgrib2 conversion and opens the resulting NetCDF
    file; all other ranks receive None. Missing latitude/longitude variables
    are warnings only; a missing inputVar is treated as critical.

    :param GribFileIn: path to the source GRIB2 file.
    :param NetCdfFileOut: path where the converted NetCDF file is written.
    :param Wgrib2Cmd: full wgrib2 shell command string to execute.
    :param ConfigOptions: configuration object (scratch dir, message slots).
    :param MpiConfig: MPI configuration object (rank info).
    :param inputVar: expected NetCDF variable name to verify, or None to skip.
    :return: open netCDF4 Dataset handle on rank 0 (None on failure or on
             non-root ranks).
    """
    # Ensure all processors are synced up before outputting.
    # MpiConfig.comm.barrier()

    # Run wgrib2 command to convert GRIB2 file to NetCDF.
    if MpiConfig.rank == 0:
        # Check to see if output file already exists. If so, delete it and
        # override.
        ConfigOptions.statusMsg = "Reading in GRIB2 file: " + GribFileIn
        err_handler.log_msg(ConfigOptions, MpiConfig)
        if os.path.isfile(NetCdfFileOut):
            ConfigOptions.statusMsg = "Overriding temporary NetCDF file: " + NetCdfFileOut
            err_handler.log_warning(ConfigOptions, MpiConfig)
        try:
            # WCOSS fix for WGRIB2 crashing when called on the same file twice in python
            if not os.environ.get('MFE_SILENT'):
                print("command: " + Wgrib2Cmd)

            # set up GRIB2TABLE if needed: write a local grib2.tbl mapping the
            # MRMS MultiSensorQPE01H product codes, and point wgrib2 at it via
            # the GRIB2TABLE environment variable.
            if not os.environ.get('GRIB2TABLE'):
                g2path = os.path.join(ConfigOptions.scratch_dir, "grib2.tbl")
                with open(g2path, 'wt') as g2t:
                    g2t.write(
                        "209:1:0:0:161:1:6:30:MultiSensorQPE01H:"
                        "Multi-sensor estimated precipitation accumulation 1-hour:mm\n"
                        "209:1:0:0:161:1:6:37:MultiSensorQPE01H:"
                        "Multi-sensor estimated precipitation accumulation 1-hour:mm\n"
                    )
                os.environ['GRIB2TABLE'] = g2path

            # Run the conversion through the shell. The exit code is not
            # checked here; failure is detected by the file-existence check
            # below instead.
            exitcode = subprocess.call(Wgrib2Cmd, shell=True)

            #print("exitcode: " + str(exitcode))
            # Call WGRIB2 with subprocess.Popen
            #cmdOutput = subprocess.Popen([Wgrib2Cmd], stdout=subprocess.PIPE,
            #                             stderr=subprocess.PIPE, shell=True)
            #out, err = cmdOutput.communicate()
            #exitcode = cmdOutput.returncode
        except:
            ConfigOptions.errMsg = "Unable to convert: " + GribFileIn + " to " + \
                                   NetCdfFileOut
            err_handler.log_critical(ConfigOptions, MpiConfig)
            idTmp = None
            pass

        # Reset temporary subprocess variables.
        out = None
        err = None
        exitcode = None

        # Ensure file exists.
        if not os.path.isfile(NetCdfFileOut):
            ConfigOptions.errMsg = "Expected NetCDF file: " + NetCdfFileOut + \
                                   " not found. It's possible the GRIB2 variable was not found."
            err_handler.log_critical(ConfigOptions, MpiConfig)
            idTmp = None
            pass

        # Open the NetCDF file.
        try:
            idTmp = Dataset(NetCdfFileOut, 'r')
        except:
            ConfigOptions.errMsg = "Unable to open input NetCDF file: " + \
                                   NetCdfFileOut
            err_handler.log_critical(ConfigOptions, MpiConfig)
            idTmp = None
            pass

        if idTmp is not None:
            # Check for expected lat/lon variables. Missing coordinates are
            # logged as warnings only; the handle is kept open.
            if 'latitude' not in idTmp.variables.keys():
                ConfigOptions.statusMsg = "Unable to locate latitude from: " + \
                                       GribFileIn
                err_handler.log_warning(ConfigOptions, MpiConfig)
                # idTmp = None
                pass
        if idTmp is not None:
            if 'longitude' not in idTmp.variables.keys():
                ConfigOptions.statusMsg = "Unable to locate longitude from: " + \
                                       GribFileIn
                err_handler.log_warning(ConfigOptions, MpiConfig)
                # idTmp = None
                pass

        if idTmp is not None and inputVar is not None:
            # Loop through all the expected variables. A missing data variable
            # is critical and invalidates the handle.
            if inputVar not in idTmp.variables.keys():
                ConfigOptions.errMsg = "Unable to locate expected variable: " + \
                                       inputVar + " in: " + NetCdfFileOut
                err_handler.log_critical(ConfigOptions, MpiConfig)
                idTmp = None
                pass
    else:
        idTmp = None

    # Ensure all processors are synced up before outputting.
    # MpiConfig.comm.barrier()  ## THIS HAPPENS IN check_program_status

    err_handler.check_program_status(ConfigOptions, MpiConfig)

    # Return the NetCDF file handle back to the user.
    return idTmp
Exemple #13
0
def open_grib2(GribFileIn, NetCdfFileOut, Wgrib2Cmd, ConfigOptions, MpiConfig,
               inputVar):
    """
    Generic function to convert a GRIB2 file into a NetCDF file. Function
    will also open the NetCDF file, and ensure all necessary inputs are
    in file.

    Only rank 0 runs the wgrib2 conversion and opens the resulting NetCDF
    file; all other ranks receive None. Missing latitude/longitude variables
    or a missing inputVar are treated as critical and invalidate the handle.

    :param GribFileIn: path to the source GRIB2 file.
    :param NetCdfFileOut: path where the converted NetCDF file is written.
    :param Wgrib2Cmd: full wgrib2 shell command string to execute.
    :param ConfigOptions: configuration object (status/error message slots).
    :param MpiConfig: MPI configuration object (rank info).
    :param inputVar: expected NetCDF variable name to verify.
    :return: open netCDF4 Dataset handle on rank 0 (None on failure or on
             non-root ranks).
    """
    # Ensure all processors are synced up before outputting.
    # MpiConfig.comm.barrier()

    # Run wgrib2 command to convert GRIB2 file to NetCDF.
    if MpiConfig.rank == 0:
        # Check to see if output file already exists. If so, delete it and
        # override.
        ConfigOptions.statusMsg = "Reading in GRIB2 file: " + GribFileIn
        err_handler.log_msg(ConfigOptions, MpiConfig)
        if os.path.isfile(NetCdfFileOut):
            ConfigOptions.statusMsg = "Overriding temporary NetCDF file: " + NetCdfFileOut
            err_handler.log_warning(ConfigOptions, MpiConfig)
        try:
            # Run the conversion through the shell. The exit code is not
            # checked here; failure is detected by the file-existence check
            # below instead.
            cmdOutput = subprocess.Popen([Wgrib2Cmd],
                                         stdout=subprocess.PIPE,
                                         stderr=subprocess.PIPE,
                                         shell=True)
            out, err = cmdOutput.communicate()
            exitcode = cmdOutput.returncode
            #subprocess.run([Wgrib2Cmd],shell=True)
        except:
            ConfigOptions.errMsg = "Unable to convert: " + GribFileIn + " to " + \
                                   NetCdfFileOut
            err_handler.log_critical(ConfigOptions, MpiConfig)
            idTmp = None
            pass

        # Reset temporary subprocess variables.
        out = None
        err = None
        exitcode = None

        # Ensure file exists.
        if not os.path.isfile(NetCdfFileOut):
            ConfigOptions.errMsg = "Expected NetCDF file: " + NetCdfFileOut + \
                                   " not found. It's possible the GRIB2 variable was not found."
            err_handler.log_critical(ConfigOptions, MpiConfig)
            idTmp = None
            pass

        # Open the NetCDF file.
        try:
            idTmp = Dataset(NetCdfFileOut, 'r')
        except:
            ConfigOptions.errMsg = "Unable to open input NetCDF file: " + \
                                   NetCdfFileOut
            err_handler.log_critical(ConfigOptions, MpiConfig)
            idTmp = None
            pass

        if idTmp is not None:
            # Check for expected lat/lon variables.
            if 'latitude' not in idTmp.variables.keys():
                ConfigOptions.errMsg = "Unable to locate latitude from: " + \
                                       GribFileIn
                err_handler.log_critical(ConfigOptions, MpiConfig)
                idTmp = None
                pass
        if idTmp is not None:
            # BUG FIX: error message previously read "Unable t locate".
            if 'longitude' not in idTmp.variables.keys():
                ConfigOptions.errMsg = "Unable to locate longitude from: " + \
                                       GribFileIn
                err_handler.log_critical(ConfigOptions, MpiConfig)
                idTmp = None
                pass

        if idTmp is not None:
            # Loop through all the expected variables.
            if inputVar not in idTmp.variables.keys():
                ConfigOptions.errMsg = "Unable to locate expected variable: " + \
                                       inputVar + " in: " + NetCdfFileOut
                err_handler.log_critical(ConfigOptions, MpiConfig)
                idTmp = None
                pass
    else:
        idTmp = None

    # Ensure all processors are synced up before outputting.
    # MpiConfig.comm.barrier()  ## THIS HAPPENS IN check_program_status

    err_handler.check_program_status(ConfigOptions, MpiConfig)

    # Return the NetCDF file handle back to the user.
    return idTmp
Exemple #14
0
def ext_ana_disaggregate(input_forcings, supplemental_precip, config_options, mpi_config):
    """
    Disaggregate a 6-hour supplemental precipitation accumulation (Stage IV
    ExtAnA) down to a single target hour.

    Rank 0 reads the six hourly AnA LDASIN files that span the supplemental
    product's 6-hour window, sums their RAINRATE fields, and identifies the
    target hour's field. Per-pixel disaggregation weights are then scattered
    to all ranks and applied in place to supplemental_precip.regridded_precip2:
      - all six hourly values zero  -> uniform 1/6 weight
      - no zeros, or target nonzero -> target_hour / 6-hour-sum ratio
      - otherwise                   -> 0
    NaNs in the result are replaced with config_options.globalNdv.

    NOTE: this function contains MPI collectives (broadcast_parameter,
    scatter_array, check_program_status) that must run in the same order on
    every rank — do not reorder or conditionally skip them per-rank.

    :param input_forcings: input forcing object; supplies file_in2 (used to
        derive the target hour from its filename), inDir and
        netcdf_var_names[3] (the RAINRATE variable name).
    :param supplemental_precip: supplemental precip object whose
        regridded_precip2 grid is scaled in place; ext_ana must be "STAGE4".
    :param config_options: config/state object (statusMsg/errMsg, globalNdv,
        scratch_dir).
    :param mpi_config: MPI helper (rank, comm, broadcast_parameter,
        scatter_array).
    :return: None — the result is written into
        supplemental_precip.regridded_precip2.
    """
    # Check to make sure we have valid grids.
    if input_forcings.regridded_forcings2 is None or supplemental_precip.regridded_precip2 is None:
        if mpi_config.rank == 0:
            config_options.statusMsg = "Bypassing ext_ana_disaggregation routine due to missing input or supp pcp data"
            err_handler.log_warning(config_options, mpi_config)
        return
            
    # Disaggregation only applies to the Stage IV extended analysis product.
    if supplemental_precip.ext_ana != "STAGE4":
        if mpi_config.rank == 0:
            config_options.statusMsg = f"Bypassing ext_ana_disaggregation routine due to supplemental_precip.ext_ana = {supplemental_precip.ext_ana}"
            err_handler.log_warning(config_options, mpi_config)
        return
    
    
    #print("ext_ana_disaggregate RAINRATE input_forcings.regridded_forcings2[3,:,:]")
    #print(input_forcings.regridded_forcings2[3,:,:])
    #print("ext_ana_disaggregate supplemental_precip.regridded_precip2[:,:]")
    #print(supplemental_precip.regridded_precip2[:,:])
    #print("supplemental_precip.regridded_precip2[:,:].shape")
    #print(supplemental_precip.regridded_precip2[:,:].shape)

    read_hours = 0
    found_target_hh = False
    ana_data = []
    # Rank 0 performs all file I/O; results are scattered to other ranks below.
    if mpi_config.rank == 0:
        # Target hour (HH) comes from the input LDASIN filename, e.g.
        # ...YYYYMMDDHH00.LDASIN_DOMAIN1 -> stem[-4:-2] is the hour.
        target_hh = Path(input_forcings.file_in2).stem[-4:-2]
        # Supp pcp filename encodes the 6-hr window, e.g.
        # stage_4_A_PCP_GDS5_SFC_acc6h_<begHH>_<endHH>_<yyyymmdd>.
        _,_,_,beg_hh,end_hh,yyyymmdd = Path(supplemental_precip.file_in2).stem.split('_')
        date_iter = datetime.strptime(f"{yyyymmdd}{beg_hh}", '%Y%m%d%H')
        end_date = date_iter + timedelta(hours=6)
        #Advance the date_iter by 1 hour since the beginning of the Stage IV data in date range is excluded, the end is included
        #(begin_date,end_date]
        date_iter += timedelta(hours=1)
        while date_iter <= end_date:
            tmp_file = f"{input_forcings.inDir}/{date_iter.strftime('%Y%m%d%H')}/{date_iter.strftime('%Y%m%d%H')}00.LDASIN_DOMAIN1"
            if os.path.exists(tmp_file):
                config_options.statusMsg = f"Reading {input_forcings.netcdf_var_names[3]} from {tmp_file} for disaggregation"
                err_handler.log_msg(config_options, mpi_config)
                with Dataset(tmp_file,'r') as ds:
                    try:
                        #Read in rainrate
                        data = ds.variables[input_forcings.netcdf_var_names[3]][0, :, :]
                        # Mask fill values so they don't contaminate the 6-hr sum.
                        data[data == config_options.globalNdv] = np.nan
                        ana_data.append(data)
                        read_hours += 1
                        if date_iter.hour == int(target_hh):
                            found_target_hh = True
                    except (ValueError, KeyError, AttributeError) as err:
                        config_options.errMsg = f"Unable to extract: RAINRATE from: {input_forcings.file_in2} ({str(err)})"
                        err_handler.log_critical(config_options, mpi_config)
            else:
                config_options.statusMsg = f"Input file missing {tmp_file}"
                err_handler.log_warning(config_options, mpi_config)

            date_iter += timedelta(hours=1)

    # All ranks must learn whether rank 0 found the target hour (collective).
    found_target_hh = mpi_config.broadcast_parameter(found_target_hh, config_options, param_type=bool)
    err_handler.check_program_status(config_options, mpi_config)
    if not found_target_hh:
        if mpi_config.rank == 0:
            # target_hh is only defined on rank 0 — safe because of the guard.
            config_options.statusMsg = f"Could not find AnA target_hh = {target_hh} for disaggregation. Setting output values to {config_options.globalNdv}."
            err_handler.log_warning(config_options, mpi_config)
        # Every rank fills its local slab with no-data and bails out.
        supplemental_precip.regridded_precip2[:,:] = config_options.globalNdv
        return

    read_hours = mpi_config.broadcast_parameter(read_hours, config_options, param_type=int)
    err_handler.check_program_status(config_options, mpi_config)
    # Disaggregation requires the complete 6-hour window; otherwise the sum
    # (and the resulting weights) would be biased low.
    if read_hours != 6:
        if mpi_config.rank == 0:
            config_options.statusMsg = f"Could not find all 6 AnA files for disaggregation. Only found {read_hours} hours. Setting output values to {config_options.globalNdv}."
            err_handler.log_warning(config_options, mpi_config)
        supplemental_precip.regridded_precip2[:,:] = config_options.globalNdv
        return

    # Empty placeholders so non-root ranks have objects to pass into
    # scatter_array below; rank 0 overwrites them with the real grids.
    # NOTE(review): np.bool was deprecated in NumPy 1.20 and removed in 1.24 —
    # these three dtype=np.bool uses will raise AttributeError on modern
    # NumPy; should be plain `bool` (or np.bool_). TODO confirm pinned
    # NumPy version.
    ana_sum = np.array([],dtype=np.float32)
    target_data = np.array([],dtype=np.float32)
    ana_all_zeros = np.array([],dtype=np.bool)
    ana_no_zeros = np.array([],dtype=np.bool)
    target_data_no_zeros = np.array([],dtype=np.bool)
    if mpi_config.rank == 0:
        config_options.statusMsg = f"Performing hourly disaggregation of {supplemental_precip.file_in2}"
        err_handler.log_msg(config_options, mpi_config)

        # Element-wise 6-hour sum of the hourly RAINRATE grids.
        ana_sum = sum(ana_data)
        # ana_data is in chronological order over hours beg+1..beg+6, so hour
        # HH maps to index (HH-1) % 6 — assumes beg_hh falls on a 6-hr
        # boundary (00/06/12/18); TODO confirm for all Stage IV products.
        target_data = ana_data[(int(target_hh)-1)%6]

        # Per-pixel zero masks used to pick the disaggregation branch.
        ana_zeros = [(a == 0).astype(int) for a in ana_data]
        target_data_zeros = (target_data == 0)
        target_data_no_zeros = ~target_data_zeros
        ana_zeros_sum = sum(ana_zeros)
        ana_all_zeros = (ana_zeros_sum == 6)   # zero in every one of the 6 hours
        ana_no_zeros = (ana_zeros_sum == 0)    # nonzero in every one of the 6 hours

    # Collective scatters: every rank receives its local slab of each grid.
    err_handler.check_program_status(config_options, mpi_config)
    ana_sum = mpi_config.scatter_array(input_forcings, ana_sum, config_options)
    err_handler.check_program_status(config_options, mpi_config)
    target_data = mpi_config.scatter_array(input_forcings, target_data, config_options)
    err_handler.check_program_status(config_options, mpi_config)

    ana_all_zeros = mpi_config.scatter_array(input_forcings, ana_all_zeros, config_options)
    err_handler.check_program_status(config_options, mpi_config)
    ana_no_zeros = mpi_config.scatter_array(input_forcings, ana_no_zeros, config_options)
    err_handler.check_program_status(config_options, mpi_config)
    target_data_no_zeros = mpi_config.scatter_array(input_forcings, target_data_no_zeros, config_options)
    err_handler.check_program_status(config_options, mpi_config)
    

    # Serial-run-only debug output (presumably test_enabled is a module-level
    # flag — defined outside this view; verify).
    if mpi_config.comm.Get_size() == 1 and test_enabled:
        test_file = f"{config_options.scratch_dir}/stage_4_A_PCP_GDS5_SFC_acc6h_{yyyymmdd}_{beg_hh}_{end_hh}.txt"
        np.savetxt(test_file,supplemental_precip.regridded_precip2)
    
        test_file = f"{config_options.scratch_dir}/disaggregation_factors_{target_hh}_{yyyymmdd}{beg_hh}_{end_date.strftime('%Y%m%d%H')}.txt"
        np.savetxt(test_file,np.nan_to_num(np.select([ana_all_zeros,
                                                      (ana_no_zeros | target_data_no_zeros)],
                                                     [1/6.0*np.ones(supplemental_precip.regridded_precip2[:,:].shape),
                                                      target_data/ana_sum],
                                                     0),nan=config_options.globalNdv))

    #supplemental_precip.regridded_precip2[(0.0 < supplemental_precip.regridded_precip2) & (supplemental_precip.regridded_precip2 < 0.00003)] = 0.0
    # Apply the disaggregation in place:
    #   all 6 hours zero            -> uniform 1/6 of the 6-hr accumulation
    #   no zeros or target nonzero  -> scale by target-hour / 6-hr-sum ratio
    #   else (default)              -> 0
    # NOTE(review): where ana_sum == 0 but the target hour is nonzero the
    # ratio divides by zero, producing NaN/inf that nan_to_num below maps to
    # globalNdv (NaN) — inf is not remapped; confirm this is intended.
    supplemental_precip.regridded_precip2[:,:] = np.select([ana_all_zeros,
                                                            (ana_no_zeros | target_data_no_zeros)],
                                                           [1/6.0*supplemental_precip.regridded_precip2[:,:],
                                                            supplemental_precip.regridded_precip2[:,:] * target_data/ana_sum],
                                                           0)
    # Replace NaNs (from masked fill values / 0-div) with the global no-data value.
    np.nan_to_num(supplemental_precip.regridded_precip2[:,:], copy=False, nan=config_options.globalNdv) 

    if mpi_config.comm.Get_size() == 1 and test_enabled:
        test_file = f"{config_options.scratch_dir}/stage_4_A_PCP_GDS5_SFC_acc6_disaggregation_{target_hh}_{yyyymmdd}{beg_hh}_{end_date.strftime('%Y%m%d%H')}.txt"
        np.savetxt(test_file,supplemental_precip.regridded_precip2)