Example #1
def unzip_file(GzFileIn, FileOut, ConfigOptions, MpiConfig):
    """
    Generic I/O function to unzip a .gz file to a new location.
    :param GzFileIn: Path to the input .gz file.
    :param FileOut: Path of the unzipped output file.
    :param ConfigOptions: Configuration options/state object.
    :param MpiConfig: MPI configuration object (only rank 0 does the work).
    :return:
    """
    # Ensure all processors are synced up before outputting.
    #MpiConfig.comm.barrier()

    if MpiConfig.rank == 0:
        # Unzip the file in place.
        try:
            ConfigOptions.statusMsg = "Unzipping file: {}".format(GzFileIn)
            err_handler.log_msg(ConfigOptions, MpiConfig)
            with gzip.open(GzFileIn, 'rb') as fTmpGz:
                with open(FileOut, 'wb') as fTmp:
                    shutil.copyfileobj(fTmpGz, fTmp)
        except:
            ConfigOptions.errMsg = "Unable to unzip: " + GzFileIn
            err_handler.log_critical(ConfigOptions, MpiConfig)
            return

        if not os.path.isfile(FileOut):
            ConfigOptions.errMsg = "Unable to locate expected unzipped file: " + \
                                   FileOut
            err_handler.log_critical(ConfigOptions, MpiConfig)
            return
    else:
        return
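
The core of the routine is the stream-decompression idiom from the standard library. A minimal standalone sketch, stripped of the rank-0 gating and err_handler plumbing (file names are hypothetical):

import gzip
import shutil

# Stream the compressed bytes to the output file without loading
# the whole archive into memory.
with gzip.open("forcing.nc.gz", 'rb') as f_in:
    with open("forcing.nc", 'wb') as f_out:
        shutil.copyfileobj(f_in, f_out)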
Example #2
    def broadcast_parameter(self,
                            value_broadcast,
                            config_options,
                            param_type=int):
        """
        Generic function for sending a parameter value out to the processors.
        :param value_broadcast: Parameter value to send from rank 0.
        :param config_options: Configuration options/state object.
        :param param_type: numpy-compatible type of the broadcast value.
        :return: The broadcast value on every rank, or None on failure.
        """

        dtype = np.dtype(param_type)

        if self.rank == 0:
            param = np.asarray(value_broadcast, dtype=dtype)
        else:
            param = np.empty(dtype=dtype, shape=())

        try:
            self.comm.Bcast(param, root=0)
        except MPI.Exception:
            config_options.errMsg = "Unable to broadcast single value from rank 0."
            err_handler.log_critical(config_options, self)
            return None
        return param.item(0)
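
The uppercase Bcast above is mpi4py's buffer-based broadcast: rank 0 wraps the value in a 0-d numpy array and the other ranks supply an empty array of matching dtype and shape. A self-contained sketch, assuming mpi4py is installed (run with e.g. mpiexec -n 4 python script.py):

from mpi4py import MPI
import numpy as np

comm = MPI.COMM_WORLD
if comm.rank == 0:
    param = np.asarray(42, dtype=np.int32)   # value to distribute
else:
    param = np.empty((), dtype=np.int32)     # matching receive buffer
comm.Bcast(param, root=0)                    # buffer-based, no pickling
print(comm.rank, param.item())               # every rank prints 42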
Example #3
    def scatter_array_logan(self, geoMeta, array_broadcast, ConfigOptions):
        """
        Generic function for calling scatter functions based on
        the input dataset type.
        :param geoMeta:
        :param array_broadcast:
        :param ConfigOptions:
        :return:
        """
        # Determine which type of input array we have based on the
        # type of numpy array.
        data_type_flag = -1
        if self.rank == 0:
            if array_broadcast.dtype == np.float32:
                data_type_flag = 1
            if array_broadcast.dtype == np.float64:
                data_type_flag = 2

        # Broadcast the numpy datatype to the other processors.
        if self.rank == 0:
            tmpDict = {'varTmp': data_type_flag}
        else:
            tmpDict = None
        try:
            tmpDict = self.comm.bcast(tmpDict, root=0)
        except:
            ConfigOptions.errMsg = "Unable to broadcast numpy datatype value from rank 0"
            err_handler.log_critical(ConfigOptions, self)
            return None
        data_type_flag = tmpDict['varTmp']

        # Broadcast the global array to the child processors, then
        if self.rank == 0:
            arrayGlobalTmp = array_broadcast
        else:
            if data_type_flag == 1:
                arrayGlobalTmp = np.empty([geoMeta.ny_global,
                                           geoMeta.nx_global],
                                          np.float32)
            else:  # data_type_flag == 2 (only float32/float64 are expected)
                arrayGlobalTmp = np.empty([geoMeta.ny_global,
                                           geoMeta.nx_global],
                                          np.float64)
        try:
            self.comm.Bcast(arrayGlobalTmp, root=0)
        except:
            ConfigOptions.errMsg = "Unable to broadcast a global numpy array from rank 0"
            err_handler.log_critical(ConfigOptions, self)
            return None
        arraySub = arrayGlobalTmp[geoMeta.y_lower_bound:geoMeta.y_upper_bound,
                                  geoMeta.x_lower_bound:geoMeta.x_upper_bound]
        return arraySub
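
Despite its name, the routine above performs no MPI scatter: rank 0 broadcasts the full global grid and each rank slices out its own window, so every processor temporarily holds the whole array. A toy sketch of that broadcast-then-slice pattern, assuming mpi4py and a simple even row decomposition (the bounds are hypothetical stand-ins for geoMeta):

from mpi4py import MPI
import numpy as np

comm = MPI.COMM_WORLD
ny_global, nx_global = 8, 4                  # toy global grid dimensions
if comm.rank == 0:
    grid = np.arange(ny_global * nx_global, dtype=np.float32).reshape(ny_global, nx_global)
else:
    grid = np.empty((ny_global, nx_global), np.float32)
comm.Bcast(grid, root=0)                     # full grid lands on every rank
rows = ny_global // comm.size                # even row split for the sketch
sub = grid[comm.rank * rows:(comm.rank + 1) * rows, :]

A true comm.Scatterv would avoid the per-rank memory cost of the full global grid, at the price of computing send counts and displacements up front.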
Example #4
def ncar_topo_adj(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig):
    """
    Topographic adjustment of incoming shortwave radiation fluxes,
    given input parameters.
    :param input_forcings:
    :param ConfigOptions:
    :param GeoMetaWrfHydro:
    :param MpiConfig:
    :return:
    """
    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Performing topographic adjustment to incoming " \
                                  "shortwave radiation flux."
        err_handler.log_msg(ConfigOptions, MpiConfig)

    # Establish where we have missing values.
    try:
        indNdv = np.where(
            input_forcings.final_forcings == ConfigOptions.globalNdv)
    except:
        ConfigOptions.errMsg = "Unable to perform NDV search on input forcings"
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return

    # By the time this function has been called, necessary input static grids (height, slope, etc),
    # should have been calculated for each local slab of data.
    DEGRAD = math.pi / 180.0
    DPD = 360.0 / 365.0
    try:
        DECLIN, SOLCON = radconst(ConfigOptions)
    except:
        ConfigOptions.errMsg = "Unable to calculate solar constants based on datetime information."
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return

    try:
        coszen_loc, hrang_loc = calc_coszen(ConfigOptions, DECLIN,
                                            GeoMetaWrfHydro)
    except:
        ConfigOptions.errMsg = "Unable to calculate COSZEN or HRANG variables for topographic adjustment " \
                               "of incoming shortwave radiation"
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return

    try:
        TOPO_RAD_ADJ_DRVR(GeoMetaWrfHydro, input_forcings, coszen_loc, DECLIN,
                          SOLCON, hrang_loc)
    except:
        ConfigOptions.errMsg = "Unable to perform final topographic adjustment of incoming " \
                               "shortwave radiation fluxes."
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return

    # Assign missing values based on our mask.
    input_forcings.final_forcings[indNdv] = ConfigOptions.globalNdv

    # Reset variables to free up memory
    DECLIN = None
    SOLCON = None
    coszen_loc = None
    hrang_loc = None
    indNdv = None
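
The indNdv bookkeeping above is a recurring pattern in these routines: record where the missing values sit, let the adjustment touch the whole slab, then stamp the no-data value back in. The pattern in isolation (values hypothetical):

import numpy as np

ndv = -9999.0                                # stand-in for ConfigOptions.globalNdv
slab = np.array([[300.0, ndv], [280.0, 290.0]])
ind_ndv = np.where(slab == ndv)              # remember the missing cells
slab = slab * 1.01                           # any adjustment touches everything
slab[ind_ndv] = ndv                          # restore the mask afterwards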
Example #5
def open_netcdf_forcing(NetCdfFileIn, ConfigOptions, MpiConfig):
    """
    Generic function to open a NetCDF forcing file and verify that the
    expected coordinate variables are present.
    :param NetCdfFileIn:
    :param ConfigOptions:
    :param MpiConfig:
    """
    # Ensure all processors are synced up before outputting.
    #MpiConfig.comm.barrier()

    # Open the NetCDF file on the master processor and read in data.
    if MpiConfig.rank == 0:
        # Ensure file exists.
        if not os.path.isfile(NetCdfFileIn):
            ConfigOptions.errMsg = "Expected NetCDF file: " + NetCdfFileIn + \
                                    " not found."
            err_handler.log_critical(ConfigOptions, MpiConfig)
            idTmp = None
            pass

        # Open the NetCDF file.
        try:
            idTmp = Dataset(NetCdfFileIn, 'r')
        except:
            ConfigOptions.errMsg = "Unable to open input NetCDF file: " + \
                                    NetCdfFileIn
            err_handler.log_critical(ConfigOptions, MpiConfig)
            idTmp = None
            pass

        if idTmp is not None:
            # Check for expected lat/lon variables.
            if 'latitude' not in idTmp.variables.keys():
                ConfigOptions.errMsg = "Unable to locate latitude from: " + \
                                        NetCdfFileIn
                err_handler.log_critical(ConfigOptions, MpiConfig)
                idTmp = None
                pass
        if idTmp is not None:
            if 'longitude' not in idTmp.variables.keys():
                ConfigOptions.errMsg = "Unable t locate longitude from: " + \
                                        NetCdfFileIn
                err_handler.log_critical(ConfigOptions, MpiConfig)
                idTmp = None
                pass
        pass
    else:
        idTmp = None
    # check_program_status performs the collective synchronization across ranks.
    err_handler.check_program_status(ConfigOptions, MpiConfig)

    # Return the NetCDF file handle back to the user.
    return idTmp
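
With the netCDF4 package, the open-and-verify steps above reduce to a short pattern; a minimal sketch without the rank gating or logging (file name hypothetical):

from netCDF4 import Dataset

id_tmp = Dataset("forcing_input.nc", 'r')    # raises if the file is unreadable
for required in ('latitude', 'longitude'):
    if required not in id_tmp.variables:
        print("Missing expected variable:", required)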
Example #6
def simple_lapse(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig):
    """
    Function that applies a single lapse rate adjustment to modeled
    2-meter temperature by taking the difference of the native
    input elevation and the WRF-hydro elevation.
    :param input_forcings:
    :param ConfigOptions:
    :param GeoMetaWrfHydro:
    :param MpiConfig:
    :return:
    """
    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Applying simple lapse rate to temperature downscaling"
        err_handler.log_msg(ConfigOptions, MpiConfig)

    # Calculate the elevation difference.
    elevDiff = input_forcings.height - GeoMetaWrfHydro.height

    # Assign existing, un-downscaled temperatures to a temporary placeholder, which
    # will be used for specific humidity downscaling.
    if input_forcings.q2dDownscaleOpt > 0:
        input_forcings.t2dTmp[:, :] = input_forcings.final_forcings[4, :, :]

    # Apply single lapse rate value to the input 2-meter
    # temperature values.
    try:
        indNdv = np.where(
            input_forcings.final_forcings == ConfigOptions.globalNdv)
    except:
        ConfigOptions.errMsg = "Unable to perform NDV search on input forcings"
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return
    try:
        input_forcings.final_forcings[4,:,:] = input_forcings.final_forcings[4,:,:] + \
                                               (6.49/1000.0)*elevDiff
    except:
        ConfigOptions.errMsg = "Unable to apply lapse rate to input 2-meter temperatures."
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return

    input_forcings.final_forcings[indNdv] = ConfigOptions.globalNdv

    # Reset for memory efficiency
    indNdv = None
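
The constant 6.49/1000.0 is the standard-atmosphere lapse rate of 6.49 K per kilometer. A quick arithmetic check of the sign convention (numbers hypothetical): elevDiff is input height minus output height, so an output cell 500 m below the source grid warms by about 3.2 K.

elev_src, elev_out = 1500.0, 1000.0   # meters; source grid sits higher
t2d_src = 280.0                       # K on the source grid
elev_diff = elev_src - elev_out       # +500 m
t2d_out = t2d_src + (6.49 / 1000.0) * elev_diff
print(t2d_out)                        # 283.245 K: warmer at the lower cell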
Example #7
    def broadcast_parameter(self, value_broadcast, config_options):
        """
        Generic function for sending a parameter value out to the processors.
        :param value_broadcast:
        :param config_options:
        :return:
        """
        # Create dictionary to hold value.
        if self.rank == 0:
            tmp_dict = {'varTmp': value_broadcast}
        else:
            tmp_dict = None
        try:
            tmp_dict = self.comm.bcast(tmp_dict, root=0)
        except MPI.Exception:
            config_options.errMsg = "Unable to broadcast single value from rank 0."
            err_handler.log_critical(config_options, self)
            return None
        return tmp_dict['varTmp']
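
Unlike the numpy-based variant in Example #2, this version uses mpi4py's lowercase bcast, which pickles an arbitrary Python object: slower for large data, but it handles any type. The call in isolation:

from mpi4py import MPI

comm = MPI.COMM_WORLD
tmp_dict = {'varTmp': 3.14} if comm.rank == 0 else None
tmp_dict = comm.bcast(tmp_dict, root=0)   # pickle-based, works on any object
print(comm.rank, tmp_dict['varTmp'])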
Example #8
def pressure_down_classic(input_forcings, ConfigOptions, GeoMetaWrfHydro,
                          MpiConfig):
    """
    Generic function to downscale surface pressure to the WRF-Hydro domain.
    :param input_forcings:
    :param ConfigOptions:
    :param GeoMetaWrfHydro:
    :param MpiConfig:
    :return:
    """
    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Performing topographic adjustment to surface pressure."
        err_handler.log_msg(ConfigOptions, MpiConfig)

    # Calculate the elevation difference.
    elevDiff = input_forcings.height - GeoMetaWrfHydro.height

    # Assign existing, un-downscaled pressure values to a temporary placeholder, which
    # will be used for specific humidity downscaling.
    if input_forcings.q2dDownscaleOpt > 0:
        input_forcings.psfcTmp[:, :] = input_forcings.final_forcings[6, :, :]

    try:
        indNdv = np.where(
            input_forcings.final_forcings == ConfigOptions.globalNdv)
    except:
        ConfigOptions.errMsg = "Unable to perform NDV search on input forcings"
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return
    try:
        input_forcings.final_forcings[6,:,:] = input_forcings.final_forcings[6,:,:] +\
                                               (input_forcings.final_forcings[6,:,:]*elevDiff*9.8)/\
                                               (input_forcings.final_forcings[4,:,:]*287.05)
    except:
        ConfigOptions.errMsg = "Unable to downscale surface pressure to input forcings."
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return

    input_forcings.final_forcings[indNdv] = ConfigOptions.globalNdv

    # Reset for memory efficiency
    indNdv = None
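
The expression above is a linearized hydrostatic adjustment, delta_p = p * g * delta_z / (R_d * T), with g = 9.8 m s-2 and the dry-air gas constant R_d = 287.05 J kg-1 K-1; slab index 4 holds 2-meter temperature and index 6 holds surface pressure. A toy arithmetic check (numbers hypothetical):

g, r_d = 9.8, 287.05                  # m s-2 and J kg-1 K-1
psfc, t2d = 90000.0, 280.0            # Pa and K on the source grid
elev_diff = 500.0                     # source grid 500 m above the output cell
psfc_adj = psfc + (psfc * elev_diff * g) / (t2d * r_d)
print(round(psfc_adj))                # ~95487 Pa: higher pressure lower down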
Example #9
def q2_down_classic(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig):
    """
    NCAR function for downscaling 2-meter specific humidity using already downscaled
    2-meter temperature, unadjusted surface pressure, and downscaled surface
    pressure.
    :param input_forcings:
    :param ConfigOptions:
    :param GeoMetaWrfHydro:
    :param MpiConfig:
    :return:
    """
    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Performing topographic adjustment to specific humidity."
        err_handler.log_msg(ConfigOptions, MpiConfig)

    # Establish where we have missing values.
    try:
        indNdv = np.where(
            input_forcings.final_forcings == ConfigOptions.globalNdv)
    except:
        ConfigOptions.errMsg = "Unable to perform NDV search on input forcings"
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return

    # First calculate relative humidity given original surface pressure and 2-meter
    # temperature
    try:
        relHum = rel_hum(input_forcings, ConfigOptions)
    except:
        ConfigOptions.errMsg = "Unable to perform topographic downscaling of incoming " \
                               "specific humidity to relative humidity"
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return

    # Downscale 2-meter specific humidity
    try:
        q2Tmp = mixhum_ptrh(input_forcings, relHum, 2, ConfigOptions)
    except:
        ConfigOptions.errMsg = "Unable to perform topographic downscaling of " \
                               "incoming specific humidity"
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return
    input_forcings.final_forcings[5, :, :] = q2Tmp
    input_forcings.final_forcings[indNdv] = ConfigOptions.globalNdv
    q2Tmp = None
    indNdv = None
Example #10
def param_lapse(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig):
    """
    Function that applies an a priori lapse rate adjustment to modeled
    2-meter temperature by taking the difference of the native
    input elevation and the WRF-hydro elevation. It's assumed this lapse
    rate grid has already been regridded to the final output WRF-Hydro
    grid.
    :param input_forcings:
    :param ConfigOptions:
    :param GeoMetaWrfHydro:
    :param MpiConfig:
    :return:
    """
    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Applying aprior lapse rate grid to temperature downscaling"
        err_handler.log_msg(ConfigOptions, MpiConfig)

    # Calculate the elevation difference.
    elevDiff = input_forcings.height - GeoMetaWrfHydro.height

    if input_forcings.lapseGrid is None:
    #if not np.any(input_forcings.lapseGrid):
        # We have not read in our lapse rate file. Read it in, do extensive checks,
        # scatter the lapse rate grid out to individual processors, then apply the
        # lapse rate to the 2-meter temperature grid.
        if MpiConfig.rank == 0:
            while (True):
                # First ensure we have a parameter directory
                if input_forcings.paramDir == "NONE":
                    ConfigOptions.errMsg = "User has specified spatial temperature lapse rate " \
                                           "downscaling while no downscaling parameter directory " \
                                           "exists."
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                # Compose the path to the lapse rate grid file.
                lapsePath = input_forcings.paramDir + "/lapse_param.nc"
                if not os.path.isfile(lapsePath):
                    ConfigOptions.errMsg = "Expected lapse rate parameter file: " + \
                                           lapsePath + " does not exist."
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                # Open the lapse rate file. Check for the expected variable, along with
                # the dimension size to make sure everything matches up.
                try:
                    idTmp = Dataset(lapsePath,'r')
                except:
                    ConfigOptions.errMsg = "Unable to open parameter file: " + lapsePath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                if 'lapse' not in idTmp.variables.keys():
                    ConfigOptions.errMsg = "Expected 'lapse' variable not located in parameter " \
                                           "file: " + lapsePath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                try:
                    lapseTmp = idTmp.variables['lapse'][:,:]
                except:
                    ConfigOptions.errMsg = "Unable to extracte 'lapse' variable from parameter: " \
                                           "file: " + lapsePath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                # Check dimensions to ensure they match up to the output grid.
                if lapseTmp.shape[1] != GeoMetaWrfHydro.nx_global:
                    ConfigOptions.errMsg = "X-Dimension size mismatch between output grid and lapse " \
                                           "rate from parameter file: " + lapsePath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                if lapseTmp.shape[0] != GeoMetaWrfHydro.ny_global:
                    ConfigOptions.errMsg = "Y-Dimension size mismatch between output grid and lapse " \
                                           "rate from parameter file: " + lapsePath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                # Perform a quick search to ensure we don't have radical values.
                indTmp = np.where(lapseTmp < -10.0)
                if len(indTmp[0]) > 0:
                    ConfigOptions.errMsg = "Found anomolous negative values in the lapse rate grid from " \
                                           "parameter file: " + lapsePath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                indTmp = np.where(lapseTmp > 100.0)
                if len(indTmp[0]) > 0:
                    ConfigOptions.errMsg = "Found excessively high values in the lapse rate grid from " \
                                           "parameter file: " + lapsePath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                # Close the parameter lapse rate file.
                try:
                    idTmp.close()
                except:
                    ConfigOptions.errMsg = "Unable to close parameter file: " + lapsePath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                break
        else:
            lapseTmp = None
        err_handler.check_program_status(ConfigOptions, MpiConfig)

        # Scatter the lapse rate grid to the other processors.
        input_forcings.lapseGrid = MpiConfig.scatter_array(GeoMetaWrfHydro, lapseTmp, ConfigOptions)
        err_handler.check_program_status(ConfigOptions, MpiConfig)

    # Apply the local lapse rate grid to our local slab of 2-meter temperature data.
    temperature_grid_tmp = input_forcings.final_forcings[4, :, :]
    try:
        indNdv = np.where(input_forcings.final_forcings == ConfigOptions.globalNdv)
    except:
        ConfigOptions.errMsg = "Unable to perform NDV search on input " + \
                               input_forcings.productName + " regridded forcings."
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return
    try:
        indValid = np.where(temperature_grid_tmp != ConfigOptions.globalNdv)
    except:
        ConfigOptions.errMsg = "Unable to perform search for valid values on input " + \
                               input_forcings.productName + " regridded temperature forcings."
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return
    try:
        temperature_grid_tmp[indValid] = temperature_grid_tmp[indValid] + \
                                         ((input_forcings.lapseGrid[indValid]/1000.0) * elevDiff[indValid])
    except:
        ConfigOptions.errMsg = "Unable to apply spatial lapse rate values to input " + \
                               input_forcings.productName + " regridded temperature forcings."
        err_handler.log_critical(ConfigOptions, MpiConfig)
        return

    input_forcings.final_forcings[4,:,:] = temperature_grid_tmp
    input_forcings.final_forcings[indNdv] = ConfigOptions.globalNdv

    # Reset for memory efficiency
    indTmp = None
    indNdv = None
    indValid = None
    elevDiff = None
    temperature_grid_tmp = None
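
The rank-0 while (True) block above never iterates: it is a single-pass loop whose break statements serve as a structured early exit, so each failed check logs a critical error and falls through to the shared check_program_status call. The idiom in isolation (a sketch; the checks are hypothetical):

def load_checked(path):
    result = None
    while True:                            # executes exactly once
        if not path.endswith(".nc"):
            print("unexpected file type")  # log and bail out
            break
        # ... further checks, each ending in `break` on failure ...
        result = "loaded"                  # reached only if all checks pass
        break                              # unconditional exit after one pass
    return result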
Example #11
def open_grib2(GribFileIn, NetCdfFileOut, Wgrib2Cmd, ConfigOptions, MpiConfig,
               inputVar):
    """
    Generic function to convert a GRIB2 file into a NetCDF file. Function
    will also open the NetCDF file, and ensure all necessary inputs are
    in file.
    :param GribFileIn:
    :param NetCdfFileOut:
    :param Wgrib2Cmd:
    :param ConfigOptions:
    :param MpiConfig:
    :param inputVar:
    :return:
    """
    # Ensure all processors are synced up before outputting.
    # MpiConfig.comm.barrier()

    # Run wgrib2 command to convert GRIB2 file to NetCDF.
    if MpiConfig.rank == 0:
        # Check to see if the output file already exists. If so, it will be
        # overwritten.
        ConfigOptions.statusMsg = "Reading in GRIB2 file: " + GribFileIn
        err_handler.log_msg(ConfigOptions, MpiConfig)
        if os.path.isfile(NetCdfFileOut):
            ConfigOptions.statusMsg = "Overriding temporary NetCDF file: " + NetCdfFileOut
            err_handler.log_warning(ConfigOptions, MpiConfig)
        try:
            cmdOutput = subprocess.Popen([Wgrib2Cmd],
                                         stdout=subprocess.PIPE,
                                         stderr=subprocess.PIPE,
                                         shell=True)
            out, err = cmdOutput.communicate()
            exitcode = cmdOutput.returncode
            #subprocess.run([Wgrib2Cmd],shell=True)
        except:
            ConfigOptions.errMsg = "Unable to convert: " + GribFileIn + " to " + \
                                   NetCdfFileOut
            err_handler.log_critical(ConfigOptions, MpiConfig)
            idTmp = None
            pass

        # Reset temporary subprocess variables.
        out = None
        err = None
        exitcode = None

        # Ensure file exists.
        if not os.path.isfile(NetCdfFileOut):
            ConfigOptions.errMsg = "Expected NetCDF file: " + NetCdfFileOut + \
                                   " not found. It's possible the GRIB2 variable was not found."
            err_handler.log_critical(ConfigOptions, MpiConfig)
            idTmp = None
            pass

        # Open the NetCDF file.
        try:
            idTmp = Dataset(NetCdfFileOut, 'r')
        except:
            ConfigOptions.errMsg = "Unable to open input NetCDF file: " + \
                                   NetCdfFileOut
            err_handler.log_critical(ConfigOptions, MpiConfig)
            idTmp = None
            pass

        if idTmp is not None:
            # Check for expected lat/lon variables.
            if 'latitude' not in idTmp.variables.keys():
                ConfigOptions.errMsg = "Unable to locate latitude from: " + \
                                       GribFileIn
                err_handler.log_critical(ConfigOptions, MpiConfig)
                idTmp = None
                pass
        if idTmp is not None:
            if 'longitude' not in idTmp.variables.keys():
                ConfigOptions.errMsg = "Unable t locate longitude from: " + \
                                       GribFileIn
                err_handler.log_critical(ConfigOptions, MpiConfig)
                idTmp = None
                pass

        if idTmp is not None:
            # Loop through all the expected variables.
            if inputVar not in idTmp.variables.keys():
                ConfigOptions.errMsg = "Unable to locate expected variable: " + \
                                       inputVar + " in: " + NetCdfFileOut
                err_handler.log_critical(ConfigOptions, MpiConfig)
                idTmp = None
                pass
    else:
        idTmp = None

    # Ensure all processors are synced up before outputting.
    # MpiConfig.comm.barrier()  ## THIS HAPPENS IN check_program_status

    err_handler.check_program_status(ConfigOptions, MpiConfig)

    # Return the NetCDF file handle back to the user.
    return idTmp
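
One caveat in the block above: out, err, and exitcode are captured and then reset without ever being inspected, so a failed wgrib2 run only surfaces later when the expected NetCDF file is missing. A sketch of the same subprocess call with the return code actually checked (the command string is a hypothetical stand-in for Wgrib2Cmd):

import subprocess

cmd = "wgrib2 input.grib2 -netcdf output.nc"
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE, shell=True)
out, err = proc.communicate()
if proc.returncode != 0:
    print("conversion failed:", err.decode(errors="replace"))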
Example #12
def read_rqi_monthly_climo(ConfigOptions, MpiConfig, supplemental_precip,
                           GeoMetaWrfHydro):
    """
    Function to read in monthly RQI grids on the NWM grid. This is an NWM ONLY
    option. Please do not activate if not executing on the NWM conus grid.
    :param ConfigOptions:
    :param MpiConfig:
    :param supplemental_precip:
    :param GeoMetaWrfHydro:
    :return:
    """
    # Ensure all processors are synced up before proceeding.
    #MpiConfig.comm.barrier()

    # First check to see if the RQI grids have valid values in them. There should
    # be NO NDV values if the grids have properly been read in.
    indTmp = np.where(
        supplemental_precip.regridded_rqi2 != ConfigOptions.globalNdv)

    rqiPath = ConfigOptions.supp_precip_param_dir + "/MRMS_WGT_RQI0.9_m" + \
              supplemental_precip.pcp_date2.strftime('%m') + '_v1.1_geosmth.nc'

    if len(indTmp[0]) == 0:
        # We haven't initialized the RQI fields, so do that now.
        if MpiConfig.rank == 0:
            ConfigOptions.statusMsg = "Reading in RQI Parameter File: " + rqiPath
            err_handler.log_msg(ConfigOptions, MpiConfig)
            # First make sure the RQI file exists.
            if not os.path.isfile(rqiPath):
                ConfigOptions.errMsg = "Expected RQI parameter file: " + rqiPath + " not found."
                err_handler.log_critical(ConfigOptions, MpiConfig)
                pass

            # Open the Parameter file.
            try:
                idTmp = Dataset(rqiPath, 'r')
            except:
                ConfigOptions.errMsg = "Unable to open parameter file: " + rqiPath
                pass

            # Extract out the RQI grid.
            try:
                varTmp = idTmp.variables['POP_0mabovemeansealevel'][0, :, :]
            except:
                ConfigOptions.errMsg = "Unable to extract POP_0mabovemeansealevel from parameter file: " + rqiPath
                err_handler.log_critical(ConfigOptions, MpiConfig)
                pass

            # Sanity checking on grid size.
            if varTmp.shape[0] != GeoMetaWrfHydro.ny_global or \
                    varTmp.shape[1] != GeoMetaWrfHydro.nx_global:
                ConfigOptions.errMsg = "Improper dimension sizes for POP_0mabovemeansealevel " \
                                       "in parameter file: " + rqiPath
                err_handler.log_critical(ConfigOptions, MpiConfig)
                pass
        else:
            idTmp = None
            varTmp = None
        err_handler.check_program_status(ConfigOptions, MpiConfig)

        # Scatter the array out to the local processors
        varSubTmp = MpiConfig.scatter_array(GeoMetaWrfHydro, varTmp,
                                            ConfigOptions)
        err_handler.check_program_status(ConfigOptions, MpiConfig)

        supplemental_precip.regridded_rqi2[:, :] = varSubTmp

        # Reset variables for memory purposes
        varSubTmp = None
        varTmp = None

        # Close the RQI NetCDF file
        if MpiConfig.rank == 0:
            try:
                idTmp.close()
            except:
                ConfigOptions.errMsg = "Unable to close parameter file: " + rqiPath
                err_handler.log_critical(ConfigOptions, MpiConfig)
                pass
        err_handler.check_program_status(ConfigOptions, MpiConfig)

    # Also check to see if we have switched to a new month based on the previous
    # MRMS step and the current one.
    if supplemental_precip.pcp_date2.month != supplemental_precip.pcp_date1.month:
        # We need to read in a new RQI monthly grid.
        if MpiConfig.rank == 0:
            ConfigOptions.statusMsg = "Reading in RQI Parameter File: " + rqiPath
            err_handler.log_msg(ConfigOptions, MpiConfig)
            # First make sure the RQI file exists.
            if not os.path.isfile(rqiPath):
                ConfigOptions.errMsg = "Expected RQI parameter file: " + rqiPath + " not found."
                err_handler.log_critical(ConfigOptions, MpiConfig)
                pass

            # Open the Parameter file.
            try:
                idTmp = Dataset(rqiPath, 'r')
            except:
                ConfigOptions.errMsg = "Unable to open parameter file: " + rqiPath
                pass

            # Extract out the RQI grid.
            try:
                varTmp = idTmp.variables['POP_0mabovemeansealevel'][0, :, :]
            except:
                ConfigOptions.errMsg = "Unable to extract POP_0mabovemeansealevel from parameter file: " + rqiPath
                err_handler.log_critical(ConfigOptions, MpiConfig)
                pass

            # Sanity checking on grid size.
            if varTmp.shape[0] != GeoMetaWrfHydro.ny_global or \
                    varTmp.shape[1] != GeoMetaWrfHydro.nx_global:
                ConfigOptions.errMsg = "Improper dimension sizes for POP_0mabovemeansealevel " \
                                       "in parameter file: " + rqiPath
                err_handler.log_critical(ConfigOptions, MpiConfig)
                pass
        else:
            idTmp = None
            varTmp = None
        err_handler.check_program_status(ConfigOptions, MpiConfig)

        # Scatter the array out to the local processors
        varSubTmp = MpiConfig.scatter_array(GeoMetaWrfHydro, varTmp,
                                            ConfigOptions)
        err_handler.check_program_status(ConfigOptions, MpiConfig)

        supplemental_precip.regridded_rqi2[:, :] = varSubTmp

        # Reset variables for memory purposes
        varSubTmp = None
        varTmp = None

        # Close the RQI NetCDF file
        if MpiConfig.rank == 0:
            try:
                idTmp.close()
            except:
                ConfigOptions.errMsg = "Unable to close parameter file: " + rqiPath
                err_handler.log_critical(ConfigOptions, MpiConfig)
                pass
        err_handler.check_program_status(ConfigOptions, MpiConfig)
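
The second read/scatter block runs when the supplemental precipitation timestep crosses a month boundary, which is detected with a plain month comparison between consecutive dates. A toy check (dates hypothetical):

import datetime

pcp_date1 = datetime.datetime(2023, 5, 31, 23)  # previous MRMS step
pcp_date2 = datetime.datetime(2023, 6, 1, 0)    # current MRMS step
if pcp_date2.month != pcp_date1.month:
    print("new month: re-read the monthly RQI climatology grid")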
Example #13
def open_grib2(GribFileIn, NetCdfFileOut, Wgrib2Cmd, ConfigOptions, MpiConfig,
               inputVar):
    """
    Generic function to convert a GRIB2 file into a NetCDF file. Function
    will also open the NetCDF file, and ensure all necessary inputs are
    in file.
    :param GribFileIn:
    :param NetCdfFileOut:
    :param Wgrib2Cmd:
    :param ConfigOptions:
    :param MpiConfig:
    :param inputVar:
    :return:
    """
    # Ensure all processors are synced up before outputting.
    # MpiConfig.comm.barrier()

    # Run wgrib2 command to convert GRIB2 file to NetCDF.
    if MpiConfig.rank == 0:
        # Check to see if the output file already exists. If so, it will be
        # overwritten.
        ConfigOptions.statusMsg = "Reading in GRIB2 file: " + GribFileIn
        err_handler.log_msg(ConfigOptions, MpiConfig)
        if os.path.isfile(NetCdfFileOut):
            ConfigOptions.statusMsg = "Overriding temporary NetCDF file: " + NetCdfFileOut
            err_handler.log_warning(ConfigOptions, MpiConfig)
        try:
            # WCOSS fix for WGRIB2 crashing when called on the same file twice in python
            if not os.environ.get('MFE_SILENT'):
                print("command: " + Wgrib2Cmd)

            # set up GRIB2TABLE if needed:
            if not os.environ.get('GRIB2TABLE'):
                g2path = os.path.join(ConfigOptions.scratch_dir, "grib2.tbl")
                with open(g2path, 'wt') as g2t:
                    g2t.write(
                        "209:1:0:0:161:1:6:30:MultiSensorQPE01H:"
                        "Multi-sensor estimated precipitation accumulation 1-hour:mm\n"
                        "209:1:0:0:161:1:6:37:MultiSensorQPE01H:"
                        "Multi-sensor estimated precipitation accumulation 1-hour:mm\n"
                    )
                os.environ['GRIB2TABLE'] = g2path

            exitcode = subprocess.call(Wgrib2Cmd, shell=True)

            #print("exitcode: " + str(exitcode))
            # Call WGRIB2 with subprocess.Popen
            #cmdOutput = subprocess.Popen([Wgrib2Cmd], stdout=subprocess.PIPE,
            #                             stderr=subprocess.PIPE, shell=True)
            #out, err = cmdOutput.communicate()
            #exitcode = cmdOutput.returncode
        except:
            ConfigOptions.errMsg = "Unable to convert: " + GribFileIn + " to " + \
                                   NetCdfFileOut
            err_handler.log_critical(ConfigOptions, MpiConfig)
            idTmp = None
            pass

        # Reset the temporary subprocess variable (out/err are unused in this
        # version since the Popen path is commented out).
        exitcode = None

        # Ensure file exists.
        if not os.path.isfile(NetCdfFileOut):
            ConfigOptions.errMsg = "Expected NetCDF file: " + NetCdfFileOut + \
                                   " not found. It's possible the GRIB2 variable was not found."
            err_handler.log_critical(ConfigOptions, MpiConfig)
            idTmp = None
            pass

        # Open the NetCDF file.
        try:
            idTmp = Dataset(NetCdfFileOut, 'r')
        except:
            ConfigOptions.errMsg = "Unable to open input NetCDF file: " + \
                                   NetCdfFileOut
            err_handler.log_critical(ConfigOptions, MpiConfig)
            idTmp = None
            pass

        if idTmp is not None:
            # Check for expected lat/lon variables.
            if 'latitude' not in idTmp.variables.keys():
                ConfigOptions.statusMsg = "Unable to locate latitude from: " + \
                                       GribFileIn
                err_handler.log_warning(ConfigOptions, MpiConfig)
                # idTmp = None
                pass
        if idTmp is not None:
            if 'longitude' not in idTmp.variables.keys():
                ConfigOptions.statusMsg = "Unable to locate longitude from: " + \
                                       GribFileIn
                err_handler.log_warning(ConfigOptions, MpiConfig)
                # idTmp = None
                pass

        if idTmp is not None and inputVar is not None:
            # Loop through all the expected variables.
            if inputVar not in idTmp.variables.keys():
                ConfigOptions.errMsg = "Unable to locate expected variable: " + \
                                       inputVar + " in: " + NetCdfFileOut
                err_handler.log_critical(ConfigOptions, MpiConfig)
                idTmp = None
                pass
    else:
        idTmp = None

    # Ensure all processors are synced up before outputting.
    # MpiConfig.comm.barrier()  ## THIS HAPPENS IN check_program_status

    err_handler.check_program_status(ConfigOptions, MpiConfig)

    # Return the NetCDF file handle back to the user.
    return idTmp
Example #14
    def output_final_ldasin(self, ConfigOptions, geoMetaWrfHydro, MpiConfig):
        """
        Output routine to produce final LDASIN files for the WRF-Hydro
        modeling system. This function is assuming all regridding,
        interpolation, downscaling, and bias correction has occurred
        on the necessary input forcings to generate a final set of
        outputs on the output grid. Since this program is run in parallel,
        all work is done on local "slabs" of data for each processor to
        make the code more efficient. On this end, this function will
        collect the "slabs" into final output grids that go into the
        output files. In addition, detailed geospatial metadata is translated
        from the input geogrid file, to the final output files.
        :param ConfigOptions:
        :param geoMetaWrfHydro:
        :param MpiConfig:
        :return:
        """
        output_variable_attribute_dict = {
            'U2D': [
                0, 'm s-1', 'x_wind', '10-m U-component of wind',
                'time: point', 0.001, 0.0, 3
            ],
            'V2D': [
                1, 'm s-1', 'y_wind', '10-m V-component of wind',
                'time: point', 0.001, 0.0, 3
            ],
            'LWDOWN': [
                2, 'W m-2', 'surface_downward_longwave_flux',
                'Surface downward long-wave radiation flux', 'time: point',
                0.001, 0.0, 3
            ],
            'RAINRATE': [
                3, 'mm s^-1', 'precipitation_flux',
                'Surface Precipitation Rate', 'time: mean', 1.0, 0.0, 0
            ],
            'T2D': [
                4, 'K', 'air_temperature', '2-m Air Temperature',
                'time: point', 0.01, 100.0, 2
            ],
            'Q2D': [
                5, 'kg kg-1', 'surface_specific_humidity',
                '2-m Specific Humidity', 'time: point', 0.000001, 0.0, 6
            ],
            'PSFC': [
                6, 'Pa', 'air_pressure', 'Surface Pressure', 'time: point',
                0.1, 0.0, 1
            ],
            'SWDOWN': [
                7, 'W m-2', 'surface_downward_shortwave_flux',
                'Surface downward short-wave radiation flux', 'time: point',
                0.001, 0.0, 3
            ],
            'LQFRAC': [
                8, '%', 'liquid_water_fraction',
                'Fraction of precipitation that is liquid vs. frozen',
                'time: point', 0.1, 0.0, 3
            ]
        }
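        # Positional layout of each attribute list above, inferred from the
        # indices used later in this routine:
        #   [0] output slab index   [1] units         [2] standard_name
        #   [3] long_name           [4] cell_methods  [5] scale_factor
        #   [6] add_offset          [7] least_significant_digit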

        # Compose the ESMF remapped string attribute based on the regridding option chosen by the user.
        # We will default to the regridding method chosen for the first input forcing selected.
        if ConfigOptions.regrid_opt[0] == 1:
            regrid_att = "remapped via ESMF regrid_with_weights: Bilinear"
        elif ConfigOptions.regrid_opt[0] == 2:
            regrid_att = "remapped via ESMF regrid_with_weights: Nearest Neighbor"
        elif ConfigOptions.regrid_opt[0] == 3:
            regrid_att = "remapped via ESMF regrid_with_weights: Conservative Bilinear"
        else:
            # Fallback so regrid_att is always defined if an unexpected regrid
            # option slips through (assumed wording for the attribute).
            regrid_att = "remapped via ESMF regrid_with_weights"

        # Ensure all processors are synced up before outputting.
        #MpiConfig.comm.barrier()

        idOut = None
        if MpiConfig.rank == 0:
            while (True):
                # Only output on the master processor.
                try:
                    idOut = Dataset(self.outPath, 'w')
                except Exception as e:
                    ConfigOptions.errMsg = "Unable to create output file: " + self.outPath + "\n" + str(
                        e)
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                # Create dimensions.
                try:
                    idOut.createDimension("time", None)
                except:
                    ConfigOptions.errMsg = "Unable to create time dimension in: " + self.outPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                try:
                    idOut.createDimension("y", geoMetaWrfHydro.ny_global)
                except:
                    ConfigOptions.errMsg = "Unable to create y dimension in: " + self.outPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                try:
                    idOut.createDimension("x", geoMetaWrfHydro.nx_global)
                except:
                    ConfigOptions.errMsg = "Unable to create x dimension in: " + self.outPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                try:
                    idOut.createDimension("reference_time", 1)
                except:
                    ConfigOptions.errMsg = "Unable to create reference_time dimension in: " + self.outPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                # Set global attributes
                try:
                    idOut.model_output_valid_time = self.outDate.strftime(
                        "%Y-%m-%d_%H:%M:00")
                except:
                    ConfigOptions.errMsg = "Unable to set the model_output_valid_time attribute in :" + self.outPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                try:
                    if ConfigOptions.ana_flag:
                        idOut.model_initialization_time = ConfigOptions.e_date_proc.strftime(
                            "%Y-%m-%d_%H:%M:00")
                    else:
                        idOut.model_initialization_time = ConfigOptions.current_fcst_cycle.strftime(
                            "%Y-%m-%d_%H:%M:00")
                except:
                    ConfigOptions.errMsg = "Unable to set the model_initialization_time global " \
                                           "attribute in: " + self.outPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                if ConfigOptions.nwmVersion is not None:
                    try:
                        idOut.NWM_version_number = "v" + str(
                            ConfigOptions.nwmVersion)
                    except:
                        ConfigOptions.errMsg = "Unable to set the NWM_version_number global attribute in: " \
                                               + self.outPath
                        err_handler.log_critical(ConfigOptions, MpiConfig)
                        break

                if ConfigOptions.nwmConfig is not None:
                    try:
                        idOut.model_configuration = ConfigOptions.nwmConfig
                    except:
                        ConfigOptions.errMsg = "Unable to set the model_configuration global attribute in: " + \
                                               self.outPath
                        err_handler.log_critical(ConfigOptions, MpiConfig)
                        break

                try:
                    idOut.model_output_type = "forcing"
                except:
                    ConfigOptions.errMsg = "Unable to put model_output_type global attribute in: " + self.outPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                try:
                    idOut.model_total_valid_times = float(
                        ConfigOptions.num_output_steps)
                except:
                    ConfigOptions.errMsg = "Unable to create total_valid_times global attribute in: " + self.outPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                # Create variables.
                try:
                    idOut.createVariable('time', 'i4', ('time'))
                except:
                    ConfigOptions.errMsg = "Unable to create time variable in: " + self.outPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                try:
                    idOut.createVariable('reference_time', 'i4',
                                         ('reference_time'))
                except:
                    ConfigOptions.errMsg = "Unable to create reference_time variable in: " + self.outPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                # Populate time and reference time variables with appropriate attributes and time values.
                try:
                    idOut.variables[
                        'time'].units = "minutes since 1970-01-01 00:00:00 UTC"
                except:
                    ConfigOptions.errMsg = "Unable to create time units attribute in: " + self.outPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                try:
                    idOut.variables['time'].standard_name = "time"
                except:
                    ConfigOptions.errMsg = "Unable to create time standard_name attribute in: " + self.outPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                try:
                    idOut.variables['time'].long_name = "valid output time"
                except:
                    ConfigOptions.errMsg = "Unable to create time long_name attribute in: " + self.outPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                try:
                    idOut.variables[
                        'reference_time'].units = "minutes since 1970-01-01 00:00:00 UTC"
                except:
                    ConfigOptions.errMsg = "Unable to create reference_time units attribute in: " + self.outPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                try:
                    idOut.variables[
                        'reference_time'].standard_name = "forecast_reference_time"
                except:
                    ConfigOptions.errMsg = "Unable to create reference_time standard_name attribute in: " + \
                                           self.outPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                try:
                    idOut.variables[
                        'reference_time'].long_name = "model initialization time"
                except:
                    ConfigOptions.errMsg = "Unable to create reference_time long_name attribute in: " + self.outPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                # Populate time variables
                dEpoch = datetime.datetime(1970, 1, 1)
                dtValid = self.outDate - dEpoch
                dtRef = ConfigOptions.current_fcst_cycle - dEpoch

                try:
                    idOut.variables['time'][0] = int(dtValid.days * 24.0 * 60.0) + \
                                                 int(math.floor(dtValid.seconds / 60.0))
                except:
                    ConfigOptions.errMsg = "Unable to populate the time variable in: " + self.outPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                try:
                    idOut.variables['reference_time'][0] = int(dtRef.days * 24.0 * 60.0) + \
                                                           int(math.floor(dtRef.seconds / 60.0))
                except:
                    ConfigOptions.errMsg = "Unable to populate the time variable in: " + self.outPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                # Create geospatial metadata coordinate variables if data was read in from an optional
                # spatial metadata file.
                if ConfigOptions.spatial_meta is not None:
                    # Create coordinate variables and populate with attributes read in.
                    try:
                        if ConfigOptions.useCompression == 1:
                            idOut.createVariable('x',
                                                 'f8', ('x'),
                                                 zlib=True,
                                                 complevel=2)
                        else:
                            idOut.createVariable('x', 'f8', ('x'))
                    except:
                        ConfigOptions.errMsg = "Unable to create x variable in: " + self.outPath
                        err_handler.log_critical(ConfigOptions, MpiConfig)
                        break
                    try:
                        idOut.variables['x'].setncatts(
                            geoMetaWrfHydro.x_coord_atts)
                    except:
                        ConfigOptions.errMsg = "Unable to establish x coordinate attributes in: " + self.outPath
                        err_handler.log_critical(ConfigOptions, MpiConfig)
                        break
                    try:
                        idOut.variables['x'][:] = geoMetaWrfHydro.x_coords
                    except:
                        ConfigOptions.errMsg = "Unable to place x coordinate values into output variable " \
                                               "for output file: " + self.outPath
                        err_handler.log_critical(ConfigOptions, MpiConfig)
                        break

                    try:
                        if ConfigOptions.useCompression == 1:
                            idOut.createVariable('y',
                                                 'f8', ('y'),
                                                 zlib=True,
                                                 complevel=2)
                        else:
                            idOut.createVariable('y', 'f8', ('y'))
                    except:
                        ConfigOptions.errMsg = "Unable to create y variable in: " + self.outPath
                        err_handler.log_critical(ConfigOptions, MpiConfig)
                        break
                    try:
                        idOut.variables['y'].setncatts(
                            geoMetaWrfHydro.y_coord_atts)
                    except:
                        ConfigOptions.errMsg = "Unable to establish y coordinate attributes in: " + self.outPath
                        err_handler.log_critical(ConfigOptions, MpiConfig)
                        break
                    try:
                        idOut.variables['y'][:] = geoMetaWrfHydro.y_coords
                    except:
                        ConfigOptions.errMsg = "Unable to place y coordinate values into output variable " \
                                               "for output file: " + self.outPath
                        err_handler.log_critical(ConfigOptions, MpiConfig)
                        break

                    try:
                        idOut.createVariable('crs', 'S1')
                    except:
                        ConfigOptions.errMsg = "Unable to create crs in: " + self.outPath
                        err_handler.log_critical(ConfigOptions, MpiConfig)
                        break
                    try:
                        idOut.variables['crs'].setncatts(
                            geoMetaWrfHydro.crs_atts)
                    except:
                        ConfigOptions.errMsg = "Unable to establish crs attributes in: " + self.outPath
                        err_handler.log_critical(ConfigOptions, MpiConfig)
                        break

                # Loop through and create each variable, along with expected attributes.
                for varTmp in output_variable_attribute_dict:
                    try:
                        if ConfigOptions.useCompression:
                            zlib = True
                            complevel = 2
                            least_significant_digit = None if varTmp == 'RAINRATE' else \
                                output_variable_attribute_dict[varTmp][7]       # use all digits in RAINRATE
                        else:
                            zlib = False
                            complevel = 0
                            least_significant_digit = None

                        if ConfigOptions.useFloats or varTmp == 'RAINRATE':  # RAINRATE always a float
                            fill_value = ConfigOptions.globalNdv
                            dtype = 'f4'
                        else:
                            fill_value = int(ConfigOptions.globalNdv)
                            #fill_value = int((ConfigOptions.globalNdv - output_variable_attribute_dict[varTmp][6]) /
                            #                 output_variable_attribute_dict[varTmp][5])
                            dtype = 'i4'

                        idOut.createVariable(
                            varTmp,
                            dtype, ('time', 'y', 'x'),
                            fill_value=fill_value,
                            zlib=zlib,
                            complevel=complevel,
                            least_significant_digit=least_significant_digit)

                    except:
                        ConfigOptions.errMsg = "Unable to create " + varTmp + " variable in: " + self.outPath
                        err_handler.log_critical(ConfigOptions, MpiConfig)
                        break
                    try:
                        idOut.variables[
                            varTmp].cell_methods = output_variable_attribute_dict[
                                varTmp][4]
                    except:
                        ConfigOptions.errMsg = "Unable to create cell_methods attribute for: " + varTmp + \
                            " in: " + self.outPath
                        err_handler.log_critical(ConfigOptions, MpiConfig)
                        break
                    try:
                        idOut.variables[varTmp].remap = regrid_att
                    except:
                        ConfigOptions.errMsg = "Unable to create remap attribute for: " + varTmp + \
                            " in: " + self.outPath
                        err_handler.log_critical(ConfigOptions, MpiConfig)
                        break
                    # Place geospatial metadata attributes in if we have them.
                    if ConfigOptions.spatial_meta is not None:
                        try:
                            idOut.variables[varTmp].grid_mapping = 'crs'
                        except:
                            ConfigOptions.errMsg = "Unable to create grid_mapping attribute for: " + \
                                                   varTmp + " in: " + self.outPath
                            err_handler.log_critical(ConfigOptions, MpiConfig)
                            break
                        if 'esri_pe_string' in geoMetaWrfHydro.crs_atts.keys():
                            try:
                                idOut.variables[
                                    varTmp].esri_pe_string = geoMetaWrfHydro.crs_atts[
                                        'esri_pe_string']
                            except:
                                ConfigOptions.errMsg = "Unable to create esri_pe_string attribute for: " + \
                                                       varTmp + " in: " + self.outPath
                                err_handler.log_critical(
                                    ConfigOptions, MpiConfig)
                                break
                        if 'proj4' in geoMetaWrfHydro.spatial_global_atts.keys():
                            try:
                                idOut.variables[
                                    varTmp].proj4 = geoMetaWrfHydro.spatial_global_atts[
                                        'proj4']
                            except:
                                ConfigOptions.errMsg = "Unable to create proj4 attribute for: " + varTmp + \
                                    " in: " + self.outPath
                                err_handler.log_critical(
                                    ConfigOptions, MpiConfig)
                                break

                    try:
                        idOut.variables[
                            varTmp].units = output_variable_attribute_dict[
                                varTmp][1]
                    except:
                        ConfigOptions.errMsg = "Unable to create units attribute for: " + varTmp + " in: " + \
                            self.outPath
                        err_handler.log_critical(ConfigOptions, MpiConfig)
                        break
                    try:
                        idOut.variables[
                            varTmp].standard_name = output_variable_attribute_dict[
                                varTmp][2]
                    except:
                        ConfigOptions.errMsg = "Unable to create standard_name attribute for: " + varTmp + \
                            " in: " + self.outPath
                        err_handler.log_critical(ConfigOptions, MpiConfig)
                        break
                    try:
                        idOut.variables[
                            varTmp].long_name = output_variable_attribute_dict[
                                varTmp][3]
                    except:
                        ConfigOptions.errMsg = "Unable to create long_name attribute for: " + varTmp + \
                            " in: " + self.outPath
                        err_handler.log_critical(ConfigOptions, MpiConfig)
                        break
                    # If we are using scale_factor / add_offset, create here.
                    if not ConfigOptions.useFloats:
                        if varTmp != 'RAINRATE':
                            try:
                                idOut.variables[
                                    varTmp].scale_factor = output_variable_attribute_dict[
                                        varTmp][5]
                            except (ValueError, IOError):
                                ConfigOptions.errMsg = "Unable to create scale_factor attribute for: " + varTmp + \
                                                       " in: " + self.outPath
                                err_handler.log_critical(
                                    ConfigOptions, MpiConfig)
                                break
                            try:
                                idOut.variables[
                                    varTmp].add_offset = output_variable_attribute_dict[
                                        varTmp][6]
                            except (ValueError, IOError):
                                ConfigOptions.errMsg = "Unable to create add_offset attribute for: " + varTmp + \
                                                       " in: " + self.outPath
                                err_handler.log_critical(
                                    ConfigOptions, MpiConfig)
                                break
                break

        err_handler.check_program_status(ConfigOptions, MpiConfig)

        # Now loop through each variable, collect the data (call on each processor), assemble into the final
        # output grid, and place into the output file (if on processor 0).
        for varTmp in output_variable_attribute_dict:
            # First run a check for missing values. There should be none at this point.
            # err_handler.check_missing_final(self.outPath, ConfigOptions, self.output_local[output_variable_attribute_dict[varTmp][0], :, :],
            #                                 varTmp, MpiConfig)
            # if ConfigOptions.errFlag == 1:
            #     continue

            # Collect data from the various processors, and place into the output file.
            try:
                # TODO change communication call from comm.gather() to comm.Gather for efficiency
                # final = MpiConfig.comm.gather(self.output_local[output_variable_attribute_dict[varTmp][0],:,:],root=0)

                # Use gatherv to merge the data slabs
                dataOutTmp = MpiConfig.merge_slabs_gatherv(
                    self.output_local[
                        output_variable_attribute_dict[varTmp][0], :, :],
                    ConfigOptions)
            except Exception as e:
                print(e)
                ConfigOptions.errMsg = "Unable to gather final grids for: " + varTmp
                err_handler.log_critical(ConfigOptions, MpiConfig)
                continue

            if MpiConfig.rank == 0:
                try:
                    idOut.variables[varTmp][0, :, :] = dataOutTmp
                except (ValueError, IOError):
                    ConfigOptions.errMsg = "Unable to place final output grid for: " + varTmp
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                # Reset temporary data objects to keep memory usage down.
                del dataOutTmp


            err_handler.check_program_status(ConfigOptions, MpiConfig)

        if MpiConfig.rank == 0:
            while True:
                # Close the NetCDF file
                try:
                    idOut.close()
                except (ValueError, IOError):
                    ConfigOptions.errMsg = "Unable to close output file: " + self.outPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                break

        err_handler.check_program_status(ConfigOptions, MpiConfig)
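The writer above packs most variables with scale_factor / add_offset whenever ConfigOptions.useFloats is off. For reference, here is a minimal, self-contained sketch of that netCDF4 packing pattern (not from the forcing engine; the file name demo_pack.nc and the sample values are made up). With auto-scaling on, which is netCDF4-python's default, float assignments to an integer variable are packed using the attributes and unpacked again on read:

import numpy as np
from netCDF4 import Dataset

# Hypothetical demo file; with auto-scaling on (the default), netCDF4
# packs float assignments into the int variable using the attributes.
with Dataset("demo_pack.nc", "w") as nc:
    nc.createDimension("y", 2)
    nc.createDimension("x", 3)
    var = nc.createVariable("T2D", "i4", ("y", "x"), fill_value=-9999)
    var.units = "K"
    var.scale_factor = 0.01   # packed = round((value - add_offset) / scale_factor)
    var.add_offset = 273.15
    var[0:2, 0:3] = np.array([[273.15, 274.0, 275.5],
                              [280.25, 290.0, 300.75]], dtype=np.float32)

with Dataset("demo_pack.nc") as nc:
    print(nc.variables["T2D"][:])   # unpacked back to floats on read

The attributes must exist before any data is written, which matches the order used above: attributes first, grids later.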
Example #15
0
    def scatter_array(self, geoMeta, src_array, config_options):
        """
            Generic function for calling scatter functons based on
            the input dataset type.
            :param geoMeta:
            :param src_array:
            :param ConfigOptions:
            :return:
        """

        # Determine which type of input array we have based on the
        # type of numpy array.
        data_type_flag = -1
        if self.rank == 0:
            if src_array.dtype == np.float32:
                data_type_flag = 1
            if src_array.dtype == np.float64:
                data_type_flag = 2

        # Broadcast the numpy datatype to the other processors.
        if self.rank == 0:
            tmp_dict = {'varTmp': data_type_flag}
        else:
            tmp_dict = None
        try:
            tmp_dict = self.comm.bcast(tmp_dict, root=0)
        except MPI.Exception:
            config_options.errMsg = "Unable to broadcast numpy datatype value from rank 0"
            err_handler.log_critical(config_options, self)
            return None
        data_type_flag = tmp_dict['varTmp']

        # All-gather the local buffer bounds so every rank knows each region.
        if geoMeta.has_cache:
            x_lower = geoMeta.global_x_lower
            y_lower = geoMeta.global_y_lower
            x_upper = geoMeta.global_x_upper
            y_upper = geoMeta.global_y_upper
        else:
            try:
                x_lower = np.asarray(self.comm.allgather(np.int32(geoMeta.x_lower_bound)))
            except:
                config_options.errMsg = "Failed all gathering buffer x lower at rank " + str(self.comm.rank)
                err_handler.log_critical(config_options, self)
                return None

            try:
                y_lower = np.asarray(self.comm.allgather(np.int32(geoMeta.y_lower_bound)))
            except:
                config_options.errMsg = "Failed all gathering buffer y lower at rank " + str(self.comm.rank)
                err_handler.log_critical(config_options, self)
                return None

            try:
                x_upper = np.asarray(self.comm.allgather(np.int32(geoMeta.x_upper_bound)))
            except:
                config_options.errMsg = "Failed all gathering buffer x upper at rank " + str(self.comm.rank)
                err_handler.log_critical(config_options, self)
                return None

            try:
                y_upper = np.asarray(self.comm.allgather(np.int32(geoMeta.y_upper_bound)))
            except:
                config_options.errMsg = "Failed all gathering buffer x upper at rank " + str(self.comm.rank)
                err_handler.log_critical(config_options,MpiConfig)
                return None

            # All ranks record the global intervals and mark the existence of the cache.
            geoMeta.global_x_lower = x_lower
            geoMeta.global_y_lower = y_lower
            geoMeta.global_x_upper = x_upper
            geoMeta.global_y_upper = y_upper
            geoMeta.has_cache = True


        # We now know the local region for each rank.
        if self.rank == 0:
            temp = []
            for i in range(0,self.comm.size):
                temp.append(src_array[y_lower[i]:y_upper[i],
                                         x_lower[i]:x_upper[i]].flatten())
            sendbuf = np.concatenate(tuple(temp))
        else:
            sendbuf = None

        # generate counts
        counts = [(y_upper[i] - y_lower[i]) * (x_upper[i] - x_lower[i])
                  for i in range(0, self.comm.size)]

        # generate offsets (prefix sums of the counts)
        offsets = [0]
        for i in range(1, len(counts)):
            offsets.append(offsets[i - 1] + counts[i - 1])
        i = None

        # create the receive buffer
        if data_type_flag == 1:
            data_type = MPI.FLOAT
            recvbuf = np.empty([counts[self.comm.rank]], np.float32)
        else:
            data_type = MPI.DOUBLE
            recvbuf = np.empty([counts[self.comm.rank]], np.float64)

        # scatter the data
        try:
            self.comm.Scatterv([sendbuf, counts, offsets, data_type], recvbuf, root=0)
        except:
            config_options.errMsg = "Failed to scatter from rank 0"
            err_handler.log_critical(config_options, self)
            return None

        try:
            subarray = np.reshape(recvbuf, [y_upper[self.rank] - y_lower[self.rank],
                                            x_upper[self.rank] - x_lower[self.rank]])
            return subarray
        except:
            config_options.errMsg = "Reshape failed for dimensions [" + \
                                    str(y_upper[self.rank] - y_lower[self.rank]) + \
                                    "," + str(x_upper[self.rank] - x_lower[self.rank]) + \
                                    "] at rank: " + str(self.rank)
            err_handler.log_critical(config_options, self)
            return None
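A minimal sketch of the Scatterv bookkeeping used above, assuming mpi4py is available (run with something like mpiexec -n 4 python scatterv_demo.py; the script name and the per-rank counts are arbitrary). The key detail, and the source of the off-by-one fixed above, is that the offsets must be prefix sums of the counts:

import numpy as np
from mpi4py import MPI

comm = MPI.COMM_WORLD
rank, size = comm.Get_rank(), comm.Get_size()

counts = [3 + r for r in range(size)]               # per-rank element counts
offsets = [0]
for i in range(1, len(counts)):
    offsets.append(offsets[i - 1] + counts[i - 1])  # prefix sums of counts

if rank == 0:
    sendbuf = [np.arange(sum(counts), dtype=np.float32), counts, offsets, MPI.FLOAT]
else:
    sendbuf = None                                  # only root supplies data
recvbuf = np.empty(counts[rank], dtype=np.float32)
comm.Scatterv(sendbuf, recvbuf, root=0)
print(rank, recvbuf)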
Example #16
0
    def merge_slabs_gatherv(self, local_slab, options):
        """
        Generic function for merging per-rank row slabs into a single
        global grid on rank 0 via Gatherv.
        :param local_slab:
        :param options:
        :return:
        """
        # All-gather the local slab shapes so counts and offsets can be computed.
        shapes = np.array([np.int32(local_slab.shape[0]), np.int32(local_slab.shape[1])])
        global_shapes = np.zeros((self.size * 2), np.int32)

        try:
            self.comm.Allgather([shapes, MPI.INTEGER], [global_shapes, MPI.INTEGER])
        except:
            options.errMsg = "Failed all gathering slab shapes at rank " + str(self.rank)
            err_handler.log_critical(options, self)
            return None
        
        #options.errMsg = "All gather for global shapes complete"
        #err_handler.log_msg(options,self)

        width = global_shapes[1]

        # Check that all slabs are the same width and sum the number of rows.
        total_rows = 0
        for i in range(0,self.size):
            total_rows += global_shapes[2*i]
            if global_shapes[(2*i)+1] != width:
                options.errMsg = "Error: slabs with differing widths detected on slab for rank" + str(i)
                err_handler.log_critical(options,self)
                self.comm.abort()

        #options.errMsg = "Checking of Rows and Columns complete"
        #err_handler.log_msg(options,self)

        # generate counts
        counts = [global_shapes[i*2] * global_shapes[(i*2)+1]
                  for i in range(0, self.size)]

        # generate offsets (prefix sums of the counts)
        offsets = [0]
        for i in range(0, len(counts) - 1):
            offsets.append(offsets[i] + counts[i])

        #options.errMsg = "Counts and Offsets generated"
        #err_handler.log_msg(options,self)

        # create the receive buffer
        if self.rank == 0:
            recvbuf = np.empty([total_rows, width], local_slab.dtype)
        else:
            recvbuf = None

        # set the MPI data type
        data_type = MPI.BYTE
        if local_slab.dtype == np.float32:
            data_type = MPI.FLOAT
        elif local_slab.dtype == np.float64:
            data_type = MPI.DOUBLE
        elif local_slab.dtype == np.int32:
            data_type = MPI.INT

        # get the data with Gatherv
        try:
            self.comm.Gatherv(sendbuf=local_slab, recvbuf=[recvbuf, counts, offsets, data_type], root=0)
        except:
            options.errMsg = "Failed to Gatherv to rank 0 from rank " + str(self.rank)
            err_handler.log_critical(options,self)
            return None

        #options.errMsg = "Gatherv complete"
        #err_handler.log_msg(options,self)

        return recvbuf
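The counts/offsets arithmetic in merge_slabs_gatherv can be sanity-checked without an MPI launch. A small sketch with two hypothetical slabs of equal width, mirroring what Gatherv does when it lays the flattened slabs end to end in rank order:

import numpy as np

# Two hypothetical slabs of equal width, as merge_slabs_gatherv requires.
slabs = [np.full((3, 5), 1.0, np.float32), np.full((2, 5), 2.0, np.float32)]

counts = [s.shape[0] * s.shape[1] for s in slabs]   # [15, 10]
offsets = [0]
for i in range(0, len(counts) - 1):
    offsets.append(offsets[i] + counts[i])          # [0, 15]

# Gatherv lays the flattened slabs end to end in rank order, equivalent to:
total_rows = sum(s.shape[0] for s in slabs)
merged = np.concatenate([s.ravel() for s in slabs]).reshape(total_rows, 5)
print(counts, offsets, merged.shape)                # [15, 10] [0, 15] (5, 5)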
Example #17
0
def nwm_monthly_PRISM_downscale(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig):
    """
    NCAR/OWP function for downscaling precipitation using monthly PRISM climatology in a
    mountain-mapper like fashion.
    :param input_forcings:
    :param ConfigOptions:
    :param GeoMetaWrfHydro:
    :param MpiConfig:
    :return:
    """
    if MpiConfig.rank == 0:
        ConfigOptions.statusMsg = "Performing NWM Monthly PRISM Mountain Mapper " \
                                  "Downscaling of Precipitation"
        err_handler.log_msg(ConfigOptions, MpiConfig)

    # Establish whether or not we need to read in new PRISM monthly climatology:
    # 1.) This is the first output timestep, and no grids have been initialized.
    # 2.) We have switched months from the last timestep. In this case, we need
    #     to re-initialize the grids for the current month.
    initialize_flag = False
    if input_forcings.nwmPRISM_denGrid is None and input_forcings.nwmPRISM_numGrid is None:
        # We are on situation 1 - This is the first output step.
        initialize_flag = True
        # print('WE NEED TO READ IN PRISM GRIDS')
    if ConfigOptions.current_output_date.month != ConfigOptions.prev_output_date.month:
        # We are on situation #2 - The month has changed so we need to reinitialize the
        # PRISM grids.
        initialize_flag = True
        # print('MONTH CHANGE.... NEED TO READ IN NEW PRISM GRIDS.')

    if initialize_flag:
        while True:
            # First reset the local PRISM grids to be safe.
            input_forcings.nwmPRISM_numGrid = None
            input_forcings.nwmPRISM_denGrid = None

            # Compose paths to the expected files.
            numeratorPath = input_forcings.paramDir + "/PRISM_Precip_Clim_" + \
                            ConfigOptions.current_output_date.strftime('%h') + '_NWM_Mtn_Mapper_Numer.nc'
            denominatorPath = input_forcings.paramDir + "/PRISM_Precip_Clim_" + \
                              ConfigOptions.current_output_date.strftime('%h') + '_NWM_Mtn_Mapper_Denom.nc'
            #print(numeratorPath)
            #print(denominatorPath)

            # Make sure files exist.
            if not os.path.isfile(numeratorPath):
                ConfigOptions.errMsg = "Expected parameter file: " + numeratorPath + \
                                       " for mountain mapper downscaling of precipitation not found."
                err_handler.log_critical(ConfigOptions, MpiConfig)
                break

            if not os.path.isfile(denominatorPath):
                ConfigOptions.errMsg = "Expected parameter file: " + denominatorPath + \
                                       " for mountain mapper downscaling of precipitation not found."
                err_handler.log_critical(ConfigOptions, MpiConfig)
                break

            if MpiConfig.rank == 0:
                # Open the NetCDF parameter files. Check to make sure expected dimension
                # sizes are in place, along with variable names, etc.
                try:
                    idNum = Dataset(numeratorPath,'r')
                except:
                    ConfigOptions.errMsg = "Unable to open parameter file: " + numeratorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                try:
                    idDenom = Dataset(denominatorPath,'r')
                except:
                    ConfigOptions.errMsg = "Unable to open parameter file: " + denominatorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                # Check to make sure expected names, dimension sizes are present.
                if 'x' not in idNum.variables.keys():
                    ConfigOptions.errMsg = "Expected 'x' variable not found in parameter file: " + numeratorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                if 'x' not in idDenom.variables.keys():
                    ConfigOptions.errMsg = "Expected 'x' variable not found in parameter file: " + denominatorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                if 'y' not in idNum.variables.keys():
                    ConfigOptions.errMsg = "Expected 'y' variable not found in parameter file: " + numeratorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                if 'y' not in idDenom.variables.keys():
                    ConfigOptions.errMsg = "Expected 'y' variable not found in parameter file: " + denominatorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                if 'Data' not in idNum.variables.keys():
                    ConfigOptions.errMsg = "Expected 'Data' variable not found in parameter file: " + numeratorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                if 'Data' not in idDenom.variables.keys():
                    ConfigOptions.errMsg = "Expected 'Data' variable not found in parameter file: " + denominatorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                if idNum.variables['Data'].shape[0] != GeoMetaWrfHydro.ny_global:
                    ConfigOptions.errMsg = "Input Y dimension for: " + numeratorPath + \
                                           " does not match the output WRF-Hydro Y dimension size."
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                if idDenom.variables['Data'].shape[0] != GeoMetaWrfHydro.ny_global:
                    ConfigOptions.errMsg = "Input Y dimension for: " + denominatorPath + \
                                           " does not match the output WRF-Hydro Y dimension size."
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                if idNum.variables['Data'].shape[1] != GeoMetaWrfHydro.nx_global:
                    ConfigOptions.errMsg = "Input X dimension for: " + numeratorPath + \
                                           " does not match the output WRF-Hydro X dimension size."
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                if idDenom.variables['Data'].shape[1] != GeoMetaWrfHydro.nx_global:
                    ConfigOptions.errMsg = "Input X dimension for: " + denominatorPath + \
                                           " does not match the output WRF-Hydro X dimension size."
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                # Read in the PRISM grid on the output grid. Then scatter the array out to the processors.
                try:
                    numDataTmp = idNum.variables['Data'][:,:]
                except:
                    ConfigOptions.errMsg = "Unable to extract 'Data' from parameter file: " + numeratorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                try:
                    denDataTmp = idDenom.variables['Data'][:,:]
                except:
                    ConfigOptions.errMsg = "Unable to extract 'Data' from parameter file: " + denominatorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break

                # Close the parameter files.
                try:
                    idNum.close()
                except:
                    ConfigOptions.errMsg = "Unable to close parameter file: " + numeratorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
                try:
                    idDenom.close()
                except:
                    ConfigOptions.errMsg = "Unable to close parameter file: " + denominatorPath
                    err_handler.log_critical(ConfigOptions, MpiConfig)
                    break
            else:
                numDataTmp = None
                denDataTmp = None

            break
        err_handler.check_program_status(ConfigOptions, MpiConfig)

        # Scatter the array out to the local processors
        input_forcings.nwmPRISM_numGrid = MpiConfig.scatter_array(GeoMetaWrfHydro, numDataTmp, ConfigOptions)
        err_handler.check_program_status(ConfigOptions, MpiConfig)

        input_forcings.nwmPRISM_denGrid = MpiConfig.scatter_array(GeoMetaWrfHydro, denDataTmp, ConfigOptions)
        err_handler.check_program_status(ConfigOptions, MpiConfig)

    # Create temporary grids from the local slabs of params/precip forcings.
    localRainRate = input_forcings.final_forcings[3,:,:]
    numLocal = input_forcings.nwmPRISM_numGrid[:,:]
    denLocal = input_forcings.nwmPRISM_denGrid[:,:]

    # Establish index of where we have valid data.
    try:
        indValid = np.where((localRainRate > 0.0) & (denLocal > 0.0) & (numLocal > 0.0))
    except:
        ConfigOptions.errMsg = "Unable to run numpy search for valid values on precip and " \
                               "param grid in mountain mapper downscaling"
        err_handler.log_critical(ConfigOptions, MpiConfig)
    err_handler.check_program_status(ConfigOptions, MpiConfig)

    # Convert precipitation rate, which is mm/s to mm, which is needed to run the PRISM downscaling.
    try:
        localRainRate[indValid] = localRainRate[indValid]*3600.0
    except:
        ConfigOptions.errMsg = "Unable to convert temporary precip rate from mm/s to mm."
        err_handler.log_critical(ConfigOptions, MpiConfig)
    err_handler.check_program_status(ConfigOptions, MpiConfig)

    try:
        localRainRate[indValid] = localRainRate[indValid] * numLocal[indValid]
    except:
        ConfigOptions.errMsg = "Unable to multiply precip by numerator in mountain mapper downscaling"
        err_handler.log_critical(ConfigOptions, MpiConfig)
    err_handler.check_program_status(ConfigOptions, MpiConfig)

    try:
        localRainRate[indValid] = localRainRate[indValid] / denLocal[indValid]
    except:
        ConfigOptions.errMsg = "Unable to divide precip by denominator in mountain mapper downscaling"
        err_handler.log_critical(ConfigOptions, MpiConfig)
    err_handler.check_program_status(ConfigOptions, MpiConfig)

    # Convert local precip back to a rate (mm/s)
    try:
        localRainRate[indValid] = localRainRate[indValid]/3600.0
    except:
        ConfigOptions.errMsg = "Unable to convert temporary precip rate from mm to mm/s."
        err_handler.log_critical(ConfigOptions, MpiConfig)
    err_handler.check_program_status(ConfigOptions, MpiConfig)

    input_forcings.final_forcings[3, :, :] = localRainRate

    # Reset variables for memory efficiency
    idDenom = None
    idNum = None
    localRainRate = None
    numLocal = None
    denLocal = None
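Algebraically, the mountain-mapper adjustment reduces to rate * num / den, since the mm/s-to-mm conversion cancels; the explicit conversion is presumably kept for consistency with the units of the PRISM parameter grids. A tiny worked sketch with hypothetical 2x2 grids:

import numpy as np

# Hypothetical 2x2 slabs: precip rate (mm/s) plus PRISM numerator/denominator.
rate = np.array([[0.001, 0.0], [0.002, 0.0005]])
num = np.array([[120.0, 80.0], [150.0, 90.0]])
den = np.array([[100.0, 80.0], [100.0, 100.0]])

valid = np.where((rate > 0.0) & (num > 0.0) & (den > 0.0))
rate[valid] = rate[valid] * 3600.0                   # mm/s -> mm over the hour
rate[valid] = rate[valid] * num[valid] / den[valid]  # scale by climatological ratio
rate[valid] = rate[valid] / 3600.0                   # back to mm/s
print(rate)   # e.g. 0.001 * 120/100 = 0.0012 in the first cell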
Example #18
0
def process_forecasts(ConfigOptions, wrfHydroGeoMeta, inputForcingMod, suppPcpMod, MpiConfig, OutputObj):
    """
    Main calling module for running realtime forecasts and re-forecasts.
    :param jobMeta:
    :return:
    """
    # Loop through each WRF-Hydro forecast cycle being processed. Within
    # each cycle, perform the following tasks:
    # 1.) Loop over each output frequency
    # 2.) Determine the input forcing cycle dates (both before and after)
    #     for temporal interpolation, downscaling, and bias correction reasons.
    # 3.) If the input forcings haven't been opened and read into memory,
    #     open them.
    # 4.) Check to see if the ESMF objects for input forcings have been
    #     created. If not, create them, including the regridding object.
    # 5.) Regrid forcing grids for input cycle dates surrounding the
    #     current output timestep if they haven't been regridded.
    # 6.) Perform bias correction and/or downscaling.
    # 7.) Output final grids to LDASIN NetCDF files with associated
    #     WRF-Hydro geospatial metadata to the final output directories.
    # Throughout this entire process, log progress being made into LOG
    # files. Once a forecast cycle is complete, we will touch an empty
    # 'WrfHydroForcing.COMPLETE' flag in the directory. This will be
    # checked upon the beginning of this program to see if we
    # need to process any files.

    for fcstCycleNum in range(ConfigOptions.nFcsts):
        ConfigOptions.current_fcst_cycle = ConfigOptions.b_date_proc + datetime.timedelta(
            seconds=ConfigOptions.fcst_freq * 60 * fcstCycleNum)
        if ConfigOptions.first_fcst_cycle is None:
            ConfigOptions.first_fcst_cycle = ConfigOptions.current_fcst_cycle

        if ConfigOptions.ana_flag:
            fcstCycleOutDir = ConfigOptions.output_dir + "/" + ConfigOptions.e_date_proc.strftime('%Y%m%d%H')
        else:
            fcstCycleOutDir = ConfigOptions.output_dir + "/" + ConfigOptions.current_fcst_cycle.strftime('%Y%m%d%H')

        # put all AnA output in the same directory
        if ConfigOptions.ana_flag:
            if ConfigOptions.ana_out_dir is None:
                ConfigOptions.ana_out_dir = fcstCycleOutDir
            fcstCycleOutDir = ConfigOptions.ana_out_dir

        # completeFlag = ConfigOptions.scratch_dir + "/WrfHydroForcing.COMPLETE"
        completeFlag = fcstCycleOutDir + "/WrfHydroForcing.COMPLETE"
        if os.path.isfile(completeFlag):
            ConfigOptions.statusMsg = "Forecast Cycle: " + \
                                      ConfigOptions.current_fcst_cycle.strftime('%Y-%m-%d %H:%M') + \
                                      " has already completed."
            err_handler.log_msg(ConfigOptions, MpiConfig)
            # We have already completed processing this cycle,
            # move on.
            continue

        if (not ConfigOptions.ana_flag) or (ConfigOptions.logFile is None):
            if MpiConfig.rank == 0:
                # If the cycle directory doesn't exist, create it.
                if not os.path.isdir(fcstCycleOutDir):
                    try:
                        os.mkdir(fcstCycleOutDir)
                    except:
                        ConfigOptions.errMsg = "Unable to create output " \
                                               "directory: " + fcstCycleOutDir
                        err_handler.err_out_screen_para(ConfigOptions.errMsg, MpiConfig)
            err_handler.check_program_status(ConfigOptions, MpiConfig)

            # Compose a path to a log file, which will contain information
            # about this forecast cycle.
            # ConfigOptions.logFile = ConfigOptions.output_dir + "/LOG_" + \

            if ConfigOptions.ana_flag:
                log_time = ConfigOptions.e_date_proc
            else:
                log_time = ConfigOptions.current_fcst_cycle

            ConfigOptions.logFile = ConfigOptions.scratch_dir + "/LOG_" + ConfigOptions.nwmConfig + \
                                    ConfigOptions.d_program_init.strftime('%Y%m%d%H%M') + \
                                    "_" + log_time.strftime('%Y%m%d%H%M')

            # Initialize the log file.
            try:
                err_handler.init_log(ConfigOptions, MpiConfig)
            except:
                err_handler.err_out_screen_para(ConfigOptions.errMsg, MpiConfig)
            err_handler.check_program_status(ConfigOptions, MpiConfig)

        # Log information about this forecast cycle
        if MpiConfig.rank == 0:
            ConfigOptions.statusMsg = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
            err_handler.log_msg(ConfigOptions, MpiConfig)
            ConfigOptions.statusMsg = 'Processing Forecast Cycle: ' + \
                                      ConfigOptions.current_fcst_cycle.strftime('%Y-%m-%d %H:%M')
            err_handler.log_msg(ConfigOptions, MpiConfig)
            ConfigOptions.statusMsg = 'Forecast Cycle Length is: ' + \
                                      str(ConfigOptions.cycle_length_minutes) + " minutes"
            err_handler.log_msg(ConfigOptions, MpiConfig)
        # MpiConfig.comm.barrier()

        # Loop through each output timestep. Perform the following functions:
        # 1.) Calculate all necessary input files per user options.
        # 2.) Read in input forcings from GRIB/NetCDF files.
        # 3.) Regrid the forcings, and temporally interpolate.
        # 4.) Downscale.
        # 5.) Layer, and output as necessary.
        ana_factor = 1 if ConfigOptions.ana_flag is False else 0
        for outStep in range(1, ConfigOptions.num_output_steps + 1):
            # Reset our final grids to missing values.
            OutputObj.output_local[:, :, :] = -9999.0

            ConfigOptions.current_output_step = outStep
            OutputObj.outDate = ConfigOptions.current_fcst_cycle + datetime.timedelta(
                    seconds=ConfigOptions.output_freq * 60 * outStep
            )
            ConfigOptions.current_output_date = OutputObj.outDate

            # if AnA, adjust file date for analysis vs forecast
            if ConfigOptions.ana_flag:
                file_date = OutputObj.outDate - datetime.timedelta(seconds=ConfigOptions.output_freq * 60)
            else:
                file_date = OutputObj.outDate

            # Calculate the previous output timestep. This is used in potential downscaling routines.
            if outStep == ana_factor:
                ConfigOptions.prev_output_date = ConfigOptions.current_output_date
            else:
                ConfigOptions.prev_output_date = ConfigOptions.current_output_date - datetime.timedelta(
                        seconds=ConfigOptions.output_freq * 60
                )
            if MpiConfig.rank == 0:
                ConfigOptions.statusMsg = '========================================='
                err_handler.log_msg(ConfigOptions, MpiConfig)
                ConfigOptions.statusMsg = "Processing for output timestep: " + \
                                          file_date.strftime('%Y-%m-%d %H:%M')
                err_handler.log_msg(ConfigOptions, MpiConfig)
            # MpiConfig.comm.barrier()

            # Compose the expected path to the output file. Check to see if the file exists,
            # if so, continue to the next time step. Also initialize our output arrays if necessary.
            OutputObj.outPath = fcstCycleOutDir + "/" + file_date.strftime('%Y%m%d%H%M') + \
                                ".LDASIN_DOMAIN1"
            # MpiConfig.comm.barrier()

            if os.path.isfile(OutputObj.outPath):
                if MpiConfig.rank == 0:
                    ConfigOptions.statusMsg = "Output file: " + OutputObj.outPath + " exists. Moving " + \
                                              " to the next output timestep."
                    err_handler.log_msg(ConfigOptions, MpiConfig)
                err_handler.check_program_status(ConfigOptions, MpiConfig)
                continue
            else:
                ConfigOptions.currentForceNum = 0
                ConfigOptions.currentCustomForceNum = 0
                # Loop over each of the input forcings specified.
                for forceKey in ConfigOptions.input_forcings:
                    input_forcings = inputForcingMod[forceKey]
                    # Calculate the previous and next input cycle files from the inputs.
                    input_forcings.calc_neighbor_files(ConfigOptions, OutputObj.outDate, MpiConfig)
                    err_handler.check_program_status(ConfigOptions, MpiConfig)

                    # Regrid forcings.
                    input_forcings.regrid_inputs(ConfigOptions, wrfHydroGeoMeta, MpiConfig)
                    err_handler.check_program_status(ConfigOptions, MpiConfig)

                    # Run check on regridded fields for reasonable values that are not missing values.
                    err_handler.check_forcing_bounds(ConfigOptions, input_forcings, MpiConfig)
                    err_handler.check_program_status(ConfigOptions, MpiConfig)

                    # If we are restarting a forecast cycle, re-calculate the neighboring files, and regrid the
                    # next set of forcings as the previous step just regridded the previous forcing.
                    if input_forcings.rstFlag == 1:
                        if input_forcings.regridded_forcings1 is not None and \
                                input_forcings.regridded_forcings2 is not None:
                            # Set the forcings back to reflect we just regridded the previous set of inputs, not the next.
                            input_forcings.regridded_forcings1[:, :, :] = \
                                input_forcings.regridded_forcings2[:, :, :]

                        # Re-calculate the neighbor files.
                        input_forcings.calc_neighbor_files(ConfigOptions, OutputObj.outDate, MpiConfig)
                        err_handler.check_program_status(ConfigOptions, MpiConfig)

                        # Regrid the forcings for the end of the window.
                        input_forcings.regrid_inputs(ConfigOptions, wrfHydroGeoMeta, MpiConfig)
                        err_handler.check_program_status(ConfigOptions, MpiConfig)

                        input_forcings.rstFlag = 0

                    # Run temporal interpolation on the grids.
                    input_forcings.temporal_interpolate_inputs(ConfigOptions, MpiConfig)
                    err_handler.check_program_status(ConfigOptions, MpiConfig)

                    # Run bias correction.
                    bias_correction.run_bias_correction(input_forcings, ConfigOptions,
                                                        wrfHydroGeoMeta, MpiConfig)
                    err_handler.check_program_status(ConfigOptions, MpiConfig)

                    # Run downscaling on grids for this output timestep.
                    downscale.run_downscaling(input_forcings, ConfigOptions,
                                              wrfHydroGeoMeta, MpiConfig)
                    err_handler.check_program_status(ConfigOptions, MpiConfig)

                    # Layer in forcings from this product.
                    layeringMod.layer_final_forcings(OutputObj, input_forcings, ConfigOptions, MpiConfig)
                    err_handler.check_program_status(ConfigOptions, MpiConfig)

                    ConfigOptions.currentForceNum = ConfigOptions.currentForceNum + 1

                    if forceKey == 10:
                        ConfigOptions.currentCustomForceNum = ConfigOptions.currentCustomForceNum + 1

                # Process supplemental precipitation if we specified in the configuration file.
                if ConfigOptions.number_supp_pcp > 0:
                    for suppPcpKey in ConfigOptions.supp_precip_forcings:
                        # Like with input forcings, calculate the neighboring files to use.
                        suppPcpMod[suppPcpKey].calc_neighbor_files(ConfigOptions, OutputObj.outDate, MpiConfig)
                        err_handler.check_program_status(ConfigOptions, MpiConfig)

                        # Regrid the supplemental precipitation.
                        suppPcpMod[suppPcpKey].regrid_inputs(ConfigOptions, wrfHydroGeoMeta, MpiConfig)
                        err_handler.check_program_status(ConfigOptions, MpiConfig)

                        if suppPcpMod[suppPcpKey].regridded_precip1 is not None \
                                and suppPcpMod[suppPcpKey].regridded_precip2 is not None:
                            # if np.any(suppPcpMod[suppPcpKey].regridded_precip1) and \
                            #        np.any(suppPcpMod[suppPcpKey].regridded_precip2):
                            # Run check on regridded fields for reasonable values that are not missing values.
                            err_handler.check_supp_pcp_bounds(ConfigOptions, suppPcpMod[suppPcpKey], MpiConfig)
                            err_handler.check_program_status(ConfigOptions, MpiConfig)

                            # Run temporal interpolation on the grids.
                            suppPcpMod[suppPcpKey].temporal_interpolate_inputs(ConfigOptions, MpiConfig)
                            err_handler.check_program_status(ConfigOptions, MpiConfig)

                            # Layer in the supplemental precipitation into the current output object.
                            layeringMod.layer_supplemental_forcing(OutputObj, suppPcpMod[suppPcpKey],
                                                                   ConfigOptions, MpiConfig)
                            err_handler.check_program_status(ConfigOptions, MpiConfig)

                # Call the output routines
                #   adjust date for AnA if necessary
                if ConfigOptions.ana_flag:
                    OutputObj.outDate = file_date

                OutputObj.output_final_ldasin(ConfigOptions, wrfHydroGeoMeta, MpiConfig)
                err_handler.check_program_status(ConfigOptions, MpiConfig)

        if (not ConfigOptions.ana_flag) or (fcstCycleNum == (ConfigOptions.nFcsts - 1)):
            if MpiConfig.rank == 0:
                ConfigOptions.statusMsg = "Forcings complete for forecast cycle: " + \
                                          ConfigOptions.current_fcst_cycle.strftime('%Y-%m-%d %H:%M')
                err_handler.log_msg(ConfigOptions, MpiConfig)
            err_handler.check_program_status(ConfigOptions, MpiConfig)

            if MpiConfig.rank == 0:
                # Close the log file.
                try:
                    err_handler.close_log(ConfigOptions, MpiConfig)
                except:
                    err_handler.err_out_screen_para(ConfigOptions.errMsg, MpiConfig)

            # Success.... Now touch an empty complete file for this forecast cycle to indicate
            # completion in case the code is re-run.
            try:
                open(completeFlag, 'a').close()
            except:
                ConfigOptions.errMsg = "Unable to create completion file: " + completeFlag
                err_handler.log_critical(ConfigOptions, MpiConfig)
            err_handler.check_program_status(ConfigOptions, MpiConfig)
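The WrfHydroForcing.COMPLETE marker is what makes reruns idempotent: a cycle is skipped when the flag exists, and the flag is only touched after the cycle finishes. A minimal sketch of the pattern (the directory name is hypothetical; the real engine derives it from the cycle date):

import os

# Hypothetical cycle directory and marker file.
cycle_dir = "./2024010100"
complete_flag = os.path.join(cycle_dir, "WrfHydroForcing.COMPLETE")

if os.path.isfile(complete_flag):
    print("cycle already complete, skipping")
else:
    os.makedirs(cycle_dir, exist_ok=True)
    # ... regrid, bias-correct, downscale, and write the LDASIN files ...
    open(complete_flag, 'a').close()   # touch the marker only after success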
Example #19
0
def ext_ana_disaggregate(input_forcings, supplemental_precip, config_options, mpi_config):
    """
    Function for disaggregating 6hr SuppPcp data to 1hr Input data
    :param input_forcings:
    :param supplemental_precip:
    :param config_options:
    :param mpi_config:
    :return:
    """
    # Check to make sure we have valid grids.
    if input_forcings.regridded_forcings2 is None or supplemental_precip.regridded_precip2 is None:
        if mpi_config.rank == 0:
            config_options.statusMsg = "Bypassing ext_ana_disaggregation routine due to missing input or supp pcp data"
            err_handler.log_warning(config_options, mpi_config)
        return
            
    if supplemental_precip.ext_ana != "STAGE4":
        if mpi_config.rank == 0:
            config_options.statusMsg = f"Bypassing ext_ana_disaggregation routine due to supplemental_precip.ext_ana = {supplemental_precip.ext_ana}"
            err_handler.log_warning(config_options, mpi_config)
        return
    
    
    #print("ext_ana_disaggregate RAINRATE input_forcings.regridded_forcings2[3,:,:]")
    #print(input_forcings.regridded_forcings2[3,:,:])
    #print("ext_ana_disaggregate supplemental_precip.regridded_precip2[:,:]")
    #print(supplemental_precip.regridded_precip2[:,:])
    #print("supplemental_precip.regridded_precip2[:,:].shape")
    #print(supplemental_precip.regridded_precip2[:,:].shape)

    read_hours = 0
    found_target_hh = False
    ana_data = []
    if mpi_config.rank == 0:
        target_hh = Path(input_forcings.file_in2).stem[-4:-2]
        _,_,_,beg_hh,end_hh,yyyymmdd = Path(supplemental_precip.file_in2).stem.split('_')
        date_iter = datetime.strptime(f"{yyyymmdd}{beg_hh}", '%Y%m%d%H')
        end_date = date_iter + timedelta(hours=6)
        # Advance date_iter by 1 hour: the Stage IV accumulation window excludes
        # its beginning and includes its end, i.e. (begin_date, end_date].
        date_iter += timedelta(hours=1)
        while date_iter <= end_date:
            tmp_file = f"{input_forcings.inDir}/{date_iter.strftime('%Y%m%d%H')}/{date_iter.strftime('%Y%m%d%H')}00.LDASIN_DOMAIN1"
            if os.path.exists(tmp_file):
                config_options.statusMsg = f"Reading {input_forcings.netcdf_var_names[3]} from {tmp_file} for disaggregation"
                err_handler.log_msg(config_options, mpi_config)
                with Dataset(tmp_file,'r') as ds:
                    try:
                        #Read in rainrate
                        data = ds.variables[input_forcings.netcdf_var_names[3]][0, :, :]
                        data[data == config_options.globalNdv] = np.nan
                        ana_data.append(data)
                        read_hours += 1
                        if date_iter.hour == int(target_hh):
                            found_target_hh = True
                    except (ValueError, KeyError, AttributeError) as err:
                        config_options.errMsg = f"Unable to extract: RAINRATE from: {input_forcings.file_in2} ({str(err)})"
                        err_handler.log_critical(config_options, mpi_config)
            else:
                config_options.statusMsg = f"Input file missing {tmp_file}"
                err_handler.log_warning(config_options, mpi_config)

            date_iter += timedelta(hours=1)

    found_target_hh = mpi_config.broadcast_parameter(found_target_hh, config_options, param_type=bool)
    err_handler.check_program_status(config_options, mpi_config)
    if not found_target_hh:
        if mpi_config.rank == 0:
            config_options.statusMsg = f"Could not find AnA target_hh = {target_hh} for disaggregation. Setting output values to {config_options.globalNdv}."
            err_handler.log_warning(config_options, mpi_config)
        supplemental_precip.regridded_precip2[:,:] = config_options.globalNdv
        return

    read_hours = mpi_config.broadcast_parameter(read_hours, config_options, param_type=int)
    err_handler.check_program_status(config_options, mpi_config)
    if read_hours != 6:
        if mpi_config.rank == 0:
            config_options.statusMsg = f"Could not find all 6 AnA files for disaggregation. Only found {read_hours} hours. Setting output values to {config_options.globalNdv}."
            err_handler.log_warning(config_options, mpi_config)
        supplemental_precip.regridded_precip2[:,:] = config_options.globalNdv
        return

    ana_sum = np.array([], dtype=np.float32)
    target_data = np.array([], dtype=np.float32)
    ana_all_zeros = np.array([], dtype=np.bool_)
    ana_no_zeros = np.array([], dtype=np.bool_)
    target_data_no_zeros = np.array([], dtype=np.bool_)
    if mpi_config.rank == 0:
        config_options.statusMsg = f"Performing hourly disaggregation of {supplemental_precip.file_in2}"
        err_handler.log_msg(config_options, mpi_config)

        ana_sum = sum(ana_data)
        target_data = ana_data[(int(target_hh)-1)%6]

        ana_zeros = [(a == 0).astype(int) for a in ana_data]
        target_data_zeros = (target_data == 0)
        target_data_no_zeros = ~target_data_zeros
        ana_zeros_sum = sum(ana_zeros)
        ana_all_zeros = (ana_zeros_sum == 6)
        ana_no_zeros = (ana_zeros_sum == 0)

    err_handler.check_program_status(config_options, mpi_config)
    ana_sum = mpi_config.scatter_array(input_forcings, ana_sum, config_options)
    err_handler.check_program_status(config_options, mpi_config)
    target_data = mpi_config.scatter_array(input_forcings, target_data, config_options)
    err_handler.check_program_status(config_options, mpi_config)

    ana_all_zeros = mpi_config.scatter_array(input_forcings, ana_all_zeros, config_options)
    err_handler.check_program_status(config_options, mpi_config)
    ana_no_zeros = mpi_config.scatter_array(input_forcings, ana_no_zeros, config_options)
    err_handler.check_program_status(config_options, mpi_config)
    target_data_no_zeros = mpi_config.scatter_array(input_forcings, target_data_no_zeros, config_options)
    err_handler.check_program_status(config_options, mpi_config)
    

    if mpi_config.comm.Get_size() == 1 and test_enabled:
        test_file = f"{config_options.scratch_dir}/stage_4_A_PCP_GDS5_SFC_acc6h_{yyyymmdd}_{beg_hh}_{end_hh}.txt"
        np.savetxt(test_file,supplemental_precip.regridded_precip2)
    
        test_file = f"{config_options.scratch_dir}/disaggregation_factors_{target_hh}_{yyyymmdd}{beg_hh}_{end_date.strftime('%Y%m%d%H')}.txt"
        np.savetxt(test_file,np.nan_to_num(np.select([ana_all_zeros,
                                                      (ana_no_zeros | target_data_no_zeros)],
                                                     [1/6.0*np.ones(supplemental_precip.regridded_precip2[:,:].shape),
                                                      target_data/ana_sum],
                                                     0),nan=config_options.globalNdv))

    #supplemental_precip.regridded_precip2[(0.0 < supplemental_precip.regridded_precip2) & (supplemental_precip.regridded_precip2 < 0.00003)] = 0.0
    supplemental_precip.regridded_precip2[:,:] = np.select([ana_all_zeros,
                                                            (ana_no_zeros | target_data_no_zeros)],
                                                           [1/6.0*supplemental_precip.regridded_precip2[:,:],
                                                            supplemental_precip.regridded_precip2[:,:] * target_data/ana_sum],
                                                           0)
    np.nan_to_num(supplemental_precip.regridded_precip2[:,:], copy=False, nan=config_options.globalNdv) 

    if mpi_config.comm.Get_size() == 1 and test_enabled:
        test_file = f"{config_options.scratch_dir}/stage_4_A_PCP_GDS5_SFC_acc6_disaggregation_{target_hh}_{yyyymmdd}{beg_hh}_{end_date.strftime('%Y%m%d%H')}.txt"
        np.savetxt(test_file,supplemental_precip.regridded_precip2)
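The np.select call above picks between three cases per cell: spread the 6-hr Stage IV total evenly when every AnA hour was zero, weight it by target/sum otherwise, and fall back to zero. A simplified sketch that collapses the per-hour zero bookkeeping into a sum check (all values hypothetical):

import numpy as np

# Hypothetical one-dimensional cells covering the three cases.
ana_sum = np.array([6.0, 0.0, 3.0])   # 6-hr sum of the hourly AnA precip
target = np.array([2.0, 0.0, 0.0])    # AnA precip at the target hour
pcp6 = np.array([12.0, 12.0, 12.0])   # 6-hr Stage IV accumulation

all_zero = ana_sum == 0.0                   # case 1: spread evenly
ratio_ok = (~all_zero) | (target > 0.0)     # case 2: weight by target/sum
with np.errstate(divide='ignore', invalid='ignore'):
    hourly = np.select([all_zero, ratio_ok],
                       [pcp6 / 6.0, pcp6 * target / ana_sum], 0.0)
print(np.nan_to_num(hourly))                # [4. 2. 0.]

The first cell gets 12.0 * 2/6 = 4.0, the second falls back to the even 12.0 / 6 = 2.0 split, and the third receives zero weight because the target hour had no AnA precipitation.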