def get_args(varname, wrfnc, timeidx, method, squeeze):
    """Return the argument tuple for the raw computational routine
    associated with *varname*."""
    if varname == "avo":
        ncvars = extract_vars(wrfnc, timeidx,
                              ("U", "V", "MAPFAC_U", "MAPFAC_V",
                               "MAPFAC_M", "F"),
                              method, squeeze, cache=None, meta=True)

        attrs = extract_global_attrs(wrfnc, attrs=("DX", "DY"))
        u = ncvars["U"]
        v = ncvars["V"]
        msfu = ncvars["MAPFAC_U"]
        msfv = ncvars["MAPFAC_V"]
        msfm = ncvars["MAPFAC_M"]
        cor = ncvars["F"]

        dx = attrs["DX"]
        dy = attrs["DY"]

        return (u, v, msfu, msfv, msfm, cor, dx, dy)

    if varname == "pvo":
        ncvars = extract_vars(wrfnc, timeidx,
                              ("U", "V", "T", "P", "PB", "MAPFAC_U",
                               "MAPFAC_V", "MAPFAC_M", "F"),
                              method, squeeze, cache=None, meta=True)
        attrs = extract_global_attrs(wrfnc, attrs=("DX", "DY"))

        u = ncvars["U"]
        v = ncvars["V"]
        t = ncvars["T"]
        p = ncvars["P"]
        pb = ncvars["PB"]
        msfu = ncvars["MAPFAC_U"]
        msfv = ncvars["MAPFAC_V"]
        msfm = ncvars["MAPFAC_M"]
        cor = ncvars["F"]

        dx = attrs["DX"]
        dy = attrs["DY"]

        full_t = t + 300  # add the 300 K base potential temperature
        full_p = p + pb

        return (u, v, full_t, full_p, msfu, msfv, msfm, cor, dx, dy)

    if varname == "eth":
        varnames = ("T", "P", "PB", "QVAPOR")
        ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze,
                              cache=None, meta=True)

        t = ncvars["T"]
        p = ncvars["P"]
        pb = ncvars["PB"]
        qv = ncvars["QVAPOR"]

        full_t = t + Constants.T_BASE
        full_p = p + pb
        tkel = tk(full_p, full_t, meta=False)

        return (qv, tkel, full_p)

    if varname == "cape_2d":
        varnames = ("T", "P", "PB", "QVAPOR", "PH", "PHB", "HGT", "PSFC")
        ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze,
                              cache=None, meta=True)

        t = ncvars["T"]
        p = ncvars["P"]
        pb = ncvars["PB"]
        qv = ncvars["QVAPOR"]
        ph = ncvars["PH"]
        phb = ncvars["PHB"]
        ter = ncvars["HGT"]
        psfc = ncvars["PSFC"]

        full_t = t + Constants.T_BASE
        full_p = p + pb
        tkel = tk(full_p, full_t, meta=False)

        geopt = ph + phb
        geopt_unstag = destagger(geopt, -3)
        z = geopt_unstag / Constants.G

        # Convert pressure to hPa
        p_hpa = ConversionFactors.PA_TO_HPA * full_p
        psfc_hpa = ConversionFactors.PA_TO_HPA * psfc

        i3dflag = 0
        ter_follow = 1

        return (p_hpa, tkel, qv, z, ter, psfc_hpa, ter_follow)

    if varname == "cape_3d":
        varnames = ("T", "P", "PB", "QVAPOR", "PH", "PHB", "HGT", "PSFC")
        ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze,
                              cache=None, meta=True)

        t = ncvars["T"]
        p = ncvars["P"]
        pb = ncvars["PB"]
        qv = ncvars["QVAPOR"]
        ph = ncvars["PH"]
        phb = ncvars["PHB"]
        ter = ncvars["HGT"]
        psfc = ncvars["PSFC"]

        full_t = t + Constants.T_BASE
        full_p = p + pb
        tkel = tk(full_p, full_t, meta=False)

        geopt = ph + phb
        geopt_unstag = destagger(geopt, -3)
        z = geopt_unstag / Constants.G

        # Convert pressure to hPa
        p_hpa = ConversionFactors.PA_TO_HPA * full_p
        psfc_hpa = ConversionFactors.PA_TO_HPA * psfc

        i3dflag = 1
        ter_follow = 1

        return (p_hpa, tkel, qv, z, ter, psfc_hpa, ter_follow)

    if varname == "ctt":
        varnames = ("T", "P", "PB", "PH", "PHB", "HGT", "QVAPOR")
        ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze,
                              cache=None, meta=True)

        t = ncvars["T"]
        p = ncvars["P"]
        pb = ncvars["PB"]
        ph = ncvars["PH"]
        phb = ncvars["PHB"]
        ter = ncvars["HGT"]
        qv = ncvars["QVAPOR"] * 1000.0  # g/kg

        haveqci = 1
        try:
            icevars = extract_vars(wrfnc, timeidx, "QICE", method, squeeze,
                                   cache=None, meta=False)
        except KeyError:
            qice = np.zeros(qv.shape, qv.dtype)
            haveqci = 0
        else:
            qice = icevars["QICE"] * 1000.0  # g/kg

        try:
            cldvars = extract_vars(wrfnc, timeidx, "QCLOUD", method, squeeze,
                                   cache=None, meta=False)
        except KeyError:
            raise RuntimeError("'QCLOUD' not found in NetCDF file")
        else:
            qcld = cldvars["QCLOUD"] * 1000.0  # g/kg

        full_p = p + pb
        p_hpa = full_p * ConversionFactors.PA_TO_HPA
        full_t = t + Constants.T_BASE
        tkel = tk(full_p, full_t, meta=False)

        geopt = ph + phb
        geopt_unstag = destagger(geopt, -3)
        ght = geopt_unstag / Constants.G

        return (p_hpa, tkel, qv, qcld, ght, ter, qice)

    if varname == "dbz":
        varnames = ("T", "P", "PB", "QVAPOR", "QRAIN")
        ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze,
                              cache=None, meta=True)

        t = ncvars["T"]
        p = ncvars["P"]
        pb = ncvars["PB"]
        qv = ncvars["QVAPOR"]
        qr = ncvars["QRAIN"]

        try:
            snowvars = extract_vars(wrfnc, timeidx, "QSNOW", method, squeeze,
                                    cache=None, meta=False)
        except KeyError:
            qs = np.zeros(qv.shape, qv.dtype)
        else:
            qs = snowvars["QSNOW"]

        try:
            graupvars = extract_vars(wrfnc, timeidx, "QGRAUP", method,
                                     squeeze, cache=None, meta=False)
        except KeyError:
            qg = np.zeros(qv.shape, qv.dtype)
        else:
            qg = graupvars["QGRAUP"]

        full_t = t + Constants.T_BASE
        full_p = p + pb
        tkel = tk(full_p, full_t, meta=False)

        return (full_p, tkel, qv, qr, qs, qg)

    if varname == "helicity":
        # Top can either be 3000 or 1000 (for 0-3 srh or 0-1 srh)
        ncvars = extract_vars(wrfnc, timeidx, ("HGT", "PH", "PHB"),
                              method, squeeze, cache=None, meta=True)

        ter = ncvars["HGT"]
        ph = ncvars["PH"]
        phb = ncvars["PHB"]

        # As coded in NCL, but not sure this is possible
        varname = "U"
        u_vars = extract_vars(wrfnc, timeidx, varname, method, squeeze,
                              cache=None, meta=False)
        u = destagger(u_vars[varname], -1)

        varname = "V"
        v_vars = extract_vars(wrfnc, timeidx, varname, method, squeeze,
                              cache=None, meta=False)
        v = destagger(v_vars[varname], -2)

        geopt = ph + phb
        geopt_unstag = destagger(geopt, -3)
        z = geopt_unstag / Constants.G

        return (u, v, z, ter)

    if varname == "updraft_helicity":
        ncvars = extract_vars(wrfnc, timeidx, ("W", "PH", "PHB", "MAPFAC_M"),
                              method, squeeze, cache=None, meta=True)

        wstag = ncvars["W"]
        ph = ncvars["PH"]
        phb = ncvars["PHB"]
        mapfct = ncvars["MAPFAC_M"]

        attrs = extract_global_attrs(wrfnc, attrs=("DX", "DY"))
        dx = attrs["DX"]
        dy = attrs["DY"]

        # As coded in NCL, but not sure this is possible
        varname = "U"
        u_vars = extract_vars(wrfnc, timeidx, varname, method, squeeze,
                              cache=None, meta=True)
        u = destagger(u_vars[varname], -1, meta=True)

        varname = "V"
        v_vars = extract_vars(wrfnc, timeidx, varname, method, squeeze,
                              cache=None, meta=True)
        v = destagger(v_vars[varname], -2, meta=True)

        zstag = ph + phb

        return (zstag, mapfct, u, v, wstag, dx, dy)

    if varname == "omg":
        varnames = ("T", "P", "W", "PB", "QVAPOR")
        ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze,
                              cache=None, meta=True)

        t = ncvars["T"]
        p = ncvars["P"]
        w = ncvars["W"]
        pb = ncvars["PB"]
        qv = ncvars["QVAPOR"]

        wa = destagger(w, -3)
        full_t = t + Constants.T_BASE
        full_p = p + pb
        tkel = tk(full_p, full_t, meta=False)

        return (qv, tkel, wa, full_p)

    if varname == "pw":
        varnames = ("T", "P", "PB", "PH", "PHB", "QVAPOR")
        ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze,
                              cache=None, meta=True)

        t = ncvars["T"]
        p = ncvars["P"]
        pb = ncvars["PB"]
        ph = ncvars["PH"]
        phb = ncvars["PHB"]
        qv = ncvars["QVAPOR"]

        # Change this to use real virtual temperature!
        full_p = p + pb
        ht = (ph + phb) / Constants.G
        full_t = t + Constants.T_BASE

        tkel = tk(full_p, full_t, meta=False)

        return (full_p, tkel, qv, ht)

    if varname == "rh":
        varnames = ("T", "P", "PB", "QVAPOR")
        ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze,
                              cache=None, meta=True)

        t = ncvars["T"]
        p = ncvars["P"]
        pb = ncvars["PB"]
        qvapor = to_np(ncvars["QVAPOR"])

        full_t = t + Constants.T_BASE
        full_p = p + pb
        qvapor[qvapor < 0] = 0
        tkel = tk(full_p, full_t, meta=False)

        return (qvapor, full_p, tkel)

    if varname == "slp":
        varnames = ("T", "P", "PB", "QVAPOR", "PH", "PHB")
        ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze,
                              cache=None, meta=True)

        t = ncvars["T"]
        p = ncvars["P"]
        pb = ncvars["PB"]
        qvapor = to_np(ncvars["QVAPOR"])
        ph = ncvars["PH"]
        phb = ncvars["PHB"]

        full_t = t + Constants.T_BASE
        full_p = p + pb
        qvapor[qvapor < 0] = 0.

        full_ph = (ph + phb) / Constants.G
        destag_ph = destagger(full_ph, -3)

        tkel = tk(full_p, full_t, meta=False)

        return (destag_ph, tkel, full_p, qvapor)

    if varname == "td":
        varnames = ("P", "PB", "QVAPOR")
        ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze,
                              cache=None, meta=True)

        p = ncvars["P"]
        pb = ncvars["PB"]
        qvapor = to_np(ncvars["QVAPOR"])

        # Algorithm requires hPa
        full_p = .01 * (p + pb)
        qvapor[qvapor < 0] = 0

        return (full_p, qvapor)

    if varname == "tk":
        varnames = ("T", "P", "PB")
        ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze,
                              cache=None, meta=True)

        t = ncvars["T"]
        p = ncvars["P"]
        pb = ncvars["PB"]

        full_t = t + Constants.T_BASE
        full_p = p + pb

        return (full_p, full_t)

    if varname == "tv":
        varnames = ("T", "P", "PB", "QVAPOR")
        ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze,
                              cache=None, meta=True)

        t = ncvars["T"]
        p = ncvars["P"]
        pb = ncvars["PB"]
        qv = ncvars["QVAPOR"]

        full_t = t + Constants.T_BASE
        full_p = p + pb
        tkel = tk(full_p, full_t)

        return (tkel, qv)

    if varname == "twb":
        varnames = ("T", "P", "PB", "QVAPOR")
        ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze,
                              cache=None, meta=True)

        t = ncvars["T"]
        p = ncvars["P"]
        pb = ncvars["PB"]
        qv = ncvars["QVAPOR"]

        full_t = t + Constants.T_BASE
        full_p = p + pb
        tkel = tk(full_p, full_t)

        return (full_p, tkel, qv)

    if varname == "uvmet":
        varname = "U"
        u_vars = extract_vars(wrfnc, timeidx, varname, method, squeeze,
                              cache=None, meta=True)
        u = destagger(u_vars[varname], -1, meta=True)

        varname = "V"
        v_vars = extract_vars(wrfnc, timeidx, varname, method, squeeze,
                              cache=None, meta=True)
        v = destagger(v_vars[varname], -2, meta=True)

        map_proj_attrs = extract_global_attrs(wrfnc, attrs="MAP_PROJ")
        map_proj = map_proj_attrs["MAP_PROJ"]

        if map_proj in (0, 3, 6):
            raise ProjectionError("Map projection does not need rotation")
        elif map_proj in (1, 2):
            lat_attrs = extract_global_attrs(wrfnc,
                                             attrs=("TRUELAT1", "TRUELAT2"))
            radians_per_degree = Constants.PI / 180.0

            # Rotation needed for Lambert and Polar Stereographic
            true_lat1 = lat_attrs["TRUELAT1"]
            true_lat2 = lat_attrs["TRUELAT2"]

            try:
                lon_attrs = extract_global_attrs(wrfnc, attrs="STAND_LON")
            except AttributeError:
                try:
                    cen_lon_attrs = extract_global_attrs(wrfnc,
                                                         attrs="CEN_LON")
                except AttributeError:
                    raise RuntimeError(
                        "longitude attributes not found in NetCDF")
                else:
                    cen_lon = cen_lon_attrs["CEN_LON"]
            else:
                cen_lon = lon_attrs["STAND_LON"]

            varname = "XLAT"
            xlat_var = extract_vars(wrfnc, timeidx, varname, method, squeeze,
                                    cache=None, meta=True)
            lat = xlat_var[varname]

            varname = "XLONG"
            xlon_var = extract_vars(wrfnc, timeidx, varname, method, squeeze,
                                    cache=None, meta=True)
            lon = xlon_var[varname]

            if map_proj == 1:
                if ((fabs(true_lat1 - true_lat2) > 0.1) and
                        (fabs(true_lat2 - 90.) > 0.1)):
                    cone = (log(cos(true_lat1 * radians_per_degree))
                            - log(cos(true_lat2 * radians_per_degree)))
                    cone = (cone /
                            (log(tan((45. - fabs(true_lat1 / 2.)) *
                                     radians_per_degree))
                             - log(tan((45. - fabs(true_lat2 / 2.)) *
                                       radians_per_degree))))
                else:
                    cone = sin(fabs(true_lat1) * radians_per_degree)
            else:
                cone = 1

        return (u, v, lat, lon, cen_lon, cone)

    if varname == "cloudfrac":
        from wrf.g_geoht import get_height

        ncvars = extract_vars(wrfnc, timeidx, ("P", "PB", "QVAPOR", "T"),
                              method, squeeze, cache=None, meta=True)

        p = ncvars["P"]
        pb = ncvars["PB"]
        qv = ncvars["QVAPOR"]
        t = ncvars["T"]

        geoht_agl = get_height(wrfnc, timeidx, method, squeeze, cache=None,
                               meta=True, msl=False)

        full_p = p + pb
        full_t = t + Constants.T_BASE

        tkel = tk(full_p, full_t)
        relh = rh(qv, full_p, tkel)

        return (geoht_agl, relh, 1, 300., 2000., 6000.)
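# Illustrative usage sketch (not part of the original module, and the file
# name below is a placeholder): the tuples returned by get_args() are ordered
# to match wrf-python's raw computational routines, e.g. wrf.slp for sea
# level pressure, so they can be unpacked directly into those calls.
def _example_slp_from_raw_args(wrf_path="wrfout_d01_placeholder"):
    """Compute sea level pressure by unpacking get_args('slp', ...)."""
    from netCDF4 import Dataset
    from wrf import slp

    wrfnc = Dataset(wrf_path)

    # (destaggered geopotential height, temperature [K], full pressure [Pa],
    #  clamped QVAPOR), in the order expected by wrf.slp
    destag_ph, tkel, full_p, qvapor = get_args("slp", wrfnc, timeidx=0,
                                               method="cat", squeeze=True)

    return slp(destag_ph, tkel, full_p, qvapor, units="hPa")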
def vinterp(wrfin, field, vert_coord, interp_levels, extrapolate=False,
            field_type=None, log_p=False, timeidx=0, method="cat",
            squeeze=True, cache=None, meta=True):
    """Return the field vertically interpolated to the given type of
    surface and a set of new levels.

    Args:

        wrfin (:class:`netCDF4.Dataset`, :class:`Nio.NioFile`, or an \
            iterable): WRF-ARW NetCDF data as a :class:`netCDF4.Dataset`,
            :class:`Nio.NioFile` or an iterable sequence of the
            aforementioned types.

        field (:class:`xarray.DataArray` or :class:`numpy.ndarray`): A
            three-dimensional field.

        vert_coord (:obj:`str`): A string indicating the vertical coordinate
            type to interpolate to.

            Valid strings are:
                * 'pressure', 'pres', 'p': pressure [hPa]
                * 'ght_msl': grid point height msl [km]
                * 'ght_agl': grid point height agl [km]
                * 'theta', 'th': potential temperature [K]
                * 'theta-e', 'thetae', 'eth': equivalent potential \
                    temperature [K]

        interp_levels (sequence): A 1D sequence of vertical levels to
            interpolate to. Values must be in the same units as specified
            above for the *vert_coord* parameter.

        extrapolate (:obj:`bool`, optional): Set to True to extrapolate
            values below ground. This is only performed when *vert_coord*
            is a pressure or height type, and the *field_type* is a
            pressure type (with height vertical coordinate), a height type
            (with pressure as the vertical coordinate), or a temperature
            type (with either height or pressure as the vertical
            coordinate). If those conditions are not met, or *field_type*
            is None, then the lowest model level will be used.
            Extrapolation is performed using the standard atmosphere.
            Default is False.

        field_type (:obj:`str`, optional): The type of field.
            Default is None.

            Valid strings are:
                * 'none': None
                * 'pressure', 'pres', 'p': pressure [Pa]
                * 'pressure_hpa', 'pres_hpa', 'p_hpa': pressure [hPa]
                * 'z', 'ght': geopotential height [m]
                * 'z_km', 'ght_km': geopotential height [km]
                * 'tc': temperature [degC]
                * 'tk': temperature [K]
                * 'theta', 'th': potential temperature [K]
                * 'theta-e', 'thetae', 'eth': equivalent potential \
                    temperature [K]

        log_p (:obj:`bool`, optional): Set to True to use the log of the
            vertical coordinate for interpolation. This is mainly intended
            for pressure vertical coordinate types, but note that the log
            will still be taken for any vertical coordinate type when this
            is set to True. Default is False.

        timeidx (:obj:`int`, optional): The time index to use when
            extracting auxiliary variables used in the interpolation. This
            value must be set to match the same value used when the *field*
            variable was extracted. Default is 0.

        method (:obj:`str`, optional): The aggregation method to use for
            sequences. Must be either 'cat' or 'join'. 'cat' combines the
            data along the Time dimension. 'join' creates a new dimension
            for the file index. The default is 'cat'.

        squeeze (:obj:`bool`, optional): Set to False to prevent dimensions
            with a size of 1 from being automatically removed from the
            shape of the output. Default is True.

        cache (:obj:`dict`, optional): A dictionary of (varname, ndarray)
            that can be used to supply pre-extracted NetCDF variables to
            the computational routines. It is primarily used for internal
            purposes, but can also be used to improve performance by
            eliminating the need to repeatedly extract the same variables
            used in multiple diagnostics calculations, particularly when
            using large sequences of files. Default is None.

        meta (:obj:`bool`, optional): Set to False to disable metadata and
            return :class:`numpy.ndarray` instead of
            :class:`xarray.DataArray`. Default is True.

    Returns:
        :class:`xarray.DataArray` or :class:`numpy.ndarray`: The
        interpolated variable. If xarray is enabled and the *meta*
        parameter is True, then the result will be a
        :class:`xarray.DataArray` object. Otherwise, the result will be a
        :class:`numpy.ndarray` object with no metadata.

    """
    _key = get_id(wrfin)
    _wrfin = get_iterable(wrfin)

    # Remove case sensitivity
    field_type = field_type.lower() if field_type is not None else "none"
    vert_coord = vert_coord.lower() if vert_coord is not None else "none"

    valid_coords = ("pressure", "pres", "p", "ght_msl", "ght_agl",
                    "theta", "th", "theta-e", "thetae", "eth")

    valid_field_types = ("none", "pressure", "pres", "p", "pressure_hpa",
                         "pres_hpa", "p_hpa", "z", "tc", "tk", "theta",
                         "th", "theta-e", "thetae", "eth", "ght", "z_km",
                         "ght_km")

    icase_lookup = {"none": 0,
                    "p": 1,
                    "pres": 1,
                    "pressure": 1,
                    "p_hpa": 1,
                    "pres_hpa": 1,
                    "pressure_hpa": 1,
                    "z": 2,
                    "ght": 2,
                    "z_km": 2,
                    "ght_km": 2,
                    "tc": 3,
                    "tk": 4,
                    "theta": 5,
                    "th": 5,
                    "theta-e": 6,
                    "thetae": 6,
                    "eth": 6}

    in_unitmap = {"p_hpa": 1.0 / ConversionFactors.PA_TO_HPA,
                  "pres_hpa": 1.0 / ConversionFactors.PA_TO_HPA,
                  "pressure_hpa": 1.0 / ConversionFactors.PA_TO_HPA,
                  "z_km": 1.0 / ConversionFactors.M_TO_KM,
                  "ght_km": 1.0 / ConversionFactors.M_TO_KM,
                  }

    out_unitmap = {"p_hpa": ConversionFactors.PA_TO_HPA,
                   "pres_hpa": ConversionFactors.PA_TO_HPA,
                   "pressure_hpa": ConversionFactors.PA_TO_HPA,
                   "z_km": ConversionFactors.M_TO_KM,
                   "ght_km": ConversionFactors.M_TO_KM,
                   }

    # These constants match what's in the Fortran code.
    rgas = Constants.RD
    ussalr = Constants.USSALR
    sclht = Constants.SCLHT

    # interp_levels might be a list or tuple, make a numpy array
    if not isinstance(interp_levels, np.ndarray):
        interp_levels = np.asarray(interp_levels, np.float64)

    if len(interp_levels) == 0:
        raise ValueError("'interp_levels' contains no values")

    # Check if field is staggered
    if is_staggered(_wrfin, field):
        raise ValueError("Please unstagger field in the vertical")

    # Check for valid coord
    if vert_coord not in valid_coords:
        raise ValueError("'%s' is not a valid vertical "
                         "coordinate type" % vert_coord)

    # Check for valid field type
    if field_type not in valid_field_types:
        raise ValueError("'%s' is not a valid field type" % field_type)

    log_p_int = 1 if log_p else 0

    icase = 0
    extrap = 0

    if extrapolate:
        extrap = 1
        icase = icase_lookup[field_type]

    # Extract variables
    ncvars = extract_vars(_wrfin, timeidx, ("PSFC", "QVAPOR"),
                          method, squeeze, cache, meta=False, _key=_key)

    sfp = ncvars["PSFC"] * ConversionFactors.PA_TO_HPA
    qv = ncvars["QVAPOR"]

    terht = get_terrain(_wrfin, timeidx, units="m",
                        method=method, squeeze=squeeze, cache=cache,
                        meta=False, _key=_key)
    tk = get_temp(_wrfin, timeidx, units="k",
                  method=method, squeeze=squeeze, cache=cache,
                  meta=False, _key=_key)
    p = get_pressure(_wrfin, timeidx, units="pa",
                     method=method, squeeze=squeeze, cache=cache,
                     meta=False, _key=_key)
    ght = get_height(_wrfin, timeidx, msl=True, units="m",
                     method=method, squeeze=squeeze, cache=cache,
                     meta=False, _key=_key)

    smsfp = _smooth2d(sfp, 3, 2.0)

    vcor = 0

    if vert_coord in ("pressure", "pres", "p"):
        vcor = 1
        vcord_array = p * ConversionFactors.PA_TO_HPA

    elif vert_coord == "ght_msl":
        vcor = 2
        vcord_array = np.exp(-ght / sclht)

    elif vert_coord == "ght_agl":
        ht_agl = get_height(_wrfin, timeidx, msl=False, units="m",
                            method=method, squeeze=squeeze, cache=cache,
                            meta=False, _key=_key)

        vcor = 3
        vcord_array = np.exp(-ht_agl / sclht)

    elif vert_coord in ("theta", "th"):
        t = get_theta(_wrfin, timeidx, units="k",
                      method=method, squeeze=squeeze, cache=cache,
                      meta=False, _key=_key)

        coriolis = extract_vars(_wrfin, timeidx, "F",
                                method, squeeze, cache, meta=False,
                                _key=_key)["F"]

        vcor = 4
        idir = 1
        icorsw = 0
        delta = 0.01

        p_hpa = p * ConversionFactors.PA_TO_HPA

        vcord_array = _monotonic(t, p_hpa, coriolis, idir, delta, icorsw)

        # We only extrapolate temperature fields below ground
        # if we are interpolating to pressure or height vertical surfaces.
        icase = 0

    elif vert_coord in ("theta-e", "thetae", "eth"):
        vcor = 5
        icorsw = 0
        idir = 1
        delta = 0.01

        eth = get_eth(_wrfin, timeidx, method=method, squeeze=squeeze,
                      cache=cache, meta=False, _key=_key)

        coriolis = extract_vars(_wrfin, timeidx, "F",
                                method, squeeze, cache, meta=False,
                                _key=_key)["F"]

        p_hpa = p * ConversionFactors.PA_TO_HPA

        vcord_array = _monotonic(eth, p_hpa, coriolis, idir, delta, icorsw)

        # We only extrapolate temperature fields below ground if we are
        # interpolating to pressure or height vertical surfaces.
        icase = 0

    # Set the missing value
    if isinstance(field, ma.MaskedArray):
        missing = field.fill_value
    else:
        missing = default_fill(np.float64)

    if field.shape != p.shape:
        raise ValueError("'field' shape does not match other variable "
                         "shapes. Verify that the 'timeidx' parameter "
                         "matches the same value used when extracting the "
                         "'field' variable.")

    # Some field types are in different units than the Fortran routine
    # expects
    conv_factor = in_unitmap.get(field_type)

    if conv_factor is not None:
        field_ = field * conv_factor
    else:
        field_ = field

    res = _vintrp(field_, p, tk, qv, ght, terht, sfp, smsfp,
                  vcord_array, interp_levels,
                  icase, extrap, vcor, log_p_int, missing)

    conv_factor = out_unitmap.get(field_type)

    if conv_factor is not None:
        res_ = res * conv_factor
    else:
        res_ = res

    return ma.masked_values(res_, missing)
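# Illustrative usage sketch (not part of the original module, and the file
# name is a placeholder): interpolate temperature onto pressure surfaces
# using the documented vinterp() parameters above, with below-ground
# extrapolation and log-pressure interpolation enabled.
def _example_vinterp_to_pressure(wrf_path="wrfout_d01_placeholder"):
    """Interpolate temperature [K] onto the 850/700/500 hPa surfaces."""
    from netCDF4 import Dataset
    from wrf import getvar

    wrfin = Dataset(wrf_path)

    # Extract temperature in Kelvin, then interpolate it to pressure levels.
    # field_type="tk" tells vinterp the field is a temperature in K so that
    # below-ground extrapolation can be applied.
    tk_field = getvar(wrfin, "temp", units="k")
    return vinterp(wrfin, field=tk_field, vert_coord="pressure",
                   interp_levels=[850., 700., 500.], extrapolate=True,
                   field_type="tk", log_p=True)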
def main(args):
    fname = args.file
    save_file = args.save_file

    # List of variables that are already included in the WRF output
    variables = ['XLAT', 'XLONG', 'LANDMASK', 'LAKEMASK']

    # Output time units
    time_units = 'seconds since 1970-01-01 00:00:00'

    os.makedirs(os.path.dirname(save_file), exist_ok=True)

    # Heights in meters and pressure levels (mb) for interpolation of the
    # U and V components
    heights_m = [50, 100, 150, 500, 600, 700, 800, 900, 1000, 1500, 2000,
                 2500]
    heights_mb = [200, 300, 500, 700, 850, 925]

    # Open with xarray, keep the global attributes, then grab the underlying
    # netCDF4 dataset handle (a private xarray attribute) for wrf-python
    ncfile = xr.open_dataset(fname)
    original_global_attributes = ncfile.attrs
    ncfile = ncfile._file_obj.ds

    # Load variables and append to dictionary
    nc_vars = {}
    for v in variables:
        nc_vars[v] = cf.delete_attr(getvar(ncfile, v))

    # Calculate u and v components of wind rotated to Earth coordinates
    uvm = getvar(ncfile, 'uvmet')

    # Subtract terrain height from height above sea level for height in
    # meters
    new_z = getvar(ncfile, 'z') - getvar(ncfile, 'ter')

    # Interpolate u and v components of wind to defined heights (in meters)
    uvtemp = interplevel(uvm, new_z, heights_m, default_fill(np.float32))
    uvtemp = uvtemp.rename({'level': 'height'})
    utemp, vtemp = cf.split_uvm(uvtemp)

    # Concatenate the list of calculated u and v values into data arrays
    nc_vars['UH'] = xr.concat(utemp, dim='height')
    nc_vars['VH'] = xr.concat(vtemp, dim='height')

    # Get pressure - units are Pa, so convert to mb
    p = getvar(ncfile, 'p') * .01

    # Interpolate u and v components of wind to defined pressure levels (mb)
    uvtemp = interplevel(uvm, p, heights_mb, default_fill(np.float32))
    uvtemp = uvtemp.rename({'level': 'pressure'})
    utemp, vtemp = cf.split_uvm(uvtemp)

    # Get geopotential height and interpolate to defined pressure levels (mb)
    ght = g_geoht.get_height(ncfile, msl=True)
    geoht = interplevel(ght, p, heights_mb, default_fill(np.float32))
    geoht = geoht.rename({'level': 'pressure'})

    # Concatenate the calculated u and v values and geopotential height into
    # data arrays
    nc_vars['UP'] = xr.concat(utemp, dim='pressure')
    nc_vars['VP'] = xr.concat(vtemp, dim='pressure')
    nc_vars['geoht'] = xr.concat(geoht, dim='pressure')

    # Calculate 10m u and v components of wind rotated to Earth coordinates
    # and split into separate variables
    nc_vars['U10'], nc_vars['V10'] = cf.split_uvm(getvar(ncfile, 'uvmet10'))

    # Create xarray dataset of variables
    ds = xr.Dataset({**nc_vars})

    ds['UH'] = ds.UH.astype(np.float32)
    ds['VH'] = ds.VH.astype(np.float32)
    ds['UP'] = ds.UP.astype(np.float32)
    ds['VP'] = ds.VP.astype(np.float32)
    ds['geoht'] = ds.geoht.astype(np.float32)
    ds['height'] = ds.height.astype(np.int32)
    ds['pressure'] = ds.pressure.astype(np.int32)

    try:
        del ds.UH.attrs['vert_units']
        del ds.VH.attrs['vert_units']
        del ds.UP.attrs['vert_units']
        del ds.VP.attrs['vert_units']
    except KeyError:
        pass

    ds['Times'] = np.array([
        pd.Timestamp(ds.Time.data).strftime('%Y-%m-%d_%H:%M:%S')
    ]).astype('<S19')
    ds = ds.expand_dims('Time', axis=0)

    # Add description and units for lon, lat dimensions for georeferencing
    ds['XLAT'].attrs['description'] = 'latitude'
    ds['XLAT'].attrs['units'] = 'degree_north'
    ds['XLONG'].attrs['description'] = 'longitude'
    ds['XLONG'].attrs['units'] = 'degree_east'

    # Set XTIME attribute
    ds['XTIME'].attrs['units'] = 'minutes'

    # Set lon attributes
    ds['XLONG'].attrs['long_name'] = 'Longitude'
    ds['XLONG'].attrs['standard_name'] = 'longitude'
    ds['XLONG'].attrs['short_name'] = 'lon'
    ds['XLONG'].attrs['units'] = 'degrees_east'
    ds['XLONG'].attrs['axis'] = 'X'
    ds['XLONG'].attrs['valid_min'] = np.float32(-180.0)
    ds['XLONG'].attrs['valid_max'] = np.float32(180.0)

    # Set lat attributes
    ds['XLAT'].attrs['long_name'] = 'Latitude'
    ds['XLAT'].attrs['standard_name'] = 'latitude'
    ds['XLAT'].attrs['short_name'] = 'lat'
    ds['XLAT'].attrs['units'] = 'degrees_north'
    ds['XLAT'].attrs['axis'] = 'Y'
    ds['XLAT'].attrs['valid_min'] = np.float32(-90.0)
    ds['XLAT'].attrs['valid_max'] = np.float32(90.0)

    # Set vertical coordinate attributes
    ds['height'].attrs['long_name'] = 'Height Above Ground Level'
    ds['height'].attrs['standard_name'] = 'height'
    ds['height'].attrs['comment'] = ('Derived from subtracting terrain '
                                     'height from height above sea level')
    ds['height'].attrs['units'] = 'm'
    ds['height'].attrs['axis'] = 'Z'
    ds['height'].attrs['positive'] = 'up'

    ds['pressure'].attrs['long_name'] = 'Pressure'
    ds['pressure'].attrs['standard_name'] = 'air_pressure'
    ds['pressure'].attrs['units'] = 'millibars'
    ds['pressure'].attrs['axis'] = 'Z'
    ds['pressure'].attrs['positive'] = 'up'

    # Set u attributes - interpolated to height in meters
    ds['UH'].attrs['long_name'] = 'Eastward Wind Component'
    ds['UH'].attrs['standard_name'] = 'eastward_wind'
    ds['UH'].attrs['short_name'] = 'u'
    ds['UH'].attrs['units'] = 'm s-1'
    ds['UH'].attrs['description'] = ('earth rotated u, interpolated to '
                                     'Height Above Ground Level in meters')
    ds['UH'].attrs['valid_min'] = np.float32(-300)
    ds['UH'].attrs['valid_max'] = np.float32(300)

    # Set v attributes - interpolated to height in meters
    ds['VH'].attrs['long_name'] = 'Northward Wind Component'
    ds['VH'].attrs['standard_name'] = 'northward_wind'
    ds['VH'].attrs['short_name'] = 'v'
    ds['VH'].attrs['units'] = 'm s-1'
    ds['VH'].attrs['description'] = ('earth rotated v, interpolated to '
                                     'Height Above Ground Level in meters')
    ds['VH'].attrs['valid_min'] = np.float32(-300)
    ds['VH'].attrs['valid_max'] = np.float32(300)

    # Set u attributes - interpolated to pressure in mb
    ds['UP'].attrs['long_name'] = 'Eastward Wind Component'
    ds['UP'].attrs['standard_name'] = 'eastward_wind'
    ds['UP'].attrs['short_name'] = 'u'
    ds['UP'].attrs['units'] = 'm s-1'
    ds['UP'].attrs['description'] = ('earth rotated u, interpolated to '
                                     'pressure in millibars')
    ds['UP'].attrs['valid_min'] = np.float32(-300)
    ds['UP'].attrs['valid_max'] = np.float32(300)

    # Set v attributes - interpolated to pressure in mb
    ds['VP'].attrs['long_name'] = 'Northward Wind Component'
    ds['VP'].attrs['standard_name'] = 'northward_wind'
    ds['VP'].attrs['short_name'] = 'v'
    ds['VP'].attrs['units'] = 'm s-1'
    ds['VP'].attrs['description'] = ('earth rotated v, interpolated to '
                                     'pressure in millibars')
    ds['VP'].attrs['valid_min'] = np.float32(-300)
    ds['VP'].attrs['valid_max'] = np.float32(300)

    # Set u10 attributes
    ds['U10'].attrs['long_name'] = 'Eastward Wind Component - 10m'
    ds['U10'].attrs['standard_name'] = 'eastward_wind'
    ds['U10'].attrs['short_name'] = 'u'
    ds['U10'].attrs['units'] = 'm s-1'
    ds['U10'].attrs['description'] = '10m earth rotated u'
    ds['U10'].attrs['valid_min'] = np.float32(-300)
    ds['U10'].attrs['valid_max'] = np.float32(300)

    # Set v10 attributes
    ds['V10'].attrs['long_name'] = 'Northward Wind Component - 10m'
    ds['V10'].attrs['standard_name'] = 'northward_wind'
    ds['V10'].attrs['short_name'] = 'v'
    ds['V10'].attrs['units'] = 'm s-1'
    ds['V10'].attrs['description'] = '10m earth rotated v'
    ds['V10'].attrs['valid_min'] = np.float32(-300)
    ds['V10'].attrs['valid_max'] = np.float32(300)

    # Set geopotential height attributes
    ds['geoht'].attrs['long_name'] = ('Geopotential Height Above Mean Sea '
                                      'Level')
    ds['geoht'].attrs['standard_name'] = 'geopotential_height'
    ds['geoht'].attrs['units'] = 'm'
    ds['geoht'].attrs['description'] = ('geopotential height above mean sea '
                                        'level, interpolated to pressure in '
                                        'millibars')

    ds['LANDMASK'].attrs['standard_name'] = 'land_binary_mask'
    ds['LANDMASK'].attrs['long_name'] = 'Land Mask'
    ds['LAKEMASK'].attrs['long_name'] = 'Lake Mask'
    ds['XTIME'].attrs['long_name'] = 'minutes since simulation start'

    # Set time attribute
    ds['Time'].attrs['standard_name'] = 'time'

    datetime_format = '%Y%m%dT%H%M%SZ'
    created = pd.Timestamp.utcnow().strftime(datetime_format)  # creation time
    time_start = pd.Timestamp(pd.Timestamp(
        ds.Time.data[0])).strftime(datetime_format)
    time_end = pd.Timestamp(pd.Timestamp(
        ds.Time.data[0])).strftime(datetime_format)

    global_attributes = OrderedDict([
        ('title', 'Rutgers Weather Research and Forecasting Model'),
        ('summary', 'Processed netCDF containing subset of RUWRF output'),
        ('keywords', 'Weather Advisories > Marine Weather/Forecast'),
        ('Conventions', 'CF-1.7'),
        ('naming_authority', 'edu.rutgers.marine.rucool'),
        ('history',
         '10-minute WRF raw output processed into new 10-minute file with '
         'selected variables.'),
        ('processing_level', 'Level 2'),
        ('comment', 'WRF Model operated by RUCOOL'),
        ('acknowledgement',
         'This data is provided by the Rutgers Center for Ocean Observing '
         'Leadership. Funding is provided by the New Jersey Board of Public '
         'Utilities.'),
        ('standard_name_vocabulary', 'CF Standard Name Table v41'),
        ('date_created', created),
        ('creator_name', 'Joseph Brodie'),
        ('creator_email', '*****@*****.**'),
        ('creator_url', 'rucool.marine.rutgers.edu'),
        ('institution',
         'Center for Ocean Observing and Leadership, Department of Marine & '
         'Coastal Sciences, Rutgers University'),
        ('project',
         'New Jersey Board of Public Utilities - Offshore Wind Energy - '
         'RUWRF Model'),
        ('geospatial_lat_min', -90),
        ('geospatial_lat_max', 90),
        ('geospatial_lon_min', -180),
        ('geospatial_lon_max', 180),
        ('geospatial_vertical_min', 0.0),
        ('geospatial_vertical_max', 0.0),
        ('geospatial_vertical_positive', 'down'),
        ('time_coverage_start', time_start),
        ('time_coverage_end', time_end),
        ('creator_type', 'person'),
        ('creator_institution', 'Rutgers University'),
        ('contributor_name', 'Joseph Brodie'),
        ('contributor_role', 'Director of Atmospheric Research'),
        ('geospatial_lat_units', 'degrees_north'),
        ('geospatial_lon_units', 'degrees_east'),
        ('date_modified', created),
        ('date_issued', created),
        ('date_metadata_modified', created),
        ('keywords_vocabulary', 'GCMD Science Keywords'),
        ('platform', 'WRF Model Run'),
        ('cdm_data_type', 'Grid'),
        ('references',
         'http://maracoos.org/node/146 '
         'https://rucool.marine.rutgers.edu/facilities '
         'https://rucool.marine.rutgers.edu/data')])

    global_attributes.update(original_global_attributes)
    ds = ds.assign_attrs(global_attributes)

    # Add compression to all variables
    encoding = {}
    for k in ds.data_vars:
        encoding[k] = {'zlib': True, 'complevel': 1}

    # Add the encoding for time so xarray exports the proper time.
    # Also remove compression from dimensions. They should never have fill
    # values.
    encoding['Time'] = dict(units=time_units, calendar='gregorian',
                            zlib=False, _FillValue=False, dtype=np.double)
    encoding['XLONG'] = dict(zlib=False, _FillValue=False)
    encoding['XLAT'] = dict(zlib=False, _FillValue=False)
    encoding['height'] = dict(zlib=False, _FillValue=False, dtype=np.int32)
    encoding['pressure'] = dict(zlib=False, _FillValue=False, dtype=np.int32)

    ds.to_netcdf(save_file, encoding=encoding, format='NETCDF4',
                 engine='netcdf4', unlimited_dims='Time')
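# Illustrative entry point (an assumption, not necessarily the original
# script's CLI): main() only relies on args.file and args.save_file, so a
# minimal argparse wrapper like this is enough to run it.
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(
        description='Process raw RUWRF output into a CF-style NetCDF subset.')
    parser.add_argument('-f', '--file', required=True,
                        help='Path to the raw WRF output NetCDF file')
    parser.add_argument('-s', '--save_file', required=True,
                        help='Path of the processed NetCDF file to write')

    main(parser.parse_args())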