def test_rh_specific_humidity():
    """Tests relative humidity from specific humidity."""
    p = 1013.25 * units.mbar
    temperature = 20. * units.degC
    q = 0.012
    rh = relative_humidity_from_specific_humidity(q, temperature, p)
    assert_almost_equal(rh, 82.7145 * units.percent, 3)


def test_rh_specific_humidity():
    """Test relative humidity from specific humidity."""
    p = 1013.25 * units.mbar
    temperature = 20. * units.degC
    q = 0.012 * units.dimensionless
    rh = relative_humidity_from_specific_humidity(q, temperature, p)
    assert_almost_equal(rh, 82.7145 * units.percent, 3)
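# The two tests above pass specific humidity first; other snippets in this section
# (e.g. the rel_hum helpers below) pass pressure first. Both orders appear because
# the function signature changed between MetPy releases; the pressure-first form is
# assumed to be the MetPy >= 1.0 API. A minimal, self-contained sketch of that call
# for the same inputs as the tests:
import metpy.calc as mpcalc
from metpy.units import units

p = 1013.25 * units.hPa
temperature = 20. * units.degC
q = 0.012 * units('kg/kg')

# MetPy >= 1.0 (assumed): pressure, temperature, specific humidity
rh = mpcalc.relative_humidity_from_specific_humidity(p, temperature, q)
print(rh.to('percent'))  # ~82.7 percent, matching the expected value in the tests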
def rel_hum(p, t, q):
    """Compute relative humidity in [%] from pressure, temperature, and specific humidity.

    Arguments:
    p -- pressure in [Pa]
    t -- temperature in [K]
    q -- specific humidity in [kg/kg]

    Returns:
    Relative humidity in [%].
    """
    return mpcalc.relative_humidity_from_specific_humidity(
        units.Pa * p, units.K * t, q).to("dimensionless").m * 100
def rel_hum(p, t, q):
    """Compute relative humidity in [%] from pressure, temperature, and specific humidity.

    Arguments:
    p -- pressure in [Pa]
    t -- temperature in [K]
    q -- specific humidity in [kg/kg]

    Returns:
    Relative humidity in [%]. Same shape as the input fields.
    """
    p = units.Quantity(p, "Pa")
    t = units.Quantity(t, "K")
    rel_humidity = mpcalc.relative_humidity_from_specific_humidity(p, t, q)
    # Return relative humidity in [%].
    return rel_humidity * 100
def calc_rh_from_q(q, T, p):
    """Estimate relative humidity from specific humidity, temperature and pressure.

    Input:
        q : specific humidity
        T : temperature values estimated from interpolated potential_temperature (degC)
        p : pressure (hPa)
    Output:
        rh : relative humidity values in [%]

    Uses MetPy's mpcalc.relative_humidity_from_specific_humidity().
    """
    rh = mpcalc.relative_humidity_from_specific_humidity(
        q, T * units.degC, p * units.hPa,
    ).magnitude * 100
    return rh
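# The helpers above wrap plain NumPy inputs in pint quantities before calling MetPy.
# A short, self-contained sketch of that pattern (array values are illustrative; the
# pressure-first order assumes a MetPy >= 1.0 signature), including the clamping to
# [0, 100] that some of the later snippets apply:
import numpy as np
import metpy.calc as mpcalc
from metpy.units import units

p = np.array([100000., 85000., 50000.]) * units.Pa      # pressure
t = np.array([293.15, 288.15, 260.15]) * units.K        # temperature
q = np.array([0.012, 0.006, 0.0005]) * units('kg/kg')   # specific humidity

rh = mpcalc.relative_humidity_from_specific_humidity(p, t, q).to('dimensionless').m * 100
rh = np.clip(rh, 0., 100.)  # clamp to physical bounds, as in the read_barpa snippet below
print(rh)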
# Get the cross section, and convert lat/lon to supplementary coordinates:
cross = cross_section(data, start, end)
cross.set_coords(('lat', 'lon'), True)
print(cross)

##############################
# For this example, we will be plotting potential temperature, relative humidity, and
# tangential/normal winds. And so, we need to calculate those, and add them to the dataset:
temperature, pressure, specific_humidity = xr.broadcast(cross['Temperature'],
                                                        cross['isobaric'],
                                                        cross['Specific_humidity'])

theta = mpcalc.potential_temperature(pressure, temperature)
rh = mpcalc.relative_humidity_from_specific_humidity(specific_humidity, temperature, pressure)

# These calculations return unit arrays, so put those back into DataArrays in our Dataset
cross['Potential_temperature'] = xr.DataArray(theta,
                                              coords=temperature.coords,
                                              dims=temperature.dims,
                                              attrs={'units': theta.units})
cross['Relative_humidity'] = xr.DataArray(rh,
                                          coords=specific_humidity.coords,
                                          dims=specific_humidity.dims,
                                          attrs={'units': rh.units})
cross['u_wind'].metpy.convert_units('knots')
cross['v_wind'].metpy.convert_units('knots')
cross['t_wind'], cross['n_wind'] = mpcalc.cross_section_components(cross['u_wind'],
                                                                   cross['v_wind'])
# Get the cross section, and convert lat/lon to supplementary coordinates:
cross = cross_section(data, start, end).set_coords(('lat', 'lon'))
print(cross)

##############################
# For this example, we will be plotting potential temperature, relative humidity, and
# tangential/normal winds. And so, we need to calculate those, and add them to the dataset:
cross['Potential_temperature'] = mpcalc.potential_temperature(
    cross['isobaric'],
    cross['Temperature']
)
cross['Relative_humidity'] = mpcalc.relative_humidity_from_specific_humidity(
    cross['isobaric'],
    cross['Temperature'],
    cross['Specific_humidity']
)
cross['u_wind'] = cross['u_wind'].metpy.convert_units('knots')
cross['v_wind'] = cross['v_wind'].metpy.convert_units('knots')
cross['t_wind'], cross['n_wind'] = mpcalc.cross_section_components(
    cross['u_wind'],
    cross['v_wind']
)
print(cross)

##############################
# Now, we can make the plot.

# Define the figure object and primary axes
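# The cross-section snippet above relies on MetPy's xarray integration: DataArrays
# carry their units in attrs, and calculation functions broadcast and return
# unit-aware DataArrays directly. A minimal sketch of the same idea on a toy dataset
# (variable names, values and units here are illustrative assumptions, not the
# dataset used above), assuming MetPy >= 1.0:
import numpy as np
import xarray as xr
import metpy.calc as mpcalc

ds = xr.Dataset(
    {
        'Temperature': (('isobaric',), np.array([285., 270., 250.]), {'units': 'K'}),
        'Specific_humidity': (('isobaric',), np.array([8e-3, 3e-3, 5e-4]),
                              {'units': 'kg/kg'}),
    },
    coords={'isobaric': ('isobaric', np.array([1000., 700., 500.]), {'units': 'hPa'})},
)

# MetPy parses the units attrs, broadcasts, and returns a DataArray
rh = mpcalc.relative_humidity_from_specific_humidity(
    ds['isobaric'], ds['Temperature'], ds['Specific_humidity']
)
print(rh.metpy.convert_units('percent'))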
########################################
# Note that the units on our wind variables are not ideal for plotting. Instead, let us
# convert them to more appropriate values.
isent_data['u_wind'] = isent_data['u_wind'].metpy.convert_units('kt')
isent_data['v_wind'] = isent_data['v_wind'].metpy.convert_units('kt')

#################################
# **Converting to Relative Humidity**
#
# The NARR only gives specific humidity on isobaric vertical levels, so relative humidity will
# have to be calculated after the interpolation to isentropic space.
isent_data['Relative_humidity'] = mpcalc.relative_humidity_from_specific_humidity(
    isent_data['pressure'],
    isent_data['temperature'],
    isent_data['Specific_humidity']
).metpy.convert_units('percent')

#######################################
# **Plotting the Isentropic Analysis**

# Set up our projection and coordinates
crs = ccrs.LambertConformal(central_longitude=-100.0, central_latitude=45.0)
lon = isent_data['pressure'].metpy.longitude
lat = isent_data['pressure'].metpy.latitude

# Coordinates to limit map area
bounds = [(-122., -75., 25., 50.)]
# Choose a level to plot, in this case 296 K (our sole level in this example)
level = 0
# coordinates, with the number of vertical levels as specified above.
print(isentprs.shape)
print(isentspech.shape)
print(isentu.shape)
print(isentv.shape)
print(isenttmp.shape)
print(isenthgt.shape)

#################################
# **Converting to Relative Humidity**
#
# The NARR only gives specific humidity on isobaric vertical levels, so relative humidity will
# have to be calculated after the interpolation to isentropic space.
isentrh = 100 * mpcalc.relative_humidity_from_specific_humidity(isentprs, isenttmp, isentspech)

#######################################
# **Plotting the Isentropic Analysis**

# Set up our projection
crs = ccrs.LambertConformal(central_longitude=-100.0, central_latitude=45.0)

# Coordinates to limit map area
bounds = [(-122., -75., 25., 50.)]
# Choose a level to plot, in this case 296 K
level = 0

fig = plt.figure(figsize=(17., 12.))
add_metpy_logo(fig, 120, 245, size='large')
ax = fig.add_subplot(1, 1, 1, projection=crs)
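# For context, the isentropic-level arrays printed above (isentprs, isentspech, isentu,
# isentv, isenttmp, isenthgt) come from an isentropic interpolation step like the one in
# the cross_section() snippet further down. A hedged sketch of that step in the older
# MetPy 0.x style used here (lev, tmp, spech, uwnd, vwnd and hgt are assumed to be
# unit-aware isobaric fields read from the NARR file):
isentlevs = [296.] * units.kelvin   # the single 296 K level referred to above
isent_anal = mpcalc.isentropic_interpolation(isentlevs, lev, tmp, spech, uwnd, vwnd,
                                             hgt, tmpk_out=True)
isentprs, isenttmp, isentspech, isentu, isentv, isenthgt = isent_anal
# Relative humidity is then derived on the isentropic surfaces as shown above.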
def read_barpa(domain, time, experiment, forcing_mdl, ensemble): #NOTE: Data has been set to zero for below surface pressure. #But wrf_parallel doesn't use these levels anyway #TODO: The above statement I think is false. -273.15 K values may cause problems for some routines, # even if below ground level. Mask these values to NaN #Create a list of 6-hourly "query" date-times, based on the start and end dates provided. query_dates = date_seq(time, "hours", 6) #Get a list of all BARPA files in the du7 directory, for a given experiment/forcing model geopt_files = np.sort(glob.glob("/g/data/du7/barpa/trials/BARPA-EASTAUS_12km/"+\ experiment+"/"+forcing_mdl+\ "/"+ensemble+"/*/*/pp2/geop_ht_uv*")) hus_files = np.sort(glob.glob("/g/data/du7/barpa/trials/BARPA-EASTAUS_12km/"+\ experiment+"/"+forcing_mdl+\ "/"+ensemble+"/*/*/pp2/spec_hum*")) ta_files = np.sort(glob.glob("/g/data/du7/barpa/trials/BARPA-EASTAUS_12km/"+\ experiment+"/"+forcing_mdl+\ "/"+ensemble+"/*/*/pp2/air_temp*")) ua_files = np.sort(glob.glob("/g/data/du7/barpa/trials/BARPA-EASTAUS_12km/"+\ experiment+"/"+forcing_mdl+\ "/"+ensemble+"/*/*/pp2/wnd_ucmp*")) va_files = np.sort(glob.glob("/g/data/du7/barpa/trials/BARPA-EASTAUS_12km/"+\ experiment+"/"+forcing_mdl+\ "/"+ensemble+"/*/*/pp2/wnd_vcmp*")) huss_files = np.sort(glob.glob("/g/data/du7/barpa/trials/BARPA-EASTAUS_12km/"+\ experiment+"/"+forcing_mdl+\ "/"+ensemble+"/*/*/pp3/qsair_scrn*")) dewpt_files = np.sort(glob.glob("/g/data/du7/barpa/trials/BARPA-EASTAUS_12km/"+\ experiment+"/"+forcing_mdl+\ "/"+ensemble+"/*/*/pp26/dewpt_scrn*")) tas_files = np.sort(glob.glob("/g/data/du7/barpa/trials/BARPA-EASTAUS_12km/"+\ experiment+"/"+forcing_mdl+\ "/"+ensemble+"/*/*/pp3/temp_scrn*")) uas_files = np.sort(glob.glob("/g/data/du7/barpa/trials/BARPA-EASTAUS_12km/"+\ experiment+"/"+forcing_mdl+\ "/"+ensemble+"/*/*/pp3/uwnd10m_b*")) vas_files = np.sort(glob.glob("/g/data/du7/barpa/trials/BARPA-EASTAUS_12km/"+\ experiment+"/"+forcing_mdl+\ "/"+ensemble+"/*/*/pp3/vwnd10m_b*")) ps_files = np.sort(glob.glob("/g/data/du7/barpa/trials/BARPA-EASTAUS_12km/"+\ experiment+"/"+forcing_mdl+\ "/"+ensemble+"/*/*/pp26/sfc_pres*")) wg_files = np.sort(glob.glob("/g/data/du7/barpa/trials/BARPA-EASTAUS_12km/"+\ experiment+"/"+forcing_mdl+\ "/"+ensemble+"/*/*/pp26/wndgust10m*")) #Get the files that we need geopt_files = geopt_files[file_dates(geopt_files, query_dates)] hus_files = hus_files[file_dates(hus_files, query_dates)] ta_files = ta_files[file_dates(ta_files, query_dates)] ua_files = ua_files[file_dates(ua_files, query_dates)] va_files = va_files[file_dates(va_files, query_dates)] huss_files = huss_files[file_dates(huss_files, query_dates)] dewpt_files = dewpt_files[file_dates(dewpt_files, query_dates)] tas_files = tas_files[file_dates(tas_files, query_dates)] uas_files = uas_files[file_dates(uas_files, query_dates)] vas_files = vas_files[file_dates(vas_files, query_dates)] ps_files = ps_files[file_dates(ps_files, query_dates)] wg_files = wg_files[file_dates(wg_files, query_dates)] #Load in these files, dropping duplicates #Drop the variable "realization", as it appears in some streams but not others, and is not used geopt_ds = drop_duplicates( xr.open_mfdataset(geopt_files, concat_dim="time", combine="nested", drop_variables=["realization"])) #m hus_ds = drop_duplicates( xr.open_mfdataset(hus_files, concat_dim="time", combine="nested", drop_variables=["realization"])) #1 (kg/kg?) 
ta_ds = drop_duplicates( xr.open_mfdataset(ta_files, concat_dim="time", combine="nested", drop_variables=["realization"])) #K ua_ds = drop_duplicates( xr.open_mfdataset(ua_files, concat_dim="time", combine="nested", drop_variables=["realization"])) #m/s va_ds = drop_duplicates( xr.open_mfdataset(va_files, concat_dim="time", combine="nested", drop_variables=["realization"])) #m/s huss_ds = drop_duplicates( xr.open_mfdataset(huss_files, concat_dim="time", combine="nested", drop_variables=["realization"])) #kg/kg dewpt_ds = drop_duplicates( xr.open_mfdataset(dewpt_files, concat_dim="time", combine="nested", drop_variables=["realization"])) #K tas_ds = drop_duplicates( xr.open_mfdataset(tas_files, concat_dim="time", combine="nested", drop_variables=["realization"])) #K uas_ds = drop_duplicates( xr.open_mfdataset(uas_files, concat_dim="time", combine="nested", drop_variables=["realization"])) #m/s vas_ds = drop_duplicates( xr.open_mfdataset(vas_files, concat_dim="time", combine="nested", drop_variables=["realization"])) #m/s ps_ds = drop_duplicates( xr.open_mfdataset(ps_files, concat_dim="time", combine="nested", drop_variables=["realization"])) #Pa wg_ds = drop_duplicates( xr.open_mfdataset(wg_files, concat_dim="time", combine="nested", drop_variables=["realization"])) #m/s #Slice to query times, spatial domain, convert to dataarray, restrict to below 100 hPa lons = slice(domain[2], domain[3]) lats = slice(domain[0], domain[1]) geopt_da = geopt_ds.sel({ "time": query_dates, "pressure": geopt_ds["pressure"] >= 100, "latitude": lats, "longitude": lons })["geop_ht_uv"] hus_da = hus_ds.sel({ "time": query_dates, "pressure": geopt_ds["pressure"] >= 100, "latitude": lats, "longitude": lons })["spec_hum_uv"] ta_da = ta_ds.sel({ "time": query_dates, "pressure": geopt_ds["pressure"] >= 100, "latitude": lats, "longitude": lons })["air_temp_uv"] ua_da = ua_ds.sel({ "time": query_dates, "pressure": geopt_ds["pressure"] >= 100, "latitude": lats, "longitude": lons })["wnd_ucmp_uv"] va_da = va_ds.sel({ "time": query_dates, "pressure": geopt_ds["pressure"] >= 100, "latitude": lats, "longitude": lons })["wnd_vcmp_uv"] huss_da = huss_ds.sel({ "time": query_dates, "latitude": lats, "longitude": lons })["qsair_scrn"] dewpt_da = dewpt_ds.sel({ "time": query_dates, "latitude": lats, "longitude": lons })["dewpt_scrn"] tas_da = tas_ds.sel({ "time": query_dates, "latitude": lats, "longitude": lons })["temp_scrn"] uas_da = uas_ds.sel({ "time": query_dates, "latitude": lats, "longitude": lons })["uwnd10m_b"] vas_da = vas_ds.sel({ "time": query_dates, "latitude": lats, "longitude": lons })["vwnd10m_b"] ps_da = ps_ds.sel({ "time": query_dates, "latitude": lats, "longitude": lons })["sfc_pres"] wg_da = wg_ds.sel({ "time": query_dates, "latitude": lats, "longitude": lons })["wndgust10m"] #As in read_cmip, make sure that all data arrays have the same times (take the union of the set of times). 
#If one of the dataarrays goes to size=0 on the time dimension, throw an error common_dates = np.array(list(set(hus_da.time.values) & set(ta_da.time.values) & set(ua_da.time.values)\ & set(va_da.time.values) & set(huss_da.time.values) & set(tas_da.time.values)\ & set(uas_da.time.values) & set(vas_da.time.values) & set(ps_da.time.values)\ & set(geopt_da.time.values) & set(wg_da.time.values) & set(dewpt_da.time.values))) geopt_da = geopt_da.isel({"time": np.in1d(geopt_da.time, common_dates)}) hus_da = hus_da.isel({"time": np.in1d(hus_da.time, common_dates)}) ta_da = ta_da.isel({"time": np.in1d(ta_da.time, common_dates)}) ua_da = ua_da.isel({"time": np.in1d(ua_da.time, common_dates)}) va_da = va_da.isel({"time": np.in1d(va_da.time, common_dates)}) huss_da = huss_da.isel({"time": np.in1d(huss_da.time, common_dates)}) dewpt_da = dewpt_da.isel({"time": np.in1d(dewpt_da.time, common_dates)}) tas_da = tas_da.isel({"time": np.in1d(tas_da.time, common_dates)}) uas_da = uas_da.isel({"time": np.in1d(uas_da.time, common_dates)}) vas_da = vas_da.isel({"time": np.in1d(vas_da.time, common_dates)}) ps_da = ps_da.isel({"time": np.in1d(ps_da.time, common_dates)}) wg_da = wg_da.isel({"time": np.in1d(wg_da.time, common_dates)}) for da in [ geopt_da, hus_da, ta_da, ua_da, va_da, huss_da, dewpt_da, tas_da, uas_da, vas_da, ps_da, wg_da ]: if len(da.time.values) == 0: varname = da.attrs["standard_name"] raise ValueError("ERROR: " + varname + " HAS BEEN SLICED IN TIME DIMENSION TO SIZE=0") #Now linearly interpolate pressure level data to match the BARRA pressure levels kwargs = {"fill_value": None, "bounds_error": False} #barra_levs = [100.0000000001, 150.0000000001, 175.0000000001, # 200.0000000001, 225.0000000001, 250.0000000001, 275.0000000001, # 300.0000000001, 350.0000000001, 400.0000000001, 450.0000000001, # 500.0000000001, 600.0000000001, 700.0000000001, 750.0000000001, # 800.0000000001, 850.0000000001, 900.0000000001, 925.0000000001, # 950.0000000001, 975.0000000001, 1000.0000000001] #geopt_da = geopt_da.interp(coords={"pressure":barra_levs}, method="linear", kwargs=kwargs) #hus_da = hus_da.interp(coords={"pressure":barra_levs}, method="linear", kwargs=kwargs) #ta_da = ta_da.interp(coords={"pressure":barra_levs}, method="linear", kwargs=kwargs) #ua_da = ua_da.interp(coords={"pressure":barra_levs}, method="linear", kwargs=kwargs) #va_da = va_da.interp(coords={"pressure":barra_levs}, method="linear", kwargs=kwargs) #Linearly interpolate variables onto the same lat/lon grid (pressure level U/V grid). 
Extrapolate to staggered values outside the grid huss_da = huss_da.interp(coords={ "latitude": hus_da.latitude, "longitude": hus_da.longitude }, method="linear", kwargs=kwargs) dewpt_da = dewpt_da.interp(coords={ "latitude": hus_da.latitude, "longitude": hus_da.longitude }, method="linear", kwargs=kwargs) tas_da = tas_da.interp(coords={ "latitude": hus_da.latitude, "longitude": hus_da.longitude }, method="linear", kwargs=kwargs) uas_da = uas_da.interp(coords={ "latitude": hus_da.latitude, "longitude": hus_da.longitude }, method="linear", kwargs=kwargs) vas_da = vas_da.interp(coords={ "latitude": hus_da.latitude, "longitude": hus_da.longitude }, method="linear", kwargs=kwargs) ps_da = ps_da.interp(coords={ "latitude": hus_da.latitude, "longitude": hus_da.longitude }, method="linear", kwargs=kwargs) wg_da = wg_da.interp(coords={ "latitude": hus_da.latitude, "longitude": hus_da.longitude }, method="linear", kwargs=kwargs) #Get numpy arrays of everything, and convert temperatures to degC and sfc pressure to hPa geopt = geopt_da.values hus = hus_da.values ta = ta_da.values - 273.15 ua = ua_da.values va = va_da.values huss = huss_da.values dewpt = dewpt_da.values - 273.15 tas = tas_da.values - 273.15 uas = uas_da.values vas = vas_da.values ps = ps_da.values / 100. wg = wg_da.values #Mask -273.15 K values (these should only be values below surface) mask = (ta == (-273.15)) geopt[mask] = np.nan hus[mask] = np.nan ta[mask] = np.nan ua[mask] = np.nan va[mask] = np.nan #Create 3d pressure variable p = np.moveaxis( np.tile(hus_da.pressure.values, [ta.shape[2], ta.shape[3], 1]), 2, 0) #Get hur from hus, ta and p3d hur = np.array(mpcalc.relative_humidity_from_specific_humidity(hus, \ ta*units.degC, p*units.hectopascal) * 100) hur[hur < 0] = 0 hur[hur > 100] = 100 #Load terrain data terrain = xr.open_dataset("/g/data/du7/barpa/trials/BARPA-EASTAUS_12km/static/topog-BARPA-EASTAUS_12km.nc").\ sel({"latitude":lats, "longitude":lons})["topog"].values #Get lat/lon lat = hus_da.latitude.values lon = hus_da.longitude.values #Flip the pressure dimension ta = np.flip(ta, axis=1) hur = np.flip(hur, axis=1) geopt = np.flip(geopt, axis=1) p = np.flip(p, axis=0) ua = np.flip(ua, axis=1) va = np.flip(va, axis=1) #Return times from one of the data arrays (they are identical in time). If it is different to the query date, then throw a warning query_times = pd.to_datetime(query_dates) times = pd.to_datetime(huss_da.time.values) if all(np.in1d(query_times, times)): pass else: message = "\n ".join( ~query_times[np.in1d(query_times, times)].strftime("%Y%m%d %H:%M")) warnings.warn("WARNING: The following query dates were not loaded..." + message) #Format times for output (datetime objects) out_times = [ dt.datetime.strptime( huss_da.time.dt.strftime("%Y-%m-%d %H:%M").values[i], "%Y-%m-%d %H:%M") for i in np.arange(huss_da.time.shape[0]) ] return [ta, hur, geopt, terrain, p[:,0,0], ps, ua, va, uas, vas, tas, dewpt, wg, lon,\ lat, out_times]
def write_file(self, elev_file, punits=0, datetag=None, filename=None, append=False): if datetag is None: datetag = str(datetime.datetime.now().strftime('%Y_%m_%d')) if not append: ncfile = netCDF4.Dataset(self.opath / (self.fileprefix + 'climate_' + datetag.strftime("%Y_%m_%d") + '.nc'), mode='w', format='NETCDF4_CLASSIC') def getxy(pt): return pt.x, pt.y centroidseries = self.gdf1.geometry.centroid.to_crs(epsg=4327) tlon, tlat = [list(t) for t in zip(*map(getxy, centroidseries))] # Global Attributes ncfile.Conventions = 'CF-1.8' ncfile.featureType = 'timeSeries' ncfile.history = '' sp_dim = len(self.gdf1.index) # Create dimensions ncfile.createDimension('geomid', size=sp_dim) # hru_id ncfile.createDimension( 'time', size=None) # unlimited axis (can be appended to). # Create Variables time = ncfile.createVariable('time', 'f4', ('time', )) time.long_name = 'time' time.standard_name = 'time' time.units = 'days since ' + self.str_start time.calendar = 'standard' time[:] = np.arange(0, self.current_time_index, dtype=np.float) hru = ncfile.createVariable('geomid', 'i', ('geomid', )) hru.cf_role = 'timeseries_id' hru.long_name = 'local model hru id' hru[:] = np.asarray(self.gdf1.index) lat = ncfile.createVariable('hru_lat', np.dtype(np.float32).char, ('geomid', )) lat.long_name = 'Latitude of HRU centroid' lat.units = 'degrees_north' lat.standard_name = 'hru_latitude' lat[:] = tlat lon = ncfile.createVariable('hru_lon', np.dtype(np.float32).char, ('geomid', )) lon.long_name = 'Longitude of HRU centroid' lon.units = 'degrees_east' lon.standard_name = 'hru_longitude' lon[:] = tlon # crs = ncfile.createVariable('crs', np.dtype(np.int)) # crs.GeoTransform = self.grd[0].crs.GeoTransform # # crs.NAME = self.grd[0].crs.NAME # crs.grid_mapping_name = self.grd[0].crs.grid_mapping_name # crs.inverse_flattening = self.grd[0].crs.inverse_flattening # crs.long_name = self.grd[0].crs.long_name # crs.longitude_of_prime_meridian = self.grd[0].crs.longitude_of_prime_meridian # crs.semi_major_axis = self.grd[0].crs.semi_major_axis # crs.spatial_ref = self.grd[0].crs.spatial_ref else: ncfile = netCDF4.Dataset( self.opath / (self.fileprefix + 'climate_' + str(datetime.datetime.now().strftime('%Y%m%d')) + '.nc'), mode='a', format='NETCDF_CLASSIC') for index, tvar in enumerate(self.var_output): vartype = self.grd[index][self.var[index]].dtype ncvar = ncfile.createVariable(tvar, vartype, ('time', 'geomid')) ncvar.fill_value = netCDF4.default_fillvals['f8'] ncvar.long_name = self.grd[index][self.var[index]].long_name ncvar.standard_name = self.grd[index][ self.var[index]].standard_name ncvar.description = self.grd[index][self.var[index]].description # ncvar.grid_mapping = 'crs' ncvar.units = self.grd[index][self.var[index]].units if tvar in ['tmax', 'tmin']: if punits == 1: conv = units.degC ncvar[:, :] = units.Quantity(self._np_var[index, 0:self.current_time_index, :], ncvar.units)\ .to(conv).magnitude ncvar.units = conv.format_babel() else: conv = units.degF # ncvar[:,:] = ((self._np_var[index, 0:self.current_time_index, :]-273.15)*1.8)+32.0 ncvar[:, :] = units.Quantity(self._np_var[index, 0:self.current_time_index, :], ncvar.units)\ .to(conv).magnitude ncvar.units = conv.format_babel() elif tvar == 'prcp': if punits == 1: conv = units('mm') ncvar[:, :] = units.Quantity(self._np_var[index, 0:self.current_time_index, :], ncvar.units)\ .to(conv).magnitude ncvar.units = conv.units.format_babel() else: conv = units('inch') ncvar[:, :] = units.Quantity(self._np_var[index, 0:self.current_time_index, :], ncvar.units)\ 
.to(conv).magnitude ncvar.units = conv.units.format_babel() # else units are already in mm # ncvar[:,:] = np.multiply(self._np_var[index, 0:self.current_time_index, :], conv.magnitude) else: ncvar[:, :] = self._np_var[index, 0:self.current_time_index, :] ncvar.units = self.grd[index][self.var[index]].units elevf = gpd.read_file(elev_file, layer='hru_elev') elev = elevf['hru_elev'].values if all(x in self.var_output for x in ['tmax', 'tmin', 'shum']): tmax_ind = self.var_output.index('tmax') tmin_ind = self.var_output.index('tmin') shum_ind = self.var_output.index('shum') print(f'tmaxind: {tmax_ind}, tminind: {tmin_ind}, shumind: {shum_ind}') rel_h = np.zeros((self.current_time_index, self.numgeom)) for j in np.arange(np.int(self.numgeom)): pr = mpcalc.height_to_pressure_std(units.Quantity(elev[j], "m")) for i in np.arange(np.int(self.current_time_index)): tmax = units.Quantity(self._np_var[tmax_ind, i, j], units.kelvin) tmin = units.Quantity(self._np_var[tmin_ind, i, j], units.kelvin) spch = units.Quantity(self._np_var[shum_ind, i, j], "kg/kg") rhmax = mpcalc.relative_humidity_from_specific_humidity( pr, tmax, spch) rhmin = mpcalc.relative_humidity_from_specific_humidity( pr, tmin, spch) rel_h[i, j] = (rhmin.magnitude + rhmax.magnitude) / 2.0 ncvar = ncfile.createVariable('humidity', rel_h.dtype, ('time', 'geomid')) ncvar.units = "1" ncvar.fill_value = netCDF4.default_fillvals['f8'] ncvar[:, :] = rel_h[0:self.current_time_index, :] ncfile.close()
def run(self, start, end, dt, time_zone, nldas_ds, elevation_ds, projected_epsg, precip_ds=None, precip_var='', precip_dt='1H', precip_adj=1): station_ds = [] time = nldas_ds.dataset.variables['time'] start_i = nc4.date2index(start, time) - time_zone end_i = nc4.date2index(end, time) - time_zone datetimes = pd.date_range(start, end, freq=dt) records = [] j = 1 logging.debug(nldas_ds.dataset[nldas_ds.xdim][:]) logging.debug(nldas_ds.dataset[nldas_ds.ydim][:]) for i, coord in enumerate(nldas_ds.coords): x = np.floor( (coord.x - nldas_ds.cmin.x) / nldas_ds.res.x).astype(int) y = np.floor( (coord.y - nldas_ds.cmin.y) / nldas_ds.res.y).astype(int) nldas_srs = osr.SpatialReference() nldas_srs.ImportFromEPSG(4326) proj_srs = osr.SpatialReference() proj_srs.ImportFromEPSG(int(projected_epsg[5:])) nldas_to_proj = osr.CoordinateTransformation(nldas_srs, proj_srs) # TransformPoint reverses coordinate order station_proj = CoordProperty( *nldas_to_proj.TransformPoint(coord.y, coord.x)[:-1]) logging.debug(station_proj.x) logging.debug(station_proj.y) logging.debug(elevation_ds.bbox) logging.debug(elevation_ds.bbox.contains(station_proj)) if elevation_ds.bbox.contains(station_proj): record = { 'i': j, 'xi': x, 'yi': y, 'x': station_proj.x, 'y': station_proj.y, 'elevation': elevation_ds.get_value(station_proj) } logging.debug(record) records.append(record) j += 1 csvargs = { 'sep': '\t', 'float_format': '%.5f', 'header': False, 'index_label': 'datetime', 'date_format': '%m/%d/%Y-%H:%M', 'columns': self.columns } self.locs['station'] = ListLoc( template=self.locs['station'], #id={'xi': 'xi', 'yi': 'yi'}, meta=records).configure(self.cfg) for loc, meta in self.locs['station']: ds = DataFrameDataset(loc).new(index=datetimes, columns=self.out_columns) ds.csvargs = csvargs df = ds.dataset for name, id in self.vars.items(): slice = [] for dim in nldas_ds.dataset[id].dimensions: if dim == 'time': # Include end datetime slice.append('{start_i}:{end_i}'.format( start_i=start_i, end_i=end_i + 1)) elif dim == nldas_ds.xdim: slice.append(str(meta['xi'])) elif dim == nldas_ds.ydim: slice.append(str(meta['yi'])) expr = "nldas_ds.dataset['{id}'][{slice}]".format( id=id, slice=','.join(slice)) logging.debug(expr) df[name] = eval(expr) df['wind_speed'] = np.sqrt(df['wind_speed_u']**2 + df['wind_speed_v']**2) df['air_temp'] = df['air_temp'] - 273.15 df['relative_humidity'] = (df.apply( lambda row: (100 * relative_humidity_from_specific_humidity( units.Quantity(row['pressure'], "pascal"), units.Quantity(row['air_temp'], "degC"), units.Quantity(row['specific_humidity'], "dimensionless")). magnitude), axis=1)) df.loc[df.relative_humidity >= 100, 'relative_humidity'] = 100 # mm/hour to m/hour, adjusted for calibration if precip_ds is None: df['precipitation'] = df['precipitation'] / 1000. 
* precip_adj else: # Resample the original meteorology to match precipitation df_p = df.resample(precip_dt).ffill() logging.debug(df) # Select the correct precipitation range and location # Convert start and end to indices slice = [] p_time = precip_ds.dataset.variables['time'] p_start_i = nc4.date2index(start, p_time) - time_zone p_end_i = nc4.date2index(end, p_time) - time_zone # Construct slice for dim in precip_ds.dataset[precip_var].dimensions: if dim == 'time': slice.append('{start_i}:{end_i}'.format( start_i=p_start_i, end_i=p_end_i + 1)) elif dim == precip_ds.xdim: slice.append(str(meta['xi'])) elif dim == precip_ds.ydim: slice.append(str(meta['yi'])) expr = "precip_ds.dataset['{id}'][{slice}]".format( id=precip_var, slice=','.join(slice)) logging.debug(expr) df_p['precipitation'] = eval(expr) df_p['precipitation'] = df_p[ 'precipitation'] / 1000. * precip_adj ds._dataset = df_p logging.debug(ds.dataset) ds.save()
# =============================================================================
# FIG #7: SFC: MSLP, WIND, TEMPERATURE
# =============================================================================
SLP = SLP_DATA.variables['slp'][TIME_INDEX, :, :] * units('hPa')
T2M = T2M_DATA.variables['air'][TIME_INDEX, :, :] * units('kelvin')
U2M = U2M_DATA.variables['uwnd'][TIME_INDEX, :, :] * units('m/s')
V2M = V2M_DATA.variables['vwnd'][TIME_INDEX, :, :] * units('m/s')
# =============================================================================
# FIG #8: SFC: HEAT INDEX OR WINDCHILL
# =============================================================================
T2M = T2M_DATA.variables['air'][TIME_INDEX, :, :] * units('kelvin')
SH2M = SH2M_DATA.variables['shum'][TIME_INDEX, :, :]
U2M = U2M_DATA.variables['uwnd'][TIME_INDEX, :, :] * units('m/s')
V2M = V2M_DATA.variables['vwnd'][TIME_INDEX, :, :] * units('m/s')
PRES = PRES_DATA.variables['pres'][TIME_INDEX, :, :] * units('Pa')
RHUM = mpcalc.relative_humidity_from_specific_humidity(SH2M, T2M, PRES)
T2M = T2M.to('degF')
SFC_SPEED = mpcalc.get_wind_speed(U2M, V2M)
SFC_SPEED = SFC_SPEED.to('mph')
APPARENT_TEMP = mpcalc.apparent_temperature(T2M, RHUM, SFC_SPEED)
# =============================================================================
# =============================================================================
# =============================================================================

# Make a grid of lat/lon values to use for plotting with Basemap.
lons, lats = np.meshgrid(np.squeeze(LON), np.squeeze(LAT))
slons, slats = np.meshgrid(np.squeeze(SFLON), np.squeeze(SFLAT))

fig, axarr = plt.subplots(nrows=2, ncols=4, figsize=(35, 25),
                          subplot_kw={'projection': crs})
axlist = axarr.flatten()
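# The FIG #8 block above uses pre-1.0 MetPy names and the specific-humidity-first
# argument order. For reference, a hedged sketch of the same calls with the names
# assumed for MetPy >= 1.0 (get_wind_speed renamed to wind_speed, pressure-first
# relative humidity), reusing the variables loaded above:
RHUM = mpcalc.relative_humidity_from_specific_humidity(PRES, T2M, SH2M)
SFC_SPEED = mpcalc.wind_speed(U2M, V2M).to('mph')
APPARENT_TEMP = mpcalc.apparent_temperature(T2M.to('degF'), RHUM, SFC_SPEED)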
def read_cmip(model, experiment, ensemble, year, domain, cmip_ver=5, group="", al33=False, ver6hr="", ver3hr="", project="CMIP"): if cmip_ver == 5: #Get CMIP5 file paths if al33: #NOTE unknown behaviour for al33 directories with multiple versions if group == "": raise ValueError("Group required") if ver6hr == "": ver6hr = "v*" if ver3hr == "": ver3hr = "v*" hus_files = np.sort(glob.glob("/g/data/al33/replicas/CMIP5/combined/"+\ group+"/"+model+"/"+experiment+\ "/6hr/atmos/6hrLev/"+ensemble+"/"+ver6hr+"/hus/*6hrLev*")) ta_files = np.sort(glob.glob("/g/data/al33/replicas/CMIP5/combined/"+\ group+"/"+model+"/"+experiment+\ "/6hr/atmos/6hrLev/"+ensemble+"/"+ver6hr+"/ta/*6hrLev*")) ua_files = np.sort(glob.glob("/g/data/al33/replicas/CMIP5/combined/"+\ group+"/"+model+"/"+experiment+\ "/6hr/atmos/6hrLev/"+ensemble+"/"+ver6hr+"/ua/*6hrLev*")) va_files = np.sort(glob.glob("/g/data/al33/replicas/CMIP5/combined/"+\ group+"/"+model+"/"+experiment+\ "/6hr/atmos/6hrLev/"+ensemble+"/"+ver6hr+"/va/*6hrLev*")) huss_files = np.sort(glob.glob("/g/data/al33/replicas/CMIP5/combined/"+\ group+"/"+model+"/"+experiment+\ "/3hr/atmos/3hr/"+ensemble+"/"+ver3hr+"/huss/*")) tas_files = np.sort(glob.glob("/g/data/al33/replicas/CMIP5/combined/"+\ group+"/"+model+"/"+experiment+\ "/3hr/atmos/3hr/"+ensemble+"/"+ver3hr+"/tas/*")) uas_files = np.sort(glob.glob("/g/data/al33/replicas/CMIP5/combined/"+\ group+"/"+model+"/"+experiment+\ "/3hr/atmos/3hr/"+ensemble+"/"+ver3hr+"/uas/*")) vas_files = np.sort(glob.glob("/g/data/al33/replicas/CMIP5/combined/"+\ group+"/"+model+"/"+experiment+\ "/3hr/atmos/3hr/"+ensemble+"/"+ver3hr+"/vas/*")) ps_files = np.sort(glob.glob("/g/data/al33/replicas/CMIP5/combined/"+\ group+"/"+model+"/"+experiment+\ "/3hr/atmos/3hr/"+ensemble+"/"+ver3hr+"/ps/*")) else: hus_files = np.sort(glob.glob("/g/data/r87/DRSv3/CMIP5/"+\ model+"/"+experiment+\ "/6hr/atmos/"+ensemble+"/hus/latest/*6hrLev*")) ta_files = np.sort(glob.glob("/g/data/r87/DRSv3/CMIP5/"+\ model+"/"+experiment+\ "/6hr/atmos/"+ensemble+"/ta/latest/*6hrLev*")) ua_files = np.sort(glob.glob("/g/data/r87/DRSv3/CMIP5/"+\ model+"/"+experiment+\ "/6hr/atmos/"+ensemble+"/ua/latest/*6hrLev*")) va_files = np.sort(glob.glob("/g/data/r87/DRSv3/CMIP5/"+\ model+"/"+experiment+\ "/6hr/atmos/"+ensemble+"/va/latest/*6hrLev*")) huss_files = np.sort(glob.glob("/g/data/r87/DRSv3/CMIP5/"+\ model+"/"+experiment+\ "/3hr/atmos/"+ensemble+"/huss/latest/*")) tas_files = np.sort(glob.glob("/g/data/r87/DRSv3/CMIP5/"+\ model+"/"+experiment+\ "/3hr/atmos/"+ensemble+"/tas/latest/*")) uas_files = np.sort(glob.glob("/g/data/r87/DRSv3/CMIP5/"+\ model+"/"+experiment+\ "/3hr/atmos/"+ensemble+"/uas/latest/*")) vas_files = np.sort(glob.glob("/g/data/r87/DRSv3/CMIP5/"+\ model+"/"+experiment+\ "/3hr/atmos/"+ensemble+"/vas/latest/*")) ps_files = np.sort(glob.glob("/g/data/r87/DRSv3/CMIP5/"+\ model+"/"+experiment+\ "/3hr/atmos/"+ensemble+"/ps/latest/*")) elif cmip_ver == 6: #Get CMIP6 file paths hus_files = np.sort(glob.glob("/g/data/r87/DRSv3/CMIP6/"+project+"/"+group+"/"+model+\ "/"+experiment+"/"+ensemble+"/6hrLev/hus/gn/latest/*")) ta_files = np.sort(glob.glob("/g/data/r87/DRSv3/CMIP6/"+project+"/"+group+"/"+model+\ "/"+experiment+"/"+ensemble+"/6hrLev/ta/gn/latest/*")) ua_files = np.sort(glob.glob("/g/data/r87/DRSv3/CMIP6/"+project+"/"+group+"/"+model+\ "/"+experiment+"/"+ensemble+"/6hrLev/ua/gn/latest/*")) va_files = np.sort(glob.glob("/g/data/r87/DRSv3/CMIP6/"+project+"/"+group+"/"+model+\ "/"+experiment+"/"+ensemble+"/6hrLev/va/gn/latest/*")) huss_files = 
np.sort(glob.glob("/g/data/r87/DRSv3/CMIP6/"+project+"/"+group+"/"+model+\ "/"+experiment+"/"+ensemble+"/3hr/huss/gn/latest/*")) tas_files = np.sort(glob.glob("/g/data/r87/DRSv3/CMIP6/"+project+"/"+group+"/"+model+\ "/"+experiment+"/"+ensemble+"/3hr/tas/gn/latest/*")) uas_files = np.sort(glob.glob("/g/data/r87/DRSv3/CMIP6/"+project+"/"+group+"/"+model+\ "/"+experiment+"/"+ensemble+"/3hr/uas/gn/latest/*")) vas_files = np.sort(glob.glob("/g/data/r87/DRSv3/CMIP6/"+project+"/"+group+"/"+model+\ "/"+experiment+"/"+ensemble+"/3hr/vas/gn/latest/*")) ps_files = np.sort(glob.glob("/g/data/r87/DRSv3/CMIP6/"+project+"/"+group+"/"+model+\ "/"+experiment+"/"+ensemble+"/3hr/ps/gn/latest/*")) #Isolate the files relevant for the current "year" #NOTE will have to change to incorperate months if there is more than one file per year hus_fid = get_fid(hus_files, year) ta_fid = get_fid(ta_files, year) ua_fid = get_fid(ua_files, year) va_fid = get_fid(va_files, year) huss_fid = get_fid(huss_files, year) tas_fid = get_fid(tas_files, year) uas_fid = get_fid(uas_files, year) vas_fid = get_fid(vas_files, year) ps_fid = get_fid(ps_files, year) #Load the data hus = xr.open_mfdataset([hus_files[i] for i in hus_fid], use_cftime=True) ta = xr.open_mfdataset([ta_files[i] for i in ta_fid], use_cftime=True) ua = xr.open_mfdataset([ua_files[i] for i in ua_fid], use_cftime=True) va = xr.open_mfdataset([va_files[i] for i in va_fid], use_cftime=True) #Load surface data and match to 6 hourly huss = xr.open_mfdataset([huss_files[i] for i in huss_fid], use_cftime=True) tas = xr.open_mfdataset([tas_files[i] for i in tas_fid], use_cftime=True) uas = xr.open_mfdataset([uas_files[i] for i in uas_fid], use_cftime=True) vas = xr.open_mfdataset([vas_files[i] for i in vas_fid], use_cftime=True) ps = xr.open_mfdataset([ps_files[i] for i in ps_fid], use_cftime=True) #Trim to the domain given by "domain", as well as the year given by "year". 
Expand domain # for later compairsons with ERA5, and for interpolation of U/V domain[0] = domain[0] - 5 domain[1] = domain[1] + 5 domain[2] = domain[2] - 5 domain[3] = domain[3] + 5 hus = trim_cmip5(hus, domain, year) ta = trim_cmip5(ta, domain, year) huss = trim_cmip5(huss, domain, year) tas = trim_cmip5(tas, domain, year) ps = trim_cmip5(ps, domain, year) #Interpolate u, v, uas and vas, using a slightly bigger domain, then trim to "domain" ua = trim_cmip5(ua, [domain[0], domain[1], domain[2], domain[3]], year) va = trim_cmip5(va, [domain[0], domain[1], domain[2], domain[3]], year) uas = trim_cmip5(uas, [domain[0], domain[1], domain[2], domain[3]], year) vas = trim_cmip5(vas, [domain[0], domain[1], domain[2], domain[3]], year) ua = ua.ua.interp({"lon": hus.lon}, method="linear", assume_sorted=True) va = va.va.interp({"lat": hus.lat}, method="linear", assume_sorted=True) uas = uas.uas.interp({ "lat": hus.lat, "lon": hus.lon }, method="linear", assume_sorted=True) vas = vas.vas.interp({ "lat": hus.lat, "lon": hus.lon }, method="linear", assume_sorted=True) ua = trim_cmip5(ua, domain, year) va = trim_cmip5(va, domain, year) uas = trim_cmip5(uas, domain, year) vas = trim_cmip5(vas, domain, year) #Get common times for all datasets common_times = np.array(list(set(hus.time.values) & set(ta.time.values) & set(ua.time.values)\ & set(va.time.values) & set(huss.time.values) & set(tas.time.values)\ & set(uas.time.values) & set(vas.time.values) & set(ps.time.values))) #Restrict all data to common times hus = hus.sel({"time": np.in1d(hus.time, common_times)}) ta = ta.sel({"time": np.in1d(ta.time, common_times)}) ua = ua.sel({"time": np.in1d(ua.time, common_times)}) va = va.sel({"time": np.in1d(va.time, common_times)}) huss = huss.sel({"time": np.in1d(huss.time, common_times)}) tas = tas.sel({"time": np.in1d(tas.time, common_times)}) uas = uas.sel({"time": np.in1d(uas.time, common_times)}) vas = vas.sel({"time": np.in1d(vas.time, common_times)}) ps = ps.sel({"time": np.in1d(ps.time, common_times)}) #Either convert vertical coordinate to height or pressure, depending on the model names = [] for name, da in hus.data_vars.items(): names.append(name) if "orog" in names: #If the model has been stored on a hybrid height coordinate, it should have the # variable "orog". Convert height coordinate to height ASL, and calculate # pressure via the hydrostatic equation z = hus.lev + (hus.b * hus.orog) orog = hus.orog.values q = hus.hus / (1 - hus.hus) tv = ta.ta * ((q + 0.622) / (0.622 * (1 + q))) p = np.swapaxes( np.swapaxes(ps.ps * np.exp(-9.8 * z / (287 * tv)), 3, 2), 2, 1) if ((model in ["ACCESS1-3", "ACCESS1-0", "ACCESS-CM2", "ACCESS-ESM1-5"])): z = np.swapaxes(z, 0, 1).values orog = orog[0] else: z = np.tile( z.values.astype("float32"), [ta.ta.shape[0], 1, 1, 1], ) elif np.any(np.in1d(["p0", "ap"], names)): #If the model has been stored on a hybrid pressure coordinate, it should have the # variable "p0". 
Convert hybrid pressure coordinate to pressure, and calculate # height via the hydrostatic equation if "p0" in names: p = (hus.a * hus.p0 + hus.b * hus.ps).transpose( "time", "lev", "lat", "lon") elif "ap" in names: p = (hus.ap + hus.b * hus.ps).transpose("time", "lev", "lat", "lon") else: raise ValueError( "Check the hybrid-pressure coordinate of this model") q = hus.hus / (1 - hus.hus) tv = ta.ta * ((q + 0.622) / (0.622 * (1 + q))) z = (-287 * tv * (np.log(p / ps.ps)).transpose("time", "lev", "lat", "lon")) / 9.8 orog = trim_cmip5( xr.open_dataset(glob.glob("/g/data/r87/DRSv3/CMIP5/"+\ model+"/historical/fx/atmos/r0i0p0/orog/latest/orog*.nc")[0]).orog,\ domain, year).values orog[orog < 0] = 0 z = (z + orog).values else: raise ValueError("Check the vertical coordinate of this model") #Sanity checks on pressure and height, one of which is calculated via hydrostatic approx. Note ACCESS-CM2 is # missing a temperature level, and so that level will have zero pressure. Ignore sanity check for this model. if (z.min() < -1000) | (z.max() > 100000): raise ValueError( "Potentially erroneous Z values (less than -1000 or greater than 100,000 km" ) if (p.max().values > 200000) | (p.min().values < 0): if model != "ACCESS-CM2": raise ValueError( "Potentially erroneous pressure (less than 0 or greater than 200,000 Pa" ) #Convert quantities into those expected by wrf_(non)_parallel.py ta = ta.ta.values - 273.15 hur = mpcalc.relative_humidity_from_specific_humidity(hus.hus.values, \ ta*units.units.degC, p.values*units.units.pascal) * 100 pres = p.values / 100. sfc_pres = ps.ps.values / 100. tas = tas.tas.values - 273.15 ta2d = mpcalc.dewpoint_from_specific_humidity(huss.huss.values, tas*units.units.degC, \ ps.ps.values*units.units.pascal) lon = p.lon.values lat = p.lat.values ua = ua.values va = va.values uas = uas.values vas = vas.values #Mask all data above 100 hPa. For ACCESS-CM2, mask data below 20 m if model == "ACCESS-CM2": ta[(pres < 100) | (p == 0) | (p == np.inf)] = np.nan hur[(pres < 100) | (p == 0) | (p == np.inf)] = np.nan z[(pres < 100) | (p == 0) | (p == np.inf)] = np.nan ua[(pres < 100) | (p == 0) | (p == np.inf)] = np.nan va[(pres < 100) | (p == 0) | (p == np.inf)] = np.nan else: ta[pres < 100] = np.nan hur[pres < 100] = np.nan z[pres < 100] = np.nan ua[pres < 100] = np.nan va[pres < 100] = np.nan date_list = p.time.values date_list = np.array([ dt.datetime.strptime(date_list[t].strftime(), "%Y-%m-%d %H:%M:%S") for t in np.arange(len(date_list)) ]) return [ta, hur, z, orog, pres, sfc_pres, ua, va, uas, vas, tas, ta2d, lon,\ lat, date_list]
def cross_section(isentlev, num, left_lat, left_lon, right_lat, right_lon):
    """Plot an isentropic cross-section."""
    # Get the coordinates of the endpoints for the cross-section
    left_coord = np.array((float(left_lat), float(left_lon)))
    right_coord = np.array((float(right_lat), float(right_lon)))

    # Calculate data for the inset isentropic map
    isent_anal = mcalc.isentropic_interpolation(float(isentlev) * units.kelvin, lev, tmp,
                                                spech, tmpk_out=True)
    isentprs = isent_anal[0]
    isenttmp = isent_anal[1]
    isentspech = isent_anal[2]
    isentrh = mcalc.relative_humidity_from_specific_humidity(isentspech, isenttmp, isentprs)

    # Find index values for the cross section slice
    iright = lat_lon_2d_index(lat, lon, right_coord[0], right_coord[1])
    ileft = lat_lon_2d_index(lat, lon, left_coord[0], left_coord[1])

    # Get the cross-section slice data
    cross_data = mcalc.extract_cross_section(ileft, iright, lat, lon, tmp, uwnd, vwnd,
                                             spech, num=num)
    cross_lat = cross_data[0]
    cross_lon = cross_data[1]
    cross_t = cross_data[2]
    cross_u = cross_data[3]
    cross_v = cross_data[4]
    cross_spech = cross_data[5]

    # Calculate theta and RH on the cross-section
    cross_theta = mcalc.potential_temperature(lev[:, np.newaxis], cross_t)
    cross_rh = mcalc.relative_humidity_from_specific_humidity(cross_spech, cross_t,
                                                              lev[:, np.newaxis])

    # Create figure for plotting
    fig = plt.figure(1, figsize=(17., 12.))

    # Plot the cross section
    ax1 = plt.axes()
    ax1.set_yscale('symlog')
    ax1.grid()
    cint = np.arange(250, 450, 5)

    # Determine whether to label x-axis with lat or lon values
    if np.abs(left_lon - right_lon) > np.abs(left_lat - right_lat):
        cs = ax1.contour(cross_lon, lev[::-1], cross_theta[::-1, :], cint, colors='tab:red')
        cf = ax1.contourf(cross_lon, lev[::-1], cross_rh[::-1, :], range(10, 106, 5),
                          cmap=plt.cm.gist_earth_r)
        ax1.barbs(cross_lon[4::4], lev, cross_u[:, 4::4], cross_v[:, 4::4], length=6)
        plt.xlabel('Longitude (Degrees East)')
    else:
        cs = ax1.contour(cross_lat[::-1], lev[::-1], cross_theta[::-1, ::-1], cint,
                         colors='tab:red')
        cf = ax1.contourf(cross_lat[::-1], lev[::-1], cross_rh[::-1, ::-1],
                          range(10, 106, 5), cmap=plt.cm.gist_earth_r)
        ax1.barbs(cross_lat[::-4], lev, cross_u[:, ::-4], cross_v[:, ::-4], length=6)
        plt.xlim(cross_lat[0], cross_lat[-1])
        plt.xlabel('Latitude (Degrees North)')

    # Label the cross section axes
    plt.clabel(cs, fontsize=10, inline=1, inline_spacing=7, fmt='%i',
               rightside_up=True, use_clabeltext=True)
    cb = plt.colorbar(cf, orientation='horizontal', extend='max', aspect=65, shrink=0.75,
                      pad=0.06, extendrect=True)
    cb.set_label('Relative Humidity', size='x-large')

    plt.ylabel('Pressure (hPa)')
    ax1.set_yticklabels(np.arange(1000, 50, -50))
    plt.ylim(lev[0], lev[-1])
    plt.yticks(np.arange(1000, 50, -50))

    # Add a title
    plt.title(('NARR Isentropic Cross-Section: ' + str(left_coord[0]) + ' N, '
               + str(left_coord[1]) + ' E to ' + str(right_coord[0]) + ' N, '
               + str(right_coord[1]) + ' E'), loc='left')
    plt.title('VALID: {:s}'.format(str(vtimes[0])), loc='right')

    # Add Inset Map
    ax2 = fig.add_axes([0.125, 0.643, 0.25, 0.25], projection=crs)

    # Coordinates to limit map area
    bounds = [(-122., -75., 25., 50.)]

    # Limit extent of inset map
    ax2.set_extent(*bounds, crs=ccrs.PlateCarree())
    ax2.coastlines('50m', edgecolor='black', linewidth=0.75)
    ax2.add_feature(states_provinces, edgecolor='black', linewidth=0.5)

    # Plot the surface
    clevisent = np.arange(0, 1000, 25)
    cs = ax2.contour(tlons, tlats, isentprs[0, :, :], clevisent, colors='k',
                     linewidths=1.0, linestyles='solid')
    plt.clabel(cs, fontsize=10, inline=1, inline_spacing=7, fmt='%i',
               rightside_up=True, use_clabeltext=True)

    # Plot RH
    cf = ax2.contourf(tlons, tlats, isentrh[0, :, :], range(10, 106, 5),
                      cmap=plt.cm.gist_earth_r)

    # Convert endpoints of cross-section line
    left = crs.transform_point(left_coord[1], left_coord[0], ccrs.PlateCarree())
    right = crs.transform_point(right_coord[1], right_coord[0], ccrs.PlateCarree())

    # Plot the cross section line
    plt.plot([left[0], right[0]], [left[1], right[1]], color='r')
    plt.show()
def read_cmip6(group, model, experiment, ensemble, year, domain): #DEPRECIATED - USE READ_CMIP INSTEAD, SPECIFYING CMIP_VER=6 #Read CMIP6 data from the r87 project (from oi10 and fs38) #For the given model, institute, experiment, get the relevant file paths. hus_files = np.sort( glob.glob("/g/data/r87/DRSv3/CMIP6/CMIP/" + group + "/" + model + "/" + experiment + "/" + ensemble + "/6hrLev/hus/gn/latest/*")) ta_files = np.sort( glob.glob("/g/data/r87/DRSv3/CMIP6/CMIP/" + group + "/" + model + "/" + experiment + "/" + ensemble + "/6hrLev/ta/gn/latest/*")) ua_files = np.sort( glob.glob("/g/data/r87/DRSv3/CMIP6/CMIP/" + group + "/" + model + "/" + experiment + "/" + ensemble + "/6hrLev/ua/gn/latest/*")) va_files = np.sort( glob.glob("/g/data/r87/DRSv3/CMIP6/CMIP/" + group + "/" + model + "/" + experiment + "/" + ensemble + "/6hrLev/va/gn/latest/*")) huss_files = np.sort( glob.glob("/g/data/r87/DRSv3/CMIP6/CMIP/" + group + "/" + model + "/" + experiment + "/" + ensemble + "/3hr/huss/gn/latest/*")) tas_files = np.sort( glob.glob("/g/data/r87/DRSv3/CMIP6/CMIP/" + group + "/" + model + "/" + experiment + "/" + ensemble + "/3hr/tas/gn/latest/*")) uas_files = np.sort( glob.glob("/g/data/r87/DRSv3/CMIP6/CMIP/" + group + "/" + model + "/" + experiment + "/" + ensemble + "/3hr/uas/gn/latest/*")) vas_files = np.sort( glob.glob("/g/data/r87/DRSv3/CMIP6/CMIP/" + group + "/" + model + "/" + experiment + "/" + ensemble + "/3hr/vas/gn/latest/*")) ps_files = np.sort( glob.glob("/g/data/r87/DRSv3/CMIP6/CMIP/" + group + "/" + model + "/" + experiment + "/" + ensemble + "/3hr/ps/gn/latest/*")) #Isolate the files relevant for the current "year" #NOTE will have to change to incorperate months if there is more than one file per year hus_fid = get_fid(hus_files, year) ta_fid = get_fid(ta_files, year) ua_fid = get_fid(ua_files, year) va_fid = get_fid(va_files, year) huss_fid = get_fid(huss_files, year) tas_fid = get_fid(tas_files, year) uas_fid = get_fid(uas_files, year) vas_fid = get_fid(vas_files, year) ps_fid = get_fid(ps_files, year) #Load the data, match 3 hourly and 6 hourly data hus = xr.open_mfdataset([hus_files[i] for i in hus_fid]) ta = xr.open_mfdataset([ta_files[i] for i in ta_fid]) ua = xr.open_mfdataset([ua_files[i] for i in ua_fid]) va = xr.open_mfdataset([va_files[i] for i in va_fid]) huss = xr.open_mfdataset([huss_files[i] for i in huss_fid]) huss = huss.sel({"time": np.in1d(huss.time, hus.time)}) tas = xr.open_mfdataset([tas_files[i] for i in tas_fid]) tas = tas.sel({"time": np.in1d(tas.time, ta.time)}) uas = xr.open_mfdataset([uas_files[i] for i in uas_fid]) uas = uas.sel({"time": np.in1d(uas.time, ua.time)}) vas = xr.open_mfdataset([vas_files[i] for i in vas_fid]) vas = vas.sel({"time": np.in1d(vas.time, va.time)}) ps = xr.open_mfdataset([ps_files[i] for i in ps_fid]) ps = ps.sel({"time": np.in1d(ps.time, hus.time)}) #and trim to the domain given by "domain", as well as the year given by "year" hus = trim_cmip5(hus, domain, year) ta = trim_cmip5(ta, domain, year) huss = trim_cmip5(huss, domain, year) tas = trim_cmip5(tas, domain, year) ps = trim_cmip5(ps, domain, year) #Interpolate u, v, uas and vas, using a slightly bigger domain, then trim to "domain" ua = trim_cmip5( ua, [domain[0] - 5, domain[1] + 5, domain[2] - 5, domain[3] + 5], year) va = trim_cmip5( va, [domain[0] - 5, domain[1] + 5, domain[2] - 5, domain[3] + 5], year) uas = trim_cmip5( uas, [domain[0] - 5, domain[1] + 5, domain[2] - 5, domain[3] + 5], year) vas = trim_cmip5( vas, [domain[0] - 5, domain[1] + 5, domain[2] - 5, domain[3] + 
5], year) ua = ua.ua.interp({"lon": hus.lon}, method="linear", assume_sorted=True) va = va.va.interp({"lat": hus.lat}, method="linear", assume_sorted=True) uas = uas.uas.interp({ "lat": hus.lat, "lon": hus.lon }, method="linear", assume_sorted=True) vas = vas.vas.interp({ "lat": hus.lat, "lon": hus.lon }, method="linear", assume_sorted=True) ua = trim_cmip5(ua, domain, year).values va = trim_cmip5(va, domain, year).values uas = trim_cmip5(uas, domain, year).values vas = trim_cmip5(vas, domain, year).values #Convert vertical coordinate to height z = hus.lev + (hus.b * hus.orog) orog = hus.orog.values #Calculate pressure via hydrostatic equation q = hus.hus / (1 - hus.hus) tv = ta.ta * ((q + 0.622) / (0.622 * (1 + q))) p = np.swapaxes(np.swapaxes(ps.ps * np.exp(-9.8 * z / (287 * tv)), 3, 2), 2, 1) #Convert quantities into those expected by wrf_parallel.py ta = ta.ta.values - 273.15 hur = mpcalc.relative_humidity_from_specific_humidity(hus.hus.values, \ ta*units.units.degC, p.values*units.units.pascal) * 100 z = np.tile(z.values, [ta.shape[0], 1, 1, 1]) pres = p.values / 100. sfc_pres = ps.ps.values / 100. tas = tas.tas.values - 273.15 ta2d = mpcalc.dewpoint_from_specific_humidity(hus.hus.values, ta*units.units.degC, \ p.values*units.units.pascal) lon = p.lon.values lat = p.lat.values date_list = p.time.values #Mask all data above 100 hPa ta[pres < 100] = np.nan hur[pres < 100] = np.nan z[pres < 100] = np.nan ua[pres < 100] = np.nan va[pres < 100] = np.nan return [ta, hur, z, orog, pres, sfc_pres, ua, va, uas, vas, tas, ta2d, lon,\ lat, date_list]
# coordinates, with the number of vertical levels as specified above.
print(isentprs.shape)
print(isentspech.shape)
print(isentu.shape)
print(isentv.shape)
print(isenttmp.shape)
print(isenthgt.shape)

#################################
# **Converting to Relative Humidity**
#
# The NARR only gives specific humidity on isobaric vertical levels, so relative humidity will
# have to be calculated after the interpolation to isentropic space.
isentrh = 100 * mpcalc.relative_humidity_from_specific_humidity(isentspech, isenttmp, isentprs)

#######################################
# **Plotting the Isentropic Analysis**

# Set up our projection
crs = ccrs.LambertConformal(central_longitude=-100.0, central_latitude=45.0)

# Coordinates to limit map area
bounds = [(-122., -75., 25., 50.)]
# Choose a level to plot, in this case 296 K
level = 0

fig = plt.figure(figsize=(17., 12.))
add_metpy_logo(fig, 120, 245, size='large')
ax = fig.add_subplot(1, 1, 1, projection=crs)
def draw_weather_analysis(date_obj, data, map_region, return_dict): """ Draw weather analysis map. """ # image dictionary images = collections.OrderedDict() return_dict[0] = None # draw 2PVU surface pressure image = pv.draw_pres_pv2(data['pres_pv2'].values, data['pres_pv2']['lon'].values, data['pres_pv2']['lat'].values, map_region=map_region, title_kwargs={ 'name': 'CFSR', 'time': date_obj }) images['2PVU_Surface_Pressure'] = image # draw 200hPa wind field image = dynamics.draw_wind_upper(data['u200'].values, data['v200'].values, data['u200']['lon'].values, data['u200']['lat'].values, gh=data['gh200'].values, map_region=map_region, title_kwargs={ 'name': 'CFSR', 'head': "200hPa Wind | GH", 'time': date_obj }) images['200hPa_Wind'] = image # draw 500hPa height and temperature image = dynamics.draw_height_temp(data['gh500'].values, data['t500'].values, data['gh500']['lon'].values, data['gh500']['lat'].values, map_region=map_region, title_kwargs={ 'name': 'CFSR', 'head': "500hPa GH | T", 'time': date_obj }) images['500hPa_Height'] = image # draw 500hPa vorticity image = dynamics.draw_vort_high(data['u500'].values, data['v500'].values, data['u500']['lon'].values, data['u500']['lat'].values, gh=data['gh500'].values, map_region=map_region, title_kwargs={ 'name': 'CFSR', 'head': "500hPa Wind | Vorticity | GH", 'time': date_obj }) images['500hPa_Vorticity'] = image # draw 700hPa vertical velocity image = dynamics.draw_vvel_high(data['u700'].values, data['v700'].values, data['w700'].values, data['w700']['lon'].values, data['w700']['lat'].values, gh=data['gh700'].values, map_region=map_region, title_kwargs={ 'name': 'CFSR', 'head': "700hPa Vertical Velocity | Wind | GH", 'time': date_obj }) images['700hPa_Vertical_Velocity'] = image # draw 700hPa wind field image = dynamics.draw_wind_high(data['u700'].values, data['v700'].values, data['u700']['lon'].values, data['u700']['lat'].values, gh=data['gh500'].values, map_region=map_region, title_kwargs={ 'name': 'CFSR', 'head': "700hPa Wind | 500hPa GH", 'time': date_obj }) images['700hPa_Wind'] = image # draw 700hPa temperature field image = thermal.draw_temp_high(data['t700'].values, data['t700']['lon'].values, data['t700']['lat'].values, gh=data['gh500'].values, map_region=map_region, title_kwargs={ 'name': 'CFSR', 'head': "700hPa T | 500hPa GH", 'time': date_obj }) images['700hPa_Temperature'] = image # draw 700hPa relative humidity rh = calc.relative_humidity_from_specific_humidity( data['q700'], data['t700'], 700 * units.hPa) * 100 image = moisture.draw_rh_high(data['u700'].values, data['v700'].values, rh.magnitude, data['u700']['lon'].values, data['u700']['lat'].values, gh=data['gh500'].values, map_region=map_region, title_kwargs={ 'name': 'CFSR', 'head': "700hPa RH | Wind | 500hPa GH", 'time': date_obj }) images['700hPa_Relative_Humidity'] = image # draw 850hPa wind field image = dynamics.draw_wind_high(data['u850'].values, data['v850'].values, data['u850']['lon'].values, data['u850']['lat'].values, gh=data['gh500'].values, map_region=map_region, title_kwargs={ 'name': 'CFSR', 'head': "850hPa Wind | 500hPa GH", 'time': date_obj }) images['850hPa_Wind'] = image # draw 850hPa temperature field image = thermal.draw_temp_high(data['t850'].values, data['t850']['lon'].values, data['t850']['lat'].values, gh=data['gh500'].values, map_region=map_region, title_kwargs={ 'name': 'CFSR', 'head': "850hPa T | 500hPa GH", 'time': date_obj }) images['850hPa_Temperature'] = image # draw 850hPa relative humidity rh = calc.relative_humidity_from_specific_humidity( 
data['q850'], data['t850'], 850 * units.hPa) * 100 image = moisture.draw_rh_high(data['u850'].values, data['v850'].values, rh.magnitude, data['u850']['lon'].values, data['u850']['lat'].values, gh=data['gh500'].values, map_region=map_region, title_kwargs={ 'name': 'CFSR', 'head': "850hPa RH | Wind | 500hPa GH", 'time': date_obj }) images['850hPa_Relative_Humidity'] = image # draw 850hPa specific field image = moisture.draw_sp_high(data['u850'].values, data['v850'].values, data['q850'].values * 1000., data['q850']['lon'].values, data['q850']['lat'].values, gh=data['gh500'].values, map_region=map_region, title_kwargs={ 'name': 'CFSR', 'head': "850hPa SP | Wind | 500hPa GH", 'time': date_obj }) images['850hPa_Specific_Humidity'] = image # draw 925hPa temperature field image = thermal.draw_temp_high(data['t925'].values, data['t925']['lon'].values, data['t925']['lat'].values, gh=data['gh500'].values, map_region=map_region, title_kwargs={ 'name': 'CFSR', 'head': "925hPa T | 500hPa GH", 'time': date_obj }) images['925hPa_Temperature'] = image # draw 925hPa wind field image = dynamics.draw_wind_high(data['u925'].values, data['v925'].values, data['u925']['lon'].values, data['u925']['lat'].values, gh=data['gh500'].values, map_region=map_region, title_kwargs={ 'name': 'CFSR', 'head': "925hPa Wind | 500hPa GH", 'time': date_obj }) images['925hPa_Wind'] = image # draw 925hPa relative humidity rh = calc.relative_humidity_from_specific_humidity( data['q925'], data['t925'], 925 * units.hPa) * 100 image = moisture.draw_rh_high(data['u925'].values, data['v925'].values, rh.magnitude, data['u925']['lon'].values, data['u925']['lat'].values, gh=data['gh500'].values, map_region=map_region, title_kwargs={ 'name': 'CFSR', 'head': "925hPa RH | Wind | 500hPa GH", 'time': date_obj }) images['925hPa_Relative_Humdity'] = image # draw 925hPa specific field image = moisture.draw_sp_high(data['u925'].values, data['v925'].values, data['q925'].values * 1000., data['q925']['lon'].values, data['q925']['lat'].values, gh=data['gh500'].values, map_region=map_region, title_kwargs={ 'name': 'CFSR', 'head': "925hPa SP | Wind | 500hPa GH", 'time': date_obj }) images['925hPa_Specific_Humidity'] = image # draw precipitable water field image = moisture.draw_pwat(data['pwat'].values, data['pwat']['lon'].values, data['pwat']['lat'].values, gh=data['gh500'].values, map_region=map_region, title_kwargs={ 'name': 'CFSR', 'head': "Precipitable Water | 500hPa GH", 'time': date_obj }) images['Precipitable_Water'] = image # draw mean sea level pressure field image = dynamics.draw_mslp(data['mslp'].values, data['mslp']['lon'].values, data['mslp']['lat'].values, gh=data['gh500'].values, map_region=map_region, title_kwargs={ 'name': 'CFSR', 'head': "MSLP | 500hPa GH", 'time': date_obj }) images['Mean_Sea_Level_Pressure'] = image return_dict[0] = images
# coordinates, with the number of vertical levels as specified above.
print(isentprs.shape)
print(isentspech.shape)
print(isentu.shape)
print(isentv.shape)
print(isenttmp.shape)
print(isenthgt.shape)

#################################
# **Converting to Relative Humidity**
#
# The NARR only gives specific humidity on isobaric vertical levels, so relative humidity will
# have to be calculated after the interpolation to isentropic space.
isentrh = mcalc.relative_humidity_from_specific_humidity(
    isentspech, isenttmp, isentprs)

#######################################
# **Plotting the Isentropic Analysis**

# Set up our projection
crs = ccrs.LambertConformal(central_longitude=-100.0, central_latitude=45.0)

# Set up our array of latitude and longitude values and transform to
# the desired projection.
tlatlons = crs.transform_points(ccrs.PlateCarree(), lon, lat)
tlons = tlatlons[:, :, 0]
tlats = tlatlons[:, :, 1]

# Coordinates to limit map area
bounds = [(-122., -75., 25., 50.)]
#p_calc_pres = np.zeros(len(z_RS))
#p_calc_pres[0] = lowest_pres_RS
#integrant = g*m_mol_air/(R*T_RS_p)
#for i in range(1, len(z_RS)):
#    p_calc_pres[i] = lowest_pres_RS*math.exp(-np.trapz(integrant[0:i], z_RS[0:i]))
#p_calc_pres = p_calc_pres * units.hPa
#####

spez_hum = data_comma_temp['Specific_humidity']
spez_hum = spez_hum.values * units('g/kg')

temp_d_degC = cc.dewpoint_from_specific_humidity(spez_hum, temp_degC, p_1)
RH_RA = cc.relative_humidity_from_specific_humidity(spez_hum, temp_degC, p_1)

plt.figure(figsize=(5, 12))
plt.plot(RH_RA * 100, p_1, color='red', zorder=5)
plt.plot(RH_RS, p_RS_original, color='black')
plt.gca().invert_yaxis()
plt.ylabel('Pressure [hPa]')
plt.xlabel('RH [%]')

plt.figure(figsize=(5, 12))
plt.plot(RH_RA * 100, z_RA, color='red', zorder=5)
plt.plot(RH_RS, z_RS, color='black')
plt.ylim(0, 10000)
plt.ylabel('Height [m]')
plt.xlabel('RH [%]')