def test_potential_vorticity_baroclinic_non_unity_derivative(pv_data):
    """Test potential vorticity calculation with a non-unity theta/pressure derivative on axis 0."""
    u, v, lats, _, dx, dy = pv_data

    potential_temperature = np.ones((3, 4, 4)) * units.kelvin
    potential_temperature[0] = 200 * units.kelvin
    potential_temperature[1] = 300 * units.kelvin
    potential_temperature[2] = 400 * units.kelvin

    pressure = np.ones((3, 4, 4)) * units.hPa
    pressure[2] = 1000 * units.hPa
    pressure[1] = 999 * units.hPa
    pressure[0] = 998 * units.hPa

    pvor = potential_vorticity_baroclinic(potential_temperature, pressure,
                                          u, v, dx, dy, lats)

    abs_vorticity = absolute_vorticity(u, v, dx, dy, lats)

    vort_difference = pvor - (abs_vorticity * g * (-100 * (units.kelvin / units.hPa)))

    true_vort = np.zeros_like(u) * (units.kelvin * units.meter ** 2
                                    / (units.second * units.kilogram))

    assert_almost_equal(vort_difference, true_vort, 10)

    # Now try for xy ordered
    pvor = potential_vorticity_baroclinic(potential_temperature, pressure,
                                          u.T, v.T, dx.T, dy.T, lats.T,
                                          dim_order='xy')

    abs_vorticity = absolute_vorticity(u.T, v.T, dx.T, dy.T, lats.T, dim_order='xy')

    vort_difference = pvor - (abs_vorticity * g * (-100 * (units.kelvin / units.hPa)))

    assert_almost_equal(vort_difference, true_vort, 10)
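# A minimal sketch (not part of the test) of where the -100 K/hPa factor above comes from:
# with horizontally uniform theta, baroclinic PV reduces to -g * (zeta + f) * d(theta)/dp,
# and the synthetic profile used by the test gives d(theta)/dp = +100 K/hPa.
import numpy as np
from metpy.units import units

theta_profile = np.array([200., 300., 400.]) * units.kelvin   # levels 0, 1, 2
press_profile = np.array([998., 999., 1000.]) * units.hPa
dtheta_dp = (theta_profile[2] - theta_profile[0]) / (press_profile[2] - press_profile[0])
print(dtheta_dp)   # 100.0 kelvin / hectopascal, so PV ~= abs_vorticity * g * (-100 K/hPa)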
def test_potential_vorticity_baroclinic_wrong_number_of_levels_axis_2(pv_data):
    """Test that potential vorticity calculation errors without 3 levels on axis 2."""
    u, v, lats, _, dx, dy = pv_data

    potential_temperature = np.ones((4, 4, 3)) * units.kelvin
    potential_temperature[..., 0] = 200 * units.kelvin
    potential_temperature[..., 1] = 300 * units.kelvin
    potential_temperature[..., 2] = 400 * units.kelvin

    pressure = np.ones((4, 4, 3)) * units.hPa
    pressure[..., 2] = 1000 * units.hPa
    pressure[..., 1] = 900 * units.hPa
    pressure[..., 0] = 800 * units.hPa

    with pytest.raises(ValueError):
        potential_vorticity_baroclinic(potential_temperature[..., :1], pressure,
                                       u, v, dx, dy, lats, axis=2)

    with pytest.raises(ValueError):
        potential_vorticity_baroclinic(potential_temperature, pressure[..., :1],
                                       u, v, dx, dy, lats, axis=1)
def pv(input_file):
    # Vars
    grib_vars = ['t', 'u', 'v']

    # Load a list of datasets, one for each variable we want
    ds_list = [cfgrib.open_datasets(input_file,
                                    backend_kwargs={'filter_by_keys': {'typeOfLevel': 'isobaricInhPa',
                                                                       'shortName': v},
                                                    'indexpath': ''})
               for v in grib_vars]

    # Flatten the list of lists to a single list of datasets
    ds_flat = [x.sel(isobaricInhPa=x.isobaricInhPa[x.isobaricInhPa >= 100.0].values)
               for ds in ds_list for x in ds]

    # Merge the variables into a single dataset
    ds = xr.merge(ds_flat)

    # Add pressure
    ds['p'] = xr.DataArray(ds.isobaricInhPa.values,
                           dims=['isobaricInhPa'],
                           coords={'isobaricInhPa': ds.isobaricInhPa.values},
                           attrs={'units': 'hPa'}).broadcast_like(ds['t'])

    # Calculate potential temperature
    ds['theta'] = mpcalc.potential_temperature(ds['p'].metpy.convert_units('Pa'), ds['t'])

    # Compute baroclinic PV and scale to PVU (1 PVU = 1e-6 K m^2 kg^-1 s^-1)
    ds['pv'] = mpcalc.potential_vorticity_baroclinic(ds['theta'], ds['p'].metpy.convert_units('Pa'),
                                                     ds['u'], ds['v'],
                                                     latitude=ds.latitude) / (1.0e-6)

    # Average over the requested pressure layer
    met_data = ds['pv'].sel(isobaricInhPa=slice(
        float(os.environ.get('PV_LAYER_MAX_PRESSURE', 1000.0)),
        float(os.environ.get('PV_LAYER_MIN_PRESSURE', 100.0)))).mean(axis=0).values

    return met_data
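# Hypothetical usage of the helper above (the file name and environment values are
# assumptions, not from the original source): layer-mean 500-300 hPa PV in PVU.
import os

os.environ['PV_LAYER_MAX_PRESSURE'] = '500.0'    # bottom of the layer (hPa)
os.environ['PV_LAYER_MIN_PRESSURE'] = '300.0'    # top of the layer (hPa)
layer_mean_pv = pv('gfs.t00z.pgrb2.0p25.f006')   # 2-D array of layer-mean PV (PVU)
print(layer_mean_pv.shape)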
def add_metpy(option, filename):
    """Add the variables available through MetPy (theta, pv, n2)."""
    with xr.load_dataset(filename) as xin:
        if option.theta or option.pv:
            print("Adding potential temperature...")
            xin["pt"] = potential_temperature(xin["pressure"], xin["t"])
            xin["pt"].data = np.array(xin["pt"].data)
            xin["pt"].attrs["units"] = "K"
            xin["pt"].attrs["standard_name"] = VARIABLES["pt"][2]

        if option.pv:
            print("Adding potential vorticity...")
            xin = xin.metpy.assign_crs(grid_mapping_name='latitude_longitude',
                                       earth_radius=6.356766e6)
            xin["pv"] = potential_vorticity_baroclinic(xin["pt"], xin["pressure"],
                                                       xin["u"], xin["v"])
            xin["pv"].data = np.array(xin["pv"].data * 10 ** 6)
            xin = xin.drop("metpy_crs")
            xin["pv"].attrs["units"] = "kelvin * meter ** 2 / kilogram / second"
            xin["pv"].attrs["standard_name"] = VARIABLES["pv"][2]
            xin["mod_pv"] = xin["pv"] * ((xin["pt"] / 360) ** (-4.5))
            xin["mod_pv"].attrs["standard_name"] = VARIABLES["mod_pv"][2]

        if option.n2:
            print("Adding N2...")
            xin["n2"] = brunt_vaisala_frequency_squared(geopotential_to_height(xin["zh"]),
                                                        xin["pt"])
            xin["n2"].data = np.array(xin["n2"].data)
            xin["n2"].attrs["units"] = VARIABLES["n2"][1]
            xin["n2"].attrs["standard_name"] = "square_of_brunt_vaisala_frequency_in_air"

        xin.to_netcdf(filename)
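# Hypothetical invocation of add_metpy (the options object and file name are assumptions,
# not from the original source): any object with boolean .theta/.pv/.n2 attributes works,
# e.g. an argparse.Namespace.
from argparse import Namespace

opts = Namespace(theta=True, pv=True, n2=False)
add_metpy(opts, 'model_levels.nc')   # pt, pv and mod_pv are written back into the file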
def test_potential_vorticity_baroclinic_wrong_number_of_levels_axis_0(pv_data):
    """Test that potential vorticity calculation errors without 3 levels on axis 0."""
    u, v, lats, _, dx, dy = pv_data

    potential_temperature = np.ones((3, 4, 4)) * units.kelvin
    potential_temperature[0] = 200 * units.kelvin
    potential_temperature[1] = 300 * units.kelvin
    potential_temperature[2] = 400 * units.kelvin

    pressure = np.ones((3, 4, 4)) * units.hPa
    pressure[2] = 1000 * units.hPa
    pressure[1] = 900 * units.hPa
    pressure[0] = 800 * units.hPa

    with pytest.raises(ValueError):
        potential_vorticity_baroclinic(potential_temperature[:1, :, :], pressure,
                                       u, v, dx, dy, lats)

    with pytest.raises(ValueError):
        potential_vorticity_baroclinic(potential_temperature, pressure[:1, :, :],
                                       u, v, dx, dy, lats)
def compute_pv(dset):
    dx = dset['dx'].values[:] * units(str(dset['dx'].units))
    dy = dset['dy'].values[:] * units(str(dset['dy'].units))
    lats = dset['lat'].metpy.unit_array
    pres = dset['plev'].metpy.unit_array
    theta = dset['theta'].values[:] * units(str(dset['theta'].units))

    pv = mpcalc.potential_vorticity_baroclinic(theta, pres[:, None, None],
                                               dset['u'], dset['v'],
                                               dx[None, :, :], dy[None, :, :],
                                               lats[None, :, None])

    pv = xr.DataArray(pv.magnitude,
                      coords=dset['u'].coords,
                      attrs={'standard_name': 'Potential Vorticity',
                             'units': pv.units},
                      name='pv')

    out = xr.merge([dset, pv])
    out.attrs = dset.attrs
    return out
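# Alternative sketch (assumptions: dset carries cf-style latitude/longitude coordinates and
# 'theta'/'plev' have valid units attributes): with MetPy >= 1.0 the xarray interface can
# infer dx, dy and latitude from the coordinates, so the explicit broadcasting used in
# compute_pv becomes optional. Exact behavior depends on the MetPy version.
import metpy.calc as mpcalc

ds_crs = dset.metpy.assign_crs(grid_mapping_name='latitude_longitude',
                               earth_radius=6371229.0)
pressure_3d = ds_crs['plev'].broadcast_like(ds_crs['theta'])
pv = mpcalc.potential_vorticity_baroclinic(ds_crs['theta'], pressure_3d,
                                           ds_crs['u'], ds_crs['v'])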
def PV_Div_uv(initTime=None, fhour=6, day_back=0, model='ECMWF',
              map_ratio=14 / 9, zoom_ratio=20, cntr_pnt=[104, 34],
              levels=[1000, 950, 925, 900, 850, 800, 700, 600, 500, 400, 300, 250, 200, 100],
              lvl_ana=250, Global=False, south_China_sea=True, area=None, city=False,
              output_dir=None, data_source='MICAPS', **kwargs):

    if area is not None:
        south_China_sea = False

    # micaps data directory
    if data_source == 'MICAPS':
        try:
            data_dir = [utl.Cassandra_dir(data_type='high', data_source=model, var_name='RH', lvl=''),
                        utl.Cassandra_dir(data_type='high', data_source=model, var_name='UGRD', lvl=''),
                        utl.Cassandra_dir(data_type='high', data_source=model, var_name='VGRD', lvl=''),
                        utl.Cassandra_dir(data_type='high', data_source=model, var_name='TMP', lvl=''),
                        utl.Cassandra_dir(data_type='high', data_source=model, var_name='HGT', lvl='')]
        except KeyError:
            raise ValueError('Can not find all directories needed')

        # get filename
        if initTime is not None:
            filename = utl.model_filename(initTime, fhour)
        else:
            filename = utl.filename_day_back_model(day_back=day_back, fhour=fhour)

        # retrieve data from micaps server
        rh = MICAPS_IO.get_model_3D_grid(directory=data_dir[0][0:-1], filename=filename,
                                         levels=levels, allExists=False)
        if rh is None:
            return

        u = MICAPS_IO.get_model_3D_grid(directory=data_dir[1][0:-1], filename=filename,
                                        levels=levels, allExists=False)
        if u is None:
            return

        v = MICAPS_IO.get_model_3D_grid(directory=data_dir[2][0:-1], filename=filename,
                                        levels=levels, allExists=False)
        if v is None:
            return

        t = MICAPS_IO.get_model_3D_grid(directory=data_dir[3][0:-1], filename=filename,
                                        levels=levels, allExists=False)
        if t is None:
            return

    if data_source == 'CIMISS':
        # get filename
        if initTime is not None:
            filename = utl.model_filename(initTime, fhour, UTC=True)
        else:
            filename = utl.filename_day_back_model(day_back=day_back, fhour=fhour, UTC=True)

        try:
            # retrieve data from CIMISS server
            rh = CMISS_IO.cimiss_model_3D_grid(
                data_code=utl.CMISS_data_code(data_source=model, var_name='RHU'),
                init_time_str='20' + filename[0:8], valid_time=fhour,
                fcst_levels=levels, fcst_ele="RHU", units='%')
            if rh is None:
                return

            u = CMISS_IO.cimiss_model_3D_grid(
                data_code=utl.CMISS_data_code(data_source=model, var_name='WIU'),
                init_time_str='20' + filename[0:8], valid_time=fhour,
                fcst_levels=levels, fcst_ele="WIU", units='m/s')
            if u is None:
                return

            v = CMISS_IO.cimiss_model_3D_grid(
                data_code=utl.CMISS_data_code(data_source=model, var_name='WIV'),
                init_time_str='20' + filename[0:8], valid_time=fhour,
                fcst_levels=levels, fcst_ele="WIV", units='m/s')
            if v is None:
                return

            t = CMISS_IO.cimiss_model_3D_grid(
                data_code=utl.CMISS_data_code(data_source=model, var_name='TEM'),
                init_time_str='20' + filename[0:8], valid_time=fhour,
                fcst_levels=levels, fcst_ele="TEM", units='K')
            if t is None:
                return

            t['data'].values = t['data'].values - 273.15
            t['data'].attrs['units'] = 'C'
        except KeyError:
            raise ValueError('Can not find all data needed')

    if area is not None:
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1

    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1

    # + to solve the problem of labels on all the contours
    mask1 = ((rh['lon'] > map_extent[0] - delt_x) & (rh['lon'] < map_extent[1] + delt_x)
             & (rh['lat'] > map_extent[2] - delt_y) & (rh['lat'] < map_extent[3] + delt_y))
    mask2 = ((u['lon'] > map_extent[0] - delt_x) & (u['lon'] < map_extent[1] + delt_x)
             & (u['lat'] > map_extent[2] - delt_y) & (u['lat'] < map_extent[3] + delt_y))
    mask3 = ((t['lon'] > map_extent[0] - delt_x) & (t['lon'] < map_extent[1] + delt_x)
             & (t['lat'] > map_extent[2] - delt_y) & (t['lat'] < map_extent[3] + delt_y))
    # - to solve the problem of labels on all the contours

    rh = rh.where(mask1, drop=True)
    u = u.where(mask2, drop=True)
    v = v.where(mask2, drop=True)
    t = t.where(mask3, drop=True)
    uv = xr.merge([u.rename({'data': 'u'}), v.rename({'data': 'v'})])

    lats = np.squeeze(rh['lat'].values)
    lons = np.squeeze(rh['lon'].values)

    pres = np.array(levels) * 100 * units('Pa')
    tmpk = mpcalc.smooth_n_point((t['data'].values.squeeze() + 273.15), 9, 2) * units('kelvin')
    thta = mpcalc.potential_temperature(pres[:, None, None], tmpk)

    uwnd = mpcalc.smooth_n_point(u['data'].values.squeeze(), 9, 2) * units.meter / units.second
    vwnd = mpcalc.smooth_n_point(v['data'].values.squeeze(), 9, 2) * units.meter / units.second

    dx, dy = mpcalc.lat_lon_grid_deltas(lons, lats)

    # Compute the PV on all isobaric surfaces
    pv_raw = mpcalc.potential_vorticity_baroclinic(thta, pres[:, None, None], uwnd, vwnd,
                                                   dx[None, :, :], dy[None, :, :],
                                                   lats[None, :, None] * units('degrees'))
    div_raw = mpcalc.divergence(uwnd, vwnd, dx[None, :, :], dy[None, :, :], dim_order='yx')

    # prepare data
    idx_z1 = list(pres.m).index(((lvl_ana * units('hPa')).to(pres.units)).m)

    pv = rh.copy(deep=True)
    pv['data'].values = np.array(pv_raw).reshape(np.append(1, np.array(pv_raw).shape))
    pv['data'].attrs['units'] = str(pv_raw.units)
    pv.attrs['model'] = model
    pv = pv.where(pv['level'] == lvl_ana, drop=True)

    div = u.copy(deep=True)
    div['data'].values = np.array(div_raw).reshape(np.append(1, np.array(div_raw).shape))
    div['data'].attrs['units'] = str(div_raw.units)
    div = div.where(div['level'] == lvl_ana, drop=True)

    uv = uv.where(uv['level'] == lvl_ana, drop=True)

    synoptic_graphics.draw_PV_Div_uv(pv=pv, uv=uv, div=div,
                                     map_extent=map_extent, regrid_shape=20,
                                     city=city, south_China_sea=south_China_sea,
                                     output_dir=output_dir, Global=Global)
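# Hypothetical call of the driver above (all values are illustrative assumptions, and the
# initTime format follows whatever utl.model_filename expects): plot 250 hPa PV,
# divergence and winds from a 24-h ECMWF forecast retrieved via MICAPS.
PV_Div_uv(initTime='21010608', fhour=24, model='ECMWF', lvl_ana=250,
          cntr_pnt=[104, 34], zoom_ratio=20,
          data_source='MICAPS', output_dir='./plots/')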
vwnd = mpcalc.smooth_n_point(vwnd_var, 9, 2) * (units.meter / units.second)

# Create a clean datetime object for plotting based on time of Geopotential heights
vtime = ds.time.data[0].astype('datetime64[ms]').astype('O')

######################################################################
# Use MetPy to compute the baroclinic potential vorticity on all isobaric
# levels and other variables
#

# Compute dx and dy spacing for use in vorticity calculation
dx, dy = mpcalc.lat_lon_grid_deltas(lons, lats)

# Compute the PV on all isobaric surfaces
pv = mpcalc.potential_vorticity_baroclinic(thta, pres[:, None, None], uwnd, vwnd,
                                           dx[None, :, :], dy[None, :, :],
                                           lats[None, :, None] * units('degrees'))

# Use MetPy to compute the divergence on the pressure surfaces
div = mpcalc.divergence(uwnd, vwnd, dx[None, :, :], dy[None, :, :], dim_order='yx')

# Find the index value for the 250-hPa surface
i250 = list(pres.m).index(((250 * units('hPa')).to(pres.units)).m)

######################################################################
# Map Creation
# ------------
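# Sketch (an assumption, not part of the excerpt above) of how the 250-hPa index could be
# used to pull out single-level fields and rescale them for contouring.
pv_250 = pv[i250] * 1e6     # potential vorticity scaled to PVU (1 PVU = 1e-6 K m^2 kg^-1 s^-1)
div_250 = div[i250] * 1e5   # divergence scaled to 1e-5 s^-1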
def PV_Div_uv(initial_time=None, fhour=6, day_back=0, model='ECMWF',
              map_ratio=19 / 9, zoom_ratio=20, cntr_pnt=[102, 34],
              levels=[1000, 950, 925, 900, 850, 800, 700, 600, 500, 400, 300, 250, 200, 100],
              lvl_ana=250, Global=False, south_China_sea=True,
              area='全国',  # default area name ('全国' = whole country / nationwide view)
              city=False, output_dir=None):
    # micaps data directory
    try:
        data_dir = [utl.Cassandra_dir(data_type='high', data_source=model, var_name='RH', lvl=''),
                    utl.Cassandra_dir(data_type='high', data_source=model, var_name='UGRD', lvl=''),
                    utl.Cassandra_dir(data_type='high', data_source=model, var_name='VGRD', lvl=''),
                    utl.Cassandra_dir(data_type='high', data_source=model, var_name='TMP', lvl=''),
                    utl.Cassandra_dir(data_type='high', data_source=model, var_name='HGT', lvl='')]
    except KeyError:
        raise ValueError('Can not find all directories needed')

    # get filename
    if initial_time is not None:
        filename = utl.model_filename(initial_time, fhour)
    else:
        filename = utl.filename_day_back_model(day_back=day_back, fhour=fhour)

    # retrieve data from micaps server
    rh = get_model_3D_grid(directory=data_dir[0][0:-1], filename=filename,
                           levels=levels, allExists=False)
    if rh is None:
        return

    u = get_model_3D_grid(directory=data_dir[1][0:-1], filename=filename,
                          levels=levels, allExists=False)
    if u is None:
        return

    v = get_model_3D_grid(directory=data_dir[2][0:-1], filename=filename,
                          levels=levels, allExists=False)
    if v is None:
        return

    t = get_model_3D_grid(directory=data_dir[3][0:-1], filename=filename,
                          levels=levels, allExists=False)
    if t is None:
        return

    lats = np.squeeze(rh['lat'].values)
    lons = np.squeeze(rh['lon'].values)

    pres = np.array(levels) * 100 * units('Pa')
    tmpk = mpcalc.smooth_n_point(t['data'].values.squeeze(), 9, 2) * units('degC')
    thta = mpcalc.potential_temperature(pres[:, None, None], tmpk)

    uwnd = mpcalc.smooth_n_point(u['data'].values.squeeze(), 9, 2) * units.meter / units.second
    vwnd = mpcalc.smooth_n_point(v['data'].values.squeeze(), 9, 2) * units.meter / units.second

    dx, dy = mpcalc.lat_lon_grid_deltas(lons, lats)

    # Compute the PV on all isobaric surfaces
    pv = mpcalc.potential_vorticity_baroclinic(thta, pres[:, None, None], uwnd, vwnd,
                                               dx[None, :, :], dy[None, :, :],
                                               lats[None, :, None] * units('degrees'))
    div = mpcalc.divergence(uwnd, vwnd, dx[None, :, :], dy[None, :, :], dim_order='yx')

    # prepare data
    idx_z1 = list(pres.m).index(((lvl_ana * units('hPa')).to(pres.units)).m)

    if area is not None:
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1

    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1

    # + to solve the problem of labels on all the contours
    idx_x1 = np.where((lons > map_extent[0] - delt_x) & (lons < map_extent[1] + delt_x))
    idx_y1 = np.where((lats > map_extent[2] - delt_y) & (lats < map_extent[3] + delt_y))
    # - to solve the problem of labels on all the contours

    init_time = u.coords['forecast_reference_time'].values

    pv = {'lon': lons[idx_x1],
          'lat': lats[idx_y1],
          'data': np.array(pv)[idx_z1, idx_y1[0][0]:(idx_y1[0][-1] + 1), idx_x1[0][0]:(idx_x1[0][-1] + 1)],
          'lev': str(lvl_ana),
          'model': model,
          'fhour': fhour,
          'init_time': init_time}

    uv = {'lon': lons[idx_x1],
          'lat': lats[idx_y1],
          'udata': np.array(uwnd)[idx_z1, idx_y1[0][0]:(idx_y1[0][-1] + 1), idx_x1[0][0]:(idx_x1[0][-1] + 1)],
          'vdata': np.array(vwnd)[idx_z1, idx_y1[0][0]:(idx_y1[0][-1] + 1), idx_x1[0][0]:(idx_x1[0][-1] + 1)],
          'lev': str(lvl_ana)}

    div = {'lon': lons[idx_x1],
           'lat': lats[idx_y1],
           'data': np.array(div)[idx_z1, idx_y1[0][0]:(idx_y1[0][-1] + 1), idx_x1[0][0]:(idx_x1[0][-1] + 1)],
           'lev': str(lvl_ana)}

    synoptic_graphics.draw_PV_Div_uv(pv=pv, uv=uv, div=div,
                                     map_extent=map_extent, regrid_shape=20,
                                     city=city, south_China_sea=south_China_sea,
                                     output_dir=output_dir, Global=Global)
def __init__(self, datea, fhr, atcf, config):
    """Compute the requested ensemble forecast fields for a single forecast hour."""
    # Forecast fields to compute
    wnd_lev_1 = [250, 500]
    wnd_lev_2 = [350, 500]
    n_wnd_lev = len(wnd_lev_1)

    # Read steering flow parameters, or use defaults
    steerp1 = float(config['fields'].get('steer_level1', '300'))
    steerp2 = float(config['fields'].get('steer_level2', '850'))
    tcradius = float(config['fields'].get('steer_radius', '333'))

    # lat_lon info
    lat1 = float(config['fields'].get('min_lat', '0.'))
    lat2 = float(config['fields'].get('max_lat', '65.'))
    lon1 = float(config['fields'].get('min_lon', '-180.'))
    lon2 = float(config['fields'].get('max_lon', '-10.'))

    if 'min_lat' not in config:
        config.update({'min_lat': lat1})
        config.update({'max_lat': lat2})
        config.update({'min_lon': lon1})
        config.update({'max_lon': lon2})

    self.fhr = fhr
    self.atcf_files = atcf.atcf_files
    self.config = config
    self.nens = int(len(self.atcf_files))
    df_files = {}
    self.datea_str = datea
    self.datea = dt.datetime.strptime(datea, '%Y%m%d%H')
    self.datea_s = self.datea.strftime("%m%d%H%M")
    self.fff = str(self.fhr + 1000)[1:]
    datea_1 = self.datea + dt.timedelta(hours=self.fhr)
    datea_1 = datea_1.strftime("%m%d%H%M")
    self.dpp = importlib.import_module(config['io_module'])

    logging.warning("Computing hour {0} ensemble fields".format(self.fff))

    # Obtain the ensemble lat/lon information, replace missing values with mean
    self.ens_lat, self.ens_lon = atcf.ens_lat_lon_time(self.fhr)
    e_cnt = 0
    m_lat = 0.0
    m_lon = 0.0
    for n in range(self.nens):
        if self.ens_lat[n] != atcf.missing and self.ens_lon[n] != atcf.missing:
            e_cnt = e_cnt + 1
            m_lat = m_lat + self.ens_lat[n]
            m_lon = m_lon + self.ens_lon[n]
    if e_cnt > 0:
        m_lon = m_lon / e_cnt
        m_lat = m_lat / e_cnt
    for n in range(self.nens):
        if self.ens_lat[n] == atcf.missing or self.ens_lon[n] == atcf.missing:
            self.ens_lat[n] = m_lat
            self.ens_lon[n] = m_lon

    # Read grib file information for this forecast hour
    g1 = self.dpp.ReadGribFiles(self.datea_str, self.fhr, self.config)

    dencode = {'ensemble_data': {'dtype': 'float32'},
               'latitude': {'dtype': 'float32'},
               'longitude': {'dtype': 'float32'},
               'ensemble': {'dtype': 'int32'}}

    # Compute steering wind components
    uoutfile = '{0}/{1}_f{2}_usteer_ens.nc'.format(config['work_dir'], str(self.datea_str), self.fff)
    voutfile = '{0}/{1}_f{2}_vsteer_ens.nc'.format(config['work_dir'], str(self.datea_str), self.fff)

    if (not os.path.isfile(uoutfile) or not os.path.isfile(voutfile)) and \
            config['fields'].get('calc_uvsteer', 'True') == 'True':

        logging.warning(" Computing steering wind information")

        inpDict = {'isobaricInhPa': (steerp1, steerp2)}
        inpDict = g1.set_var_bounds('zonal_wind', inpDict)

        # Create output arrays
        outDict = {'latitude': (lat1, lat2), 'longitude': (lon1, lon2),
                   'description': 'zonal steering wind', 'units': 'm/s', '_FillValue': -9999.}
        outDict = g1.set_var_bounds('zonal_wind', outDict)
        uensmat = g1.create_ens_array('zonal_wind', self.nens, outDict)

        outDict = {'latitude': (lat1, lat2), 'longitude': (lon1, lon2),
                   'description': 'meridional steering wind', 'units': 'm/s', '_FillValue': -9999.}
        outDict = g1.set_var_bounds('meridional_wind', outDict)
        vensmat = g1.create_ens_array('meridional_wind', self.nens, outDict)

        outDict = {'latitude': (lat1, lat2), 'longitude': (lon1, lon2),
                   'description': 'steering wind vorticity', 'units': '1/s', '_FillValue': -9999.}
        outDict = g1.set_var_bounds('zonal_wind', outDict)
        vortmat = g1.create_ens_array('zonal_wind', self.nens, outDict)

        wencode = {'latitude': {'dtype': 'float32'}, 'longitude': {'dtype': 'float32'}}

        for n in range(self.nens):

            # Read global zonal and meridional wind, write to file
            uwnd = g1.read_grib_field('zonal_wind', n, inpDict).rename('u')
            vwnd = g1.read_grib_field('meridional_wind', n, inpDict).rename('v')
            # print(uwnd[:,0,0])
            # print(vwnd[:,0,0])
            # sys.exit(2)
            uwnd.to_netcdf('wind_info.nc', mode='w', encoding=wencode, format='NETCDF3_CLASSIC')
            vwnd.to_netcdf('wind_info.nc', mode='a', encoding=wencode, format='NETCDF3_CLASSIC')

            latvec = uwnd.latitude.values
            lonvec = uwnd.longitude.values

            if e_cnt > 0:
                latcen = latvec[np.abs(latvec - self.ens_lat[n]).argmin()]
                loncen = lonvec[np.abs(lonvec - self.ens_lon[n]).argmin()]

                # Call NCL to remove TC winds, read result from file
                os.system('ncl -Q {0}/tc_steer.ncl tclat={1} tclon={2} tcradius={3}'.format(
                    config['script_dir'], str(latcen), str(loncen), str(tcradius)))
                wfile = nc.Dataset('wind_info.nc')
                uwnd[:, :, :] = wfile.variables['u'][:, :, :]
                vwnd[:, :, :] = wfile.variables['v'][:, :, :]
                os.remove('wind_info.nc')

            # Integrate the winds over the layer to obtain the steering wind
            pres, lat, lon = uwnd.indexes.values()
            nlev = len(pres)

            uint = uwnd[0, :, :]
            uint[:, :] = 0.0
            vint = vwnd[0, :, :]
            vint[:, :] = 0.0

            for k in range(nlev - 1):
                uint[:, :] = uint[:, :] + 0.5 * (uwnd[k, :, :] + uwnd[k + 1, :, :]) * abs(pres[k + 1] - pres[k])
                vint[:, :] = vint[:, :] + 0.5 * (vwnd[k, :, :] + vwnd[k + 1, :, :]) * abs(pres[k + 1] - pres[k])

            # if pres[0] > pres[-1]:
            #     uint = -np.trapz(uwnd[:,:,:], pres, axis=0) / abs(pres[-1]-pres[0])
            #     vint = -np.trapz(vwnd[:,:,:], pres, axis=0) / abs(pres[-1]-pres[0])
            # else:
            #     uint = np.trapz(uwnd[:,:,:], pres, axis=0) / abs(pres[-1]-pres[0])
            #     vint = np.trapz(vwnd[:,:,:], pres, axis=0) / abs(pres[-1]-pres[0])

            if lat[0] > lat[-1]:
                slat1 = lat2
                slat2 = lat1
            else:
                slat1 = lat1
                slat2 = lat2

            # Write steering flow to ensemble arrays
            uensmat[n, :, :] = np.squeeze(uint.sel(latitude=slice(slat1, slat2),
                                                   longitude=slice(lon1, lon2))) / abs(pres[-1] - pres[0])
            vensmat[n, :, :] = np.squeeze(vint.sel(latitude=slice(slat1, slat2),
                                                   longitude=slice(lon1, lon2))) / abs(pres[-1] - pres[0])

            # Compute the vorticity associated with the steering wind
            # circ = VectorWind(unew, vnew).vorticity() * 1.0e5
            # vortmat[n,:,:] = np.squeeze(circ.sel(latitude=slice(lat2, lat1), longitude=slice(lon1, lon2)))

        uensmat.to_netcdf(uoutfile, encoding=dencode)
        vensmat.to_netcdf(voutfile, encoding=dencode)
        # vortmat.to_netcdf(vortfile, encoding=dencode)

    else:
        logging.warning(" Obtaining steering wind information from file")

    # Read geopotential height from file, if ensemble file is not present
    if config['fields'].get('calc_height', 'True') == 'True':

        if 'height_levels' in config['fields']:
            height_list = json.loads(config['fields'].get('height_levels'))
        else:
            height_list = [500]

        for level in height_list:

            levstr = '%0.3i' % int(level)
            outfile = '{0}/{1}_f{2}_h{3}hPa_ens.nc'.format(config['work_dir'], str(self.datea_str), self.fff, levstr)

            if not os.path.isfile(outfile):

                logging.warning(' Computing {0} hPa height'.format(levstr))

                vDict = {'latitude': (lat1, lat2), 'longitude': (lon1, lon2),
                         'isobaricInhPa': (level, level),
                         'description': '{0} hPa height'.format(levstr),
                         'units': 'm', '_FillValue': -9999.}
                vDict = g1.set_var_bounds('geopotential_height', vDict)
                ensmat = g1.create_ens_array('geopotential_height', g1.nens, vDict)

                for n in range(g1.nens):
                    ensmat[n, :, :] = np.squeeze(g1.read_grib_field('geopotential_height', n, vDict))

                ensmat.to_netcdf(outfile, encoding=dencode)

            elif os.path.isfile(outfile):
                logging.warning(" Obtaining {0} hPa height data from {1}".format(levstr, outfile))

    # Compute 250 hPa PV if the file does not exist
    outfile = '{0}/{1}_f{2}_pv250_ens.nc'.format(config['work_dir'], str(self.datea_str), self.fff)

    if (not os.path.isfile(outfile) and config['fields'].get('calc_pv250hPa', 'True') == 'True'):

        logging.warning(" Computing 250 hPa PV")

        vDict = {'latitude': (lat1, lat2), 'longitude': (lon1, lon2),
                 'isobaricInhPa': (200, 300),
                 'description': '250 hPa Potential Vorticity', 'units': 'PVU', '_FillValue': -9999.}
        vDict = g1.set_var_bounds('zonal_wind', vDict)
        ensmat = g1.create_ens_array('zonal_wind', self.nens, vDict)

        for n in range(self.nens):

            # Read all the necessary files from file, smooth fields, so sensitivities are useful
            tmpk = g1.read_grib_field('temperature', n, vDict) * units('K')
            lats = tmpk.latitude.values * units('degrees')
            lons = tmpk.longitude.values * units('degrees')
            pres = tmpk.isobaricInhPa.values * units('hPa')

            tmpk = mpcalc.smooth_n_point(tmpk, 9, 4)
            thta = mpcalc.potential_temperature(pres[:, None, None], tmpk)

            uwnd = mpcalc.smooth_n_point(g1.read_grib_field('zonal_wind', n, vDict) * units('m/s'), 9, 4)
            vwnd = mpcalc.smooth_n_point(g1.read_grib_field('meridional_wind', n, vDict) * units('m/s'), 9, 4)

            dx, dy = mpcalc.lat_lon_grid_deltas(lons, lats, x_dim=-1, y_dim=-2, geod=None)

            # Compute PV and place in ensemble array
            pvout = mpcalc.potential_vorticity_baroclinic(thta, pres[:, None, None], uwnd, vwnd,
                                                          dx[None, :, :], dy[None, :, :],
                                                          lats[None, :, None])
            ensmat[n, :, :] = np.squeeze(pvout[np.where(pres == 250 * units('hPa'))[0], :, :]) * 1.0e6

        ensmat.to_netcdf(outfile, encoding=dencode)

    elif os.path.isfile(outfile):
        logging.warning(" Obtaining 250 hPa PV data from {0}".format(outfile))

    # Compute the equivalent potential temperature (if desired and file is missing)
    if config['fields'].get('calc_theta-e', 'False') == 'True':

        if 'theta-e_levels' in config['fields']:
            thetae_list = json.loads(config['fields'].get('theta-e_levels'))
        else:
            thetae_list = [700, 850]

        for level in thetae_list:

            levstr = '%0.3i' % int(level)
            outfile = '{0}/{1}_f{2}_e{3}hPa_ens.nc'.format(config['work_dir'], str(self.datea_str), self.fff, levstr)

            if not os.path.isfile(outfile):

                logging.warning(' Computing {0} hPa Theta-E'.format(levstr))

                vDict = {'latitude': (lat1, lat2), 'longitude': (lon1, lon2),
                         'isobaricInhPa': (level, level),
                         'description': '{0} hPa Equivalent Potential Temperature'.format(levstr),
                         'units': 'K', '_FillValue': -9999.}
                vDict = g1.set_var_bounds('temperature', vDict)
                ensmat = g1.create_ens_array('temperature', g1.nens, vDict)

                for n in range(g1.nens):

                    tmpk = g1.read_grib_field('temperature', n, vDict) * units.K
                    pres = tmpk.isobaricInhPa.values * units.hPa

                    if g1.has_specific_humidity:
                        qvap = np.squeeze(g1.read_grib_field('specific_humidity', n, vDict))
                        tdew = mpcalc.dewpoint_from_specific_humidity(pres[None, None], tmpk, qvap)
                    else:
                        relh = g1.read_grib_field('relative_humidity', n, vDict)
                        relh = np.minimum(np.maximum(relh, 0.01), 100.0) * units.percent
                        tdew = mpcalc.dewpoint_from_relative_humidity(tmpk, relh)

                    ensmat[n, :, :] = np.squeeze(mpcalc.equivalent_potential_temperature(pres[None, None], tmpk, tdew))

                ensmat.to_netcdf(outfile, encoding=dencode)

            elif os.path.isfile(outfile):
                logging.warning(" Obtaining {0} hPa Theta-e data from {1}".format(levstr, outfile))

    # Compute the 500-850 hPa water vapor mixing ratio (if desired and file is missing)
    outfile = '{0}/{1}_f{2}_q500-850hPa_ens.nc'.format(config['work_dir'], str(self.datea_str), self.fff)

    if (not os.path.isfile(outfile) and config['fields'].get('calc_q500-850hPa', 'False') == 'True'):

        logging.warning(" Computing 500-850 hPa Water Vapor")

        vDict = {'latitude': (lat1, lat2), 'longitude': (lon1, lon2),
                 'description': '500-850 hPa Integrated Water Vapor', 'units': 'hPa', '_FillValue': -9999.}
        vDict = g1.set_var_bounds('temperature', vDict)
        ensmat = g1.create_ens_array('temperature', len(self.atcf_files), vDict)

        vDict = {'latitude': (lat1, lat2), 'longitude': (lon1, lon2),
                 'isobaricInhPa': (500, 850),
                 'description': '500-850 hPa Integrated Water Vapor', 'units': 'hPa', '_FillValue': -9999.}
        vDict = g1.set_var_bounds('temperature', vDict)

        for n in range(self.nens):

            tmpk = np.squeeze(g1.read_grib_field('temperature', n, vDict)) * units('K')
            pres = (tmpk.isobaricInhPa.values * units.hPa).to(units.Pa)

            if g1.has_specific_humidity:
                qvap = mpcalc.mixing_ratio_from_specific_humidity(g1.read_grib_field('specific_humidity', n, vDict))
            else:
                relh = np.minimum(np.maximum(g1.read_grib_field('relative_humidity', n, vDict), 0.01),
                                  100.0) * units('percent')
                qvap = mpcalc.mixing_ratio_from_relative_humidity(pres[:, None, None], tmpk, relh)

            # Integrate water vapor over the pressure levels
            ensmat[n, :, :] = np.abs(np.trapz(qvap, pres, axis=0)) / mpcon.earth_gravity

        ensmat.to_netcdf(outfile, encoding=dencode)

    elif os.path.isfile(outfile):
        logging.warning(" Obtaining 500-850 hPa water vapor data from {0}".format(outfile))

    # Compute wind-related forecast fields (if desired and file is missing)
    if config['fields'].get('calc_winds', 'False') == 'True':

        if 'wind_levels' in config['fields']:
            wind_list = json.loads(config['fields'].get('wind_levels'))
        else:
            wind_list = [850]

        for level in wind_list:

            levstr = '%0.3i' % int(level)
            ufile = '{0}/{1}_f{2}_u{3}hPa_ens.nc'.format(config['work_dir'], str(self.datea_str), self.fff, levstr)
            vfile = '{0}/{1}_f{2}_v{3}hPa_ens.nc'.format(config['work_dir'], str(self.datea_str), self.fff, levstr)

            if (not os.path.isfile(ufile)) or (not os.path.isfile(vfile)):

                logging.warning(' Computing {0} hPa wind information'.format(levstr))

                uDict = {'latitude': (lat1, lat2), 'longitude': (lon1, lon2),
                         'isobaricInhPa': (level, level),
                         'description': '{0} hPa zonal wind'.format(levstr),
                         'units': 'm/s', '_FillValue': -9999.}
                uDict = g1.set_var_bounds('zonal_wind', uDict)
                uensmat = g1.create_ens_array('zonal_wind', g1.nens, uDict)

                vDict = {'latitude': (lat1, lat2), 'longitude': (lon1, lon2),
                         'isobaricInhPa': (level, level),
                         'description': '{0} hPa meridional wind'.format(levstr),
                         'units': 'm/s', '_FillValue': -9999.}
                vDict = g1.set_var_bounds('meridional_wind', vDict)
                vensmat = g1.create_ens_array('meridional_wind', g1.nens, vDict)

                for n in range(g1.nens):
                    uwnd = g1.read_grib_field('zonal_wind', n, uDict).squeeze()
                    vwnd = g1.read_grib_field('meridional_wind', n, vDict).squeeze()
                    uensmat[n, :, :] = uwnd[:, :]
                    vensmat[n, :, :] = vwnd[:, :]

                uensmat.to_netcdf(ufile, encoding=dencode)
                vensmat.to_netcdf(vfile, encoding=dencode)

            elif os.path.isfile(outfile):
                logging.warning(" Obtaining {0} hPa wind information from file".format(levstr))
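# Hypothetical driver sketch (the class name 'ComputeFields', the io_module value and the
# directory paths are assumptions, not from the original source): compute the hour-24
# ensemble fields for one initialization, given previously read ATCF data in `atcf`.
config = {
    'io_module': 'grib_gefs',          # module providing ReadGribFiles (assumed name)
    'work_dir': './work',
    'script_dir': './scripts',
    'fields': {'calc_uvsteer': 'True', 'calc_pv250hPa': 'True',
               'steer_level1': '300', 'steer_level2': '850', 'steer_radius': '333'},
}
fields_f24 = ComputeFields('2022092700', 24, atcf, config)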