def leap_adjust(data, year):
    """Adjust daily data for leap years, returning (data, ndays)."""
    data = atm.squeeze(data)
    ndays = 365
    if year is not None and atm.isleap(year):
        ndays += 1
    else:
        # Remove NaN for day 366 in non-leap year
        data = atm.subset(data, {'day' : (1, ndays)})
    return data, ndays
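# Example usage of leap_adjust -- a minimal sketch. The synthetic DataArray
# below is hypothetical; it just mimics the 1-366 'day' coordinate of the
# MERRA daily files used in this repo (numpy/xray/atmos imports as in the
# scripts below are assumed).
days = np.arange(1, 367)
var = xray.DataArray(np.random.rand(366), coords={'day' : days},
                     dims=['day'])
var14, ndays14 = leap_adjust(var, 2014)   # non-leap: trims to days 1-365
var12, ndays12 = leap_adjust(var, 2012)   # leap year: keeps day 366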
import numpy as np
import xray
import pandas as pd
import matplotlib.pyplot as plt
import atmos as atm
import merra
from indices import onset_SJKE, summarize_indices, plot_index_years

# ----------------------------------------------------------------------
# Compute SJ indices (Boos and Emanuel 2009)

datadir = atm.homedir() + 'datastore/merra/daily/'
years = np.arange(1979, 2015)
filestr = 'merra_uv850_40E-120E_60S-60N_'
datafiles = [datadir + filestr + '%d.nc' % y for y in years]

# Read daily data from each year
ds = atm.combine_daily_years(['U', 'V'], datafiles, years)

# Remove extra dimension from data
u = atm.squeeze(ds['U'])
v = atm.squeeze(ds['V'])

# Calculate SJKE index
sjke = onset_SJKE(u, v)

# Summary plot and timeseries in individual years
summarize_indices(years, sjke['onset'])
plot_index_years(sjke, suptitle='SJ', yearnm='Year', daynm='Day')
        else:
            dsyr = xray.concat((dsyr, ds), dim='day')
    savefile = datafiles[y]
    print('Saving to ' + savefile)
    dsyr.to_netcdf(savefile)

# Read daily data from each year
plist = [200, 400, 600]
if plev is not None:
    plist = np.union1d(plist, [plev])
T_p = {}
for p in plist:
    T1 = atm.combine_daily_years('T', datafiles, years, yearname='year',
                                 subset_dict={'plev' : (p, p)})
    T_p[p] = atm.squeeze(T1)
Tbar = atm.combine_daily_years('Tbar', datafiles, years, yearname='year')
if plev is None:
    T = Tbar
    varname = 'TT200-600'
else:
    T = T_p[plev]
    varname = 'TT%d' % plev

# Calculate TT index
# The north region should go up to 35N, but there is some weirdness with
# the topography, so I'm setting it to 30N for now
north = (5, 30, 40, 100)
south = (-15, 5, 40, 100)
suptitle = varname + ' N=%s S=%s' % (str(north), str(south))
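# For reference, a minimal sketch of the TT index idea (Goswami et al 2006):
# onset is identified when the north-minus-south tropospheric temperature
# difference turns positive. This is an illustration only, not the
# implementation in indices.py; reading the box tuples as
# (lat1, lat2, lon1, lon2), the unweighted box means, and the 'lon' subset
# key are assumptions here.
def tt_gradient(T, north, south):
    boxmean = {}
    for nm, box in [('north', north), ('south', south)]:
        lat1, lat2, lon1, lon2 = box
        sub = atm.subset(T, {'lat' : (lat1, lat2), 'lon' : (lon1, lon2)})
        latname = atm.get_coord(sub, 'lat', 'name')
        lonname = atm.get_coord(sub, 'lon', 'name')
        # Unweighted area mean (cos-latitude weighting omitted for brevity)
        boxmean[nm] = sub.mean(dim=latname).mean(dim=lonname)
    return boxmean['north'] - boxmean['south']

tt_grad = tt_gradient(T, north, south)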
    pcp_sm = atm.rolling_mean(pcp, nroll[name], axis=-1, center=True)
    index[key] = get_onset_WLH(years, days, pcp_sm.values, threshold, key,
                               pentad, precip_jan)

    # Unsmoothed pentad timeseries
    key = 'WLH_%s_unsmth' % name
    print(key)
    index[key] = get_onset_WLH(years, days, pcp, threshold, key, pentad,
                               precip_jan)

# ----------------------------------------------------------------------
# OCI index (Wang et al 2009) and SJKE index (Boos and Emanuel 2009)

ds = atm.combine_daily_years(['U', 'V'], ocifiles, years)
ds = ds.rename({'Year' : 'year', 'Day' : 'day'})
u850 = atm.squeeze(ds['U'])
v850 = atm.squeeze(ds['V'])

# OCI Index
index['OCI'] = indices.onset_OCI(u850, yearnm='year', daynm='day')
index['OCI'].attrs['title'] = 'OCI'

# SJKE Index
index['SJKE'] = indices.onset_SJKE(u850, v850, yearnm='year', daynm='day')
index['SJKE'].attrs['title'] = 'SJKE'

# ----------------------------------------------------------------------
# TT index (Goswami et al 2006)
# *** NOTES ****
# Need to troubleshoot the TT index before using it in anything final.
filestr = 'comp-onset_%s-%s-%s' % (onset_nm, savestr, vargroup)
atm.savefigs(savedir + filestr, 'pdf', merge=True)
plt.close('all')

# ======================================================================
# OLD STUFF
# ======================================================================

# ----------------------------------------------------------------------
# Cross-equatorial atmospheric heat fluxes

if run_eht:
    keys = ['V*DSE950', 'V*MSE950']
    eht = {key : data[key] for key in keys}
    lat0 = 0.625
    for key in eht:
        eht[key] = atm.squeeze(atm.subset(eht[key], {'lat' : (lat0, lat0)}))
        eht[key] = eht[key].mean(dim='year')

    # Plot longitude-time contours
    figsize = (10, 10)
    ncont = 20
    cmap = 'RdBu_r'
    for key in eht:
        plt.figure(figsize=figsize)
        ehtplot = eht[key]
        days = ehtplot['dayrel'].values
        lon = ehtplot['XDim'].values
        plt.contourf(lon, days, ehtplot, ncont, cmap=cmap)
        plt.title('Cross-Equatorial ' + key)
        plt.xlabel('Longitude')
        plt.ylabel('Relative Day')
def calc_ubudget(datafiles, ndays, lon1, lon2, plev=200):
    """Calculate momentum budget for daily data in one year.

    Keys of datafiles dict must be:
    U, V, DUDP, H, OMEGA, DOMEGADP, DUDTANA
    """

    # Read data
    data = xray.Dataset()
    for nm in datafiles:
        print('Reading ' + datafiles[nm])
        with xray.open_dataset(datafiles[nm]) as ds:
            if nm in ds.data_vars:
                var = ds[nm]
            else:
                var = ds[nm + '%d' % plev]
            if 'Day' in var.dims:
                var = var.rename({'Day' : 'day'})
            data[nm] = atm.squeeze(var)
    data['PHI'] = atm.constants.g.values * data['H']

    # Put zeros in for any missing variables (e.g. du/dp)
    for nm in ['OMEGA', 'DUDP', 'DOMEGADP', 'DUDTANA']:
        if nm not in data.data_vars:
            data[nm] = 0.0 * data['U']

    # Eddy decomposition (iterate over a snapshot of the variable names,
    # since new components are added to data inside the loop)
    taxis = 0
    for nm in list(data.data_vars):
        print('Eddy decomposition for ' + nm)
        comp = eddy_decomp(data[nm], ndays, lon1, lon2, taxis)
        for compnm in comp:
            data[compnm] = comp[compnm]

    # Momentum budget calcs
    # du/dt = sum of terms in ubudget
    ubudget = xray.Dataset()
    readme = 'Momentum budget: ACCEL = sum of all other data variables'
    ubudget.attrs['readme'] = readme
    ubudget.attrs['ndays'] = ndays
    ubudget.attrs['lon1'] = lon1
    ubudget.attrs['lon2'] = lon2

    # Advective terms
    keypairs = [('AVG', 'AVG'), ('AVG', 'ST'), ('ST', 'AVG')]
    print('Computing advective terms')
    for pair in keypairs:
        print(pair)
        ukey, flowkey = pair
        u = data['U_' + ukey]
        dudp = data['DUDP_' + ukey]
        uflow = data['U_' + flowkey]
        vflow = data['V_' + flowkey]
        omegaflow = data['OMEGA_' + flowkey]
        adv = advection(uflow, vflow, omegaflow, u, dudp)
        for nm in adv.data_vars:
            key = 'ADV_%s_%s_%s' % (ukey, flowkey, nm)
            ubudget[key] = - adv[nm]
            long_name = 'Advection of %s momentum by %s' % (ukey, flowkey)
            ubudget[key].attrs['long_name'] = long_name

    # EMFD terms
    keys = ['TR', 'ST']
    print('Computing EMFD terms')
    for key in keys:
        print(key)
        u = data['U_' + key]
        v = data['V_' + key]
        omega = data['OMEGA_' + key]
        dudp = data['DUDP_' + key]
        domegadp = data['DOMEGADP_' + key]
        emfd = fluxdiv(u, v, omega, dudp, domegadp)
        for nm in emfd.data_vars:
            ubudget['EMFC_%s_%s' % (key, nm)] = - emfd[nm]

    # Coriolis terms
    latlon = latlon_data(data['V_ST'])
    lat = latlon['LAT']
    f = atm.coriolis(lat)
    ubudget['COR_AVG'] = data['V_AVG'] * f
    ubudget['COR_ST'] = data['V_ST'] * f

    # Pressure gradient terms
    a = atm.constants.radius_earth.values
    coslat = latlon['COSLAT']
    lonrad = latlon['LONRAD']
    londim = atm.get_coord(data['PHI_ST'], 'lon', 'dim')
    ubudget['PGF_ST'] = (- atm.gradient(data['PHI_ST'], lonrad, londim)
                         / (a * coslat))

    # Analysis increment for dU/dt
    ubudget['ANA'] = data['DUDTANA']

    # Time mean
    print('Computing rolling time mean')
    for nm in ubudget.data_vars:
        ubudget[nm] = atm.rolling_mean(ubudget[nm], ndays, axis=taxis,
                                       center=True)

    # Acceleration
    nseconds = 60 * 60 * 24 * ndays
    delta_u = np.nan * data['U']
    u = data['U'].values
    delta_u.values[ndays//2:-ndays//2] = (u[ndays:] - u[:-ndays]) / nseconds
    ubudget['ACCEL'] = delta_u

    return ubudget, data
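# Example call to calc_ubudget -- a sketch only. The directory and filename
# pattern below are hypothetical placeholders and ndays/lon1/lon2 are
# illustrative values; only the dict keys follow the docstring above.
datadir = atm.homedir() + 'datastore/merra/daily/'
nms = ['U', 'V', 'DUDP', 'H', 'OMEGA', 'DOMEGADP', 'DUDTANA']
datafiles = {nm : datadir + 'merra_%s200_1979.nc' % nm for nm in nms}
ubudget, data = calc_ubudget(datafiles, ndays=5, lon1=60, lon2=100)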
def get_daily_data(varid, plev, years, datafiles, data, daymin=1,
                   daymax=366, yearnm='year'):
    """Return daily data (basic variable or calculated variable).

    Data is read from datafiles if varid is a basic variable. If varid is
    a calculated variable (e.g. potential temperature), the base variables
    for the calculation are provided in the dict data.
    """

    years = atm.makelist(years)
    datafiles = atm.makelist(datafiles)

    if isinstance(plev, (int, float)):
        pres = atm.pres_convert(plev, 'hPa', 'Pa')
    elif plev == 'LML' and 'PS' in data:
        pres = data['PS']
    else:
        pres = None

    def get_var(data, varnm, plev=None):
        if plev is None:
            plev = ''
        elif plev == 'LML' and varnm == 'QV':
            varnm = 'Q'
        return data[varnm + str(plev)]

    if var_type(varid) == 'calc':
        print('Computing ' + varid)
        if varid == 'THETA':
            var = atm.potential_temp(get_var(data, 'T', plev), pres)
        elif varid == 'THETA_E':
            var = atm.equiv_potential_temp(get_var(data, 'T', plev), pres,
                                           get_var(data, 'QV', plev))
        elif varid == 'DSE':
            var = atm.dry_static_energy(get_var(data, 'T', plev),
                                        get_var(data, 'H', plev))
        elif varid == 'MSE':
            var = atm.moist_static_energy(get_var(data, 'T', plev),
                                          get_var(data, 'H', plev),
                                          get_var(data, 'QV', plev))
        elif varid == 'VFLXMSE':
            Lv = atm.constants.Lv.values
            var = data['VFLXCPT'] + data['VFLXPHI'] + data['VFLXQV'] * Lv
            var.attrs['units'] = data['VFLXCPT'].attrs['units']
            var.attrs['long_name'] = 'Vertically integrated MSE meridional flux'
    else:
        with xray.open_dataset(datafiles[0]) as ds:
            if varid not in ds.data_vars:
                varid = varid + str(plev)
        var = atm.combine_daily_years(varid, datafiles, years,
                                      yearname=yearnm,
                                      subset_dict={'day' : (daymin, daymax)})
        var = atm.squeeze(var)

        # Make sure year dimension is included for single year
        if len(years) == 1 and 'year' not in var.dims:
            var = atm.expand_dims(var, yearnm, years[0], axis=0)

        # Wrap years for extended day ranges
        if daymin < 1 or daymax > 366:
            var = wrapyear_all(var, daymin, daymax)

    # Convert precip and evap to mm/day
    if varid in ['precip', 'PRECTOT', 'EVAP']:
        var = atm.precip_convert(var, var.attrs['units'], 'mm/day')

    return var
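# Example usage -- a sketch; 'datadir' and the filename pattern below are
# hypothetical placeholders. A basic variable is read from files, then a
# calculated variable ('THETA') is derived from base data passed in 'data'.
datadir = atm.homedir() + 'datastore/merra/daily/'
years = [1979, 1980]
datafiles = [datadir + 'merra_T200_%d.nc' % y for y in years]
T = get_daily_data('T', 200, years, datafiles, data={})
theta = get_daily_data('THETA', 200, years, datafiles, data={'T200' : T})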
for nm in varnms:
    print(nm)
    var = atm.subset(databar[nm], {'dayrel' : (-npre, npost)})
    lat = atm.get_coord(var, 'lat')
    if nm == 'PSI':
        var = atm.subset(var, {'lat' : (-25, 10)})
        latname = atm.get_coord(var, 'lat', 'name')
        pname = atm.get_coord(var, 'plev', 'name')
        var_out = var.max(dim=latname).max(dim=pname)
        tseries['PSIMAX'] = atm.rolling_mean(var_out, nroll, center=True)
    else:
        for lat0 in lat_extract:
            lat0_str = atm.latlon_labels(lat0, 'lat', deg_symbol=False)
            key = nm + '_' + lat0_str
            val, ind = atm.find_closest(lat, lat0)
            var_out = atm.squeeze(var[:, ind])
            tseries[key] = atm.rolling_mean(var_out, nroll, center=True)

# ----------------------------------------------------------------------
# Functions for plotting

fmt_axes = atm.ax_lims_ticks
clear_labels = atm.clear_labels
to_dataset = atm.to_dataset

def contourf_latday(var, clev=None, title='', nc_pref=40, grp=None,
                    xlims=(-120, 200), xticks=np.arange(-120, 201, 30),
                    ylims=(-60, 60), yticks=np.arange(-60, 61, 20),
                    ssn_length=None):
    vals = var.values.T
    lat = atm.get_coord(var, 'lat')