# Imports used throughout these scripts (assumed: 'atm' is the atmos helper
# package used in this repo, 'xray' is the pre-xarray package, 'utils' is the
# local utilities module)
import collections
import numpy as np
import xray
import matplotlib.pyplot as plt
import atmos as atm
import utils


def all_data(datafiles, npre, npost, lon1, lon2, compdays, comp_attrs):
    # Read daily data fields aligned relative to onset day
    data = collections.OrderedDict()
    sectordata = collections.OrderedDict()
    comp = collections.OrderedDict()
    sectorcomp = collections.OrderedDict()
    sector_latmax = {}
    for varnm in datafiles:
        print('Reading daily data for ' + varnm)
        var, onset, retreat = utils.load_dailyrel(datafiles[varnm])
        var = atm.subset(var, {'dayrel' : (-npre, npost)})
        var = housekeeping(var)

        # Compute sector mean and composite averages
        sectorvar = atm.dim_mean(var, 'lon', lon1, lon2)
        compvar = get_composites(var, compdays, comp_attrs)
        sectorcompvar = get_composites(sectorvar, compdays, comp_attrs)

        # Latitude of maximum subcloud theta_e
        if varnm == 'THETA_E950' or varnm == 'THETA_E_LML':
            sector_latmax[varnm] = theta_e_latmax(sectorvar)

        # Compute regression or take the climatology
        if 'year' in var.dims:
            var = atm.dim_mean(var, 'year')
            sectorvar = atm.dim_mean(sectorvar, 'year')
            compvar = atm.dim_mean(compvar, 'year')
            sectorcompvar = atm.dim_mean(sectorcompvar, 'year')

        # Pack everything into dicts for output
        data[varnm], sectordata[varnm] = var, sectorvar
        comp[varnm], sectorcomp[varnm] = compvar, sectorcompvar

    return data, sectordata, sector_latmax, comp, sectorcomp
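# Illustrative only: a hypothetical driver sketch for all_data(), not part of
# the analysis.  The window and sector numbers below are placeholders, and
# compdays / comp_attrs are whatever composite configuration get_composites()
# expects elsewhere in this script.
def _example_all_data(datafiles, compdays, comp_attrs):
    data, sectordata, sector_latmax, comp, sectorcomp = all_data(
        datafiles, npre=120, npost=200, lon1=60, lon2=100,
        compdays=compdays, comp_attrs=comp_attrs)
    # sector_latmax only gets entries for the subcloud theta_e variables
    return data, sectordata, sector_latmax, comp, sectorcomp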
def theta_e_latmax(var):
    lat = atm.get_coord(var, 'lat')
    coords = {'year' : var['year'], 'dayrel' : var['dayrel']}
    latdim = atm.get_coord(var, 'lat', 'dim')
    latmax = lat[np.nanargmax(var, axis=latdim)]
    latmax = xray.DataArray(latmax, dims=['year', 'dayrel'], coords=coords)
    latmax = atm.dim_mean(latmax, 'year')
    return latmax
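# Illustrative sketch with synthetic data (not called anywhere): theta_e_latmax
# collapses the 'lat' dimension to the latitude of the maximum for each
# (year, dayrel) and then averages over 'year', so the result has dims
# ('dayrel',).  The coordinate values here are arbitrary placeholders.
def _example_theta_e_latmax():
    years, days = np.arange(1979, 1983), np.arange(-120, 201)
    lats = np.arange(-30.0, 30.5, 0.5)
    fake = xray.DataArray(np.random.rand(len(years), len(days), len(lats)),
                          dims=['year', 'dayrel', 'lat'],
                          coords={'year' : years, 'dayrel' : days, 'lat' : lats})
    return theta_e_latmax(fake)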
def sector_mean(var, lon1, lon2):
    """Return the sector mean of a variable."""
    name = var.name
    lonstr = atm.latlon_str(lon1, lon2, 'lon')
    if (lon2 - lon1) == 360:
        lon1, lon2 = None, None
        name_out = name + '_ZON'
    else:
        name_out = name + '_SEC'
    varbar = atm.dim_mean(var, 'lon', lon1, lon2)
    varbar.name = name_out
    varbar.attrs['varnm'] = name
    varbar.attrs['lonstr'] = lonstr
    varbar.attrs['filestr'] = '%s_sector_%s' % (name, lonstr)
    return varbar
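# Purely illustrative usage sketch (not called anywhere): build a synthetic
# lat-lon field and compare the 60-100E sector mean (which gets the '_SEC'
# suffix) with the full 0-360 zonal mean (which gets '_ZON').
def _example_sector_mean():
    lons = np.arange(0, 360, 1.25)
    lats = np.arange(-90, 90.5, 0.5)
    u200 = xray.DataArray(np.random.randn(len(lats), len(lons)),
                          dims=['lat', 'lon'],
                          coords={'lat' : lats, 'lon' : lons}, name='U200')
    u200_sec = sector_mean(u200, 60, 100)   # name 'U200_SEC'
    u200_zon = sector_mean(u200, 0, 360)    # name 'U200_ZON'
    return u200_sec, u200_zon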
def get_data(varnm, datafiles, regdays, seasons, lon1, lon2, nroll=None):
    var, onset, retreat = utils.load_dailyrel(datafiles[varnm])
    if nroll is not None:
        var = atm.rolling_mean(var, nroll, axis=1, center=True)

    # Seasonal averages and daily lat-lon data
    data = xray.Dataset()
    for season in seasons:
        key = varnm + '_' + season
        data[key] = ssn_average(var, onset, retreat, season)

    # Daily data on regdays
    data[varnm + '_DAILY'] = var.sel(dayrel=regdays)

    # Sector mean data
    var_sector = atm.dim_mean(var, 'lon', lon1, lon2)

    alldata = {'data_latlon' : data, 'var_sector' : var_sector,
               'onset' : onset, 'retreat' : retreat}
    return alldata
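# Hypothetical call sketch for get_data() (the regdays, season, and smoothing
# values are placeholders): the returned dict bundles seasonal lat-lon
# averages, daily snapshots on regdays, the sector mean, and the onset/retreat
# indices.
def _example_get_data(datafiles):
    alldata = get_data('U200', datafiles, regdays=np.arange(-5, 6),
                       seasons=['JJA'], lon1=60, lon2=100, nroll=5)
    return alldata['data_latlon'], alldata['var_sector']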
ps = ps / 100
figsize = (7, 9)
omitzero = False
for ssn in ['ANN', 'DJF', 'JJA', 'MAR']:
    for lonlims in [(0, 360), (60, 100)]:
        lon1, lon2 = lonlims
        lonstr = atm.latlon_str(lon1, lon2, 'lon')
        suptitle = ssn + ' ' + lonstr
        months = atm.season_months(ssn)
        v = data['V'].sel(month=months)
        if (lon2 - lon1) < 360:
            v = atm.subset(v, {'lon' : (lon1, lon2)})
            sector_scale = (lon2 - lon1) / 360.0
            psbar = atm.dim_mean(ps, 'lon', lon1, lon2)
            clev = 10
        else:
            sector_scale = None
            psbar = atm.dim_mean(ps, 'lon')
            clev = 20
        vssn = v.mean(dim='month')
        vssn_bar = atm.dim_mean(vssn, 'lon')
        psi1 = atm.streamfunction(vssn, sector_scale=sector_scale)
        psi1 = atm.dim_mean(psi1, 'lon')
        psi2 = atm.streamfunction(vssn_bar, sector_scale=sector_scale)
        plt.figure(figsize=figsize)
        plt.suptitle(suptitle)
        plt.subplot(2, 1, 1)
        atm.contour_latpres(psi1, clev=clev, omitzero=omitzero, topo=psbar)
        plt.title(r'v -> $\psi$ -> [$\psi$]')
data = {'reg' : {}, 'clim' : {}, 'early' : {}, 'late' : {}}

# Load regression coefficients and climatology
for varnm in varnms:
    for key in ['latlon', 'sector']:
        for key2 in ['reg', 'clim']:
            filenm = datafiles[varnm][key + '_' + key2]
            print('Loading ' + filenm)
            with xray.open_dataset(filenm) as ds:
                data[key2][varnm + '_' + key] = ds.load()

# ----------------------------------------------------------------------
# Calculate strong/weak composites for sector data
for varnm in varnms:
    key = varnm + '_sector'
    varbar = atm.dim_mean(data['clim'][key][varnm], 'lon', lon1, lon2)
    m = data['reg'][key]['m']
    data['late'][key] = varbar + m * nstd
    data['early'][key] = varbar - m * nstd

# ----------------------------------------------------------------------
def stipple_mask(p):
    return ((p >= 0.05) | np.isnan(p))

def sector_plot(var, p, stipple_kw={}, grp=None, ylim=None, yticks=None,
                clim=None):
    xname, yname = 'dayrel', 'lat'
    pts_mask = stipple_mask(p)
    lat = atm.get_coord(var, 'lat')
    days = atm.get_coord(var, 'dayrel')
    xsample = 3
data[nm] = ds[nm].load()

# Scale units and rename variables
data = data * scale
nms = data.data_vars.keys()
for nm in nms:
    data = data.rename({nm : nm.replace('FLX', '')})

# Take subset and smooth with rolling mean
daydim = atm.get_coord(data['VMSE'], 'dayrel', 'dim')
for nm in data.data_vars:
    data[nm] = atm.rolling_mean(data[nm], nroll, axis=daydim, center=True)

# Average over equatorial region
data_eq = atm.dim_mean(data, 'lat', eqlat1, eqlat2)

# Cross-equatorial fluxes integrated over sectors
a = atm.constants.radius_earth.values
eq_int = xray.Dataset()
eq_int.attrs['units'] = sector_units
lonranges = [(40, 60), (40, 100), (lon1, lon2)]
eq_int.attrs['lonranges'] = ['%dE-%dE' % lonrange for lonrange in lonranges]
for lonrange in lonranges:
    lon1, lon2 = lonrange
    dist = a * np.radians(lon2 - lon1)
    for nm in data_eq.data_vars:
        key = nm + '_%dE-%dE' % (lon1, lon2)
        eq_int[key] = atm.dim_mean(data_eq[nm], 'lon', lon1, lon2) * dist

# Convert to PW
eq_int = eq_int * 1e-15 / scale
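# Illustrative arithmetic check on the sector integration above (not called
# anywhere): averaging over longitude and multiplying by
# dist = a * radians(lon2 - lon1) is equivalent to integrating along the
# equator over that sector.  For 40E-100E, dist = 6.371e6 m * radians(60)
# ~ 6.67e6 m; the final '* 1e-15 / scale' step then undoes the earlier unit
# scaling and converts W to PW.
def _check_sector_distance():
    a = atm.constants.radius_earth.values
    return a * np.radians(100 - 40)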
data = {}
data['MFC'] = utils.daily_rel2onset(mfc, onset, npre, npost)
data[pcp_nm] = utils.daily_rel2onset(pcp, onset, npre, npost)
data['MFC_ACC'] = utils.daily_rel2onset(index['tseries'], onset, npre, npost)
for nm in varnms:
    print('Loading ' + relfiles[nm])
    with xray.open_dataset(relfiles[nm]) as ds:
        if nm == 'PSI':
            data[nm] = atm.streamfunction(ds['V'])
            psimid = atm.subset(data[nm], {'plev' : (pmid, pmid)},
                                squeeze=True)
            psimid.name = 'PSI%d' % pmid
            data['PSI%d' % pmid] = psimid
        elif nm == 'VFLXLQV':
            var = atm.dim_mean(ds['VFLXQV'], 'lon', lon1, lon2)
            data[nm] = var * atm.constants.Lv.values
        elif nm == theta_nm:
            theta = ds[nm]
            _, _, dtheta = atm.divergence_spherical_2d(theta, theta)
            data[nm] = atm.dim_mean(ds[nm], 'lon', lon1, lon2)
            data[dtheta_nm] = atm.dim_mean(dtheta, 'lon', lon1, lon2)
        elif nm == dtheta_nm:
            continue
        else:
            data[nm] = atm.dim_mean(ds[nm], 'lon', lon1, lon2)

databar = {}
for nm in data:
    if 'year' in data[nm].dims:
        databar[nm] = data[nm].mean(dim='year')
# ----------------------------------------------------------------------
# Daily timeseries
ts = xray.Dataset()
for nm in ['GPCP', 'PRECTOT']:
    ts[nm] = atm.mean_over_geobox(data[nm], lat1, lat2, lon1, lon2)
ts['MFC'] = utils.daily_rel2onset(index_all['CHP_MFC']['daily_ts'],
                                  index[ind_nm], npre, npost)
ts['CMFC'] = utils.daily_rel2onset(index_all['CHP_MFC']['tseries'],
                                   index[ind_nm], npre, npost)

# Extract variables at specified latitudes
for nm, lat0 in lat_extract.items():
    var = atm.dim_mean(data[nm], 'lon', lon1, lon2)
    lat = atm.get_coord(var, 'lat')
    lat0_str = atm.latlon_labels(lat0, 'lat', deg_symbol=False)
    # key = nm + '_' + lat0_str
    key = nm
    lat_closest, _ = atm.find_closest(lat, lat0)
    print('%s %.2f %.2f' % (nm, lat0, lat_closest))
    ts[key] = atm.subset(var, {'lat' : (lat_closest, None)}, squeeze=True)

# Compute climatology and smooth with rolling mean
if 'year' in ts.dims:
    ts = ts.mean(dim='year')
if nroll is not None:
    for nm in ts.data_vars:
        ts[nm] = atm.rolling_mean(ts[nm], nroll, center=True)
tseries = atm.subset(ts, {'dayrel' : (-npre, npost)})
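# Note on the latitude-extraction loop above: lat_extract (set earlier in the
# script) maps variable names to the latitude at which a single-latitude
# timeseries is pulled out.  A hypothetical configuration might look like
#     lat_extract = {'U200' : 0, 'V200' : 15}
# i.e. 200 hPa zonal wind at the equator and meridional wind at 15N.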