# Compute zonal-mean meridional streamfunction climatologies for several
# seasons, both for the full globe and for a longitude sector (60-100E).
# NOTE(review): depends on `data` (an xray/xarray dataset with 'V' and a
# 'month' dim) and the atmos-tools `atm` module, both defined elsewhere.

# Coordinate arrays of the input dataset.
lat = atm.get_coord(data, 'lat')
lon = atm.get_coord(data, 'lon')
# Surface-pressure climatology (used to mask below-ground levels);
# divided by 100 — presumably Pa -> hPa, as in the companion snippet
# that sets topo.units = 'hPa'.
psfile = atm.homedir() + 'dynamics/python/atmos-tools/data/topo/ncep2_ps.nc'
ps = atm.get_ps_clim(lat, lon, psfile)
ps = ps / 100

figsize = (7, 9)
omitzero = False

# Loop over seasons ('MAR' is a single month) and longitude ranges.
for ssn in ['ANN', 'DJF', 'JJA', 'MAR']:
    for lonlims in [(0, 360), (60, 100)]:
        lon1, lon2 = lonlims
        lonstr = atm.latlon_str(lon1, lon2, 'lon')
        suptitle = ssn + ' ' + lonstr
        months = atm.season_months(ssn)
        # Meridional wind for the months of this season.
        v = data['V'].sel(month=months)
        if (lon2 - lon1) < 360:
            # Sector case: subset in longitude and scale the streamfunction
            # by the fraction of the full circle the sector covers.
            v = atm.subset(v, {'lon' : (lon1, lon2)})
            sector_scale = (lon2 - lon1) / 360.0
            psbar = atm.dim_mean(ps, 'lon', lon1, lon2)
            clev = 10
        else:
            # Global case: no sector scaling, coarser contour interval.
            sector_scale = None
            psbar = atm.dim_mean(ps, 'lon')
            clev = 20
        # Seasonal mean, then two equivalent routes to the zonal-mean
        # streamfunction: (1) streamfunction first, zonal mean after;
        # (2) zonal mean first, streamfunction after.
        vssn = v.mean(dim='month')
        vssn_bar = atm.dim_mean(vssn, 'lon')
        psi1 = atm.streamfunction(vssn, sector_scale=sector_scale)
        psi1 = atm.dim_mean(psi1, 'lon')
        psi2 = atm.streamfunction(vssn_bar, sector_scale=sector_scale)
# ---------------------------------------------------------------------------
# Example 2
# ---------------------------------------------------------------------------
# Load precipitation and wind fields, ENSO indices, and build daily
# timeseries relative to monsoon onset.
# NOTE(review): depends on `datafiles`, `years`, `enso_keys`, `enso_nm`,
# `ensofile`, `lat1/lat2/lon1/lon2`, `index_all`, `index`, `ind_nm`,
# `npre`, `npost`, and the `utils`/`atm` modules, all defined elsewhere.

# Map each dataset name to the variable name it stores internally.
keys_dict = {'PRECTOT' : 'PRECTOT', 'CMAP' : 'precip', 'GPCP' : 'PREC',
             'U200' : 'U', 'U850' : 'U', 'V200' : 'V', 'V850' : 'V'}
data = {}
for nm in datafiles:
    print('Loading ' + datafiles[nm])
    with xray.open_dataset(datafiles[nm]) as ds:
        # Collapse a multi-year climatology to its mean, if present.
        if 'year' in ds.dims:
            ds = ds.mean(dim='year')
        # .load() pulls the data into memory before the file is closed.
        data[nm] = ds[keys_dict[nm]].load()

# ENSO indices
enso = pd.read_csv(ensofile, index_col=0)
enso = enso.loc[years]
for key in enso_keys:
    if key not in enso.columns:
        # Derive a seasonal index (e.g. 'DJF') by averaging the monthly
        # columns ('Dec', 'Jan', ...) for that season.
        months = atm.season_months(key)
        month_names = [(atm.month_str(m)).capitalize() for m in months]
        enso[key] = enso[month_names].mean(axis=1)
# Keep only the requested indices and prefix the column names.
enso = enso[enso_keys]
col_names = [enso_nm + ' ' + nm for nm in enso.columns]
enso.columns = col_names

# ----------------------------------------------------------------------
# Daily timeseries

# Area-averaged precipitation plus MFC timeseries aligned to onset day.
ts = xray.Dataset()
for nm in ['GPCP', 'PRECTOT']:
    ts[nm] = atm.mean_over_geobox(data[nm], lat1, lat2, lon1, lon2)
ts['MFC'] = utils.daily_rel2onset(index_all['CHP_MFC']['daily_ts'],
                                  index[ind_nm], npre, npost)
# NOTE(review): the following statement is truncated by the source scrape
# (the call's remaining arguments are missing) — recover from the original.
ts['CMFC'] = utils.daily_rel2onset(index_all['CHP_MFC']['tseries'],
# ---------------------------------------------------------------------------
# Example 3
# ---------------------------------------------------------------------------
# Mask zonal wind below the surface and plot its seasonal zonal mean on a
# latitude-pressure section.
# NOTE(review): depends on `lat`, `lon`, `u`, and the `atm` module, all
# defined elsewhere in this file/session.

# Surface pressure climatology, converted to hPa (hence the /100).
topo = atm.get_ps_clim(lat, lon) / 100
topo.units = 'hPa'

# ----------------------------------------------------------------------
# Correct for topography

# Keep the uncorrected field; mask levels below the surface pressure.
u_orig = u
u = atm.correct_for_topography(u_orig, topo)

# ----------------------------------------------------------------------
# Zonal mean zonal wind
season = 'jjas'
lon1, lon2 = 60, 100
cint = 5          # contour interval
months = atm.season_months(season)

# Average over the longitude sector and the season's months.
uplot = atm.subset(u, 'lon', lon1, lon2, 'mon', months)
uplot = uplot.mean(['lon', 'mon'])

# Sector-mean surface pressure, used to shade topography on the plots.
ps_plot = atm.subset(topo, 'lon', lon1, lon2)
ps_plot = ps_plot.mean('lon')

# Line-contour plot with labels on every other contour level.
plt.figure()
cs = atm.contour_latpres(uplot, clev=cint, topo=ps_plot)
clev = atm.clevels(uplot, cint, omitzero=True)
plt.clabel(cs, clev[::2], fmt='%02d')

# Filled-contour version of the same section.
plt.figure()
atm.contourf_latpres(uplot, clev=cint, topo=ps_plot)
# ---------------------------------------------------------------------------
# Example 4
# ---------------------------------------------------------------------------
def load_daily_season(pathstr, year, season='ann', var_ids=None,
                      lat1=-90, lat2=90, lon1=0, lon2=360,
                      verbose=True, concat_dim=None):
    """Return daily data for a selected year, season and lat-lon subset.

    Loads daily data from locally saved files and concatenates it into
    a single DataArray or Dataset for that year and season.

    Parameters
    ----------
    pathstr : str
       Beginning of path for each data file, where each file name is in
       the format *yyyymm.nc.
       e.g. pathstr = '~/datastore/merra/daily/u200_'
    year : int
       Year to load.
    season : str, optional
       Season to load. Valid values are as listed in atm.season_months()
       e.g. 'jul', 'jja', 'ann'
       Default is entire year ('ann')
    var_ids : str or list of str, optional
       Variable(s) to extract. If omitted, all variables in the data are
       included and the output is a Dataset.
    lat1, lat2, lon1, lon2 : floats, optional
        Lat-lon subset to extract.
    verbose : bool, optional
        If True, print updates while processing files.
    concat_dim : str, optional
        Name of time dimension for concatenation. If None, then
        atm.get_coord() is called to get the name from the data file.

    Returns
    -------
    data : xray.DataArray or xray.Dataset
    """

    # One file per month of the season, named <pathstr>yyyymm.nc
    months = atm.season_months(season)
    paths = []
    for m in months:
        datestr = '%d%02d' % (year, m)
        paths.append(pathstr + datestr + '.nc')

    # Make sure longitude range is consistent with data
    with xray.open_dataset(paths[0]) as ds:
        lonmax = atm.lon_convention(atm.get_coord(ds, 'lon'))
        if concat_dim is None:
            concat_dim = atm.get_coord(ds, 'time', 'name')
    if lon2 - lon1 == 360:
        # Full-circle request: shift the bounds to match the file's
        # longitude convention (e.g. requested 0-360 but data stored
        # as -180..180, or vice versa).
        if lonmax < lon2:
            offset = -180
        elif lonmax > lon2:
            offset = 180
        else:
            offset = 0
        lon1, lon2 = lon1 + offset, lon2 + offset
    # Bug fix: this was an unconditional leftover debug print; it now
    # honors the documented `verbose` flag.
    if verbose:
        print(lon1, lon2, lonmax)

    # Load daily data
    if var_ids is None:
        var_nms = None
    else:
        var_nms = [get_varname(var_id) for var_id in atm.makelist(var_ids)]
    subset_dict = {'lat' : (lat1, lat2), 'lon' : (lon1, lon2)}
    data = atm.load_concat(paths, var_nms, concat_dim, subset_dict, verbose)

    return data