def pgradient(var, lat1, lat2, lon1, lon2, plev):
    """Return d/dp of a lat-lon variable."""
    pwidth = 100
    p1, p2 = plev - pwidth, plev + pwidth
    var = atm.subset(var, {'lat' : (lat1, lat2), 'lon' : (lon1, lon2),
                           'plev' : (p1, p2)}, copy=False)
    latlonstr = latlon_filestr(lat1, lat2, lon1, lon2)
    attrs = var.attrs
    pname = atm.get_coord(var, 'plev', 'name')
    pdim = atm.get_coord(var, 'plev', 'dim')
    pres = var[pname]
    pres = atm.pres_convert(pres, pres.attrs['units'], 'Pa')
    dvar_dp = atm.gradient(var, pres, axis=pdim)
    dvar_dp = atm.subset(dvar_dp, {pname : (plev, plev)}, copy=False,
                         squeeze=True)
    varnm = 'D%sDP' % var.name
    name = '%s%d' % (varnm, plev)
    dvar_dp.name = name
    attrs['long_name'] = 'd/dp of ' + var.attrs['long_name']
    attrs['standard_name'] = 'd/dp of ' + var.attrs['standard_name']
    attrs['units'] = ('(%s)/Pa' % attrs['units'])
    attrs[pname] = plev
    attrs['filestr'] = '%s_%s' % (name, latlonstr)
    attrs['varnm'] = varnm
    dvar_dp.attrs = attrs
    return dvar_dp
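# The core of pgradient is a centered finite-difference derivative in pressure.
# A minimal pure-numpy sketch of the same idea on synthetic data (an
# illustration only, not the atm.gradient implementation itself):
import numpy as np

pres_pa = np.array([40000.0, 50000.0, 60000.0])   # 400, 500, 600 hPa in Pa
u = np.random.rand(3, 4, 5)                       # synthetic (plev, lat, lon) field

du_dp = np.gradient(u, pres_pa, axis=0)           # derivative w.r.t. pressure, per Pa
du_dp_500 = du_dp[1]                              # value at the middle level
print(du_dp_500.shape)                            # (4, 5)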
def contourf_latday(var, clev=None, title='', nc_pref=40, grp=None,
                    xlims=(-120, 200), xticks=np.arange(-120, 201, 30),
                    ylims=(-60, 60), yticks=np.arange(-60, 61, 20),
                    ssn_length=None):
    """Create a filled contour plot of data on latitude-day grid."""
    vals = var.values.T
    lat = atm.get_coord(var, 'lat')
    days = atm.get_coord(var, 'dayrel')
    if var.name.lower() == 'precip':
        cmap = 'hot_r'
        extend = 'max'
    else:
        cmap = 'RdBu_r'
        extend = 'both'
    if clev is None:
        symmetric = atm.symm_colors(vals)
        cint = atm.cinterval(vals, n_pref=nc_pref, symmetric=symmetric)
        clev = atm.clevels(vals, cint, symmetric=symmetric)
    cticks_dict = {'precip' : np.arange(0, 13, 2),
                   'T200' : np.arange(-208, 227, 2),
                   'U200' : np.arange(-60, 61, 10),
                   'PSI500' : np.arange(-800, 801, 200)}
    cticks = cticks_dict.get(var.name)
    plt.contourf(days, lat, vals, clev, cmap=cmap, extend=extend)
    plt.colorbar(ticks=cticks)
    fmt_axes(xlims, xticks, ylims, yticks)
    plt.grid()
    plt.title(title)
    plt.axvline(0, color='k')
    if ssn_length is not None:
        plt.axvline(ssn_length, color='k')
    if grp is not None and grp.row == grp.nrow - 1:
        plt.xlabel('Rel Day')
    if grp is not None and grp.col == 0:
        plt.ylabel('Latitude')
Example #3
def wrapyear_all(data, daymin, daymax):
    """Wrap daily data to extended ranges over each year in yearly data."""

    def extract_year(data, year, years):
        if year in years:
            data_out = atm.subset(data, {'year' : (year, year)})
        else:
            data_out = None
        return data_out

    daynm = atm.get_coord(data, 'day', 'name')
    days = np.arange(daymin, daymax + 1)
    days = xray.DataArray(days, name=daynm, coords={daynm : days})
    years = atm.get_coord(data, 'year')
    yearnm = atm.get_coord(data, 'year', 'name')
    for y, year in enumerate(years):
        year_prev, year_next = year - 1, year + 1
        var = extract_year(data, year, years)
        var_prev = extract_year(data, year_prev, years)
        var_next = extract_year(data, year_next, years)
        var_out = wrapyear(var, var_prev, var_next, daymin, daymax, year)
        var_out = atm.expand_dims(var_out, 'year', year, axis=0)
        var_out = var_out.reindex_like(days)
        if y == 0:
            data_out = var_out
        else:
            data_out = xray.concat([data_out, var_out], dim=yearnm)

    return data_out
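# Hypothetical usage sketch for wrapyear_all on synthetic data.  Assumes the
# legacy xray package and atmos-tools (atm) are installed and that wrapyear
# (defined later in this collection) is in scope; values are made up.
import numpy as np
import xray

years = [2000, 2001, 2002]
days = np.arange(1, 366)
data = xray.DataArray(np.random.rand(len(years), len(days)),
                      dims=['year', 'day'],
                      coords={'year': years, 'day': days}, name='PRECIP')

# Extend each year's daily record roughly a month into the neighbouring years
data_ext = wrapyear_all(data, daymin=-30, daymax=395)
print(data_ext['day'].values.min(), data_ext['day'].values.max())   # -30 395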
Example #4
def contourf_latday(var,
                    is_precip=False,
                    clev=None,
                    cticks=None,
                    climits=None,
                    nc_pref=40,
                    grp=None,
                    xlims=(-120, 200),
                    xticks=np.arange(-120, 201, 30),
                    ylims=(-60, 60),
                    yticks=np.arange(-60, 61, 20),
                    dlist=None,
                    grid=False,
                    ind_nm='onset',
                    xlabels=True):
    """Create a filled contour plot of data on latitude-day grid.
    """
    var = atm.subset(var, {'lat': ylims})
    vals = var.values.T
    lat = atm.get_coord(var, 'lat')
    days = atm.get_coord(var, 'dayrel')
    if var.min() < 0:
        symmetric = True
    else:
        symmetric = False
    if is_precip:
        cmap = 'PuBuGn'
        extend = 'max'
    else:
        cmap = 'RdBu_r'
        extend = 'both'

    if clev is None:
        cint = atm.cinterval(vals, n_pref=nc_pref, symmetric=symmetric)
        clev = atm.clevels(vals, cint, symmetric=symmetric)
    elif len(atm.makelist(clev)) == 1:
        if is_precip:
            clev = np.arange(0, 10 + clev / 2.0, clev)
        else:
            clev = atm.clevels(vals, clev, symmetric=symmetric)

    plt.contourf(days, lat, vals, clev, cmap=cmap, extend=extend)
    plt.colorbar(ticks=cticks)
    plt.clim(climits)
    atm.ax_lims_ticks(xlims, xticks, ylims, yticks)
    plt.grid(grid)

    if dlist is not None:
        for d0 in dlist:
            plt.axvline(d0, color='k')
    if xlabels:
        plt.gca().set_xticklabels(xticks)
        plt.xlabel('Days Since ' + ind_nm.capitalize())
    else:
        plt.gca().set_xticklabels([])
    if grp is not None and grp.col == 0:
        plt.ylabel('Latitude')

    return None
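# Hypothetical usage sketch for contourf_latday on a synthetic anomaly field.
# Assumes the legacy xray package, matplotlib and atmos-tools (atm) are
# installed; the data values and contour levels are made up for illustration.
import numpy as np
import matplotlib.pyplot as plt
import xray

days = np.arange(-120, 201)
lat = np.arange(-60.0, 61.0, 2.0)
var = xray.DataArray(np.random.randn(len(days), len(lat)),
                     dims=['dayrel', 'lat'],
                     coords={'dayrel': days, 'lat': lat}, name='U200')

plt.figure()
contourf_latday(var, clev=np.arange(-3, 3.1, 0.5),
                cticks=np.arange(-3, 3.1, 1), dlist=[0])
plt.show()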
def theta_e_latmax(var):
    """Return latitude of maximum theta_e for each relative day, averaged over years."""
    lat = atm.get_coord(var, 'lat')
    coords = {'year' : var['year'], 'dayrel': var['dayrel']}
    latdim = atm.get_coord(var, 'lat', 'dim')
    latmax = lat[np.nanargmax(var, axis=latdim)]
    latmax = xray.DataArray(latmax, dims=['year', 'dayrel'], coords=coords)
    latmax = atm.dim_mean(latmax, 'year')
    return latmax
Example #6
def eddy_decomp(var, nt, lon1, lon2, taxis=0):
    """Decompose variable into mean and eddy fields."""

    lonname = atm.get_coord(var, 'lon', 'name')
    tstr = 'Time mean (%d-%s rolling)' % (nt, var.dims[taxis])
    lonstr = atm.latlon_labels([lon1, lon2], 'lon', deg_symbol=False)
    lonstr = 'zonal mean (' + '-'.join(lonstr) + ')'
    name, attrs, coords, dims = atm.meta(var)

    varbar = atm.rolling_mean(var, nt, axis=taxis, center=True)
    varbarzon = atm.subset(varbar, {lonname : (lon1, lon2)})
    varbarzon = varbarzon.mean(dim=lonname)
    varbarzon.attrs = attrs

    comp = xray.Dataset()
    comp[name + '_AVG'] = varbarzon
    comp[name + '_AVG'].attrs['component'] = tstr + ', ' + lonstr
    comp[name + '_ST'] = varbar - varbarzon
    comp[name + '_ST'].attrs = attrs
    comp[name + '_ST'].attrs['component'] = 'Stationary eddy'
    comp[name + '_TR'] = var - varbar
    comp[name + '_TR'].attrs = attrs
    comp[name + '_TR'].attrs['component'] = 'Transient eddy'

    return comp
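# Hypothetical sketch checking that the eddy_decomp components recover the
# original field: U_AVG + U_ST + U_TR == U (up to NaNs at the rolling-mean
# edges).  Assumes the legacy xray package and atmos-tools (atm) are installed.
import numpy as np
import xray

days = np.arange(1, 91)
lat = np.arange(-30.0, 31.0, 5.0)
lon = np.arange(0.0, 360.0, 10.0)
u = xray.DataArray(np.random.rand(len(days), len(lat), len(lon)),
                   dims=['day', 'lat', 'lon'],
                   coords={'day': days, 'lat': lat, 'lon': lon}, name='U')
u.attrs['units'] = 'm/s'

comp = eddy_decomp(u, nt=5, lon1=60, lon2=100, taxis=0)
recon = comp['U_AVG'] + comp['U_ST'] + comp['U_TR']
print(float(abs(recon - u).max()))   # ~0 away from the rolling-mean edges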
Example #7
def wrapyear(data, data_prev, data_next, daymin, daymax, year=None):
    """Wrap daily data from previous and next years for extended day ranges.
    """
    daynm = atm.get_coord(data, 'day', 'name')

    def leap_adjust(data, year):
        data = atm.squeeze(data)
        ndays = 365
        if year is not None and atm.isleap(year):
            ndays += 1
        else:
            # Remove NaN for day 366 in non-leap year
            data = atm.subset(data, {'day' : (1, ndays)})
        return data, ndays

    data, ndays = leap_adjust(data, year)
    if data_prev is not None:
        data_prev, ndays_prev = leap_adjust(data_prev, year - 1)
        data_prev[daynm] = data_prev[daynm] - ndays_prev
        data_out = xray.concat([data_prev, data], dim=daynm)
    else:
        data_out = data
    if data_next is not None:
        data_next, _ = leap_adjust(data_next, year + 1)
        data_next[daynm] = data_next[daynm] + ndays
        data_out = xray.concat([data_out, data_next], dim=daynm)
    data_out = atm.subset(data_out, {daynm : (daymin, daymax)})

    return data_out
Example #8
def plot_reg(pts_reg, nm, clev=0.2, xsample=1, ysample=1,
             axlims=(5, 32, 60, 100), cline=None, color='0.3', alpha=1.0,
             markersize=2):
    """Plot regression of grid point indices onto large-scale index."""
    var = pts_reg[nm]['m']
    mask = pts_reg[nm]['pts_mask']
    xname = atm.get_coord(mask, 'lon', 'name')
    yname = atm.get_coord(mask, 'lat', 'name')
    atm.contourf_latlon(var, clev=clev, axlims=axlims, extend='both')
    atm.stipple_pts(mask, xname=xname, yname=yname, xsample=xsample,
                    ysample=ysample, color=color, alpha=alpha,
                    markersize=markersize)
    if cline is not None:
        atm.contour_latlon(var, clev=[cline], axlims=axlims, colors='b',
                           linewidths=2)
    fix_axes(axlims)
def latlon_plot(varnm, reg, day_or_season, coeff='m', stipple_kw={},
                axlims=(-60, 60, 40, 120)):
    regdata = reg[varnm + '_latlon']
    keys = [key for key in regdata if key.endswith('_' + coeff)]
    clim = atm.climits(regdata[keys].to_array(), symmetric=True,
                        percentile=99.9)
    xname, yname = 'lon', 'lat'
    lat = atm.get_coord(regdata, 'lat')
    if max(np.diff(lat)) > 1:
        xsample, ysample = 1, 1
    else:
        xsample, ysample = 2, 2
    if isinstance(day_or_season, int):
        key = varnm + '_DAILY_'
        var = regdata[key + coeff].sel(dayrel=day_or_season)
        p = regdata[key + 'p'].sel(dayrel=day_or_season)
        titlestr = varnm + ' Day %d' % day_or_season
    else:
        key = varnm + '_' + day_or_season + '_'
        var = regdata[key + coeff]
        p = regdata[key + 'p']
        titlestr = varnm + ' ' + day_or_season
    pts_mask = stipple_mask(p)
    atm.pcolor_latlon(var, axlims=axlims, fancy=False)
    plt.clim(clim)
    atm.stipple_pts(pts_mask, xname, yname, xsample, ysample, **stipple_kw)
    plt.title(titlestr)
Example #10
def daily_rel2onset(data, d_onset, npre, npost):
    """Return subset of daily data aligned relative to onset day.

    Parameters
    ----------
    data : xray.DataArray
        Daily data.
    d_onset : ndarray
        Array of onset date (day of year) for each year.
    npre, npost : int
        Number of days before and after onset to extract.

    Returns
    -------
    data_out : xray.DataArray
        Subset of N days of daily data for each year, where
        N = npre + npost + 1 and the day dimension is
        dayrel = day - d_onset.
    """

    name, attrs, coords, dimnames = atm.meta(data)
    yearnm = atm.get_coord(data, 'year', 'name')
    daynm = atm.get_coord(data, 'day', 'name')
    years = atm.makelist(atm.get_coord(data, 'year'))

    if isinstance(d_onset, xray.DataArray):
        d_onset = d_onset.values
    else:
        d_onset = atm.makelist(d_onset)

    relnm = daynm + 'rel'

    for y, year in enumerate(years):
        dmin, dmax = d_onset[y] - npre, d_onset[y] + npost
        subset_dict = {yearnm : (year, None), daynm : (dmin, dmax)}
        sub = atm.subset(data, subset_dict)
        sub = sub.rename({daynm : relnm})
        sub[relnm] = sub[relnm] - d_onset[y]
        sub[relnm].attrs['long_name'] = 'Day of year relative to onset day'
        if y == 0:
            data_out = sub
        else:
            data_out = xray.concat([data_out, sub], dim=yearnm)

    data_out.attrs['d_onset'] = d_onset

    return data_out
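# Hypothetical usage sketch for daily_rel2onset with synthetic daily data and
# made-up onset days.  Assumes the legacy xray package and atmos-tools (atm)
# are installed.
import numpy as np
import xray

years = [2000, 2001]
days = np.arange(1, 366)
data = xray.DataArray(np.random.rand(len(years), len(days)),
                      dims=['year', 'day'],
                      coords={'year': years, 'day': days}, name='PRECIP')

d_onset = np.array([152, 160])            # hypothetical onset day for each year
rel = daily_rel2onset(data, d_onset, npre=30, npost=60)
print(rel['dayrel'].values[[0, -1]])      # [-30  60]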
Example #11
def contour_londay(var, clev=None, grp=None, n_pref=40,
                   yticks=np.arange(-120, 201, 30)):
    """Create a filled contour plot of data on longitude-day grid."""
    lon = atm.get_coord(var, 'lon')
    days = atm.get_coord(var, 'dayrel')
    if clev is None:
        cint = atm.cinterval(var, n_pref=n_pref, symmetric=True)
        clev = atm.clevels(var, cint, symmetric=True)
    plt.contourf(lon, days, var, clev, cmap='RdBu_r', extend='both')
    plt.grid()
    plt.colorbar()
    #plt.gca().invert_yaxis()
    plt.yticks(yticks)
    plt.axhline(0, color='k')
    if grp is not None and grp.row == grp.nrow - 1:
        plt.xlabel('Longitude')
    if grp is not None and grp.col == 0:
        plt.ylabel('Rel Day')
Example #12
def calc_dp(var, plev):
    """Extract subset of pressure levels and calculate d/dp."""
    plevs = atm.get_coord(var, 'plev')
    pname = atm.get_coord(var, 'plev', 'name')
    pdim = atm.get_coord(var, 'plev', 'dim')
    ind = (list(plevs)).index(plev)
    i1 = max(0, ind - 1)
    i2 = min(len(plevs) - 1, ind + 1) + 1
    psub = plevs[i1:i2]
    varsub = var.sel(**{pname : psub})
    pres = atm.pres_convert(psub, 'hPa', 'Pa')
    atm.disptime()
    print('Computing d/dp for pressure level %d' % plev)
    dvar = atm.gradient(varsub, pres, axis=pdim)
    dvar = dvar.sel(**{pname : plev})
    dvar.name = 'D%sDP' % var.name
    atm.disptime()
    return dvar
def all_WLH(cmap_file, yearmin=None, yearmax=None, climatology=False,
            kmax=12, threshold=5.0, onset_min=20):
    """Compute WLH onset index from CMAP precipitation data."""
    precip = precipdat.read_cmap(cmap_file, yearmin, yearmax)
    lat = atm.get_coord(precip, 'lat')
    lon = atm.get_coord(precip, 'lon')
    years = precip.year
    if climatology:
        precip = precip.mean(dim='year')
        axis = 0
    else:
        axis = 1
    wlh = onset_WLH(precip, axis, kmax, threshold, onset_min)
    wlh['precip'] = precip
    wlh['lat'] = lat
    wlh['lon'] = lon
    wlh['years'] = years
    wlh['climatology'] = climatology
    return wlh
Example #14
def contourf_londay(var,
                    clev=None,
                    grp=None,
                    n_pref=40,
                    yticks=np.arange(-120, 201, 30)):
    """Create a filled contour plot of data on longitude-day grid.
    """
    lon = atm.get_coord(var, 'lon')
    days = atm.get_coord(var, 'dayrel')
    if clev is None:
        cint = atm.cinterval(var, n_pref=n_pref, symmetric=True)
        clev = atm.clevels(var, cint, symmetric=True)

    plt.contourf(lon, days, var, clev, cmap='RdBu_r', extend='both')
    plt.colorbar()
    plt.yticks(yticks)
    plt.axhline(0, color='0.5', linestyle='--', dashes=[6, 1])
    if grp is not None and grp.row == grp.nrow - 1:
        plt.xlabel('Longitude')
    if grp is not None and grp.col == 0:
        plt.ylabel('Days Since Onset')
Example #15
def latlon_data(var, latmax=89):
    """Return lat, lon coords in radians and cos(lat)."""

    data = xray.Dataset()

    # Latitude
    latname = atm.get_coord(var, 'lat', 'name')
    latdim = atm.get_coord(var, 'lat', 'dim')
    lat = atm.get_coord(var, 'lat')
    latcoords = {latname: lat.copy()}
    lat[abs(lat) > latmax] = np.nan
    data['LAT'] = xray.DataArray(lat, coords=latcoords)
    latrad = np.radians(lat)
    data['LATRAD'] = xray.DataArray(latrad, coords=latcoords)
    data['COSLAT'] = np.cos(data['LATRAD'])
    data.attrs['latname'] = latname
    data.attrs['latdim'] = latdim

    # Longitude
    try:
        lonname = atm.get_coord(var, 'lon', 'name')
        londim = atm.get_coord(var, 'lon', 'dim')
        lon = atm.get_coord(var, 'lon')
        loncoords = {lonname : lon.copy()}
        data['LON'] = xray.DataArray(lon, coords=loncoords)
        lonrad = np.radians(lon)
        data['LONRAD'] = xray.DataArray(lonrad, coords=loncoords)
        data.attrs['lonname'] = lonname
        data.attrs['londim'] = londim
    except ValueError:
        data.attrs['lonname'] = None
        data.attrs['londim'] = None

    return data
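# Hypothetical usage sketch for latlon_data on a dummy grid.  Assumes the
# legacy xray package and atmos-tools (atm) are installed.
import numpy as np
import xray

lat = np.arange(-90.0, 91.0, 30.0)
lon = np.arange(0.0, 360.0, 60.0)
var = xray.DataArray(np.zeros((len(lat), len(lon))), dims=['lat', 'lon'],
                     coords={'lat': lat, 'lon': lon}, name='DUMMY')

geo = latlon_data(var)
print(geo['COSLAT'].values)   # cos(lat), with NaN poleward of latmax=89
print(geo.attrs['latname'], geo.attrs['lonname'])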
def sector_plot(var, p, stipple_kw={}, grp=None, ylim=None,
                yticks=None, clim=None):
    """Plot var on a day-latitude grid with stippling where p passes the mask."""
    xname, yname = 'dayrel', 'lat'
    pts_mask = stipple_mask(p)
    lat = atm.get_coord(var, 'lat')
    days = atm.get_coord(var, 'dayrel')
    xsample = 3
    if max(np.diff(lat)) > 1:
        ysample = 1
    else:
        ysample = 2
    #utils.contourf_lat_time(lat, days, var)
    vals = np.ma.masked_array(var.T.values, mask=np.isnan(var.T))
    plt.pcolormesh(days, lat, vals, cmap='RdBu_r')
    cb = plt.colorbar()
    atm.stipple_pts(pts_mask, xname, yname, xsample, ysample, **stipple_kw)
    plt.title(var.name)
    plt.xlabel('Relative Day')
    plt.ylabel('Latitude')
    plt.grid(True)
    xticks = np.arange(-120, 201, 30)
    xlims = (-120, 200)
    plt.xlim(xlims)
    plt.xticks(xticks)
    if grp is not None:
        if grp.col > 0:
            plt.ylabel('')
        if grp.row < grp.nrow - 1:
            plt.xlabel('')
    if ylim is not None:
        plt.ylim(ylim)
    if yticks is not None:
        plt.yticks(yticks)
    if clim is not None:
        plt.clim(clim)
    else:
        clim = atm.climits(var, symmetric=True, percentile=99.9)
        plt.clim(clim)
def lineplot(sectors, ax1=None, y1_label='', y2_label='', title='',
             latmin=None, latmax=None,
             legend_opts = {'fontsize' : 9, 'loc' : 'lower center',
                            'handlelength' : 2.5, 'frameon' : False},
             ax2_color='r', ax2_alpha=0.5, row=1, nrow=1, y1_lims=None):
    if ax1 is None:
        ax1 = plt.gca()

    ax1_fmts = [{'color' : 'k', 'linestyle' : 'dashed'}, {'color' : 'k'},
                {'color' : 'k', 'linewidth' : 1.5}]
    ax2_fmts = [{'linewidth' : 2, 'alpha' : ax2_alpha, 'color' : ax2_color}]
    lat = atm.get_coord(sectors, 'lat')
    ax1.set_title(title)
    if row < nrow - 1:
        ax1.set_xticklabels([])
    else:
        ax1.set_xlabel('Latitude')
    if y1_lims is not None:
        ax1.set_ylim(y1_lims)
    ax1.set_ylabel(y1_label)
    ax1.grid(True)
    i1, i2 = 0, 0
    for key in sectors.data_vars:
        var = sectors[key]
        if var.attrs['axis'] == 1:
            ax, fmts = ax1, ax1_fmts[i1]
            i1 += 1
        else:
            if i2 == 0:
                ax2 = ax1.twinx()
            ax, fmts = ax2, ax2_fmts[i2]
            i2 += 1
        ax.plot(lat, var, label=key, **fmts)

    if latmin is not None:
        ax1.set_xlim(latmin, latmax)

    if legend_opts is not None:
        legend_opts['ncol'] = i1 + i2
        if i2 > 0:
            atm.legend_2ax(ax1, ax2, **legend_opts)
        else:
            ax1.legend(**legend_opts)
    if i2 > 0:
        # ax2.set_ylabel(y2_label, color=ax2_color, alpha=ax2_alpha)
        for t1 in ax2.get_yticklabels():
            t1.set_color(ax2_color)

    plt.draw()
    return None
Example #18
def contourf_latday(var, clev=None, title='', nc_pref=40, grp=None,
                    xlims=(-120, 200), xticks=np.arange(-120, 201, 30),
                    ylims=(-60, 60), yticks=np.arange(-60, 61, 20),
                    dlist=None, grid=False):
    """Create a filled contour plot of data on latitude-day grid."""
    vals = var.values.T
    lat = atm.get_coord(var, 'lat')
    days = atm.get_coord(var, 'dayrel')
    if var.min() >= 0:
        cmap, extend, symmetric = 'PuBuGn', 'max', False
    else:
        cmap, extend, symmetric = 'RdBu_r', 'both', True
    if clev is None:
        cint = atm.cinterval(vals, n_pref=nc_pref, symmetric=symmetric)
        clev = atm.clevels(vals, cint, symmetric=symmetric)
    elif len(atm.makelist(clev)) == 1:
        if var.name == 'PREC':
            clev = np.arange(0, 10 + clev/2.0, clev)
        else:
            clev = atm.clevels(vals, clev, symmetric=symmetric)
    cticks_dict = {'PRECTOT' : np.arange(0, 13, 2),
                   'PREC' : np.arange(0, 11, 2),
                   'T200' : np.arange(-208, 227, 2),
                   'U200' : np.arange(-60, 61, 10),
                   'PSI500' : np.arange(-800, 801, 200)}
    cticks = cticks_dict.get(var.name)
    plt.contourf(days, lat, vals, clev, cmap=cmap, extend=extend)
    plt.colorbar(ticks=cticks)
    atm.ax_lims_ticks(xlims, xticks, ylims, yticks)
    plt.grid(grid)
    plt.title(title)
    if dlist is not None:
        for d0 in dlist:
            plt.axvline(d0, color='k')
    if grp is not None and grp.row == grp.nrow - 1:
        plt.xlabel('Rel Day')
    if grp is not None and grp.col == 0:
        plt.ylabel('Latitude')
Example #19
def plot_index_years(index, nrow=3, ncol=4,
                     fig_kw={'figsize' : (11, 7), 'sharex' : True,
                             'sharey' : True},
                     gridspec_kw={'left' : 0.1, 'right' : 0.95, 'wspace' : 0.05,
                                  'hspace' : 0.1},
                     incl_fit=False, suptitle='', xlabel='Day', ylabel='Index',
                     xlims=None, ylims=None, xticks=np.arange(0, 401, 100),
                     grid=True):
    """Plot daily timeseries of monsoon onset/retreat index each year.
    """

    years = atm.get_coord(index, 'year')
    days = atm.get_coord(index, 'day')
    grp = atm.FigGroup(nrow, ncol, fig_kw=fig_kw, gridspec_kw=gridspec_kw,
                       suptitle=suptitle)
    for year in years:
        grp.next()
        ind = atm.subset(index, {'year' : (year, year)}, squeeze=True)
        ts = ind['tseries']
        d0_list = [ind['onset'], ind['retreat']]
        plt.plot(days, ts, 'k')
        for d0 in d0_list:
            plt.axvline(d0, color='k')
        if incl_fit and 'tseries_fit_onset' in ind:
            plt.plot(days, ind['tseries_fit_onset'], 'r')
        if incl_fit and 'tseries_fit_retreat' in ind:
            plt.plot(days, ind['tseries_fit_retreat'], 'b')
        atm.text(year, (0.05, 0.9))
        atm.ax_lims_ticks(xlims=xlims, ylims=ylims, xticks=xticks)
        plt.grid(grid)
        if grp.row == grp.nrow - 1:
            plt.xlabel(xlabel)
        if grp.col == 0:
            plt.ylabel(ylabel)

    return grp
def var_calcs(var, jday=0, latlon=(-90, 90, 40, 120), plevs=(850, 200),
              dp_vars=['U', 'OMEGA'], sector_lons=(60, 100)):
    """Process a single variable from a single day."""
    lat1, lat2, lon1, lon2 = latlon
    opts = merra.url_opts(var.name)
    vertical = opts['vertical']
    if vertical == 'X':
        plevs = [None]
    if dp_vars is not None and var.name in dp_vars:
        dp = True
    else:
        dp = False
    data = xray.Dataset()

    # Lat-lon data
    print('Lat-lon data')
    for plev in plevs:
        print('plev', plev)
        var_out = latlon_data(var, lat1, lat2, lon1, lon2, plev)
        data[var_out.name] = var_out
        if dp:
            print('Computing d/dp')
            var_out = pgradient(var, lat1, lat2, lon1, lon2, plev)
            data[var_out.name] = var_out

    # Sector and zonal mean data
    print('Computing zonal mean')
    var_out = sector_mean(var, 0, 360)
    data[var_out.name] = var_out
    if vertical == 'P':
        print('Computing sector mean')
        var_out = sector_mean(var, sector_lons[0], sector_lons[1])
        data[var_out.name] = var_out

    # Compute daily data from subdaily data
    nperday = len(atm.get_coord(data, 'time'))
    data = atm.daily_from_subdaily(data, nperday, dayname='day',
                                   dayvals=[jday])

    # Make sure output is in a Dataset
    if isinstance(data, xray.DataArray):
        data = data.to_dataset()

    return data
def housekeeping(var):
    """Convert units and fill Ro200 with NaNs near the equator."""
    # Convert units
    unit_dict = {'m' : ('km', 1e-3)}
    units_in = var.attrs.get('units')
    if units_in in unit_dict:
        attrs = var.attrs
        attrs['units'] = unit_dict[units_in][0]
        var = var * unit_dict[units_in][1]
        var.attrs = attrs

    # Fill Ro200 with NaNs near equator
    if var.name == 'Ro200':
        latbuf = 5
        lat = atm.get_coord(var, 'lat')
        latbig = atm.biggify(lat, var, tile=True)
        vals = var.values
        vals = np.where(abs(latbig)>latbuf, vals, np.nan)
        var.values = vals

    return var
Example #22
def v_components(ubudget, scale=None, eqbuf=5.0):
    """Return mean, eddy-driven, etc components of v for streamfunction.
    """

    comp_dict = {'MMC' : 'ADV_AVG', 'PGF' : 'PGF_ST', 'EDDY_ST' : 'EMFC_ST',
                 'EDDY_TR' : 'EMFC_TR', 'EDDY_CRS' : 'ADV_CRS'}

    if scale is not None:
        ubudget = ubudget * scale
    latname = atm.get_coord(ubudget, 'lat', 'name')
    lat = ubudget[latname]
    f = atm.coriolis(lat)
    f[abs(lat) < eqbuf] = np.nan

    v = xray.Dataset()
    v['TOT'] = ubudget['COR'] / f
    for nm in sorted(comp_dict):
        v[nm] = - ubudget[comp_dict[nm]] / f
    v['EDDY'] = v['EDDY_CRS'] + v['EDDY_TR'] + v['EDDY_ST']
    v['RESID'] = v['TOT'] - v['MMC'] - v['PGF'] - v['EDDY']

    return v
Example #23
def onset_OCI(u, latlon=(5, 15, 40, 80), mmdd_thresh=(6, 1),
              ndays=7, yearnm='Year', daynm='Day'):
    """Return monsoon Onset Circulation Index.

    Parameters
    ----------
    u : xray.DataArray
        850 hPa zonal wind.
    latlon : 4-tuple of floats, optional
        Tuple of (lat1, lat2, lon1, lon2) defining South Arabian Sea
        region to average over.
    mmdd_thresh : 2-tuple of ints, optional
        Tuple of (month, day) defining climatological mean onset date
        to use for threshold value of u.
    ndays : int, optional
        Number of consecutive days threshold must be exceeded to
        define onset.
    yearnm, daynm : str, optional
        Name of year and day dimensions in DataArray

    Returns
    -------
    oci : xray.Dataset
        OCI daily timeseries for each year and monsoon onset day for
        each year.

    Reference
    ---------
    Wang, B., Ding, Q., & Joseph, P. V. (2009). Objective Definition
        of the Indian Summer Monsoon Onset. Journal of Climate, 22(12),
        3303-3316.
    """

    days = atm.get_coord(u, coord_name=daynm)
    years = atm.get_coord(u, coord_name=yearnm)
    nyears = len(years)

    # Average over South Arabian Sea region
    lat1, lat2, lon1, lon2 = latlon
    ubar = atm.mean_over_geobox(u, lat1, lat2, lon1, lon2)

    # Find values at climatological onset
    m0, d0 = mmdd_thresh
    d0 = [atm.mmdd_to_jday(m0, d0, year) for year in years]
    u0 = [ubar.sel(**{daynm : day, yearnm : year}).values
          for year, day in zip(years, d0)]
    u0 = np.array(u0).flatten()
    uthreshold = np.mean(u0)

    # Find first day when OCI exceeds threshold and stays above the
    # threshold for consecutive ndays
    def onset_day(tseries, uthreshold, ndays, daynm):
        above = (tseries.values > uthreshold)
        d0 = above.argmax()
        while not above[d0:d0+ndays].all():
            d0 += 1
        return tseries[daynm].values[d0]

    # Find onset day for each year
    onset = [onset_day(ubar[y], uthreshold, ndays, daynm)
             for y in range(nyears)]

    # Pack into dataset
    oci = xray.Dataset()
    oci['tseries'] = ubar
    oci['onset'] = xray.DataArray(onset, coords={yearnm : years})
    oci.attrs['latlon'] = latlon
    oci.attrs['mmdd_thresh'] = mmdd_thresh
    oci.attrs['ndays'] = ndays
    return oci
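# The onset criterion above (first day on which the index exceeds the threshold
# and then stays above it for ndays consecutive days) can be illustrated in
# isolation with plain numpy and made-up values:
import numpy as np

def onset_day_demo(values, days, threshold, ndays):
    """Return the first day whose next ndays values all exceed threshold."""
    above = values > threshold
    d0 = above.argmax()
    while not above[d0:d0 + ndays].all():
        d0 += 1
    return days[d0]

days = np.arange(1, 21)
vals = np.array([0, 1, 6, 2, 1, 3, 7, 8, 9, 7, 6, 8, 9, 5, 4, 3, 2, 1, 0, 0.0])
print(onset_day_demo(vals, days, threshold=5.0, ndays=3))   # 7 (days 7-9 all > 5)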
Example #24
def get_var(datadir, varnm, subset, year):
    """Read a single variable from a MERRA netcdf file."""
    filenm = '%smerra_%s%s_%d.nc' % (datadir, varnm, subset, year)
    with xray.open_dataset(filenm) as ds:
        var = ds[varnm].load()
    return var


uq_int = get_var(datadir, 'UFLXQV', subset, year)
vq_int = get_var(datadir, 'VFLXQV', subset, year)

mfc = atm.moisture_flux_conv(uq_int, vq_int, already_int=True)
mfcbar = mfc.mean(dim='YDim').mean(dim='XDim')

# Test atm.gradient
a = atm.constants.radius_earth.values
latdim, londim = 1, 2
lat = atm.get_coord(uq_int, 'lat')
latrad = np.radians(lat)
latrad[abs(lat) > 89] = np.nan
coslat = xray.DataArray(np.cos(latrad), coords={'YDim': lat})
lon = atm.get_coord(uq_int, 'lon')
lonrad = np.radians(lon)

mfc_x = atm.gradient(uq_int, lonrad, londim) / (a * coslat)
mfc_y = atm.gradient(vq_int * coslat, latrad, latdim) / (a * coslat)
mfc_test = mfc_x + mfc_y
mfc_test = -atm.precip_convert(mfc_test, 'kg/m2/s', 'mm/day')
mfc_test_bar = mfc_test.mean(dim='YDim').mean(dim='XDim')

diff = mfc_test - mfc
print(diff.max())
print(diff.min())
Example #25
    with xray.open_dataset(relfiles[nm]) as ds:
        if nm == 'VFLXLQV':
            var = ds['VFLXQV'].load()
            data[nm] = var * atm.constants.Lv.values
        else:
            data[nm] = ds[nm].load()

# Scale units and rename variables
data = data * scale
nms = data.data_vars.keys()
for nm in nms:
    data = data.rename({nm : nm.replace('FLX', '')})


# Take subset and smooth with rolling mean
daydim = atm.get_coord(data['VMSE'], 'dayrel', 'dim')
for nm in data.data_vars:
    data[nm] = atm.rolling_mean(data[nm], nroll, axis=daydim, center=True)

# Average over equatorial region
data_eq = atm.dim_mean(data, 'lat', eqlat1, eqlat2)

# Cross-equatorial fluxes integrated over sectors
a = atm.constants.radius_earth.values
eq_int = xray.Dataset()
eq_int.attrs['units'] = sector_units
lonranges = [(40, 60), (40, 100), (lon1, lon2)]
eq_int.attrs['lonranges'] = ['%dE-%dE' % lonrange for lonrange in lonranges]
for lonrange in lonranges:
    lon1, lon2 = lonrange
    dist = a * np.radians(lon2 - lon1)
Example #26
    for nm in datafiles[key]:
        var, _, _ = utils.load_dailyrel(datafiles[key][nm])
        if 'year' in var.dims:
            var = var.mean(dim='year')
        data[key][nm] = var


# ----------------------------------------------------------------------
# Housekeeping

# Fill Ro200 with NaNs near equator
varnm = 'Ro200'
for key1 in data:
    if varnm in data[key1]:
        latbuf = 5
        lat = atm.get_coord(data[key1][varnm], 'lat')
        latbig = atm.biggify(lat, data[key1][varnm], tile=True)
        vals = data[key1][varnm].values
        vals = np.where(abs(latbig)>latbuf, vals, np.nan)
        data[key1][varnm].values = vals

# ----------------------------------------------------------------------
# Sector mean data

lonname, latname = 'XDim', 'YDim'
sectordata = {}
for key1 in data:
    sectordata[key1] = collections.OrderedDict()
    for varnm in data[key1]:
        var = atm.subset(data[key1][varnm], {lonname : (lon1, lon2)})
        sectordata[key1][varnm] = var.mean(dim=lonname)
import xray
import numpy as np
import matplotlib.pyplot as plt
import atmos as atm
from atmos import daily_from_subdaily

datadir = '/home/jwalker/eady/datastore/'
#datadir = '/home/jennifer/datastore/'

# ----------------------------------------------------------------------
# MERRA Daily
filename = datadir + 'merra/daily/merra_u200_198601.nc'
ds = atm.ncload(filename)
u = ds['U']
lat = atm.get_coord(u, 'lat')
lon = atm.get_coord(u, 'lon')

# Number of time points per day
n = 8

# Daily mean
u_split = atm.split_timedim(u, n, time0_name='day')
u_new = daily_from_subdaily(u, n, dayvals=np.arange(1,32))
print(np.array_equal(u_new, u_split.mean(axis=1)))

# ndarray version
u_new2 = daily_from_subdaily(u.values, n)
print(np.array_equal(u_new, u_new2))

# Sub-sample version
i = 2
Example #28
        varid, plev = get_info(varnm)
        var = get_data_rel(varid, plev, year, files.get(varnm), data, d0, npre,
                           npost)
        var.attrs = atm.odict_delete(var.attrs, 'd_onset')
        var.name = varid
        data[varnm] = var
        savefile = get_savefile(version, savedir, varnm, onset_nm, ind_nm, year)
        print('Saving to ' + savefile)
        atm.save_nc(savefile, var, onset_var, retreat_var)

# ----------------------------------------------------------------------
# Compute climatologies and save
relfiles = {}
for key in datafiles:
    relfiles[key] = [get_savefile(version, savedir, key, onset_nm, ind_nm, yr)
                     % yr for yr in years]

for varnm in relfiles:
    varid, _ = get_info(varnm)
    var, onset, retreat = load_dailyrel(relfiles[varnm])
    ds = xray.Dataset()
    ds[varid], ds['D_ONSET'], ds['D_RETREAT'] = var, onset, retreat
    print('Computing climatological mean')
    yearnm = atm.get_coord(ds, 'year', 'name')
    ds = ds.mean(dim=yearnm)
    ds[varid].attrs = var.attrs
    ds[varid].attrs['years'] = years
    filn = relfiles[varnm][0].replace('%d' % years[0], yearstr)
    print('Saving to ' + filn)
    ds.to_netcdf(filn)
tt = onset_TT(T, north=north, south=south)

# Some weirdness going on in 1991, for now just set to NaN
# Troubleshoot later
for nm in ['ttn', 'tts', 'tseries']:
    vals = tt[nm].values
    vals = np.ma.masked_array(vals, abs(vals) > 1e30).filled(np.nan)
    tt[nm].values = vals

# Plot TTN and TTS
plot_tseries_together(tt[['ttn', 'tts']], tt['onset'].values,
                      suptitle=suptitle, standardize=False)

# Summary plot and timeseries in individual years
summarize_indices(years, tt['onset'])
plt.suptitle(suptitle)
plot_index_years(tt, suptitle=suptitle, yearnm='year', daynm='day')

if save:
    atm.savefigs(varname + '_', 'png')

# Plot contour map of pressure-level data
p_plot = 400
T_plot = T_p[p_plot]
y, d = 0, 80
lat = atm.get_coord(T_plot, 'lat')
lon = atm.get_coord(T_plot, 'lon')
axlims = (lat.min(), lat.max(), lon.min(), lon.max())
plt.figure()
atm.pcolor_latlon(T_plot[y, d], axlims=axlims)
Example #30
def onset_SJKE(u, v, latlon=(-5, 20, 50, 70), ndays=3, yearnm='Year',
               daynm='Day', thresh_std=1.0):
    """Return monsoon onset based on Somali Jet kinetic energy.

    Parameters
    ----------
    u, v : xray.DataArray
        850 hPa zonal and meridional wind.
    latlon : 4-tuple of floats, optional
        Tuple of (lat1, lat2, lon1, lon2) defining Somali jet region
        to average over.
    ndays : int, optional
        Number of consecutive days threshold must be exceeded to
        define onset.
    yearnm, daynm : str, optional
        Name of year and day dimensions in DataArray
    thresh_std : float, optional
        Number of standard deviations excursion to use as onset threshold.

    Returns
    -------
    sjke : xray.Dataset
        Somali jet index daily timeseries for each year and monsoon
        onset day for each year.

    Reference
    ---------
    Boos, W. R., & Emanuel, K. A. (2009). Annual intensification of the
        Somali jet in a quasi-equilibrium framework : Observational
        composites. Quarterly Journal of the Royal Meteorological
        Society, 135, 319-335.
    """

    days = atm.get_coord(u, coord_name=daynm)
    years = atm.get_coord(u, coord_name=yearnm)
    nyears = len(years)

    # Kinetic energy index
    ke = np.sqrt(u**2 + v**2)

    # Average over Somali jet region
    lat1, lat2, lon1, lon2 = latlon
    ke = atm.mean_over_geobox(ke, lat1, lat2, lon1, lon2)
    ke.attrs['title'] = 'KE'
    ke.attrs['long_name'] = 'sqrt(u**2 + v**2)'

    # Threshold for onset date
    vals = ke.values.flatten()
    keclim = np.nanmean(vals)
    kestd = np.nanstd(vals)
    threshold = keclim + thresh_std * kestd

    # Find first day when KE exceeds threshold and stays above the
    # threshold for consecutive ndays
    def onset_day(tseries, threshold, ndays, daynm):
        above = (tseries.values > threshold)
        d0 = above.argmax()
        while not above[d0:d0+ndays].all():
            d0 += 1
        return tseries[daynm].values[d0]

    # Find onset day for each year
    onset = [onset_day(ke[y], threshold, ndays, daynm)
             for y in range(nyears)]

    # Pack into dataset
    sjke = xray.Dataset()
    sjke['tseries'] = ke
    sjke['onset'] = xray.DataArray(onset, coords={yearnm : years})
    sjke.attrs = {'latlon' : latlon, 'thresh_std' : thresh_std,
                  'threshold' : threshold, 'ndays' : ndays}

    return sjke
# ----------------------------------------------------------------------
# Monthly climatology

yearstr = '1979-2015'
varnms = ['U', 'V']
datadir = atm.homedir() + 'datastore/merra/monthly/'
filestr = datadir + 'merra_%s_%s.nc'
files = {nm: filestr % (nm, yearstr) for nm in varnms}

data = xray.Dataset()
for nm in varnms:
    with xray.open_dataset(files[nm]) as ds:
        data[nm] = ds[nm].load()

lat = atm.get_coord(data, 'lat')
lon = atm.get_coord(data, 'lon')
psfile = atm.homedir() + 'dynamics/python/atmos-tools/data/topo/ncep2_ps.nc'
ps = atm.get_ps_clim(lat, lon, psfile)
ps = ps / 100

figsize = (7, 9)
omitzero = False

for ssn in ['ANN', 'DJF', 'JJA', 'MAR']:
    for lonlims in [(0, 360), (60, 100)]:
        lon1, lon2 = lonlims
        lonstr = atm.latlon_str(lon1, lon2, 'lon')
        suptitle = ssn + ' ' + lonstr
        months = atm.season_months(ssn)
        v = data['V'].sel(month=months)
Example #32
def onset_HOWI(uq_int, vq_int, npts=50, nroll=7, days_pre=range(138, 145),
               days_post=range(159, 166), yearnm='year', daynm='day',
               maxbreak=7):
    """Return monsoon Hydrologic Onset/Withdrawal Index.

    Parameters
    ----------
    uq_int, vq_int : xray.DataArrays
        Vertically integrated moisture fluxes.
    npts : int, optional
        Number of points to use to define HOWI index.
    nroll : int, optional
        Number of days for rolling mean.
    days_pre, days_post : list of ints, optional
        Default values correspond to May 18-24 and June 8-14 (numbered
        as non-leap year).
    yearnm, daynm : str, optional
        Name of year and day dimensions in DataArray
    maxbreak : int, optional
        Maximum number of days with index <=0 to consider a break in
        monsoon season rather than end of monsoon season.

    Returns
    -------
    howi : xray.Dataset
        HOWI daily timeseries for each year and monsoon onset and retreat
        days for each year.

    Reference
    ---------
    J. Fasullo and P. J. Webster, 2003: A hydrological definition of
        Indian monsoon onset and withdrawal. J. Climate, 16, 3200-3211.

    Notes
    -----
    In some years the HOWI index can give a bogus onset or bogus retreat
    when the index briefly goes above or below 0 for a few days.  To deal
    with these cases, I'm defining the monsoon season as the longest set
    of consecutive days with HOWI that is positive or has been negative
    for no more than `maxbreak` number of days (monsoon break).
    """

    _, _, coords, _ = atm.meta(uq_int)
    latnm = atm.get_coord(uq_int, 'lat', 'name')
    lonnm = atm.get_coord(uq_int, 'lon', 'name')

    ds = xray.Dataset()
    ds['uq'] = uq_int
    ds['vq'] = vq_int
    ds['vimt'] = np.sqrt(ds['uq']**2 + ds['vq']**2)

    # Climatological moisture fluxes
    dsbar = ds.mean(dim=yearnm)
    ds['uq_bar'], ds['vq_bar'] = dsbar['uq'], dsbar['vq']
    ds['vimt_bar'] = np.sqrt(ds['uq_bar']**2 + ds['vq_bar']**2)

    # Pre- and post- monsoon climatology composites
    dspre = atm.subset(dsbar, {daynm : (days_pre, None)}).mean(dim=daynm)
    dspost = atm.subset(dsbar, {daynm : (days_post, None)}).mean(dim=daynm)
    dsdiff = dspost - dspre
    ds['uq_bar_pre'], ds['vq_bar_pre'] = dspre['uq'], dspre['vq']
    ds['uq_bar_post'], ds['vq_bar_post'] = dspost['uq'], dspost['vq']
    ds['uq_bar_diff'], ds['vq_bar_diff'] = dsdiff['uq'], dsdiff['vq']

    # Magnitude of vector difference
    vimt_bar_diff = np.sqrt(dsdiff['uq']**2 + dsdiff['vq']**2)
    ds['vimt_bar_diff'] = vimt_bar_diff

    # Top N difference vectors
    def top_n(data, n):
        """Return a mask with the highest n values in 2D array."""
        vals = data.copy()
        mask = np.ones(vals.shape, dtype=bool)
        for k in range(n):
            i, j = np.unravel_index(np.nanargmax(vals), vals.shape)
            mask[i, j] = False
            vals[i, j] = np.nan
        return mask

    # Mask to extract top N points
    mask = top_n(vimt_bar_diff, npts)
    ds['mask'] = xray.DataArray(mask, coords={latnm: coords[latnm],
                                              lonnm: coords[lonnm]})

    # Apply mask to DataArrays
    def applymask(data, mask):
        _, _, coords, _ = atm.meta(data)
        maskbig = atm.biggify(mask, data, tile=True)
        vals = np.ma.masked_array(data, maskbig).filled(np.nan)
        data_out = xray.DataArray(vals, coords=coords)
        return data_out

    ds['vimt_bar_masked'] = applymask(ds['vimt_bar'], mask)
    ds['vimt_bar_diff_masked'] = applymask(vimt_bar_diff, mask)
    ds['uq_masked'] = applymask(ds['uq'], mask)
    ds['vq_masked'] = applymask(ds['vq'], mask)
    ds['vimt_masked'] = np.sqrt(ds['uq_masked']**2 + ds['vq_masked']**2)

    # Timeseries data averaged over selected N points
    ds['howi_clim_raw'] = ds['vimt_bar_masked'].mean(dim=latnm).mean(dim=lonnm)
    ds['howi_raw'] = ds['vimt_masked'].mean(dim=latnm).mean(dim=lonnm)

    # Normalize
    howi_min = ds['howi_clim_raw'].min().values
    howi_max = ds['howi_clim_raw'].max().values
    def applynorm(data):
        return 2 * (data - howi_min) / (howi_max - howi_min) - 1
    ds['howi_norm'] = applynorm(ds['howi_raw'])
    ds['howi_clim_norm'] = applynorm(ds['howi_clim_raw'])

    # Apply n-day rolling mean
    def rolling(data, nroll):
        center = True
        _, _, coords, _ = atm.meta(data)
        dims = data.shape
        vals = np.zeros(dims)
        if len(dims) > 1:
            nyears = dims[0]
            for y in range(nyears):
                vals[y] = pd.rolling_mean(data.values[y], nroll, center=center)
        else:
            vals = pd.rolling_mean(data.values, nroll, center=center)
        data_out = xray.DataArray(vals, coords=coords)
        return data_out

    ds['howi_norm_roll'] = rolling(ds['howi_norm'], nroll)
    ds['howi_clim_norm_roll'] = rolling(ds['howi_clim_norm'], nroll)

    # Index timeseries dataset
    howi = xray.Dataset()
    howi['tseries'] = ds['howi_norm_roll']
    howi['tseries_clim'] = ds['howi_clim_norm_roll']

    # Find zero crossings for onset and withdrawal indices
    nyears = len(howi[yearnm])
    onset = np.zeros(nyears, dtype=int)
    retreat = np.zeros(nyears, dtype=int)
    for y in range(nyears):
        # List of days with positive HOWI index
        pos = howi[daynm].values[howi['tseries'][y].values > 0]

        # In case of extra zero crossings, find the longest set of days
        # with positive index
        splitpos = atm.splitdays(pos)
        lengths = np.array([len(v) for v in splitpos])
        imonsoon = lengths.argmax()
        monsoon = splitpos[imonsoon]

        # In case there is a break in the monsoon season, check the
        # sets of days before and after and add to monsoon season
        # if applicable
        if imonsoon > 0:
            predays = splitpos[imonsoon - 1]
            if monsoon.min() - predays.max() <= maxbreak:
                predays = np.arange(predays.min(), monsoon.min())
                monsoon = np.concatenate([predays, monsoon])
        if imonsoon < len(splitpos) - 1:
            postdays = splitpos[imonsoon + 1]
            if postdays.min() - monsoon.max() <= maxbreak:
                postdays = np.arange(monsoon.max() + 1, postdays.max() + 1)
                monsoon = np.concatenate([monsoon, postdays])

        # Onset and retreat days
        onset[y] = monsoon[0]
        retreat[y] = monsoon[-1] + 1

    howi['onset'] = xray.DataArray(onset, coords={yearnm : howi[yearnm]})
    howi['retreat'] = xray.DataArray(retreat, coords={yearnm : howi[yearnm]})
    howi.attrs = {'npts' : npts, 'nroll' : nroll, 'maxbreak' : maxbreak,
                  'days_pre' : days_pre, 'days_post' : days_post}

    return howi, ds
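# The break handling described in the Notes (take the longest run of
# consecutive positive-index days, then absorb a neighbouring run if the gap is
# at most maxbreak days) can be sketched with plain numpy.  split_runs below
# emulates what atm.splitdays is assumed to do; only the following run is
# absorbed here, while the preceding run is handled symmetrically in onset_HOWI.
import numpy as np

def split_runs(days):
    """Split a sorted array of day numbers into runs of consecutive days."""
    breaks = np.where(np.diff(days) > 1)[0] + 1
    return np.split(days, breaks)

pos = np.array([140, 141, 142, 150, 151, 152, 153, 154, 158, 159, 160, 161])
runs = split_runs(pos)
lengths = np.array([len(r) for r in runs])
imonsoon = lengths.argmax()
monsoon = runs[imonsoon]                      # longest run: days 150-154

maxbreak = 7
nxt = runs[imonsoon + 1]                      # next run: days 158-161
if nxt.min() - monsoon.max() <= maxbreak:
    monsoon = np.concatenate([monsoon,
                              np.arange(monsoon.max() + 1, nxt.max() + 1)])
print(monsoon[0], monsoon[-1] + 1)            # onset 150, retreat 162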
Example #35
# ***  NOTES ****
# Need to troubleshoot TT index before using in anything final.
# See testing/testing-indices-onset_TT.py for details.

# Select vertical pressure level to use, or None to use 200-600mb
# vertical mean
plev = None

# Read daily data from each year
if plev is None:
    T = atm.combine_daily_years('Tbar', ttfiles, years, yearname='year')
else:
    T = atm.combine_daily_years('T', ttfiles, years, yearname='year',
                                subset_dict={'plev' : (plev, plev)})
    # Remove extra dimension (vertical)
    pdim = atm.get_coord(T, 'plev', 'dim')
    pname = atm.get_coord(T, 'plev', 'name')
    name, attrs, coords, dims = atm.meta(T)
    dims = list(dims)
    dims.pop(pdim)
    coords = atm.odict_delete(coords, pname)
    T = xray.DataArray(np.squeeze(T.values), dims=dims, coords=coords,
                       name=name, attrs=attrs)

# Calculate index
north=(5, 30, 40, 100)
south=(-15, 5, 40, 100)
index['TT'] = indices.onset_TT(T, north=north, south=south)

# Some weirdness going on in 1991, for now just set to NaN
for nm in ['ttn', 'tts', 'tseries']: