Example #1
def pgradient(var, lat1, lat2, lon1, lon2, plev):
    """Return d/dp of a lat-lon variable."""
    pwidth = 100
    p1, p2 = plev - pwidth, plev + pwidth
    var = atm.subset(var, {'lat' : (lat1, lat2), 'lon' : (lon1, lon2),
                           'plev' : (p1, p2)}, copy=False)
    latlonstr = latlon_filestr(lat1, lat2, lon1, lon2)
    attrs = var.attrs
    pname = atm.get_coord(var, 'plev', 'name')
    pdim = atm.get_coord(var, 'plev', 'dim')
    pres = var[pname]
    pres = atm.pres_convert(pres, pres.attrs['units'], 'Pa')
    dvar_dp = atm.gradient(var, pres, axis=pdim)
    dvar_dp = atm.subset(dvar_dp, {pname : (plev, plev)}, copy=False,
                         squeeze=True)
    varnm = 'D%sDP' % var.name
    name = '%s%d' % (varnm, plev)
    dvar_dp.name = name
    attrs['long_name'] = 'd/dp of ' + var.attrs['long_name']
    attrs['standard_name'] = 'd/dp of ' + var.attrs['standard_name']
    attrs['units'] = ('(%s)/Pa' % attrs['units'])
    attrs[pname] = plev
    attrs['filestr'] = '%s_%s' % (name, latlonstr)
    attrs['varnm'] = varnm
    dvar_dp.attrs = attrs
    return dvar_dp
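
# Usage sketch (not from the original source; names and values are illustrative):
# `u` is assumed to be an xray.DataArray of zonal wind with 'lat', 'lon' and a
# pressure coordinate in the same units as `plev`.
dudp200 = pgradient(u, lat1=-30, lat2=30, lon1=40, lon2=120, plev=200)
print(dudp200.name, dudp200.attrs['units'])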
Example #2
def calc_dp(var, plev):
    """Extract subset of pressure levels and calculate d/dp."""
    plevs = atm.get_coord(var, 'plev')
    pname = atm.get_coord(var, 'plev', 'name')
    pdim = atm.get_coord(var, 'plev', 'dim')
    ind = list(plevs).index(plev)
    i1 = max(0, ind - 1)
    i2 = min(len(plevs) - 1, ind + 1) + 1
    psub = plevs[i1:i2]
    varsub = var.sel(**{pname : psub})
    pres = atm.pres_convert(psub, 'hPa', 'Pa')
    atm.disptime()
    print('Computing d/dp for pressure level %d' % plev)
    dvar = atm.gradient(varsub, pres, axis=pdim)
    dvar = dvar.sel(**{pname : plev})
    dvar.name = 'D%sDP' % var.name
    atm.disptime()
    return dvar
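
# Usage sketch (assumption): `var` is assumed to be an xray.DataArray whose
# pressure coordinate is in hPa (calc_dp converts hPa to Pa internally); the
# 200 hPa level is an illustrative choice.
dvar_dp = calc_dp(var, plev=200)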
Example #3
# ----------------------------------------------------------------------
# Vertical gradient du/dp
#
# `atm` (atmos utility library) and `merra` (MERRA data-access module) are
# project modules imported elsewhere; `year` is assumed to be set earlier
# in the full script.
import numpy as np
import xray  # legacy name of the xarray package

lon1, lon2 = 40, 120
pmin, pmax = 100, 300
subset_dict = {'XDim': (lon1, lon2), 'Height': (pmin, pmax)}

urls = merra.merra_urls([year])
month, day = 7, 15
url = urls['%d%02d%02d' % (year, month, day)]
with xray.open_dataset(url) as ds:
    u = atm.subset(ds['U'], subset_dict, copy=False)
    u = u.mean(dim='TIME')

pres = u['Height']
pres = atm.pres_convert(pres, pres.attrs['units'], 'Pa')
dp = np.gradient(pres)

# Calc 1: loop over lat/lon and difference along the pressure axis.
# (With NumPy >= 1.13 the coordinate array `pres` can be passed to np.gradient
# directly; see the vectorized cross-check below.)
dims = u.shape
dudp = np.nan * u
for i in range(dims[1]):
    for j in range(dims[2]):
        dudp.values[:, i, j] = np.gradient(u[:, i, j], dp)

# Test atm.gradient
dudp_test = atm.gradient(u, pres, axis=0)
diff = dudp_test - dudp
print(diff.max())
print(diff.min())
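
# Vectorized cross-check (sketch, assuming NumPy >= 1.13): np.gradient accepts a
# coordinate array and an axis keyword, so the double loop above can be checked
# with a single call on the raw values.
dudp_np = np.gradient(u.values, np.asarray(pres), axis=0)
print(np.nanmax(np.abs(dudp_np - dudp.values)))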
Example #4
def get_daily_data(varid, plev, years, datafiles, data, daymin=1,
                   daymax=366, yearnm='year'):
    """Return daily data (basic variable or calculated variable).

    Data is read from datafiles if varnm is a basic variable.
    If varnm is a calculated variable (e.g. potential temperature),
    the base variables for calculation are provided in the dict data.
    """

    years = atm.makelist(years)
    datafiles = atm.makelist(datafiles)

    if isinstance(plev, (int, float)):
        pres = atm.pres_convert(plev, 'hPa', 'Pa')
    elif plev == 'LML' and 'PS' in data:
        pres = data['PS']
    else:
        pres = None

    def get_var(data, varnm, plev=None):
        if plev is None:
            plev = ''
        elif plev == 'LML' and varnm == 'QV':
            varnm = 'Q'
        return data[varnm + str(plev)]

    if var_type(varid) == 'calc':
        print('Computing ' + varid)
        if varid == 'THETA':
            var = atm.potential_temp(get_var(data, 'T', plev), pres)
        elif varid == 'THETA_E':
            var = atm.equiv_potential_temp(get_var(data, 'T', plev), pres,
                                           get_var(data, 'QV', plev))
        elif varid == 'DSE':
            var = atm.dry_static_energy(get_var(data, 'T', plev),
                                        get_var(data, 'H', plev))
        elif varid == 'MSE':
            var = atm.moist_static_energy(get_var(data, 'T', plev),
                                          get_var(data, 'H', plev),
                                          get_var(data, 'QV', plev))
        elif varid == 'VFLXMSE':
            Lv = atm.constants.Lv.values
            var = data['VFLXCPT'] + data['VFLXPHI'] + data['VFLXQV'] * Lv
            var.attrs['units'] = data['VFLXCPT'].attrs['units']
            var.attrs['long_name'] = 'Vertically integrated MSE meridional flux'
    else:
        with xray.open_dataset(datafiles[0]) as ds:
            if varid not in ds.data_vars:
                varid = varid + str(plev)
        var = atm.combine_daily_years(varid, datafiles, years, yearname=yearnm,
                                      subset_dict={'day' : (daymin, daymax)})
        var = atm.squeeze(var)

        # Make sure year dimension is included for single year
        if len(years) == 1 and 'year' not in var.dims:
            var = atm.expand_dims(var, yearnm, years[0], axis=0)

        # Wrap years for extended day ranges
        if daymin < 1 or daymax > 366:
            var = wrapyear_all(var, daymin, daymax)

    # Convert precip and evap to mm/day
    if varid in ['precip', 'PRECTOT', 'EVAP']:
        var = atm.precip_convert(var, var.attrs['units'], 'mm/day')

    return var
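
# Usage sketch (assumption; filenames and values are illustrative): computing
# 200 hPa potential temperature for a single year, where `data` holds the base
# temperature field 'T200' that the THETA branch reads via get_var().
datafiles = ['merra_T200_1979.nc']   # hypothetical filename
data = {'T200': T200}                # base variable read elsewhere
theta = get_daily_data('THETA', 200, [1979], datafiles, data)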
Example #5
def calc_fluxes(year, month,
                var_ids=['u', 'q', 'T', 'theta', 'theta_e', 'hgt'],
                concat_dim='TIME', scratchdir=None, keepscratch=False,
                verbose=True):
    """Return the monthly mean of MERRA daily fluxes.

    Reads MERRA daily data from OpenDAP urls, computes fluxes, and
    returns the monthly mean of the daily variable and its zonal and
    meridional fluxes.

    Parameters
    ----------
    year, month : int
        Year and month (1-12) to process.
    var_ids : list of str, optional
        IDs of variables to include.
    concat_dim : str, optional
        Name of dimension for concatenation.
    scratchdir : str, optional
        Directory path to store temporary files while processing data.
        If omitted, the current working directory is used.
    keepscratch : bool, optional
        If True, scratch files are kept in scratchdir. Otherwise they
        are deleted.
    verbose : bool, optional
        If True, print updates while processing files.

    Returns
    -------
    data : xray.Dataset
        Mean of daily data and the mean of the daily zonal fluxes
        (u * var) and meridional fluxes (v * var), for each variable
        in var_ids.
    """

    nms = [get_varname(nm) for nm in atm.makelist(var_ids)]
    u_nm, v_nm = get_varname('u'), get_varname('v')
    nms.extend([u_nm, v_nm])
    if 'theta' in nms:
        nms.append(get_varname('T'))
    if 'theta_e' in nms:
        nms.extend([get_varname('T'), get_varname('q')])
    nms = set(nms)

    days = range(1, atm.days_this_month(year, month) + 1)

    def scratchfile(nm, k, year, month, day):
        filestr = '%s_level%d_%d%02d%02d.nc' % (nm, k, year, month, day)
        if scratchdir is not None:
            filestr = scratchdir + '/' + filestr
        return filestr

    # Read metadata from one file to get pressure-level array
    dataset = 'p_daily'
    url = url_list(dataset, return_dict=False)[0]
    with xray.open_dataset(url) as ds:
        pname = atm.get_coord(ds, 'plev', 'name')
        plev = atm.get_coord(ds, 'plev')
        # Pressure levels in Pa for theta/theta_e calcs
        p_units = atm.pres_units(ds[pname].units)
        pres = atm.pres_convert(plev, p_units, 'Pa')

    # Read raw daily data and compute extended variables (theta, theta_e)
    def get_data(nms, pres, year, month, day, concat_dim, subset_dict, verbose):
        # Lists of raw and extended variables
        ids = list(nms)
        ext = []
        for var in ['theta', 'theta_e']:
            if var in ids:
                ext.append(var)
                ids.remove(var)

        # Read raw data and calculate extended variables
        data = read_daily(ids, year, month, day, concat_dim=concat_dim,
                          subset_dict=subset_dict, verbose=verbose)
        if 'theta' in ext:
            print_if('Computing potential temperature', verbose)
            T = data[get_varname('T')]
            data['theta'] = atm.potential_temp(T, pres)
        if 'theta_e' in ext:
            print_if('Computing equivalent potential temperature', verbose)
            T = data[get_varname('T')]
            q = data[get_varname('q')]
            data['theta_e'] = atm.equiv_potential_temp(T, pres, q)

        return data

    # Iterate over vertical levels
    for k, p in enumerate(plev):
        subset_dict = {pname : (p, p)}
        print_if('Pressure-level %.1f' % p, verbose)

        files = []

        for day in days:
            # Read data for this level and day
            ds = get_data(nms, pres[k], year, month, day, concat_dim,
                          subset_dict, verbose)

            # Compute fluxes
            print_if('Computing fluxes', verbose)
            u = ds[get_varname('u')]
            v = ds[get_varname('v')]
            for nm in var_ids:
                var = ds[get_varname(nm)]
                varname, attrs, _, _ = atm.meta(var)
                u_var = u * var
                v_var = v * var

                u_var.name = u_nm + '*' + var.name
                units = var.attrs['units'] + ' * ' + u.attrs['units']
                u_var.attrs['units'] = units
                v_var.name = v_nm + '*' + var.name
                v_var.attrs['units'] = units
                ds[u_var.name] = u_var
                ds[v_var.name] = v_var

            # Save to temporary scratch file
            filenm = scratchfile('fluxes', k, year, month, day)
            files.append(filenm)
            print_if('Saving to scratch file ' + filenm, verbose)
            ds.to_netcdf(filenm)

        # Concatenate daily scratch files
        ds = atm.load_concat(files)

        if not keepscratch:
            for f in files:
                os.remove(f)

        # Compute monthly means
        print_if('Computing monthly means', verbose)
        if k == 0:
            data = ds.mean(dim=concat_dim)
        else:
            data = xray.concat([data, ds.mean(dim=concat_dim)], dim=pname)

    for var in data.data_vars:
        data[var].attrs = ds[var].attrs

    return data
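
# Usage sketch (assumption; year, month, and variable list are illustrative):
fluxes = calc_fluxes(1979, 7, var_ids=['u', 'q', 'T'], scratchdir='/tmp')
fluxes.to_netcdf('fluxes_197907.nc')   # hypothetical output filename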