Code example #1
from netCDF4 import Dataset
import numpy as np
# NOTE: lon360to180() and get_indices() are helper functions defined elsewhere
# in the same module.


def topo_subset(llcrnrlon=-42, urcrnrlon=-35, llcrnrlat=-23,
                urcrnrlat=-14, tfile='topo30.grd'):
    """
    Get a subset from an etopo1, etopo2 or Smith and Sandwell topography file.

    NOTE: Modified from the oceans.datasets.etopo_subset() function by Filipe Fernandes ([email protected]).
    """
    topo = Dataset(tfile, 'r')

    if 'smith_sandwell' in tfile.lower() or 'etopo1' in tfile.lower():
        lons = topo.variables["lon"][:]
        lats = topo.variables["lat"][:]
        lons = lon360to180(lons)
    elif 'etopo2' in tfile.lower():
        lons = topo.variables["x"][:]
        lats = topo.variables["y"][:]
    else:
        print('Unknown topography file.')
        return

    res = get_indices(llcrnrlat, urcrnrlat, llcrnrlon, urcrnrlon, lons, lats)
    lon, lat = np.meshgrid(lons[res[0]:res[1]], lats[res[2]:res[3]])
    bathy = topo.variables["z"][int(res[2]):int(res[3]),
                                int(res[0]):int(res[1])]

    return lon, lat, bathy
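
A minimal usage sketch (hypothetical; 'etopo2.nc' is a placeholder for any local etopo2-style grid file with x/y/z variables):

lon, lat, bathy = topo_subset(llcrnrlon=-42, urcrnrlon=-35,
                              llcrnrlat=-23, urcrnrlat=-14,
                              tfile='etopo2.nc')  # hypothetical file name
print(lon.shape, lat.shape, bathy.shape)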
Code example #2
##----
if CALC_AICE:
    iceconc_thresh = 0.15  # Ice concentration threshold.
    fnames = [fnamen.replace('ocn', 'ice') for fnamen in fnames]
    fnames = [fnamen.replace('.pop.h.', '.cice.h.') for fnamen in fnames]
    AICE = np.array([])
    t = []  # Time stamps (year-month strings) parsed from the file names.
    nfirst = True
    nmo = 0
    for fnamen in fnames:
        yeari = fnamen.split('/')[-1].split('.')[-2]
        yeari2 = yeari[:-3]
        print(yeari)
        nci = Dataset(fnamen)
        if nfirst:
            tarea = nci['tarea'][:].data * 1e-6  # [km2]
            lon = lon360to180(nci['TLON'][:].data)
            lat = nci['TLAT'][:].data
            tmask = nci['tmask'][:]
            nfirst = False
        Aice = nci.variables['aice'][0, :fcap, :] / 100.  # Convert to fractional sea ice concentration (0-1).
        # Calculate total ice area for valid ice cells.
        # iarea=aice(aice>=dc & aice<=1.0 & aice~=0).*tarea(aice>=dc & aice<=1.0 & aice~=0).*1e-6;
        fice = np.logical_and(Aice >= iceconc_thresh, Aice <= 1.0)
        aice = np.sum(Aice[fice] * tarea[fice])
        t.append(yeari)
        AICE = np.append(AICE, aice)

    t = np.array([Timestamp(str(ti) + '-15').to_pydatetime() for ti in t])
    # NOTE: Timestamp (pandas), fname_out_aice, lont and latt are defined/imported earlier in the full script.
    np.savez(fname_out_aice, icearea=AICE, lon=lont, lat=latt, tarea=tarea, t=t)
Code example #3
File: sat.py  Project: regeirk/ap_tools
from datetime import datetime

import numpy as np
from netCDF4 import Dataset, num2date
from pandas import Panel  # NOTE: requires pandas < 1.0 (Panel was removed in pandas 1.0).
# lon360to180() is a helper function defined elsewhere in the same module.


def wind_subset(times=(datetime(2009,12,28), datetime(2010,1,10)),
    dt='daily', llcrnrlon=-45, urcrnrlon=-35, llcrnrlat=-25, urcrnrlat=-18):
    """
    USAGE
    -----
    u,v = wind_subset(...)

    Gets wind vectors from the NCDC/NOAA Blended Seawinds L4 product
    for a given lon, lat, time bounding box.

    Input
    -----
    * times: A tuple with 2 datetime objects marking the
      start and end of the desired subset.
      If dt=='clim', this should be an integer indicating the
      desired month, from 1 (January) through 12 (December).
      This climatology is built from the 1995-2005 data.

    * dt: Time resolution wanted. Choose `six-hourly`,
      `daily` (default) or `monthly`.

    * llcrnrlon, urcrnrlon: Longitude band wanted.
    * llcrnrlat, urcrnrlat: Latitude band wanted.

    Returns
    -------
    u,v: pandas Panels containing the data subsets.

    Example
    -------
    TODO.
    """
    head = 'http://www.ncdc.noaa.gov/thredds/dodsC/oceanwinds'
    tail = {'six-hourly':'6hr','daily':'dly','monthly':'mon','clim':'clm-agg'}

    url = head + tail[dt]
    nc = Dataset(url)

    lon = nc.variables.pop('lon')[:]
    lon = lon360to180(lon) # Longitude is in the [0,360] range, BB is in the [-180,+180] range.
    lat = nc.variables.pop('lat')[:]
    time = nc.variables.pop('time')
    time = num2date(time[:],time.units,calendar='standard')

    # Subsetting the data.
    maskx = np.logical_and(lon >= llcrnrlon, lon <= urcrnrlon)
    masky = np.logical_and(lat >= llcrnrlat, lat <= urcrnrlat)

    # Ignoring more precise time units than
    # the resolution in the desired dataset.
    nt = time.size
    if dt=='six-hourly':
        for i in range(nt):
            time[i] = time[i].replace(minute=0,second=0)
    elif dt=='daily':
        for i in range(nt):
            time[i] = time[i].replace(hour=0,minute=0,second=0)
    elif dt=='monthly':
        for i in range(nt):
            time[i] = time[i].replace(day=1,hour=0,minute=0,second=0)
    elif dt=='clim':
        time[1] = time[1].replace(month=2) # Fixing February.
        aux = [Time.month for Time in time]
        time = np.array(aux)
    else:
        pass

    # If the time wanted is a single month/day/hour,
    # get the nearest time available in the dataset.
    if dt=='clim':
        maskt=time==times
    else:
        if np.size(times)<2:
            idxt = np.abs(time-times).argmin()
            maskt=time==time[idxt]
        else:
            maskt = np.logical_and(time >= times[0], time <= times[1])

    print('Downloading subset...')
    lon, lat, time = lon[maskx], lat[masky], time[maskt]
    u = nc.variables['u'][maskt,0,masky,maskx]
    v = nc.variables['v'][maskt,0,masky,maskx]
    print('Downloaded.')

    lat,lon,time,u,v = map(np.atleast_1d, [lat,lon,time,u,v])

    # Sets the panels.
    U = Panel(data=u, items=time, major_axis=lat, minor_axis=lon)
    V = Panel(data=v, items=time, major_axis=lat, minor_axis=lon)
    
    # Sort the minor axis (longitude) to fix the crossing of the 0° meridian.
    U = U.sort_index(axis=2)
    V = V.sort_index(axis=2)

    # Retrieves the u,v arrays to fix masked values.
    u,v,lon,lat = map(np.atleast_1d, (U.values,V.values,U.minor_axis.values,U.major_axis.values))
    fill_value = -9999.
    u = np.ma.masked_equal(u, fill_value)
    v = np.ma.masked_equal(v, fill_value)

    # Resets the Panels with the sorted and masked u,v arrays.
    U = Panel(data=u, items=time, major_axis=lat, minor_axis=lon)
    V = Panel(data=v, items=time, major_axis=lat, minor_axis=lon)

    return U,V
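
The docstring's Example section is still a TODO; the call below is only a hypothetical sketch (not verified against the live THREDDS endpoint, and it assumes a pandas version that still provides Panel):

U, V = wind_subset(times=(datetime(2010, 1, 1), datetime(2010, 1, 5)),
                   dt='daily', llcrnrlon=-45, urcrnrlon=-35,
                   llcrnrlat=-25, urcrnrlat=-18)
print(U.shape)  # (n_times, n_lats, n_lons)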
Code example #4
## Get model topography and grid.
## NOTE: cm2m, deg2rad and LATCROP are constants defined earlier in the full script.
# head = '/lustre/atlas1/cli115/proj-shared/apaloczy/POP_full_simulation_from_monthly_outputs/'
fname_grd = '/home/andre/sio/phd/papers/pathways_antarctica/mkfigs/data_reproduce_figs/SOsubset_avg_2005-2009.nc'
# fname_grd = '/home/andre/sio/phd/papers/pathways_antarctica/mkfigs/data_reproduce_figs/POP_topog.nc'
ncg = Dataset(fname_grd)
lont = ncg.variables['TLONG'][:]
latt = ncg.variables['TLAT'][:]
lonu = ncg.variables['ULONG'][:]
latu = ncg.variables['ULAT'][:]
HTE = ncg.variables['HTE'][:] * cm2m
HTN = ncg.variables['HTN'][:] * cm2m
HUW = ncg.variables['HUW'][:] * cm2m
HUS = ncg.variables['HUS'][:] * cm2m
ht = ncg.variables['HT'][:] * cm2m
lont = lon360to180(lont)
lonu = lon360to180(lonu)
nx = lonu.shape[1]
ny = np.sum(latu[:, 0] <= LATCROP)
fSO = latu <= LATCROP  # Mask to get the Southern Ocean part of the grid.
sh = (ny, nx)
ht = ht[fSO].reshape(sh)
latt = latt[fSO].reshape(sh)
lont = lont[fSO].reshape(sh)
latu = latu[fSO].reshape(sh)
lonu = lonu[fSO].reshape(sh)
HTE = HTE[fSO].reshape(sh)
HTN = HTN[fSO].reshape(sh)
HUW = HUW[fSO].reshape(sh)
HUS = HUS[fSO].reshape(sh)
lonurad, laturad = lonu.ravel() * deg2rad, latu.ravel() * deg2rad
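
None of these excerpts define lon360to180(); a minimal sketch of what such a helper typically does (an assumption, not necessarily the project's actual implementation):

import numpy as np

def lon360to180(lon):
    # Wrap longitudes from the [0, 360) convention to the [-180, 180) convention.
    lon = np.asarray(lon)
    return ((lon + 180.0) % 360.0) - 180.0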
Code example #5
fcap = 501
cm2m = 1e-2
W2TW = 1e-12
rho_sw = 1026.0  # [kg/m3].
cp0 = 39960000.0  # [erg/g/K].
cp0 = cp0 * 1e-7 * 1e3  # [J/kg/degC]: 1 erg = 1e-7 J and 1 kg = 1e3 g, giving cp0 = 3996.0 J/kg/degC.
T2Q = rho_sw * cp0 * W2TW
Tf0 = -2.6431782684654603  # [degC] use the minimum freezing temperature found on the wanted isobath during the entire run.
CALC_PSIMEAN_ONLY = False

# Calculate subsegment averages of h(x), pseudo-cross isobath distance
# and the south and north row indices for each subsegment.
nc0 = Dataset(fnames[0])
ht = nc0.variables['HT'][:fcap, :] * cm2m  # [m].
lattx = nc0.variables['TLAT'][:fcap, :]
lontx = lon360to180(nc0.variables['TLONG'][:fcap, :])
lontx0 = lontx[100, :]
hshallow, hdeep = 100, 2000
fhshs = dict()
fhdeeps = dict()
xdists = dict()
hx_yavgs = dict()
loslices = dict()
laslices = dict()
Lattxs = dict()
Lontxs = dict()
Lys = dict()
nlat_stretch = 25
for subseg in segbrys_psi.keys():
    lol, lor = segbrys_psi[subseg]
    flol, flor = near(lontx0, lol, return_index=True), near(lontx0,