Example #1
def load_UVWT(_Nx, _Nz, _dir, _out_files):
    _U = np.zeros((_Nz, 1, _Nx, len(_out_files)))
    _V = np.zeros((_Nz, 1, _Nx, len(_out_files)))
    _W = np.zeros((_Nz, 1, _Nx, len(_out_files)))
    _T = np.zeros((_Nz, 1, _Nx, len(_out_files)))

    for _itr, _out_num in enumerate(_out_files):
        _U[:, :, :, _itr] = rdmds(_dir + '/U', _out_num)
        _V[:, :, :, _itr] = rdmds(_dir + '/V', _out_num)
        _W[:, :, :, _itr] = rdmds(_dir + '/W', _out_num)
        _T[:, :, :, _itr] = rdmds(_dir + '/T', _out_num)

    return np.squeeze(_U), np.squeeze(_V), np.squeeze(_W), np.squeeze(_T)
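
A minimal usage sketch (not from the original project); Nx, Nz, run_dir, and the iteration numbers below are placeholders.

# Hypothetical call to load_UVWT for a 2-D (x-z) run.
import numpy as np
from MITgcmutils import rdmds

Nx, Nz = 200, 50                    # horizontal / vertical grid sizes (placeholders)
run_dir = './run'                   # directory holding the U.*, V.*, W.*, T.* files
out_files = [100, 200, 300, 400]    # model iteration numbers to read
U, V, W, T = load_UVWT(Nx, Nz, run_dir, out_files)
# for a single-row (x-z) domain, squeeze leaves arrays of shape (Nz, Nx, len(out_files))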
Example #2
 def read_var_from_pickup(pickup_head, var0, nz):
     data, its, meta = rdmds(output_dir + pickup_head,
                             itrs=np.Inf,
                             returnmeta=True)
     data_unpick = []
     for var in meta['fldlist']:
         if var in [
                 'Uvel', 'Vvel', 'Theta', 'Salt', 'GuNm1', 'GvNm1',
                 'PhiHyd', 'siTICES', 'pTr01', 'AddMass'
         ]:
             data_unpick.append(data[:nz, :])
             data = data[nz:, :]
         elif var in [
                 'EtaN', 'dEtaHdt', 'EtaH', 'siAREA', 'siHEFF', 'siHSNOW',
                 'siUICE', 'siVICE', 'siSigm1', 'siSigm2', 'siSigm12'
         ]:
             data_unpick.append(data[0, :])
             data = data[1:, :]
         else:
             print('Error (read_var_from_pickup): unknown variable ' + var)
             sys.exit()
     i = meta['fldlist'].index(var0)
     return data_unpick[i]
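
A hedged usage sketch; output_dir is a module-level variable in the original project, and the pickup prefix and level count below are placeholders.

# Hypothetical call: extract potential temperature from the most recent pickup
# (itrs=np.Inf inside the function selects the latest iteration on disk).
nz = 50                                   # number of vertical levels (placeholder)
theta = read_var_from_pickup('pickup', 'Theta', nz)
print(theta.shape)                        # expected (nz, ny, nx)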
Example #3
def read_mds(fname, xdalike, rec=None, name=None):
    """Read a meta/data file pair and return
    an xarray DataArray similar to xdalike

    Parameters
    ----------
    fname : str
        path and filename base (i.e. without the .meta / .data suffix)
    xdalike : xarray DataArray
        DataArray with the same coordinates as the file to be read in
    rec : int, optional
        if specified, grabs a specific record from the file
    name : str, optional
        if specified, the returned DataArray is given this name

    Returns
    -------
    xda : xarray DataArray
    """
    xda = xr.DataArray(rdmds(fname, rec=rec),
                       coords=xdalike.coords,
                       dims=xdalike.dims)
    if name is not None:
        xda.name = name
    return xda
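
A brief sketch of calling read_mds; the file prefix and the template DataArray (eta_like) are assumptions, not names from the original project.

# Hypothetical call: read Eta.0000036000.data/.meta onto the coordinates of an
# existing DataArray, e.g. one taken from an xmitgcm-opened dataset.
eta = read_mds('run/Eta.0000036000', eta_like, name='Eta')
print(eta.name, eta.dims)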
Example #4
def make_sose_climatology (in_file, out_file):

    from MITgcmutils import rdmds

    # Strip .data from filename before reading
    data = rdmds(in_file.replace('.data', ''))
    climatology = np.zeros(tuple([12]) + data.shape[1:])
    for month in range(12):
        climatology[month,:] = np.mean(data[month::12,:], axis=0)
    write_binary(climatology, out_file)
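
A usage sketch under the function's own assumption that the input stacks monthly records along the first axis; the paths are placeholders and write_binary comes from the surrounding project.

# Hypothetical call: reduce a multi-year monthly SOSE field to a 12-record
# monthly climatology written as plain binary.
make_sose_climatology('SOSE/THETA_monthly.data', 'THETA_climatology.data')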
Example #5
File: mitgcmdiag.py Project: ASMLAB/asmlpy
def loadgrid(dirGrid, region=None, varname=None, flag=1):
    """
    + objective : read the grid information for the MITgcm simulation
    + inputs
      - dirGrid : the location of the grid files
      - region  : if specified, read the grid information for a local area.
                  This is passed to "rdmds"; see "rdmds" for more detail.
      - varname : the name of grid variables to be loaded.
                  if not specified, it reads 
                  'XC','YC','RAC','DXC','DYC','hFacC','hFacW','hFacS',
                  'Depth','RC','RF','DRC','DRF'
      - flag    : the default value is 1. If it is 2, then it additionally
                  reads : 'XG','YG','RAZ','DXG','DYG'
    + output
      - grd     : class variable that contains "varname"
    """
    tmp = rdmds(dirGrid + "/XC")
    [Ly, Lx] = tmp.shape
    if varname is None:
        varname = ['XC','YC','RAC','DXC','DYC','hFacC','hFacW','hFacS',
                   'Depth','RC','RF','DRC','DRF']
        if flag==2:
            varname = np.append(varname,['XG','YG','RAZ','DXG','DYG'])

    class grd(object):
        for iv, vname in enumerate(varname):
            if region is None:
                exec('tmpvar=rdmds("'+dirGrid+'/'+vname+'")')
                tmpvar = tmpvar.squeeze()
                exec(vname+'=tmpvar')
            else:
                if vname in ('RC', 'RF', 'DRC', 'DRF'):
                    exec('tmpvar=rdmds("'+dirGrid+'/'+vname+'")')
                else:
                    exec('tmpvar=rdmds("'+dirGrid+'/'+vname+'",region='+str(region)+')')
                tmpvar = tmpvar.squeeze()
                exec(vname+'=tmpvar')
            if vname=='hFacC':
                mskC = hFacC.copy()
                mskC[mskC==0] = np.nan
                mskC[np.isfinite(mskC)] = 1.
            if vname=='hFacW':
                mskW = hFacW.copy()
                mskW[mskW==0] = np.nan
                mskW[np.isfinite(mskW)] = 1.
            if vname=='hFacS':
                mskS = hFacS.copy()
                mskS[mskS==0] = np.nan
                mskS[np.isfinite(mskS)] = 1.
            del tmpvar
    del grd.iv,grd.vname

    return grd
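
A minimal sketch of calling loadgrid above; the grid directory is a placeholder, and flag=2 additionally loads the corner-point fields listed in the docstring.

# Hypothetical call: read the default grid fields plus XG/YG/RAZ/DXG/DYG.
grd = loadgrid('/path/to/grid', flag=2)
print(grd.XC.shape, grd.Depth.shape)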
Example #6
    def read_field(self, path, dimensions, var_name=None, fill_value=-9999):

        if path.endswith('.nc'):
            if var_name is None:
                print(
                    'Error (SOSEGrid.read_field): Must specify var_name for NetCDF files'
                )
                sys.exit()
            data_orig = read_netcdf(path, var_name)
        elif path.endswith('.data') or os.path.isfile(path + '.data'):
            from MITgcmutils import rdmds
            data_orig = rdmds(path.replace('.data', ''))
            if dimensions == 'z':
                data_orig = data_orig.squeeze()

        if self.trim_extend:
            if dimensions == 'z':
                # 1D depth field
                data_shape = [self.nz]
            else:
                # Split along longitude
                data_orig = split_longitude(data_orig, self.i_split)
                # Create a new array of the correct dimension (including extended regions)
                data_shape = [self.ny, self.nx]
                if 'z' in dimensions:
                    data_shape = [self.nz] + data_shape
                if 't' in dimensions:
                    num_time = data_orig.shape[0]
                    data_shape = [num_time] + data_shape
            data = np.zeros(data_shape) + fill_value

            # Trim
            if dimensions == 'z':
                data[self.k0_after:self.
                     k1_after] = data_orig[self.k0_before:self.k1_before]
            else:
                if 'z' in dimensions:
                    data[..., self.k0_after:self.k1_after,
                         self.j0_after:self.j1_after,
                         self.i0_after:self.i1_after] = data_orig[
                             ..., self.k0_before:self.k1_before,
                             self.j0_before:self.j1_before,
                             self.i0_before:self.i1_before]
                else:
                    data[..., self.j0_after:self.j1_after,
                         self.i0_after:self.i1_after] = data_orig[
                             ..., self.j0_before:self.j1_before,
                             self.i0_before:self.i1_before]
        else:
            data = data_orig

        return data
Example #7
def move(runoff,moves,mask):#{{{2
    '''move runoff from land- to ocean-point
       runoff: runoff field (on MITgcm grid)
       moves: list of source and destination points
       mask: land-ocean-mask
    '''
    
    area = rdmds(args.gridpath+'RAC')

    new_runoff = np.zeros_like(runoff)
    
    for month in np.arange(0,12):     # loop over months
        
        # move mds to list of faces and resort faces
        runoff_copy = np.copy(runoff[month,:,:])*area
        runoff_f = llc.faces(runoff_copy)
        runoff_f = resort_faces(runoff_f)

        for i in np.arange(len(moves)):  # loop over points to move

            # source and destination coordinates for face, y and x
            source_face=moves[i][0][0]
            source_y =  moves[i][0][1]
            source_x =  moves[i][0][2]
            dest_face = moves[i][1][0]
            dest_y =    moves[i][1][1]
            dest_x =    moves[i][1][2]
            
#            print '###########################################'
#            print 'source_coord',source_face,source_y,source_x
#            print 'dest_coord', dest_face,dest_y,dest_x 
#            print 'dest',runoff_f[dest_face][dest_y,dest_x]

            # add runoff of source cell to destination cell
            runoff_f[dest_face][dest_y,dest_x] += runoff_f[source_face][source_y,source_x]
#            print 'source', runoff_f[source_face][source_y,source_x] 
#            print 'new',runoff_f[dest_face][dest_y,dest_x] 


        # resort faces back and move list to mds array
        runoff_f = resort_back_faces(runoff_f)
        runoff_m = llc.faces2mds(runoff_f)
        
        runoff_m = runoff_m*mask    # delete runoff on land

        area[area == 0.0] = 1e-33
        runoff_m = runoff_m/area

        new_runoff[month,:,:] = runoff_m  # write runoff of month into array
        
    return new_runoff
Example #8
    def read_field(self, file_path, dimensions, fill_value=-9999):

        from MITgcmutils import rdmds

        if self.trim_extend:

            # Read the field and split along longitude
            data_orig = split_longitude(rdmds(file_path.replace('.data', '')),
                                        self.i_split)
            # Create a new array of the correct dimension (including extended regions)
            data_shape = [self.ny, self.nx]
            if 'z' in dimensions:
                data_shape = [self.nz] + data_shape
            if 't' in dimensions:
                num_time = data_orig.shape[0]
                data_shape = [num_time] + data_shape
            data = np.zeros(data_shape) + fill_value

            # Trim
            if 'z' in dimensions:
                data[..., self.k0_after:self.k1_after,
                     self.j0_after:self.j1_after,
                     self.i0_after:self.i1_after] = data_orig[
                         ..., self.k0_before:self.k1_before,
                         self.j0_before:self.j1_before,
                         self.i0_before:self.i1_before]
            else:
                data[..., self.j0_after:self.j1_after,
                     self.i0_after:self.i1_after] = data_orig[
                         ..., self.j0_before:self.j1_before,
                         self.i0_before:self.i1_before]

        else:
            # Nothing fancy to do
            data = rdmds(file_path.replace('.data', ''))

        return data
Example #9
import sys
import numpy as np
import matplotlib.pyplot as plt

from MITgcmutils import rdmds
from mpl_toolkits.basemap import Basemap

m = Basemap(projection='npstere',lon_0 = 0., boundinglat=60, resolution = 'l')
m = Basemap(projection='moll',lon_0 = 0., resolution = 'c')

d = rdmds('diag2Dm',[72319,72381,72437,72499,72559,72621,72681,72743,
                     72805,72865,72927,72987])
d3 = rdmds('diag3Dm',np.Inf)
xc,yc=m(rdmds('XC'),rdmds('YC'))
hf = rdmds('hFacC')

def llc_contourf(*arguments, **kwargs):

    arglen = len(arguments)
    h = []
    if arglen >= 3:
        data = arguments[2].flatten()
        x = arguments[0].flatten()
        y = arguments[1].flatten()
        data = np.ma.masked_array(data)
        data = np.ma.masked_where(data == 0., data)
        if data.min() != 0.:
            i = data != 0
            x = arguments[0].flatten()[i]
            y = arguments[1].flatten()[i]
            data = data[i]
Example #10
# In[2]:


import numpy as np
import matplotlib.pyplot as plt
from MITgcmutils import rdmds
from hspython import loadgrid
get_ipython().run_line_magic('matplotlib', 'inline')

grd = loadgrid('gyre2d', varname=['XC', 'YC'])
# In[3]:


rdir = '/Users/hajsong/Yonsei/Classes/ATM9107/mylectures/MITgcm_2Dgyre/run/'
dt = 1200
XC = rdmds(rdir+'XC')
YC = rdmds(rdir+'YC')


# In[4]:


Eta, its, _ = rdmds(rdir+'Eta', np.nan, returnmeta=True)    
# Read all the sea level output


# In[5]:


Eta.shape
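
A possible follow-up step, included here only as a sketch: plot the last sea-surface-height record on the cell-centre grid read above.

# Hypothetical next cell: contour the final Eta snapshot.
plt.contourf(XC, YC, Eta[-1], 20, cmap='RdBu_r')
plt.colorbar(label='sea surface height (m)')
plt.title('iteration %d' % its[-1])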
Example #11
File: plotit.py Project: mjlosch/raslyboca
# # 1994-05-31
# myiter = 268752; myiter2 = myiter
# # 1997-05-31
# myiter = 191376; myiter2 = myiter
# # 1997-05-31
# myiter = 321360; myiter2 = myiter
# 1998-05-31
myiter = 338880
myiter2 = myiter
# # 1998-12-31
# myiter = 349152; myiter2 = myiter

zdir = os.path.join(bdir, 'runz00')
rdir = os.path.join(bdir, 'runp00')
gdir = '/home/ollie/mlosch/MITgcm/MITgcm/llc270/grid'
xg = rdmds(os.path.join(gdir, 'XG'))
yg = rdmds(os.path.join(gdir, 'YG'))
rac = rdmds(os.path.join(rdir, 'RAC'))
rf = rdmds(os.path.join(rdir, 'RF'))[:, 0, 0]
rc = rdmds(os.path.join(rdir, 'RC'))[:, 0, 0]
drf = rdmds(os.path.join(rdir, 'DRF'))
hf = rdmds(os.path.join(rdir, 'hFacC'))
zf = rdmds(os.path.join(zdir, 'RF'))[:, 0, 0]
zc = rdmds(os.path.join(zdir, 'RC'))[:, 0, 0]
dzf = rdmds(os.path.join(zdir, 'DRF'))
hfz = rdmds(os.path.join(rdir, 'hFacC'))

dpz = rdmds(os.path.join(zdir, 'Depth'))
dpp = rdmds(os.path.join(rdir, 'Depth'))

zdiry = os.path.join(zdir, '20years')
Example #12
    from glob import glob
    for infile in glob(args[0]):
        files.append(infile)
else:
    files = args

mydir = os.getcwd().split(os.sep)[-1]

files.sort()
its = list(files)
for k, file in enumerate(files):
    its[k] = int(file.split('.')[1])

print "reading " + str(len(its)) + " iterations: " + str(its)

xg, yg = rdmds('XG'), rdmds('YG')
xc, yc = rdmds('XC'), rdmds('YC')

drf = rdmds('DRF')
dxg, dyg = rdmds('DXG'), rdmds('DYG')

zg = -np.hstack([0., np.cumsum(drf[:, 0, 0])])
zc = 0.5 * (zg[:-1] + zg[1:])

hf = rdmds('hFacC')
mskv = rdmds('hFacS')
mskv[mskv > 0.] = 1.
msku = rdmds('hFacW')
msku[msku > 0.] = 1.
# atlantic mask
Example #13
import numpy as np
import matplotlib.pyplot as plt
from MITgcmutils import rdmds
from MITgcmutils import llc
from myutils import sq, pcol
import datetime
import os, sys, re

myrun = 'spinup_era_interim'
#myrun = 'runz00'
bdir = '/work/ollie/mlosch/raslyboca/llc270'
gdir = '/home/ollie/mlosch/MITgcm/MITgcm/llc270/grid'
xg,yg=rdmds(os.path.join(gdir,'XG')),rdmds(os.path.join(gdir,'YG'))
xc,yc=rdmds(os.path.join(gdir,'XC')),rdmds(os.path.join(gdir,'YC'))

dxg=rdmds(os.path.join(gdir,'DXG'))
dyg=rdmds(os.path.join(gdir,'DYG'))
drf=rdmds(os.path.join(gdir,'DRF'))
rac=rdmds(os.path.join(gdir,'RAC'))
drf=rdmds(os.path.join(gdir,'DRF'))

n = dxg.shape[-1]

#rf=rdmds(os.path.join(gdir,'RF'))[:,0,0]
rf=rdmds(os.path.join(bdir,myrun,'RF'))[:,0,0]

iters=[]
for file in os.listdir(os.path.join(bdir,myrun)):
    m = re.search('SIdiags.*.meta',file)
    if m: iters.append(int(m.group().split('.')[1]))
Example #14
#its=[35795, 35857, 35913, 35975, 36035, 36097, 36157, 36219, 36281, 36341, 36403, 36463]
#its=[np.Inf]
#its=[72743]
#its=[72437]
#its=[61, 119, 181, 241, 303, 363, 425, 487, 547, 609, 669, 731]

#its=[108497,108581,108674,108764,108857,108947,109040,109133,109223,109316,109406,109499]
#its=[108497]
#its=[np.nan]
#its=[911]

print "reading iterations " + str(its)

if len(its) == 1:
    d, iter, meta = rdmds('diag3Dm', its[0], rec=[4, 5], returnmeta=True)
else:
    d, iter, meta = rdmds('diag3Dm', its, rec=[4, 5], returnmeta=True)

if len(d.shape) == 5: d = np.mean(d, axis=0)

myyear = np.round(np.mean(iter) * 8 * 3600. / (365 * 86400.)) + 1901
mydir = os.getcwd().split(os.sep)[-1]

xg, yg = rdmds('XG'), rdmds('YG')
xc, yc = rdmds('XC'), rdmds('YC')

drf = rdmds('DRF')
dxg, dyg = rdmds('DXG'), rdmds('DYG')

zg = -np.hstack([0., np.cumsum(drf[:, 0, 0])])
Example #15
#!/usr/bin/env python
import numpy as np
import matplotlib.pyplot as plt
from MITgcmutils import rdmds
from MITgcmutils import llc
from myutils import sq, pcol

d=rdmds('diag3Dm',[120550, 120554])
print(d.shape)

df=llc.faces(d[:,:,:,:,:])

print(len(df))
#print len(df[0])
for k in range(len(df)): print(k, df[k].shape)
Example #16
from MITgcmutils import rdmds
from MyMITgcmUtils import get_grid
from matplotlib.animation import FuncAnimation
import numpy.ma as ma

run_dir = '/home/hugke729/mitgcm/dirstu17/proj1/runs/RunFr3900/'
g = get_grid(run_dir)
T = rdmds(run_dir + 'T*', itrs=np.nan).squeeze()
U = rdmds(run_dir + 'U*', itrs=np.nan).squeeze()
hfacW = rdmds(run_dir + 'hFacW').squeeze()
hfacC = rdmds(run_dir + 'hFacC').squeeze()

U = ma.masked_where(hfacW * np.ones((U.shape[0], 1, 1)) == 0, U)
T = ma.masked_where(hfacC * np.ones((T.shape[0], 1, 1)) == 0, T)

fig, ax = plt.subplots()
ax.set(facecolor='Grey')

# cax = ax.pcolormesh(g.xf, g.zf, T[0, :, :],
#                     vmin=0, vmax=26, cmap='RdBu')
cax = ax.pcolormesh(g.xf, g.zf, U[1, :, :], vmin=0, vmax=1, cmap='YlOrRd')

ax.set(xlim=(1.8E5, 2.3E5), ylim=(2000, 0))
# ax.set(ylim=(2000, 0))

fig.colorbar(cax)


def animate(i):
    # cax.set_array((T[i, ...] - T[0, ...]).flatten())
    # cax.set_array((T[i, ...]).flatten())
Example #17
def crash_to_netcdf (crash_dir, grid_path):

    from MITgcmutils import rdmds

    # Make sure crash_dir is a proper directory
    crash_dir = real_dir(crash_dir)

    # Read the grid
    grid = Grid(grid_path)
    # Initialise the NetCDF file
    ncfile = NCfile(crash_dir+'crash.nc', grid, 'xyz')

    # Find all the crash files
    for file in os.listdir(crash_dir):
        if file.startswith('stateThetacrash') and file.endswith('.data'):
            # Found temperature
            # Read it from binary
            temp = rdmds(crash_dir + file.replace('.data', ''))
            # Write it to NetCDF
            ncfile.add_variable('THETA', temp, 'xyz', units='C')
        if file.startswith('stateSaltcrash') and file.endswith('.data'):
            salt = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('SALT', salt, 'xyz', units='psu')
        if file.startswith('stateUvelcrash') and file.endswith('.data'):
            u = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('UVEL', u, 'xyz', gtype='u', units='m/s')
        if file.startswith('stateVvelcrash') and file.endswith('.data'):
            v = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('VVEL', v, 'xyz', gtype='v', units='m/s')
        if file.startswith('stateWvelcrash') and file.endswith('.data'):
            w = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('WVEL', w, 'xyz', gtype='w', units='m/s')
        if file.startswith('stateEtacrash') and file.endswith('.data'):
            eta = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('ETAN', eta, 'xy', units='m')
        if file.startswith('stateAreacrash') and file.endswith('.data'):
            area = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('SIarea', area, 'xy', units='fraction')
        if file.startswith('stateHeffcrash') and file.endswith('.data'):
            heff = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('SIheff', heff, 'xy', units='m')
        if file.startswith('stateUicecrash') and file.endswith('.data'):
            uice = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('SIuice', uice, 'xy', gtype='u', units='m/s')
        if file.startswith('stateVicecrash') and file.endswith('.data'):
            vice = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('SIvice', vice, 'xy', gtype='v', units='m/s')
        if file.startswith('stateQnetcrash') and file.endswith('.data'):
            qnet = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('Qnet', qnet, 'xy', units='W/m^2')
        if file.startswith('stateMxlcrash') and file.endswith('.data'):
            mld = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('MXLDEPTH', mld, 'xy', units='m')
        if file.startswith('stateEmpmrcrash') and file.endswith('.data'):
            empmr = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('Empmr', empmr, 'xy', units='kg/m^2/s')

    ncfile.close()
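
A usage sketch with placeholder paths; real_dir, Grid, and NCfile are helpers from the surrounding package.

# Hypothetical call: collect all state*crash fields from an aborted run into a
# single crash.nc in the same directory.
crash_to_netcdf('run_aborted/', 'run_aborted/grid/')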
Example #18
        files.append(infile)
else:
    files = args

mydir = os.getcwd().split(os.sep)[-1]

files.sort()
its = list(files)
for k, file in enumerate(files):
    its[k] = int(file.split('.')[1])

print("reading " + str(len(its)) + " iterations: " + str(its))

bdir = '/work/ollie/mlosch/egerwing/llc270'
gdir = '/home/ollie/mlosch/MITgcm/MITgcm/llc270/grid'
xg, yg = rdmds(os.path.join(gdir, 'XG')), rdmds(os.path.join(gdir, 'YG'))
xc, yc = rdmds(os.path.join(gdir, 'XC')), rdmds(os.path.join(gdir, 'YC'))

drf = rdmds(os.path.join(gdir, 'DRF'))
dxg, dyg = rdmds(os.path.join(gdir, 'DXG')), rdmds(os.path.join(gdir, 'DYG'))

rac = rdmds(os.path.join(gdir, 'RAC'))

zg = -np.hstack([0., np.cumsum(drf[:, 0, 0])])
zc = 0.5 * (zg[:-1] + zg[1:])

hf = rdmds(os.path.join(gdir, 'hFacC'))
mskc = np.copy(hf)
mskc[mskc > 0.] = 1.
mskv = rdmds(os.path.join(gdir, 'hFacS'))
mskv[mskv > 0.] = 1.
Example #19
# compute net precipitation into llc90 and into llc270 ocean

import numpy as np
from MITgcmutils import rdmds
from netCDF4 import Dataset
import os
from myutils import *

mdir = '/home/ollie/mlosch/MITgcm/MITgcm'
rac_llc90 = rdmds(os.path.join(mdir,'llc90/grid','RAC'))
hf_llc90 = rdmds(os.path.join(mdir,'llc90/grid','hFacC'),lev=[0])
rac_llc270 = rdmds(os.path.join(mdir,'llc270/grid','RAC'))
hf_llc270 = rdmds(os.path.join(mdir,'llc270/grid','hFacC'),lev=[0])

a90  = (rac_llc90*hf_llc90).sum()
a270 = (rac_llc270*hf_llc270).sum()

# precip from a 30-year llc90 spinup run
bdir = '/work/ollie/mlosch/raslyboca'
m90 = 'MITgcm_elena/llc90/spinup_era_interim/monitor_exf.0000000000.t001.nc'
ds90 = Dataset(os.path.join(bdir,m90))

# precip from a 20year llc270 spinup run
m270 = 'llc270/runz00/20years/stdout.*'
globfiles = os.path.join(bdir,m270)
files=[]
if len(args)<2:
    from glob import glob
    for infile in glob(globfiles):
        files.append(infile)
Example #20
dates = []

for idic, dici in enumerate([
        dir_th, dir_thad, dir_thad_nc, dir_thad_nc_f, dir_thad_nc_pl,
        dir_thad_nc_pl_f
]):
    flist = [
        ifile for ifile in os.listdir(os.path.join(dici))
        if ifile.startswith(fname) and ifile.endswith('.data')
    ]
    flist.sort()
    steps = [int(ifile.split('.')[-2]) for ifile in flist]
    for istep, stepi in enumerate(steps):
        [a[idic, istep, :, :],
         t[idic, istep, :, :]] = rdmds(os.path.join(dici, fname),
                                       stepi)[2:4, :, :]
        if idic == 0:
            dates.append(
                datetime.datetime(1948, 1, 1, 0, 0, 0) +
                datetime.timedelta(0, stepi * 28800))

# In[22]:

# Read grid
dir_grid = '/home/ollie/nhutter/raslyboca/MITgcm/llc90/grid'
xg = rdmds(os.path.join(dir_grid, 'XG'))
yg = rdmds(os.path.join(dir_grid, 'YG'))
rac = rdmds(os.path.join(dir_grid, 'RAC'))

# In[23]:
Example #21
    def __init__(self, path, model_grid=None, split=0):

        self.split = split

        if path.endswith('.nc'):
            use_netcdf = True
        elif os.path.isdir(path):
            use_netcdf = False
            path = real_dir(path)
            from MITgcmutils import rdmds
        else:
            print('Error (SOSEGrid): ' + path + ' is neither a NetCDF file nor a directory')
            sys.exit()

        self.trim_extend = True
        if model_grid is None:
            self.trim_extend = False

        if self.trim_extend:
            # Error checking for which longitude range we're in
            if split == 180:
                max_lon = 180
                if np.amax(model_grid.lon_2d) > max_lon:
                    print('Error (SOSEGrid): split=180 does not match model grid')
                    sys.exit()
            elif split == 0:
                max_lon = 360
                if np.amin(model_grid.lon_2d) < 0:
                    print('Error (SOSEGrid): split=0 does not match model grid')
                    sys.exit()
            else:
                print('Error (SOSEGrid): split must be 180 or 0')
                sys.exit()
        else:
            max_lon = 360

        # Read variables
        if use_netcdf:
            # Make the 2D grid 1D so it's regular
            self.lon_1d = read_netcdf(path, 'XC')[0, :]
            self.lon_corners_1d = read_netcdf(path, 'XG')[0, :]
            self.lat_1d = read_netcdf(path, 'YC')[:, 0]
            self.lat_corners_1d = read_netcdf(path, 'YG')[:, 0]
            self.z = read_netcdf(path, 'Z')
            self.z_edges = read_netcdf(path, 'RF')
        else:
            self.lon_1d = rdmds(path + 'XC')[0, :]
            self.lon_corners_1d = rdmds(path + 'XG')[0, :]
            self.lat_1d = rdmds(path + 'YC')[:, 0]
            self.lat_corners_1d = rdmds(path + 'YG')[:, 0]
            self.z = rdmds(path + 'RC').squeeze()
            self.z_edges = rdmds(path + 'RF').squeeze()

        # Fix longitude range
        self.lon_1d = fix_lon_range(self.lon_1d, max_lon=max_lon)
        self.lon_corners_1d = fix_lon_range(self.lon_corners_1d,
                                            max_lon=max_lon)
        if split == 180:
            # Split the domain at 180E=180W and rearrange the two halves so longitude is strictly ascending
            self.i_split = np.nonzero(self.lon_1d < 0)[0][0]
        else:
            # Set i_split to 0 which won't actually do anything
            self.i_split = 0
        self.lon_1d = split_longitude(self.lon_1d, self.i_split)
        self.lon_corners_1d = split_longitude(self.lon_corners_1d,
                                              self.i_split)
        if self.lon_corners_1d[0] > 0:
            # The split happened between lon_corners[i_split] and lon[i_split].
            # Take mod 360 on this index of lon_corners to make sure it's strictly increasing.
            self.lon_corners_1d[0] -= 360
        # Make sure the longitude axes are strictly increasing after the splitting
        if not np.all(np.diff(self.lon_1d) > 0) or not np.all(
                np.diff(self.lon_corners_1d) > 0):
            print('Error (SOSEGrid): longitude is not strictly increasing')
            sys.exit()

        # Save original dimensions
        sose_nx = self.lon_1d.size
        sose_ny = self.lat_1d.size
        sose_nz = self.z.size

        if self.trim_extend:

            # Trim and/or extend the axes
            # Notes about this:
            # Longitude can only be trimmed as SOSE considers all longitudes (someone doing a high-resolution circumpolar model with points in the gap might need to write a patch to wrap the SOSE grid around)
            # Latitude can be trimmed in both directions, or extended to the south (not extended to the north - if you need to do this, SOSE is not the right product for you!)
            # Depth can be extended by one level in both directions, and the deeper bound can also be trimmed
            # The indices i, j, and k will be kept track of with 4 variables each. For example, with longitude:
            # i0_before = first index we care about
            #           = how many cells to trim at beginning
            # i0_after = i0_before's position in the new grid
            #          = how many cells to extend at beginning
            # i1_before = first index we don't care about
            #           sose_nx - i1_before = how many cells to trim at end
            # i1_after = i1_before's position in the new grid
            #          = i1_before - i0_before + i0_after
            # nx = length of new grid
            #      nx - i1_after = how many cells to extend at end

            # Find bounds on model grid
            xmin = np.amin(model_grid.lon_corners_2d)
            xmax = np.amax(model_grid.lon_2d)
            ymin = np.amin(model_grid.lat_corners_2d)
            ymax = np.amax(model_grid.lat_2d)
            z_shallow = model_grid.z[0]
            z_deep = model_grid.z[-1]

            # Western bound (use longitude at cell centres to make sure all grid types clear the bound)
            if xmin == self.lon_1d[0]:
                # Nothing to do
                self.i0_before = 0
            elif xmin > self.lon_1d[0]:
                # Trim
                self.i0_before = np.nonzero(self.lon_1d > xmin)[0][0] - 1
            else:
                print('Error (SOSEGrid): not allowed to extend westward')
                sys.exit()
            self.i0_after = 0

            # Eastern bound (use longitude at cell corners, i.e. western edge)
            if xmax == self.lon_corners_1d[-1]:
                # Nothing to do
                self.i1_before = sose_nx
            elif xmax < self.lon_corners_1d[-1]:
                # Trim
                self.i1_before = np.nonzero(
                    self.lon_corners_1d > xmax)[0][0] + 1
            else:
                print('Error (SOSEGrid): not allowed to extend eastward')
                sys.exit()
            self.i1_after = self.i1_before - self.i0_before + self.i0_after
            self.nx = self.i1_after

            # Southern bound (use latitude at cell centres)
            if ymin == self.lat_1d[0]:
                # Nothing to do
                self.j0_before = 0
                self.j0_after = 0
            elif ymin > self.lat_1d[0]:
                # Trim
                self.j0_before = np.nonzero(self.lat_1d > ymin)[0][0] - 1
                self.j0_after = 0
            elif ymin < self.lat_1d[0]:
                # Extend
                self.j0_after = int(np.ceil(
                    (self.lat_1d[0] - ymin) / sose_res))
                self.j0_before = 0

            # Northern bound (use latitude at cell corners, i.e. southern edge)
            if ymax == self.lat_corners_1d[-1]:
                # Nothing to do
                self.j1_before = sose_ny
            elif ymax < self.lat_corners_1d[-1]:
                # Trim
                self.j1_before = np.nonzero(
                    self.lat_corners_1d > ymax)[0][0] + 1
            else:
                print('Error (SOSEGrid): not allowed to extend northward')
                sys.exit()
            self.j1_after = self.j1_before - self.j0_before + self.j0_after
            self.ny = self.j1_after

            # Depth
            self.k0_before = 0
            if z_shallow <= self.z[0]:
                # Nothing to do
                self.k0_after = 0
            else:
                # Extend
                self.k0_after = 1
            if z_deep > self.z[-1]:
                # Trim
                self.k1_before = np.nonzero(self.z < z_deep)[0][0] + 1
            else:
                # Either extend or do nothing
                self.k1_before = sose_nz
            self.k1_after = self.k1_before + self.k0_after
            if z_deep < self.z[-1]:
                # Extend
                self.nz = self.k1_after + 1
            else:
                self.nz = self.k1_after

            # Now we have the indices we need, so trim/extend the axes as needed
            # Longitude: can only trim
            self.lon_1d = self.lon_1d[self.i0_before:self.i1_before]
            self.lon_corners_1d = self.lon_corners_1d[self.i0_before:self.
                                                      i1_before]
            # Latitude: can extend on south side, trim on both sides
            lat_extend = np.flipud(-1 *
                                   (np.arange(self.j0_after) + 1) * sose_res +
                                   self.lat_1d[self.j0_before])
            lat_trim = self.lat_1d[self.j0_before:self.j1_before]
            self.lat_1d = np.concatenate((lat_extend, lat_trim))
            lat_corners_extend = np.flipud(-1 *
                                           (np.arange(self.j0_after) + 1) *
                                           sose_res +
                                           self.lat_corners_1d[self.j0_before])
            lat_corners_trim = self.lat_corners_1d[self.j0_before:self.
                                                   j1_before]
            self.lat_corners_1d = np.concatenate(
                (lat_corners_extend, lat_corners_trim))
            # Depth: can extend on both sides (depth 0 at top and extrapolated at bottom to clear the deepest model depth), trim on deep side
            z_above = 0 * np.ones([self.k0_after
                                   ])  # Will either be [0] or empty
            z_middle = self.z[self.k0_before:self.k1_before]
            z_edges_middle = self.z_edges[self.k0_before:self.k1_before + 1]
            z_below = (2 * model_grid.z[-1] - model_grid.z[-2]) * np.ones([
                self.nz - self.k1_after
            ])  # Will either be [something deeper than z_deep] or empty
            self.z = np.concatenate((z_above, z_middle, z_below))
            self.z_edges = np.concatenate((z_above, z_edges_middle, z_below))

            # Make sure we cleared those bounds
            if self.lon_corners_1d[0] > xmin:
                print('Error (SOSEGrid): western bound not cleared')
                sys.exit()
            if self.lon_corners_1d[-1] < xmax:
                print('Error (SOSEGrid): eastern bound not cleared')
                sys.exit()
            if self.lat_corners_1d[0] > ymin:
                print('Error (SOSEGrid): southern bound not cleared')
                sys.exit()
            if self.lat_corners_1d[-1] < ymax:
                print('Error (SOSEGrid): northern bound not cleared')
                sys.exit()
            if self.z[0] < z_shallow:
                print('Error (SOSEGrid): shallow bound not cleared')
                sys.exit()
            if self.z[-1] > z_deep:
                print('Error (SOSEGrid): deep bound not cleared')
                sys.exit()

        else:

            # Nothing fancy to do
            self.nx = sose_nx
            self.ny = sose_ny
            self.nz = sose_nz

        # Now read the rest of the variables we need, splitting/trimming/extending them if needed
        if use_netcdf:
            self.hfac = self.read_field(path,
                                        'xyz',
                                        var_name='hFacC',
                                        fill_value=0)
            self.hfac_w = self.read_field(path,
                                          'xyz',
                                          var_name='hFacW',
                                          fill_value=0)
            self.hfac_s = self.read_field(path,
                                          'xyz',
                                          var_name='hFacS',
                                          fill_value=0)
            self.dA = self.read_field(path, 'xy', var_name='rA', fill_value=0)
            self.dz = self.read_field(path, 'z', var_name='DRF', fill_value=0)
        else:
            self.hfac = self.read_field(path + 'hFacC', 'xyz', fill_value=0)
            self.hfac_w = self.read_field(path + 'hFacW', 'xyz', fill_value=0)
            self.hfac_s = self.read_field(path + 'hFacS', 'xyz', fill_value=0)
            self.dA = self.read_field(path + 'RAC', 'xy', fill_value=0)
            self.dz = self.read_field(path + 'DRF', 'z', fill_value=0)
        # Calculate volume
        self.dV = xy_to_xyz(self.dA, [self.nx, self.ny, self.nz]) * z_to_xyz(
            self.dz, [self.nx, self.ny, self.nz]) * self.hfac

        # Mesh lat and lon
        self.lon_2d, self.lat_2d = np.meshgrid(self.lon_1d, self.lat_1d)
        self.lon_corners_2d, self.lat_corners_2d = np.meshgrid(
            self.lon_corners_1d, self.lat_corners_1d)

        # Calculate bathymetry
        self.bathy = bdry_from_hfac('bathy', self.hfac, self.z_edges)

        # Create land masks
        self.land_mask = self.build_land_mask(self.hfac)
        self.land_mask_u = self.build_land_mask(self.hfac_w)
        self.land_mask_v = self.build_land_mask(self.hfac_s)
        # Dummy ice mask with all False
        self.ice_mask = np.zeros(self.land_mask.shape).astype(bool)
Example #22
        face3_rot = np.rot90(field[order[k-1]],1)
        field_ext = np.hstack((face3_rot[:,-n:],field[face],face1_rot[:,0:n]))
        field_ext = np.vstack((np.hstack((np.zeros((n,n)),field[order[k-4]][-n:,:],np.zeros((n,n)))),field_ext,np.hstack((np.zeros((n,n)),face2_rot[0:n,:],np.zeros((n,n))))))
    else:
        field_ext = np.hstack((field[order[k-1]][:,-n:],field[face],field[order[k+1]][:,:n]))
        face4_rot = np.rot90(field[order[0]],order[k])
        field_ext = np.vstack((np.hstack((np.zeros((n,n)),np.zeros((n,field[face].shape[1])),np.zeros((n,n)))),field_ext,np.hstack((np.zeros((n,n)),face4_rot[:n,:],np.zeros((n,n))))))
    
    return field_ext  #}}}2

#}}}1


if __name__=='__main__':
    
    mask = rdmds(args.gridpath+'hFacC')[0,:,:]
    runoff = readfield(args.infile,(12,len(mask[:,0]),len(mask[0,:])),np.float32)
    bathy = readfield(args.bathyfile,(len(mask[:,0]),len(mask[0,:])),np.float32)

    x_mit = rdmds(args.gridpath+'XC',astype=None)
    y_mit = rdmds(args.gridpath+'YC',astype=None)
    area = rdmds(args.gridpath+'RAC',astype=None)
    ind = np.unravel_index(np.argmax(runoff*area[None,:,:]),runoff.shape)
    print(ind)
    print('Max', y_mit[ind[1], ind[2]], x_mit[ind[1], ind[2]])

    extension = args.iterations

    moves = list_of_movements(runoff,mask,args.iterations,bathy,extension)

    new_runoff = move(runoff,moves,mask)
Example #23
    def __init__(self, path, x_is_lon=True, max_lon=None):

        if path.endswith('.nc'):
            use_netcdf = True
        elif os.path.isdir(path):
            use_netcdf = False
            path = real_dir(path)
            from MITgcmutils import rdmds
        else:
            print('Error (Grid): ' + path + ' is neither a NetCDF file nor a directory')
            sys.exit()

        # Read variables
        # Note that some variables are capitalised differently in NetCDF versus binary, so can't make this more efficient...
        if use_netcdf:
            self.lon_2d = read_netcdf(path, 'XC')
            self.lat_2d = read_netcdf(path, 'YC')
            self.lon_corners_2d = read_netcdf(path, 'XG')
            self.lat_corners_2d = read_netcdf(path, 'YG')
            self.dx_s = read_netcdf(path, 'dxG')
            self.dy_w = read_netcdf(path, 'dyG')
            # I have no idea why this requires .data but it does, otherwise WSS breaks (?!?!)
            self.dA = read_netcdf(path, 'rA').data
            self.z = read_netcdf(path, 'Z')
            self.z_edges = read_netcdf(path, 'Zp1')
            self.dz = read_netcdf(path, 'drF')
            self.dz_t = read_netcdf(path, 'drC')
            self.hfac = read_netcdf(path, 'hFacC')
            self.hfac_w = read_netcdf(path, 'hFacW')
            self.hfac_s = read_netcdf(path, 'hFacS')
        else:
            self.lon_2d = rdmds(path + 'XC')
            self.lat_2d = rdmds(path + 'YC')
            self.lon_corners_2d = rdmds(path + 'XG')
            self.lat_corners_2d = rdmds(path + 'YG')
            self.dx_s = rdmds(path + 'DXG')
            self.dy_w = rdmds(path + 'DYG')
            self.dA = rdmds(path + 'RAC')
            # Remove singleton dimensions from 1D depth variables
            self.z = rdmds(path + 'RC').squeeze()
            self.z_edges = rdmds(path + 'RF').squeeze()
            self.dz = rdmds(path + 'DRF').squeeze()
            self.dz_t = rdmds(path + 'DRC').squeeze()
            self.hfac = rdmds(path + 'hFacC')
            self.hfac_w = rdmds(path + 'hFacW')
            self.hfac_s = rdmds(path + 'hFacS')

        # Make 1D versions of latitude and longitude arrays (only useful for regular lat-lon grids)
        if len(self.lon_2d.shape) == 2:
            self.lon_1d = self.lon_2d[0, :]
            self.lat_1d = self.lat_2d[:, 0]
            self.lon_corners_1d = self.lon_corners_2d[0, :]
            self.lat_corners_1d = self.lat_corners_2d[:, 0]
        elif len(self.lon_2d.shape) == 1:
            # xmitgcm output has these variables as 1D already. So make 2D ones.
            self.lon_1d = np.copy(self.lon_2d)
            self.lat_1d = np.copy(self.lat_2d)
            self.lon_corners_1d = np.copy(self.lon_corners_2d)
            self.lat_corners_1d = np.copy(self.lat_corners_2d)
            self.lon_2d, self.lat_2d = np.meshgrid(self.lon_1d, self.lat_1d)
            self.lon_corners_2d, self.lat_corners_2d = np.meshgrid(
                self.lon_corners_1d, self.lat_corners_1d)

        # Decide on longitude range
        if max_lon is None and x_is_lon:
            # Choose range automatically
            if np.amin(self.lon_1d) < 180 and np.amax(self.lon_1d) > 180:
                # Domain crosses 180E, so use the range (0, 360)
                max_lon = 360
            else:
                # Use the range (-180, 180)
                max_lon = 180
            # Do one array to test
            self.lon_1d = fix_lon_range(self.lon_1d, max_lon=max_lon)
            # Make sure it's strictly increasing now
            if not np.all(np.diff(self.lon_1d) > 0):
                print('Error (Grid): Longitude is not strictly increasing either in the range (0, 360) or (-180, 180).')
                sys.exit()
        if max_lon == 360:
            self.split = 0
        elif max_lon == 180:
            self.split = 180
        self.lon_1d = fix_lon_range(self.lon_1d, max_lon=max_lon)
        self.lon_corners_1d = fix_lon_range(self.lon_corners_1d,
                                            max_lon=max_lon)
        self.lon_2d = fix_lon_range(self.lon_2d, max_lon=max_lon)
        self.lon_corners_2d = fix_lon_range(self.lon_corners_2d,
                                            max_lon=max_lon)

        # Save dimensions
        self.nx = self.lon_1d.size
        self.ny = self.lat_1d.size
        self.nz = self.z.size

        # Calculate volume
        self.dV = xy_to_xyz(self.dA, [self.nx, self.ny, self.nz]) * z_to_xyz(
            self.dz, [self.nx, self.ny, self.nz]) * self.hfac

        # Calculate bathymetry and ice shelf draft
        self.bathy = bdry_from_hfac('bathy', self.hfac, self.z_edges)
        self.draft = bdry_from_hfac('draft', self.hfac, self.z_edges)

        # Create masks on the t, u, and v grids
        # Land masks
        self.land_mask = self.build_land_mask(self.hfac)
        self.land_mask_u = self.build_land_mask(self.hfac_w)
        self.land_mask_v = self.build_land_mask(self.hfac_s)
        # Ice shelf masks
        self.ice_mask = self.build_ice_mask(self.hfac)
        self.ice_mask_u = self.build_ice_mask(self.hfac_w)
        self.ice_mask_v = self.build_ice_mask(self.hfac_s)
Example #24
def interpolate_output_to_new_grid(run_dir, last_iter, new_grid,
                                   get_grid_args=dict()):
    """
    Takes outputs U, V, T, S, Eta and interpolates them onto new grid

    Assumes both grids are 3D and Cartesian.

    Results for 2D aren't quite right. Interpolation falls back to nearest
    neighbour where it should be linear.

    Inputs
    ------
    run_dir: str
        Directory containing output
    last_iter: int
        The number in the output filenames that are to be interpolated
        For example, for T.0000036000.data, last_iter is 36000
    new_grid: Grid instance
        Grid from MyGrids.Grid
    get_grid_args: dict
        Arguments to pass to get_grid
        `g_in = get_grid(run_dir, squeeze_hfacs=False, **get_grid_args)`

    Returns
    -------
    all_outputs: dict
        Contains U, V, T, S, and Eta on new grid. Shape of these arrays
        are (Nz × Ny × Nx) or (Ny × Nx) for Eta

    Notes
    -----
    Not set up to work with OBCS (hFacs are not dealt with correctly)
    """
    # filterwarnings('ignore', '.*invalid value encountered in true_divide')
    # filterwarnings('ignore', '.*invalid value encountered in less_equal')

    # Helper function
    def get_coord(grid, coord_str):
        """get_coord(g, 'xc') returns g.xc or None if g has no attribute xc
        Remove extra value for xf and yf, since that's what's needed in the
        outer function"""
        try:
            out = getattr(grid, coord_str)
            if coord_str in ['xf', 'yf']:
                out = out[:-1]
            return out.copy()
        except TypeError:
            return None

    # Input and output grids
    run_dir = os.path.normpath(run_dir) + os.sep
    g_in = get_grid(run_dir, squeeze_hfacs=False, **get_grid_args)
    g_out = new_grid

    coord_sys = dict(
        U=('xf', 'yc', 'zc', 'hFacW'), V=('xc', 'yf', 'zc', 'hFacS'),
        T=('xc', 'yc', 'zc', 'hFacC'), S=('xc', 'yc', 'zc', 'hFacC'),
        Eta=('xc', 'yc', None, 'hFacC'))

    # Preallocate dict that is returned
    all_outputs = {}

    for k, (x, y, z, h) in coord_sys.items():
        threeD = k != 'Eta'

        # Read in all grids for current quantity
        xi, yi, zi, hi = [get_coord(g_in, q) for q in (x, y, z, h)]
        hi = hi[0, ...] if not threeD else hi

        # Read actual output
        fname = run_dir + k + '*'
        quantity = rdmds(fname, last_iter)

        # Convert zeros to NaN for values that aren't water
        # This is really important for T and S, where the average of say 35
        # and 0 is unphysical. For U and V, it's not as important but still
        # worthwhile
        quantity[hi == 0] = np.nan

        # Smooth at each depth level before interpolation. Helps reduce large
        # divergences
        # Update: 6/3/17. Try without smoothing
        gf_opts = dict(sigma=0, keep_nans=False, gf_kwargs=dict(truncate=8))
        if threeD:
            for i, level in enumerate(quantity):
                quantity[i, ...] = nan_gaussian_filter(level, **gf_opts)
        else:
            quantity = nan_gaussian_filter(quantity, **gf_opts)

        # Add a border around output to avoid problems with regions between
        # the centre of the first and last cells in a given dimension and the
        # edge of the domain
        quantity = add_border_values(quantity)

        # Add in associated values to x, y, z
        xp2 = np.r_[xi[0] - g_in.dx[0], xi, xi[-1] + g_in.dx[-1]]
        yp2 = np.r_[yi[0] - g_in.dy[0], yi, yi[-1] + g_in.dy[-1]]
        zp2 = np.r_[zi[0] - g_in.dz[0], zi, zi[-1] + g_in.dz[-1]] if threeD else None

        # Grid associated with added border
        pts_in = (zp2, yp2, xp2) if threeD else (yp2, xp2)

        # Overall interpolation will be combo of linear and nearest neighbour
        # to get the best of both
        interp_input = dict(points=pts_in, values=quantity,
                            bounds_error=False, fill_value=None)
        f_lin = rgi(method='linear', **interp_input)
        f_near = rgi(method='nearest', **interp_input)

        # Output grids
        xo, yo, zo = [get_coord(g_out, q) for q in (x, y, z)]
        if threeD:
            Zo, Yo, Xo = np.meshgrid(zo, yo, xo, indexing='ij')
        else:
            Yo, Xo = np.meshgrid(yo, xo, indexing='ij')

        pts_out = (Zo, Yo, Xo) if threeD else (Yo, Xo)

        # Linear interpolate to start with, then fill in bits near boundaries
        # with nearest neighbour interpolation, then fill everything else
        lin_out = f_lin(pts_out)
        near_out = f_near(pts_out)

        lin_out_is_nan = np.isnan(lin_out)
        lin_out[lin_out_is_nan] = near_out[lin_out_is_nan]

        # Fill any remaining gaps
        lin_out = remove_nans_laterally(lin_out, inverted_dimensions=True)

        # For completely empty levels, copy the level above
        if threeD:
            levels_to_copy = np.where(np.all(np.isnan(lin_out), axis=(1, 2)))[0]
            for level in levels_to_copy:
                lin_out[level, ...] = lin_out[level - 1, ...]

        all_outputs[k] = lin_out

    return all_outputs
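
A sketch of the intended call pattern; the run directory, iteration number, and the target grid constructor arguments are assumptions.

# Hypothetical call: interpolate the final U, V, T, S, Eta of a coarse run onto
# a refined grid built with MyGrids.Grid (dx_fine, dy_fine, dz_fine are placeholders).
fine_grid = Grid(dx_fine, dy_fine, dz_fine)
fields = interpolate_output_to_new_grid('runs/coarse', 36000, fine_grid)
T_fine = fields['T']                      # (Nz, Ny, Nx) on the new grid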
Example #25
File: ttt.py Project: mjlosch/raslyboca
it2 = [
    35795, 35857, 35913, 35975, 36035, 36097, 36157, 36219, 36281, 36341,
    36403, 36463
]
it2 = [np.Inf]
#it2=[72743]
#it2=[72437]
#it2=[61, 119, 181, 241, 303, 363, 425, 487, 547, 609, 669, 731]

#it2=[108497,108581,108674,108764,108857,108947,109040,109133,109223,109316,109406,109499]
#it2=[108497]
#it2=[np.nan]

if len(it2) == 1:
    d, iter, meta = rdmds('diag2Dm', it2[0], returnmeta=True)
else:
    d, iter, meta = rdmds('diag2Dm', it2, returnmeta=True)

mydir = os.getcwd().split(os.sep)[-1]

xg, yg = rdmds('XG'), rdmds('YG')
xc, yc = rdmds('XC'), rdmds('YC')

hf = rdmds('hFacC')

d = np.where(np.abs(d + 999.) < 1e-5, 0., d)

# #m = Basemap(projection='moll',lon_0 = 0., resolution = 'c')
# m = Basemap(projection='cyl',lon_0 = 10., resolution = 'c')
# #m = Basemap(projection='merc',lon_0 = 0., resolution = 'c')
Example #26
import numpy as np
import matplotlib.pyplot as plt
from MITgcmutils import rdmds
from MITgcmutils import llc
from myutils import sq, pcol
import os

myrun = 'runp00'
#myrun = 'runz00'
bdir = '/work/ollie/mlosch/egerwing/llc270'
gdir = '/home/ollie/mlosch/MITgcm/MITgcm/llc270/grid'
xg,yg=rdmds(os.path.join(gdir,'XG')),rdmds(os.path.join(gdir,'YG'))
xc,yc=rdmds(os.path.join(gdir,'XC')),rdmds(os.path.join(gdir,'YC'))

dxg=rdmds(os.path.join(gdir,'DXG'))
dyg=rdmds(os.path.join(gdir,'DYG'))
drf=rdmds(os.path.join(gdir,'DRF'))
rac=rdmds(os.path.join(gdir,'RAC'))
drf=rdmds(os.path.join(gdir,'DRF'))

#rf=rdmds(os.path.join(gdir,'RF'))[:,0,0]
rf=rdmds(os.path.join(bdir,myrun,'RF'))[:,0,0]
if np.sum(rf-np.abs(rf)) < 0:
    usingPCoords = False
else:
    usingPCoords = True

myiter = np.Inf
d3z,myiter,meta = rdmds(os.path.join(bdir,myrun,'diags3D'),myiter,
                        returnmeta=True)
dl = rdmds(os.path.join(bdir,myrun,'diagsLAYERS'),myiter)
Example #27
import cartopy.crs as ccrs
import cartopy.feature as cfeature
get_ipython().run_line_magic('matplotlib', 'inline')

# ### Parameters

# In[3]:

rdir = '/Users/hajsong/Yonsei/Classes/ATM9107/mylectures/MITgcm_global/run/'
dt = 86400

# ### Load the model grid

# In[104]:

XC = rdmds(rdir + 'XC')
YC = rdmds(rdir + 'YC')
RC = rdmds(rdir + 'RC')
XG = rdmds(rdir + 'XG')
hFacC = rdmds(rdir + 'hFacC')
hFacS = rdmds(rdir + 'hFacS')
hFacW = rdmds(rdir + 'hFacW')
Depth = rdmds(rdir + 'Depth')
DRF = rdmds(rdir + 'DRF')
DXG = rdmds(rdir + 'DXG')
DYG = rdmds(rdir + 'DYG')
RAC = rdmds(rdir + 'RAC')

RC = np.squeeze(RC)
DRF = np.squeeze(DRF)
Example #28
    outname = args.outfile
    writefield(outname, v_new)

    #    fig = plt.figure(figsize=(15,6))
    #    fig.clf()
    #    pcol(sq(llc.flat(v_new[5,:,:])),cmap='viridis')
    ##    pcol(llc.flat(t_new2[6,:,:]),cmap='viridis')
    #    plt.xlabel('Latitude [$\degree$]', fontsize=14)
    #    plt.ylabel('Longitude [$\degree$]', fontsize=14)
    #    cbar = plt.colorbar()
    #    plt.show()

    # Plot output (before and after writing) #{{{1
    #    plot_interpolated_data(outname,v_new)

    fid = open(outname, 'r')
    test = np.fromfile(fid, np.float32)
    fid.close()

    test.byteswap(True)
    grid = rdmds(args.gridpath + 'XC', astype=None)

    test = np.reshape(test, (12, len(grid[:, 0]), len(grid[0, :])))
    area = rdmds(args.gridpath + 'RAC', astype=None)

    print('VOLUME AFTER WRITING:', np.sum(test * area[None, :, :]))
#    print 'READ:',np.sum(test[7,:,:]*area)
#    print 'READ:',np.sum(test[8,:,:]*area)

#}}}1
Example #29
def plot_domain_mesh(ua_mesh_file='ua_run/NewMeshFile.mat',
                     mit_file='output/197901/MITgcm/output.nc',
                     grid_nc=None,
                     grid_dir=None,
                     circumpolar=False,
                     pster=False,
                     fig_name=None,
                     figsize=(10, 6)):

    if grid_dir is not None:
        grid_dir = real_dir(grid_dir)

    # Read Ua mesh
    x_ua, y_ua, connectivity = read_ua_mesh(ua_mesh_file)

    # Read MIT grid
    if grid_nc is not None:
        # grid.glob.nc file; slightly different conventions
        lon = read_netcdf(grid_nc, 'X')
        lat = read_netcdf(grid_nc, 'Y')
        if circumpolar:
            lon = wrap_periodic(lon, is_lon=True)[1:]
            j_max = np.where(lat > -60)[0][0]
            lat = lat[:j_max]
        lon_2d, lat_2d = np.meshgrid(lon, lat)
        hfac = read_netcdf(grid_nc, 'HFacC')
        if circumpolar:
            hfac = wrap_periodic(hfac)[:, :j_max, 1:]
    elif grid_dir is not None:
        # Files from Jan
        lon_2d = rdmds(grid_dir + 'XC')
        lat_2d = rdmds(grid_dir + 'YC')
        hfac = rdmds(grid_dir + 'hFacC')
    else:
        grid = Grid(mit_file)
        lon_2d, lat_2d = grid.get_lon_lat()
        hfac = grid.get_hfac()
    if pster:
        [x_mit, y_mit] = [lon_2d, lat_2d]
    else:
        x_mit, y_mit = polar_stereo(lon_2d, lat_2d)
    # Get ocean mask to plot
    land_mask = np.sum(hfac, axis=0) == 0
    ocean_mask = np.ma.masked_where(land_mask, np.invert(land_mask))

    # Find bounds
    xmin, xmax = choose_range(x_ua, x2=x_mit, pad=0)
    ymin, ymax = choose_range(y_ua, x2=y_mit, pad=0)

    fig, ax = plt.subplots(figsize=figsize)
    fig.patch.set_facecolor('white')
    # Plot ocean cell boundaries
    if grid_nc is not None or grid_dir is not None:
        # Don't worry about the slight offset, it doesn't matter enough
        [x_plot, y_plot, mask_plot] = [x_mit, y_mit, ocean_mask]
    else:
        x_plot, y_plot, mask_plot = cell_boundaries(ocean_mask,
                                                    grid,
                                                    pster=True)
    ax.pcolor(x_plot,
              y_plot,
              mask_plot,
              facecolor='none',
              edgecolor='blue',
              alpha=0.5)
    # Shade the ice sheet in red
    ax.triplot(x_ua, y_ua, connectivity, color='red', alpha=0.5)
    # Set axes limits
    latlon_axes(ax,
                x_ua,
                y_ua,
                xmin=xmin,
                xmax=xmax,
                ymin=ymin,
                ymax=ymax,
                pster=True)
    ax.axis('equal')
    # Turn off box
    ax.axis('off')
    finished_plot(fig, fig_name=fig_name, dpi=300)
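
A usage sketch; the paths simply echo the function defaults and the output filename is a placeholder.

# Hypothetical call: overlay the Ua mesh (red) on the MITgcm ocean-cell outlines
# (blue) and save the figure.
plot_domain_mesh(ua_mesh_file='ua_run/NewMeshFile.mat',
                 mit_file='output/197901/MITgcm/output.nc',
                 fig_name='domain_mesh.png')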
Example #30
    y = np.copy(x)
    for i in ii:
        #        print(i,y[i],y[i+1],2*y[i+1]-y[i])
        y[i + 1:] = y[i + 1:] - y[i + 1] + y[i]

    return y


mondt, m2zUnits, rhoConst, gravityz, startdate = get_parms(zfiles[0])
mondt, m2pUnits, rhoConst, gravityp, startdate = get_parms(pfiles[0])
refdate = datetime.datetime(int(startdate[:4]), int(startdate[4:6]),
                            int(startdate[6:8]), 0, 0)
timesecz, etaz = get_output(zfiles, 'dynstat_eta_mean')
timesecp, etap = get_output(pfiles, 'dynstat_eta_mean')

rac = rdmds(os.path.join(zdir, 'RAC'))
hfc = rdmds(os.path.join(zdir, 'hFacC'), lev=[0])
globalArea = (rac * hfc).sum()

grac = rac / globalArea
files = glob(os.path.join(zdir, 'diags2D.*.meta'))
itrs = []
for f in files:
    itrs.append(int(f.split('.')[-2]))

itrs = np.sort(itrs)

zdiag = os.path.join(zdir, 'diags2D')
pdiag = os.path.join(pdir, 'diags2D')
etanz = []
etanp = []
Example #31
def get_grid(run_dir, grid_filename=None, x0=0, y0=0, hFacs=True,
             squeeze_hfacs=True, xslice=np.s_[0:], yslice=np.s_[0:],
             zslice=np.s_[0:]):
    """Create a Grid object from model's output grid files

    Inputs
    ------
    run_dir: str
        Full path to model's run directory without trailing slash
    grid_filename: str (optional)
        Name of grid file if it is not grid.t001.nc or grid.nc
    x0, y0: floats
        Distances from origin
    hFacs: bool
        Whether to read in hFacs
    squeeze_hfacs: bool
        Whether to remove singleton dimensions from hfacs
    xslice, yslice, zslice: slice objects
        Allow reading of part of grid
        Not implemented for mds grid

    Implementation for netcdf grid file could use work
    """

    def hfac_mnc(W_S_or_C, squeeze_hfacs=squeeze_hfacs,
                 x=xslice, y=yslice, z=zslice):
        # Use the per-call slice arguments (x, y, z) so the staggered Xp1/Yp1
        # dimensions receive the shifted slices passed in by the caller,
        # and apply the Z slice only once.
        tmp_hfac = g['HFac' + W_S_or_C]
        if W_S_or_C == 'W':
            tmp_hfac = tmp_hfac.isel(Xp1=x)
        else:
            tmp_hfac = tmp_hfac.isel(X=x)
        if W_S_or_C == 'S':
            tmp_hfac = tmp_hfac.isel(Yp1=y)
        else:
            tmp_hfac = tmp_hfac.isel(Y=y)

        tmp_hfac = tmp_hfac.isel(Z=z)

        if squeeze_hfacs:
            return tmp_hfac.squeeze().data
        else:
            return tmp_hfac.data

    try:
        # MDS approach
        dx = rdmds(run_dir + 'DXG*')[0, xslice]
        dy = rdmds(run_dir + 'DYG*')[yslice, 0]
        dz = rdmds(run_dir + 'DRF*')[:, 0]
        added_attrs = dict(depth=rdmds(run_dir + 'Depth*').squeeze())

        if hFacs:
            for k in 'SWC':
                tmp_hfac = rdmds(run_dir + 'hFac' + k)
                if squeeze_hfacs:
                    tmp_hfac = tmp_hfac.squeeze()
                added_attrs['hFac' + k] = tmp_hfac

        g = Grid(dx, dy, dz, x0=x0, y0=y0, added_attrs=added_attrs)
    except IOError:
        # NetCDF approach
        xp1_slice = shift_slice(xslice, stop=1)
        yp1_slice = shift_slice(yslice, stop=1)

        if grid_filename is not None:
            pass
        elif os.path.exists(run_dir + '/grid.t001.nc'):
            grid_filename = '/grid.t001.nc'
        elif os.path.exists(run_dir + '/grid.nc'):
            grid_filename = '/grid.nc'
        else:
            err_msg = ('\nGrid file appears to not exist.\n'
                       'Are you using arguments correctly: '
                       'run_dir, grid_filename\n'
                       'Tried:\n'
                       'grid.t001.nc\n'
                       'grid.nc\n'
                       'and grid_filename if given as argument')
            raise OSError(err_msg)
        g = open_dataset(run_dir + '/' + grid_filename)
        depth = -g.R_low.isel(X=xslice, Y=yslice).squeeze().data

        g = g.isel(X=xslice, Y=yslice, Z=zslice)

        dx, dy, dz = [g[dim].data for dim in ['dxF', 'dyF', 'drF']]
        dx, dy = dx[0, xslice], dy[:, 0]
        added_attrs = dict(depth=depth)
        if hFacs:
            # I think this needs work
            try:
                added_attrs['hFacW'] = hfac_mnc('W', x=xp1_slice, z=zslice)
            except KeyError:
                pass

            try:
                added_attrs['hFacS'] = hfac_mnc('S', y=yp1_slice, z=zslice)
            except KeyError:
                pass

            try:
                added_attrs['hFacC'] = hfac_mnc('C', z=zslice)
            except KeyError:
                pass

        g = Grid(dx, dy, dz, x0=x0, y0=y0, added_attrs=added_attrs)

    return g
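
A minimal sketch of calling get_grid; the run directory is a placeholder and, as in the MDS branch above, it needs a trailing slash.

# Hypothetical call: build a Grid object from the MDS grid files of a run,
# keeping the singleton dimensions on the hFac arrays.
g = get_grid('runs/RunFr3900/', squeeze_hfacs=False)
print(g.dx.shape, g.dz.shape, g.hFacC.shape)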
Example #32
def move_points(parameter, infile, grid_path):  #{{{2
    '''move variable points to closest points
       on MITgcm llc grid
    '''

    print('############### Move points to MITgcm grid ##############')
    data = Dataset(infile, 'r')
    variable = data.variables[parameter][:]
    lat = data.variables['yc'][:]
    lon = data.variables['xc'][:]
    area = data.variables['area'][:]

    volume_in = variable * area[
        None, :, :] / 1000.  # kg/s/m^2 to m^3/s (divided by density)

    print('VOLUME IN (m^3/s)', np.sum(volume_in))

    x_mit = rdmds(grid_path + 'XC', astype=None)
    y_mit = rdmds(grid_path + 'YC', astype=None)
    area2 = rdmds(grid_path + 'RAC', astype=None)
    coordinates = np.vstack((y_mit.flat, x_mit.flat))

    var_new = np.zeros((len(variable[:, 0,
                                     0]), len(x_mit[:, 0]), len(x_mit[0, :])))

    for t in np.arange(0, len(var_new[:, 0, 0])):
        index_input = np.transpose(np.nonzero(volume_in[t, :, :]))
        #        print 'timestep,number of non-zero points:',t,len(index_input)

        #        print np.vstack((lat[index_input[:,0],index_input[:,1]],lon[index_input[:,0],index_input[:,1]])).shape,np.transpose(coordinates).shape
        index = closest_point(
            np.vstack((lat[index_input[:, 0],
                           index_input[:, 1]], lon[index_input[:, 0],
                                                   index_input[:, 1]])).T,
            coordinates.T)
        #        print index.shape
        #        print index[0]

        indices = np.unravel_index(index, (len(x_mit[:, 0]), len(x_mit[0, :])))
        x_ind = indices[0]
        y_ind = indices[1]
        #        print 'x',x_ind.shape,x_ind[0]

        for ind in np.arange(len(x_ind)):
            var_new[t, x_ind[ind], y_ind[ind]] = var_new[
                t, x_ind[ind], y_ind[ind]] + volume_in[t, index_input[ind, 0],
                                                       index_input[ind, 1]]
#            print 'VAR2',var_new[t,x_ind[ind],y_ind[ind]],volume_in[t,index_input[ind,0],index_input[ind,1]]

#        for point in index_input:
#            i = point[0]
#            j = point[1]
##            print point, lat[i,j], lon[i,j]
#
#            index = closest_point([[lat[i,j], lon[i,j]]],coordinates.T)
#            print index
#
#            indices = np.unravel_index(index,(len(x_mit[:,0]),len(x_mit[0,:])))
#            print indices
#
#            var_new[t,indices[0],indices[1]] = var_new[t,indices[0],indices[1]]+volume_in[t,i,j]
#            print 'VAR2',var_new[t,indices[0],indices[1]],volume_in[t,i,j]

#    print 'Old and new sum',np.sum(variable),np.sum(var_new)

# convert from volume (m^3/s) to m/s
    area2[area2 == 0.0] = 1e-33
    var_new = var_new / area2[None, :, :]

    data.close()

    print('VOLUME OUT', np.sum(var_new * area2[None, :, :]))

    return var_new
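
A usage sketch with placeholder arguments; the NetCDF variable name, input file, and llc grid directory are assumptions.

# Hypothetical call: bin runoff (kg/m^2/s on its native grid) onto the nearest
# MITgcm cells and return it in m/s on the llc grid.
runoff_mit = move_points('runoff', 'runoff_monthly.nc', '/path/to/llc270_grid/')
print(runoff_mit.shape)                  # (ntime, ny, nx)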