Example #1
 def load_dims(self):
     dms = netcdf.fdim(self.name)
     for k in dms.keys():
         if k in ("ocean_time", "time", "scrum_time"):
             setattr(self, "TIME", dms[k])
         else:
             setattr(self, k.upper(), dms[k])
Example #2
 def load_dims(self):
   dms=netcdf.fdim(self.name)
   for k in dms.keys():
     if k in ('ocean_time','time','scrum_time'):
       setattr(self,'TIME',dms[k])
     else:
       setattr(self,k.upper(),dms[k])
Example #3
def gen_frc(fname,grd,tag='_ccmp'):

  nc=netcdf.Pync(fname,'t',version=3)

  # dims:
  grd_dims=netcdf.fdim(grd)
  gdims='xi_rho','xi_u','xi_v','eta_rho','eta_u','eta_v'
  for name in gdims: nc.add_dim(name,grd_dims[name])
  # time dim:
  nc.add_dim('wind_time',0)

  v=nc.add_var('wind_time',np.dtype('d'),('wind_time',))
  v.add_att('long_name','wind forcing time')
  tunits='days since 1970-01-01'
  v.add_att('units',tunits)

  v=nc.add_var('Uwind'+tag,np.dtype('d'),('wind_time','eta_rho', 'xi_rho'))
  v.add_att('long_name','u-wind')
  v.add_att('units','metre second-1')
  v.add_att('time','time')

  v=nc.add_var('Vwind'+tag,np.dtype('d'),('wind_time','eta_rho', 'xi_rho'))
  v.add_att('long_name','v-wind')
  v.add_att('units','metre second-1')
  v.add_att('time','time')

  # Global Attributes:
  nc.add_att('type','Wind forcing file')
  nc.add_att('title','CCMP wind')
  nc.add_att('grd_file',os.path.realpath(grd))
  from time import ctime
  nc.add_att('history','ROMS  wind  file, '+ctime())
  nc.add_att('author',cb.username()[1]+', '+cb.machinename())

  nc.close()
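
A minimal usage sketch for gen_frc as defined above; the file names are hypothetical placeholders, not values from the original source.

# Hypothetical call (paths are assumptions): create an empty CCMP wind forcing
# file whose horizontal dimensions are copied from the ROMS grid file; Uwind_ccmp
# and Vwind_ccmp records can then be appended along the unlimited wind_time dim.
gen_frc('frc_ccmp_wind.nc', 'roms_grd.nc', tag='_ccmp')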
Example #4
 def load_dims(self):
   dms=netcdf.fdim(self.nc)
   for k in dms:
     if k in ('ocean_time','time','scrum_time'):
       setattr(self,'TIME',dms[k])
     else:
       setattr(self,k.upper(),dms[k])
Example #5
  def add_horiz_dims(self,nc):
    '''
    Adds horizontal dimensions to netcdf file

    '''
    grd_dims=netcdf.fdim(self.grid)
    gdims='xi_rho','xi_u','xi_v','eta_rho','eta_u','eta_v'
    for name in gdims: nc.add_dim(name,grd_dims[name])
Example #6
def roms2clmbry(f0,clm,grd,sparams,**kargs):
  '''
  kargs:
  bry, bry file to create (False by default, no bry is created)
  times, all by default, but can be a range of times (inds or datetimes)
  tunits, 'days since 1970-01-01'

  ctitle, title for clm file (default used)
  btitle, title for bry file (default used)
  obc, bry open boundaries (default used)
  quiet, False

  other kargs of roms2roms:
    grd0
    sparams0
    tunits0
    quiet
    Z, intermediate interpolation step depths, automatically
       generated by default
    ij, interpolations along *i or j
  '''

  bry    = kargs.get('bry',    False)
  times  = kargs.get('times',  'all')
  tunits = kargs.get('tunits', 'days since 1970-01-01')
  ctitle = kargs.get('ctitle', False)
  btitle = kargs.get('btitle', False)
  obc    = kargs.get('obc',    False)
  quiet  = kargs.get('quiet',  False)

  create=True

  Cargs={}
  if ctitle: Cargs['title']  = ctitle
  Cargs['tunits'] = tunits
  Cargs['create'] = create

  Bargs={}
  if btitle: Bargs['title'] = btitle
  if obc:    Bargs['obc']   = obc
  Bargs['tunits'] = tunits
  Bargs['create'] = create


  if times=='all':
    times=range(netcdf.fdim(f0,'ocean_time'))

  for tind in times:
    data,datab=roms2roms(f0,grd,sparams,tind,**kargs)

    prognostic.make_clm(clm,data,grd,sparams,quiet=quiet,**Cargs)
    if bry:
      prognostic.make_bry(bry,datab,grd,sparams,quiet=quiet,**Bargs)

    if create:
      Cargs['create']=False
      Bargs['create']=False
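
A hedged usage sketch for roms2clmbry based on the kargs listed in its docstring; the file names and the s-parameters tuple are placeholders.

# Hypothetical call (paths and sparams are assumptions): build climatology and
# boundary files for a child grid from a parent ROMS history file.
sparams = 5.0, 0.4, 10.0, 30   # placeholder vertical s-coordinate parameters
roms2clmbry('parent_his.nc', 'child_clm.nc', 'child_grd.nc', sparams,
            bry='child_bry.nc',             # also create a bry file
            times=range(10),                # first 10 time records only
            tunits='days since 1970-01-01',
            quiet=False)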
Example #7
def make_blk_wrf(wrfpath, grd, bulk, date0=False, date1=False, **kargs):
    """
  see make_blk_interim

  """

    quiet = kargs.get("quiet", 0)
    create = kargs.get("create", 1)
    model = kargs.get("model", "roms")  # or roms-agrif
    wrffiles = kargs.get("wrffiles", "wrfout*")
    dt = kargs.get("dt", 6)

    data = load_blkdata_wrf(wrfpath, wrffiles, date0, date1, quiet)

    if not len(data):
        return

    q = gennc.GenBlk(bulk, grd, **kargs)
    if create:
        # about original data, run data2romsblk once to test for x_original:
        tmp = data2romsblk(data[data.keys()[0]], grd, **kargs)
        if "x_original" in tmp.keys():
            original = tmp["x_original"].shape
        else:
            original = False

        q.create(model, original)

    for d in data.keys():

        # be sure time increases. Note that in load_blkdata_wrf
        # we checked if time increases in the dataset... not if dates are higher
        # than previous dates in the file
        ntimes = netcdf.fdim(bulk, "time")
        if ntimes:
            tin = netcdf.nctime(bulk, "time")

            if tin.size and (d - tin[-1]) < datetime.timedelta(hours=dt - 0.1):
                print "-> not including %s" % d.isoformat()
                continue

        if model == "roms":
            if not quiet:
                print "  converting units:",
            conv_units(data[d], model, quiet)

        D = data2romsblk(data[d], grd, **kargs)
        D["date"] = d

        if not quiet:
            print "  =>filling date=%s" % d.isoformat(" ")
        q.fill(D, quiet=quiet)
Example #8
def make_blk_wrf(wrfpath,grd,bulk,date0=False,date1=False,**kargs):
  '''
  see make_blk_interim

  '''

  quiet  = kargs.get('quiet',0)
  create = kargs.get('create',1)
  model  = kargs.get('model','roms') # or roms-agrif
  wrffiles=kargs.get('wrffiles','wrfout*')
  dt     = kargs.get('dt',6)
  proj   = kargs.get('proj','auto')

  data=load_blkdata_wrf(wrfpath,wrffiles,date0,date1,quiet)

  if not len(data): return

  g=roms.Grid(grd)
  if proj=='auto': kargs['proj']=g.get_projection()

  q=gennc.GenBlk(bulk,grd,**kargs)
  if create:
    # about original data, run data2romsblk once to test for x_original:
    tmp=data2romsblk(data[list(data.keys())[0]],g,**kargs)
    if 'x_original' in tmp: original=tmp['x_original'].shape
    else: original=False

    q.create(model,original)


  for d in data:

    # be sure time increases. Note that in load_blkdata_wrf
    # we checked if time increases in the dataset... not if dates are higher
    # than previous dates in the file
    ntimes=netcdf.fdim(bulk,'time')
    if ntimes:
      tin=netcdf.nctime(bulk,'time')

      if tin.size and (d-tin[-1])<datetime.timedelta(hours=dt-0.1):
        print('-> not including %s'%d.isoformat())
        continue

    if model=='roms':
       if not quiet: print('  converting units:')
       conv_units(data[d],model,quiet)

    D=data2romsblk(data[d],g,**kargs)
    D['date']=d

    if not quiet: print('  =>filling date=%s' % d.isoformat(' '))
    q.fill(D,quiet=quiet)
Example #9
def make_blk_wrf(wrfpath, grd, bulk, date0=False, date1=False, **kargs):
    '''
  see make_blk_interim

  '''

    quiet = kargs.get('quiet', 0)
    create = kargs.get('create', 1)
    model = kargs.get('model', 'roms')  # or roms-agrif
    wrffiles = kargs.get('wrffiles', 'wrfout*')
    dt = kargs.get('dt', 6)
    proj = kargs.get('proj', 'auto')

    data = load_blkdata_wrf(wrfpath, wrffiles, date0, date1, quiet)

    if not len(data): return

    g = roms.Grid(grd)
    if proj == 'auto': kargs['proj'] = g.get_projection()

    q = gennc.GenBlk(bulk, grd, **kargs)
    if create:
        # about original data, run data2romsblk once to test for x_original:
        tmp = data2romsblk(data[list(data.keys())[0]], g, **kargs)
        if 'x_original' in tmp: original = tmp['x_original'].shape
        else: original = False

        q.create(model, original)

    for d in data:

        # be sure time increases. Note that in load_blkdata_wrf
        # we checked if time increases in the dataset... not if dates are higher
        # than previous dates in the file
        ntimes = netcdf.fdim(bulk, 'time')
        if ntimes:
            tin = netcdf.nctime(bulk, 'time')

            if tin.size and (d - tin[-1]) < datetime.timedelta(hours=dt - 0.1):
                print('-> not including %s' % d.isoformat())
                continue

        if model == 'roms':
            if not quiet: print('  converting units:')
            conv_units(data[d], model, quiet)

        D = data2romsblk(data[d], g, **kargs)
        D['date'] = d

        if not quiet: print('  =>filling date=%s' % d.isoformat(' '))
        q.fill(D, quiet=quiet)
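
A short usage sketch for make_blk_wrf as defined above; the WRF output path and file names are hypothetical.

# Hypothetical call (paths are assumptions): build a ROMS bulk forcing file
# from 6-hourly WRF output between two dates.
import datetime
make_blk_wrf('/path/to/wrfout/', 'roms_grd.nc', 'roms_blk_wrf.nc',
             date0=datetime.datetime(2012, 1, 1),
             date1=datetime.datetime(2012, 2, 1),
             wrffiles='wrfout*', dt=6, quiet=0)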
Example #10
def his2gnome(fname,
              his,
              grd=False,
              nomask=False,
              gshhsMask=True,
              xylim=False,
              dates=False,
              ij=(1, 1)):
    '''
  Creates GNOME wind file
  Ex:
    his2gnome(out,his,grd,dates=dates,ij=(2,2))

  if gshhsMask, the high-res mask file mask_gshhs.npy will be created on first use.
  Mask is based on high (h) resolution gshhs data which must be available (env variable
  GSHHS_MASK must be set). 
  '''

    if not grd: grd = his
    deta, dxi = ij

    dims = netcdf.fdim(his)
    xi, eta = dims['xi_rho'], dims['eta_rho']
    xi0, eta0 = xi, eta

    nc0 = netcdf.ncopen(his)
    time = netcdf.nctime(nc0, 'ocean_time')
    # for roms agrif:
    #t=netcdf.use(nc0,'scrum_time')
    #time=netcdf.num2date(t,'seconds since %d-01-01' % year0)

    x0 = netcdf.use(grd, 'lon_rho')
    y0 = netcdf.use(grd, 'lat_rho')
    ang = netcdf.use(grd, 'angle')

    if not xylim is False:
        xlim = xylim[:2]
        ylim = xylim[2:]
        i1, i2, j1, j2 = calc.ij_limits(x0, y0, xlim, ylim)
        print(i1, i2, j1, j2)
        xi = i2 - i1
        eta = j2 - j1
    else:
        i1, i2 = 0, xi
        j1, j2 = 0, eta

    XI = '%d:%d:%d' % (i1, i2, dxi)
    ETA = '%d:%d:%d' % (j1, j2, deta)

    xi = len(range(i1, i2, dxi))
    eta = len(range(j1, j2, deta))
    # create file:
    create_uv(fname, xi, eta)

    nc = netcdf.ncopen(fname, 'a')
    for v0, v in (('lon_rho', 'lon'), ('lat_rho', 'lat'),
                  ('mask_rho', 'mask'), ('h', 'depth')):
        print('filling %s with %s' % (v, v0))
        nc.vars[v][:] = netcdf.use(grd, v0, xi_rho=XI, eta_rho=ETA)

    if nomask:
        print('NO MASK !!!')
        nc.vars['mask'][:] = 1

    if gshhsMask:
        try:
            mask = np.load('mask_gshhs.npy')
        except:
            mask = 1 + 0 * netcdf.use(nc0, 'mask_rho', xi_rho=XI, eta_rho=ETA)
            mask = mask.astype('bool')
            x = netcdf.use(grd, 'lon_rho', xi_rho=XI, eta_rho=ETA)
            y = netcdf.use(grd, 'lat_rho', xi_rho=XI, eta_rho=ETA)

            from okean import gshhs
            axis = x.min(), x.max(), y.min(), y.max()
            g = gshhs.gshhs(axis,
                            resolution='h',
                            area_thresh=0.,
                            max_level=2,
                            clip=True)
            for lon, lat, level in zip(g.lon, g.lat, g.level):
                if level == 1:  # land
                    print('mask ', lon.shape)
                    i = calc.inpolygon(x, y, lon, lat)
                    mask = mask & ~i

            mask.dump('mask_gshhs.npy')

        nc.vars['mask'][:] = mask

    x = x0[j1:j2:deta, i1:i2:dxi]
    y = y0[j1:j2:deta, i1:i2:dxi]
    ang = ang[j1:j2:deta, i1:i2:dxi]

    n = -1
    for it in range(len(time)):
        if not dates is False:
            d0, d1 = dates
            if time[it] < d0 or time[it] >= d1: continue

        n += 1
        U = np.zeros((eta0, xi0), 'f')
        V = np.zeros((eta0, xi0), 'f')

        nc.vars['time'][n] = netcdf.date2num(time[it], tunits)

        # for roms agrif:
        #u=netcdf.use(nc0,'u',time=it,s_rho=-1)
        #v=netcdf.use(nc0,'v',time=it,s_rho=-1)
        u = netcdf.use(nc0, 'u', ocean_time=it, s_rho=-1)
        v = netcdf.use(nc0, 'v', ocean_time=it, s_rho=-1)

        # mask extrap:
        print('mask extrap...')

        u = calc.mask_extrap(x0, y0, np.ma.masked_where(u == 0, u))
        v = calc.mask_extrap(x0, y0, np.ma.masked_where(v == 0, v))

        U[:, 1:-1] = 0.5 * (u[:, :-1] + u[:, 1:])
        U[:, 0] = u[:, 0]
        U[:, -1] = u[:, -1]

        V[1:-1, :] = 0.5 * (v[:-1, :] + v[1:, :])
        V[0, :] = v[0, :]
        V[-1, :] = v[-1, :]

        U = U[j1:j2, i1:i2]
        V = V[j1:j2, i1:i2]

        U = U[j1:j2:deta, i1:i2:dxi]
        V = V[j1:j2:deta, i1:i2:dxi]

        # rotate uv:
        print('rotating ...')
        U, V = calc.rot2d(U, V, -ang)

        print('filling uv', n, time[it])
        nc.vars['u'][n, ...] = U
        nc.vars['v'][n, ...] = V

    nc.close()
    nc0.close()
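
A usage sketch mirroring the 'Ex:' line of the docstring above; file names and dates are placeholders.

# Hypothetical call (paths and dates are assumptions): export surface currents
# between two dates, keeping every 2nd grid point in each direction.
import datetime
dates = datetime.datetime(2012, 1, 1), datetime.datetime(2012, 1, 10)
his2gnome('gnome_currents.nc', 'roms_his.nc', grd='roms_grd.nc',
          dates=dates, ij=(2, 2))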
Example #11
def narr_file_data(fname, xlim=False, ylim=False, quiet=False):
    '''
  Returns bulk data from one NARR file
  '''

    out = {}

    # loading grid:
    if 0:
        if not quiet: print ' reading lon,lat from file %s' % grd
        nc = netcdf.ncopen(grd)
        x = nc.vars['East_longitude_0-360'][0, ...] - 360.
        y = nc.vars['Latitude_-90_to_+90'][0, ...]  # time always 1 !!
        nc.close()
    else:
        if not quiet: print ' reading lon,lat from file %s' % grdTxt
        x, y = load_grid()
        #x=x-360.
        x = -x

    ny, nx = x.shape

    if (xlim, ylim) == (False, False): i0, i1, j0, j1 = 0, nx, 0, ny
    else:
        i0, i1, j0, j1 = calc.ij_limits(x, y, xlim, ylim, margin=0)
        x = x[j0:j1, i0:i1]
        y = y[j0:j1, i0:i1]

    try:
        nc = netcdf.ncopen(fname)
    except:
        return {}

    xx = str(i0) + ':' + str(i1)
    yy = str(j0) + ':' + str(j1)

    tdim = netcdf.fdim(nc, 'time1')
    if tdim != 1: print 'WARNING: tdim !=1  !!!!!!'

    # T surface [K->C]
    if not quiet: print ' --> T air'
    tair = netcdf.use(nc, 'Temperature_surface', time1=0, x=xx, y=yy)
    tair = tair - 273.15
    out['tair'] = cb.Data(x, y, tair, 'C')

    # R humidity [% -> 0--1]
    if not quiet: print ' --> R humidity'
    rhum = netcdf.use(nc, 'Relative_humidity', time1=0, x=xx, y=yy)
    out['rhum'] = cb.Data(x, y, rhum / 100., '0--1')

    # surface pressure [Pa]
    if not quiet: print ' --> Surface pressure'
    pres = netcdf.use(nc, 'Pressure_surface', time1=0, x=xx, y=yy)
    out['pres'] = cb.Data(x, y, pres, 'Pa')

    # P rate [kg m-2 s-1 -> cm/d]
    if not quiet: print ' --> P rate'
    prate = netcdf.use(nc, 'Precipitation_rate', time1=0, x=xx, y=yy)
    prate = prate * 86400 * 100 / 1000.
    out['prate'] = cb.Data(x, y, prate, 'cm/d')

    # Net shortwave flux  [ W m-2]
    if not quiet: print ' --> Net shortwave flux'
    if not quiet: print '       SW down'
    sw_down = netcdf.use(nc,
                         'Downward_shortwave_radiation_flux',
                         time1=0,
                         x=xx,
                         y=yy)
    if not quiet: print '       SW up'
    sw_up = netcdf.use(nc,
                       'Upward_short_wave_radiation_flux_surface',
                       time1=0,
                       x=xx,
                       y=yy)
    sw_net = sw_down - sw_up
    out['radsw'] = cb.Data(x, y, sw_net, 'W m-2', info='positive downward')

    # Net longwave flux  [W/m^2]
    if not quiet: print ' --> Net longwave flux'
    if not quiet: print '       LW down'
    lw_down = netcdf.use(nc,
                         'Downward_longwave_radiation_flux',
                         time1=0,
                         x=xx,
                         y=yy)
    if not quiet: print '       LW up'
    lw_up = netcdf.use(nc,
                       'Upward_long_wave_radiation_flux_surface',
                       time1=0,
                       x=xx,
                       y=yy)
    lw_net = lw_down - lw_up
    out['radlw'] = cb.Data(x, y, -lw_net, 'W m-2', info='positive upward')

    # downward lw:
    out['dlwrf'] = cb.Data(x,
                           y,
                           -lw_down,
                           'W m-2',
                           info='negative... downward')

    # U and V wind speed 10m
    if not quiet: print ' --> U and V wind'
    # vertical dim is height_above_ground1: 10 and 30 m
    uwnd = netcdf.use(nc,
                      'u_wind_height_above_ground',
                      height_above_ground1=0,
                      time1=0,
                      x=xx,
                      y=yy)
    vwnd = netcdf.use(nc,
                      'v_wind_height_above_ground',
                      height_above_ground1=0,
                      time1=0,
                      x=xx,
                      y=yy)

    if not quiet: print ' --> calc wind speed and stress'
    speed = np.sqrt(uwnd**2 + vwnd**2)
    taux, tauy = air_sea.wind_stress(uwnd, vwnd)

    out['wspd'] = cb.Data(x, y, speed, 'm s-1')
    out['uwnd'] = cb.Data(x, y, uwnd, 'm s-1')
    out['vwnd'] = cb.Data(x, y, vwnd, 'm s-1')
    out['sustr'] = cb.Data(x, y, taux, 'Pa')
    out['svstr'] = cb.Data(x, y, tauy, 'Pa')

    # Cloud cover [0--100 --> 0--1]:
    if not quiet: print ' --> Cloud cover'
    clouds = netcdf.use(nc, 'Total_cloud_cover', time1=0, x=xx, y=yy)
    out['cloud'] = cb.Data(x, y, clouds / 100., 'fraction (0--1)')

    nc.close()
    return out
Example #12
def load_data(f, quiet=0, **kargs):
    '''
  Loads prognostic variables (temp,salt,u,v,ubar,vbar,zeta) from
  netcdf file or opendap server. Also loads lon,lat, depth, and time.

  If f is a file, it must include the 1d variables lon,lat and depth;
  the 2d variable ssh (zeta) and the 3d variables temp, salt, u and v;
  i.e., each file must contain data for a single time. The file must also
  contain the variable time.

  If f is an opendap address, it must also contain all these variables
  or the ones defined in the input karg settings (DataAccess object)

  To deal with the case of variables in different files/opendap addresses,
  f can also be a dictionary with keys the variables and values the files
  or opendap addresses. In this case, the keys must be:
    - temp
    - salt
    - u
    - v
    - ssh
    - misc, for lon, lat, depth, time and dimensions
      or xy for lon,lat and x,ydim; z for depth and zdim, time for time

  The output data (dict) is suitable to be used by data2roms, which
  interpolates the data to ROMS 3d grid.
  Also outputs an error/status string.

  kargs:
    inds, dict with dimension names/values (where time dim can be integer
          or datetime)
    settings, DataAccess object
    extra, extra misc vars to load [(outKey0,fileVar0),...]
    t_units, units of variable time; by default the units attribute is used
  '''

    sett = DataAccess()
    inds = {}
    extra = []
    t_units = []
    if 'settings' in kargs.keys(): sett = kargs['settings']
    if 'inds' in kargs.keys(): inds = kargs['inds']
    if 'extra' in kargs.keys(): extra = kargs['extra']
    if 't_units' in kargs.keys(): t_units = kargs['t_units']

    res = {}
    msg = ''

    if not isinstance(f, dict) and not f.startswith('http') and not isfile(f):
        msg = 'file not found %s' % f
        if not quiet: print msg
        return res, msg

    # load nc files:
    if not isinstance(f, dict):
        f = {'temp': f, 'salt': f, 'u': f, 'v': f, 'ssh': f, 'misc': f}

    if not f.has_key('xy'): f['xy'] = f['misc']
    if not f.has_key('z'): f['z'] = f['misc']
    if not f.has_key('time'): f['time'] = f['misc']

    filesUsed = []
    ncUsed = []
    for i in f.keys():
        if not quiet: print '(%s) loading from %s' % (i.ljust(5), f[i])

        if i == 'temp':
            if f[i] in filesUsed: ncTemp = ncUsed[filesUsed.index(f[i])]
            else:
                ncTemp = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncTemp]

        elif i == 'salt':
            if f[i] in filesUsed: ncSalt = ncUsed[filesUsed.index(f[i])]
            else:
                ncSalt = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncSalt]

        elif i == 'u':
            if f[i] in filesUsed: ncU = ncUsed[filesUsed.index(f[i])]
            else:
                ncU = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncU]

        elif i == 'v':
            if f[i] in filesUsed: ncV = ncUsed[filesUsed.index(f[i])]
            else:
                ncV = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncV]

        elif i == 'ssh':
            if f[i] in filesUsed: ncSsh = ncUsed[filesUsed.index(f[i])]
            else:
                ncSsh = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncSsh]

        elif i == 'xy':
            if f[i] in filesUsed: ncXy = ncUsed[filesUsed.index(f[i])]
            else:
                ncXy = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncXy]

        elif i == 'z':
            if f[i] in filesUsed: ncZ = ncUsed[filesUsed.index(f[i])]
            else:
                ncZ = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncZ]

        elif i == 'time':
            if f[i] in filesUsed: ncTime = ncUsed[filesUsed.index(f[i])]
            else:
                ncTime = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncTime]

        elif i == 'misc':
            if f[i] in filesUsed: ncMisc = ncUsed[filesUsed.index(f[i])]
            else:
                ncMisc = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncMisc]

    # load dims:
    if not quiet: print '  loading dims...'
    dimsXy = netcdf.fdim(ncXy)
    dimsZ = netcdf.fdim(ncZ)

    res['NX'] = dimsXy[sett.xdim]
    res['NY'] = dimsXy[sett.ydim]
    ###if sett.z_name:
    if sett.zdim:
        res['NZ'] = dimsZ[sett.zdim]
    else:
        res['NZ'] = 1

    # about horizontal inds:
    if inds.has_key(
            sett.xdim) and len(inds[sett.xdim]) == 2 and not isinstance(
                inds[sett.xdim], basestring):
        if not quiet: print '  calc horizontal inds...'
        xlim = inds[sett.xdim]
        ylim = inds[sett.ydim]

        inds.pop(sett.xdim)
        inds.pop(sett.ydim)

        lon = netcdf.use(ncXy, sett.x_name, **inds)
        if np.any(lon > 360): lon = np.mod(lon, 360.)
        lat = netcdf.use(ncXy, sett.y_name, **inds)
        i0, i1, j0, j1 = calc.ij_limits(lon, lat, xlim, ylim, margin=3)
        inds[sett.xdim] = '%d:%d' % (i0, i1)
        inds[sett.ydim] = '%d:%d' % (j0, j1)

    if not quiet: print '  loading lon, lat, depth...'
    res['lon'] = netcdf.use(ncXy, sett.x_name, **inds)
    if np.any(res['lon'] > 360): res['lon'] = np.mod(res['lon'], 360.)
    res['lat'] = netcdf.use(ncXy, sett.y_name, **inds)
    if sett.z_name:
        res['depth'] = -netcdf.use(ncZ, sett.z_name, **inds)
    else:
        res['depth'] = False

    if res['lon'].size != res['lat'].size:
        res['lon'], res['lat'] = np.meshgrid(res['lon'], res['lat'])
        # needed for griddata, later

    # update nx,ny:
    if inds.has_key(sett.xdim):
        res['NY'], res['NX'] = res['lon'].shape

    # extra misc vars:
    if len(extra):
        for outKey, fileVar in extra:
            if not quiet:
                print '  loading extra misc... %s %s' % (outKey, fileVar)
            res[outKey] = netcdf.use(ncMisc, fileVar, **inds)

    # time:
    # file may have one or several times. If several, time dim must be given
    # with kargs inds!
    # but file may also have no time dim or time name !
    if sett.time_name:
        if not quiet: print '  loading time...'
        if t_units:
            times = netcdf.use(ncTime, sett.time_name)
            times = netcdf.num2date(times, t_units)
        else:
            times = netcdf.nctime(ncTime, sett.time_name)

        if inds.has_key(sett.tdim):
            try:
                tind = dts.parse_date(inds[sett.tdim])
            except:
                tind = inds[sett.tdim]  # is an integer, for instance

            if isinstance(tind, datetime.datetime):
                tind, = np.where(times == tind)
                if tind.size:
                    tind = tind[0]
                    inds[sett.tdim] = tind  # update inds to extract other variables
                else:
                    Msg = 'date not found'
                    msg += '\n' + Msg
                    return res, msg + ' ERROR'

            date = times[tind]
            try:
                len(date)
                ndates = True
            except:
                ndates = False

            if ndates:
                if not quiet:
                    print '    tind, date= len=%d: %d to %d, %s to %s' % (
                        len(date), tind[0], tind[-1], date[0].isoformat(' '),
                        date[-1].isoformat(' '))
            else:
                if not quiet:
                    print '    tind, date= %d %s' % (tind, date.isoformat(' '))

        elif times.size == 1:
            date = times[0]
            if not quiet: print '    date= %s' % date.isoformat(' ')
        else:  # must provide tind as input!!
            Msg = 'several dates in file... provide tind!'
            msg += '\n' + Msg
            return res, msg + ' ERROR'

        res['date'] = date
    else:
        if not quiet: print '    warning: not using time !!'
        res['date'] = 0

    empty3d = np.zeros([res['NZ'], res['NY'], res['NX']])
    empty2d = np.zeros([res['NY'], res['NX']])

    if 'temp' in f.keys():
        if not quiet: print '  loading temp...'
        if sett.temp_name in ncTemp.varnames:
            res['temp'] = netcdf.use(ncTemp, sett.temp_name, **inds)
        else:
            Msg = 'var %s not found' % 'temp'
            msg += '\n' + Msg
            if not quiet: print Msg
            res['temp'] = empty3d

    if 'salt' in f.keys():
        if not quiet: print '  loading salt...'
        if sett.salt_name in ncSalt.varnames:
            res['salt'] = netcdf.use(ncSalt, sett.salt_name, **inds)
        else:
            Msg = 'var %s not found' % 'salt'
            msg += '\n' + Msg
            if not quiet: print Msg
            res['salt'] = empty3d

    if 'u' in f.keys():
        if not quiet: print '  loading u...'
        if sett.u_name in ncU.varnames:
            res['u'] = netcdf.use(ncU, sett.u_name, **inds)
        else:
            Msg = 'var %s not found' % 'u'
            msg += '\n' + Msg
            if not quiet: print Msg
            res['u'] = empty3d

    if 'v' in f.keys():
        if not quiet: print '  loading v...'
        if sett.v_name in ncV.varnames:
            res['v'] = netcdf.use(ncV, sett.v_name, **inds)
        else:
            Msg = 'var %s not found' % 'v'
            msg += '\n' + Msg
            if not quiet: print Msg
            res['v'] = empty3d

    if 'ssh' in f.keys():
        if not quiet: print '  loading ssh...'
        if sett.ssh_name in ncSsh.varnames:
            res['ssh'] = netcdf.use(ncSsh, sett.ssh_name, **inds)
        else:
            Msg = 'var %s not found' % 'ssh'
            msg += '\n' + Msg
            if not quiet: print Msg
            res['ssh'] = empty2d

    for nc in ncUsed:
        try:
            nc.close()
        except:
            pass

    return res, msg
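
A hedged sketch of the dictionary form of f described in the docstring above; all file names and the dimension name used in inds are assumptions.

# Hypothetical call (paths are assumptions): prognostic variables split across
# files, with 'misc' providing lon, lat, depth and time.
f = {'temp': 'ocean_temp.nc', 'salt': 'ocean_salt.nc',
     'u': 'ocean_uv.nc', 'v': 'ocean_uv.nc',
     'ssh': 'ocean_ssh.nc', 'misc': 'ocean_grid.nc'}
# the time dimension name passed in inds depends on the DataAccess settings
data, err = load_data(f, quiet=0, inds={'time': 0})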
Example #13
def narr_file_data(fname,xlim=False,ylim=False,quiet=False):
  '''
  Returns bulk data from one NARR file
  '''

  out={}

  # loading grid:
  if 0:
    if not quiet: print(' reading lon,lat from file %s' % grd)
    nc=netcdf.ncopen(grd)
    x=nc.vars['East_longitude_0-360'][0,...]-360.
    y=nc.vars['Latitude_-90_to_+90'][0,...] # time always 1 !!
    nc.close()
  else:
    if not quiet: print(' reading lon,lat from file %s' % grdTxt)
    x,y=load_grid()
    #x=x-360.
    x=-x

  ny,nx=x.shape


  if (xlim,ylim)==(False,False):i0,i1,j0,j1=0,nx,0,ny
  else:
    i0,i1,j0,j1=calc.ij_limits(x, y, xlim, ylim, margin=0)
    x=x[j0:j1,i0:i1]
    y=y[j0:j1,i0:i1]

  try:
    nc=netcdf.ncopen(fname)
  except:
    return {}

  xx=str(i0)+':'+str(i1)
  yy=str(j0)+':'+str(j1)

  tdim=netcdf.fdim(nc,'time1')
  if tdim!=1: print('WARNING: tdim !=1  !!!!!!')

  # T surface [K->C]
  if not quiet: print(' --> T air')
  tair=netcdf.use(nc,'Temperature_surface',time1=0,x=xx,y=yy)
  tair=tair-273.15
  out['tair']=cb.Data(x,y,tair,'C')

  # R humidity [% -> 0--1]
  if not quiet: print(' --> R humidity')
  rhum=netcdf.use(nc,'Relative_humidity',time1=0,x=xx,y=yy)
  out['rhum']=cb.Data(x,y,rhum/100.,'0--1')

  # surface pressure [Pa]
  if not quiet: print(' --> Surface pressure')
  pres=netcdf.use(nc,'Pressure_surface',time1=0,x=xx,y=yy)
  out['pres']=cb.Data(x,y,pres,'Pa')

  # P rate [kg m-2 s-1 -> cm/d]
  if not quiet: print(' --> P rate')
  prate=netcdf.use(nc,'Precipitation_rate',time1=0,x=xx,y=yy)
  prate=prate*86400*100/1000.
  out['prate']=cb.Data(x,y,prate,'cm/d')

  # Net shortwave flux  [ W m-2]
  if not quiet: print(' --> Net shortwave flux')
  if not quiet: print('       SW down')
  sw_down=netcdf.use(nc,'Downward_shortwave_radiation_flux',time1=0,x=xx,y=yy)
  if not quiet: print('       SW up')
  sw_up=netcdf.use(nc,'Upward_short_wave_radiation_flux_surface',time1=0,x=xx,y=yy)
  sw_net=sw_down-sw_up
  out['radsw']=cb.Data(x,y,sw_net,'W m-2',info='positive downward')

  # Net longwave flux  [W/m^2]
  if not quiet: print(' --> Net longwave flux')
  if not quiet: print('       LW down')
  lw_down=netcdf.use(nc,'Downward_longwave_radiation_flux',time1=0,x=xx,y=yy)
  if not quiet: print('       LW up')
  lw_up=netcdf.use(nc,'Upward_long_wave_radiation_flux_surface',time1=0,x=xx,y=yy)
  lw_net=lw_down-lw_up
  out['radlw']=cb.Data(x,y,-lw_net,'W m-2',info='positive upward')

  # downward lw:
  out['dlwrf']=cb.Data(x,y,-lw_down,'W m-2',info='negative... downward')

  # U and V wind speed 10m
  if not quiet: print(' --> U and V wind')
  # vertical dim is height_above_ground1: 10 and 30 m
  uwnd=netcdf.use(nc,'u_wind_height_above_ground',height_above_ground1=0,time1=0,x=xx,y=yy)
  vwnd=netcdf.use(nc,'v_wind_height_above_ground',height_above_ground1=0,time1=0,x=xx,y=yy)

  if not quiet: print(' --> calc wind speed and stress')
  speed = np.sqrt(uwnd**2+vwnd**2)
  taux,tauy=air_sea.wind_stress(uwnd,vwnd)

  out['wspd']=cb.Data(x,y,speed,'m s-1')
  out['uwnd']=cb.Data(x,y,uwnd,'m s-1')
  out['vwnd']=cb.Data(x,y,vwnd,'m s-1')
  out['sustr']=cb.Data(x,y,taux,'Pa')
  out['svstr']=cb.Data(x,y,tauy,'Pa')

  # Cloud cover [0--100 --> 0--1]:
  if not quiet: print(' --> Cloud cover')
  clouds=netcdf.use(nc,'Total_cloud_cover',time1=0,x=xx,y=yy)
  out['cloud']=cb.Data(x,y,clouds/100.,'fraction (0--1)')

  nc.close()
  return  out
Example #14
  def create(self):
    '''
    Creates model netcdf river forcing file
    '''
    nc=netcdf.Pync(self.fname,self.perm,version=self.ncversion)

    nx=netcdf.fdim(self.grid,'xi_rho')
    ny=netcdf.fdim(self.grid,'eta_rho')

    # Dimensions:
    nc.add_dim('s_rho',self.nz)
    nc.add_dim('river',self.nrivers)
    nc.add_dim('river_time',0)

    # Variables:
    v=nc.add_var('river',np.dtype('d'),('river',))
    v.add_att('long_name','river runoff identification number')

    v=nc.add_var('river_Xposition',np.dtype('d'),('river',))
    v.add_att('long_name','river XI-position at RHO-points')
    v.add_att('valid_min',1)
    v.add_att('valid_max',nx-1)

    v=nc.add_var('river_Eposition',np.dtype('d'),('river',))
    v.add_att('long_name','river ETA-position at RHO-points')
    v.add_att('valid_min',1)
    v.add_att('valid_max',ny-1)

    v=nc.add_var('river_direction',np.dtype('d'),('river',))
    v.add_att('long_name','river runoff direction')

    v=nc.add_var('river_Vshape',np.dtype('d'),('s_rho','river'))
    v.add_att('long_name','river runoff mass transport vertical profile')

    v=nc.add_var('river_time',np.dtype('d'),('river_time',))
    v.add_att('long_name','river runoff time')
    v.add_att('units',self.tunits)
    v.add_att('add_offset',0)

    v=nc.add_var('river_transport',np.dtype('d'),('river_time','river'))
    v.add_att('long_name','river runoff vertically integrated mass transport')
    v.add_att('units','metre3 second-1')
    v.add_att('time','river_time')

    v=nc.add_var('river_temp',np.dtype('d'),('river_time','s_rho','river'))
    v.add_att('long_name','river runoff potential temperature')
    v.add_att('units','Celsius')
    v.add_att('time','river_time')

    v=nc.add_var('river_salt',np.dtype('d'),('river_time','s_rho','river'))
    v.add_att('long_name','river runoff salinity')
    v.add_att('units','PSU')
    v.add_att('time','river_time')

    # Global Attributes:
    nc.add_att('type',self.type)
    nc.add_att('title',self.title)
    nc.add_att('grd_file',os.path.realpath(self.grid))
    nc.add_att('date',dts.currday().isoformat(' '))
    nc.add_att('author',cb.username()[1]+', '+cb.username()[0]+'@'+cb.machinename())

    # extra attrs:
    for i in self.attr.keys(): nc.add_att(i,self.attr[i])

    nc.close()
Example #15
def roms2swan_wind(frc,date0,date1,fname='swan_wind.dat',**kargs):
  tname='wind_time'
  uname='Uwind'
  vname='Vwind'
  grd=False # needed if wind is 1d
  dt=1 # hours
  path=''
  if 'tname' in kargs.keys(): tname=kargs['tname']
  if 'uname' in kargs.keys(): uname=kargs['uname']
  if 'vname' in kargs.keys(): vname=kargs['vname']
  if 'grd'   in kargs.keys(): grd  =kargs['grd']
  if 'dt'    in kargs.keys(): dt   =kargs['dt']
  if 'path'  in kargs.keys(): path =kargs['path']
 
  print 'wind: loading time ...' 
  
  time=netcdf.nctime(frc,tname)
  #time=np.load('tfile')
  #cond=(time>=date0)&(time<=date1)
  cond=(time>=date0)&(time<=date1+datetime.timedelta(days=1)) # add one day at the end, just to avoid the "repeating last"
  time=time[cond]
  d=np.diff(pl.date2num(time))
  print 'current max and min dt = %6.2f %6.2f hrs = %6.2f %6.2f mins'%(d.max()*24, d.min()*24, d.max()*24*60, d.min()*24*60)
#  time=time[::dt]
#  d=np.diff(pl.date2num(time))
#  print ' final  max and min dt = %6.2f %6.2f hrs = %6.2f %6.2f mins'%(d.max()*24, d.min()*24, d.max()*24*60, d.min()*24*60)

  print 'wind: loading u ...' 
  u,nc=netcdf.var(frc,uname)
  print 'wind: loading v ...' 
  v,nc=netcdf.var(nc,vname)
#  u=u[cond,...][::dt,...]
#  v=v[cond,...][::dt,...]
  u=u[cond,...]
  v=v[cond,...]
  nc.close()


  if u.ndim==1:
    if not grd:
      print 'must provide grd karg!'
      return

    nt=u.size
    eta=netcdf.fdim(grd)['eta_rho']
    xi=netcdf.fdim(grd)['xi_rho']
  else:
    nt,eta,xi=u.shape

# array may be too big, so do this later (for each it)
#
#    u=np.tile(u[:,np.newaxis,np.newaxis],(1,eta,xi))
#    v=np.tile(v[:,np.newaxis,np.newaxis],(1,eta,xi))



  i=open(fname,'w')

  times=[]
  time0=time[0]-datetime.timedelta(hours=dt)
  ITs=[]
  for it in range(nt):
    time0=time0+datetime.timedelta(hours=dt)
    if time0>date1: break

    if time0>time[-1]:
      print 'Warning : repeating last ...', it

    times+=[time0]
    d=np.abs(time-time0)
    it=np.where(d==d.min())[0][0]
    ITs+=[it]

    if it%100==0: print 'saving u %s %s'%(fname,time[it].isoformat(' '))
    if u[it,...].ndim==0:
      U=np.tile(u[it,...],(eta,xi)).flatten()
    else:
      U=u[it,...].flatten()

    [i.write('%8.4f\n'%uu) for uu in U]

  for it in ITs:
    if it%100==0: print 'saving v %s %s'%(fname,time[it].isoformat(' '))
    if v[it,...].ndim==0:
      V=np.tile(v[it,...],(eta,xi)).flatten()
    else:
      V=v[it,...].flatten()

    [i.write('%8.4f\n'%vv) for vv in V]



  times=np.asarray(times)
  t0iso=times[0].strftime('%Y%m%d.%H%M%S')
  t1iso=times[-1].strftime('%Y%m%d.%H%M%S')
  dt=times[1]-times[0]
  dth=dt.days*24. + dt.seconds/60.**2

  print ' -- created swan wind file %s\n'%fname

  # add to swan INPUT:
  print '\n'
  print 'INPGRID WIND CURVILINEAR 0 0 %d %d EXC 9.999000e+003 &'%(xi-1,eta-1)
  print '       NONSTATIONARY %s %.2f HR %s'%(t0iso,dth,t1iso)
  print 'READINP WIND 1 \'%s\' 4 0 FREE '%(os.path.join(path,fname))
  print '\n'
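
A usage sketch for roms2swan_wind as defined above; file names and dates are placeholders.

# Hypothetical call (paths and dates are assumptions): dump hourly Uwind/Vwind
# from a ROMS forcing file into a SWAN wind file and print the matching
# INPGRID/READINP lines for the SWAN INPUT file.
import datetime
roms2swan_wind('roms_frc.nc',
               datetime.datetime(2012, 1, 1), datetime.datetime(2012, 1, 5),
               fname='swan_wind.dat', dt=1, path='swan_run')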
Example #16
def his2gnome(fname,his,grd=False,nomask=False,gshhsMask=True,xylim=False,dates=False,ij=(1,1)):
  '''
  Creates GNOME wind file
  Ex:
    his2gnome(out,his,grd,dates=dates,ij=(2,2))

  if gshhsMask, the high-res mask file mask_gshhs.npy will be created on first use.
  Mask is based on high (h) resolution gshhs data which must be available (env variable
  GSHHS_MASK must be set). 
  '''

  if not grd: grd=his
  deta,dxi=ij

  dims=netcdf.fdim(his)
  xi,eta=dims['xi_rho'],dims['eta_rho']
  xi0,eta0=xi,eta

  nc0=netcdf.ncopen(his)
  time=netcdf.nctime(nc0,'ocean_time')
  # for roms agrif:
  #t=netcdf.use(nc0,'scrum_time')
  #time=netcdf.num2date(t,'seconds since %d-01-01' % year0)

  x0=netcdf.use(grd,'lon_rho')
  y0=netcdf.use(grd,'lat_rho')
  ang=netcdf.use(grd,'angle')

  if not xylim is False:
    xlim=xylim[:2]
    ylim=xylim[2:]
    i1,i2,j1,j2=calc.ij_limits(x0,y0,xlim,ylim)
    print i1,i2,j1,j2
    xi=i2-i1
    eta=j2-j1
  else:
    i1,i2=0,xi
    j1,j2=0,eta

  XI  ='%d:%d:%d' %(i1,i2,dxi)
  ETA ='%d:%d:%d' %(j1,j2,deta)

  xi=len(range(i1,i2,dxi))
  eta=len(range(j1,j2,deta))
  # create file:
  create_uv(fname,xi,eta)

  nc=netcdf.ncopen(fname,'a')
  for v0,v in ('lon_rho','lon'),('lat_rho','lat'),('mask_rho','mask'),('h','depth'):
    print 'filling %s with %s' % (v,v0)
    nc.vars[v][:]=netcdf.use(grd,v0,xi_rho=XI,eta_rho=ETA)

  if nomask:
    print 'NO MASK !!!'
    nc.vars['mask'][:]=1

  if gshhsMask:
    try:
     mask=np.load('mask_gshhs.npy')
    except:
      mask=1+0*netcdf.use(nc0,'mask_rho',xi_rho=XI,eta_rho=ETA)
      mask=mask.astype('bool')
      x=netcdf.use(grd,'lon_rho',xi_rho=XI,eta_rho=ETA)
      y=netcdf.use(grd,'lat_rho',xi_rho=XI,eta_rho=ETA)

      from okean import gshhs
      axis=x.min(),x.max(),y.min(),y.max()
      g=gshhs.gshhs(axis, resolution='h',area_thresh=0., max_level=2,clip=True)
      for lon, lat, level in zip(g.lon, g.lat, g.level):
        if level == 1: # land
          print 'mask ',lon.shape
          i=calc.inpolygon(x,y,lon,lat)
          mask=mask & ~i

      mask.dump('mask_gshhs.npy')


    nc.vars['mask'][:]=mask


  x=x0[j1:j2:deta,i1:i2:dxi]
  y=y0[j1:j2:deta,i1:i2:dxi]
  ang=ang[j1:j2:deta,i1:i2:dxi]

  n=-1
  for it in range(len(time)):
    if not dates is False:
      d0,d1=dates
      if time[it]<d0 or time[it]>=d1: continue

    n+=1
    U=np.zeros((eta0,xi0),'f')
    V=np.zeros((eta0,xi0),'f')

    nc.vars['time'][n]=netcdf.date2num(time[it],tunits)

    # for roms agrif:
    #u=netcdf.use(nc0,'u',time=it,s_rho=-1)
    #v=netcdf.use(nc0,'v',time=it,s_rho=-1)
    u=netcdf.use(nc0,'u',ocean_time=it,s_rho=-1)
    v=netcdf.use(nc0,'v',ocean_time=it,s_rho=-1)

    # mask extrap:
    print 'mask extrap...'

    u=calc.mask_extrap(x0,y0,np.ma.masked_where(u==0,u))
    v=calc.mask_extrap(x0,y0,np.ma.masked_where(v==0,v))

    U[:,1:-1]=0.5*(u[:,:-1]+u[:,1:])
    U[:,0]=u[:,0]
    U[:,-1]=u[:,-1]

    V[1:-1,:]=0.5*(v[:-1,:]+v[1:,:])
    V[0,:]=v[0,:]
    V[-1,:]=v[-1,:]

    U=U[j1:j2,i1:i2]
    V=V[j1:j2,i1:i2]
  
    U=U[j1:j2:deta,i1:i2:dxi]
    V=V[j1:j2:deta,i1:i2:dxi]

    # rotate uv:
    print 'rotating ...'
    U,V=calc.rot2d(U,V,-ang)

    print 'filling uv', n, time[it]
    nc.vars['u'][n,...]=U
    nc.vars['v'][n,...]=V

  nc.close()
  nc0.close()
Example #17
def roms2swan_wind(frc, date0, date1, fname='swan_wind.dat', **kargs):
    tname = 'wind_time'
    uname = 'Uwind'
    vname = 'Vwind'
    grd = False  # needed if wind is 1d
    dt = 1  # hours
    path = ''
    if 'tname' in kargs.keys(): tname = kargs['tname']
    if 'uname' in kargs.keys(): uname = kargs['uname']
    if 'vname' in kargs.keys(): vname = kargs['vname']
    if 'grd' in kargs.keys(): grd = kargs['grd']
    if 'dt' in kargs.keys(): dt = kargs['dt']
    if 'path' in kargs.keys(): path = kargs['path']

    print 'wind: loading time ...'

    time = netcdf.nctime(frc, tname)
    #time=np.load('tfile')
    #cond=(time>=date0)&(time<=date1)
    cond = (time >= date0) & (
        time <= date1 + datetime.timedelta(days=1)
    )  # add one day at the end, just to avoid the "repeating last"
    time = time[cond]
    d = np.diff(pl.date2num(time))
    print 'current max and min dt = %6.2f %6.2f hrs = %6.2f %6.2f mins' % (
        d.max() * 24, d.min() * 24, d.max() * 24 * 60, d.min() * 24 * 60)
    #  time=time[::dt]
    #  d=np.diff(pl.date2num(time))
    #  print ' final  max and min dt = %6.2f %6.2f hrs = %6.2f %6.2f mins'%(d.max()*24, d.min()*24, d.max()*24*60, d.min()*24*60)

    print 'wind: loading u ...'
    nc = netcdf.ncopen(frc)
    u = netcdf.var(nc, uname)
    print 'wind: loading v ...'
    v = netcdf.var(nc, vname)
    #  u=u[cond,...][::dt,...]
    #  v=v[cond,...][::dt,...]
    u = u[cond, ...]
    v = v[cond, ...]
    nc.close()

    if u.ndim == 1:
        if not grd:
            print 'must provide grd karg!'
            return

        nt = u.size
        eta = netcdf.fdim(grd)['eta_rho']
        xi = netcdf.fdim(grd)['xi_rho']
    else:
        nt, eta, xi = u.shape


# array may be too big, so do this later (for each it)
#
#    u=np.tile(u[:,np.newaxis,np.newaxis],(1,eta,xi))
#    v=np.tile(v[:,np.newaxis,np.newaxis],(1,eta,xi))

    i = open(fname, 'w')

    times = []
    time0 = time[0] - datetime.timedelta(hours=dt)
    ITs = []
    for it in range(nt):
        time0 = time0 + datetime.timedelta(hours=dt)
        if time0 > date1: break

        if time0 > time[-1]:
            print 'Warning : repeating last ...', it

        times += [time0]
        d = np.abs(time - time0)
        it = np.where(d == d.min())[0][0]
        ITs += [it]

        if it % 100 == 0:
            print 'saving u %s %s' % (fname, time[it].isoformat(' '))
        if u[it, ...].ndim == 0:
            U = np.tile(u[it, ...], (eta, xi)).flatten()
        else:
            U = u[it, ...].flatten()

        [i.write('%8.4f\n' % uu) for uu in U]

    for it in ITs:
        if it % 100 == 0:
            print 'saving v %s %s' % (fname, time[it].isoformat(' '))
        if v[it, ...].ndim == 0:
            V = np.tile(v[it, ...], (eta, xi)).flatten()
        else:
            V = v[it, ...].flatten()

        [i.write('%8.4f\n' % vv) for vv in V]

    times = np.asarray(times)
    t0iso = times[0].strftime('%Y%m%d.%H%M%S')
    t1iso = times[-1].strftime('%Y%m%d.%H%M%S')
    dt = times[1] - times[0]
    dth = dt.days * 24. + dt.seconds / 60.**2

    print ' -- created swan wind file %s\n' % fname

    # add to swan INPUT:
    print '\n'
    print 'INPGRID WIND CURVILINEAR 0 0 %d %d EXC 9.999000e+003 &' % (xi - 1,
                                                                      eta - 1)
    print '       NONSTATIONARY %s %.2f HR %s' % (t0iso, dth, t1iso)
    print 'READINP WIND 1 \'%s\' 4 0 FREE ' % (os.path.join(path, fname))
    print '\n'
Example #18
def frc2gnome(fname, frc, grd, xylim=False, dates=False, ij=(1, 1), **kargs):
    '''
  Creates GNOME wind file
  kargs:
    t[u,v]var
    t[u,v]dim
    x[y,ang]var

  Ex:
    .frc2gnome(out,frc,grd,ij=(10,10),dates=dates,**{'tdim':'Time'})
  '''

    deta, dxi = ij

    tvar = 'time'
    uvar = 'Uwind'
    vvar = 'Vwind'
    #tvar='bulk_time'
    #uvar='uwnd'
    #vvar='vwnd'

    tdim = 'time'
    #tdim='bulk_time'
    xdim = 'xi_rho'
    ydim = 'eta_rho'

    xvar = 'lon_rho'
    yvar = 'lat_rho'
    angvar = 'angle'

    if 'tvar' in kargs.keys(): tvar = kargs['tvar']
    if 'uvar' in kargs.keys(): uvar = kargs['uvar']
    if 'vvar' in kargs.keys(): vvar = kargs['vvar']

    if 'tdim' in kargs.keys(): tdim = kargs['tdim']
    if 'xdim' in kargs.keys(): xdim = kargs['xdim']
    if 'ydim' in kargs.keys(): ydim = kargs['ydim']

    if 'xvar' in kargs.keys(): xvar = kargs['xvar']
    if 'yvar' in kargs.keys(): yvar = kargs['yvar']
    if 'angvar' in kargs.keys(): angvar = kargs['angvar']

    dims = netcdf.fdim(grd)
    xi, eta = dims[xdim], dims[ydim]
    xi0, eta0 = xi, eta

    ncg = netcdf.ncopen(grd)

    nc0 = netcdf.ncopen(frc)
    try:
        t = netcdf.nctime(nc0, tvar)
    except:
        t = netcdf.use(nc0, tvar)
        t = netcdf.num2date(t, 'days since %d-01-01' % year0)

    time = netcdf.date2num(t, tunits)

    x0 = netcdf.use(grd, xvar)
    y0 = netcdf.use(grd, yvar)
    if x0.ndim == 1: x0, y0 = np.meshgrid(x0, y0)

    if angvar:
        ang = netcdf.use(grd, angvar)

    if not xylim is False:
        xlim = xylim[:2]
        ylim = xylim[2:]
        i1, i2, j1, j2 = calc.ij_limits(x0, y0, xlim, ylim)
        xi = i2 - i1
        eta = j2 - j1
    else:
        i1, i2 = 0, xi
        j1, j2 = 0, eta

    XI = '%d:%d:%d' % (i1, i2, dxi)
    ETA = '%d:%d:%d' % (j1, j2, deta)

    xi = len(range(i1, i2, dxi))
    eta = len(range(j1, j2, deta))

    # create file:
    create_wind(fname, xi, eta)

    nc = netcdf.ncopen(fname, 'a')

    x = x0[j1:j2:deta, i1:i2:dxi]
    y = y0[j1:j2:deta, i1:i2:dxi]

    nc.vars['lon'][:] = x
    nc.vars['lat'][:] = y
    if angvar: ang = ang[j1:j2:deta, i1:i2:dxi]

    n = -1
    for it in range(len(time)):

        if not dates is False:
            d0, d1 = dates
            if t[it] < d0 or t[it] >= d1: continue

        n += 1
        u = netcdf.use(nc0, uvar, **{xdim: XI, ydim: ETA, tdim: it})
        v = netcdf.use(nc0, vvar, **{xdim: XI, ydim: ETA, tdim: it})

        # rotate uv:
        if angvar:
            print('rotating ...')
            u, v = calc.rot2d(u, v, -ang)

        nc.vars['time'][n] = time[it]
        print('filling uv', n, t[it])
        nc.vars['air_u'][n, ...] = u
        nc.vars['air_v'][n, ...] = v

    nc.close()
    nc0.close()
    ncg.close()
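
A usage sketch mirroring the 'Ex:' line of the docstring above; file names and dates are placeholders.

# Hypothetical call (paths and dates are assumptions): keep every 10th point of
# a ROMS wind forcing file and write a GNOME wind file, overriding the name of
# the time dimension.
import datetime
dates = datetime.datetime(2012, 1, 1), datetime.datetime(2012, 1, 10)
frc2gnome('gnome_wind.nc', 'roms_frc.nc', 'roms_grd.nc',
          ij=(10, 10), dates=dates, tdim='Time')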
Example #19
def frc2gnome(fname,frc,grd,xylim=False,dates=False,ij=(1,1),**kargs):
  '''
  Creates GNOME wind file
  kargs:
    t[u,v]var
    t[u,v]dim
    x[y,ang]var

  Ex:
    .frc2gnome(out,frc,grd,ij=(10,10),dates=dates,**{'tdim':'Time'})
  '''

  deta,dxi=ij

  tvar='time'
  uvar='Uwind'
  vvar='Vwind'
  #tvar='bulk_time'
  #uvar='uwnd'
  #vvar='vwnd'

  tdim='time'
  #tdim='bulk_time'
  xdim='xi_rho'
  ydim='eta_rho'

  xvar='lon_rho'
  yvar='lat_rho'
  angvar='angle'

  if 'tvar' in kargs.keys(): tvar=kargs['tvar']
  if 'uvar' in kargs.keys(): uvar=kargs['uvar']
  if 'vvar' in kargs.keys(): vvar=kargs['vvar']

  if 'tdim' in kargs.keys(): tdim=kargs['tdim']
  if 'xdim' in kargs.keys(): xdim=kargs['xdim']
  if 'ydim' in kargs.keys(): ydim=kargs['ydim']

  if 'xvar' in kargs.keys(): xvar=kargs['xvar']
  if 'yvar' in kargs.keys(): yvar=kargs['yvar']
  if 'angvar' in kargs.keys(): angvar=kargs['angvar']


  dims=netcdf.fdim(grd)
  xi,eta=dims[xdim],dims[ydim]
  xi0,eta0=xi,eta

  ncg=netcdf.ncopen(grd)

  nc0=netcdf.ncopen(frc)
  try:
   t=netcdf.nctime(nc0,tvar)
  except:
    t=netcdf.use(nc0,tvar)
    t=netcdf.num2date(t,'days since %d-01-01' % year0)

  time=netcdf.date2num(t,tunits)

  x0=netcdf.use(grd,xvar)
  y0=netcdf.use(grd,yvar)
  if x0.ndim==1: x0,y0=np.meshgrid(x0,y0)

  if angvar:
    ang=netcdf.use(grd,angvar)

  if not xylim is False:
    xlim=xylim[:2]
    ylim=xylim[2:]
    i1,i2,j1,j2=calc.ij_limits(x0,y0,xlim,ylim)
    xi=i2-i1
    eta=j2-j1
  else:
    i1,i2=0,xi
    j1,j2=0,eta

  XI  ='%d:%d:%d' %(i1,i2,dxi)
  ETA ='%d:%d:%d' %(j1,j2,deta)

  xi=len(range(i1,i2,dxi))
  eta=len(range(j1,j2,deta))

  # create file:
  create_wind(fname,xi,eta)

  nc=netcdf.ncopen(fname,'a')

  x=x0[j1:j2:deta,i1:i2:dxi]
  y=y0[j1:j2:deta,i1:i2:dxi]

  nc.vars['lon'][:]=x
  nc.vars['lat'][:]=y
  if angvar: ang=ang[j1:j2:deta,i1:i2:dxi]

  n=-1
  for it in range(len(time)):

    if not dates is False:
      d0,d1=dates
      if t[it]<d0 or t[it]>=d1: continue

    n+=1
    u=netcdf.use(nc0,uvar,**{xdim:XI,ydim:ETA,tdim:it})
    v=netcdf.use(nc0,vvar,**{xdim:XI,ydim:ETA,tdim:it})

    # rotate uv:
    if angvar:
      print 'rotating ...'
      u,v=calc.rot2d(u,v,-ang)


    nc.vars['time'][n]=time[it]
    print 'filling uv',n,t[it]
    nc.vars['air_u'][n,...]=u
    nc.vars['air_v'][n,...]=v


  nc.close()
  nc0.close()
  ncg.close()
Example #20
def roms2clmbry(f0,clm,grd,sparams,**kargs):
  '''
  kargs:
  bry, bry file to create (False by default, no bry is created)
  times, all by default, but can be a range of times (inds or datetimes)
  tunits, 'days since 1970-01-01'

  ctitle, title for clm file (default used)
  btitle, title for bry file (default used)
  obc, bry open boundaries (default used)
  quiet, False

  other kargs of roms2roms:
    grd0
    sparams0
    tunits0
  '''

  bry    = False
  times  = 'all'
  tunits = 'days since 1970-01-01'
  ctitle = False
  btitle = False
  obc    = False
  quiet  = False

  if 'bry'    in kargs.keys(): bry    = kargs['bry']
  if 'times'  in kargs.keys(): times  = kargs['times']
  if 'tunits' in kargs.keys(): tunits = kargs['tunits']
  if 'ctitle' in kargs.keys(): ctitle = kargs['ctitle']
  if 'btitle' in kargs.keys(): btitle = kargs['btitle']
  if 'obc'    in kargs.keys(): obc    = kargs['obc']
  if 'quiet'  in kargs.keys(): quiet  = kargs['quiet']

  create=True

  Cargs={}
  if ctitle: Cargs['title']  = ctitle
  Cargs['tunits'] = tunits
  Cargs['create'] = create

  Bargs={}
  if btitle: Bargs['title'] = btitle
  if obc:    Bargs['obc']   = obc
  Bargs['tunits'] = tunits
  Bargs['create'] = create


  if times=='all':
    times=range(netcdf.fdim(f0,'ocean_time'))

  for tind in times:
    data,datab=roms2roms(f0,grd,sparams,tind,**kargs)

    prognostic.make_clm(clm,data,grd,sparams,quiet=quiet,**Cargs)
    if bry:
      prognostic.make_bry(bry,datab,grd,sparams,quiet=quiet,**Bargs)

    if create:
      Cargs['create']=False
      Bargs['create']=False
Example #21
def load_data(f,quiet=0,**kargs):
  '''
  Loads prognostic variables (temp,salt,u,v,ubar,vbar,zeta) from
  netcdf file or opendap server. Also loads lon,lat, depth, and time.

  If f is a file, it must include the 1d variables lon,lat and depth;
  the 2d variable ssh (zeta) and the 3d variables temp, salt, u and v;
  i.e., each file must contain data for a single time. The file must also
  contain the variable time.

  If f is an opendap address, it must also contain all these variables
  or the ones defined in the input karg settings (DataAccess object)

  To deal with the case of variables in different files/opendap addresses,
  f can also be a dictionary with keys the variables and values the files
  or opendap addresses. In this case, the keys must be:
    - temp
    - salt
    - u
    - v
    - ssh
    - misc, for lon, lat, depth, time and dimensions
      or xy for lon,lat and x,ydim; z for depth and zdim, time for time

  The output data (dict) is suitable to be used by data2roms, which
  interpolates the data to ROMS 3d grid.
  Also outputs an error/status string.

  kargs:
    inds, dict with dimension names/values (where time dim can be integer
          or datetime)
    settings, DataAccess object
    extra, extra misc vars to load [(outKey0,fileVar0),...]
    t_units, units of variable time; by default the units attribute is used
  '''

  sett=DataAccess()
  inds={}
  extra=[]
  t_units=[]
  if 'settings' in kargs.keys(): sett    = kargs['settings']
  if 'inds'     in kargs.keys(): inds    = kargs['inds']
  if 'extra'    in kargs.keys(): extra   = kargs['extra']
  if 't_units'  in kargs.keys(): t_units = kargs['t_units']

  res={}
  msg=''

  if not isinstance(f,dict) and not f.startswith('http') and not isfile(f):
    msg='file not found %s' % f
    if not quiet: print msg
    return res, msg

  # load nc files:
  if not isinstance(f,dict):
    f={'temp':f,'salt':f,'u':f,'v':f,'ssh':f,'misc':f}

  if not f.has_key('xy'):   f['xy']   = f['misc']
  if not f.has_key('z'):    f['z']    = f['misc']
  if not f.has_key('time'): f['time'] = f['misc']

  filesUsed=[]
  ncUsed=[]
  for i in f.keys():
    if not quiet: print '(%s) loading from %s' % (i.ljust(5),f[i])

    if i=='temp':
      if f[i] in filesUsed: ncTemp=ncUsed[filesUsed.index(f[i])]
      else:
        ncTemp=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncTemp]

    elif i=='salt':
      if f[i] in filesUsed: ncSalt=ncUsed[filesUsed.index(f[i])]
      else:
        ncSalt=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncSalt]

    elif i=='u':
      if f[i] in filesUsed: ncU=ncUsed[filesUsed.index(f[i])]
      else:
        ncU=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncU]

    elif i=='v':
      if f[i] in filesUsed: ncV=ncUsed[filesUsed.index(f[i])]
      else:
        ncV=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncV]

    elif i=='ssh':
      if f[i] in filesUsed: ncSsh=ncUsed[filesUsed.index(f[i])]
      else:
        ncSsh=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncSsh]

    elif i=='xy':
      if f[i] in filesUsed: ncXy=ncUsed[filesUsed.index(f[i])]
      else:
        ncXy=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncXy]

    elif i=='z':
      if f[i] in filesUsed: ncZ=ncUsed[filesUsed.index(f[i])]
      else:
        ncZ=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncZ]

    elif i=='time':
      if f[i] in filesUsed: ncTime=ncUsed[filesUsed.index(f[i])]
      else:
        ncTime=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncTime]

    elif i=='misc':
      if f[i] in filesUsed: ncMisc=ncUsed[filesUsed.index(f[i])]
      else:
        ncMisc=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncMisc]


  # load dims:
  if not quiet: print '  loading dims...'
  dimsXy=netcdf.fdim(ncXy)
  dimsZ =netcdf.fdim(ncZ)

  res['NX']=dimsXy[sett.xdim]
  res['NY']=dimsXy[sett.ydim]
  ###if sett.z_name:
  if sett.zdim:
    res['NZ']=dimsZ[sett.zdim]
  else:
    res['NZ']=1

  # about horizontal inds:
  if inds.has_key(sett.xdim) and len(inds[sett.xdim])==2 and not isinstance(inds[sett.xdim],basestring):
    if not quiet: print '  calc horizontal inds...'
    xlim=inds[sett.xdim]
    ylim=inds[sett.ydim]

    inds.pop(sett.xdim)
    inds.pop(sett.ydim)

    lon=netcdf.use(ncXy,sett.x_name,**inds)
    if np.any(lon>360): lon=np.mod(lon,360.)
    lat=netcdf.use(ncXy,sett.y_name,**inds)
    i0,i1,j0,j1=calc.ij_limits(lon,lat,xlim,ylim,margin=3)
    inds[sett.xdim]='%d:%d' % (i0,i1)
    inds[sett.ydim]='%d:%d' % (j0,j1)


  if not quiet: print '  loading lon, lat, depth...'
  res['lon']  = netcdf.use(ncXy,sett.x_name,**inds)
  if np.any(res['lon']>360): res['lon']=np.mod(res['lon'],360.)
  res['lat']  = netcdf.use(ncXy,sett.y_name,**inds)
  if sett.z_name:
    res['depth'] = -netcdf.use(ncZ,sett.z_name,**inds)
  else: res['depth']=False

  if res['lon'].size!=res['lat'].size:
    res['lon'],res['lat']=np.meshgrid(res['lon'],res['lat'])
    # needed for griddata, later

  # update nx,ny:
  if inds.has_key(sett.xdim):
    res['NY'],res['NX']=res['lon'].shape

  # extra misc vars:
  if len(extra):
    for outKey,fileVar in extra:
      if not quiet: print '  loading extra misc... %s %s' % (outKey,fileVar)
      res[outKey]=netcdf.use(ncMisc,fileVar,**inds)


  # time:
  # file may have one or several times. If several, time dim must be given
  # with kargs inds!
  if not quiet: print '  loading time...'
  if t_units:
    times=netcdf.use(ncTime,sett.time_name)
    times=netcdftime.num2date(times,t_units)
  else:
    times=netcdf.nctime(ncTime,sett.time_name)

  if inds.has_key(sett.tdim):
    try: tind=dts.parse_date(inds[sett.tdim])
    except: tind=inds[sett.tdim] # is an integer, for instance

    if isinstance(tind,datetime.datetime):
      tind,=np.where(times==tind)
      if tind.size:
        tind=tind[0]
        inds[sett.tdim]=tind # update inds to extract other variables
      else:
        Msg='date not found'
        msg+='\n'+Msg
        return res,msg+' ERROR'

    date=times[tind]
    if not quiet: print '    tind, date= %d %s' % (tind,date.isoformat(' '))

  elif times.size==1:
    date=times[0]
    if not quiet: print '    date= %s' % date.isoformat(' ')
  else: # must provide tind as input!!
    Msg='several dates in file... provide tind!'
    msg+='\n'+Msg
    return res,msg+' ERROR'

  res['date'] = date

  empty3d=np.zeros([res['NZ'],res['NY'],res['NX']])
  empty2d=np.zeros([res['NY'],res['NX']])

  if 'temp' in f.keys():
    if not quiet: print '  loading temp...'
    if sett.temp_name in ncTemp.varnames: res['temp'] = netcdf.use(ncTemp,sett.temp_name,**inds)
    else:
      Msg='var %s not found' % 'temp'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['temp']=empty3d

  if 'salt' in f.keys():
    if not quiet: print '  loading salt...'
    if sett.salt_name in ncSalt.varnames: res['salt'] = netcdf.use(ncSalt,sett.salt_name,**inds)
    else:
      Msg='var %s not found' % 'salt'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['salt']=empty3d

  if 'u' in f.keys():
    if not quiet: print '  loading u...'
    if sett.u_name in ncU.varnames: res['u']    = netcdf.use(ncU,sett.u_name,**inds)
    else:
      Msg='var %s not found' % 'u'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['u']=empty3d

  if 'v' in f.keys():
    if not quiet: print '  loading v...'
    if sett.v_name in ncV.varnames: res['v']    = netcdf.use(ncV,sett.v_name,**inds)
    else:
      Msg='var %s not found' % 'v'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['v']=empty3d

  if 'ssh' in f.keys():
    if not quiet: print '  loading ssh...'
    if sett.ssh_name in ncSsh.varnames: res['ssh']  = netcdf.use(ncSsh,sett.ssh_name,**inds)
    else:
      Msg='var %s not found' % 'ssh'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['ssh']=empty2d

  for nc in ncUsed:
    try:  nc.close()
    except: pass

  return res, msg