Пример #1
0
    def extract(self, year, level=3, version=2, lims=False):
        """Extract sea surface salinity (sss) from the SMOS files of one year.

        Parameters
        ----------
        year : int
            year folder to scan for downloaded files
        level, version : int
            forwarded to SMOSdownload.destination_folder to locate the files
        lims : False or sequence
            if given, (xmin, xmax, ymin, ymax) limits used to subset the
            data (a margin of 2 cells is added)

        Returns
        -------
        lon, lat, res : res is an OrderedDict mapping each file's 'time'
            to the tuple (date_start, date_stop, sss).

        NOTE(review): if no files are found, lon/lat/ii/jj are never bound
        and the final return raises NameError.
        """
        a = SMOSdownload(self.path)
        p = a.destination_folder(year, level, version, gen=False)
        files = glob.glob(os.path.join(p, '*.nc'))
        files.sort()

        res = OrderedDict()
        for f in files:
            print(' -- extracting from %s' % f)
            # lon/lat and the subsetting indices are computed once from the
            # first file; all files are assumed to share the same grid
            if f == files[0]:
                lon = netcdf.use(f, 'longitude')
                lat = netcdf.use(f, 'latitude')
                if lims:
                    i0, i1, j0, j1 = calc.ij_limits(lon,
                                                    lat,
                                                    lims[:2],
                                                    lims[2:],
                                                    margin=2)
                    ii = '%d:%d' % (i0, i1 + 1)
                    jj = '%d:%d' % (j0, j1 + 1)
                    lon = netcdf.use(f, 'longitude', longitude=ii)
                    lat = netcdf.use(f, 'latitude', latitude=jj)
                else:
                    ii, jj = ':', ':'

            date0 = netcdf.nctime(f, 'date_start')[0]
            date1 = netcdf.nctime(f, 'date_stop')[0]
            date = netcdf.nctime(f, 'time')[0]
            sss = netcdf.use(f, 'sss', longitude=ii, latitude=jj)
            res[date] = date0, date1, sss

        return lon, lat, res
Пример #2
0
  def extract(self,year,level=3,version=2,lims=False):
    '''Extract sea surface salinity (sss) from the SMOS files of one year.

    Returns lon, lat and an OrderedDict mapping each file's time to
    (date_start, date_stop, sss).  When lims=(xmin,xmax,ymin,ymax) the
    data is subset with a 2-cell margin.
    '''
    dl=SMOSdownload(self.path)
    folder=dl.destination_folder(year,level,version,gen=False)
    fnames=sorted(glob.glob(os.path.join(folder,'*.nc')))

    res=OrderedDict()
    first=True
    for fname in fnames:
      print(' -- extracting from %s'%fname)
      if first:
        # grid and subsetting indices come from the first file only
        first=False
        lon=netcdf.use(fname,'longitude')
        lat=netcdf.use(fname,'latitude')
        if lims:
          i0,i1,j0,j1=calc.ij_limits(lon,lat,lims[:2],lims[2:],margin=2)
          ii='%d:%d'%(i0,i1+1)
          jj='%d:%d'%(j0,j1+1)
          lon=netcdf.use(fname,'longitude',longitude=ii)
          lat=netcdf.use(fname,'latitude',latitude=jj)
        else:
          ii=jj=':'

      date0=netcdf.nctime(fname,'date_start')[0]
      date1=netcdf.nctime(fname,'date_stop')[0]
      date=netcdf.nctime(fname,'time')[0]
      sss=netcdf.use(fname,'sss',longitude=ii,latitude=jj)
      res[date]=date0,date1,sss

    return lon,lat,res
Пример #3
0
def get_ij_inds(grd,**kargs):
  '''Return i/j index limits [i1,i2,j1,j2] of a source grid (HYCOM by
  default) that cover the roms grid *grd*, expanded by 2 cells.

  kargs:
    url     : opendap url of the source dataset
    vlon,vlat : names of the source lon/lat variables
    lon,lat : use these coordinates instead of reading them from url
    lon_add : value added to the source lon after wrapping to 0..360
    fsave   : pickle file where the inds are dumped (False disables saving)
  '''
  f=kargs.get('url','http://tds.hycom.org/thredds/dodsC/glb_analysis')
  vlon=kargs.get('vlon','Longitude')
  vlat=kargs.get('vlat','Latitude')
  lon=kargs.get('lon',False)
  lat=kargs.get('lat',False)
  lon_add=kargs.get('lon_add',-360)
  fsave=kargs.get('fsave','ijinds.pickle')

  if lon is False:
    lon=netcdf.use(f,vlon)
    if np.any(lon>360): lon=np.mod(lon,360)
    lon+=lon_add

  if lat is False:
    lat=netcdf.use(f,vlat)

  rlon=netcdf.use(grd,'lon_rho')
  rlat=netcdf.use(grd,'lat_rho')
  xlim=rlon.min(),rlon.max()
  ylim=rlat.min(),rlat.max()
  from okean import calc
  i1,i2,j1,j2=calc.ij_limits(lon,lat,xlim,ylim,margin=1)

  # extra 2-cell safety margin on top of ij_limits' own margin:
  i1=i1-2
  i2=i2+2
  j1=j1-2
  j2=j2+2

  if fsave:
    np.asarray([i1,i2,j1,j2]).dump(fsave)
    print('saved %s'%fsave) # fixed: was a py2-only print statement

  return np.asarray([i1,i2,j1,j2])
Пример #4
0
def get_ij_inds(grd, **kargs):
    '''Return i/j index limits [i1,i2,j1,j2] of a source grid (HYCOM by
    default) that cover the roms grid *grd*, expanded by 2 cells.

    kargs:
      url       : opendap url of the source dataset
      vlon,vlat : names of the source lon/lat variables
      lon,lat   : use these coordinates instead of reading them from url
      lon_add   : value added to the source lon after wrapping to 0..360
      fsave     : pickle file where the inds are dumped (False disables)
    '''
    f = kargs.get('url', 'http://tds.hycom.org/thredds/dodsC/glb_analysis')
    vlon = kargs.get('vlon', 'Longitude')
    vlat = kargs.get('vlat', 'Latitude')
    lon = kargs.get('lon', False)
    lat = kargs.get('lat', False)
    lon_add = kargs.get('lon_add', -360)
    fsave = kargs.get('fsave', 'ijinds.pickle')

    if lon is False:
        lon = netcdf.use(f, vlon)
        if np.any(lon > 360): lon = np.mod(lon, 360)
        lon += lon_add

    if lat is False:
        lat = netcdf.use(f, vlat)

    rlon = netcdf.use(grd, 'lon_rho')
    rlat = netcdf.use(grd, 'lat_rho')
    xlim = rlon.min(), rlon.max()
    ylim = rlat.min(), rlat.max()
    from okean import calc
    i1, i2, j1, j2 = calc.ij_limits(lon, lat, xlim, ylim, margin=1)

    # extra 2-cell safety margin on top of ij_limits' own margin:
    i1 = i1 - 2
    i2 = i2 + 2
    j1 = j1 - 2
    j2 = j2 + 2

    if fsave:
        np.asarray([i1, i2, j1, j2]).dump(fsave)
        print('saved %s' % fsave)  # fixed: was a py2-only print statement

    return np.asarray([i1, i2, j1, j2])
Пример #5
0
def grid_vicinity(grid,x,y,margin=5,rect=False,retinds=False):
  '''
  Returns True for x,y points inside the roms grid boundary plus margin
  (margin = number of grid cells added around the grid).

  If rect is True, returns True for all points inside the smallest 2d xy
  box (usually a rectangle) around the grid; margin is then in xy units,
  not in grid cells.

  If retinds (rect case only), the rectangle indices are also returned:
  cond,inds=grid_vicinity(...); i1,i2,j1,j2=inds

  mma, TAMU 2011
  '''

  glon=netcdf.use(grid,'lon_rho')
  glat=netcdf.use(grid,'lat_rho')
  xlim=glon.min(),glon.max()
  ylim=glat.min(),glat.max()

  if x.ndim==1 and y.ndim==1:
    x,y=np.meshgrid(x,y)

  if not rect:
    # points inside the (shrunk) grid border polygon:
    from roms import Grid
    bx,by=Grid(grid).border(margin=-margin)
    return calc.inpolygon(x,y,bx,by)

  cond=np.zeros(x.shape,'bool')
  i1,i2,j1,j2=calc.ij_limits(x,y,xlim,ylim,margin)
  cond[j1:j2,i1:i2]=1
  if retinds:
    return cond,(i1,i2,j1,j2)
  return cond
Пример #6
0
def grid_vicinity(grid, x, y, margin=5, rect=False, retinds=False):
    '''
  Returns True for x,y points inside roms grid boundary plus margin.
  Margin is the number of cells to add around the grid.

  if rect is True returns True for all points in the smallest 2d xy grid
  (usually a rectangle) around the grid.
  In this case,margin is the rectangle margin, ie, in units of xy, not
  in  units of grid

  if retinds, in the case of rect, the rectangle j1,j2 and i1,i2 are
  also returned (cond,inds=grid_vicinity(....); i1,i2,j1,j2=inds)

  mma, TAMU 2011
  '''

    # bounding box of the grid rho points:
    xg = netcdf.use(grid, 'lon_rho')
    yg = netcdf.use(grid, 'lat_rho')
    xlim = xg.min(), xg.max()
    ylim = yg.min(), yg.max()

    # 1d target coordinates are expanded to 2d:
    if x.ndim == 1 and y.ndim == 1: x, y = np.meshgrid(x, y)

    if rect:
        # mark the smallest index rectangle covering the grid (+margin):
        out = np.zeros(x.shape, 'bool')
        i1, i2, j1, j2 = calc.ij_limits(x, y, xlim, ylim, margin)
        out[j1:j2, i1:i2] = 1
    else:
        # exact test against the grid border polygon, shrunk/grown by margin:
        from roms import Grid
        g = Grid(grid)
        xb, yb = g.border(margin=-margin)
        out = calc.inpolygon(x, y, xb, yb)

    if rect and retinds: return out, (i1, i2, j1, j2)
    else: return out
Пример #7
0
def read_wind(grd, date, ij=False):
    '''Read eastward/northward wind from the file given by source(date)
    over the region of roms grid *grd*.

    If ij is False, the i/j limits are computed from the grid and
    (lon, lat, u, v, ij) is returned; otherwise ij=(i0,i1,j0,j1) is used
    directly and only (u, v) is returned.
    '''
    f = source(date)
    print('-- reading from %s' % f)
    time = netcdf.nctime(f, 'time')

    # NOTE(review): the date-lookup branch is disabled (if 0); the first
    # time record is always used.
    if 0:
        try:
            i = np.where(time == date)[0][0]
        except:
            return 'date %s not found' % date.isoformat(' ')
    else:
        i = 0

    returnXY = False
    if ij is False:
        returnXY = True
        lon = netcdf.use(f, 'longitude')  # -180..180
        lat = netcdf.use(f, 'latitude')
        g = roms.Grid(grd)
        xl0 = np.asarray((g.lon.min(), g.lon.max()))
        xl = np.asarray((g.lon.min(), g.lon.max()))
        # source longitudes are -180..180; grids outside that range are
        # not supported:
        if np.any(xl > 180) or np.any(xl < -180):
            print('ERROR: grid is supposed to be -180<x<180')
            print(
                'Can be implemented with mpl_toolkits.basemap.shiftgrid ... TODO'
            )
            print('(http://matplotlib.org/basemap/api/basemap_api.html)')
            return

        yl = g.lat.min(), g.lat.max()
        ij = calc.ij_limits(lon, lat, xl, yl, margin=1)

    i0, i1, j0, j1 = ij
    u = netcdf.use(f,
                   'eastward_wind',
                   longitude='%d:%d' % (i0, i1),
                   latitude='%d:%d' % (j0, j1),
                   time=i)
    v = netcdf.use(f,
                   'northward_wind',
                   longitude='%d:%d' % (i0, i1),
                   latitude='%d:%d' % (j0, j1),
                   time=i)
    if returnXY:
        lon = netcdf.use(f,
                         'longitude',
                         longitude='%d:%d' % (i0, i1),
                         latitude='%d:%d' % (j0, j1))
        lat = netcdf.use(f,
                         'latitude',
                         longitude='%d:%d' % (i0, i1),
                         latitude='%d:%d' % (j0, j1))

        lon, lat = np.meshgrid(lon, lat)
        #if np.all(xl0<0): lon=lon-360 # this may be wrong ... if xl is near 0, lon ay have pos and neg values !!! fix this one day ...
        return lon, lat, u, v, ij
    else:
        return u, v
Пример #8
0
def extract_region(lon, lat, data, lons, lats):
    '''
  extract data inside region lonsxlats (xlim x ylim)
  used by getvar

  lons/lats default to the full lon/lat range when empty/False.
  '''
    xlim = lons if lons else (lon.min(), lon.max())
    ylim = lats if lats else (lat.min(), lat.max())
    i1, i2, j1, j2 = calc.ij_limits(lon, lat, xlim, ylim, margin=1)
    region = np.s_[j1:j2, i1:i2]
    return lon[region], lat[region], data[region]
Пример #9
0
  def extract(self,year,lims=False,date=False):
    '''Extract SST from the downloaded RTG grib files of one year.

    year : year folder to scan
    lims : False or (xmin,xmax,ymin,ymax) limits used to subset the data
           (2-cell margin; the i/j limits are cached in ijinds.npy)
    date : if given, only files matching date (YYYYMMDD suffix) are used

    Returns time,lon,lat,sst where sst is a masked array with one record
    per grib 'temp' message found.
    '''
    a=RTG_SSTdownload(self.path)
    p=a.destination_folder(year)
    if date:
      files=glob.glob(os.path.join(p,'*.%s'%date.strftime('%Y%m%d')))
    else:
      files=glob.glob(os.path.join(p,'*'))

    files.sort()

    # find ntimes (total number of grib 'temp' messages across all files):
    nt=0
    for f in files:
      nt=nt+len(gribu.findvar(f,'temp'))

    c=-1
    for f in files:
      print(' -- extracting from %s'%f)
      q=gribu.findvar(f,'temp')
      for V in q: # times per file
        c+=1

        if c==0:
          lat,lon=V.latlons()
          mask=self.load_mask(lon.shape)
          if lims:
            # bugfix: original tested lims[1]<0 twice; check BOTH lon limits
            if lims[0]<0 and lims[1]<0:
              lon=lon-360 # if not both lon lims <0, a few more lines are needed !
            print('calc ij inds...')
            ijname='ijinds.npy'
            if os.path.isfile(ijname):
              i0,i1,j0,j1=np.load(ijname)
            else:
              i0,i1,j0,j1=calc.ij_limits(lon,lat,lims[:2],lims[2:],margin=2)
              np.asarray([i0,i1,j0,j1]).dump(ijname)

            print('done')
            lon=lon[j0:j1,i0:i1]
            lat=lat[j0:j1,i0:i1]
            mask=mask[j0:j1,i0:i1]

          else: i0=False # sentinel: no subsetting

          sst=np.ma.zeros((nt,)+lon.shape,lon.dtype)
          time=np.zeros(nt,datetime.datetime)

        if not i0 is False:
          s=V.values[j0:j1,i0:i1]
        else: s=V.values

        # mask value 3 marks land/invalid points in the mask file:
        s=np.ma.masked_where(mask==3,s)

        sst[c]=s
        time[c]=V.analDate
        print('=> done for %s'%time[c].isoformat(' '))

    return time,lon,lat,sst
Пример #10
0
def extract_region(lon,lat,data,lons,lats):
  '''
  extract data inside region lonsxlats (xlim x ylim)
  used by getvar

  lons/lats default to the full lon/lat range when empty/False.
  Returns the lon, lat and data subsets (2d slices).
  '''
  if not lons: lons=lon.min(),lon.max()
  if not lats: lats=lat.min(),lat.max()
  # index rectangle covering the requested box (1 cell margin):
  i1,i2,j1,j2=calc.ij_limits(lon,lat,lons,lats,margin=1)
  return lon[j1:j2,i1:i2],lat[j1:j2,i1:i2],data[j1:j2,i1:i2]
Пример #11
0
def get(xlim, ylim):
    '''Subset the etopo1 file (module-level name ``f``) to xlim x ylim
    and save the result as etopo1_madeira.npz (x, y, z).
    '''
    lon = netcdf.use(f, 'lon')
    lat = netcdf.use(f, 'lat')
    i0, i1, j0, j1 = calc.ij_limits(lon, lat, xlim, ylim)

    sx = '%d:%d' % (i0, i1)
    sy = '%d:%d' % (j0, j1)

    lon = netcdf.use(f, 'lon', lon=sx)
    lat = netcdf.use(f, 'lat', lat=sy)
    z = netcdf.use(f, 'z', lon=sx, lat=sy)

    lon, lat = np.meshgrid(lon, lat)
    np.savez('etopo1_madeira.npz', x=lon, y=lat, z=z)
Пример #12
0
def ww3_file_data(fname, xlim=False, ylim=False, quiet=False):
    '''Read all messages of the WW3 grib file *fname*.

    xlim, ylim : optional (min,max) limits used to subset the data
    Returns lon, lat, time, val (val is a masked array).  The result is
    cached in a sidecar .npz file (name depends on xlim/ylim) so later
    calls load fast.
    '''
    # fast load aux tmp file:
    xylab = ''
    if xlim: xylab += '_%.2f_%.2f' % xlim
    if ylim: xylab += '_%.2f_%.2f' % ylim
    faux = fname + xylab + '.npz'
    if os.path.isfile(faux):
        if not quiet: print('loading from %s' % faux)
        a = np.load(faux)
        val = a['val']
        mval = a['mval']
        return a['lon'], a['lat'], a['time'], np.ma.masked_where(mval, val)

    f = pygrib.open(fname)

    nt = f.messages
    time = np.zeros(nt, datetime.datetime)

    for i in range(nt):
        o = f.message(i + 1)
        v = o.values
        if not quiet and i % 10 == 0:
            print('%03d of %03d %s' % (i, nt, o.validDate.isoformat(' ')))
        if i == 0:
            lat, lon = o.latlons()
            lon[lon > 180] = lon[lon > 180] - 360

            # bugfix: defaults were stored in unused lons/lats while
            # ij_limits received the raw (possibly False) xlim/ylim
            if not xlim: xlim = lon.min(), lon.max()
            if not ylim: ylim = lat.min(), lat.max()
            i1, i2, j1, j2 = calc.ij_limits(lon, lat, xlim, ylim, margin=1)
            lon = lon[j1:j2, i1:i2]
            lat = lat[j1:j2, i1:i2]
            ny, nx = v[j1:j2, i1:i2].shape
            val = np.ma.zeros((nt, ny, nx), 'f')

        time[i] = o.validDate
        val[i, ...] = v[j1:j2, i1:i2]

    # save aux tmp file for fast loading:
    if not quiet: print('saving aux file %s' % faux)
    np.savez(faux, lon=lon, lat=lat, time=time, val=val.data, mval=val.mask)

    return lon, lat, time, val
Пример #13
0
def ww3_file_data(fname,xlim=False,ylim=False,quiet=False):
  '''Read all messages of the WW3 grib file *fname*.

  xlim, ylim : optional (min,max) limits used to subset the data
  Returns lon,lat,time,val (val is a masked array).  The result is cached
  in a sidecar .npz file (name depends on xlim/ylim) for fast reloading.
  '''
  # fast load aux tmp file:
  xylab=''
  if xlim: xylab+='_%.2f_%.2f'%xlim
  if ylim: xylab+='_%.2f_%.2f'%ylim
  faux=fname+xylab+'.npz'
  if os.path.isfile(faux):
    if not quiet: print('loading from %s'%faux)
    a=np.load(faux)
    val=a['val']
    mval=a['mval']
    return a['lon'],a['lat'],a['time'],np.ma.masked_where(mval,val)


  f=pygrib.open(fname)

  nt=f.messages
  time=np.zeros(nt,datetime.datetime)

  for i in range(nt):
    o=f.message(i+1)
    v=o.values
    if not quiet and i%10==0: print('%03d of %03d %s'%(i,nt,o.validDate.isoformat(' ')))
    if i==0:
      lat,lon=o.latlons()
      lon[lon>180]=lon[lon>180]-360

      # bugfix: defaults were stored in unused lons/lats while ij_limits
      # received the raw (possibly False) xlim/ylim
      if not xlim: xlim=lon.min(),lon.max()
      if not ylim: ylim=lat.min(),lat.max()
      i1,i2,j1,j2=calc.ij_limits(lon,lat,xlim,ylim,margin=1)
      lon=lon[j1:j2,i1:i2]
      lat=lat[j1:j2,i1:i2]
      ny,nx=v[j1:j2,i1:i2].shape
      val=np.ma.zeros((nt,ny,nx),'f')

    time[i]=o.validDate
    val[i,...]=v[j1:j2,i1:i2]

  # save aux tmp file for fast loading:
  if not quiet: print('saving aux file %s'%faux)
  np.savez(faux,lon=lon,lat=lat,time=time,val=val.data,mval=val.mask)

  return lon,lat,time,val
Пример #14
0
    def extract(self, year, type, lims=False, date=False):
        """Extract analysed_sst from the downloaded OSTIA files of one year.

        year : year folder to scan
        type : forwarded to OSTIAdownload.destination_folder
        lims : False or (xmin, xmax, ymin, ymax) limits used to subset
               the data (2-cell margin)
        date : currently unused

        Returns time, lon, lat, sst (sst is a masked array with one
        record per file).
        """
        a = OSTIAdownload(self.path)
        p = a.destination_folder(year, type)  #########level,version,gen=False)
        files = glob.glob(os.path.join(p, '*.nc'))
        files.sort()

        res = OrderedDict()
        c = -1
        for f in files:
            c += 1
            print(' -- extracting from %s' % f)
            # grid and subsetting indices come from the first file only:
            if c == 0:
                lon = netcdf.use(f, 'lon')
                lat = netcdf.use(f, 'lat')
                if lims:
                    i0, i1, j0, j1 = calc.ij_limits(lon,
                                                    lat,
                                                    lims[:2],
                                                    lims[2:],
                                                    margin=2)
                    ii = '%d:%d' % (i0, i1 + 1)
                    jj = '%d:%d' % (j0, j1 + 1)
                    lon = netcdf.use(f, 'lon', lon=ii)
                    lat = netcdf.use(f, 'lat', lat=jj)
                else:
                    ii, jj = ':', ':'

            date = netcdf.nctime(f, 'time')[0]
            u = netcdf.use(f, 'analysed_sst', lon=ii, lat=jj)
            # output arrays are allocated once the first field is known:
            if c == 0:
                sst = np.ma.zeros((len(files), ) + u.shape, u.dtype)
                time = np.zeros(len(files), datetime.datetime)

            sst[c] = u
            time[c] = date


###      date0=netcdf.nctime(f,'date_start')[0]
###      date1=netcdf.nctime(f,'date_stop')[0]
#      res[date]=sst
#####  return lon,lat,res

        return time, lon, lat, sst
Пример #15
0
def read_wind(grd,date,ij=False):
  '''Read eastward/northward wind from the file given by source(date)
  over the region of roms grid *grd*.

  If ij is False, the i/j limits are computed from the grid and
  (lon,lat,u,v,ij) is returned; otherwise ij=(i0,i1,j0,j1) is used
  directly and only (u,v) is returned.
  '''
  f=source(date)
  print('-- reading from %s'%f) # fixed: was a py2-only print statement
  time=netcdf.nctime(f,'time')

  # NOTE(review): date lookup disabled (if 0); first record is always used
  if 0:
    try:
      i=np.where(time==date)[0][0]
    except:
      return 'date %s not found'%date.isoformat(' ')
  else: i=0

  returnXY=False
  if ij is False:
    returnXY=True
    lon=netcdf.use(f,'longitude') # -180..180
    lat=netcdf.use(f,'latitude')
    g=roms.Grid(grd)
    xl0=np.asarray((g.lon.min(),g.lon.max()))
    xl=np.asarray((g.lon.min(),g.lon.max()))
    if np.any(xl>180) or np.any(xl<-180):
      print('ERROR: grid is supposed to be -180<x<180')
      print('Can be implemented with mpl_toolkits.basemap.shiftgrid ... TODO')
      print('(http://matplotlib.org/basemap/api/basemap_api.html)')
      return

    yl=g.lat.min(),g.lat.max()
    ij=calc.ij_limits(lon,lat,xl,yl,margin=1)

  i0,i1,j0,j1=ij
  u=netcdf.use(f,'eastward_wind',longitude='%d:%d'%(i0,i1),latitude='%d:%d'%(j0,j1),time=i)
  v=netcdf.use(f,'northward_wind',longitude='%d:%d'%(i0,i1),latitude='%d:%d'%(j0,j1),time=i)
  if returnXY:
    lon=netcdf.use(f,'longitude',longitude='%d:%d'%(i0,i1),latitude='%d:%d'%(j0,j1))
    lat=netcdf.use(f,'latitude',longitude='%d:%d'%(i0,i1),latitude='%d:%d'%(j0,j1))

    lon,lat=np.meshgrid(lon,lat)
    #if np.all(xl0<0): lon=lon-360 # this may be wrong ... if xl is near 0, lon ay have pos and neg values !!! fix this one day ...
    return lon,lat,u,v, ij
  else: return u,v
Пример #16
0
  def extract(self,year,type,lims=False,date=False):
    '''Extract analysed_sst from the downloaded OSTIA files of one year.

    Returns time,lon,lat,sst (sst is a masked array, one record per
    file).  When lims=(xmin,xmax,ymin,ymax) the data is subset with a
    2-cell margin.
    '''
    dl=OSTIAdownload(self.path)
    folder=dl.destination_folder(year,type)
    fnames=sorted(glob.glob(os.path.join(folder,'*.nc')))

    res=OrderedDict()
    for c,fname in enumerate(fnames):
      print(' -- extracting from %s'%fname)
      if c==0:
        # grid and subsetting indices come from the first file only:
        lon=netcdf.use(fname,'lon')
        lat=netcdf.use(fname,'lat')
        if lims:
          i0,i1,j0,j1=calc.ij_limits(lon,lat,lims[:2],lims[2:],margin=2)
          ii='%d:%d'%(i0,i1+1)
          jj='%d:%d'%(j0,j1+1)
          lon=netcdf.use(fname,'lon',lon=ii)
          lat=netcdf.use(fname,'lat',lat=jj)
        else:
          ii=jj=':'

      date=netcdf.nctime(fname,'time')[0]
      u=netcdf.use(fname,'analysed_sst',lon=ii,lat=jj)
      if c==0:
        # allocate outputs once the first field's shape is known:
        sst=np.ma.zeros((len(fnames),)+u.shape,u.dtype)
        time=np.zeros(len(fnames),datetime.datetime)

      sst[c]=u
      time[c]=date

    return time,lon,lat,sst
Пример #17
0
def read_wind(grd,date,ij=False):
  '''Read uwnd/vwnd from the file given by source(date) over the region
  of roms grid *grd* at the record matching *date*.

  If ij is False, the i/j limits are computed from the grid and
  (lon,lat,u,v,ij) is returned; otherwise ij=(i0,i1,j0,j1) is used
  directly and only (u,v) is returned.  Returns an error string when
  the date is not found.
  '''
  f=source(date)
  print('-- reading from %s'%f) # fixed: was a py2-only print statement
  time=netcdf.nctime(f,'time')
  try:
    i=np.where(time==date)[0][0]
  except:
    # bugfix: original referenced undefined name d here (NameError)
    return 'date %s not found'%date.isoformat(' ')

  returnXY=False
  if ij is False:
    returnXY=True
    lon=netcdf.use(f,'lon')
    lat=netcdf.use(f,'lat')
    g=roms.Grid(grd)
    xl0=np.asarray((g.lon.min(),g.lon.max()))
    xl=np.asarray((g.lon.min(),g.lon.max()))
    # source lon is 0..360; shift fully-negative grids:
    if np.all(xl<0): xl=xl+360
    elif np.any(xl<0) and np.any(xl>0):
      print('ERROR: zero crossing not implemented !!!')
      print('can be done with mpl_toolkits.basemap.shiftgrid ... TODO')
      print('(http://matplotlib.org/basemap/api/basemap_api.html)')
      return

    yl=g.lat.min(),g.lat.max()
    ij=calc.ij_limits(lon,lat,xl,yl,margin=1)

  i0,i1,j0,j1=ij
  u=netcdf.use(f,'uwnd',lon='%d:%d'%(i0,i1),lat='%d:%d'%(j0,j1),time=i)
  v=netcdf.use(f,'vwnd',lon='%d:%d'%(i0,i1),lat='%d:%d'%(j0,j1),time=i)
  if returnXY:
    lon=netcdf.use(f,'lon',lon='%d:%d'%(i0,i1),lat='%d:%d'%(j0,j1))
    lat=netcdf.use(f,'lat',lon='%d:%d'%(i0,i1),lat='%d:%d'%(j0,j1))
    lon,lat=np.meshgrid(lon,lat)
    if np.all(xl0<0): lon=lon-360 # this may be wrong ... if xl is near 0, lon may have pos and neg values !!! fix this one day ...
    return lon,lat,u,v, ij
  else: return u,v
Пример #18
0
  def slicell(self,varname,X,Y,time=0,**opts):
    '''Slice variable *varname* along the lon/lat path (X,Y) at time
    index *time*.

    opts:
      coords : coordinates to attach to the output (default from
               self._default_coords('slicell'))
      data   : if not False, slice this array instead of reading the file
      extrap : extrapolate outside valid data (griddata option)
      lmask  : mask limit; points where the interpolated mask is above
               this value are masked (most strict value is 0)

    Returns a Data object with .v and the requested coordinates.
    '''
    coords=opts.get('coords',self._default_coords('slicell')).split(',')

    data      = opts.get('data',False)
    extrap    = opts.get('extrap',False)
    maskLimit = opts.get('lmask',0.5) # points where interpolated mask are above
                                      # this value are considered as mask!
                                      # Most strict value is 0

    out=Data()
    out.msg=self.check_slice(varname,t=time)
    if out.msg: return out

    X=np.asarray(X)
    Y=np.asarray(Y)
    if X.ndim>1: X=np.squeeze(X)
    if Y.ndim>1: Y=np.squeeze(Y) # bugfix: original squeezed X here

    x,y,h,m=self.grid.vars(ruvp=self.var_at(varname))

    # extract only the portion of the data needed:
    i0,i1,j0,j1=calc.ij_limits(x, y, (X.min(),X.max()),(Y.min(),Y.max()), margin=1)
    xi='%d:%d'%(i0,i1)
    eta='%d:%d'%(j0,j1)

    if data is False: V=self.use(varname,SEARCHtime=time,xi_SEARCH=xi,eta_SEARCH=eta)
    else: V=data[...,j0:j1,i0:i1] # bugfix: was bound to v, leaving V undefined

    x=x[j0:j1,i0:i1]
    y=y[j0:j1,i0:i1]
    h=h[j0:j1,i0:i1]
    m=m[j0:j1,i0:i1]

    # interpolate along the path (V may be 3d: z levels x eta x xi):
    if V.ndim==3:
      v=calc.griddata(x,y,V,X,Y,extrap=extrap,mask2d=m==0, keepMaskVal=maskLimit)
    elif V.ndim==2:
      v=calc.griddata(x,y,np.ma.masked_where(m==0,V),X,Y,extrap=extrap, keepMaskVal=maskLimit)

    # coords:
    if 'z' in coords and V.ndim==3:
      out.z=self.path_s_levels(time,X,Y,rw=varname[0])

    if 'd' in coords:
      d=calc.distance(X,Y)
      if v.ndim==2: d=np.tile(d,(v.shape[0],1))
      out.d=d

    if 'x' in coords:
      if v.ndim==2: X=np.tile(X,(v.shape[0],1))
      out.x=X

    if 'y' in coords:
      if v.ndim==2: Y=np.tile(Y,(v.shape[0],1))
      out.y=Y

    if 't' in coords: out.t=self.time[time]

    out.v=v
    out.coordsReq=','.join(sorted(coords))
    return out
Пример #19
0
def narr_file_data(fname,xlim=False,ylim=False,quiet=False):
  '''
  Returns bulk data from one NARR file

  fname : NARR netcdf file
  xlim, ylim : optional (min,max) limits used to subset the data
  Returns a dict of cb.Data objects (tair, rhum, pres, prate, radsw,
  radlw, dlwrf, wspd, uwnd, vwnd, sustr, svstr, cloud), or {} if the
  file cannot be opened.
  '''

  out={}

  # loading grid:
  # NOTE(review): the first branch is disabled (if 0) and references an
  # undefined name grd; the grid is read from grdTxt via load_grid()
  if 0:
    if not quiet: print(' reading lon,lat from file %s' % grd)
    nc=netcdf.ncopen(grd)
    x=nc.vars['East_longitude_0-360'][0,...]-360.
    y=nc.vars['Latitude_-90_to_+90'][0,...] # time always 1 !!
    nc.close()
  else:
    if not quiet: print(' reading lon,lat from file %s' % grdTxt)
    x,y=load_grid()
    #x=x-360.
    x=-x

  ny,nx=x.shape


  # subset the grid (or use it whole when no limits are given):
  if (xlim,ylim)==(False,False):i0,i1,j0,j1=0,nx,0,ny
  else:
    i0,i1,j0,j1=calc.ij_limits(x, y, xlim, ylim, margin=0)
    x=x[j0:j1,i0:i1]
    y=y[j0:j1,i0:i1]

  try:
    nc=netcdf.ncopen(fname)
  except:
    return {}

  # slice strings used for every variable read below:
  xx=str(i0)+':'+str(i1)
  yy=str(j0)+':'+str(j1)

  tdim=netcdf.fdim(nc,'time1')
  if tdim!=1: print('WARNING: tdim !=1  !!!!!!')

  # T surface [K->C]
  if not quiet: print(' --> T air')
  tair=netcdf.use(nc,'Temperature_surface',time1=0,x=xx,y=yy)
  tair=tair-273.15
  out['tair']=cb.Data(x,y,tair,'C')

  # R humidity [% -> 0--1]
  if not quiet: print(' --> R humidity')
  rhum=netcdf.use(nc,'Relative_humidity',time1=0,x=xx,y=yy)
  out['rhum']=cb.Data(x,y,rhum/100.,'0--1')

  # surface pressure [Pa]
  if not quiet: print(' --> Surface pressure')
  pres=netcdf.use(nc,'Pressure_surface',time1=0,x=xx,y=yy)
  out['pres']=cb.Data(x,y,pres,'Pa')

  # P rate [kg m-2 s-1 -> cm/d]
  if not quiet: print(' --> P rate')
  prate=netcdf.use(nc,'Precipitation_rate',time1=0,x=xx,y=yy)
  prate=prate*86400*100/1000.
  out['prate']=cb.Data(x,y,prate,'cm/d')

  # Net shortwave flux  [ W m-2]
  if not quiet: print(' --> Net shortwave flux')
  if not quiet: print('       SW down')
  sw_down=netcdf.use(nc,'Downward_shortwave_radiation_flux',time1=0,x=xx,y=yy)
  if not quiet: print('       SW up')
  sw_up=netcdf.use(nc,'Upward_short_wave_radiation_flux_surface',time1=0,x=xx,y=yy)
  sw_net=sw_down-sw_up
  out['radsw']=cb.Data(x,y,sw_net,'W m-2',info='positive downward')

  # Net longwave flux  [W/m^2]
  if not quiet: print(' --> Net longwave flux')
  if not quiet: print('       LW down')
  lw_down=netcdf.use(nc,'Downward_longwave_radiation_flux',time1=0,x=xx,y=yy)
  if not quiet: print('       LW up')
  lw_up=netcdf.use(nc,'Upward_long_wave_radiation_flux_surface',time1=0,x=xx,y=yy)
  lw_net=lw_down-lw_up
  out['radlw']=cb.Data(x,y,-lw_net,'W m-2',info='positive upward')

  # downward lw:
  out['dlwrf']=cb.Data(x,y,-lw_down,'W m-2',info='negative... downward')

  # U and V wind speed 10m
  if not quiet: print(' --> U and V wind')
  # vertical dim is height_above_ground1: 10 and 30 m
  uwnd=netcdf.use(nc,'u_wind_height_above_ground',height_above_ground1=0,time1=0,x=xx,y=yy)
  vwnd=netcdf.use(nc,'v_wind_height_above_ground',height_above_ground1=0,time1=0,x=xx,y=yy)

  if not quiet: print(' --> calc wind speed and stress')
  speed = np.sqrt(uwnd**2+vwnd**2)
  taux,tauy=air_sea.wind_stress(uwnd,vwnd)

  out['wspd']=cb.Data(x,y,speed,'m s-1')
  out['uwnd']=cb.Data(x,y,uwnd,'m s-1')
  out['vwnd']=cb.Data(x,y,vwnd,'m s-1')
  out['sustr']=cb.Data(x,y,taux,'Pa')
  out['svstr']=cb.Data(x,y,tauy,'Pa')

  # Cloud cover [0--100 --> 0--1]:
  if not quiet: print(' --> Cloud cover')
  clouds=netcdf.use(nc,'Total_cloud_cover',time1=0,x=xx,y=yy)
  out['cloud']=cb.Data(x,y,clouds/100.,'fraction (0--1)')

  nc.close()
  return  out
Пример #20
0
def update_wind_blended2(fname,datapaths,**kargs):
  '''
  In days without blended data will try to use quikscat data
  '''
  from okean.datasets import quikscat
  from okean.datasets import blended_wind
  a=blended_wind.WINDData(datapaths[0])
  b=quikscat.WINDData(datapaths[1])

  time=netcdf.nctime(fname,'time')
  date0=dts.next_date(time[0],-1)
  date1=dts.next_date(time[-1],+2)

  data=a.data(date0,date1)

  # limit area... otherwise, quikscat interp will be very slow!
  grd=netcdf.fatt(fname,'grd_file')
  import os
  if not os.path.isfile(grd): grd=kargs['grd']
  cond,inds=rt.grid_vicinity(grd,data['x'],data['y'],margin=5,rect=True,retinds=True)
  i1,i2,j1,j2=inds
  for d in data.keys():
    if   d == 'x': data[d]=data[d][i1:i2]
    elif d == 'y': data[d]=data[d][j1:j2]
    else: data[d]=data[d][j1:j2,i1:i2]


  # check for missing days:
  time0=list(data.keys()) # py3 fix: keys() is a view; need a list to remove from
  x0=data['x']
  y0=data['y']
  x0,y0=np.meshgrid(x0,y0)
  time0.remove('x')
  time0.remove('y')

  out=OrderedDict()
  out['x']=x0
  out['y']=y0
  info=''
  qs_ij_limits_done=False
  for d in dts.drange(date0,date1):
    found=0
    for t in time0:
      if (t.year,t.month,t.day)==(d.year,d.month,d.day):
        print('==> blended : ',t)
        out[t]=data[t]
        found=1

    if not found: # use quikscat:
      print('==> quikscat : ',d.strftime('%Y-%m-%d'))
      tmp= b.data(d,dts.next_date(d))
      if 'x' not in tmp: continue # py3 fix: dict.has_key was removed
      x,y=tmp['x'],tmp['y']
      x,y=np.meshgrid(x,y)

      # reduce qs data:
      if not qs_ij_limits_done:
        i1,i2,j1,j2=calc.ij_limits(x,y,[x0.min(),x0.max()],[y0.min(),y0.max()])
        qs_ij_limits_done=True

      # py3 fix: original used tmp.keys()[0] (a py2 list index); assuming
      # the intended key is the single one besides 'x' and 'y'
      dkey=[k for k in tmp if k not in ('x','y')][0]
      x=x[j1:j2,i1:i2]
      y=y[j1:j2,i1:i2]
      tmp[dkey]=tmp[dkey][j1:j2,i1:i2]


      print('  griddata u')
      u=calc.griddata(x,y,tmp[dkey].real,x0,y0)
      print('  griddata v')
      v=calc.griddata(x,y,tmp[dkey].imag,x0,y0)
      out[dkey]=u+1.j*v
      info+='#'+d.strftime('%Y%m%d')


  new_wind_info='blended+quikscat at days: '+info
  update_wind(fname,out,new_wind_info,**kargs)
Пример #21
0
  def slicell(self,varname,X,Y,time=0,**opts):
    '''Slice variable *varname* along the lon/lat path (X,Y) at time
    index *time*.

    opts:
      coords : coordinates to attach to the output (default from
               self._default_coords('slicell'))
      data   : if not False, slice this array instead of reading the file
      extrap : extrapolate outside valid data (griddata option)
      lmask  : mask limit; points where the interpolated mask is above
               this value are masked (most strict value is 0)

    Returns a Data object with .v, .info['v'] and the requested
    coordinates.
    '''
    coords=opts.get('coords',self._default_coords('slicell')).split(',')

    data      = opts.get('data',False)
    extrap    = opts.get('extrap',False)
    maskLimit = opts.get('lmask',0.5) # points where interpolated mask are above
                                      # this value are considered as mask!
                                      # Most strict value is 0

    out=Data()
    out.msg=self.check_slice(varname,t=time)
    if out.msg: return out

    X=np.asarray(X)
    Y=np.asarray(Y)
    if X.ndim>1: X=np.squeeze(X)
    if Y.ndim>1: Y=np.squeeze(Y) # bugfix: original squeezed X here

    x,y,h,m=self.grid.vars(ruvp=self.var_at(varname)[0])

    # extract only the portion of the data needed:
    i0,i1,j0,j1=calc.ij_limits(x, y, (X.min(),X.max()),(Y.min(),Y.max()), margin=1)
    xi='%d:%d'%(i0,i1)
    eta='%d:%d'%(j0,j1)

    if data is False: V=self.use(varname,SEARCHtime=time,xi_SEARCH=xi,eta_SEARCH=eta)
    else: V=data[...,j0:j1,i0:i1] # bugfix: was bound to v, leaving V undefined

    x=x[j0:j1,i0:i1]
    y=y[j0:j1,i0:i1]
    #h=h[j0:j1,i0:i1] # not used
    m=m[j0:j1,i0:i1]

    # interpolate along the path (V may be 3d: z levels x eta x xi):
    if V.ndim==3:
      v=calc.griddata(x,y,V,X,Y,extrap=extrap,mask2d=m==0, keepMaskVal=maskLimit)
    elif V.ndim==2:
      v=calc.griddata(x,y,np.ma.masked_where(m==0,V),X,Y,extrap=extrap, keepMaskVal=maskLimit)

    out.v=v
    out.info['v']['name']=varname
    out.info['v']['slice']='path npts=%d'%X.size
    try: out.info['v']['units']=netcdf.vatt(self.nc,varname,'units')
    except: pass


    # coords:
    if 'z' in coords and V.ndim==3:
      inds=dict(xi=(i0,i1),eta=(j0,j1))
      out.z=self.path_s_levels(time,X,Y,rw=self.var_at(varname)[1],inds=inds)

    if 'd' in coords:
      d=calc.distance(X,Y)
      if v.ndim==2: d=np.tile(d,(v.shape[0],1))
      out.d=d

    if 'x' in coords:
      if v.ndim==2: X=np.tile(X,(v.shape[0],1))
      out.x=X

    if 'y' in coords:
      if v.ndim==2: Y=np.tile(Y,(v.shape[0],1))
      out.y=Y

    if 't' in coords and self.hast(varname): out.t=self.time[time]

    out.coordsReq=','.join(sorted(coords))
    return out
Пример #22
0
def his2gnome(fname,
              his,
              grd=False,
              nomask=False,
              gshhsMask=True,
              xylim=False,
              dates=False,
              ij=(1, 1)):
    '''
  Creates GNOME currents (surface u,v) file from a ROMS history file.

  Parameters:
    fname: output GNOME netcdf file name
    his: ROMS history file/url
    grd: ROMS grid file; if False, his is used as grid source
    nomask: if True the output mask is set to 1 everywhere
    gshhsMask: use a high resolution land mask based on gshhs data; the
      mask file mask_gshhs.npy will be created at 1st usage. Requires
      high (h) resolution gshhs data (env variable GSHHS_MASK must be set).
    xylim: spatial subsetting limits (x0,x1,y0,y1), or False for all
    dates: time subsetting limits (date0,date1), or False for all
    ij: subsampling steps (deta,dxi)

  Ex:
    his2gnome(out,his,grd,dates=dates,ij=(2,2))
  '''

    if not grd: grd = his
    deta, dxi = ij

    dims = netcdf.fdim(his)
    xi, eta = dims['xi_rho'], dims['eta_rho']
    xi0, eta0 = xi, eta  # full domain size, needed for u,v -> rho averaging

    nc0 = netcdf.ncopen(his)
    time = netcdf.nctime(nc0, 'ocean_time')
    # for roms agrif:
    #t=netcdf.use(nc0,'scrum_time')
    #time=netcdf.num2date(t,'seconds since %d-01-01' % year0)

    x0 = netcdf.use(grd, 'lon_rho')
    y0 = netcdf.use(grd, 'lat_rho')
    ang = netcdf.use(grd, 'angle')

    if xylim is not False:
        xlim = xylim[:2]
        ylim = xylim[2:]
        i1, i2, j1, j2 = calc.ij_limits(x0, y0, xlim, ylim)
        print(i1, i2, j1, j2)
        xi = i2 - i1
        eta = j2 - j1
    else:
        i1, i2 = 0, xi
        j1, j2 = 0, eta

    # slice strings used by netcdf.use:
    XI = '%d:%d:%d' % (i1, i2, dxi)
    ETA = '%d:%d:%d' % (j1, j2, deta)

    # output (subsampled) grid size:
    xi = len(range(i1, i2, dxi))
    eta = len(range(j1, j2, deta))
    # create file:
    create_uv(fname, xi, eta)

    nc = netcdf.ncopen(fname, 'a')
    for v0, v in (('lon_rho', 'lon'), ('lat_rho', 'lat'),
                  ('mask_rho', 'mask'), ('h', 'depth')):
        print('filling %s with %s' % (v, v0))
        nc.vars[v][:] = netcdf.use(grd, v0, xi_rho=XI, eta_rho=ETA)

    if nomask:
        print('NO MASK !!!')
        nc.vars['mask'][:] = 1

    if gshhsMask:
        try:
            mask = np.load('mask_gshhs.npy')
        except Exception:
            # build the gshhs-based land mask and cache it for next usage:
            mask = 1 + 0 * netcdf.use(nc0, 'mask_rho', xi_rho=XI, eta_rho=ETA)
            mask = mask.astype('bool')
            x = netcdf.use(grd, 'lon_rho', xi_rho=XI, eta_rho=ETA)
            y = netcdf.use(grd, 'lat_rho', xi_rho=XI, eta_rho=ETA)

            from okean import gshhs
            axis = x.min(), x.max(), y.min(), y.max()
            g = gshhs.gshhs(axis,
                            resolution='h',
                            area_thresh=0.,
                            max_level=2,
                            clip=True)
            for lon, lat, level in zip(g.lon, g.lat, g.level):
                if level == 1:  # land
                    print('mask ', lon.shape)
                    i = calc.inpolygon(x, y, lon, lat)
                    mask = mask & ~i

            mask.dump('mask_gshhs.npy')

        nc.vars['mask'][:] = mask

    x = x0[j1:j2:deta, i1:i2:dxi]
    y = y0[j1:j2:deta, i1:i2:dxi]
    ang = ang[j1:j2:deta, i1:i2:dxi]

    n = -1
    for it in range(len(time)):
        if dates is not False:
            d0, d1 = dates
            if time[it] < d0 or time[it] >= d1: continue

        n += 1
        U = np.zeros((eta0, xi0), 'f')
        V = np.zeros((eta0, xi0), 'f')

        nc.vars['time'][n] = netcdf.date2num(time[it], tunits)

        # surface currents; for roms agrif use time=it instead:
        #u=netcdf.use(nc0,'u',time=it,s_rho=-1)
        #v=netcdf.use(nc0,'v',time=it,s_rho=-1)
        u = netcdf.use(nc0, 'u', ocean_time=it, s_rho=-1)
        v = netcdf.use(nc0, 'v', ocean_time=it, s_rho=-1)

        # mask extrap:
        print('mask extrap...')

        u = calc.mask_extrap(x0, y0, np.ma.masked_where(u == 0, u))
        v = calc.mask_extrap(x0, y0, np.ma.masked_where(v == 0, v))

        # average u,v from their staggered grids to rho points:
        U[:, 1:-1] = 0.5 * (u[:, :-1] + u[:, 1:])
        U[:, 0] = u[:, 0]
        U[:, -1] = u[:, -1]

        # fix: original had v[:-1.:] (float in slice -> TypeError at runtime);
        # v[:-1, :] is the intended eta-direction average.
        V[1:-1, :] = 0.5 * (v[:-1, :] + v[1:, :])
        V[0, :] = v[0, :]
        V[-1, :] = v[-1, :]

        U = U[j1:j2, i1:i2]
        V = V[j1:j2, i1:i2]

        # NOTE(review): U,V were already subset to [j1:j2,i1:i2] above, so this
        # second slice is applied to the subset arrays; kept as in the original
        # — verify intent when j1 or i1 > 0.
        U = U[j1:j2:deta, i1:i2:dxi]
        V = V[j1:j2:deta, i1:i2:dxi]

        # rotate uv from grid-aligned to east/north components:
        print('rotating ...')
        U, V = calc.rot2d(U, V, -ang)

        print('filling uv', n, time[it])
        nc.vars['u'][n, ...] = U
        nc.vars['v'][n, ...] = V

    nc.close()
    nc0.close()
Пример #23
0
def frc2gnome(fname, frc, grd, xylim=False, dates=False, ij=(1, 1), **kargs):
    '''
  Creates GNOME wind file from a ROMS forcing file.

  Parameters:
    fname: output GNOME netcdf file name
    frc: ROMS forcing file/url
    grd: ROMS grid file/url
    xylim: spatial subsetting limits (x0,x1,y0,y1), or False for all
    dates: time subsetting limits (date0,date1), or False for all
    ij: subsampling steps (deta,dxi)

  kargs:
    t[u,v]var
    t[u,v]dim
    x[y,ang]var

  Ex:
    .frc2gnome(out,frc,grd,ij=(10,10),dates=dates,**{'tdim':'Time'})
  '''

    deta, dxi = ij

    # variable/dimension name defaults; override via kargs, ex for bulk
    # files: tvar='bulk_time', uvar='uwnd', vvar='vwnd', tdim='bulk_time'
    tvar = kargs.get('tvar', 'time')
    uvar = kargs.get('uvar', 'Uwind')
    vvar = kargs.get('vvar', 'Vwind')

    tdim = kargs.get('tdim', 'time')
    xdim = kargs.get('xdim', 'xi_rho')
    ydim = kargs.get('ydim', 'eta_rho')

    xvar = kargs.get('xvar', 'lon_rho')
    yvar = kargs.get('yvar', 'lat_rho')
    angvar = kargs.get('angvar', 'angle')

    dims = netcdf.fdim(grd)
    xi, eta = dims[xdim], dims[ydim]

    ncg = netcdf.ncopen(grd)

    nc0 = netcdf.ncopen(frc)
    try:
        t = netcdf.nctime(nc0, tvar)
    except Exception:
        # time var has no usable units; assume days since module-level year0
        t = netcdf.use(nc0, tvar)
        t = netcdf.num2date(t, 'days since %d-01-01' % year0)

    time = netcdf.date2num(t, tunits)

    x0 = netcdf.use(grd, xvar)
    y0 = netcdf.use(grd, yvar)
    if x0.ndim == 1: x0, y0 = np.meshgrid(x0, y0)

    if angvar:
        ang = netcdf.use(grd, angvar)

    if xylim is not False:
        xlim = xylim[:2]
        ylim = xylim[2:]
        i1, i2, j1, j2 = calc.ij_limits(x0, y0, xlim, ylim)
        xi = i2 - i1
        eta = j2 - j1
    else:
        i1, i2 = 0, xi
        j1, j2 = 0, eta

    # slice strings used by netcdf.use:
    XI = '%d:%d:%d' % (i1, i2, dxi)
    ETA = '%d:%d:%d' % (j1, j2, deta)

    # output (subsampled) grid size:
    xi = len(range(i1, i2, dxi))
    eta = len(range(j1, j2, deta))

    # create file:
    create_wind(fname, xi, eta)

    nc = netcdf.ncopen(fname, 'a')

    x = x0[j1:j2:deta, i1:i2:dxi]
    y = y0[j1:j2:deta, i1:i2:dxi]

    nc.vars['lon'][:] = x
    nc.vars['lat'][:] = y
    if angvar: ang = ang[j1:j2:deta, i1:i2:dxi]

    n = -1
    for it in range(len(time)):

        if dates is not False:
            d0, d1 = dates
            if t[it] < d0 or t[it] >= d1: continue

        n += 1
        u = netcdf.use(nc0, uvar, **{xdim: XI, ydim: ETA, tdim: it})
        v = netcdf.use(nc0, vvar, **{xdim: XI, ydim: ETA, tdim: it})

        # rotate uv from grid-aligned to east/north components:
        if angvar:
            print('rotating ...')
            u, v = calc.rot2d(u, v, -ang)

        nc.vars['time'][n] = time[it]
        print('filling uv', n, t[it])
        nc.vars['air_u'][n, ...] = u
        nc.vars['air_v'][n, ...] = v

    nc.close()
    nc0.close()
    ncg.close()
Пример #24
0
def load_data(f, quiet=0, **kargs):
    '''
  Loads prognostic variables (temp,salt,u,v,ubar,vbar,zeta) from
  netcdf file or opendap server. Also loads lon,lat, depth, and time.

  If f is a file, it must include the 1d variables lon,lat and depth;
  the 2d variable ssh (zeta) and the 3d variables temp, salt, u and v;
  ie, each file must contain data for a simgle time. The file must also
  contain the variable time.

  If f is a opendap address, it must contain also all these variables
  or the ones defined in the input karg settings (DataAccess object)

  To deal with the case of variables in different files/opendap addresses,
  f can also be a dictionary with keys the variables and values the files
  or opendap addresses. In this case, the keys must be:
    - temp
    - salt
    - u
    - v
    - ssh
    - misc, for lon, lat, depth, time and dimensions
      or xy for lon,lat and x,ydim; z for depth and zdim, time for time

  The output data (dict) is suitable to be used by data2roms, which
  interpolates the data to ROMS 3d grid.
  Also outputs an error/status string.

  kargs:
    inds, dict with dimension names/values (where time dim can be integer
          or datetime)
    settings, DataAccess object
    extra, extra misc vars to load [(outKey0,fileVar0),...]
    t_units, units of variable time, by default the att  units is used
  '''
    # NOTE(review): Python 2 code (print statement, dict.has_key, basestring).

    # options:
    sett = DataAccess()
    inds = {}
    extra = []
    t_units = []
    if 'settings' in kargs.keys(): sett = kargs['settings']
    if 'inds' in kargs.keys(): inds = kargs['inds']
    if 'extra' in kargs.keys(): extra = kargs['extra']
    if 't_units' in kargs.keys(): t_units = kargs['t_units']

    res = {}
    msg = ''

    # basic input check (only possible when f is a single file/url):
    if not isinstance(f, dict) and not f.startswith('http') and not isfile(f):
        msg = 'file not found %s' % f
        if not quiet: print msg
        return res, msg

    # load nc files:
    if not isinstance(f, dict):
        f = {'temp': f, 'salt': f, 'u': f, 'v': f, 'ssh': f, 'misc': f}

    # xy, z and time sources default to the misc source:
    if not f.has_key('xy'): f['xy'] = f['misc']
    if not f.has_key('z'): f['z'] = f['misc']
    if not f.has_key('time'): f['time'] = f['misc']

    # open each source once, reusing the handle when the same path repeats:
    filesUsed = []
    ncUsed = []
    for i in f.keys():
        if not quiet: print '(%s) loading from %s' % (i.ljust(5), f[i])

        if i == 'temp':
            if f[i] in filesUsed: ncTemp = ncUsed[filesUsed.index(f[i])]
            else:
                ncTemp = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncTemp]

        elif i == 'salt':
            if f[i] in filesUsed: ncSalt = ncUsed[filesUsed.index(f[i])]
            else:
                ncSalt = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncSalt]

        elif i == 'u':
            if f[i] in filesUsed: ncU = ncUsed[filesUsed.index(f[i])]
            else:
                ncU = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncU]

        elif i == 'v':
            if f[i] in filesUsed: ncV = ncUsed[filesUsed.index(f[i])]
            else:
                ncV = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncV]

        elif i == 'ssh':
            if f[i] in filesUsed: ncSsh = ncUsed[filesUsed.index(f[i])]
            else:
                ncSsh = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncSsh]

        elif i == 'xy':
            if f[i] in filesUsed: ncXy = ncUsed[filesUsed.index(f[i])]
            else:
                ncXy = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncXy]

        elif i == 'z':
            if f[i] in filesUsed: ncZ = ncUsed[filesUsed.index(f[i])]
            else:
                ncZ = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncZ]

        elif i == 'time':
            if f[i] in filesUsed: ncTime = ncUsed[filesUsed.index(f[i])]
            else:
                ncTime = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncTime]

        elif i == 'misc':
            if f[i] in filesUsed: ncMisc = ncUsed[filesUsed.index(f[i])]
            else:
                ncMisc = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncMisc]

    # load dims:
    if not quiet: print '  loading dims...'
    dimsXy = netcdf.fdim(ncXy)
    dimsZ = netcdf.fdim(ncZ)

    res['NX'] = dimsXy[sett.xdim]
    res['NY'] = dimsXy[sett.ydim]
    ###if sett.z_name:
    if sett.zdim:
        res['NZ'] = dimsZ[sett.zdim]
    else:
        res['NZ'] = 1

    # about horizontal inds:
    # when the x dimension ind is given as a 2-element (xlim) pair instead of
    # a slice string, convert the lon/lat limits to index slices:
    if inds.has_key(
            sett.xdim) and len(inds[sett.xdim]) == 2 and not isinstance(
                inds[sett.xdim], basestring):
        if not quiet: print '  calc horizontal inds...'
        xlim = inds[sett.xdim]
        ylim = inds[sett.ydim]

        inds.pop(sett.xdim)
        inds.pop(sett.ydim)

        lon = netcdf.use(ncXy, sett.x_name, **inds)
        if np.any(lon > 360): lon = np.mod(lon, 360.)
        lat = netcdf.use(ncXy, sett.y_name, **inds)
        i0, i1, j0, j1 = calc.ij_limits(lon, lat, xlim, ylim, margin=3)
        inds[sett.xdim] = '%d:%d' % (i0, i1)
        inds[sett.ydim] = '%d:%d' % (j0, j1)

    if not quiet: print '  loading lon, lat, depth...'
    res['lon'] = netcdf.use(ncXy, sett.x_name, **inds)
    if np.any(res['lon'] > 360): res['lon'] = np.mod(res['lon'], 360.)
    res['lat'] = netcdf.use(ncXy, sett.y_name, **inds)
    if sett.z_name:
        # depth stored positive-down in file; flip sign here
        res['depth'] = -netcdf.use(ncZ, sett.z_name, **inds)
    else:
        res['depth'] = False

    if res['lon'].size != res['lat'].size:
        res['lon'], res['lat'] = np.meshgrid(res['lon'], res['lat'])
        # needed for griddata, later

    # update nx,ny:
    if inds.has_key(sett.xdim):
        res['NY'], res['NX'] = res['lon'].shape

    # extra misc vars:
    if len(extra):
        for outKey, fileVar in extra:
            if not quiet:
                print '  loading extra misc... %s %s' % (outKey, fileVar)
            res[outKey] = netcdf.use(ncMisc, fileVar, **inds)

    # time:
    # file may have one or several times. If several, time dim must be given
    # with kargs inds!
    # but file may also have no time dim or time name !
    if sett.time_name:
        if not quiet: print '  loading time...'
        if t_units:
            times = netcdf.use(ncTime, sett.time_name)
            times = netcdf.num2date(times, t_units)
        else:
            times = netcdf.nctime(ncTime, sett.time_name)

        if inds.has_key(sett.tdim):
            # tind may be a date string (parsed here) or an integer index:
            try:
                tind = dts.parse_date(inds[sett.tdim])
            except:
                tind = inds[sett.tdim]  # is an integer, for instance

            if isinstance(tind, datetime.datetime):
                tind, = np.where(times == tind)
                if tind.size:
                    tind = tind[0]
                    inds[sett.
                         tdim] = tind  # update inds to extract other variables
                else:
                    Msg = 'date not found'
                    msg += '\n' + Msg
                    return res, msg + ' ERROR'

            date = times[tind]
            # date may be scalar or a sequence (when tind is a slice/list):
            try:
                len(date)
                ndates = True
            except:
                ndates = False

            if ndates:
                if not quiet:
                    print '    tind, date= len=%d: %d to %d, %s to %s' % (
                        len(date), tind[0], tind[-1], date[0].isoformat(' '),
                        date[-1].isoformat(' '))
            else:
                if not quiet:
                    print '    tind, date= %d %s' % (tind, date.isoformat(' '))

        elif times.size == 1:
            date = times[0]
            if not quiet: print '    date= %s' % date.isoformat(' ')
        else:  # must provide tind as input!!
            Msg = 'several dates in file... provice tind!'
            msg += '\n' + Msg
            return res, msg + ' ERROR'

        res['date'] = date
    else:
        if not quiet: print '    warning: not using time !!'
        res['date'] = 0

    # fallbacks used when a variable is missing from its file:
    empty3d = np.zeros([res['NZ'], res['NY'], res['NX']])
    empty2d = np.zeros([res['NY'], res['NX']])

    if 'temp' in f.keys():
        if not quiet: print '  loading temp...'
        if sett.temp_name in ncTemp.varnames:
            res['temp'] = netcdf.use(ncTemp, sett.temp_name, **inds)
        else:
            Msg = 'var %s not found' % 'temp'
            msg += '\n' + Msg
            if not quiet: print Msg
            res['temp'] = empty3d

    if 'salt' in f.keys():
        if not quiet: print '  loading salt...'
        if sett.salt_name in ncSalt.varnames:
            res['salt'] = netcdf.use(ncSalt, sett.salt_name, **inds)
        else:
            Msg = 'var %s not found' % 'salt'
            msg += '\n' + Msg
            if not quiet: print Msg
            res['salt'] = empty3d

    if 'u' in f.keys():
        if not quiet: print '  loading u...'
        if sett.u_name in ncU.varnames:
            res['u'] = netcdf.use(ncU, sett.u_name, **inds)
        else:
            Msg = 'var %s not found' % 'u'
            msg += '\n' + Msg
            if not quiet: print Msg
            res['u'] = empty3d

    if 'v' in f.keys():
        if not quiet: print '  loading v...'
        if sett.v_name in ncV.varnames:
            res['v'] = netcdf.use(ncV, sett.v_name, **inds)
        else:
            Msg = 'var %s not found' % 'v'
            msg += '\n' + Msg
            if not quiet: print Msg
            res['v'] = empty3d

    if 'ssh' in f.keys():
        if not quiet: print '  loading ssh...'
        if sett.ssh_name in ncSsh.varnames:
            res['ssh'] = netcdf.use(ncSsh, sett.ssh_name, **inds)
        else:
            Msg = 'var %s not found' % 'ssh'
            msg += '\n' + Msg
            if not quiet: print Msg
            res['ssh'] = empty2d

    # close all handles; ignore handles already closed (shared paths):
    for nc in ncUsed:
        try:
            nc.close()
        except:
            pass

    return res, msg
Пример #25
0
def roms2roms(f0,grd,sparams,tind=0,**kargs):
  '''
  Interpolates a ROMS solution (history file f0) to another ROMS grid,
  returning clm and bry data dicts (per time index).

  tind: ind or datetime

  **kargs:
  grd0
  sparams0
  tunits0
  quiet

  '''
  # NOTE(review): Python 2 code (print statements).

  grd0     = False
  sparams0 = False
  quiet    = False
  tunits0  = False
  Z='auto'
  ij='i'

  for k in kargs.keys():
    if   k=='grd0':     grd0      = kargs[k]
    elif k=='sparams0': sparams0 = kargs['sparams0']
    elif k=='quiet':    quiet    = kargs['quiet']
    elif k=='tunits0':  tunits0  = kargs['tunits0']
    elif k=='Z':  Z  = kargs['Z']
    elif k=='ij':  ij  = kargs['ij']


  r0=roms.His(f0,grd=grd0)
  g0=r0.grid
  g1=roms.Grid(grd)
  if sparams0 is False: sparams0=r0.s_params

  # load data:
  F0={}
  for i in ('temp','salt','u','v','ssh','time','misc'): F0[i]=f0
  F0['xy']=g0.name

  if tind=='all':
    times=range(r0.TIME)
  else: times=[tind]

  # destination grid limits, used to subset the source data:
  xlim=g1.lon.min(),g1.lon.max()
  ylim=g1.lat.min(),g1.lat.max()
  inds={}

  outdata={}
  outdatab={}
  for tind in times:
    # NOTE(review): 'sett' is not defined in this function; presumably a
    # module-level DataAccess instance — verify against the original module.
    inds[sett.tdim]=tind

    data,msg=prognostic.load_data(F0,quiet=quiet,settings=sett,inds=inds,t_units=tunits0)

    # z3d:
    data['z3d']=r0.s_levels(tind)

    # u,v at rho:
    print '  u,v at rho ...'
    u=np.ma.zeros((data['NZ'],data['NY'],data['NX']),data['u'].dtype)
    v=np.ma.zeros((data['NZ'],data['NY'],data['NX']),data['v'].dtype)

    u[:,:,1:-1]=(data['u'][:,:,:-1]+data['u'][:,:,1:])/2.
    u[:,:,0]=data['u'][:,:,0]
    u[:,:,-1]=data['u'][:,:,-1]

    v[:,1:-1,:]=(data['v'][:,:-1,:]+data['v'][:,1:,:])/2.
    v[:,0,:]=data['v'][:,0,:]
    v[:,-1,:]=data['v'][:,-1,:]

    print 'rot 2d from original grid'
    for k in range(v.shape[0]):
      u[k],v[k]=calc.rot2d(u[k],v[k],-g0.angle)

    data['u']=u
    data['v']=v

    # simplify data:
    print '  simplify data...'
    i0,i1,j0,j1=calc.ij_limits(g0.lon,g0.lat,xlim,ylim,margin=3)
    for v in 'z3d','temp','salt','u','v': data[v]=data[v][:,j0:j1,i0:i1]
    for v in 'ssh','lon','lat': data[v]=data[v][j0:j1,i0:i1]
    data['NZ'],data['NY'],data['NX']=data['temp'].shape

    # interp depths:
    # NOTE(review): "Z is 'auto'" is an identity comparison with a str
    # literal; works on CPython via interning but should be "Z == 'auto'".
    if Z is 'auto':
      h=-g0.h
      Z=np.concatenate((np.arange(data['ssh'].max(),-2,-.05),np.arange(-2,-5,.2),np.arange(-5,-20,-1),np.arange(-20,-100,-2),
      np.arange(-100,-500,-5),np.arange(-500,-1000,-20),np.arange(-1000,h.min()-100,-100)))
      Z=Z[::3]
      if Z[-1]>h.min(): Z[-1]=h.min()

    data['depth']=Z

    for v in 'temp','salt','u','v','ssh':
      print '  %-6s %6.3f %6.3f'%(v,data[v].min(), data[v].max())

    # to z levels:
    Data=prognostic.data2z(data,quiet=quiet,ij=ij)
    for v in 'temp','salt','u','v','ssh':
      print '  %-6s %6.3f %6.3f'%(v,Data[v].min(), Data[v].max())


    # clm:
    data,HA=prognostic.data2roms(Data,grd,sparams,quiet=quiet,horizAux=True,ij=ij)
    for v in 'temp','salt','u','v','zeta':
      print '  %-6s %6.3f %6.3f'%(v,data[v].min(), data[v].max())

    # bry:
    datab=prognostic.data2romsbry(Data,grd,sparams,quiet=quiet,horizAux=HA)

    outdata[tind]  = data
    outdatab[tind] = datab

  if len(times)==1:  return outdata[tind],outdatab[tind]
  else: return outdata,outdatab
Пример #26
0
def his2gnome(fname,his,grd=False,nomask=False,gshhsMask=True,xylim=False,dates=False,ij=(1,1)):
  '''
  Creates GNOME wind file
  Ex:
    his2gnome(out,his,grd,dates=dates,ij=(2,2))

  if gshhsMask, the high res mask file mask_gshhs.npy will be created at 1st usage.
  Mask is based on high (h) resolution gshhs data which must be available (env variable
  GSHHS_MASK must be set). 
  '''
  # NOTE(review): Python 2 code (print statements).

  if not grd: grd=his
  deta,dxi=ij

  dims=netcdf.fdim(his)
  xi,eta=dims['xi_rho'],dims['eta_rho']
  xi0,eta0=xi,eta  # full domain size, used when averaging u,v to rho points

  nc0=netcdf.ncopen(his)
  time=netcdf.nctime(nc0,'ocean_time')
  # for roms agrif:
  #t=netcdf.use(nc0,'scrum_time')
  #time=netcdf.num2date(t,'seconds since %d-01-01' % year0)

  x0=netcdf.use(grd,'lon_rho')
  y0=netcdf.use(grd,'lat_rho')
  ang=netcdf.use(grd,'angle')

  if not xylim is False:
    xlim=xylim[:2]
    ylim=xylim[2:]
    i1,i2,j1,j2=calc.ij_limits(x0,y0,xlim,ylim)
    print i1,i2,j1,j2
    xi=i2-i1
    eta=j2-j1
  else:
    i1,i2=0,xi
    j1,j2=0,eta

  # slice strings used by netcdf.use:
  XI  ='%d:%d:%d' %(i1,i2,dxi)
  ETA ='%d:%d:%d' %(j1,j2,deta)

  # output (subsampled) grid size:
  xi=len(range(i1,i2,dxi))
  eta=len(range(j1,j2,deta))
  # create file:
  create_uv(fname,xi,eta)

  nc=netcdf.ncopen(fname,'a')
  for v0,v in ('lon_rho','lon'),('lat_rho','lat'),('mask_rho','mask'),('h','depth'):
    print 'filling %s with %s' % (v,v0)
    nc.vars[v][:]=netcdf.use(grd,v0,xi_rho=XI,eta_rho=ETA)

  if nomask:
    print 'NO MASK !!!'
    nc.vars['mask'][:]=1

  if gshhsMask:
    try:
     mask=np.load('mask_gshhs.npy')
    except:
      # build the gshhs based land mask and cache it for next usage:
      mask=1+0*netcdf.use(nc0,'mask_rho',xi_rho=XI,eta_rho=ETA)
      mask=mask.astype('bool')
      x=netcdf.use(grd,'lon_rho',xi_rho=XI,eta_rho=ETA)
      y=netcdf.use(grd,'lat_rho',xi_rho=XI,eta_rho=ETA)

      from okean import gshhs
      axis=x.min(),x.max(),y.min(),y.max()
      g=gshhs.gshhs(axis, resolution='h',area_thresh=0., max_level=2,clip=True)
      for lon, lat, level in zip(g.lon, g.lat, g.level):
        if level == 1: # land
          print 'mask ',lon.shape
          i=calc.inpolygon(x,y,lon,lat)
          mask=mask & ~i

      mask.dump('mask_gshhs.npy')


    nc.vars['mask'][:]=mask


  x=x0[j1:j2:deta,i1:i2:dxi]
  y=y0[j1:j2:deta,i1:i2:dxi]
  ang=ang[j1:j2:deta,i1:i2:dxi]

  n=-1
  for it in range(len(time)):
    if not dates is False:
      d0,d1=dates
      if time[it]<d0 or time[it]>=d1: continue

    n+=1
    U=np.zeros((eta0,xi0),'f')
    V=np.zeros((eta0,xi0),'f')

    nc.vars['time'][n]=netcdf.date2num(time[it],tunits)

    # for roms agrif:
    #u=netcdf.use(nc0,'u',time=it,s_rho=-1)
    #v=netcdf.use(nc0,'v',time=it,s_rho=-1)
    u=netcdf.use(nc0,'u',ocean_time=it,s_rho=-1)
    v=netcdf.use(nc0,'v',ocean_time=it,s_rho=-1)

    # mask extrap:
    print 'mask extrap...'

    u=calc.mask_extrap(x0,y0,np.ma.masked_where(u==0,u))
    v=calc.mask_extrap(x0,y0,np.ma.masked_where(v==0,v))

    # average u,v from staggered grids to rho points:
    U[:,1:-1]=0.5*(u[:,:-1]+u[:,1:])
    U[:,0]=u[:,0]
    U[:,-1]=u[:,-1]

    # NOTE(review): 'v[:-1.:]' has a float in the slice (TypeError at
    # runtime); almost certainly a typo for 'v[:-1,:]' — confirm and fix.
    V[1:-1,:]=0.5*(v[:-1.:]+v[1:,:])
    V[0,:]=v[0,:]
    V[-1,:]=v[-1,:]

    U=U[j1:j2,i1:i2]
    V=V[j1:j2,i1:i2]

    # NOTE(review): U,V are subset twice (above and here); the second slice
    # acts on already-subset arrays — verify intent when j1 or i1 > 0.
    U=U[j1:j2:deta,i1:i2:dxi]
    V=V[j1:j2:deta,i1:i2:dxi]

    # rotate uv:
    print 'rotating ...'
    U,V=calc.rot2d(U,V,-ang)

    print 'filling uv', n, time[it]
    nc.vars['u'][n,...]=U
    nc.vars['v'][n,...]=V

  nc.close()
  nc0.close()
Пример #27
0
def frc2gnome(fname,frc,grd,xylim=False,dates=False,ij=(1,1),**kargs):
  '''
  Creates GNOME wind file
  kargs:
    t[u,v]var
    t[u,v]dim
    x[y,ang]var

  Ex:
    .frc2gnome(out,frc,grd,ij=(10,10),dates=dates,**{'tdim':'Time'})
  '''
  # NOTE(review): Python 2 code (print statements).

  deta,dxi=ij

  # default variable/dimension names; override via kargs (ex, bulk files
  # use the commented alternatives below):
  tvar='time'
  uvar='Uwind'
  vvar='Vwind'
  #tvar='bulk_time'
  #uvar='uwnd'
  #vvar='vwnd'

  tdim='time'
  #tdim='bulk_time'
  xdim='xi_rho'
  ydim='eta_rho'

  xvar='lon_rho'
  yvar='lat_rho'
  angvar='angle'

  if 'tvar' in kargs.keys(): tvar=kargs['tvar']
  if 'uvar' in kargs.keys(): uvar=kargs['uvar']
  if 'vvar' in kargs.keys(): vvar=kargs['vvar']

  if 'tdim' in kargs.keys(): tdim=kargs['tdim']
  if 'xdim' in kargs.keys(): xdim=kargs['xdim']
  if 'ydim' in kargs.keys(): ydim=kargs['ydim']

  if 'xvar' in kargs.keys(): xvar=kargs['xvar']
  if 'yvar' in kargs.keys(): yvar=kargs['yvar']
  if 'angvar' in kargs.keys(): angvar=kargs['angvar']


  dims=netcdf.fdim(grd)
  xi,eta=dims[xdim],dims[ydim]
  xi0,eta0=xi,eta  # NOTE(review): xi0,eta0 appear unused in this function

  ncg=netcdf.ncopen(grd)

  nc0=netcdf.ncopen(frc)
  try:
   t=netcdf.nctime(nc0,tvar)
  except:
    # time var has no usable units; assume days since module-level year0
    t=netcdf.use(nc0,tvar)
    t=netcdf.num2date(t,'days since %d-01-01' % year0)

  time=netcdf.date2num(t,tunits)

  x0=netcdf.use(grd,xvar)
  y0=netcdf.use(grd,yvar)
  if x0.ndim==1: x0,y0=np.meshgrid(x0,y0)

  if angvar:
    ang=netcdf.use(grd,angvar)

  if not xylim is False:
    xlim=xylim[:2]
    ylim=xylim[2:]
    i1,i2,j1,j2=calc.ij_limits(x0,y0,xlim,ylim)
    xi=i2-i1
    eta=j2-j1
  else:
    i1,i2=0,xi
    j1,j2=0,eta

  # slice strings used by netcdf.use:
  XI  ='%d:%d:%d' %(i1,i2,dxi)
  ETA ='%d:%d:%d' %(j1,j2,deta)

  # output (subsampled) grid size:
  xi=len(range(i1,i2,dxi))
  eta=len(range(j1,j2,deta))

  # create file:
  create_wind(fname,xi,eta)

  nc=netcdf.ncopen(fname,'a')

  x=x0[j1:j2:deta,i1:i2:dxi]
  y=y0[j1:j2:deta,i1:i2:dxi]

  nc.vars['lon'][:]=x
  nc.vars['lat'][:]=y
  if angvar: ang=ang[j1:j2:deta,i1:i2:dxi]

  n=-1
  for it in range(len(time)):

    if not dates is False:
      d0,d1=dates
      if t[it]<d0 or t[it]>=d1: continue

    n+=1
    u=netcdf.use(nc0,uvar,**{xdim:XI,ydim:ETA,tdim:it})
    v=netcdf.use(nc0,vvar,**{xdim:XI,ydim:ETA,tdim:it})

    # rotate uv:
    if angvar:
      print 'rotating ...'
      u,v=calc.rot2d(u,v,-ang)


    nc.vars['time'][n]=time[it]
    print 'filling uv',n,t[it]
    nc.vars['air_u'][n,...]=u
    nc.vars['air_v'][n,...]=v


  nc.close()
  nc0.close()
  ncg.close()
Пример #28
0
  def slicell(self,varname,X,Y,time=0,**opts):
    '''
    Slice a variable along a lon,lat path.

    varname: model variable name
    X,Y: 1d path longitudes and latitudes
    time: time index

    opts:
      coords: coordinates to also return (subset of 'd,x,y,z,t')
      data: slice this array instead of reading varname from the file
      extrap: extrapolate to masked regions (default False)
      lmask: points where the interpolated mask is above this limit are
             considered masked; most strict value is 0 (default 0.5)
    '''
    coords=opts.get('coords',self._default_coords('slicell')).split(',')

    data      = opts.get('data',False)
    extrap    = opts.get('extrap',False)
    maskLimit = opts.get('lmask',0.5) # points where interpolated mask are above
                                      # this value are considered as mask!
                                      # Most strict value is 0

    out=vis.Data()
    out.label='slicell'
    out.msg=self.check_slice(varname,t=time)
    if out.msg: return out

    X=np.asarray(X)
    Y=np.asarray(Y)
    if X.ndim>1: X=np.squeeze(X)
    if Y.ndim>1: Y=np.squeeze(Y) # fix: original squeezed X here (typo)

    x,y,h,m=self.grid.vars(ruvp=self.vloc(varname)[0])
    if True: # extract only the portion of data needed:
      i0,i1,j0,j1=calc.ij_limits(x, y, (X.min(),X.max()),(Y.min(),Y.max()), margin=1)
      xi='%d:%d'%(i0,i1)
      eta='%d:%d'%(j0,j1)

      # NOTE(review): the data branch assigns 'v' while the file branch
      # assigns 'V' (used below) — kept as original; verify the data path.
      if data is False: V=self.use(varname,SEARCHtime=time,xi_SEARCH=xi,eta_SEARCH=eta)
      else: v=data[...,j0:j1,i0:i1]

      x=x[j0:j1,i0:i1]
      y=y[j0:j1,i0:i1]
      h=h[j0:j1,i0:i1]
      m=m[j0:j1,i0:i1]

    else:
      if data is False: V=self.use(varname,SEARCHtime=time)
      else: v=data

    # interpolate along the path (3d data is done level by level via mask2d):
    if V.ndim==3:
      v=calc.griddata(x,y,V,X,Y,extrap=extrap,mask2d=m==0, keepMaskVal=maskLimit)
    elif V.ndim==2:
      v=calc.griddata(x,y,np.ma.masked_where(m==0,V),X,Y,extrap=extrap, keepMaskVal=maskLimit)

    out.v=v
    out.info['v']['name']=varname
    out.info['v']['slice']='path npts=%d'%X.size
    try: out.info['v']['units']=netcdf.vatt(self.nc,varname,'units')
    except: pass

    # coords:
    if 'z' in coords and V.ndim==3:
      inds=dict(xi=(i0,i1),eta=(j0,j1))
      out.z=self.path_s_levels(time,X,Y,rw=self.vloc(varname)[1],inds=inds)
      out.info['z']=dict(name='Depth',units='m')

    if 'd' in coords:
      d=calc.distance(X,Y)
      if d[-1]-d[0]>1e4:
        d=d/1000.
        dunits='km'
      else: dunits='m'

      if v.ndim==2: d=np.tile(d,(v.shape[0],1))
      out.d=d
      out.info['d']=dict(name='Distance',units=dunits)

    if 'x' in coords:
      if v.ndim==2: X=np.tile(X,(v.shape[0],1))
      out.x=X
      out.info['x']=dict(name='Longitude',units=r'$\^o$E')

    if 'y' in coords:
      if v.ndim==2: Y=np.tile(Y,(v.shape[0],1))
      out.y=Y
      out.info['y']=dict(name='Latitude',units=r'$\^o$N')

    if 't' in coords and 't' in self.vaxes(varname): out.t=self.time[time]

    if v.ndim==2: # add a bottom line for 2d (vertical) slices:
      out.extra=[vis.Data()]
      if 'd' in coords: out.extra[0].x=out.d[0]
      if 'x' in coords: out.extra[0].y=out.x[0]
      if 'y' in coords: out.extra[0].x=out.y[0]
      h    = calc.griddata(x,y,h,X,Y,extrap=False)
      out.extra[0].v=-h # bottom
      out.extra[0].config['d1.plot']='fill_between'
      out.extra[0].config['d1.y0']=-h.max()-(h.max()-h.min())/20.
      out.extra[0].label='bottom'

    out.coordsReq=','.join(sorted(coords))
    return out
Пример #29
0
# NOTE(review): Python 2 script fragment; 'ncl', 'llon', 'llat', 'lbat',
# 'read_path' and 'vv' are defined earlier in the original script.
ncv  = ncl.variables
lon_geb  =  ncv[llon][:]
lat_geb  =  ncv[llat][:]
bat_geb  = -ncv[lbat][:]
# mask land (bathymetry stored positive-down in file, flipped above):
bat_geb  =  np.ma.masked_where(bat_geb < -2,bat_geb )  

lon_geb2,lat_geb2 = np.meshgrid(lon_geb,lat_geb)

# extent of new grid
lonlim1 = -79.0 ,-75.0
latlim1 =  31.0 , 35.1
limits = np.array([lonlim1,latlim1]).flatten()

# simplify data:
print 'Simplify data ...'
i0,i1,j0,j1 = calc.ij_limits(lon_geb2,lat_geb2,lonlim1,latlim1,margin=1)
lon_geb = lon_geb2 [j0:j1,i0:i1]
lat_geb = lat_geb2 [j0:j1,i0:i1]
bat_geb = bat_geb [j0:j1,i0:i1]

#read altimeter data positions
lon_sat,lat_sat = read_path(limits)

#from below plots
lon_tmp = np.array([-78.834167780118207, -77.217793492727196, -75.11248036395456, -76.700093543029013])
lat_tmp = np.array([ 33.94159435781355  , 31.065483526156946 , 32.376990065392356, 35.138056463782696])
#GRID    = np.array([lon_tmp,lat_tmp, [1.0, 1.0, 1.0, 1.0]])

if vv:
    # corners of the limits box (for plotting):
    x1 = [lonlim1[0], lonlim1[0], lonlim1[1], lonlim1[1]]
    y1 = [latlim1[1], latlim1[0], latlim1[0], latlim1[1]]
Пример #30
0
def narr_file_data(fname, xlim=False, ylim=False, quiet=False):
    '''
    Return bulk atmospheric forcing data from one NARR file.

    Parameters
    ----------
    fname : NARR netcdf file to read.
    xlim, ylim : optional (min,max) longitude/latitude pairs used to
        subset the grid; both False means the full domain is used.
    quiet : if True, suppress progress messages.

    Returns
    -------
    dict of cb.Data objects with keys: tair (C), rhum (0--1), pres (Pa),
    prate (cm/d), radsw, radlw, dlwrf (W m-2), wspd, uwnd, vwnd (m s-1),
    sustr, svstr (Pa) and cloud (0--1).
    An empty dict is returned if fname cannot be opened.
    '''
    out = {}

    # loading grid (lon,lat come from the grid text file, not from fname):
    if not quiet: print(' reading lon,lat from file %s' % grdTxt)
    x, y = load_grid()
    # longitudes are stored with the opposite sign:
    x = -x

    ny, nx = x.shape

    if (xlim, ylim) == (False, False):
        i0, i1, j0, j1 = 0, nx, 0, ny
    else:
        i0, i1, j0, j1 = calc.ij_limits(x, y, xlim, ylim, margin=0)
        x = x[j0:j1, i0:i1]
        y = y[j0:j1, i0:i1]

    try:
        nc = netcdf.ncopen(fname)
    except Exception:
        # missing/unreadable file: return empty result instead of raising
        return {}

    # index strings used to subset every variable read below:
    xx = str(i0) + ':' + str(i1)
    yy = str(j0) + ':' + str(j1)

    tdim = netcdf.fdim(nc, 'time1')
    if tdim != 1: print('WARNING: tdim !=1  !!!!!!')

    # T surface [K->C]
    if not quiet: print(' --> T air')
    tair = netcdf.use(nc, 'Temperature_surface', time1=0, x=xx, y=yy)
    tair = tair - 273.15
    out['tair'] = cb.Data(x, y, tair, 'C')

    # R humidity [% -> 0--1]
    if not quiet: print(' --> R humidity')
    rhum = netcdf.use(nc, 'Relative_humidity', time1=0, x=xx, y=yy)
    out['rhum'] = cb.Data(x, y, rhum / 100., '0--1')

    # surface pressure [Pa]
    if not quiet: print(' --> Surface pressure')
    pres = netcdf.use(nc, 'Pressure_surface', time1=0, x=xx, y=yy)
    out['pres'] = cb.Data(x, y, pres, 'Pa')

    # P rate [kg m-2 s-1 -> cm/d]
    if not quiet: print(' --> P rate')
    prate = netcdf.use(nc, 'Precipitation_rate', time1=0, x=xx, y=yy)
    prate = prate * 86400 * 100 / 1000.
    out['prate'] = cb.Data(x, y, prate, 'cm/d')

    # Net shortwave flux  [W m-2]
    if not quiet: print(' --> Net shortwave flux')
    if not quiet: print('       SW down')
    sw_down = netcdf.use(nc,
                         'Downward_shortwave_radiation_flux',
                         time1=0,
                         x=xx,
                         y=yy)
    if not quiet: print('       SW up')
    sw_up = netcdf.use(nc,
                       'Upward_short_wave_radiation_flux_surface',
                       time1=0,
                       x=xx,
                       y=yy)
    sw_net = sw_down - sw_up
    out['radsw'] = cb.Data(x, y, sw_net, 'W m-2', info='positive downward')

    # Net longwave flux  [W/m^2]
    if not quiet: print(' --> Net longwave flux')
    if not quiet: print('       LW down')
    lw_down = netcdf.use(nc,
                         'Downward_longwave_radiation_flux',
                         time1=0,
                         x=xx,
                         y=yy)
    if not quiet: print('       LW up')
    lw_up = netcdf.use(nc,
                       'Upward_long_wave_radiation_flux_surface',
                       time1=0,
                       x=xx,
                       y=yy)
    lw_net = lw_down - lw_up
    out['radlw'] = cb.Data(x, y, -lw_net, 'W m-2', info='positive upward')

    # downward lw:
    out['dlwrf'] = cb.Data(x,
                           y,
                           -lw_down,
                           'W m-2',
                           info='negative... downward')

    # U and V wind speed 10m
    if not quiet: print(' --> U and V wind')
    # vertical dim is height_above_ground1: 10 and 30 m; level 0 is 10 m
    uwnd = netcdf.use(nc,
                      'u_wind_height_above_ground',
                      height_above_ground1=0,
                      time1=0,
                      x=xx,
                      y=yy)
    vwnd = netcdf.use(nc,
                      'v_wind_height_above_ground',
                      height_above_ground1=0,
                      time1=0,
                      x=xx,
                      y=yy)

    if not quiet: print(' --> calc wind speed and stress')
    speed = np.sqrt(uwnd**2 + vwnd**2)
    taux, tauy = air_sea.wind_stress(uwnd, vwnd)

    out['wspd'] = cb.Data(x, y, speed, 'm s-1')
    out['uwnd'] = cb.Data(x, y, uwnd, 'm s-1')
    out['vwnd'] = cb.Data(x, y, vwnd, 'm s-1')
    out['sustr'] = cb.Data(x, y, taux, 'Pa')
    out['svstr'] = cb.Data(x, y, tauy, 'Pa')

    # Cloud cover [0--100 --> 0--1]:
    if not quiet: print(' --> Cloud cover')
    clouds = netcdf.use(nc, 'Total_cloud_cover', time1=0, x=xx, y=yy)
    out['cloud'] = cb.Data(x, y, clouds / 100., 'fraction (0--1)')

    nc.close()
    return out
Пример #31
0
def cordex_file_data(f,lims=False,quiet=False):
  '''
  CORDEX data for ROMS

  No accumulated variables are considered

  f     : CORDEX netcdf file
  lims  : optional (xlim,ylim) lon/lat limits used to subset the domain;
          False loads the full grid
  quiet : if True, suppress progress messages

  Returns a dict of Data objects (time, tair, rhum, pres, prate, radsw,
  radlw, dlwrf, wspd, uwnd, vwnd, sustr, svstr and, if present, cloud)
  passed through fill_extremes.
  '''

  out={}

  # time, x, y:
  if not quiet: print(' reading time,x,y')
  out['time']=netcdf.nctime(f,'time')
  x=netcdf.use(f,'lon')
  y=netcdf.use(f,'lat')
  x[x>180]=x[x>180]-360 # longitudes [0,360] --> [-180,180]
  if x.ndim==1 and y.ndim==1:
    x,y=np.meshgrid(x,y)

  if np.ma.isMA(x): x=x.data
  if np.ma.isMA(y): y=y.data

  if lims:
    from okean import calc
    xlim,ylim=lims
    i1,i2,j1,j2=calc.ij_limits(x,y,xlim,ylim,margin=3)
  else:
    # fix: this branch previously set i0/j0 (unused) and left i2/j2
    # undefined, raising NameError below whenever lims was False
    i1=0
    j1=0
    j2,i2=x.shape

  I=range(i1,i2)
  J=range(j1,j2)
  x=x[j1:j2,i1:i2]
  y=y[j1:j2,i1:i2]

  # tair [K-->C]
  if not quiet: print(' --> T air')
  vname='tair'
  tair=netcdf.use(f,vname,lon=I,lat=J)-273.15
  out['tair']=Data(x,y,tair,'Celsius')

  # R humidity [0--1]
  if not quiet: print(' --> R humidity (from specific humidity)')
  vname='humid'
  q=netcdf.use(f,vname,lon=I,lat=J) # specific humidity
  rhum=q/air_sea.qsat(tair)
  rhum[rhum>1]=1 # clip supersaturated values
  out['rhum']=Data(x,y,rhum,'0--1')

  # surface pressure [Pa]
  if not quiet: print(' --> Surface pressure')
  vname='press'
  pres=netcdf.use(f,vname,lon=I,lat=J)
  out['pres']=Data(x,y,pres,'Pa')

  # P rate [kg m-2 s-1 -> cm/d]
  if not quiet: print(' --> P rate')
  vname='rain'
  prate=netcdf.use(f,vname,lon=I,lat=J)
  prate=prate*86400*100/1000.
  prate[prate<0]=0 # remove spurious negative precipitation
  out['prate']=Data(x,y,prate,'cm/d')

  # Net shortwave flux  [W m-2]
  if not quiet: print(' --> Net shortwave flux')
  if not quiet: print('       SW down')
  sw_down=netcdf.use(f,'sw_down',lon=I,lat=J)
  if not quiet: print('       SW up')
  sw_up=netcdf.use(f,'sw_up',lon=I,lat=J)
  sw_net=sw_down-sw_up
  out['radsw']=Data(x,y,sw_net,'W m-2',info='positive downward')

  # Net longwave flux  [W m-2]
  if not quiet: print(' --> Net longwave flux')
  if not quiet: print('       LW down')
  lw_down=netcdf.use(f,'lw_down',lon=I,lat=J)
  if not quiet: print('       LW up')
  lw_up=netcdf.use(f,'lw_up',lon=I,lat=J)
  lw_net=lw_down-lw_up
  out['radlw']=Data(x,y,-lw_net,'W m-2',info='positive upward')
  # downward lw:
  out['dlwrf']=Data(x,y,-lw_down,'W m-2',info='negative... downward')
  # signs convention is better explained in wrf.py

  # U and V wind speed 10m
  if not quiet: print(' --> U and V wind')
  uwnd=netcdf.use(f,'u',lon=I,lat=J)
  vwnd=netcdf.use(f,'v',lon=I,lat=J)
  if not quiet: print(' --> calc wind speed and stress')
  speed = np.sqrt(uwnd**2+vwnd**2)
  taux,tauy=air_sea.wind_stress(uwnd,vwnd)

  out['wspd']=Data(x,y,speed,'m s-1')
  out['uwnd']=Data(x,y,uwnd,'m s-1')
  out['vwnd']=Data(x,y,vwnd,'m s-1')
  out['sustr']=Data(x,y,taux,'Pa')
  out['svstr']=Data(x,y,tauy,'Pa')

  # Cloud cover [0--100 --> 0--1]:
  if not quiet: print(' --> Cloud cover')
  if 'clouds' in netcdf.varnames(f):
    clouds=netcdf.use(f,'clouds',lon=I,lat=J)/100.
    out['cloud']=Data(x,y,clouds,'fraction (0--1)')
  else:
    print('==> clouds not present!')

  return fill_extremes(out,quiet)
Пример #32
0
def update_wind_blended2(fname, datapaths, **kargs):
    """
  In days without blended data will try to use quikscat data
  """
    # NOTE(review): Python 2 only -- relies on print statements,
    # dict.has_key and dict.keys() returning a list.
    from okean.datasets import quikscat
    from okean.datasets import blended_wind

    a = blended_wind.WINDData(datapaths[0])
    b = quikscat.WINDData(datapaths[1])

    # target dates: file time range padded 1 day before and 2 days after
    time = netcdf.nctime(fname, "time")
    date0 = dts.next_date(time[0], -1)
    date1 = dts.next_date(time[-1], +2)

    data = a.data(date0, date1)

    # limit are... otherwise, quikscat interp will be very slow!
    grd = netcdf.fatt(fname, "grd_file")
    import os

    if not os.path.isfile(grd):
        grd = kargs["grd"]
    cond, inds = rt.grid_vicinity(grd, data["x"], data["y"], margin=5, rect=True, retinds=True)
    i1, i2, j1, j2 = inds
    # restrict all fields to the grid vicinity:
    for d in data.keys():
        if d == "x":
            data[d] = data[d][i1:i2]
        elif d == "y":
            data[d] = data[d][j1:j2]
        else:
            data[d] = data[d][j1:j2, i1:i2]

    # check for missing days:
    # time0 keeps only the date keys (x and y removed below)
    time0 = data.keys()
    x0 = data["x"]
    y0 = data["y"]
    x0, y0 = np.meshgrid(x0, y0)
    time0.remove("x")
    time0.remove("y")

    out = cb.odict()
    out["x"] = x0
    out["y"] = y0
    info = ""
    qs_ij_limits_done = False
    for d in dts.drange(date0, date1):
        found = 0
        for t in time0:
            if (t.year, t.month, t.day) == (d.year, d.month, d.day):
                print "==> blended : ", t
                out[t] = data[t]
                found = 1

        if not found:  # use quikscat:
            print "==> quikscat : ", d.strftime("%Y-%m-%d")
            tmp = b.data(d, dts.next_date(d))
            if not tmp.has_key("x"):
                continue
            x, y = tmp["x"], tmp["y"]
            x, y = np.meshgrid(x, y)

            # reduce qs data:
            # quikscat grid is fixed, so the ij limits are computed once
            if not qs_ij_limits_done:
                i1, i2, j1, j2 = calc.ij_limits(x, y, [x0.min(), x0.max()], [y0.min(), y0.max()])
                qs_ij_limits_done = True

            x = x[j1:j2, i1:i2]
            y = y[j1:j2, i1:i2]
            # tmp.keys()[0] is assumed to be the date key holding the
            # complex wind (u+iv) -- TODO confirm key order assumption
            tmp[tmp.keys()[0]] = tmp[tmp.keys()[0]][j1:j2, i1:i2]

            print "  griddata u"
            u = calc.griddata(x, y, tmp[tmp.keys()[0]].real, x0, y0)
            print "  griddata v"
            v = calc.griddata(x, y, tmp[tmp.keys()[0]].imag, x0, y0)
            out[tmp.keys()[0]] = u + 1.0j * v
            info += "#" + d.strftime("%Y%m%d")

    new_wind_info = "blended+quikscat at days: " + info
    update_wind(fname, out, new_wind_info, **kargs)
Пример #33
0
  def slicell(self,varname,X,Y,time=0,**opts):
    '''
    Slice variable varname along a lon/lat path (X,Y) at time index time.

    opts:
      coords : comma separated coordinates to add to the output
               (default from self._default_coords('slicell'))
      data   : pre-loaded array to slice instead of reading varname
      extrap : extrapolate outside the domain (default False)
      lmask  : mask limit for the interpolated land mask (default 0.5);
               points where the interpolated mask exceeds this value are
               considered masked (most strict value is 0)

    Returns a vis.Data object (with .msg set on error).
    '''
    coords=opts.get('coords',self._default_coords('slicell')).split(',')

    data      = opts.get('data',False)
    extrap    = opts.get('extrap',False)
    maskLimit = opts.get('lmask',0.5) # points where interpolated mask are above
                                      # this value are considered as mask!
                                      # Most strict value is 0

    out=vis.Data()
    out.label='slicell'
    out.msg=self.check_slice(varname,t=time)
    if out.msg: return out

    X=np.asarray(X)
    Y=np.asarray(Y)
    if X.ndim>1: X=np.squeeze(X)
    if Y.ndim>1: Y=np.squeeze(Y) # fix: was np.squeeze(X)

    x,y,h,m=self.grid.vars(ruvp=self.vloc(varname)[0])
    if True: # extract only portion of data needed:
      i0,i1,j0,j1=calc.ij_limits(x, y, (X.min(),X.max()),(Y.min(),Y.max()), margin=1)
      xi='%d:%d'%(i0,i1)
      eta='%d:%d'%(j0,j1)

      if data is False: V=self.use(varname,SEARCHtime=time,xi_SEARCH=xi,eta_SEARCH=eta)
      else: V=data[...,j0:j1,i0:i1] # fix: was assigned to v, leaving V undefined

      x=x[j0:j1,i0:i1]
      y=y[j0:j1,i0:i1]
      h=h[j0:j1,i0:i1]
      m=m[j0:j1,i0:i1]

    else:
      if data is False: V=self.use(varname,SEARCHtime=time)
      else: V=data # fix: was assigned to v

    # interpolate along the path; 3d fields use the 2d mask per level:
    if V.ndim==3:
      v=calc.griddata(x,y,V,X,Y,extrap=extrap,mask2d=m==0, keepMaskVal=maskLimit)
    elif V.ndim==2:
      v=calc.griddata(x,y,np.ma.masked_where(m==0,V),X,Y,extrap=extrap, keepMaskVal=maskLimit)

    out.v=v
    out.info['v']['name']=varname
    out.info['v']['slice']='path npts=%d'%X.size
    try: out.info['v']['units']=netcdf.vatt(self.nc,varname,'units')
    except Exception: pass

    # coords:
    if 'z' in coords and V.ndim==3:
      inds=dict(xi=(i0,i1),eta=(j0,j1))
      out.z=self.path_s_levels(time,X,Y,rw=self.vloc(varname)[1],inds=inds)
      out.info['z']=dict(name='Depth',units='m')

    if 'd' in coords:
      # distance along the path; switch to km for long paths
      d=calc.distance(X,Y)
      if d[-1]-d[0]>1e4:
        d=d/1000.
        dunits='km'
      else: dunits='m'

      if v.ndim==2: d=np.tile(d,(v.shape[0],1))
      out.d=d
      out.info['d']=dict(name='Distance',units=dunits)

    if 'x' in coords:
      if v.ndim==2: X=np.tile(X,(v.shape[0],1))
      out.x=X
      out.info['x']=dict(name='Longitude',units=r'$\^o$E')

    if 'y' in coords:
      if v.ndim==2: Y=np.tile(Y,(v.shape[0],1))
      out.y=Y
      out.info['y']=dict(name='Latitude',units=r'$\^o$N')

    if 't' in coords and 't' in self.vaxes(varname): out.t=self.time[time]

    if v.ndim==2:
      # add the bottom line as extra data for section plots:
      out.extra=[vis.Data()]
      if 'd' in coords: out.extra[0].x=out.d[0]
      if 'x' in coords: out.extra[0].y=out.x[0]
      if 'y' in coords: out.extra[0].x=out.y[0]
      h    = calc.griddata(x,y,h,X,Y,extrap=False)
      out.extra[0].v=-h # bottom
      out.extra[0].config['d1.plot']='fill_between'
      out.extra[0].config['d1.y0']=-h.max()-(h.max()-h.min())/20.
      out.extra[0].label='bottom'

    out.coordsReq=','.join(sorted(coords))
    return out
Пример #34
0
def update_wind_blended2(fname, datapaths, **kargs):
    '''
    Update wind in fname from blended wind data; days without blended
    data are filled with quikscat data.

    fname : ROMS forcing file to update
    datapaths : (blended_wind_path, quikscat_path)
    kargs : forwarded to update_wind; may include 'grd' (grid file) used
            when the grd_file attribute of fname does not exist
    '''
    from okean.datasets import quikscat
    from okean.datasets import blended_wind
    a = blended_wind.WINDData(datapaths[0])
    b = quikscat.WINDData(datapaths[1])

    # target dates: file time range padded 1 day before and 2 days after
    time = netcdf.nctime(fname, 'time')
    date0 = dts.next_date(time[0], -1)
    date1 = dts.next_date(time[-1], +2)

    data = a.data(date0, date1)

    # limit area... otherwise, quikscat interp will be very slow!
    grd = netcdf.fatt(fname, 'grd_file')
    import os
    if not os.path.isfile(grd): grd = kargs['grd']
    cond, inds = rt.grid_vicinity(grd,
                                  data['x'],
                                  data['y'],
                                  margin=5,
                                  rect=True,
                                  retinds=True)
    i1, i2, j1, j2 = inds
    # restrict all fields to the grid vicinity
    # (fix: iterate over a list copy -- values are reassigned below):
    for d in list(data.keys()):
        if d == 'x': data[d] = data[d][i1:i2]
        elif d == 'y': data[d] = data[d][j1:j2]
        else: data[d] = data[d][j1:j2, i1:i2]

    # check for missing days:
    # fix: py3 dict views have no remove(); materialize as a list
    time0 = list(data.keys())
    x0 = data['x']
    y0 = data['y']
    x0, y0 = np.meshgrid(x0, y0)
    time0.remove('x')
    time0.remove('y')

    out = OrderedDict()
    out['x'] = x0
    out['y'] = y0
    info = ''
    qs_ij_limits_done = False
    for d in dts.drange(date0, date1):
        found = 0
        for t in time0:
            if (t.year, t.month, t.day) == (d.year, d.month, d.day):
                print('==> blended : ', t)
                out[t] = data[t]
                found = 1

        if not found:  # use quikscat:
            print('==> quikscat : ', d.strftime('%Y-%m-%d'))
            tmp = b.data(d, dts.next_date(d))
            if 'x' not in tmp: continue  # fix: has_key removed in py3
            x, y = tmp['x'], tmp['y']
            x, y = np.meshgrid(x, y)

            # fix: tmp.keys()[0] fails in py3 (views not subscriptable);
            # select the single non-coordinate key (the date) explicitly:
            k = [kk for kk in tmp if kk not in ('x', 'y')][0]

            # reduce qs data:
            # quikscat grid is fixed, so the ij limits are computed once
            if not qs_ij_limits_done:
                i1, i2, j1, j2 = calc.ij_limits(x, y,
                                                [x0.min(), x0.max()],
                                                [y0.min(), y0.max()])
                qs_ij_limits_done = True

            x = x[j1:j2, i1:i2]
            y = y[j1:j2, i1:i2]
            tmp[k] = tmp[k][j1:j2, i1:i2]

            # complex wind: real part is u, imaginary part is v
            print('  griddata u')
            u = calc.griddata(x, y, tmp[k].real, x0, y0)
            print('  griddata v')
            v = calc.griddata(x, y, tmp[k].imag, x0, y0)
            out[k] = u + 1.j * v
            info += '#' + d.strftime('%Y%m%d')

    new_wind_info = 'blended+quikscat at days: ' + info
    update_wind(fname, out, new_wind_info, **kargs)
Пример #35
0
def load_data(f,quiet=0,**kargs):
  '''
  Loads prognostic variables (temp,salt,u,v,ubar,vbar,zeta) from
  netcdf file or opendap server. Also loads lon,lat, depth, and time.

  If f is a file, it must include the 1d variables lon,lat and depth;
  the 2d variable ssh (zeta) and the 3d variables temp, salt, u and v;
  ie, each file must contain data for a simgle time. The file must also
  contain the variable time.

  If f is a opendap address, it must contain also all these variables
  or the ones defined in the input karg settings (DataAccess object)

  To deal with the case of variables in different files/opendap addresses,
  f can also be a dictionary with keys the variables and values the files
  or opendap addresses. In this case, the keys must be:
    - temp
    - salt
    - u
    - v
    - ssh
    - misc, for lon, lat, depth, time and dimensions
      or xy for lon,lat and x,ydim; z for depth and zdim, time for time

  The output data (dict) is suitable to be used by data2roms, which
  interpolates the data to ROMS 3d grid.
  Also outputs an error/status string.

  kargs:
    inds, dict with dimension names/values (where time dim can be integer
          or datetime)
    settings, DataAccess object
    extra, extra misc vars to load [(outKey0,fileVar0),...]
    t_units, units of variable time, by default the att  units is used
  '''
  # NOTE(review): Python 2 only (print statement, dict.has_key, basestring)

  # defaults, possibly overridden by kargs:
  sett=DataAccess()
  inds={}
  extra=[]
  t_units=[]
  if 'settings' in kargs.keys(): sett    = kargs['settings']
  if 'inds'     in kargs.keys(): inds    = kargs['inds']
  if 'extra'    in kargs.keys(): extra   = kargs['extra']
  if 't_units'  in kargs.keys(): t_units = kargs['t_units']

  res={}
  msg=''

  if not isinstance(f,dict) and not f.startswith('http') and not isfile(f):
    msg='file not found %s' % f
    if not quiet: print msg
    return res, msg

  # load nc files:
  # normalize f to the dict form described in the docstring:
  if not isinstance(f,dict):
    f={'temp':f,'salt':f,'u':f,'v':f,'ssh':f,'misc':f}

  if not f.has_key('xy'):   f['xy']   = f['misc']
  if not f.has_key('z'):    f['z']    = f['misc']
  if not f.has_key('time'): f['time'] = f['misc']

  # open each distinct file only once; reuse handles via the
  # filesUsed/ncUsed cache:
  filesUsed=[]
  ncUsed=[]
  for i in f.keys():
    if not quiet: print '(%s) loading from %s' % (i.ljust(5),f[i])

    if i=='temp':
      if f[i] in filesUsed: ncTemp=ncUsed[filesUsed.index(f[i])]
      else:
        ncTemp=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncTemp]

    elif i=='salt':
      if f[i] in filesUsed: ncSalt=ncUsed[filesUsed.index(f[i])]
      else:
        ncSalt=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncSalt]

    elif i=='u':
      if f[i] in filesUsed: ncU=ncUsed[filesUsed.index(f[i])]
      else:
        ncU=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncU]

    elif i=='v':
      if f[i] in filesUsed: ncV=ncUsed[filesUsed.index(f[i])]
      else:
        ncV=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncV]

    elif i=='ssh':
      if f[i] in filesUsed: ncSsh=ncUsed[filesUsed.index(f[i])]
      else:
        ncSsh=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncSsh]

    elif i=='xy':
      if f[i] in filesUsed: ncXy=ncUsed[filesUsed.index(f[i])]
      else:
        ncXy=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncXy]

    elif i=='z':
      if f[i] in filesUsed: ncZ=ncUsed[filesUsed.index(f[i])]
      else:
        ncZ=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncZ]

    elif i=='time':
      if f[i] in filesUsed: ncTime=ncUsed[filesUsed.index(f[i])]
      else:
        ncTime=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncTime]

    elif i=='misc':
      if f[i] in filesUsed: ncMisc=ncUsed[filesUsed.index(f[i])]
      else:
        ncMisc=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncMisc]


  # load dims:
  if not quiet: print '  loading dims...'
  dimsXy=netcdf.fdim(ncXy)
  dimsZ =netcdf.fdim(ncZ)

  res['NX']=dimsXy[sett.xdim]
  res['NY']=dimsXy[sett.ydim]
  ###if sett.z_name:
  if sett.zdim:
    res['NZ']=dimsZ[sett.zdim]
  else:
    # 2d dataset (no vertical dimension)
    res['NZ']=1

  # about horizontal inds:
  # if inds gives lon/lat limits (a 2-element non-string value), convert
  # them here to index ranges with calc.ij_limits:
  if inds.has_key(sett.xdim) and len(inds[sett.xdim])==2 and not isinstance(inds[sett.xdim],basestring):
    if not quiet: print '  calc horizontal inds...'
    xlim=inds[sett.xdim]
    ylim=inds[sett.ydim]

    inds.pop(sett.xdim)
    inds.pop(sett.ydim)

    lon=netcdf.use(ncXy,sett.x_name,**inds)
    if np.any(lon>360): lon=np.mod(lon,360.)
    lat=netcdf.use(ncXy,sett.y_name,**inds)
    i0,i1,j0,j1=calc.ij_limits(lon,lat,xlim,ylim,margin=3)
    inds[sett.xdim]='%d:%d' % (i0,i1)
    inds[sett.ydim]='%d:%d' % (j0,j1)


  if not quiet: print '  loading lon, lat, depth...'
  res['lon']  = netcdf.use(ncXy,sett.x_name,**inds)
  if np.any(res['lon']>360): res['lon']=np.mod(res['lon'],360.)
  res['lat']  = netcdf.use(ncXy,sett.y_name,**inds)
  if sett.z_name:
    # depth stored positive down; flip sign here
    res['depth'] = -netcdf.use(ncZ,sett.z_name,**inds)
  else: res['depth']=False

  if res['lon'].size!=res['lat'].size:
    res['lon'],res['lat']=np.meshgrid(res['lon'],res['lat'])
    # needed for griddata, later

  # update nx,ny:
  if inds.has_key(sett.xdim):
    res['NY'],res['NX']=res['lon'].shape

  # extra misc vars:
  if len(extra):
    for outKey,fileVar in extra:
      if not quiet: print '  loading extra misc... %s %s' % (outKey,fileVar)
      res[outKey]=netcdf.use(ncMisc,fileVar,**inds)


  # time:
  # file may have one or several times. If several, time dim must be given
  # with kargs inds!
  if not quiet: print '  loading time...'
  if t_units:
    times=netcdf.use(ncTime,sett.time_name)
    times=netcdftime.num2date(times,t_units)
  else:
    times=netcdf.nctime(ncTime,sett.time_name)

  if inds.has_key(sett.tdim):
    # tind may be a date string (parsed) or an integer index:
    try: tind=dts.parse_date(inds[sett.tdim])
    except: tind=inds[sett.tdim] # is an integer, for instance

    if isinstance(tind,datetime.datetime):
      tind,=np.where(times==tind)
      if tind.size:
        tind=tind[0]
        inds[sett.tdim]=tind # update inds to extract other variables
      else:
        Msg='date not found'
        msg+='\n'+Msg
        return res,msg+' ERROR'

    date=times[tind]
    if not quiet: print '    tind, date= %d %s' % (tind,date.isoformat(' '))

  elif times.size==1:
    date=times[0]
    if not quiet: print '    date= %s' % date.isoformat(' ')
  else: # must provide tind as input!!
    Msg='several dates in file... provice tind!'
    msg+='\n'+Msg
    return res,msg+' ERROR'

  res['date'] = date

  # placeholders returned when a variable is missing from its file:
  empty3d=np.zeros([res['NZ'],res['NY'],res['NX']])
  empty2d=np.zeros([res['NY'],res['NX']])

  if 'temp' in f.keys():
    if not quiet: print '  loading temp...'
    if sett.temp_name in ncTemp.varnames: res['temp'] = netcdf.use(ncTemp,sett.temp_name,**inds)
    else:
      Msg='var %s not found' % 'temp'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['temp']=empty3d

  if 'salt' in f.keys():
    if not quiet: print '  loading salt...'
    if sett.salt_name in ncSalt.varnames: res['salt'] = netcdf.use(ncSalt,sett.salt_name,**inds)
    else:
      Msg='var %s not found' % 'salt'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['salt']=empty3d

  if 'u' in f.keys():
    if not quiet: print '  loading u...'
    if sett.u_name in ncU.varnames: res['u']    = netcdf.use(ncU,sett.u_name,**inds)
    else:
      Msg='var %s not found' % 'u'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['u']=empty3d

  if 'v' in f.keys():
    if not quiet: print '  loading v...'
    if sett.v_name in ncV.varnames: res['v']    = netcdf.use(ncV,sett.v_name,**inds)
    else:
      Msg='var %s not found' % 'v'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['v']=empty3d

  if 'ssh' in f.keys():
    if not quiet: print '  loading ssh...'
    if sett.ssh_name in ncSsh.varnames: res['ssh']  = netcdf.use(ncSsh,sett.ssh_name,**inds)
    else:
      Msg='var %s not found' % 'ssh'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['ssh']=empty2d

  # close all opened handles (best-effort):
  for nc in ncUsed:
    try:  nc.close()
    except: pass

  return res, msg
Пример #36
0
  def slicell(self,varname,X,Y,time=0,**opts):
    '''
    Slice variable varname along a lon/lat path (X,Y) at time index time.

    opts:
      coords : comma separated coordinates to add to the output
               (default from self._default_coords('slicell'))
      data   : pre-loaded array to slice instead of reading varname
      extrap : extrapolate outside the domain (default False)
      lmask  : mask limit for the interpolated land mask (default 0.5);
               points where the interpolated mask exceeds this value are
               considered masked (most strict value is 0)

    Returns a Data object (with .msg set on error).
    '''
    coords=opts.get('coords',self._default_coords('slicell')).split(',')

    data      = opts.get('data',False)
    extrap    = opts.get('extrap',False)
    maskLimit = opts.get('lmask',0.5) # points where interpolated mask are above
                                      # this value are considered as mask!
                                      # Most strict value is 0

    out=Data()
    out.msg=self.check_slice(varname,t=time)
    if out.msg: return out

    X=np.asarray(X)
    Y=np.asarray(Y)
    if X.ndim>1: X=np.squeeze(X)
    if Y.ndim>1: Y=np.squeeze(Y) # fix: was np.squeeze(X)

    x,y,h,m=self.grid.vars(ruvp=self.var_at(varname)[0])
    if True: # extract only portion of data needed:
      i0,i1,j0,j1=calc.ij_limits(x, y, (X.min(),X.max()),(Y.min(),Y.max()), margin=1)
      xi='%d:%d'%(i0,i1)
      eta='%d:%d'%(j0,j1)

      if data is False: V=self.use(varname,SEARCHtime=time,xi_SEARCH=xi,eta_SEARCH=eta)
      else: V=data[...,j0:j1,i0:i1] # fix: was assigned to v, leaving V undefined

      x=x[j0:j1,i0:i1]
      y=y[j0:j1,i0:i1]
      #h=h[j0:j1,i0:i1] # not used
      m=m[j0:j1,i0:i1]

    else:
      if data is False: V=self.use(varname,SEARCHtime=time)
      else: V=data # fix: was assigned to v

    # interpolate along the path; 3d fields use the 2d mask per level:
    if V.ndim==3:
      v=calc.griddata(x,y,V,X,Y,extrap=extrap,mask2d=m==0, keepMaskVal=maskLimit)
    elif V.ndim==2:
      v=calc.griddata(x,y,np.ma.masked_where(m==0,V),X,Y,extrap=extrap, keepMaskVal=maskLimit)

    out.v=v
    out.info['v']['name']=varname
    out.info['v']['slice']='path npts=%d'%X.size
    try: out.info['v']['units']=netcdf.vatt(self.nc,varname,'units')
    except Exception: pass


    # coords:
    if 'z' in coords and V.ndim==3:
      inds=dict(xi=(i0,i1),eta=(j0,j1))
      out.z=self.path_s_levels(time,X,Y,rw=self.var_at(varname)[1],inds=inds)

    if 'd' in coords:
      # distance along the path
      d=calc.distance(X,Y)
      if v.ndim==2: d=np.tile(d,(v.shape[0],1))
      out.d=d

    if 'x' in coords:
      if v.ndim==2: X=np.tile(X,(v.shape[0],1))
      out.x=X

    if 'y' in coords:
      if v.ndim==2: Y=np.tile(Y,(v.shape[0],1))
      out.y=Y

    if 't' in coords and self.hast(varname): out.t=self.time[time]

    out.coordsReq=','.join(sorted(coords))
    return out