Code Example #1
File: hycom.py Project: jcmt/okean
def get_ij_inds(grd,**kargs):
  f=kargs.get('url','http://tds.hycom.org/thredds/dodsC/glb_analysis')
  vlon=kargs.get('vlon','Longitude')
  vlat=kargs.get('vlat','Latitude')
  lon=kargs.get('lon',False)
  lat=kargs.get('lat',False)
  lon_add=kargs.get('lon_add',-360)
  fsave=kargs.get('fsave','ijinds.pickle')

  if lon is False:
    lon=netcdf.use(f,vlon)
    if np.any(lon>360): lon=np.mod(lon,360)
    lon+=lon_add

  if lat is False:
    lat=netcdf.use(f,vlat)

  rlon=netcdf.use(grd,'lon_rho')
  rlat=netcdf.use(grd,'lat_rho')
  xlim=rlon.min(),rlon.max()
  ylim=rlat.min(),rlat.max()
  from okean import calc
  i1,i2,j1,j2=calc.ij_limits(lon,lat,xlim,ylim,margin=1)

  i1=i1-2
  i2=i2+2
  j1=j1-2
  j2=j2+2

  if fsave:
    np.asarray([ i1,i2,j1,j2]).dump(fsave)
    print 'saved %s'%fsave

  return np.asarray([ i1,i2,j1,j2])
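
The function above stores the four indices with ndarray.dump, which writes a pickle; a self-contained round trip of that mechanism (with made-up index values and the default filename) could look like this:

import numpy as np

# Made-up i1, i2, j1, j2 standing in for the ij_limits output above.
inds = np.asarray([10, 120, 5, 80])
inds.dump('ijinds.pickle')                      # same call used for fsave above

# np.load falls back to pickle for files written by ndarray.dump;
# allow_pickle=True is required on recent NumPy versions.
i1, i2, j1, j2 = np.load('ijinds.pickle', allow_pickle=True)
print(i1, i2, j1, j2)
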
Code Example #2
def grid_vicinity(grid, x, y, margin=5, rect=False, retinds=False):
    '''
  Returns True for x,y points inside roms grid boundary plus margin.
  Margin is the number of cells to add around the grid.

  if rect is True returns True for all points in the smallest 2d xy grid
  (usually a rectangle) around the grid.
  In this case, margin is the rectangle margin, i.e., in units of xy, not
  in units of grid.

  if retinds, in the case of rect, the rectangle j1,j2 and i1,i2 are
  also returned (cond,inds=grid_vicinity(....); i1,i2,j1,j2=inds)

  mma, TAMU 2011
  '''

    xg = netcdf.use(grid, 'lon_rho')
    yg = netcdf.use(grid, 'lat_rho')
    xlim = xg.min(), xg.max()
    ylim = yg.min(), yg.max()

    if x.ndim == 1 and y.ndim == 1: x, y = np.meshgrid(x, y)

    if rect:
        out = np.zeros(x.shape, 'bool')
        i1, i2, j1, j2 = calc.ij_limits(x, y, xlim, ylim, margin)
        out[j1:j2, i1:i2] = 1
    else:
        from roms import Grid
        g = Grid(grid)
        xb, yb = g.border(margin=-margin)
        out = calc.inpolygon(x, y, xb, yb)

    if rect and retinds: return out, (i1, i2, j1, j2)
    else: return out
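
A minimal pure-NumPy illustration of what the rect=True branch above produces (shapes and index limits are made up): the returned mask is simply True inside the [j1:j2, i1:i2] rectangle given by calc.ij_limits.

import numpy as np

ny, nx = 50, 80                    # shape of the meshgridded x, y arrays
i1, i2, j1, j2 = 10, 30, 5, 25     # pretend output of calc.ij_limits
out = np.zeros((ny, nx), 'bool')
out[j1:j2, i1:i2] = True
print(out.sum() == (i2 - i1) * (j2 - j1))   # True
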
Code Example #3
File: roms_tools.py Project: jsh1012/okean
def grid_vicinity(grid,x,y,margin=5,rect=False,retinds=False):
  '''
  Returns True for x,y points inside roms grid boundary plus margin.
  Margin is the number of cells to add around the grid.

  if rect is True returns True for all points in the smallest 2d xy grid
  (usually a rectangle) around the grid.
  In this case, margin is the rectangle margin, i.e., in units of xy, not
  in units of grid.

  if retinds, in the case of rect, the rectangle j1,j2 and i1,i2 are
  also returned (cond,inds=grid_vicinity(....); i1,i2,j1,j2=inds)

  mma, TAMU 2011
  '''

  xg=netcdf.use(grid,'lon_rho')
  yg=netcdf.use(grid,'lat_rho')
  xlim=xg.min(),xg.max()
  ylim=yg.min(),yg.max()

  if x.ndim==1 and y.ndim==1: x,y=np.meshgrid(x,y)

  if rect:
    out=np.zeros(x.shape,'bool')
    i1,i2,j1,j2=calc.ij_limits(x,y,xlim,ylim,margin)
    out[j1:j2,i1:i2]=1
  else:
    from roms import Grid
    g=Grid(grid)
    xb,yb=g.border(margin=-margin)
    out=calc.inpolygon(x,y,xb,yb)

  if rect and retinds: return out,(i1,i2,j1,j2)
  else: return out
Code Example #4
File: hycom.py Project: rsignell-usgs/okean
def get_ij_inds(grd, **kargs):
    f = kargs.get('url', 'http://tds.hycom.org/thredds/dodsC/glb_analysis')
    vlon = kargs.get('vlon', 'Longitude')
    vlat = kargs.get('vlat', 'Latitude')
    lon = kargs.get('lon', False)
    lat = kargs.get('lat', False)
    lon_add = kargs.get('lon_add', -360)
    fsave = kargs.get('fsave', 'ijinds.pickle')

    if lon is False:
        lon = netcdf.use(f, vlon)
        if np.any(lon > 360): lon = np.mod(lon, 360)
        lon += lon_add

    if lat is False:
        lat = netcdf.use(f, vlat)

    rlon = netcdf.use(grd, 'lon_rho')
    rlat = netcdf.use(grd, 'lat_rho')
    xlim = rlon.min(), rlon.max()
    ylim = rlat.min(), rlat.max()
    from okean import calc
    i1, i2, j1, j2 = calc.ij_limits(lon, lat, xlim, ylim, margin=1)

    i1 = i1 - 2
    i2 = i2 + 2
    j1 = j1 - 2
    j2 = j2 + 2

    if fsave:
        np.asarray([i1, i2, j1, j2]).dump(fsave)
        print 'saved %s' % fsave

    return np.asarray([i1, i2, j1, j2])
Code Example #5
    def extract(self, year, level=3, version=2, lims=False):
        a = SMOSdownload(self.path)
        p = a.destination_folder(year, level, version, gen=False)
        files = glob.glob(os.path.join(p, '*.nc'))
        files.sort()

        res = OrderedDict()
        for f in files:
            print(' -- extracting from %s' % f)
            if f == files[0]:
                lon = netcdf.use(f, 'longitude')
                lat = netcdf.use(f, 'latitude')
                if lims:
                    i0, i1, j0, j1 = calc.ij_limits(lon,
                                                    lat,
                                                    lims[:2],
                                                    lims[2:],
                                                    margin=2)
                    ii = '%d:%d' % (i0, i1 + 1)
                    jj = '%d:%d' % (j0, j1 + 1)
                    lon = netcdf.use(f, 'longitude', longitude=ii)
                    lat = netcdf.use(f, 'latitude', latitude=jj)
                else:
                    ii, jj = ':', ':'

            date0 = netcdf.nctime(f, 'date_start')[0]
            date1 = netcdf.nctime(f, 'date_stop')[0]
            date = netcdf.nctime(f, 'time')[0]
            sss = netcdf.use(f, 'sss', longitude=ii, latitude=jj)
            res[date] = date0, date1, sss

        return lon, lat, res
Code Example #6
File: smos.py Project: martalmeida/okean
  def extract(self,year,level=3,version=2,lims=False):
    a=SMOSdownload(self.path)
    p=a.destination_folder(year,level,version,gen=False)
    files=glob.glob(os.path.join(p,'*.nc'))
    files.sort()

    res=OrderedDict()
    for f in files:
      print(' -- extracting from %s'%f)
      if f==files[0]:
        lon=netcdf.use(f,'longitude')
        lat=netcdf.use(f,'latitude')
        if lims:
          i0,i1,j0,j1=calc.ij_limits(lon,lat,lims[:2],lims[2:],margin=2)
          ii='%d:%d'%(i0,i1+1)
          jj='%d:%d'%(j0,j1+1)
          lon=netcdf.use(f,'longitude',longitude=ii)
          lat=netcdf.use(f,'latitude',latitude=jj)
        else: ii,jj=':',':'

      date0=netcdf.nctime(f,'date_start')[0]
      date1=netcdf.nctime(f,'date_stop')[0]
      date=netcdf.nctime(f,'time')[0]
      sss=netcdf.use(f,'sss',longitude=ii,latitude=jj)
      res[date]=date0,date1,sss

    return lon,lat,res
Code Example #7
File: hycom.py Project: jsh1012/okean
def get_lonlat(date,vname,dataset='GOM'):
  url=gen_url(date,vname,dataset)
  lon=netcdf.use(url,'Longitude')
  lat=netcdf.use(url,'Latitude')

  if dataset=='GLOBAL': lon=np.mod(lon,360)-360

  return lon,lat
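
The GLOBAL branch above remaps longitudes so they line up with grids stored using negative longitudes; a quick check of the np.mod(lon,360)-360 mapping with made-up values:

import numpy as np

lon = np.array([0., 45., 180., 359.2, 360.])
print(np.mod(lon, 360) - 360)   # -> [-360. -315. -180. -0.8 -360.]
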
Code Example #8
def get(xlim, ylim):
    x = netcdf.use(f, 'lon')
    y = netcdf.use(f, 'lat')
    i0, i1, j0, j1 = calc.ij_limits(x, y, xlim, ylim)

    ix = '%d:%d' % (i0, i1)
    iy = '%d:%d' % (j0, j1)

    x = netcdf.use(f, 'lon', lon=ix)
    y = netcdf.use(f, 'lat', lat=iy)
    z = netcdf.use(f, 'z', lon=ix, lat=iy)

    x, y = np.meshgrid(x, y)
    np.savez('etopo1_madeira.npz', x=x, y=y, z=z)
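
The helper above writes the subset to an .npz archive; a round trip of that np.savez call with made-up data (standing in for the ETOPO1 subset) shows how to read it back:

import numpy as np

x, y = np.meshgrid(np.linspace(-18, -15, 4), np.linspace(32, 34, 3))
z = np.zeros_like(x)
np.savez('etopo1_madeira.npz', x=x, y=y, z=z)

d = np.load('etopo1_madeira.npz')
print(d['x'].shape, d['y'].shape, d['z'].shape)   # (3, 4) three times
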
Code Example #9
def roms2swan_grid(grd, exc=9999., label='swan_bathy'):
    '''exc (EXCeption) = mask points
  '''

    x = netcdf.use(grd, 'lon_rho')
    y = netcdf.use(grd, 'lat_rho')
    m = netcdf.use(grd, 'mask_rho')
    h = netcdf.use(grd, 'h')
    h[m == 0] = exc
    ny, nx = h.shape

    fbot = label + '.bot'
    fgrd = label + '.grd'

    # depths file:
    fb = open(fbot, 'w')
    for i in range(nx):
        for j in range(ny):
            fb.write('   ')
            fb.write('%12.8f' % h[j, i])

        fb.write('\n')

    fb.close()
    print(' -- created swan depths file %s' % fbot)

    # coords file:
    #v=np.hstack((x.T.flatten(),y.T.flatten()))
    v = np.hstack((x.flatten(), y.flatten()))
    np.savetxt(fgrd, v, fmt='%12.6f')
    print(' -- created swan computational grid file %s\n' % fgrd)

    # add to swan INPUT:
    print('\n')
    print(
        'CGRID CURVILINEAR %d %d EXC 9.999000e+003 9.999000e+003 CIRCLE 36 0.04 1.0 24'
        % (nx - 1, ny - 1))
    print('READGRID COORDINATES 1 \'%s\' 4 0 0 FREE ' % (fgrd))

    print('INPGRID BOTTOM CURVILINEAR 0 0 %d %d EXC 9.999000e+003' %
          (nx - 1, ny - 1))
    print('READINP BOTTOM 1 \'%s\' 4 0 FREE ' % (fbot))
    print('\n')
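
A small round trip of the .bot layout written above, using a made-up depth field: each text line holds one xi-column of ny values, so reading the file back with np.loadtxt gives an (nx, ny) array that has to be transposed.

import numpy as np

ny, nx = 3, 4
h = np.arange(ny * nx, dtype=float).reshape(ny, nx)

with open('swan_bathy.bot', 'w') as fb:          # default label used above
    for i in range(nx):
        fb.write(' '.join('%12.8f' % h[j, i] for j in range(ny)) + '\n')

h2 = np.loadtxt('swan_bathy.bot').T              # back to (ny, nx)
print(np.allclose(h, h2))                        # True
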
Code Example #10
File: ostia.py Project: sciencewiki/okean
    def extract(self, year, type, lims=False, date=False):
        a = OSTIAdownload(self.path)
        p = a.destination_folder(year, type)  #########level,version,gen=False)
        files = glob.glob(os.path.join(p, '*.nc'))
        files.sort()

        res = OrderedDict()
        c = -1
        for f in files:
            c += 1
            print(' -- extracting from %s' % f)
            if c == 0:
                lon = netcdf.use(f, 'lon')
                lat = netcdf.use(f, 'lat')
                if lims:
                    i0, i1, j0, j1 = calc.ij_limits(lon,
                                                    lat,
                                                    lims[:2],
                                                    lims[2:],
                                                    margin=2)
                    ii = '%d:%d' % (i0, i1 + 1)
                    jj = '%d:%d' % (j0, j1 + 1)
                    lon = netcdf.use(f, 'lon', lon=ii)
                    lat = netcdf.use(f, 'lat', lat=jj)
                else:
                    ii, jj = ':', ':'

            date = netcdf.nctime(f, 'time')[0]
            u = netcdf.use(f, 'analysed_sst', lon=ii, lat=jj)
            if c == 0:
                sst = np.ma.zeros((len(files), ) + u.shape, u.dtype)
                time = np.zeros(len(files), datetime.datetime)

            sst[c] = u
            time[c] = date


###      date0=netcdf.nctime(f,'date_start')[0]
###      date1=netcdf.nctime(f,'date_stop')[0]
#      res[date]=sst
#####  return lon,lat,res

        return time, lon, lat, sst
Code Example #11
File: swanu.py Project: jsh1012/okean
def roms2swan_grid(grd,exc=9999.,label='swan_bathy'):
  '''exc (EXCeption) = mask points
  '''

  x=netcdf.use(grd,'lon_rho')
  y=netcdf.use(grd,'lat_rho')
  m=netcdf.use(grd,'mask_rho')
  h=netcdf.use(grd,'h')
  h[m==0]=exc
  ny,nx=h.shape

  fbot=label+'.bot'
  fgrd=label+'.grd'

  # depths file:
  fb=open(fbot,'w')
  for i in range(nx):
    for j in range(ny):
      fb.write('   ')
      fb.write('%12.8f'%h[j,i])

    fb.write('\n')

  fb.close()
  print ' -- created swan depths file %s'%fbot

  # coords file:
  #v=np.hstack((x.T.flatten(),y.T.flatten()))
  v=np.hstack((x.flatten(),y.flatten()))
  np.savetxt(fgrd,v,fmt='%12.6f')
  print ' -- created swan computational grid file %s\n'%fgrd

  # add to swan INPUT:
  print '\n'
  print 'CGRID CURVILINEAR %d %d EXC 9.999000e+003 9.999000e+003 CIRCLE 36 0.04 1.0 24'%(nx-1,ny-1)
  print 'READGRID COORDINATES 1 \'%s\' 4 0 0 FREE '%(fgrd)

  print 'INPGRID BOTTOM CURVILINEAR 0 0 %d %d EXC 9.999000e+003'%(nx-1,ny-1)
  print 'READINP BOTTOM 1 \'%s\' 4 0 FREE '%(fbot)
  print '\n'
Code Example #12
File: ostia.py Project: martalmeida/okean
  def extract(self,year,type,lims=False,date=False):
    a=OSTIAdownload(self.path)
    p=a.destination_folder(year,type)#########level,version,gen=False)
    files=glob.glob(os.path.join(p,'*.nc'))
    files.sort()

    res=OrderedDict()
    c=-1
    for f in files:
      c+=1
      print(' -- extracting from %s'%f)
      if c==0:
        lon=netcdf.use(f,'lon')
        lat=netcdf.use(f,'lat')
        if lims:
          i0,i1,j0,j1=calc.ij_limits(lon,lat,lims[:2],lims[2:],margin=2)
          ii='%d:%d'%(i0,i1+1)
          jj='%d:%d'%(j0,j1+1)
          lon=netcdf.use(f,'lon',lon=ii)
          lat=netcdf.use(f,'lat',lat=jj)
        else: ii,jj=':',':'


      date=netcdf.nctime(f,'time')[0]
      u=netcdf.use(f,'analysed_sst',lon=ii,lat=jj)
      if c==0:
        sst=np.ma.zeros((len(files),)+u.shape,u.dtype)
        time=np.zeros(len(files),datetime.datetime)

      sst[c]=u
      time[c]=date

###      date0=netcdf.nctime(f,'date_start')[0]
###      date1=netcdf.nctime(f,'date_stop')[0]
#      res[date]=sst
#####  return lon,lat,res

    return time,lon,lat,sst
Code Example #13
File: ascat.py Project: sciencewiki/okean
def read_wind(grd, date, ij=False):
    f = source(date)
    print('-- reading from %s' % f)
    time = netcdf.nctime(f, 'time')

    if 0:
        try:
            i = np.where(time == date)[0][0]
        except:
            return 'date %s not found' % date.isoformat(' ')
    else:
        i = 0

    returnXY = False
    if ij is False:
        returnXY = True
        lon = netcdf.use(f, 'longitude')  # -180..180
        lat = netcdf.use(f, 'latitude')
        g = roms.Grid(grd)
        xl0 = np.asarray((g.lon.min(), g.lon.max()))
        xl = np.asarray((g.lon.min(), g.lon.max()))
        if np.any(xl > 180) or np.any(xl < -180):
            print('ERROR: grid is supposed to be -180<x<180')
            print(
                'Can be implemented with mpl_toolkits.basemap.shiftgrid ... TODO'
            )
            print('(http://matplotlib.org/basemap/api/basemap_api.html)')
            return

        yl = g.lat.min(), g.lat.max()
        ij = calc.ij_limits(lon, lat, xl, yl, margin=1)

    i0, i1, j0, j1 = ij
    u = netcdf.use(f,
                   'eastward_wind',
                   longitude='%d:%d' % (i0, i1),
                   latitude='%d:%d' % (j0, j1),
                   time=i)
    v = netcdf.use(f,
                   'northward_wind',
                   longitude='%d:%d' % (i0, i1),
                   latitude='%d:%d' % (j0, j1),
                   time=i)
    if returnXY:
        lon = netcdf.use(f,
                         'longitude',
                         longitude='%d:%d' % (i0, i1),
                         latitude='%d:%d' % (j0, j1))
        lat = netcdf.use(f,
                         'latitude',
                         longitude='%d:%d' % (i0, i1),
                         latitude='%d:%d' % (j0, j1))

        lon, lat = np.meshgrid(lon, lat)
        #if np.all(xl0<0): lon=lon-360 # this may be wrong ... if xl is near 0, lon may have pos and neg values !!! fix this one day ...
        return lon, lat, u, v, ij
    else:
        return u, v
Code Example #14
File: ascat.py Project: jcmt/okean
def read_wind(grd,date,ij=False):
  f=source(date)
  print '-- reading from %s'%f
  time=netcdf.nctime(f,'time')

  if 0:
    try:
      i=np.where(time==date)[0][0]
    except:
      return 'date %s not found'%date.isoformat(' ')
  else: i=0

  returnXY=False
  if ij is False:
    returnXY=True
    lon=netcdf.use(f,'longitude') # -180..180
    lat=netcdf.use(f,'latitude')
    g=roms.Grid(grd)
    xl0=np.asarray((g.lon.min(),g.lon.max()))
    xl=np.asarray((g.lon.min(),g.lon.max()))
    if np.any(xl>180) or np.any(xl<-180):
      print 'ERROR: grid is supposed to be -180<x<180'
      print 'Can be implemented with mpl_toolkits.basemap.shiftgrid ... TODO'
      print '(http://matplotlib.org/basemap/api/basemap_api.html)'
      return

    yl=g.lat.min(),g.lat.max()
    ij=calc.ij_limits(lon,lat,xl,yl,margin=1)

  i0,i1,j0,j1=ij
  u=netcdf.use(f,'eastward_wind',longitude='%d:%d'%(i0,i1),latitude='%d:%d'%(j0,j1),time=i)
  v=netcdf.use(f,'northward_wind',longitude='%d:%d'%(i0,i1),latitude='%d:%d'%(j0,j1),time=i)
  if returnXY:
    lon=netcdf.use(f,'longitude',longitude='%d:%d'%(i0,i1),latitude='%d:%d'%(j0,j1))
    lat=netcdf.use(f,'latitude',longitude='%d:%d'%(i0,i1),latitude='%d:%d'%(j0,j1))

    lon,lat=np.meshgrid(lon,lat)
    #if np.all(xl0<0): lon=lon-360 # this may be wrong ... if xl is near 0, lon may have pos and neg values !!! fix this one day ...
    return lon,lat,u,v, ij
  else: return u,v
Code Example #15
File: ccmp.py Project: rsignell-usgs/okean
def read_wind(grd,date,ij=False):
  f=source(date)
  print '-- reading from %s'%f
  time=netcdf.nctime(f,'time')
  try:
    i=np.where(time==date)[0][0]
  except:
    return 'date %s not found'%date.isoformat(' ')

  returnXY=False
  if ij is False:
    returnXY=True
    lon=netcdf.use(f,'lon')
    lat=netcdf.use(f,'lat')
    g=roms.Grid(grd)
    xl0=np.asarray((g.lon.min(),g.lon.max()))
    xl=np.asarray((g.lon.min(),g.lon.max()))
    if np.all(xl<0): xl=xl+360
    elif np.any(xl<0) and np.any(xl>0):
      print 'ERROR: zero crossing not implemented !!!'
      print 'can be done with mpl_toolkits.basemap.shiftgrid ... TODO'
      print '(http://matplotlib.org/basemap/api/basemap_api.html)'
      return

    yl=g.lat.min(),g.lat.max()
    ij=calc.ij_limits(lon,lat,xl,yl,margin=1)

  i0,i1,j0,j1=ij
  u=netcdf.use(f,'uwnd',lon='%d:%d'%(i0,i1),lat='%d:%d'%(j0,j1),time=i)
  v=netcdf.use(f,'vwnd',lon='%d:%d'%(i0,i1),lat='%d:%d'%(j0,j1),time=i)
  if returnXY:
    lon=netcdf.use(f,'lon',lon='%d:%d'%(i0,i1),lat='%d:%d'%(j0,j1))
    lat=netcdf.use(f,'lat',lon='%d:%d'%(i0,i1),lat='%d:%d'%(j0,j1))
    lon,lat=np.meshgrid(lon,lat)
    if np.all(xl0<0): lon=lon-360 # this may be wrong ... if xl is near 0, lon may have pos and neg values !!! fix this one day ...
    return lon,lat,u,v, ij
  else: return u,v
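
The longitude handling above assumes the CCMP file uses 0..360 longitudes: an all-negative grid window is shifted by +360 before calling ij_limits, and the subset longitudes are shifted back afterwards. A tiny check with made-up numbers:

import numpy as np

xl = np.array([-75.0, -60.0])      # made-up grid limits, all negative
if np.all(xl < 0):
    xl = xl + 360                  # [285. 300.], comparable to the file lon
lon_sub = np.array([285.0, 287.5, 290.0])
print(xl, lon_sub - 360)           # subset back in the grid convention
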
Code Example #16
def cordex_file_data(f,lims=False,quiet=False):
  '''
  CORDEX data for ROMS

  No accumulated variables are considered
  '''

  out={}

  # time, x, y:
  if not quiet: print(' reading time,x,y')
  out['time']=netcdf.nctime(f,'time')
  x=netcdf.use(f,'lon')
  y=netcdf.use(f,'lat')
  x[x>180]=x[x>180]-360
  if x.ndim==1 and y.ndim==1:
    x,y=np.meshgrid(x,y)

  if np.ma.isMA(x): x=x.data
  if np.ma.isMA(y): y=y.data

  if lims:
    from okean import calc
    xlim,ylim=lims
    i1,i2,j1,j2=calc.ij_limits(x,y,xlim,ylim,margin=3)
  else:
    i1=0
    j1=0
    j2,i2=x.shape

  I=range(i1,i2)
  J=range(j1,j2)
  x=x[j1:j2,i1:i2]
  y=y[j1:j2,i1:i2]

  # tair [K-->C]
  if not quiet: print(' --> T air')
  vname='tair'
  tair=netcdf.use(f,vname,lon=I,lat=J)-273.15
  out['tair']=Data(x,y,tair,'Celsius')

  # R humidity [0--1]
  if not quiet: print(' --> R humidity (from specific humidity)')
  vname='humid'
  q=netcdf.use(f,vname,lon=I,lat=J) # specific humidity
  rhum=q/air_sea.qsat(tair)
  rhum[rhum>1]=1
  out['rhum']=Data(x,y,rhum,'0--1')

  # surface pressure [Pa]
  if not quiet: print(' --> Surface pressure')
  vname='press'
  pres=netcdf.use(f,vname,lon=I,lat=J)
  out['pres']=Data(x,y,pres,'Pa')

  # P rate [kg m-2 s-1 -> cm/d]
  if not quiet: print(' --> P rate')
  vname='rain'
  prate=netcdf.use(f,vname,lon=I,lat=J)
  prate=prate*86400*100/1000.
  prate[prate<0]=0
  out['prate']=Data(x,y,prate,'cm/d')

  # Net shortwave flux  [W m-2]
  if not quiet: print(' --> Net shortwave flux')
  if not quiet: print('       SW down')
  sw_down=netcdf.use(f,'sw_down',lon=I,lat=J)
  if not quiet: print('       SW up')
  sw_up=netcdf.use(f,'sw_up',lon=I,lat=J)
  sw_net=sw_down-sw_up
  out['radsw']=Data(x,y,sw_net,'W m-2',info='positive downward')

  # Net longwave flux  [W m-2]
  if not quiet: print(' --> Net longwave flux')
  if not quiet: print('       LW down')
  lw_down=netcdf.use(f,'lw_down',lon=I,lat=J)
  if not quiet: print('       LW up')
  lw_up=netcdf.use(f,'lw_up',lon=I,lat=J)
  lw_net=lw_down-lw_up
  out['radlw']=Data(x,y,-lw_net,'W m-2',info='positive upward')
  # downward lw:
  out['dlwrf']=Data(x,y,-lw_down,'W m-2',info='negative... downward')
  # signs convention is better explained in wrf.py

  # U and V wind speed 10m
  if not quiet: print(' --> U and V wind')
  uwnd=netcdf.use(f,'u',lon=I,lat=J)
  vwnd=netcdf.use(f,'v',lon=I,lat=J)
  if not quiet: print(' --> calc wind speed and stress')
  speed = np.sqrt(uwnd**2+vwnd**2)
  taux,tauy=air_sea.wind_stress(uwnd,vwnd)

  out['wspd']=Data(x,y,speed,'m s-1')
  out['uwnd']=Data(x,y,uwnd,'m s-1')
  out['vwnd']=Data(x,y,vwnd,'m s-1')
  out['sustr']=Data(x,y,taux,'Pa')
  out['svstr']=Data(x,y,tauy,'Pa')

  # Cloud cover [0--100 --> 0--1]:
  if not quiet: print(' --> Cloud cover')
  if 'clouds' in netcdf.varnames(f):
    clouds=netcdf.use(f,'clouds',lon=I,lat=J)/100.
    out['cloud']=Data(x,y,clouds,'fraction (0--1)')
  else:
    print('==> clouds not present!')

  return fill_extremes(out,quiet)
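
Unit check for the precipitation conversion above: 1 kg m-2 of water equals 1 mm, so a rate in kg m-2 s-1 is mm s-1; multiplying by 86400 gives mm/day and by 100/1000 (i.e. dividing by 10) gives cm/day, a net factor of 8640.

prate_si = 2.0e-5                          # kg m-2 s-1, made-up value
print(prate_si * 86400 * 100 / 1000.)      # 0.1728 cm/d
print(prate_si * 8640)                     # same number
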
Code Example #17
from okean.roms import glider
from okean import netcdf
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.dates import MO, TU, WE, TH, FR, SA, SU

# <markdowncell>

# Lets use some ROMS-ESPRESSO output and load info from a glider:

# <codecell>

froms='http://tds.marine.rutgers.edu/thredds/dodsC/roms/espresso/2009_da/his'
fglider='http://tds.marine.rutgers.edu/thredds/dodsC/cool/glider/mab/Gridded/20121025T000000_20121105T000000_maracoos_ru23.nc'
fglider='http://tds.marine.rutgers.edu:8080/thredds/dodsC/cool/glider/all/ru23-20121025T1944.ncCFMA.nc3.nc'

x=netcdf.use(fglider,'longitude')
y=netcdf.use(fglider,'latitude')
t=netcdf.nctime(fglider,'time')

a=glider.RomsGlider(froms,x,y,t)
a.plot()

# <markdowncell>

# Extract and plot the glider data

# <codecell>

z=netcdf.use(fglider,'depth')
v=netcdf.use(fglider,'temperature')
Code Example #18
%matplotlib inline
from okean.roms import glider
from okean import netcdf
import numpy as np
import matplotlib.pyplot as plt

# <markdowncell>

# Lets use some ROMS-ESPRESSO output and load info from a glider:

# <codecell>

froms='http://tds.marine.rutgers.edu/thredds/dodsC/roms/espresso/2009_da/his'
fglider='http://tds.marine.rutgers.edu/thredds/dodsC/cool/glider/mab/Gridded/20101025T1600_marcoos_ru22_active.nc'

x=netcdf.use(fglider,'longitude')
y=netcdf.use(fglider,'latitude')
t=netcdf.nctime(fglider,'time')

a=glider.RomsGlider(froms,x,y,t)
a.plot()

# <markdowncell>

# Extract and plot the glider data

# <codecell>

vmin=30.0
vmax=36.0
z=netcdf.use(fglider,'depth')
Code Example #19
File: gnome.py Project: sciencewiki/okean
def frc2gnome(fname, frc, grd, xylim=False, dates=False, ij=(1, 1), **kargs):
    '''
  Creates GNOME wind file
  kargs:
    t[u,v]var
    t[u,v]dim
    x[y,ang]var

  Ex:
    .frc2gnome(out,frc,grd,ij=(10,10),dates=dates,**{'tdim':'Time'})
  '''

    deta, dxi = ij

    tvar = 'time'
    uvar = 'Uwind'
    vvar = 'Vwind'
    #tvar='bulk_time'
    #uvar='uwnd'
    #vvar='vwnd'

    tdim = 'time'
    #tdim='bulk_time'
    xdim = 'xi_rho'
    ydim = 'eta_rho'

    xvar = 'lon_rho'
    yvar = 'lat_rho'
    angvar = 'angle'

    if 'tvar' in kargs.keys(): tvar = kargs['tvar']
    if 'uvar' in kargs.keys(): uvar = kargs['uvar']
    if 'vvar' in kargs.keys(): vvar = kargs['vvar']

    if 'tdim' in kargs.keys(): tdim = kargs['tdim']
    if 'xdim' in kargs.keys(): xdim = kargs['xdim']
    if 'ydim' in kargs.keys(): ydim = kargs['ydim']

    if 'xvar' in kargs.keys(): xvar = kargs['xvar']
    if 'yvar' in kargs.keys(): yvar = kargs['yvar']
    if 'angvar' in kargs.keys(): angvar = kargs['angvar']

    dims = netcdf.fdim(grd)
    xi, eta = dims[xdim], dims[ydim]
    xi0, eta0 = xi, eta

    ncg = netcdf.ncopen(grd)

    nc0 = netcdf.ncopen(frc)
    try:
        t = netcdf.nctime(nc0, tvar)
    except:
        t = netcdf.use(nc0, tvar)
        t = netcdf.num2date(t, 'days since %d-01-01' % year0)

    time = netcdf.date2num(t, tunits)

    x0 = netcdf.use(grd, xvar)
    y0 = netcdf.use(grd, yvar)
    if x0.ndim == 1: x0, y0 = np.meshgrid(x0, y0)

    if angvar:
        ang = netcdf.use(grd, angvar)

    if not xylim is False:
        xlim = xylim[:2]
        ylim = xylim[2:]
        i1, i2, j1, j2 = calc.ij_limits(x0, y0, xlim, ylim)
        xi = i2 - i1
        eta = j2 - j1
    else:
        i1, i2 = 0, xi
        j1, j2 = 0, eta

    XI = '%d:%d:%d' % (i1, i2, dxi)
    ETA = '%d:%d:%d' % (j1, j2, deta)

    xi = len(range(i1, i2, dxi))
    eta = len(range(j1, j2, deta))

    # create file:
    create_wind(fname, xi, eta)

    nc = netcdf.ncopen(fname, 'a')

    x = x0[j1:j2:deta, i1:i2:dxi]
    y = y0[j1:j2:deta, i1:i2:dxi]

    nc.vars['lon'][:] = x
    nc.vars['lat'][:] = y
    if angvar: ang = ang[j1:j2:deta, i1:i2:dxi]

    n = -1
    for it in range(len(time)):

        if not dates is False:
            d0, d1 = dates
            if t[it] < d0 or t[it] >= d1: continue

        n += 1
        u = netcdf.use(nc0, uvar, **{xdim: XI, ydim: ETA, tdim: it})
        v = netcdf.use(nc0, vvar, **{xdim: XI, ydim: ETA, tdim: it})

        # rotate uv:
        if angvar:
            print('rotating ...')
            u, v = calc.rot2d(u, v, -ang)

        nc.vars['time'][n] = time[it]
        print('filling uv', n, t[it])
        nc.vars['air_u'][n, ...] = u
        nc.vars['air_v'][n, ...] = v

    nc.close()
    nc0.close()
    ncg.close()
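
The XI/ETA strings built above encode strided index ranges for netcdf.use, and the subset sizes are computed with len(range(start, stop, step)); both agree with plain NumPy slicing, as a quick check with made-up numbers shows:

import numpy as np

i1, i2, dxi = 3, 20, 4
a = np.arange(30)
print(a[i1:i2:dxi])                                   # [ 3  7 11 15 19]
print(a[i1:i2:dxi].size == len(range(i1, i2, dxi)))   # True
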
Code Example #20
File: cfsr.py Project: jsh1012/okean
def cfsr_file_data(files,quiet=False):
  '''
  Returns bulk data from one CFRS files
  '''


  def load_time(f):
    time=np.array((),datetime.datetime)
    ff=glob.glob(f)
    ff.sort()
    for f in ff: time=np.append(time,netcdf.nctime(f,'time'))
    return time


  def fix_time(t,var,t0,t1):
    # convert 1h, 7h, ... to 0h, 6h, ...
    if t[0].hour in [1,7,13,19]: # not all! sp analysis starts at 0, 6,...!
      print '     1,7,... to 0,6,...'
      var=(var[1:]*5+var[:-1]*1)/6.
      t=t[1:]-datetime.timedelta(hours=1)

    cond=(t>=t0)&(t<=t1)
    t=t[cond]
    var=var[cond]

    if t[0]>t0:
      dt=t[0]-t0
      dt=dt.days*24+dt.seconds/3600. # hours
      print 'missing data at start: %.2d h missing --> repeating 1st data'%dt
      v=np.zeros((var.shape[0]+1,)+var.shape[1:],var.dtype)
      v[1:]=var
      v[0]=var[0]
      var=v

    if t[-1]<t1:
      dt=t1-t[-1]
      dt=dt.days*24+dt.seconds/3600. # hours
      print 'missing data at end: %.2d h missing --> repeating last data'%dt
      v=np.zeros((var.shape[0]+1,)+var.shape[1:],var.dtype)
      v[:-1]=var
      v[-1]=var[-1]
      var=v

    return var


  out={}

  # time:
  if 0:
    time=netcdf.nctime(files['cc'],'time')
    # files have diff units !! so, cannot load all times at once!
    # these results would use only the units of the 1st file!!
  else:
    time=load_time(files['cc'])


  out['time']=time

  # T air [K->C]
  if not quiet: print ' --> T air'
  f=files['st']
  tair=netcdf.use(f,'TMP_L103')
  tair=tair-273.15
  x=netcdf.use(f,'lon'); x[x>180]=x[x>180]-360
  y=netcdf.use(f,'lat')
  x,y=np.meshgrid(x,y)
  # check time:
  ttmp=load_time(f)
  if ttmp.size==time.size and np.all(ttmp==time): print '    time ok'
  else:
Code Example #21
File: cfsr.py Project: jsh1012/okean
  y=netcdf.use(f,'lat')
  x,y=np.meshgrid(x,y)
  # check time:
  ttmp=load_time(f)
  if ttmp.size==time.size and np.all(ttmp==time): print '    time ok'
  else:
    print '   time differs !!!!',
    tair=fix_time(ttmp,tair,time[0],time[-1])
    print ' ...fixed!'
  out['tair']=Data(x,y,tair,'C')


  # R humidity [%-->0--1]
  if not quiet: print ' --> R humidity'
  f=files['rh']
  rhum=netcdf.use(f,'R_H_L103')
  rhum=rhum/100.
  x=netcdf.use(f,'lon'); x[x>180]=x[x>180]-360
  y=netcdf.use(f,'lat')
  x,y=np.meshgrid(x,y)
  # check time:
  ttmp=load_time(f)
  if ttmp.size==time.size and np.all(ttmp==time): print '    time ok'
  else:
    print '   time differs !!!!',
    rhum=fix_time(ttmp,rhum,time[0],time[-1])
    print ' ...fixed!'
  out['rhum']=Data(x,y,rhum,'0--1')


  # surface pressure [Pa]
Code Example #22
File: wrf.py Project: jcmt/okean
def wrf_file_data(file,quiet=False):
  '''
  WRF data for ROMS

  '''

  out={}

  # time:
  if not quiet: print ' --> get time'
  time=read_time(file)

  out['time']=time

  # lon,lat:
  if not quiet: print ' --> reading x,y'
  x=netcdf.use(file,'XLONG',**{'0': 0})
  y=netcdf.use(file,'XLAT',**{'0': 0})

  # tair [K-->C]
  if not quiet: print ' --> T air'
  tair=netcdf.use(file,'T2')-273.15
  out['tair']=Data(x,y,tair,'Celsius')

  # R humidity [kg/kg --> 0--1]
  if not quiet: print ' --> R humidity from QV at 2m'
  wv=netcdf.use(file,'Q2') # water vapor mixing ratio at 2m
  rhum=wv/air_sea.qsat(tair)
  rhum[rhum>1]=1
  out['rhum']=Data(x,y,rhum,'0--1')

  # surface pressure [Pa]
  if not quiet: print ' --> Surface pressure'
  pres=netcdf.use(file,'PSFC')
  out['pres']=Data(x,y,pres,'Pa')

  # P rate [mm --> cm day-1]
  if not quiet: print ' --> P rate (rainc+rainnc)'
  rainc  = netcdf.use(file,'RAINC')
  rainnc = netcdf.use(file,'RAINNC')
  prate=rainc+rainnc
  if not quiet: print '      accum2avg...'
  prate=accum2avg(prate,dt=time[1]-time[0]) # mm s-1
  conv= 0.1*86400       # from mm s-1      --> cm day-1
  prate=prate*conv # cm day-1
  prate[prate<0]=0 # interpolation errors may result in negative rain!
  out['prate']=Data(x,y,prate,'cm day-1')

  # LW, SW, latent, sensible signs:
  # positive (downward flux, heating) or negative (upward flux, cooling)
  #https://www.myroms.org/forum/viewtopic.php?f=1&t=2621

  # Net shortwave flux  [W m-2]
  if not quiet: print ' --> Net shortwave flux'
  sw_down=netcdf.use(file,'SWDOWN')
  albedo=netcdf.use(file,'ALBEDO')
  sw_net=sw_down*(1-albedo)
  out['radsw']=Data(x,y,sw_net,'W m-2',info='positive downward')

  # Net longwave flux  [W m-2]
  if not quiet: print ' --> Net longwave flux'
  lw_down=netcdf.use(file,'GLW') # positive
  # sst needed:
  if not quiet: print '     --> SST for LW up'
  sst=netcdf.use(file,'SST') # K
  lw_net = air_sea.lwhf(sst,lw_down) # positive down
  # here vars have roms-agrif signs --> radlw is positive upward!
  #conversion to ROMS is done in surface.py
  out['radlw']=Data(x,y,-lw_net,'W m-2',info='positive upward')
  out['dlwrf']=Data(x,y,-lw_down,'W m-2',info='positive upward')

  # U and V wind speed 10m
  if not quiet: print ' --> U and V wind'
  uwnd=netcdf.use(file,'U10')
  vwnd=netcdf.use(file,'V10')
  if not quiet: print ' --> calc wind speed and stress'
  speed = np.sqrt(uwnd**2+vwnd**2)
  taux,tauy=air_sea.wind_stress(uwnd,vwnd)

  out['wspd']=Data(x,y,speed,'m s-1')
  out['uwnd']=Data(x,y,uwnd,'m s-1')
  out['vwnd']=Data(x,y,vwnd,'m s-1')
  out['sustr']=Data(x,y,taux,'Pa')
  out['svstr']=Data(x,y,tauy,'Pa')

  # Cloud cover [0--1]:
  if not quiet: print ' --> Cloud cover for LONGWAVE. Use LONGWAVE_OUT instead...'
  if 'CLDFRA' in netcdf.varnames(file):
    clouds=netcdf.use(file,'CLDFRA').sum(-3)
    clouds=np.where(clouds>1,1,clouds)
  else:
    if not quiet: print 'CLDFRA not found!! Using SST and air_sea.clouds'
    sst=netcdf.use(file,'SST')
    clouds=air_sea.clouds(lw_net,sst,tair,rhum,Wtype='net')

  out['cloud']=Data(x,y,clouds,'fraction (0--1)')

  return out
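
accum2avg is not shown on this page; as an assumption only (not the okean implementation), such a helper could difference the accumulated field in time and divide by the time step, roughly as sketched below.

import numpy as np

def accum2avg_sketch(acc, dt):
    '''Turn an accumulated (t, y, x) field into an average rate.
    dt is the accumulation interval in seconds.  Sketch only; this is
    not the okean accum2avg used above.'''
    rate = np.empty_like(acc, dtype=float)
    rate[1:] = np.diff(acc, axis=0) / dt   # increments per second
    rate[0] = rate[1]                      # repeat the first usable step
    return rate

acc = np.cumsum(np.ones((4, 2, 2)), axis=0)      # fake accumulated rain [mm]
print(accum2avg_sketch(acc, dt=3600.)[:, 0, 0])  # constant rate in mm/s
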
Code Example #23
File: wrf.py Project: jcmt/okean
def read_time(file):
  return parse_time(netcdf.use(file,'Times'))
Code Example #24
File: gnome.py Project: jcmt/okean
def frc2gnome(fname,frc,grd,xylim=False,dates=False,ij=(1,1),**kargs):
  '''
  Creates GNOME wind file
  kargs:
    t[u,v]var
    t[u,v]dim
    x[y,ang]var

  Ex:
    .frc2gnome(out,frc,grd,ij=(10,10),dates=dates,**{'tdim':'Time'})
  '''

  deta,dxi=ij

  tvar='time'
  uvar='Uwind'
  vvar='Vwind'
  #tvar='bulk_time'
  #uvar='uwnd'
  #vvar='vwnd'

  tdim='time'
  #tdim='bulk_time'
  xdim='xi_rho'
  ydim='eta_rho'

  xvar='lon_rho'
  yvar='lat_rho'
  angvar='angle'

  if 'tvar' in kargs.keys(): tvar=kargs['tvar']
  if 'uvar' in kargs.keys(): uvar=kargs['uvar']
  if 'vvar' in kargs.keys(): vvar=kargs['vvar']

  if 'tdim' in kargs.keys(): tdim=kargs['tdim']
  if 'xdim' in kargs.keys(): xdim=kargs['xdim']
  if 'ydim' in kargs.keys(): ydim=kargs['ydim']

  if 'xvar' in kargs.keys(): xvar=kargs['xvar']
  if 'yvar' in kargs.keys(): yvar=kargs['yvar']
  if 'angvar' in kargs.keys(): angvar=kargs['angvar']


  dims=netcdf.fdim(grd)
  xi,eta=dims[xdim],dims[ydim]
  xi0,eta0=xi,eta

  ncg=netcdf.ncopen(grd)

  nc0=netcdf.ncopen(frc)
  try:
   t=netcdf.nctime(nc0,tvar)
  except:
    t=netcdf.use(nc0,tvar)
    t=netcdf.num2date(t,'days since %d-01-01' % year0)

  time=netcdf.date2num(t,tunits)

  x0=netcdf.use(grd,xvar)
  y0=netcdf.use(grd,yvar)
  if x0.ndim==1: x0,y0=np.meshgrid(x0,y0)

  if angvar:
    ang=netcdf.use(grd,angvar)

  if not xylim is False:
    xlim=xylim[:2]
    ylim=xylim[2:]
    i1,i2,j1,j2=calc.ij_limits(x0,y0,xlim,ylim)
    xi=i2-i1
    eta=j2-j1
  else:
    i1,i2=0,xi
    j1,j2=0,eta

  XI  ='%d:%d:%d' %(i1,i2,dxi)
  ETA ='%d:%d:%d' %(j1,j2,deta)

  xi=len(range(i1,i2,dxi))
  eta=len(range(j1,j2,deta))

  # create file:
  create_wind(fname,xi,eta)

  nc=netcdf.ncopen(fname,'a')

  x=x0[j1:j2:deta,i1:i2:dxi]
  y=y0[j1:j2:deta,i1:i2:dxi]

  nc.vars['lon'][:]=x
  nc.vars['lat'][:]=y
  if angvar: ang=ang[j1:j2:deta,i1:i2:dxi]

  n=-1
  for it in range(len(time)):

    if not dates is False:
      d0,d1=dates
      if t[it]<d0 or t[it]>=d1: continue

    n+=1
    u=netcdf.use(nc0,uvar,**{xdim:XI,ydim:ETA,tdim:it})
    v=netcdf.use(nc0,vvar,**{xdim:XI,ydim:ETA,tdim:it})

    # rotate uv:
    if angvar:
      print 'rotating ...'
      u,v=calc.rot2d(u,v,-ang)


    nc.vars['time'][n]=time[it]
    print 'filling uv',n,t[it]
    nc.vars['air_u'][n,...]=u
    nc.vars['air_v'][n,...]=v


  nc.close()
  nc0.close()
  ncg.close()
Code Example #25
    ttmp = load_time(f)
    if ttmp.size == time.size and np.all(ttmp == time): print '    time ok'
    else:
        print '   time differs !!!!',
        tair, tfix = fix_time(ttmp, tair, time[0], time[-1])
        if tfix.size == time.size and np.all(tfix == time):
            print ' ...fixed!'
        else:
            print 'time is NOT OK. Please check !!'
            return
    out['tair'] = Data(x, y, tair, 'C')

    # R humidity [%-->0--1]
    if not quiet: print ' --> R humidity'
    f = files['rh']
    rhum = netcdf.use(f, 'R_H_L103')
    rhum = rhum / 100.
    x = netcdf.use(f, 'lon')
    x[x > 180] = x[x > 180] - 360
    y = netcdf.use(f, 'lat')
    x, y = np.meshgrid(x, y)
    # check time:
    ttmp = load_time(f)
    if ttmp.size == time.size and np.all(ttmp == time): print '    time ok'
    else:
        print '   time differs !!!!',
        rhum, tfix = fix_time(ttmp, rhum, time[0], time[-1])
        if tfix.size == time.size and np.all(tfix == time):
            print ' ...fixed!'
        else:
            print 'time is NOT OK. Please check !!'
Code Example #26
  def use(self,varname,**kargs):
    return netcdf.use(self.nc,varname,**kargs)
Code Example #27
File: prognostic.py Project: jsh1012/okean
def load_data(f,quiet=0,**kargs):
  '''
  Loads prognostic variables (temp,salt,u,v,ubar,vbar,zeta) from
  netcdf file or opendap server. Also loads lon,lat, depth, and time.

  If f is a file, it must include the 1d variables lon,lat and depth;
  the 2d variable ssh (zeta) and the 3d variables temp, salt, u and v;
  i.e., each file must contain data for a single time. The file must also
  contain the variable time.

  If f is a opendap address, it must contain also all these variables
  or the ones defined in the input karg settings (DataAccess object)

  To deal with the case of variables in different files/opendap addresses,
  f can also be a dictionary with keys the variables and values the files
  or opendap addresses. In this case, the keys must be:
    - temp
    - salt
    - u
    - v
    - ssh
    - misc, for lon, lat, depth, time and dimensions
      or xy for lon,lat and x,ydim; z for depth and zdim, time for time

  The output data (dict) is suitable to be used by data2roms, which
  interpolates the data to ROMS 3d grid.
  Also outputs an error/status string.

  kargs:
    inds, dict with dimension names/values (where time dim can be integer
          or datetime)
    settings, DataAccess object
    extra, extra misc vars to load [(outKey0,fileVar0),...]
    t_units, units of variable time, by default the att  units is used
  '''

  sett=DataAccess()
  inds={}
  extra=[]
  t_units=[]
  if 'settings' in kargs.keys(): sett    = kargs['settings']
  if 'inds'     in kargs.keys(): inds    = kargs['inds']
  if 'extra'    in kargs.keys(): extra   = kargs['extra']
  if 't_units'  in kargs.keys(): t_units = kargs['t_units']

  res={}
  msg=''

  if not isinstance(f,dict) and not f.startswith('http') and not isfile(f):
    msg='file not found %s' % f
    if not quiet: print msg
    return res, msg

  # load nc files:
  if not isinstance(f,dict):
    f={'temp':f,'salt':f,'u':f,'v':f,'ssh':f,'misc':f}

  if not f.has_key('xy'):   f['xy']   = f['misc']
  if not f.has_key('z'):    f['z']    = f['misc']
  if not f.has_key('time'): f['time'] = f['misc']

  filesUsed=[]
  ncUsed=[]
  for i in f.keys():
    if not quiet: print '(%s) loading from %s' % (i.ljust(5),f[i])

    if i=='temp':
      if f[i] in filesUsed: ncTemp=ncUsed[filesUsed.index(f[i])]
      else:
        ncTemp=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncTemp]

    elif i=='salt':
      if f[i] in filesUsed: ncSalt=ncUsed[filesUsed.index(f[i])]
      else:
        ncSalt=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncSalt]

    elif i=='u':
      if f[i] in filesUsed: ncU=ncUsed[filesUsed.index(f[i])]
      else:
        ncU=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncU]

    elif i=='v':
      if f[i] in filesUsed: ncV=ncUsed[filesUsed.index(f[i])]
      else:
        ncV=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncV]

    elif i=='ssh':
      if f[i] in filesUsed: ncSsh=ncUsed[filesUsed.index(f[i])]
      else:
        ncSsh=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncSsh]

    elif i=='xy':
      if f[i] in filesUsed: ncXy=ncUsed[filesUsed.index(f[i])]
      else:
        ncXy=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncXy]

    elif i=='z':
      if f[i] in filesUsed: ncZ=ncUsed[filesUsed.index(f[i])]
      else:
        ncZ=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncZ]

    elif i=='time':
      if f[i] in filesUsed: ncTime=ncUsed[filesUsed.index(f[i])]
      else:
        ncTime=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncTime]

    elif i=='misc':
      if f[i] in filesUsed: ncMisc=ncUsed[filesUsed.index(f[i])]
      else:
        ncMisc=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncMisc]


  # load dims:
  if not quiet: print '  loading dims...'
  dimsXy=netcdf.fdim(ncXy)
  dimsZ =netcdf.fdim(ncZ)

  res['NX']=dimsXy[sett.xdim]
  res['NY']=dimsXy[sett.ydim]
  ###if sett.z_name:
  if sett.zdim:
    res['NZ']=dimsZ[sett.zdim]
  else:
    res['NZ']=1

  # about horizontal inds:
  if inds.has_key(sett.xdim) and len(inds[sett.xdim])==2 and not isinstance(inds[sett.xdim],basestring):
    if not quiet: print '  calc horizontal inds...'
    xlim=inds[sett.xdim]
    ylim=inds[sett.ydim]

    inds.pop(sett.xdim)
    inds.pop(sett.ydim)

    lon=netcdf.use(ncXy,sett.x_name,**inds)
    if np.any(lon>360): lon=np.mod(lon,360.)
    lat=netcdf.use(ncXy,sett.y_name,**inds)
    i0,i1,j0,j1=calc.ij_limits(lon,lat,xlim,ylim,margin=3)
    inds[sett.xdim]='%d:%d' % (i0,i1)
    inds[sett.ydim]='%d:%d' % (j0,j1)


  if not quiet: print '  loading lon, lat, depth...'
  res['lon']  = netcdf.use(ncXy,sett.x_name,**inds)
  if np.any(res['lon']>360): res['lon']=np.mod(res['lon'],360.)
  res['lat']  = netcdf.use(ncXy,sett.y_name,**inds)
  if sett.z_name:
    res['depth'] = -netcdf.use(ncZ,sett.z_name,**inds)
  else: res['depth']=False

  if res['lon'].size!=res['lat'].size:
    res['lon'],res['lat']=np.meshgrid(res['lon'],res['lat'])
    # needed for griddata, later

  # update nx,ny:
  if inds.has_key(sett.xdim):
    res['NY'],res['NX']=res['lon'].shape

  # extra misc vars:
  if len(extra):
    for outKey,fileVar in extra:
      if not quiet: print '  loading extra misc... %s %s' % (outKey,fileVar)
      res[outKey]=netcdf.use(ncMisc,fileVar,**inds)


  # time:
  # file may have one or several times. If several, time dim must be given
  # with kargs inds!
  if not quiet: print '  loading time...'
  if t_units:
    times=netcdf.use(ncTime,sett.time_name)
    times=netcdftime.num2date(times,t_units)
  else:
    times=netcdf.nctime(ncTime,sett.time_name)

  if inds.has_key(sett.tdim):
    try: tind=dts.parse_date(inds[sett.tdim])
    except: tind=inds[sett.tdim] # is an integer, for instance

    if isinstance(tind,datetime.datetime):
      tind,=np.where(times==tind)
      if tind.size:
        tind=tind[0]
        inds[sett.tdim]=tind # update inds to extract other variables
      else:
        Msg='date not found'
        msg+='\n'+Msg
        return res,msg+' ERROR'

    date=times[tind]
    if not quiet: print '    tind, date= %d %s' % (tind,date.isoformat(' '))

  elif times.size==1:
    date=times[0]
    if not quiet: print '    date= %s' % date.isoformat(' ')
  else: # must provide tind as input!!
    Msg='several dates in file... provide tind!'
    msg+='\n'+Msg
    return res,msg+' ERROR'

  res['date'] = date

  empty3d=np.zeros([res['NZ'],res['NY'],res['NX']])
  empty2d=np.zeros([res['NY'],res['NX']])

  if 'temp' in f.keys():
    if not quiet: print '  loading temp...'
    if sett.temp_name in ncTemp.varnames: res['temp'] = netcdf.use(ncTemp,sett.temp_name,**inds)
    else:
      Msg='var %s not found' % 'temp'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['temp']=empty3d

  if 'salt' in f.keys():
    if not quiet: print '  loading salt...'
    if sett.salt_name in ncSalt.varnames: res['salt'] = netcdf.use(ncSalt,sett.salt_name,**inds)
    else:
      Msg='var %s not found' % 'salt'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['salt']=empty3d

  if 'u' in f.keys():
    if not quiet: print '  loading u...'
    if sett.u_name in ncU.varnames: res['u']    = netcdf.use(ncU,sett.u_name,**inds)
    else:
      Msg='var %s not found' % 'u'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['u']=empty3d

  if 'v' in f.keys():
    if not quiet: print '  loading v...'
    if sett.v_name in ncV.varnames: res['v']    = netcdf.use(ncV,sett.v_name,**inds)
    else:
      Msg='var %s not found' % 'v'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['v']=empty3d

  if 'ssh' in f.keys():
    if not quiet: print '  loading ssh...'
    if sett.ssh_name in ncSsh.varnames: res['ssh']  = netcdf.use(ncSsh,sett.ssh_name,**inds)
    else:
      Msg='var %s not found' % 'ssh'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['ssh']=empty2d

  for nc in ncUsed:
    try:  nc.close()
    except: pass

  return res, msg
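
A sketch of the dict-of-sources form accepted by load_data above (all filenames hypothetical): each prognostic variable can come from its own file or OPeNDAP address, with 'misc' supplying coordinates and time unless 'xy', 'z' or 'time' keys are given.

f = {'temp': 'ocean_T.nc', 'salt': 'ocean_S.nc',
     'u': 'ocean_UV.nc', 'v': 'ocean_UV.nc',
     'ssh': 'ocean_2D.nc', 'misc': 'ocean_T.nc'}
# data, msg = load_data(f, inds={'time': 0})   # 'time' assumed to be the time dimension name
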
Code Example #28
File: gnome.py Project: jcmt/okean
def his2gnome(fname,his,grd=False,nomask=False,gshhsMask=True,xylim=False,dates=False,ij=(1,1)):
  '''
  Creates GNOME wind file
  Ex:
    his2gnome(out,his,grd,dates=dates,ij=(2,2))

  if gshhsMask, the high res mask file mask_gshhs.npy will be created at 1st usage.
  Mask is based on high (h) resolution gshhs data which must be available (env variable
  GSHHS_MASK must be set). 
  '''

  if not grd: grd=his
  deta,dxi=ij

  dims=netcdf.fdim(his)
  xi,eta=dims['xi_rho'],dims['eta_rho']
  xi0,eta0=xi,eta

  nc0=netcdf.ncopen(his)
  time=netcdf.nctime(nc0,'ocean_time')
  # for roms agrif:
  #t=netcdf.use(nc0,'scrum_time')
  #time=netcdf.num2date(t,'seconds since %d-01-01' % year0)

  x0=netcdf.use(grd,'lon_rho')
  y0=netcdf.use(grd,'lat_rho')
  ang=netcdf.use(grd,'angle')

  if not xylim is False:
    xlim=xylim[:2]
    ylim=xylim[2:]
    i1,i2,j1,j2=calc.ij_limits(x0,y0,xlim,ylim)
    print i1,i2,j1,j2
    xi=i2-i1
    eta=j2-j1
  else:
    i1,i2=0,xi
    j1,j2=0,eta

  XI  ='%d:%d:%d' %(i1,i2,dxi)
  ETA ='%d:%d:%d' %(j1,j2,deta)

  xi=len(range(i1,i2,dxi))
  eta=len(range(j1,j2,deta))
  # create file:
  create_uv(fname,xi,eta)

  nc=netcdf.ncopen(fname,'a')
  for v0,v in ('lon_rho','lon'),('lat_rho','lat'),('mask_rho','mask'),('h','depth'):
    print 'filling %s with %s' % (v,v0)
    nc.vars[v][:]=netcdf.use(grd,v0,xi_rho=XI,eta_rho=ETA)

  if nomask:
    print 'NO MASK !!!'
    nc.vars['mask'][:]=1

  if gshhsMask:
    try:
     mask=np.load('mask_gshhs.npy')
    except:
      mask=1+0*netcdf.use(nc0,'mask_rho',xi_rho=XI,eta_rho=ETA)
      mask=mask.astype('bool')
      x=netcdf.use(grd,'lon_rho',xi_rho=XI,eta_rho=ETA)
      y=netcdf.use(grd,'lat_rho',xi_rho=XI,eta_rho=ETA)

      from okean import gshhs
      axis=x.min(),x.max(),y.min(),y.max()
      g=gshhs.gshhs(axis, resolution='h',area_thresh=0., max_level=2,clip=True)
      for lon, lat, level in zip(g.lon, g.lat, g.level):
        if level == 1: # land
          print 'mask ',lon.shape
          i=calc.inpolygon(x,y,lon,lat)
          mask=mask & ~i

      mask.dump('mask_gshhs.npy')


    nc.vars['mask'][:]=mask


  x=x0[j1:j2:deta,i1:i2:dxi]
  y=y0[j1:j2:deta,i1:i2:dxi]
  ang=ang[j1:j2:deta,i1:i2:dxi]

  n=-1
  for it in range(len(time)):
    if not dates is False:
      d0,d1=dates
      if time[it]<d0 or time[it]>=d1: continue

    n+=1
    U=np.zeros((eta0,xi0),'f')
    V=np.zeros((eta0,xi0),'f')

    nc.vars['time'][n]=netcdf.date2num(time[it],tunits)

    # for roms agrif:
    #u=netcdf.use(nc0,'u',time=it,s_rho=-1)
    #v=netcdf.use(nc0,'v',time=it,s_rho=-1)
    u=netcdf.use(nc0,'u',ocean_time=it,s_rho=-1)
    v=netcdf.use(nc0,'v',ocean_time=it,s_rho=-1)

    # mask extrap:
    print 'mask extrap...'

    u=calc.mask_extrap(x0,y0,np.ma.masked_where(u==0,u))
    v=calc.mask_extrap(x0,y0,np.ma.masked_where(v==0,v))

    U[:,1:-1]=0.5*(u[:,:-1]+u[:,1:])
    U[:,0]=u[:,0]
    U[:,-1]=u[:,-1]

    V[1:-1,:]=0.5*(v[:-1,:]+v[1:,:])
    V[0,:]=v[0,:]
    V[-1,:]=v[-1,:]

    U=U[j1:j2,i1:i2]
    V=V[j1:j2,i1:i2]
  
    U=U[j1:j2:deta,i1:i2:dxi]
    V=V[j1:j2:deta,i1:i2:dxi]

    # rotate uv:
    print 'rotating ...'
    U,V=calc.rot2d(U,V,-ang)

    print 'filling uv', n, time[it]
    nc.vars['u'][n,...]=U
    nc.vars['v'][n,...]=V

  nc.close()
  nc0.close()
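
The block above averages the staggered ROMS u (eta, xi-1) and v (eta-1, xi) components onto rho points before rotating; a minimal shape check of the u case with made-up data:

import numpy as np

eta, xi = 4, 6
u = np.random.rand(eta, xi - 1)        # u lives on u-points
U = np.zeros((eta, xi))                # rho-point target
U[:, 1:-1] = 0.5 * (u[:, :-1] + u[:, 1:])
U[:, 0] = u[:, 0]
U[:, -1] = u[:, -1]
print(U.shape)                         # (4, 6)
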
Code Example #29
File: op_plot.py Project: martalmeida/OOFe
def plt_hslice(conf,plconf,date,FA='a',nest=0,**kargs):
  err=''
  fig=False
  info={}

  type     = 'avg'
  var      = 'temp'
  slice    = 'z'
  ind      = -10
  time     = -1
  currents = False
  dcurr    = (3,3)
  scurr    = 3
  lcurr    = 0.2
  ifig     = 0
# closefig = True
  clim     = False
  quiet    = False
  outStoragePath=False
  cmap     = None
  norm     = None
  useBar   = True # currents are barotropic for 2D vars (like zeta)

  keys=kargs.keys()
  if 'type'     in keys: type     = kargs['type']
  if 'var'      in keys: var      = kargs['var']
  if 'slice'    in keys: slice    = kargs['slice']
  if 'ind'      in keys: ind      = kargs['ind']
  if 'time'     in keys: time     = kargs['time']
  if 'currents' in keys: currents = kargs['currents']
  if 'dcurr'    in keys: dcurr    = kargs['dcurr']
  if 'scurr'    in keys: scurr    = kargs['scurr']
  if 'lcurr'    in keys: lcurr    = kargs['lcurr']
  if 'ifig'     in keys: ifig     = kargs['ifig']
  if 'closefig' in keys: closefig = kargs['closefig']
  if 'clim'     in keys: clim     = kargs['clim']
  if 'quiet'    in keys: quiet    = kargs['quiet']
  if 'ostorage' in keys: outStoragePath = kargs['ostorage']
  if 'cmap'     in keys: cmap     = kargs['cmap']
  if 'usebar'   in keys: useBar   = kargs['usebar']
  if 'norm'     in keys: norm     = kargs['norm']

  date=dateu.parse_date(date)

  # find input files:
  args={'cf':conf,'date':date,'FA':FA,'nest':nest,'ostorage':outStoragePath}
  his = opt.nameof('out',type,**args)
  clm = opt.nameof('in','clm',**args)
  grd = opt.nameof('in','grd',**args)
  if not os.path.isfile(his):
    err='Main file not found (%s)' % his
    return err,fig,info
  if not os.path.isfile(grd):
    err='Grid file not found (%s)' % grd
    return err,fig,info
  r=roms.His(his,grd)

  # plot grid:
  proj,fig, ax = plt_grid(plconf,grd,ifig)

  def add_colorbar(handle,**args):
    ax=pl.gca()
    Data,err = opt.get_plconf(plconf,'AXES')
    cbpos    = Data['cbpos'][ifig]
    cbbgpos  = Data['cbbgpos'][ifig]
    cbbgc    = Data['cbbgcolor'][ifig]
    cbbga    = Data['cbbgalpha'][ifig]
    cblab    = Data['cblabel'][ifig]

    # colorbar bg axes:
    if cbbgpos:
      rec=pl.axes((cbpos[0]-cbpos[2]*cbbgpos[0],cbpos[1]-cbbgpos[2]*cbpos[3],
                      cbpos[2]*(1+cbbgpos[0]+cbbgpos[1]),cbpos[3]*(1+cbbgpos[2]+cbbgpos[3])),
                      axisbg=cbbgc,frameon=1)

      rec.patch.set_alpha(cbbga)
      rec.set_xticks([])
      rec.set_yticks([])
      for k in rec.axes.spines.keys():
        rec.axes.spines[k].set_color(cbbgc)
        rec.axes.spines[k].set_alpha(cbbga)


    # colorbar:
    if cbpos:
      cbax=fig.add_axes(cbpos)
      if cbpos[2]>cbpos[3]: orient='horizontal'
      else: orient='vertical'
      cb=pl.colorbar(handle,cax=cbax,orientation=orient,drawedges=0,**args)
      pl.axes(ax)

    # colorbar label:
    if cblab:
      Data,err = opt.get_plconf(plconf,'HSLICES')
      varnames=Data['varnames'][ifig].split(',')
      vnames=Data['vnames'][ifig].split(',')
      lab=''
      for i in range(len(varnames)):
        if varnames[i].strip()==var:
          lab=vnames[i].strip()
          break

      if lab:
        if r.hasz(var):
          if slice=='k':
            if ind==0: lab = 'Bottom '+lab
            elif ind in (-1,'surface'): lab = 'Surface '+lab
          elif slice=='z':
            lab=lab+' '+str(ind)+'m'

        cb.set_label(lab)


  def add_currkey(handle):
    Data,err = opt.get_plconf(plconf,'HSLICES')
    pos=Data['kcurrpos'][ifig]
    if pos:
      pl.quiverkey(handle, pos[0], pos[1], lcurr, '%s m/s' % str(lcurr),labelpos='S',
                                              coordinates='axes')

  # hslice:
  if var:
    if   slice=='k': metodo=r.slicek
    elif slice=='z': metodo=r.slicez

    x,y,z,v=metodo(var,ind,time,plot=False)
    x,y=proj(x,y)

    # cmap:
    if isinstance(cmap,basestring):
      try:cmap=pl.cm.cmap_d[cmap]
      except:
        try:
          from okean import pl_tools
          cmap=pl_tools.cm.cmap_d[cmap]
        except: cmap=pl.cm.jet

    # original data from clm
    if slice=='k' and ind in (-1,) and var+'_original' in netcdf.varnames(clm):
      tcurr= r.datetime[time]
      x_o=netcdf.use(clm,'x_original')
      y_o=netcdf.use(clm,'y_original')
      x_o,y_o=proj(x_o,y_o)
      v_o=netcdf.use(clm,'y_original')
      t_o=netcdf.nctime(clm,'clim_time')

      # average to current time:
      i0=np.where(t_o<=tcurr)[0][-1]
      i1=np.where(t_o>tcurr)[0][0]
      v_o0=netcdf.use(clm,var+'_original',time=i0)
      v_o1=netcdf.use(clm,var+'_original',time=i1)
      # avg:
      a=tcurr-t_o[i0]
      b=t_o[i1]-tcurr

      a=a.days*86400+a.seconds
      b=b.days*86400+b.seconds

      if a==0: v_o=v_o0
      elif b==0: v_o=v_o1
      else: v_o=(v_o0*b+v_o1*a)/(a+b)

      pch=pl.pcolormesh(x_o,y_o,v_o,shading='flat',cmap=cmap)
      if clim: pl.clim(clim[0],clim[1])

    if norm=='log':
      from matplotlib.colors import LogNorm
      Norm=LogNorm(vmin=clim[0],vmax=clim[1])
    else: Norm=None

    # change hypoxia colorbar/cmap
    if var=='dye_01':
      HypoxiaLim=135
      from okean import pl_tools
      cmap=pl_tools.ucmaps().gen_oxygen(v=(0,HypoxiaLim,300.)) # default is 0,135,300 !!

    pch=pl.pcolormesh(x,y,v,shading='flat',cmap=cmap, norm=Norm)
    if clim: pl.clim(clim[0],clim[1])

    # hypoxia:
    if var=='dye_01' and ind==0 and ifig==0:
      cond=v<135.
      cond=v<HypoxiaLim
      cond=(v<HypoxiaLim)&(r.grid.h>5)

      pm=r.grid.use('pm')
      pn=r.grid.use('pn')
      A=(1/pm[cond]*1/pn[cond]/1e6).sum()

      x_,y_=proj(-98,29.5)
      pl.text(x_,y_,'Hypoxia area = %.0f km$^2$' % A,color='r',
                       fontweight='bold',fontname='monospace',
                       bbox=dict(edgecolor='none',facecolor='white', alpha=0.8))
    # hypoxia.

    # colorbar:
    if norm=='log':
      tks=10**np.linspace(np.log10(clim[0]),np.log10(clim[1]),4)
      opts={'ticks':tks,'format':'%.2f'}
    else: opts={'ticks':None}
    add_colorbar(pch,**opts)

  if currents:
    if (var and r.hasz(var)) or not useBar:  uvind=ind
    else: uvind='bar'

    x,y,z,u,v=r.sliceuv(uvind,time)
    xm, ym = proj(x,y)
    mm=np.zeros(x.shape,'bool')
    mm[::dcurr[0],::dcurr[1]]=True


    Data,err = opt.get_plconf(plconf,'HSLICES')
    wcurr=Data['wcurr'][ifig]
    acurr=Data['acurr'][ifig]

    qvopts={'units':'x','scale':scurr,'width':wcurr,'alpha':acurr}
    if var:
      q=pl.quiver(xm[mm],ym[mm],u[mm],v[mm],**qvopts)
    else:
      s=np.sqrt(u**2+v**2)
      q=pl.quiver(xm[mm],ym[mm],u[mm],v[mm],s[mm],**qvopts)
      if clim: pl.clim(clim[0],clim[1])
      add_colorbar(q)

    add_currkey(q)

  # store some info that may be required later
  info['hasz']=False
  if var and r.hasz(var): info['hasz']=True


  # logo:
  if ifig==0:
    im=os.path.join(os.path.dirname(__file__),'logo_INOCAR.png')
    i=pl.imread(im)
    h,w=i.shape[:2]
    rx=.12
    W=(proj.xmax- proj.xmin)*rx
    H=W*h/w
    l=proj.xmax
    #pl.fill([proj.xmax-W, proj.xmax, proj.xmax,     proj.xmax-W],
    #           [proj.ymin,   proj.ymin, proj.ymin+2.8*H, proj.ymin+2.8*H],
    #           '#500000',alpha=0.25,ec='none')

    ax.imshow(i,extent=(proj.xmax*.98-W,proj.xmax*.98, proj.ymin+H*.1, proj.ymin+H*1.1),zorder=1e3)
    #pl.text(proj.xmax-W/2., proj.ymin+2.2*H,'OOF',
    #           fontdict={'size':14,'family':'serif'},
    #           color='#500000',ha='center',weight='bold')

    pl.text(proj.xmax*.8, proj.ymax*(-.1),r.datetime[time].strftime("%d %b %Y"),
               fontdict={'size':11,'family':'monospace'},ha='center')

    if FA=='f':
      s='Pronostico desde %s' % r.datetime[0].strftime("%d %b %Y")
      pl.text(proj.xmax*.8, proj.ymax*(-.15),s,
      #pl.text(proj.xmax-W/2., proj.ymin+1.1*H,s,
                 fontdict={'fontsize':10},ha='center')
  # logo.

  # lims change in some mpl versions !!
  pl.gca().axis([proj.xmin, proj.xmax, proj.ymin, proj.ymax])

  return err, fig, info
Code Example #30
File: swanu.py Project: rsignell-usgs/okean
def ww3_specpoints(romsgrd, nseg=(5, 5), addxy=(.001, .001)):
    gx = netcdf.use(romsgrd, 'lon_rho')
    gy = netcdf.use(romsgrd, 'lat_rho')
    mask = netcdf.use(romsgrd, 'mask_rho')
    eta, xi = gx.shape

    if 0:  # use range (step=nseg)
        spec_res = nseg
        dxi, deta = spec_res

        ix = range(0, xi, dxi) + [xi - 1]
        iy = range(0, eta, deta) + [eta - 1]
    else:  # use linspace (nseg=n segments)
        nNS, nEW = nseg
        ix = np.round(np.linspace(0, xi - 1, nNS)).astype('i')
        iy = np.round(np.linspace(0, eta - 1, nEW)).astype('i')

    ix, iy = np.meshgrid(ix, iy)
    ix = calc.var_border(ix)
    iy = calc.var_border(iy)

    # unsorted unique:
    _, i = np.unique(ix + iy * 1j, return_index=True)
    i = np.sort(i)

    ix = ix[i]
    iy = iy[i]

    # add 1st:
    ix = np.append(ix, ix[0])
    iy = np.append(iy, iy[0])

    # create segments:
    segx = []
    segy = []
    for i in range(len(ix) - 1):
        I = ix[i], ix[i + 1]
        J = iy[i], iy[i + 1]
        i0, i1 = np.min(I), np.max(I)
        j0, j1 = np.min(J), np.max(J)

        if i0 == i1: mseg = mask[j0:j1, i0]
        else: mseg = mask[j0, i0:i1]

        if 1:
            # not use fully masked segments:
            if np.all(mseg == 0):
                print 'masked segment %d %d %d %d' % (i0, j0, i1, j1)
                continue
        else:
            # not use if segment starts with mask:
            if mseg.size and mseg[0] == 0:
                print 'masked 1st point of segment %d %d %d %d' % (i0, j0, i1,
                                                                   j1)
                continue

        segx += [[i0, i1]]
        segy += [[j0, j1]]

    # XY and IJ:
    XY = []
    IJ = []
    for i in range(len(segx)):
        i0, i1 = segx[i][0], segx[i][1]
        j0, j1 = segy[i][0], segy[i][1]

        IJ += [[i0, j0, i1, j1]]
        XY += [[gx[j0, i0], gy[j0, i0], gx[j1, i1], gy[j1, i1]]]

    IJ = np.asarray(IJ)
    XY = np.asarray(XY)

    # got to be sure the point is inside the comp grid avoiding the message:
    # "** Error            : Boundary point outside comp. grid"
    if not addxy is False:
        XY = in_comp_grid(XY, IJ, addxy)

    return XY, IJ
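
The "unsorted unique" step above encodes the (i, j) pairs as complex numbers so np.unique can drop duplicates, and then restores the original border ordering by sorting the first-occurrence indices; a tiny demonstration with made-up indices:

import numpy as np

ix = np.array([0, 2, 2, 5, 0, 7])
iy = np.array([1, 1, 1, 3, 1, 3])
_, i = np.unique(ix + iy * 1j, return_index=True)
i = np.sort(i)
print(ix[i], iy[i])   # [0 2 5 7] [1 1 3 3]
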
Code Example #31
File: op_plot.py Project: martalmeida/OOFe
def plt_wind(conf,plconf,date,FA='a',nest=0,**kargs):
  err  = ''
  fig  = False
  info = ''

  ifig  = kargs.get('ifig',0)
  day   = kargs.get('day',0)
  quiet = kargs.get('quiet',0)

  time=day
  date=dateu.parse_date(date)

  # find input files:
  args={'cf':conf,'date':date,'FA':FA,'nest':nest}
  atm = opt.nameof('in','blk',**args)
  grd = opt.nameof('in','grd',**args)
  if not os.path.isfile(atm):
    err='ATM file not found (%s)' % atm
    return err,fig,info
  if not os.path.isfile(grd):
    err='Grid file not found (%s)' % grd
    return err,fig,info

  Data,err = opt.get_plconf(plconf,'WIND')
  dcurr=Data['dcurr'][ifig]
  lcurr=Data['lcurr'][ifig]
  scurr=Data['scurr'][ifig]
  clim =Data['clim'][ifig]
  tind = Data['time'][ifig]

  x=netcdf.use(grd,'lon_rho')
  y=netcdf.use(grd,'lat_rho')
  wtime=netcdf.nctime(atm,'time')
  cnd=(wtime>=date+datetime.timedelta(days=day))&(wtime<date+datetime.timedelta(days=day+1))
  u=netcdf.use(atm,'Uwind',time=cnd)
  v=netcdf.use(atm,'Vwind',time=cnd)
  if tind=='dailyMean':
    u=u.mean(0)
    v=v.mean(0)
    sdate=wtime[cnd][0] # for title... 1st day 00h is expected to be 1st date,
                        # or model should not run!
  else: # tind of some day, ex: tind 0 from forec day 3
    u=u[tind]
    v=v[tind]
    sdate=wtime[cnd][tind]


  if day>len(u)-1:
    err='Invalid day %d (max=%d)' % (day,len(u)-1)
    return err,fig,info

  # plot grid:
  proj,fig,ax= plt_grid(plconf,grd,ifig)


  # no mask on land:
  mask=np.zeros(u.shape,'bool')
  mask[::dcurr[0],::dcurr[1]]=True
  xm, ym = proj(x,y)

  s=np.sqrt(u**2+v**2)
  q=pl.quiver(xm[mask],ym[mask],u[mask],v[mask],s[mask],scale=scurr,zorder=100)

  pl.clim(clim[0],clim[1])


  def add_colorbar(handle,**args):
    ax=pl.gca()
    Data,err = opt.get_plconf(plconf,'AXES')
    cbpos    = Data['cbpos'][ifig]
    cbbgpos  = Data['cbbgpos'][ifig]
    cbbgc    = Data['cbbgcolor'][ifig]
    cbbga    = Data['cbbgalpha'][ifig]
    cblab    = Data['cblabel'][ifig]

    # colorbar bg axes:
    if cbbgpos:
      rec=pl.axes((cbpos[0]-cbpos[2]*cbbgpos[0],cbpos[1]-cbbgpos[2]*cbpos[3],
                      cbpos[2]*(1+cbbgpos[0]+cbbgpos[1]),cbpos[3]*(1+cbbgpos[2]+cbbgpos[3])),
                      axisbg=cbbgc,frameon=1)

      rec.patch.set_alpha(cbbga)
      rec.set_xticks([])
      rec.set_yticks([])
      for k in rec.axes.spines.keys():
        rec.axes.spines[k].set_color(cbbgc)
        rec.axes.spines[k].set_alpha(cbbga)


    # colorbar:
    if cbpos:
      cbax=fig.add_axes(cbpos)
      if cbpos[2]>cbpos[3]: orient='horizontal'
      else: orient='vertical'
      cb=pl.colorbar(handle,cax=cbax,orientation=orient,drawedges=0,**args)
      pl.axes(ax)

      # colorbar label:
      cb.set_label(r'Wind Speed [m s$^{\rm{-1}}$]')

  def add_currkey(handle):
    pos=Data['kcurrpos'][ifig]
    if pos:
      pl.quiverkey(handle, pos[0], pos[1], lcurr, '%s m/s' % str(lcurr),labelpos='S',
                                                coordinates='axes')


  add_colorbar(q)
  add_currkey(q)

  # title:
  Title,err=opt.get_plconf(plconf,'AXES','title')
  if Title[ifig]:
    simpleTitle=1

    rdate=date.strftime('%d-%m-%Y')
    title='wind %s %s %d' % (rdate,FA,day)


    if simpleTitle: # simpler version of title:
      if FA=='f': # forecast date:
        rdate=dateu.next_date(date,day);
        rdate=rdate.strftime('%d-%m-%Y')

      title='wind %s' % (rdate)
      if FA=='f':
        title=title+' (forec)'

    pl.title(title)


  # logo:
  if ifig==0:
    im=os.path.join(os.path.dirname(__file__),'logo_INOCAR.png')
    i=pl.imread(im)
    h,w=i.shape[:2]
    rx=.12
    W=(proj.xmax- proj.xmin)*rx
    H=W*h/w
    l=proj.xmax
    #pl.fill([proj.xmax-W, proj.xmax, proj.xmax,     proj.xmax-W],
    #           [proj.ymin,   proj.ymin, proj.ymin+2.8*H, proj.ymin+2.8*H],
    #           '#500000',alpha=0.25,ec='none')

    ax.imshow(i,extent=(proj.xmax*.98-W,proj.xmax*.98, proj.ymin+H*.1, proj.ymin+H*1.1),zorder=1e3)
    #pl.text(proj.xmax-W/2., proj.ymin+2.2*H,'OOF',
    #           fontdict={'size':14,'family':'serif'},
    #           color='#500000',ha='center',weight='bold')

    pl.text(proj.xmax*.8, proj.ymax*(-.1),sdate.strftime("%d %b %Y"),
    #pl.text(proj.xmax*.62, proj.ymax*.93,sdate.strftime("%d %b %Y"),
               fontdict={'size':13,'family':'monospace'},ha='center')
    # change date format if tind is not daily mean, ie, add hour, etc

    if FA=='f':
      s='Pronostico desde %s' % date.strftime("%d %b %Y")
      pl.text(proj.xmax*.8, proj.ymax*(-.15),s, ##this is outside
      #pl.text(proj.xmax-W/2., proj.ymin+1.1*H,s, ##this is in the proj (inside)
                 fontdict={'fontsize':10},ha='center')
  # logo.


  # lims change in some mpl versions !!
  pl.gca().axis([proj.xmin, proj.xmax, proj.ymin, proj.ymax])

  return err,fig,info
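
A hedged driver sketch for plt_wind; the conf/plconf file names are placeholders, the import assumes the function lives in the op_plot module named in the header, the date string format accepted by dateu.parse_date is also an assumption, and plt_grid is expected to return a matplotlib figure so savefig should work on it.

# hypothetical driver; config names, import path and date format are placeholders
from op_plot import plt_wind

err, fig, info = plt_wind('oof.conf', 'plot.conf', '20120110',
                          FA='f', nest=0, ifig=0, day=2, quiet=1)
if err:
    print('plot failed: %s' % err)
else:
    fig.savefig('wind_f_day2.png', dpi=150)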
Code example #32
0
File: swanu.py Project: jsh1012/okean
def ww3_specpoints(romsgrd,nseg=(5,5),addxy=(.001,.001)):
  gx=netcdf.use(romsgrd,'lon_rho')
  gy=netcdf.use(romsgrd,'lat_rho')
  mask=netcdf.use(romsgrd,'mask_rho')
  eta,xi=gx.shape

  if 0: # use range (step=nseg)
    spec_res=nseg
    dxi,deta=spec_res

    ix=range(0,xi,dxi)+[xi-1]
    iy=range(0,eta,deta)+[eta-1]
  else: # use linspace (nseg=n segments)
    nNS,nEW=nseg
    ix=np.round(np.linspace(0,xi-1,nNS)).astype('i')
    iy=np.round(np.linspace(0,eta-1,nEW)).astype('i')


  ix,iy=np.meshgrid(ix,iy)
  ix=calc.var_border(ix)
  iy=calc.var_border(iy)

  # unsorted unique:
  _,i=np.unique(ix+iy*1j,return_index=True)
  i=np.sort(i)

  ix=ix[i]
  iy=iy[i]

  # add 1st:
  ix=np.append(ix,ix[0])
  iy=np.append(iy,iy[0])

  # create segments:
  segx=[]
  segy=[]
  for i in range(len(ix)-1):
    I=ix[i],ix[i+1]
    J=iy[i],iy[i+1]
    i0,i1=np.min(I),np.max(I)
    j0,j1=np.min(J),np.max(J)

    if i0==i1: mseg=mask[j0:j1,i0]
    else:      mseg=mask[j0,i0:i1]


    if 1:
      # not use fully masked segments:
      if np.all(mseg==0):
        print 'masked segment %d %d %d %d'%(i0,j0,i1,j1)
        continue
    else:
      # not use if segment starts with mask:
      if mseg.size and mseg[0]==0:
        print 'masked 1st point of segment %d %d %d %d'%(i0,j0,i1,j1)
        continue

   
    segx+=[[i0,i1]]
    segy+=[[j0,j1]]

  # XY and IJ:
  XY=[]
  IJ=[]
  for i in range(len(segx)):
    i0,i1=segx[i][0],segx[i][1]
    j0,j1=segy[i][0],segy[i][1]

    IJ+=[[i0,j0,i1,j1]]
    XY+=[[gx[j0,i0], gy[j0,i0], gx[j1,i1], gy[j1,i1]]]

  IJ=np.asarray(IJ)
  XY=np.asarray(XY)

  # got to be sure the point is inside the comp grid avoiding the message:
  # "** Error            : Boundary point outside comp. grid"
  if not addxy is False:
    XY=in_comp_grid(XY,IJ,addxy)

  return XY,IJ
Code example #33
0
File: cfsr.py Project: sciencewiki/okean
def cfsr_file_data(files, quiet=False):
    '''
  Returns bulk data from CFSR files
  '''
    def load_time(f):
        time = np.array((), datetime.datetime)
        ff = glob.glob(f)
        ff.sort()
        for f in ff:
            time = np.append(time, netcdf.nctime(f, 'time'))
        return time

    def load_time_main(f):
        time = load_time(f)
        # I want 0,6,12,... after 2006 results may be 3,9,15, ...
        if time[0].hour in [3, 9, 15, 21]:
            time = time + datetime.timedelta(hours=3)
        # for 2011 1st time is not 0!
        if time[0].hour == 6: time = np.hstack((time[0].replace(hour=0), time))
        return time

    def fix_time(t, var, t0, t1):
        # convert 1h, 7h, ... to 0h, 6h, ...
        if t[0].hour in [1, 7, 13,
                         19]:  # not all! sp analysis starts at 0, 6,...!
            print('     1,7,... to 0,6,...')
            var = (var[1:] * 5 + var[:-1] * 1) / 6.
            t = t[1:] - datetime.timedelta(hours=1)
        elif t[0].hour in [3, 9, 15, 21]:
            print('     3,9,... to 0,6,...')
            var = (var[1:] * 3 + var[:-1] * 3) / 6.
            t = t[1:] - datetime.timedelta(hours=3)

        cond = (t >= t0) & (t <= t1)
        t = t[cond]
        var = var[cond]

        if t[0] > t0:
            dt = t[0] - t0
            dt = dt.days * 24 + dt.seconds / 3600.  # hours
            print(
                'missing data at start: %.2d h missing --> repeating 1st data'
                % dt)
            v = np.zeros((var.shape[0] + 1, ) + var.shape[1:], var.dtype)
            v[1:] = var
            v[0] = var[0]
            var = v
            t_ = np.zeros((t.shape[0] + 1, ) + t.shape[1:], t.dtype)
            t_[1:] = t
            t_[0] = t0
            t = t_

        if t[-1] < t1:
            dt = t1 - t[-1]
            dt = dt.days * 24 + dt.seconds / 3600.  # hours
            print(
                'missing data at end: %.2d h missing --> repeating last data' %
                dt)
            v = np.zeros((var.shape[0] + 1, ) + var.shape[1:], var.dtype)
            v[:-1] = var
            v[-1] = var[-1]
            var = v
            t_ = np.zeros((t.shape[0] + 1, ) + t.shape[1:], t.dtype)
            t_[:-1] = t
            t_[-1] = t1
            t = t_

        return var, t

    out = {}

    # time:
    if 0:
        time = netcdf.nctime(files['cc'], 'time')
        # files have diff units !! so, cannot load all times at once!
        # this result would use only the units of the 1st file!!
    else:
        time = load_time_main(files['cc'])

    out['time'] = time

    # T air [K->C]
    if not quiet: print(' --> T air')
    f = files['st']
    tair = netcdf.use(f, 'TMP_L103')
    tair = tair - 273.15
    x = netcdf.use(f, 'lon')
    x[x > 180] = x[x > 180] - 360
    y = netcdf.use(f, 'lat')
    x, y = np.meshgrid(x, y)
    # check time:
    ttmp = load_time(f)
    if ttmp.size == time.size and np.all(ttmp == time): print('    time ok')
    else:
        print('   time differs !!!!', end=' ')
        tair, tfix = fix_time(ttmp, tair, time[0], time[-1])
        if tfix.size == time.size and np.all(tfix == time):
            print(' ...fixed!')
        else:
            print('time is NOT OK. Please check !!')
            return
    out['tair'] = Data(x, y, tair, 'C')

    # R humidity [%-->0--1]
    if not quiet: print(' --> R humidity')
    f = files['rh']
    rhum = netcdf.use(f, 'R_H_L103')
    rhum = rhum / 100.
    x = netcdf.use(f, 'lon')
    x[x > 180] = x[x > 180] - 360
    y = netcdf.use(f, 'lat')
    x, y = np.meshgrid(x, y)
    # check time:
    ttmp = load_time(f)
    if ttmp.size == time.size and np.all(ttmp == time): print('    time ok')
    else:
        print('   time differs !!!!', end=' ')
        rhum, tfix = fix_time(ttmp, rhum, time[0], time[-1])
        if tfix.size == time.size and np.all(tfix == time):
            print(' ...fixed!')
        else:
            print('time is NOT OK. Please check !!')
            return
    out['rhum'] = Data(x, y, rhum, '0--1')

    # surface pressure [Pa]
    if not quiet: print(' --> Surface pressure')
    f = files['sp']
    pres = netcdf.use(f, 'PRES_L1')
    x = netcdf.use(f, 'lon')
    x[x > 180] = x[x > 180] - 360
    y = netcdf.use(f, 'lat')
    x, y = np.meshgrid(x, y)
    # check time:
    ttmp = load_time(f)
    if ttmp.size == time.size and np.all(ttmp == time): print('    time ok')
    else:
        print('   time differs !!!!', end=' ')
        pres, tfix = fix_time(ttmp, pres, time[0], time[-1])
        if tfix.size == time.size and np.all(tfix == time):
            print(' ...fixed!')
        else:
            print('time is NOT OK. Please check !!')
            return
    out['pres'] = Data(x, y, pres, 'Pa')

    # P rate [kg m-2 s-1 -> cm/d]
    if not quiet: print(' --> P rate')
    f = files['pr']
    if 'PRATE_L1' in netcdf.varnames(f):
        prate = netcdf.use(f, 'PRATE_L1')
    else:
        prate = netcdf.use(f, 'PRATE_L1_Avg_1')
    x = netcdf.use(f, 'lon')
    x[x > 180] = x[x > 180] - 360
    y = netcdf.use(f, 'lat')
    x, y = np.meshgrid(x, y)
    # Conversion kg m^-2 s^-1  to cm/day
    prate = prate * 86400 * 100 / 1000.
    prate = np.where(abs(prate) < 1.e-4, 0, prate)
    # check time:
    ttmp = load_time(f)
    if ttmp.size == time.size and np.all(ttmp == time): print('    time ok')
    else:
        print('   time differs !!!!', end=' ')
        prate, tfix = fix_time(ttmp, prate, time[0], time[-1])
        if tfix.size == time.size and np.all(tfix == time):
            print(' ...fixed!')
        else:
            print('time is NOT OK. Please check !!')
            return
    out['prate'] = Data(x, y, prate, 'cm/d')

    # Net shortwave flux  [W/m^2]
    if not quiet: print(' --> Net shortwave flux')
    if not quiet: print('       SW down')
    f = files['rad']
    sw_down = netcdf.use(f, 'DSWRF_L1_Avg_1')
    x = netcdf.use(f, 'lon')
    x[x > 180] = x[x > 180] - 360
    y = netcdf.use(f, 'lat')
    x, y = np.meshgrid(x, y)
    if not quiet: print('       SW up')
    sw_up = netcdf.use(f, 'USWRF_L1_Avg_1')
    sw_net = sw_down - sw_up
    sw_net = np.where(sw_net < 1.e-10, 0, sw_net)
    # check time:
    ttmp = load_time(f)
    if ttmp.size == time.size and np.all(ttmp == time): print('    time ok')
    else:
        print('   time differs !!!!', end=' ')
        sw_net, tfix = fix_time(ttmp, sw_net, time[0], time[-1])
        if tfix.size == time.size and np.all(tfix == time):
            print(' ...fixed!')
        else:
            print('time is NOT OK. Please check !!')
            return
    out['radsw'] = Data(x, y, sw_net, 'W m-2', info='positive downward')

    # Net longwave flux  [W/m^2]
    if not quiet: print(' --> Net longwave flux')
    if not quiet: print('       LW down')
    f = files['rad']
    lw_down = netcdf.use(f, 'DLWRF_L1_Avg_1')
    x = netcdf.use(f, 'lon')
    x[x > 180] = x[x > 180] - 360
    y = netcdf.use(f, 'lat')
    x, y = np.meshgrid(x, y)
    if not quiet: print('       LW up')
    lw_up = netcdf.use(f, 'ULWRF_L1_Avg_1')
    lw_net = lw_down - lw_up
    lw_net = np.where(np.abs(lw_net) < 1.e-10, 0, lw_net)
    # check time:
    ttmp = load_time(f)
    if ttmp.size == time.size and np.all(ttmp == time): print('    time ok')
    else:
        print('   time differs !!!!', end=' ')
        lw_net, tfix1 = fix_time(ttmp, lw_net, time[0], time[-1])
        lw_down, tfix2 = fix_time(ttmp, lw_down, time[0], time[-1])
        if tfix1.size == tfix2.size == time.size and np.all((tfix1 == time)
                                                            & (tfix2 == time)):
            print(' ...fixed!')
        else:
            print('time is NOT OK. Please check !!')
            return
    # ROMS (agrif, used to be!) convention: positive upward
    out['radlw'] = Data(x, y, -lw_net, 'W m-2', info='positive upward')
    # downward lw:
    out['dlwrf'] = Data(x, y, -lw_down, 'W m-2', info='negative... downward')
    # signs convention is better explained in wrf.py

    # U and V wind speed 10m
    if not quiet: print(' --> U and V wind')
    f = files['uv']
    uwnd = netcdf.use(f, 'U_GRD_L103')
    vwnd = netcdf.use(f, 'V_GRD_L103')
    x = netcdf.use(f, 'lon')
    x[x > 180] = x[x > 180] - 360
    y = netcdf.use(f, 'lat')
    x, y = np.meshgrid(x, y)
    # check time:
    ttmp = load_time(f)
    if ttmp.size == time.size and np.all(ttmp == time): print('    time ok')
    else:
        print('   time differs !!!!', end=' ')
        uwnd, tfix1 = fix_time(ttmp, uwnd, time[0], time[-1])
        vwnd, tfix2 = fix_time(ttmp, vwnd, time[0], time[-1])
        if tfix1.size == tfix2.size == time.size and np.all((tfix1 == time)
                                                            & (tfix2 == time)):
            print(' ...fixed!')
        else:
            print('time is NOT OK. Please check !!')
            return
    #
    if not quiet: print(' --> calc wind speed and stress')
    speed = np.sqrt(uwnd**2 + vwnd**2)
    taux, tauy = air_sea.wind_stress(uwnd, vwnd)

    out['wspd'] = Data(x, y, speed, 'm s-1')
    out['uwnd'] = Data(x, y, uwnd, 'm s-1')
    out['vwnd'] = Data(x, y, vwnd, 'm s-1')
    out['sustr'] = Data(x, y, taux, 'Pa')
    out['svstr'] = Data(x, y, tauy, 'Pa')

    # Cloud cover [0--100 --> 0--1]:
    if not quiet: print(' --> Cloud cover')
    f = files['cc']
    if 'T_CDC_L200' in netcdf.varnames(f):
        clouds = netcdf.use(f, 'T_CDC_L200')
    else:
        clouds = netcdf.use(f, 'T_CDC_L200_Avg_1')
    x = netcdf.use(f, 'lon')
    x[x > 180] = x[x > 180] - 360
    y = netcdf.use(f, 'lat')
    x, y = np.meshgrid(x, y)
    clouds = clouds / 100.
    # check time:
    ttmp = load_time(f)
    if ttmp.size == time.size and np.all(ttmp == time): print('    time ok')
    else:
        print('   time differs !!!!', end=' ')
        clouds, tfix = fix_time(ttmp, clouds, time[0], time[-1])
        if tfix.size == time.size and np.all(tfix == time):
            print(' ...fixed!')
        else:
            print('time is NOT OK. Please check !!')
            return
    out['cloud'] = Data(x, y, clouds, 'fraction (0--1)')

    # rhum has different resolution (0.5, just like dew point!)
    # so, i can edit surface.py or just interpolate here rhum to
    # other vars resolution:
    if out['rhum'].data.shape != out['uwnd'].data.shape:
        from okean import calc
        print('rhum shape differs!! --> interp:')
        nt, ny, nx = out['uwnd'].data.shape
        x, y = out['uwnd'].x, out['uwnd'].y
        rhum = np.zeros((nt, ny, nx), out['rhum'].data.dtype)
        for it in range(nt):
            if it % 100 == 0: print('  %d of %d' % (it, nt))
            rhum[it] = calc.griddata(out['rhum'].x, out['rhum'].y,
                                     out['rhum'].data[it], x, y)

        out['rhum'] = Data(x, y, rhum, '0--1')

    return out
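
cfsr_file_data expects a dict of file name patterns keyed by variable group; the keys actually read above are 'cc', 'st', 'rh', 'sp', 'pr', 'rad' and 'uv'. A sketch of a possible call follows; the file name patterns are invented placeholders (load_time globs them).

# placeholder CFSR file patterns, one glob per variable group
files = {
    'cc':  'cfsr_tcdc_*.nc',     # total cloud cover
    'st':  'cfsr_tmp2m_*.nc',    # surface air temperature
    'rh':  'cfsr_rh2m_*.nc',     # relative humidity
    'sp':  'cfsr_pressfc_*.nc',  # surface pressure
    'pr':  'cfsr_prate_*.nc',    # precipitation rate
    'rad': 'cfsr_rad_*.nc',      # short and long wave radiation
    'uv':  'cfsr_wnd10m_*.nc',   # 10 m wind components
}
data = cfsr_file_data(files, quiet=False)
print(data['time'][0], '->', data['time'][-1])
print(data['tair'].data.shape)  # Data objects expose x, y and data (see the rhum interpolation above)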
Code example #34
0
def interim_file_data(files, quiet=False):
    '''
  ECMWF ERA INTERIM data for ROMS

  To be used with data obtained from the new server:
  http://apps.ecmwf.int/datasets/

  and not the old one (http://data-portal.ecmwf.int/data/d/interim_daily/)
  To deal with data from old server use module interim_past

  => forecast vars:
  time=00h, 12h
  step=3,6,9,12 --> n forec steps=4

      needed (suggestion):
      - Surface net solar radiation (ssr)
      - Surface thermal radiation (str)
      - Total precipitation (tp)
      others:
      - Surface thermal radiation downwards (strd)
      - Evaporation (e)
      - ...

  =>analysis+forec vars: (interim analysis starts every 6h; forecast starts every 12h !!)
  time=00h,6h,12h,18h
  step=0,3,9 (3 and 9 are for the forecast 00h and 12h)

      needed (suggestion):
      - Surface pressure (sp)
      - Total cloud cover    (tcc)
      - 10 metre U wind component (v10u or u10)
      - 10 metre V wind component  (v10v or v10)
      - 2 metre temperature (v2t or t2m)
      - 2 metre dewpoint temperature (v2d or d2m)

  Accumulated vars (SW rad, LW rad and precipitation) are converted to averages
  by accum2avg.
  '''

    # some variables may have different names!
    Vars = {}
    Vars['v10u'] = 'v10u', 'u10'
    Vars['v10v'] = 'v10v', 'v10'
    Vars['v2t'] = 'v2t', 't2m'
    Vars['v2d'] = 'v2d', 'd2m'

    def find_v(name):
        if name in Vars.keys():
            for v in Vars[name]:
                if varfile(v): return v
        else: return name

    def varfile(var):
        for f in files:
            if var in netcdf.varnames(f): return f

    def check_var_type(var):
        # new interim dataserver provides forec+analysis vars with extra dim
        # 'type', 0 or 1
        if var.ndim == 4:
            if not quiet: print('      dealing with var type... ', end=' ')
            v = np.zeros(var.shape[1:], var.dtype)
            v[::2] = var[0, ::2, ...]
            v[1::2] = var[1, 1::2, ...]
            var = v
            if not quiet: print('done.')

        return var

    out = {}

    # time:
    # all times from analysis file, except last ind which will be
    # the last time of forecast file
    aFile = varfile(find_v('v2t'))  # air temp, for instance
    fFile = varfile(find_v('ssr'))  # sw rad, for instance
    if not quiet: print(' reading "analysis" time from file %s' % aFile)
    aTime = netcdf.nctime(aFile, 'time')
    aTime.sort()  # analysis+forecast files may not have time sorted!!
    if not quiet: print(' reading "forecast" time from file %s' % fFile)
    fTime = netcdf.nctime(fFile, 'time')
    fTime.sort()  # this one should be sorted...
    time = np.append(aTime, fTime[-1])
    out['time'] = time

    # calc number of forecast steps stored,nforec (used by accum2avg)
    if [fTime[i].hour for i in range(8)] == list(range(3, 22, 3)) + [0]: nforec = 4
    elif [fTime[i].hour for i in range(4)] == list(range(6, 19, 6)) + [0]: nforec = 2
    else:
        if not quiet: print('INTERIM WRONG TIME: cannot determine n forec steps')
        return

    if not quiet: print(' ==> n forecast steps = %d' % nforec)

    # x,y:
    if not quiet: print(' reading x,y from file %s' % files[0])
    x = netcdf.use(files[0], 'longitude')
    y = netcdf.use(files[0], 'latitude')
    x[x > 180] = x[x > 180] - 360
    if x.ndim == 1 and y.ndim == 1:
        x, y = np.meshgrid(x, y)

    # tair [K-->C]
    if not quiet: print(' --> T air')
    vname = find_v('v2t')
    f = varfile(vname)
    # time may not be monotonically increasing !!
    # when using mix of analysis and forecast variables and steps
    sortInds = np.argsort(netcdf.use(f, 'time'))
    tair = netcdf.use(f, vname, time=sortInds) - 273.15
    tair = check_var_type(tair)
    if not quiet and np.any(sortInds != range(len(sortInds))):
        print('      sort DONE')
    if not quiet: print('      fill_tend...')
    tair = fill_tend(tair)
    out['tair'] = Data(x, y, tair, 'Celsius')

    # R humidity [0--1]
    if not quiet: print(' --> R humidity (from T dew)')
    vname = find_v('v2d')
    f = varfile(vname)
    sortInds = np.argsort(netcdf.use(f, 'time'))
    Td = netcdf.use(f, vname, time=sortInds) - 273.15
    Td = check_var_type(Td)
    if not quiet and np.any(sortInds != range(len(sortInds))):
        print('      sort DONE')
    if not quiet: print('      fill_tend... (T dew)')
    Td = fill_tend(Td)
    T = tair
    rhum = relative_humidity(T, Td)
    ##  rhum=((112-0.1*T+Td)/(112+0.9*T))**8
    rhum[rhum > 1] = 1
    out['rhum'] = Data(x, y, rhum, '0--1')

    # surface pressure [Pa]
    if not quiet: print(' --> Surface pressure')
    vname = find_v('sp')
    f = varfile(vname)
    sortInds = np.argsort(netcdf.use(f, 'time'))
    pres = netcdf.use(f, vname, time=sortInds)
    pres = check_var_type(pres)
    if not quiet and np.any(sortInds != range(len(sortInds))):
        print('      sort DONE')
    if not quiet: print('      fill_tend...')
    pres = fill_tend(pres)
    out['pres'] = Data(x, y, pres, 'Pa')

    # P rate [m --> cm day-1]
    if not quiet: print(' --> P rate')
    vname = find_v('tp')
    f = varfile(vname)
    sortInds = np.argsort(netcdf.use(f, 'time'))
    prate = netcdf.use(f, vname, time=sortInds)
    prate = check_var_type(prate)
    if not quiet and np.any(sortInds != range(len(sortInds))):
        print('      sort DONE')
    if not quiet: print('      accum2avg...')
    prate = accum2avg(prate, nforec)
    conv = 100 * 86400  # from m s-1      --> cm day-1
    #conv= 100*86400/1000. # from kg m-2 s-1 --> cm day-1
    prate = prate * conv  # cm day-1
    if not quiet: print('      fill_t0...')
    prate = fill_t0(prate)
    prate[prate < 0] = 0
    out['prate'] = Data(x, y, prate, 'cm day-1')

    # Net shortwave flux  [W m-2 s+1 --> W m-2]
    if not quiet: print(' --> Net shortwave flux')
    vname = find_v('ssr')
    f = varfile(vname)
    sortInds = np.argsort(netcdf.use(f, 'time'))
    sw_net = netcdf.use(f, vname, time=sortInds)
    sw_net = check_var_type(sw_net)
    if not quiet and np.any(sortInds != range(len(sortInds))):
        print('      sort DONE')
    if not quiet: print('      accum2avg...')
    sw_net = accum2avg(sw_net, nforec)
    if not quiet: print('      fill_t0...')
    sw_net = fill_t0(sw_net)
    out['radsw'] = Data(x, y, sw_net, 'W m-2', info='positive downward')

    # Net longwave flux  [W m-2 s+1 --> W m-2]
    if not quiet: print(' --> Net longwave flux')
    vname = find_v('str')
    f = varfile(vname)
    sortInds = np.argsort(netcdf.use(f, 'time'))
    lw_net = netcdf.use(
        f, vname, time=sortInds) * -1  # let us consider positive upward (*-1)
    lw_net = check_var_type(lw_net)
    if not quiet and np.any(sortInds != range(len(sortInds))):
        print('      sort DONE')
    if not quiet: print('      accum2avg...')
    lw_net = accum2avg(lw_net, nforec)
    if not quiet: print('      fill_t0...')
    lw_net = fill_t0(lw_net)
    out['radlw'] = Data(x, y, lw_net, 'W m-2', info='positive upward')

    # longwave down:
    # can be obtained from clouds!!
    if not quiet: print(' --> Down longwave flux')
    vname = find_v('strd')
    f = varfile(vname)
    if f:
        sortInds = np.argsort(netcdf.use(f, 'time'))
        lw_down = netcdf.use(
            f, vname,
            time=sortInds) * -1  # let us consider positive upward (*-1)
        lw_down = check_var_type(lw_down)
        if not quiet and np.any(sortInds != range(len(sortInds))):
            print('      sort DONE')
        if not quiet: print('      accum2avg...')
        lw_down = accum2avg(lw_down, nforec)
        if not quiet: print('      fill_t0...')
        lw_down = fill_t0(lw_down)
        out['dlwrf'] = Data(x,
                            y,
                            lw_down,
                            'W m-2',
                            info='negative... downward')
    else:
        print('down long wave CANNOT BE USED')

    # U and V wind speed 10m
    if not quiet: print(' --> U and V wind')
    vname = find_v('v10u')
    f = varfile(vname)
    sortInds = np.argsort(netcdf.use(f, 'time'))
    uwnd = netcdf.use(f, vname, time=sortInds)
    uwnd = check_var_type(uwnd)
    if not quiet and np.any(sortInds != range(len(sortInds))):
        print('      sort DONE')
    if not quiet: print('      fill_tend...')
    uwnd = fill_tend(uwnd)
    vname = find_v('v10v')
    f = varfile(vname)
    sortInds = np.argsort(netcdf.use(f, 'time'))
    vwnd = netcdf.use(f, vname, time=sortInds)
    vwnd = check_var_type(vwnd)
    if not quiet and np.any(sortInds != range(len(sortInds))):
        print('      sort DONE')
    if not quiet: print('      fill_tend...')
    vwnd = fill_tend(vwnd)

    if not quiet: print(' --> calc wind speed and stress')
    speed = np.sqrt(uwnd**2 + vwnd**2)
    taux, tauy = air_sea.wind_stress(uwnd, vwnd)

    out['wspd'] = Data(x, y, speed, 'm s-1')
    out['uwnd'] = Data(x, y, uwnd, 'm s-1')
    out['vwnd'] = Data(x, y, vwnd, 'm s-1')
    out['sustr'] = Data(x, y, taux, 'Pa')
    out['svstr'] = Data(x, y, tauy, 'Pa')

    # Cloud cover [0--1]:
    if not quiet: print(' --> Cloud cover')
    vname = find_v('tcc')
    f = varfile(vname)
    sortInds = np.argsort(netcdf.use(f, 'time'))
    clouds = netcdf.use(f, vname, time=sortInds)
    clouds = check_var_type(clouds)
    if not quiet and np.any(sortInds != range(len(sortInds))):
        print('      sort DONE')
    if not quiet: print('      fill_tend...')
    clouds = fill_tend(clouds)
    out['cloud'] = Data(x, y, clouds, 'fraction (0--1)')

    return out
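
A hedged call sketch for interim_file_data: files is a list of netCDF paths downloaded from http://apps.ecmwf.int/datasets/ that together hold the analysis and forecast variables listed in the docstring. The paths below are placeholders.

files = ['interim_an_2012.nc',  # placeholder: analysis vars (sp, tcc, u10/v10, t2m, d2m)
         'interim_fc_2012.nc']  # placeholder: forecast vars (ssr, str, strd, tp)
data = interim_file_data(files, quiet=False)
if data:  # None is returned when the number of forecast steps cannot be determined
    print(data['time'][0], '->', data['time'][-1])
    print(data['radsw'].data.mean())  # net shortwave, W m-2, positive downward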
Code example #35
0
def read_time(file):
    return parse_time(netcdf.use(file, 'Times'))
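
read_time delegates to a parse_time helper that is not part of this listing. For WRF-style files the 'Times' variable is a character array holding strings such as '2012-01-10_00:00:00'; the sketch below shows what such a parser could look like, purely as an illustration and not the okean implementation.

import datetime
import numpy as np

def parse_time_sketch(times_char):
    # hypothetical stand-in for parse_time; times_char is the (ntimes, 19)
    # character array of a WRF 'Times' variable, assumed stored as bytes ('S1')
    out = []
    for row in np.asarray(times_char):
        s = b''.join(row).decode()
        out.append(datetime.datetime.strptime(s, '%Y-%m-%d_%H:%M:%S'))
    return np.array(out)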
Code example #36
0
File: narr.py Project: martalmeida/okean
def narr_file_data(fname,xlim=False,ylim=False,quiet=False):
  '''
  Returns bulk data from one NARR file
  '''

  out={}

  # loading grid:
  if 0:
    if not quiet: print(' reading lon,lat from file %s' % grd)
    nc=netcdf.ncopen(grd)
    x=nc.vars['East_longitude_0-360'][0,...]-360.
    y=nc.vars['Latitude_-90_to_+90'][0,...] # time always 1 !!
    nc.close()
  else:
    if not quiet: print(' reading lon,lat from file %s' % grdTxt)
    x,y=load_grid()
    #x=x-360.
    x=-x

  ny,nx=x.shape


  if (xlim,ylim)==(False,False):i0,i1,j0,j1=0,nx,0,ny
  else:
    i0,i1,j0,j1=calc.ij_limits(x, y, xlim, ylim, margin=0)
    x=x[j0:j1,i0:i1]
    y=y[j0:j1,i0:i1]

  try:
    nc=netcdf.ncopen(fname)
  except:
    return {}

  xx=str(i0)+':'+str(i1)
  yy=str(j0)+':'+str(j1)

  tdim=netcdf.fdim(nc,'time1')
  if tdim!=1: print('WARNING: tdim !=1  !!!!!!')

  # T surface [K->C]
  if not quiet: print(' --> T air')
  tair=netcdf.use(nc,'Temperature_surface',time1=0,x=xx,y=yy)
  tair=tair-273.15
  out['tair']=cb.Data(x,y,tair,'C')

  # R humidity [% -> 0--1]
  if not quiet: print(' --> R humidity')
  rhum=netcdf.use(nc,'Relative_humidity',time1=0,x=xx,y=yy)
  out['rhum']=cb.Data(x,y,rhum/100.,'0--1')

  # surface pressure [Pa]
  if not quiet: print(' --> Surface pressure')
  pres=netcdf.use(nc,'Pressure_surface',time1=0,x=xx,y=yy)
  out['pres']=cb.Data(x,y,pres,'Pa')

  # P rate [kg m-2 s-1 -> cm/d]
  if not quiet: print(' --> P rate')
  prate=netcdf.use(nc,'Precipitation_rate',time1=0,x=xx,y=yy)
  prate=prate*86400*100/1000.
  out['prate']=cb.Data(x,y,prate,'cm/d')

  # Net shortwave flux  [ W m-2]
  if not quiet: print(' --> Net shortwave flux')
  if not quiet: print('       SW down')
  sw_down=netcdf.use(nc,'Downward_shortwave_radiation_flux',time1=0,x=xx,y=yy)
  if not quiet: print('       SW up')
  sw_up=netcdf.use(nc,'Upward_short_wave_radiation_flux_surface',time1=0,x=xx,y=yy)
  sw_net=sw_down-sw_up
  out['radsw']=cb.Data(x,y,sw_net,'W m-2',info='positive downward')

  # Net longwave flux  [W/m^2]
  if not quiet: print(' --> Net longwave flux')
  if not quiet: print('       LW down')
  lw_down=netcdf.use(nc,'Downward_longwave_radiation_flux',time1=0,x=xx,y=yy)
  if not quiet: print('       LW up')
  lw_up=netcdf.use(nc,'Upward_long_wave_radiation_flux_surface',time1=0,x=xx,y=yy)
  lw_net=lw_down-lw_up
  out['radlw']=cb.Data(x,y,-lw_net,'W m-2',info='positive upward')

  # downward lw:
  out['dlwrf']=cb.Data(x,y,-lw_down,'W m-2',info='negative... downward')

  # U and V wind speed 10m
  if not quiet: print(' --> U and V wind')
  # vertical dim is height_above_ground1: 10 and 30 m
  uwnd=netcdf.use(nc,'u_wind_height_above_ground',height_above_ground1=0,time1=0,x=xx,y=yy)
  vwnd=netcdf.use(nc,'v_wind_height_above_ground',height_above_ground1=0,time1=0,x=xx,y=yy)

  if not quiet: print(' --> calc wind speed and stress')
  speed = np.sqrt(uwnd**2+vwnd**2)
  taux,tauy=air_sea.wind_stress(uwnd,vwnd)

  out['wspd']=cb.Data(x,y,speed,'m s-1')
  out['uwnd']=cb.Data(x,y,uwnd,'m s-1')
  out['vwnd']=cb.Data(x,y,vwnd,'m s-1')
  out['sustr']=cb.Data(x,y,taux,'Pa')
  out['svstr']=cb.Data(x,y,tauy,'Pa')

  # Cloud cover [0--100 --> 0--1]:
  if not quiet: print(' --> Cloud cover')
  clouds=netcdf.use(nc,'Total_cloud_cover',time1=0,x=xx,y=yy)
  out['cloud']=cb.Data(x,y,clouds/100.,'fraction (0--1)')

  nc.close()
  return  out
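
A hedged call sketch for narr_file_data; the file name is a placeholder, xlim/ylim crop the NARR grid through calc.ij_limits, and the cb.Data objects are assumed to expose the array as .data, like the Data objects used in the CFSR example.

data = narr_file_data('narr-a_221_20120110_0000_000.nc',  # placeholder file name
                      xlim=(-98., -88.), ylim=(26., 31.), quiet=False)
if data:  # an empty dict is returned when the file cannot be opened
    for k in ('tair', 'rhum', 'pres', 'prate', 'radsw', 'radlw', 'wspd', 'cloud'):
        print(k, data[k].data.shape)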
Code example #37
0
def load_data(f, quiet=0, **kargs):
    '''
  Loads prognostic variables (temp,salt,u,v,ubar,vbar,zeta) from
  netcdf file or opendap server. Also loads lon,lat, depth, and time.

  If f is a file, it must include the 1d variables lon,lat and depth;
  the 2d variable ssh (zeta) and the 3d variables temp, salt, u and v;
  ie, each file must contain data for a single time. The file must also
  contain the variable time.

  If f is an opendap address, it must also contain all these variables
  or the ones defined in the input karg settings (DataAccess object)

  To deal with the case of variables in different files/opendap addresses,
  f can also be a dictionary with keys the variables and values the files
  or opendap addresses. In this case, the keys must be:
    - temp
    - salt
    - u
    - v
    - ssh
    - misc, for lon, lat, depth, time and dimensions
      or xy for lon,lat and x,ydim; z for depth and zdim, time for time

  The output data (dict) is suitable to be used by data2roms, which
  interpolates the data to ROMS 3d grid.
  Also outputs an error/status string.

  kargs:
    inds, dict with dimension names/values (where time dim can be integer
          or datetime)
    settings, DataAccess object
    extra, extra misc vars to load [(outKey0,fileVar0),...]
    t_units, units of the time variable; by default the 'units' attribute is used
  '''

    sett = DataAccess()
    inds = {}
    extra = []
    t_units = []
    if 'settings' in kargs.keys(): sett = kargs['settings']
    if 'inds' in kargs.keys(): inds = kargs['inds']
    if 'extra' in kargs.keys(): extra = kargs['extra']
    if 't_units' in kargs.keys(): t_units = kargs['t_units']

    res = {}
    msg = ''

    if not isinstance(f, dict) and not f.startswith('http') and not isfile(f):
        msg = 'file not found %s' % f
        if not quiet: print msg
        return res, msg

    # load nc files:
    if not isinstance(f, dict):
        f = {'temp': f, 'salt': f, 'u': f, 'v': f, 'ssh': f, 'misc': f}

    if not f.has_key('xy'): f['xy'] = f['misc']
    if not f.has_key('z'): f['z'] = f['misc']
    if not f.has_key('time'): f['time'] = f['misc']

    filesUsed = []
    ncUsed = []
    for i in f.keys():
        if not quiet: print '(%s) loading from %s' % (i.ljust(5), f[i])

        if i == 'temp':
            if f[i] in filesUsed: ncTemp = ncUsed[filesUsed.index(f[i])]
            else:
                ncTemp = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncTemp]

        elif i == 'salt':
            if f[i] in filesUsed: ncSalt = ncUsed[filesUsed.index(f[i])]
            else:
                ncSalt = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncSalt]

        elif i == 'u':
            if f[i] in filesUsed: ncU = ncUsed[filesUsed.index(f[i])]
            else:
                ncU = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncU]

        elif i == 'v':
            if f[i] in filesUsed: ncV = ncUsed[filesUsed.index(f[i])]
            else:
                ncV = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncV]

        elif i == 'ssh':
            if f[i] in filesUsed: ncSsh = ncUsed[filesUsed.index(f[i])]
            else:
                ncSsh = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncSsh]

        elif i == 'xy':
            if f[i] in filesUsed: ncXy = ncUsed[filesUsed.index(f[i])]
            else:
                ncXy = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncXy]

        elif i == 'z':
            if f[i] in filesUsed: ncZ = ncUsed[filesUsed.index(f[i])]
            else:
                ncZ = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncZ]

        elif i == 'time':
            if f[i] in filesUsed: ncTime = ncUsed[filesUsed.index(f[i])]
            else:
                ncTime = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncTime]

        elif i == 'misc':
            if f[i] in filesUsed: ncMisc = ncUsed[filesUsed.index(f[i])]
            else:
                ncMisc = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncMisc]

    # load dims:
    if not quiet: print '  loading dims...'
    dimsXy = netcdf.fdim(ncXy)
    dimsZ = netcdf.fdim(ncZ)

    res['NX'] = dimsXy[sett.xdim]
    res['NY'] = dimsXy[sett.ydim]
    ###if sett.z_name:
    if sett.zdim:
        res['NZ'] = dimsZ[sett.zdim]
    else:
        res['NZ'] = 1

    # about horizontal inds:
    if inds.has_key(
            sett.xdim) and len(inds[sett.xdim]) == 2 and not isinstance(
                inds[sett.xdim], basestring):
        if not quiet: print '  calc horizontal inds...'
        xlim = inds[sett.xdim]
        ylim = inds[sett.ydim]

        inds.pop(sett.xdim)
        inds.pop(sett.ydim)

        lon = netcdf.use(ncXy, sett.x_name, **inds)
        if np.any(lon > 360): lon = np.mod(lon, 360.)
        lat = netcdf.use(ncXy, sett.y_name, **inds)
        i0, i1, j0, j1 = calc.ij_limits(lon, lat, xlim, ylim, margin=3)
        inds[sett.xdim] = '%d:%d' % (i0, i1)
        inds[sett.ydim] = '%d:%d' % (j0, j1)

    if not quiet: print '  loading lon, lat, depth...'
    res['lon'] = netcdf.use(ncXy, sett.x_name, **inds)
    if np.any(res['lon'] > 360): res['lon'] = np.mod(res['lon'], 360.)
    res['lat'] = netcdf.use(ncXy, sett.y_name, **inds)
    if sett.z_name:
        res['depth'] = -netcdf.use(ncZ, sett.z_name, **inds)
    else:
        res['depth'] = False

    if res['lon'].size != res['lat'].size:
        res['lon'], res['lat'] = np.meshgrid(res['lon'], res['lat'])
        # needed for griddata, later

    # update nx,ny:
    if inds.has_key(sett.xdim):
        res['NY'], res['NX'] = res['lon'].shape

    # extra misc vars:
    if len(extra):
        for outKey, fileVar in extra:
            if not quiet:
                print '  loading extra misc... %s %s' % (outKey, fileVar)
            res[outKey] = netcdf.use(ncMisc, fileVar, **inds)

    # time:
    # file may have one or several times. If several, time dim must be given
    # with kargs inds!
    # but file may also have no time dim or time name !
    if sett.time_name:
        if not quiet: print '  loading time...'
        if t_units:
            times = netcdf.use(ncTime, sett.time_name)
            times = netcdf.num2date(times, t_units)
        else:
            times = netcdf.nctime(ncTime, sett.time_name)

        if inds.has_key(sett.tdim):
            try:
                tind = dts.parse_date(inds[sett.tdim])
            except:
                tind = inds[sett.tdim]  # is an integer, for instance

            if isinstance(tind, datetime.datetime):
                tind, = np.where(times == tind)
                if tind.size:
                    tind = tind[0]
                    inds[sett.tdim] = tind  # update inds to extract other variables
                else:
                    Msg = 'date not found'
                    msg += '\n' + Msg
                    return res, msg + ' ERROR'

            date = times[tind]
            try:
                len(date)
                ndates = True
            except:
                ndates = False

            if ndates:
                if not quiet:
                    print '    tind, date= len=%d: %d to %d, %s to %s' % (
                        len(date), tind[0], tind[-1], date[0].isoformat(' '),
                        date[-1].isoformat(' '))
            else:
                if not quiet:
                    print '    tind, date= %d %s' % (tind, date.isoformat(' '))

        elif times.size == 1:
            date = times[0]
            if not quiet: print '    date= %s' % date.isoformat(' ')
        else:  # must provide tind as input!!
            Msg = 'several dates in file... provide tind!'
            msg += '\n' + Msg
            return res, msg + ' ERROR'

        res['date'] = date
    else:
        if not quiet: print '    warning: not using time !!'
        res['date'] = 0

    empty3d = np.zeros([res['NZ'], res['NY'], res['NX']])
    empty2d = np.zeros([res['NY'], res['NX']])

    if 'temp' in f.keys():
        if not quiet: print '  loading temp...'
        if sett.temp_name in ncTemp.varnames:
            res['temp'] = netcdf.use(ncTemp, sett.temp_name, **inds)
        else:
            Msg = 'var %s not found' % 'temp'
            msg += '\n' + Msg
            if not quiet: print Msg
            res['temp'] = empty3d

    if 'salt' in f.keys():
        if not quiet: print '  loading salt...'
        if sett.salt_name in ncSalt.varnames:
            res['salt'] = netcdf.use(ncSalt, sett.salt_name, **inds)
        else:
            Msg = 'var %s not found' % 'salt'
            msg += '\n' + Msg
            if not quiet: print Msg
            res['salt'] = empty3d

    if 'u' in f.keys():
        if not quiet: print '  loading u...'
        if sett.u_name in ncU.varnames:
            res['u'] = netcdf.use(ncU, sett.u_name, **inds)
        else:
            Msg = 'var %s not found' % 'u'
            msg += '\n' + Msg
            if not quiet: print Msg
            res['u'] = empty3d

    if 'v' in f.keys():
        if not quiet: print '  loading v...'
        if sett.v_name in ncV.varnames:
            res['v'] = netcdf.use(ncV, sett.v_name, **inds)
        else:
            Msg = 'var %s not found' % 'v'
            msg += '\n' + Msg
            if not quiet: print Msg
            res['v'] = empty3d

    if 'ssh' in f.keys():
        if not quiet: print '  loading ssh...'
        if sett.ssh_name in ncSsh.varnames:
            res['ssh'] = netcdf.use(ncSsh, sett.ssh_name, **inds)
        else:
            Msg = 'var %s not found' % 'ssh'
            msg += '\n' + Msg
            if not quiet: print Msg
            res['ssh'] = empty2d

    for nc in ncUsed:
        try:
            nc.close()
        except:
            pass

    return res, msg
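
A hedged sketch of calling load_data with a per-variable file dict and a date, as described in the docstring. The OPeNDAP address is a placeholder, the 'time' key in inds assumes the DataAccess time dimension is named 'time', and the date string format accepted by dts.parse_date is also an assumption.

src = 'http://example.org/opendap/ocean_model'  # placeholder address
f = {'temp': src, 'salt': src, 'u': src, 'v': src, 'ssh': src, 'misc': src}

res, msg = load_data(f, quiet=0, inds={'time': '2012-01-10 00:00'})
if msg.endswith('ERROR'):
    print(msg)
else:
    print(res['date'], res['NX'], res['NY'], res['NZ'])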
Code example #38
0
def cfsr_file_data(files, quiet=False):
    '''
  Returns bulk data from CFSR files
  '''
    def load_time(f):
        time = np.array((), datetime.datetime)
        ff = glob.glob(f)
        ff.sort()
        for f in ff:
            time = np.append(time, netcdf.nctime(f, 'time'))
        return time

    def load_time_main(f):
        time = load_time(f)
        # I want 0,6,12,... after 2006 results may be 3,9,15, ...
        if time[0].hour in [3, 9, 15, 21]:
            time = time + datetime.timedelta(hours=3)
        # for 2011 1st time is not 0!
        if time[0].hour == 6: time = np.hstack((time[0].replace(hour=0), time))
        return time

    def fix_time(t, var, t0, t1):
        # convert 1h, 7h, ... to 0h, 6h, ...
        if t[0].hour in [1, 7, 13,
                         19]:  # not all! sp analysis starts at 0, 6,...!
            print '     1,7,... to 0,6,...'
            var = (var[1:] * 5 + var[:-1] * 1) / 6.
            t = t[1:] - datetime.timedelta(hours=1)
        elif t[0].hour in [3, 9, 15, 21]:
            print '     3,9,... to 0,6,...'
            var = (var[1:] * 3 + var[:-1] * 3) / 6.
            t = t[1:] - datetime.timedelta(hours=3)

        cond = (t >= t0) & (t <= t1)
        t = t[cond]
        var = var[cond]

        if t[0] > t0:
            dt = t[0] - t0
            dt = dt.days * 24 + dt.seconds / 3600.  # hours
            print 'missing data at start: %.2d h missing --> repeating 1st data' % dt
            v = np.zeros((var.shape[0] + 1, ) + var.shape[1:], var.dtype)
            v[1:] = var
            v[0] = var[0]
            var = v
            t_ = np.zeros((t.shape[0] + 1, ) + t.shape[1:], t.dtype)
            t_[1:] = t
            t_[0] = t0
            t = t_

        if t[-1] < t1:
            dt = t1 - t[-1]
            dt = dt.days * 24 + dt.seconds / 3600.  # hours
            print 'missing data at end: %.2d h missing --> repeating last data' % dt
            v = np.zeros((var.shape[0] + 1, ) + var.shape[1:], var.dtype)
            v[:-1] = var
            v[-1] = var[-1]
            var = v
            t_ = np.zeros((t.shape[0] + 1, ) + t.shape[1:], t.dtype)
            t_[:-1] = t
            t_[-1] = t1
            t = t_

        return var, t

    out = {}

    # time:
    if 0:
        time = netcdf.nctime(files['cc'], 'time')
        # files have diff units !! so, cannot load all times at once!
        # this result would use only the units of the 1st file!!
    else:
        time = load_time_main(files['cc'])

    out['time'] = time

    # T air [K->C]
    if not quiet: print ' --> T air'
    f = files['st']
    tair = netcdf.use(f, 'TMP_L103')
    tair = tair - 273.15
    x = netcdf.use(f, 'lon')
    x[x > 180] = x[x > 180] - 360
    y = netcdf.use(f, 'lat')
    x, y = np.meshgrid(x, y)
    # check time:
    ttmp = load_time(f)
    if ttmp.size == time.size and np.all(ttmp == time): print '    time ok'
    else:
Code example #39
0
def plt_hslice(conf, plconf, date, FA='a', nest=0, **kargs):
    err = ''
    fig = False
    info = {}

    type = 'avg'
    var = 'temp'
    slice = 'z'
    ind = -10
    time = -1
    currents = False
    dcurr = (3, 3)
    scurr = 3
    lcurr = 0.2
    ifig = 0
    # closefig = True
    clim = False
    quiet = False
    outStoragePath = False
    cmap = None
    norm = None
    useBar = True  # currents are barotropic for 2D vars (like zeta)

    keys = kargs.keys()
    if 'type' in keys: type = kargs['type']
    if 'var' in keys: var = kargs['var']
    if 'slice' in keys: slice = kargs['slice']
    if 'ind' in keys: ind = kargs['ind']
    if 'time' in keys: time = kargs['time']
    if 'currents' in keys: currents = kargs['currents']
    if 'dcurr' in keys: dcurr = kargs['dcurr']
    if 'scurr' in keys: scurr = kargs['scurr']
    if 'lcurr' in keys: lcurr = kargs['lcurr']
    if 'ifig' in keys: ifig = kargs['ifig']
    if 'closefig' in keys: closefig = kargs['closefig']
    if 'clim' in keys: clim = kargs['clim']
    if 'quiet' in keys: quiet = kargs['quiet']
    if 'ostorage' in keys: outStoragePath = kargs['ostorage']
    if 'cmap' in keys: cmap = kargs['cmap']
    if 'usebar' in keys: useBar = kargs['usebar']
    if 'norm' in keys: norm = kargs['norm']

    date = dateu.parse_date(date)

    # find input files:
    args = {
        'cf': conf,
        'date': date,
        'FA': FA,
        'nest': nest,
        'ostorage': outStoragePath
    }
    his = opt.nameof('out', type, **args)
    clm = opt.nameof('in', 'clm', **args)
    grd = opt.nameof('in', 'grd', **args)
    if not os.path.isfile(his):
        err = 'Main file not found (%s)' % his
        return err, fig, info
    if not os.path.isfile(grd):
        err = 'Grid file not found (%s)' % grd
        return err, fig, info
    r = roms.His(his, grd)

    # plot grid:
    proj, fig, ax = plt_grid(plconf, grd, ifig)

    def add_colorbar(handle, **args):
        ax = pl.gca()
        Data, err = opt.get_plconf(plconf, 'AXES')
        cbpos = Data['cbpos'][ifig]
        cbbgpos = Data['cbbgpos'][ifig]
        cbbgc = Data['cbbgcolor'][ifig]
        cbbga = Data['cbbgalpha'][ifig]
        cblab = Data['cblabel'][ifig]

        # colorbar bg axes:
        if cbbgpos:
            rec = pl.axes((cbpos[0] - cbpos[2] * cbbgpos[0],
                           cbpos[1] - cbbgpos[2] * cbpos[3], cbpos[2] *
                           (1 + cbbgpos[0] + cbbgpos[1]), cbpos[3] *
                           (1 + cbbgpos[2] + cbbgpos[3])),
                          axisbg=cbbgc,
                          frameon=1)

            rec.patch.set_alpha(cbbga)
            rec.set_xticks([])
            rec.set_yticks([])
            for k in rec.axes.spines.keys():
                rec.axes.spines[k].set_color(cbbgc)
                rec.axes.spines[k].set_alpha(cbbga)

        # colorbar:
        if cbpos:
            cbax = fig.add_axes(cbpos)
            if cbpos[2] > cbpos[3]: orient = 'horizontal'
            else: orient = 'vertical'
            cb = pl.colorbar(handle,
                             cax=cbax,
                             orientation=orient,
                             drawedges=0,
                             **args)
            pl.axes(ax)

        # colorbar label:
        if cblab:
            Data, err = opt.get_plconf(plconf, 'HSLICES')
            varnames = Data['varnames'][ifig].split(',')
            vnames = Data['vnames'][ifig].split(',')
            lab = ''
            for i in range(len(varnames)):
                if varnames[i].strip() == var:
                    lab = vnames[i].strip()
                    break

            if lab:
                if r.hasz(var):
                    if slice == 'k':
                        if ind == 0: lab = 'Bottom ' + lab
                        elif ind in (-1, 'surface'): lab = 'Surface ' + lab
                    elif slice == 'z':
                        lab = lab + ' ' + str(ind) + 'm'

                cb.set_label(lab)

    def add_currkey(handle):
        Data, err = opt.get_plconf(plconf, 'HSLICES')
        pos = Data['kcurrpos'][ifig]
        if pos:
            pl.quiverkey(handle,
                         pos[0],
                         pos[1],
                         lcurr,
                         '%s m/s' % str(lcurr),
                         labelpos='S',
                         coordinates='axes')

    # hslice:
    if var:
        if slice == 'k': metodo = r.slicek
        elif slice == 'z': metodo = r.slicez

        x, y, z, v = metodo(var, ind, time, plot=False)
        x, y = proj(x, y)

        # cmap:
        if isinstance(cmap, basestring):
            try:
                cmap = pl.cm.cmap_d[cmap]
            except:
                try:
                    from okean import pl_tools
                    cmap = pl_tools.cm.cmap_d[cmap]
                except:
                    cmap = pl.cm.jet

        # original data from clm
        if slice == 'k' and ind in (
                -1, ) and var + '_original' in netcdf.varnames(clm):
            tcurr = r.datetime[time]
            x_o = netcdf.use(clm, 'x_original')
            y_o = netcdf.use(clm, 'y_original')
            x_o, y_o = proj(x_o, y_o)
            t_o = netcdf.nctime(clm, 'clim_time')

            # average to current time:
            i0 = np.where(t_o <= tcurr)[0][-1]
            i1 = np.where(t_o > tcurr)[0][0]
            v_o0 = netcdf.use(clm, var + '_original', time=i0)
            v_o1 = netcdf.use(clm, var + '_original', time=i1)
            # avg:
            a = tcurr - t_o[i0]
            b = t_o[i1] - tcurr

            a = a.days * 86400 + a.seconds
            b = b.days * 86400 + b.seconds

            if a == 0: v_o = v_o0
            elif b == 0: v_o = v_o1
            else: v_o = (v_o0 * b + v_o1 * a) / (a + b)

            pch = pl.pcolormesh(x_o, y_o, v_o, shading='flat', cmap=cmap)
            if clim: pl.clim(clim[0], clim[1])

        if norm == 'log':
            from matplotlib.colors import LogNorm
            Norm = LogNorm(vmin=clim[0], vmax=clim[1])
        else:
            Norm = None

        # change hypoxia colorbar/cmap
        if var == 'dye_01':
            HypoxiaLim = 135
            from okean import pl_tools
            cmap = pl_tools.ucmaps().gen_oxygen(
                v=(0, HypoxiaLim, 300.))  # default is 0,135,300 !!

        pch = pl.pcolormesh(x, y, v, shading='flat', cmap=cmap, norm=Norm)
        if clim: pl.clim(clim[0], clim[1])

        # hypoxia:
        if var == 'dye_01' and ind == 0 and ifig == 0:
            cond = (v < HypoxiaLim) & (r.grid.h > 5)

            pm = r.grid.use('pm')
            pn = r.grid.use('pn')
            A = (1 / pm[cond] * 1 / pn[cond] / 1e6).sum()

            x_, y_ = proj(-98, 29.5)
            pl.text(x_,
                    y_,
                    'Hypoxia area = %.0f km$^2$' % A,
                    color='r',
                    fontweight='bold',
                    fontname='monospace',
                    bbox=dict(edgecolor='none', facecolor='white', alpha=0.8))
        # hypoxia.

        # colorbar:
        if norm == 'log':
            tks = 10**np.linspace(np.log10(clim[0]), np.log10(clim[1]), 4)
            opts = {'ticks': tks, 'format': '%.2f'}
        else:
            opts = {'ticks': None}
        add_colorbar(pch, **opts)

    if currents:
        if (var and r.hasz(var)) or not useBar: uvind = ind
        else: uvind = 'bar'

        x, y, z, u, v = r.sliceuv(uvind, time)
        xm, ym = proj(x, y)
        mm = np.zeros(x.shape, 'bool')
        mm[::dcurr[0], ::dcurr[1]] = True

        Data, err = opt.get_plconf(plconf, 'HSLICES')
        wcurr = Data['wcurr'][ifig]
        acurr = Data['acurr'][ifig]

        qvopts = {'units': 'x', 'scale': scurr, 'width': wcurr, 'alpha': acurr}
        if var:
            q = pl.quiver(xm[mm], ym[mm], u[mm], v[mm], **qvopts)
        else:
            s = np.sqrt(u**2 + v**2)
            q = pl.quiver(xm[mm], ym[mm], u[mm], v[mm], s[mm], **qvopts)
            if clim: pl.clim(clim[0], clim[1])
            add_colorbar(q)

        add_currkey(q)

    # store some info that may be required later
    info['hasz'] = False
    if var and r.hasz(var): info['hasz'] = True

    # logo:
    if ifig == 0:
        im = os.path.join(os.path.dirname(__file__), 'logo_INOCAR.png')
        i = pl.imread(im)
        h, w = i.shape[:2]
        rx = .12
        W = (proj.xmax - proj.xmin) * rx
        H = W * h / w
        l = proj.xmax
        #pl.fill([proj.xmax-W, proj.xmax, proj.xmax,     proj.xmax-W],
        #           [proj.ymin,   proj.ymin, proj.ymin+2.8*H, proj.ymin+2.8*H],
        #           '#500000',alpha=0.25,ec='none')

        ax.imshow(i,
                  extent=(proj.xmax * .98 - W, proj.xmax * .98,
                          proj.ymin + H * .1, proj.ymin + H * 1.1),
                  zorder=1e3)
        #pl.text(proj.xmax-W/2., proj.ymin+2.2*H,'OOF',
        #           fontdict={'size':14,'family':'serif'},
        #           color='#500000',ha='center',weight='bold')

        pl.text(proj.xmax * .8,
                proj.ymax * (-.1),
                r.datetime[time].strftime("%d %b %Y"),
                fontdict={
                    'size': 11,
                    'family': 'monospace'
                },
                ha='center')

        if FA == 'f':
            s = 'Pronostico desde %s' % r.datetime[0].strftime("%d %b %Y")
            pl.text(
                proj.xmax * .8,
                proj.ymax * (-.15),
                s,
                #pl.text(proj.xmax-W/2., proj.ymin+1.1*H,s,
                fontdict={'fontsize': 10},
                ha='center')
    # logo.

    # lims change in some mpl versions !!
    pl.gca().axis([proj.xmin, proj.xmax, proj.ymin, proj.ymax])

    return err, fig, info
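
A hedged call sketch for plt_hslice, mirroring the plt_wind driver above; the conf/plconf names are placeholders, the date format is assumed, and all keyword arguments shown are ones actually read from kargs in the function.

err, fig, info = plt_hslice('oof.conf', 'plot.conf', '20120110', FA='a', nest=0,
                            var='temp', slice='z', ind=-10, time=-1,
                            currents=True, dcurr=(3, 3), clim=(10, 30), quiet=True)
if not err:
    print('variable has a vertical dimension:', info['hasz'])
    fig.savefig('temp_z10m.png', dpi=150)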
Code example #40
0
File: roms_deprecated.py Project: martalmeida/okean
 def use(self,varname,**kargs):
   return netcdf.use(self.name,varname,**kargs)
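
This method is a thin wrapper around netcdf.use with the object's file name; a hedged equivalence sketch, where g is assumed to be an instance of the deprecated class and 'temp' an arbitrary variable present in g.name:

temp_a = g.use('temp', time=0)               # via the wrapper
temp_b = netcdf.use(g.name, 'temp', time=0)  # direct call, same result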
Code example #41
0
def plt_wind(conf, plconf, date, FA='a', nest=0, **kargs):
    err = ''
    fig = False
    info = ''

    ifig = kargs.get('ifig', 0)
    day = kargs.get('day', 0)
    quiet = kargs.get('quiet', 0)

    time = day
    date = dateu.parse_date(date)

    # find input files:
    args = {'cf': conf, 'date': date, 'FA': FA, 'nest': nest}
    atm = opt.nameof('in', 'blk', **args)
    grd = opt.nameof('in', 'grd', **args)
    if not os.path.isfile(atm):
        err = 'ATM file not found (%s)' % atm
        return err, fig, info
    if not os.path.isfile(grd):
        err = 'Grid file not found (%s)' % grd
        return err, fig, info

    Data, err = opt.get_plconf(plconf, 'WIND')
    dcurr = Data['dcurr'][ifig]
    lcurr = Data['lcurr'][ifig]
    scurr = Data['scurr'][ifig]
    clim = Data['clim'][ifig]
    tind = Data['time'][ifig]

    x = netcdf.use(grd, 'lon_rho')
    y = netcdf.use(grd, 'lat_rho')
    wtime = netcdf.nctime(atm, 'time')
    cnd = (wtime >= date + datetime.timedelta(days=day)) & (
        wtime < date + datetime.timedelta(days=day + 1))
    u = netcdf.use(atm, 'Uwind', time=cnd)
    v = netcdf.use(atm, 'Vwind', time=cnd)
    if tind == 'dailyMean':
        u = u.mean(0)
        v = v.mean(0)
        sdate = wtime[cnd][
            0]  # for title... 1st day 00h is expected to be 1st date,
        # or model should not run!
    else:  # tind of some day, ex: tind 0 from forec day 3
        u = u[tind]
        v = v[tind]
        sdate = wtime[cnd][tind]

    if day > len(u) - 1:
        err = 'Invalid day %d (max=%d)' % (day, len(u) - 1)
        return err, fig, info

    # plot grid:
    proj, fig, ax = plt_grid(plconf, grd, ifig)

    # no mask on land:
    mask = np.zeros(u.shape, 'bool')
    mask[::dcurr[0], ::dcurr[1]] = True
    xm, ym = proj(x, y)

    s = np.sqrt(u**2 + v**2)
    q = pl.quiver(xm[mask],
                  ym[mask],
                  u[mask],
                  v[mask],
                  s[mask],
                  scale=scurr,
                  zorder=100)

    pl.clim(clim[0], clim[1])

    def add_colorbar(handle, **args):
        ax = pl.gca()
        Data, err = opt.get_plconf(plconf, 'AXES')
        cbpos = Data['cbpos'][ifig]
        cbbgpos = Data['cbbgpos'][ifig]
        cbbgc = Data['cbbgcolor'][ifig]
        cbbga = Data['cbbgalpha'][ifig]
        cblab = Data['cblabel'][ifig]

        # colorbar bg axes:
        if cbbgpos:
            rec = pl.axes((cbpos[0] - cbpos[2] * cbbgpos[0],
                           cbpos[1] - cbbgpos[2] * cbpos[3], cbpos[2] *
                           (1 + cbbgpos[0] + cbbgpos[1]), cbpos[3] *
                           (1 + cbbgpos[2] + cbbgpos[3])),
                          axisbg=cbbgc,
                          frameon=1)

            rec.patch.set_alpha(cbbga)
            rec.set_xticks([])
            rec.set_yticks([])
            for k in rec.axes.spines.keys():
                rec.axes.spines[k].set_color(cbbgc)
                rec.axes.spines[k].set_alpha(cbbga)

        # colorbar:
        if cbpos:
            cbax = fig.add_axes(cbpos)
            if cbpos[2] > cbpos[3]: orient = 'horizontal'
            else: orient = 'vertical'
            cb = pl.colorbar(handle,
                             cax=cbax,
                             orientation=orient,
                             drawedges=0,
                             **args)
            pl.axes(ax)

            # colorbar label:
            cb.set_label(r'Wind Speed [m s$^{\rm{-1}}$]')

    def add_currkey(handle):
        pos = Data['kcurrpos'][ifig]
        if pos:
            pl.quiverkey(handle,
                         pos[0],
                         pos[1],
                         lcurr,
                         '%s m/s' % str(lcurr),
                         labelpos='S',
                         coordinates='axes')

    add_colorbar(q)
    add_currkey(q)

    # title:
    Title, err = opt.get_plconf(plconf, 'AXES', 'title')
    if Title[ifig]:
        simpleTitle = 1

        rdate = date.strftime('%d-%m-%Y')
        title = 'wind %s %s %d' % (rdate, FA, day)

        if simpleTitle:  # simpler version of title:
            if FA == 'f':  # forecast date:
                rdate = dateu.next_date(date, day)
                rdate = rdate.strftime('%d-%m-%Y')

            title = 'wind %s' % (rdate)
            if FA == 'f':
                title = title + ' (forec)'

        pl.title(title)

    # logo:
    if ifig == 0:
        im = os.path.join(os.path.dirname(__file__), 'logo_INOCAR.png')
        i = pl.imread(im)
        h, w = i.shape[:2]
        rx = .12
        W = (proj.xmax - proj.xmin) * rx
        H = W * h / w
        l = proj.xmax
        #pl.fill([proj.xmax-W, proj.xmax, proj.xmax,     proj.xmax-W],
        #           [proj.ymin,   proj.ymin, proj.ymin+2.8*H, proj.ymin+2.8*H],
        #           '#500000',alpha=0.25,ec='none')

        ax.imshow(i,
                  extent=(proj.xmax * .98 - W, proj.xmax * .98,
                          proj.ymin + H * .1, proj.ymin + H * 1.1),
                  zorder=1e3)
        #pl.text(proj.xmax-W/2., proj.ymin+2.2*H,'OOF',
        #           fontdict={'size':14,'family':'serif'},
        #           color='#500000',ha='center',weight='bold')

        pl.text(
            proj.xmax * .8,
            proj.ymax * (-.1),
            sdate.strftime("%d %b %Y"),
            #pl.text(proj.xmax*.62, proj.ymax*.93,sdate.strftime("%d %b %Y"),
            fontdict={
                'size': 13,
                'family': 'monospace'
            },
            ha='center')
        # change date format if tind is not daily mean, ie, add hour, etc

        if FA == 'f':
            s = 'Pronostico desde %s' % date.strftime("%d %b %Y")
            pl.text(
                proj.xmax * .8,
                proj.ymax * (-.15),
                s,  ##this is outside
                #pl.text(proj.xmax-W/2., proj.ymin+1.1*H,s, ##this is in the proj (inside)
                fontdict={'fontsize': 10},
                ha='center')
    # logo.

    # lims change in some mpl versions !!
    pl.gca().axis([proj.xmin, proj.xmax, proj.ymin, proj.ymax])

    return err, fig, info
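# A minimal, self-contained sketch of the quiver pattern used in the listing
# above (synthetic wind field and plain matplotlib axes; the plconf/proj
# machinery, plt_grid and opt.get_plconf are not needed for the idea itself):
import numpy as np
import pylab as pl

yy, xx = np.mgrid[0:30, 0:40]
u = np.cos(yy / 5.)                    # synthetic wind components
v = np.sin(xx / 5.)
s = np.sqrt(u**2 + v**2)               # speed, used to color the arrows

mask = np.zeros(u.shape, 'bool')       # stride mask: keep one arrow per 3x3 cells
mask[::3, ::3] = True

q = pl.quiver(xx[mask], yy[mask], u[mask], v[mask], s[mask], scale=30)
pl.colorbar(q, label='wind speed [m/s]')
pl.quiverkey(q, 0.9, 1.02, 1.0, '1 m/s', labelpos='S', coordinates='axes')
pl.show()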
Code example #42
0
File: narr.py Project: rsignell-usgs/okean
def narr_file_data(fname, xlim=False, ylim=False, quiet=False):
    '''
  Returns bulk data from one NARR file
  '''

    out = {}

    # loading grid:
    if 0:
        if not quiet: print ' reading lon,lat from file %s' % grd
        nc = netcdf.ncopen(grd)
        x = nc.vars['East_longitude_0-360'][0, ...] - 360.
        y = nc.vars['Latitude_-90_to_+90'][0, ...]  # time always 1 !!
        nc.close()
    else:
        if not quiet: print ' reading lon,lat from file %s' % grdTxt
        x, y = load_grid()
        #x=x-360.
        x = -x

    ny, nx = x.shape

    if (xlim, ylim) == (False, False): i0, i1, j0, j1 = 0, nx, 0, ny
    else:
        i0, i1, j0, j1 = calc.ij_limits(x, y, xlim, ylim, margin=0)
        x = x[j0:j1, i0:i1]
        y = y[j0:j1, i0:i1]

    try:
        nc = netcdf.ncopen(fname)
    except:
        return {}

    xx = str(i0) + ':' + str(i1)
    yy = str(j0) + ':' + str(j1)

    tdim = netcdf.fdim(nc, 'time1')
    if tdim != 1: print 'WARNING: tdim !=1  !!!!!!'

    # T surface [K->C]
    if not quiet: print ' --> T air'
    tair = netcdf.use(nc, 'Temperature_surface', time1=0, x=xx, y=yy)
    tair = tair - 273.15
    out['tair'] = cb.Data(x, y, tair, 'C')

    # R humidity [% -> 0--1]
    if not quiet: print ' --> R humidity'
    rhum = netcdf.use(nc, 'Relative_humidity', time1=0, x=xx, y=yy)
    out['rhum'] = cb.Data(x, y, rhum / 100., '0--1')

    # surface pressure [Pa]
    if not quiet: print ' --> Surface pressure'
    pres = netcdf.use(nc, 'Pressure_surface', time1=0, x=xx, y=yy)
    out['pres'] = cb.Data(x, y, pres, 'Pa')

    # P rate [kg m-2 s-1 -> cm/d]
    if not quiet: print ' --> P rate'
    prate = netcdf.use(nc, 'Precipitation_rate', time1=0, x=xx, y=yy)
    prate = prate * 86400 * 100 / 1000.
    out['prate'] = cb.Data(x, y, prate, 'cm/d')

    # Net shortwave flux  [ W m-2]
    if not quiet: print ' --> Net shortwave flux'
    if not quiet: print '       SW down'
    sw_down = netcdf.use(nc,
                         'Downward_shortwave_radiation_flux',
                         time1=0,
                         x=xx,
                         y=yy)
    if not quiet: print '       SW up'
    sw_up = netcdf.use(nc,
                       'Upward_short_wave_radiation_flux_surface',
                       time1=0,
                       x=xx,
                       y=yy)
    sw_net = sw_down - sw_up
    out['radsw'] = cb.Data(x, y, sw_net, 'W m-2', info='positive downward')

    # Net longwave flux  [W/m^2]
    if not quiet: print ' --> Net longwave flux'
    if not quiet: print '       LW down'
    lw_down = netcdf.use(nc,
                         'Downward_longwave_radiation_flux',
                         time1=0,
                         x=xx,
                         y=yy)
    if not quiet: print '       LW up'
    lw_up = netcdf.use(nc,
                       'Upward_long_wave_radiation_flux_surface',
                       time1=0,
                       x=xx,
                       y=yy)
    lw_net = lw_down - lw_up
    out['radlw'] = cb.Data(x, y, -lw_net, 'W m-2', info='positive upward')

    # downward lw:
    out['dlwrf'] = cb.Data(x,
                           y,
                           -lw_down,
                           'W m-2',
                           info='negative... downward')

    # U and V wind speed 10m
    if not quiet: print ' --> U and V wind'
    # vertical dim is height_above_ground1: 10 and 30 m
    uwnd = netcdf.use(nc,
                      'u_wind_height_above_ground',
                      height_above_ground1=0,
                      time1=0,
                      x=xx,
                      y=yy)
    vwnd = netcdf.use(nc,
                      'v_wind_height_above_ground',
                      height_above_ground1=0,
                      time1=0,
                      x=xx,
                      y=yy)

    if not quiet: print ' --> calc wind speed and stress'
    speed = np.sqrt(uwnd**2 + vwnd**2)
    taux, tauy = air_sea.wind_stress(uwnd, vwnd)

    out['wspd'] = cb.Data(x, y, speed, 'm s-1')
    out['uwnd'] = cb.Data(x, y, uwnd, 'm s-1')
    out['vwnd'] = cb.Data(x, y, vwnd, 'm s-1')
    out['sustr'] = cb.Data(x, y, taux, 'Pa')
    out['svstr'] = cb.Data(x, y, tauy, 'Pa')

    # Cloud cover [0--100 --> 0--1]:
    if not quiet: print ' --> Cloud cover'
    clouds = netcdf.use(nc, 'Total_cloud_cover', time1=0, x=xx, y=yy)
    out['cloud'] = cb.Data(x, y, clouds / 100., 'fraction (0--1)')

    nc.close()
    return out
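# The NARR reader above wraps every field in cb.Data(x, y, v, units, info=...).
# A minimal stand-in with the same call signature (a sketch only, not okean's
# actual class) shows the structure of the returned dict and the precipitation
# unit conversion (kg m-2 s-1, i.e. mm s-1 of water, to cm/day) used above:
import numpy as np

class DataSketch:
    def __init__(self, x, y, data, units, info=''):
        self.x, self.y, self.data, self.units, self.info = x, y, data, units, info

x, y = np.meshgrid(np.linspace(-98, -80, 10), np.linspace(18, 31, 8))
prate_SI = np.full(x.shape, 2.0e-4)          # kg m-2 s-1 (example value)
prate = prate_SI * 86400 * 100 / 1000.       # -> cm/day, same factor as above
out = {'prate': DataSketch(x, y, prate, 'cm/d')}
print(out['prate'].units, out['prate'].data.mean())   # cm/d 1.728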
Code example #43
0
File: interim.py Project: martalmeida/okean
def interim_file_data(files,quiet=False):
  '''
  ECMWF ERA INTERIM data for ROMS

  To be used with data obtained from the new server:
  http://apps.ecmwf.int/datasets/

  and not the old one (http://data-portal.ecmwf.int/data/d/interim_daily/)
  To deal with data from old server use module interim_past

  => forecast vars:
  time=00h, 12h
  step=3,6,9,12 --> n forec steps=4

      needed (suggestion):
      - Surface net solar radiation (ssr)
      - Surface thermal radiation (str)
      - Total precipitation (tp)
      others:
      - Surface thermal radiation downwards (strd)
      - Evaporation (e)
      - ...

  =>analysis+forec vars: (interim analysis starts every 6h; forecast starts every 12h !!)
  time=00h,6h,12h,18h
  step=0,3,9 (3 and 9 are for the forecast 00h and 12h)

      needed (suggestion):
      - Surface pressure (sp)
      - Total cloud cover    (tcc)
      - 10 metre U wind component (v10u or u10)
      - 10 metre V wind component  (v10v or v10)
      - 2 metre temperature (v2t or t2m)
      - 2 metre dewpoint temperature (v2d or d2m)

  Accumulated vars (SW and LW radiation and precipitation) are converted to
  averages by accum2avg (a minimal sketch of this conversion follows this
  listing).
  '''

  # some variables may have different names! 
  Vars={}
  Vars['v10u']='v10u','u10'
  Vars['v10v']='v10v','v10'
  Vars['v2t']='v2t','t2m'
  Vars['v2d']='v2d','d2m'

  def find_v(name):
    if name in Vars.keys():
      for v in Vars[name]:
        if varfile(v): return v
    else: return name


  def varfile(var):
    for f in files:
      if var in netcdf.varnames(f): return f


  def check_var_type(var):
    # new interim dataserver provides forec+analysis vars with extra dim
    # 'type', 0 or 1
    if var.ndim==4:
      if not quiet: print('      dealing with var type... '),
      v=np.zeros(var.shape[1:],var.dtype)
      v[::2]=var[0,::2,...]
      v[1::2]=var[1,1::2,...]
      var=v
      if not quiet: print('done.')

    return var


  out={}

  # time:
  # all times from analysis file, except last ind which will be
  # the last time of forecast file
  aFile=varfile(find_v('v2t')) # air temp, for instance
  fFile=varfile(find_v('ssr')) # sw rad, for instance
  if not quiet: print(' reading "analysis" time from file %s' % aFile)
  aTime=netcdf.nctime(aFile,'time')
  aTime.sort() # analysis+forecast files may not have time sorted!!
  if not quiet: print(' reading "forecast" time from file %s' % fFile)
  fTime=netcdf.nctime(fFile,'time')
  fTime.sort() # this one should be sorted...
  time=np.append(aTime,fTime[-1])
  out['time']=time

  # calc number of forecast steps stored,nforec (used by accum2avg)
  if [fTime[i].hour for i in range(8)]==list(range(3,22,3))+[0]: nforec=4
  elif [fTime[i].hour for i in range(4)]==list(range(6,19,6))+[0]: nforec=2
  else:
    if not quiet: print('INTERIM WRONG TIME: cannot determine n forec steps')
    return

  if not quiet: print(' ==> n forecast steps = %d' % nforec)

  # x,y:
  if not quiet: print(' reading x,y from file %s' % files[0])
  x=netcdf.use(files[0],'longitude')
  y=netcdf.use(files[0],'latitude')
  x[x>180]=x[x>180]-360
  if x.ndim==1 and y.ndim==1:
    x,y=np.meshgrid(x,y)


  # tair [K-->C]
  if not quiet: print(' --> T air')
  vname=find_v('v2t')
  f=varfile(vname)
  # time may not be monotonically increasing !!
  # when using mix of analysis and forecast variables and steps
  sortInds=np.argsort(netcdf.use(f,'time'))
  tair=netcdf.use(f,vname,time=sortInds)-273.15
  tair=check_var_type(tair)
  if not quiet and np.any(sortInds!=range(len(sortInds))): print('      sort DONE')
  if not quiet: print('      fill_tend...')
  tair=fill_tend(tair)
  out['tair']=Data(x,y,tair,'Celsius')

  # R humidity [0--1]
  if not quiet: print(' --> R humidity (from T dew)')
  vname=find_v('v2d')
  f=varfile(vname)
  sortInds=np.argsort(netcdf.use(f,'time'))
  Td=netcdf.use(f,vname,time=sortInds)-273.15
  Td=check_var_type(Td)
  if not quiet and np.any(sortInds!=range(len(sortInds))): print('      sort DONE')
  if not quiet: print('      fill_tend... (T dew)')
  Td=fill_tend(Td)
  T=tair
  rhum=air_sea.relative_humidity(T,Td)
  ##  rhum=((112-0.1*T+Td)/(112+0.9*T))**8
  rhum[rhum>1]=1
  out['rhum']=Data(x,y,rhum,'0--1')

  # surface pressure [Pa]
  if not quiet: print(' --> Surface pressure')
  vname=find_v('sp')
  f=varfile(vname)
  sortInds=np.argsort(netcdf.use(f,'time'))
  pres=netcdf.use(f,vname,time=sortInds)
  pres=check_var_type(pres)
  if not quiet and np.any(sortInds!=range(len(sortInds))): print('      sort DONE')
  if not quiet: print('      fill_tend...')
  pres=fill_tend(pres)
  out['pres']=Data(x,y,pres,'Pa')

  # P rate [m --> cm day-1]
  if not quiet: print(' --> P rate')
  vname=find_v('tp')
  f=varfile(vname)
  sortInds=np.argsort(netcdf.use(f,'time'))
  prate=netcdf.use(f,vname,time=sortInds)
  prate=check_var_type(prate)
  if not quiet and np.any(sortInds!=range(len(sortInds))): print('      sort DONE')
  if not quiet: print('      accum2avg...')
  prate=accum2avg(prate,nforec)
  conv= 100*86400       # from m s-1      --> cm day-1
  #conv= 100*86400/1000. # from kg m-2 s-1 --> cm day-1
  prate=prate*conv # cm day-1
  if not quiet: print('      fill_t0...')
  prate=fill_t0(prate)
  prate[prate<0]=0
  out['prate']=Data(x,y,prate,'cm day-1')

  # Net shortwave flux  [W m-2 s+1 --> W m-2]
  if not quiet: print(' --> Net shortwave flux')
  vname=find_v('ssr')
  f=varfile(vname)
  sortInds=np.argsort(netcdf.use(f,'time'))
  sw_net=netcdf.use(f,vname,time=sortInds)
  sw_net=check_var_type(sw_net)
  if not quiet and np.any(sortInds!=range(len(sortInds))): print('      sort DONE')
  if not quiet: print('      accum2avg...')
  sw_net=accum2avg(sw_net,nforec)
  if not quiet: print('      fill_t0...')
  sw_net=fill_t0(sw_net)
  out['radsw']=Data(x,y,sw_net,'W m-2',info='positive downward')


  # Net longwave flux  [W m-2 s+1 --> W m-2]
  if not quiet: print(' --> Net longwave flux')
  vname=find_v('str')
  f=varfile(vname)
  sortInds=np.argsort(netcdf.use(f,'time'))
  lw_net=netcdf.use(f,vname,time=sortInds)*-1 # let us consider positive upward (*-1)
  lw_net=check_var_type(lw_net)
  if not quiet and np.any(sortInds!=range(len(sortInds))): print('      sort DONE')
  if not quiet: print('      accum2avg...')
  lw_net=accum2avg(lw_net,nforec)
  if not quiet: print('      fill_t0...')
  lw_net=fill_t0(lw_net)
  out['radlw']=Data(x,y,lw_net,'W m-2',info='positive upward')

  # longwave down:
  # can be obtained from clouds!!
  if not quiet: print(' --> Down longwave flux')
  vname=find_v('strd')
  f=varfile(vname)
  if f:
    sortInds=np.argsort(netcdf.use(f,'time'))
    lw_down=netcdf.use(f,vname,time=sortInds)*-1 # let us consider positive upward (*-1)
    lw_down=check_var_type(lw_down)
    if not quiet and np.any(sortInds!=range(len(sortInds))): print('      sort DONE')
    if not quiet: print('      accum2avg...')
    lw_down=accum2avg(lw_down,nforec)
    if not quiet: print('      fill_t0...')
    lw_down=fill_t0(lw_down)
    out['dlwrf']=Data(x,y,lw_down,'W m-2',info='negative... downward')
  else:  print('down long wave CANNOT BE USED')

  # U and V wind speed 10m
  if not quiet: print(' --> U and V wind')
  vname=find_v('v10u')
  f=varfile(vname)
  sortInds=np.argsort(netcdf.use(f,'time'))
  uwnd=netcdf.use(f,vname,time=sortInds)
  uwnd=check_var_type(uwnd)
  if not quiet and np.any(sortInds!=range(len(sortInds))): print('      sort DONE')
  if not quiet: print('      fill_tend...')
  uwnd=fill_tend(uwnd)
  vname=find_v('v10v')
  f=varfile(vname)
  sortInds=np.argsort(netcdf.use(f,'time'))
  vwnd=netcdf.use(f,vname,time=sortInds)
  vwnd=check_var_type(vwnd)
  if not quiet and np.any(sortInds!=range(len(sortInds))): print('      sort DONE')
  if not quiet: print('      fill_tend...')
  vwnd=fill_tend(vwnd)

  if not quiet: print(' --> calc wind speed and stress')
  speed = np.sqrt(uwnd**2+vwnd**2)
  taux,tauy=air_sea.wind_stress(uwnd,vwnd)

  out['wspd']=Data(x,y,speed,'m s-1')
  out['uwnd']=Data(x,y,uwnd,'m s-1')
  out['vwnd']=Data(x,y,vwnd,'m s-1')
  out['sustr']=Data(x,y,taux,'Pa')
  out['svstr']=Data(x,y,tauy,'Pa')

  # Cloud cover [0--1]:
  if not quiet: print(' --> Cloud cover')
  vname=find_v('tcc')
  f=varfile(vname)
  sortInds=np.argsort(netcdf.use(f,'time'))
  clouds=netcdf.use(f,vname,time=sortInds)
  clouds=check_var_type(clouds)
  if not quiet and np.any(sortInds!=range(len(sortInds))): print('      sort DONE')
  if not quiet: print('      fill_tend...')
  clouds=fill_tend(clouds)
  out['cloud']=Data(x,y,clouds,'fraction (0--1)')

  return out
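# A minimal sketch of the accumulated-to-average conversion that accum2avg is
# described as doing in the docstring above (this is not the okean
# implementation, just the usual recipe: within each forecast cycle of nforec
# steps the field is the accumulation since the cycle start, so per-interval
# averages come from differencing consecutive steps and dividing by the step
# length in seconds, e.g. J m-2 -> W m-2 for the radiation fields):
import numpy as np

def accum2avg_sketch(v, nforec, dt_seconds):
    '''v: accumulated field with shape (time, ...); nforec: steps per cycle'''
    out = np.empty_like(v, dtype=float)
    for k in range(v.shape[0]):
        if k % nforec == 0:                 # first step of a forecast cycle
            out[k] = v[k] / dt_seconds
        else:                               # remove the previous accumulation
            out[k] = (v[k] - v[k - 1]) / dt_seconds
    return out

# toy check: a constant rate of 1 unit per 1 s step accumulates to 1,2,3,4
acc = np.array([1., 2, 3, 4, 1, 2, 3, 4])
print(accum2avg_sketch(acc, nforec=4, dt_seconds=1.))   # -> all ones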
Code example #44
0
  def extract(self,varname,method='fast',nfast=1,quiet=1):
    if not self.roms.hast(varname):
      method,nfast='fast',1

    tag=self.horiz_var_type(varname)
    if tag in self.uinds.keys():
      inds0=self.inds0[tag]
      uinds=self.uinds[tag]
    else:
      inds0,uinds=self.locate(varname,quiet)

    J,I,T = uinds.T
    J0,I0,T0 = inds0.T

    if not self.roms.hast(varname): no_time=True
    else: no_time=False


    if method=='fast': # faster but will download more data than needed!
      II=range(I.min(),I.max()+1)
      JJ=range(J.min(),J.max()+1)
      TT=range(T[T>=0].min(),T.max()+1)


      if not calc.isiterable(nfast) and nfast==1:
        if not quiet: print('loading %s : ijt= %d %d %d'%(varname,len(II),len(JJ),len(TT)))
        v=netcdf.use(self.nc,varname,xiSEARCH=II,etaSEARCH=JJ,ocean_time=TT)
      else:
        v=False
        if not calc.isiterable(nfast):
          nfast=min(nfast,len(TT)-1)
          tmp=list(range(TT[0],TT[-1]+2,(TT[-1]-TT[0])//nfast))
          if tmp[-1]<TT[-1]+1: tmp+=[TT[-1]+1]
        else: tmp=nfast

        for k in range(len(tmp)-1):
          tt=range(tmp[k],tmp[k+1])
          if not quiet: print('loading %s : ijt= %d %d %d (t %d to %d)'%(varname,len(II),len(JJ),len(tt), tt[0],tt[-1]))
          vtmp=netcdf.use(self.nc,varname,xiSEARCH=II,etaSEARCH=JJ,ocean_time=tt)
          if not v is False:
            if vtmp.ndim<v.ndim: vtmp=vtmp[np.newaxis,:] # if len of last tt is 1 !
            v=np.vstack((v,vtmp))
          else: v=vtmp

      
      if v.ndim>3:
        V=np.ma.zeros((I.size,v.shape[1]),'f')
      else:
        V=np.ma.zeros(I.size,'f')

      for i in range(I.size):
        xi   = I[i]-I.min()
        eta  = J[i]-J.min()
        if T[i]>=0: tind = T[i]-T[T>=0].min()
        else: tind=T[i] # negative means data outside model time

        if v.ndim==4:
          V[i]=v[tind,:,eta,xi]
        elif v.ndim==3:
          V[i]=v[tind,eta,xi]
        elif v.ndim==2:
          V[i]=v[eta,xi]

    else:
      V=False
      for i in range(len(I)):
        if T[i]<0: continue
        if not quiet: print('loading %s  (%d of %d): ijt= %d %d %d'%(varname,i,len(I),I[i],J[i],T[i]))
        v=netcdf.use(self.nc,varname,xiSEARCH=I[i],etaSEARCH=J[i],ocean_time=T[i])
        if V is False:
          if v.ndim>1: shape=(len(I),)+v.shape
          else: shape=len(I)
          V=np.ma.zeros(shape,'f')

        V[i]=v

    lon,lat=self.model_lon_lat(varname)
    U=np.array(())
    for i in range(len(self.t)):
      xi   = I0[i]
      eta  = J0[i]
      tind = T0[i]
      if tind<0: continue # data outside model time

      # rotate cell before interp:
      xp=np.asarray([lon[eta,xi],lon[eta,xi+1],lon[eta+1,xi+1],lon[eta+1,xi],self.x[i]])
      yp=np.asarray([lat[eta,xi],lat[eta,xi+1],lat[eta+1,xi+1],lat[eta+1,xi],self.y[i]])
      xp,yp=calc.rot2d(xp,yp,self.roms.grid.angle[eta,xi])
      x=xp[:-1].min(),xp[-1],xp[:-1].max()
      y=yp[:-1].min(),yp[-1],yp[:-1].max()

      A = x[1]-x[0]
      a = x[2]-x[1]
      B = y[1]-y[0]
      b = y[2]-y[1]

      tcond=(T==tind)#|(tind<0)
      tcond1=(T==tind+1)#|(tind+1<0)
      j0=np.where((I==xi)&(J==eta)&tcond)[0][0]
      j1=np.where((I==xi+1)&(J==eta)&tcond)[0][0]
      j2=np.where((I==xi+1)&(J==eta+1)&tcond)[0][0]
      j3=np.where((I==xi)&(J==eta+1)&tcond)[0][0]
      u0=(V[j0]*a*b+V[j1]*A*b+V[j2]*A*B+V[j3]*a*B)/(a*b+A*b+A*B+a*B)
      
      if not no_time:
        dt0=self.t[i]-self.roms.time[tind]
        dt1=self.roms.time[tind+1]-self.t[i]
        dt0=dt0.days*86400+dt0.seconds
        dt1=dt1.days*86400+dt1.seconds

        k0=np.where((I==xi)&(J==eta)&tcond1)[0][0]
        k1=np.where((I==xi+1)&(J==eta)&tcond1)[0][0]
        k2=np.where((I==xi+1)&(J==eta+1)&tcond1)[0][0]
        k3=np.where((I==xi)&(J==eta+1)&tcond1)[0][0]

        u1=(V[k0]*a*b+V[k1]*A*b+V[k2]*A*B+V[k3]*a*B)/(a*b+A*b+A*B+a*B)
        u=(u0*dt1+u1*dt0)/(dt1+dt0)
      else:
        u=u0

      if not U.size:
        U=np.ma.zeros((len(self.t),u.size),'f')
        U=np.ma.masked_where(U==0,U)

      U[i]=u

    U=np.squeeze(U)
    return U
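# The corner weighting used by extract() above is, after rotating the grid
# cell to be locally axis aligned, just bilinear interpolation written as an
# area-weighted mean: each corner value is weighted by the area of the
# sub-rectangle opposite to it. A minimal axis-aligned illustration (a sketch,
# not the okean code itself):
def cell_interp(v00, v10, v11, v01, x, y, x0, x1, y0, y1):
    '''corners ordered (x0,y0),(x1,y0),(x1,y1),(x0,y1); point (x,y) inside the cell'''
    A, a = x - x0, x1 - x            # distances to the left/right edges
    B, b = y - y0, y1 - y            # distances to the bottom/top edges
    return (v00*a*b + v10*A*b + v11*A*B + v01*a*B) / (a*b + A*b + A*B + a*B)

# at the cell centre all four corners get the same weight:
print(cell_interp(0., 1., 2., 1., 0.5, 0.5, 0., 1., 0., 1.))   # -> 1.0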
Code example #45
0
File: gnome.py Project: sciencewiki/okean
def his2gnome(fname,
              his,
              grd=False,
              nomask=False,
              gshhsMask=True,
              xylim=False,
              dates=False,
              ij=(1, 1)):
    '''
  Creates a GNOME currents (u,v) file from a ROMS his file
  Ex:
    his2gnome(out,his,grd,dates=dates,ij=(2,2))

  if gshhsMask, the high res mask file mask_gshhs.npy will be created at 1st usage.
  Mask is based on high (h) resolution gshhs data which must be available (env variable
  GSHHS_MASK must be set). 
  '''

    if not grd: grd = his
    deta, dxi = ij

    dims = netcdf.fdim(his)
    xi, eta = dims['xi_rho'], dims['eta_rho']
    xi0, eta0 = xi, eta

    nc0 = netcdf.ncopen(his)
    time = netcdf.nctime(nc0, 'ocean_time')
    # for roms agrif:
    #t=netcdf.use(nc0,'scrum_time')
    #time=netcdf.num2date(t,'seconds since %d-01-01' % year0)

    x0 = netcdf.use(grd, 'lon_rho')
    y0 = netcdf.use(grd, 'lat_rho')
    ang = netcdf.use(grd, 'angle')

    if not xylim is False:
        xlim = xylim[:2]
        ylim = xylim[2:]
        i1, i2, j1, j2 = calc.ij_limits(x0, y0, xlim, ylim)
        print(i1, i2, j1, j2)
        xi = i2 - i1
        eta = j2 - j1
    else:
        i1, i2 = 0, xi
        j1, j2 = 0, eta

    XI = '%d:%d:%d' % (i1, i2, dxi)
    ETA = '%d:%d:%d' % (j1, j2, deta)

    xi = len(range(i1, i2, dxi))
    eta = len(range(j1, j2, deta))
    # create file:
    create_uv(fname, xi, eta)

    nc = netcdf.ncopen(fname, 'a')
    for v0, v in ('lon_rho', 'lon'), ('lat_rho', 'lat'), ('mask_rho', 'mask'), ('h', 'depth'):
        print('filling %s with %s' % (v, v0))
        nc.vars[v][:] = netcdf.use(grd, v0, xi_rho=XI, eta_rho=ETA)

    if nomask:
        print('NO MASK !!!')
        nc.vars['mask'][:] = 1

    if gshhsMask:
        try:
            mask = np.load('mask_gshhs.npy')
        except:
            mask = 1 + 0 * netcdf.use(nc0, 'mask_rho', xi_rho=XI, eta_rho=ETA)
            mask = mask.astype('bool')
            x = netcdf.use(grd, 'lon_rho', xi_rho=XI, eta_rho=ETA)
            y = netcdf.use(grd, 'lat_rho', xi_rho=XI, eta_rho=ETA)

            from okean import gshhs
            axis = x.min(), x.max(), y.min(), y.max()
            g = gshhs.gshhs(axis,
                            resolution='h',
                            area_thresh=0.,
                            max_level=2,
                            clip=True)
            for lon, lat, level in zip(g.lon, g.lat, g.level):
                if level == 1:  # land
                    print('mask ', lon.shape)
                    i = calc.inpolygon(x, y, lon, lat)
                    mask = mask & ~i

            mask.dump('mask_gshhs.npy')

        nc.vars['mask'][:] = mask

    x = x0[j1:j2:deta, i1:i2:dxi]
    y = y0[j1:j2:deta, i1:i2:dxi]
    ang = ang[j1:j2:deta, i1:i2:dxi]

    n = -1
    for it in range(len(time)):
        if not dates is False:
            d0, d1 = dates
            if time[it] < d0 or time[it] >= d1: continue

        n += 1
        U = np.zeros((eta0, xi0), 'f')
        V = np.zeros((eta0, xi0), 'f')

        nc.vars['time'][n] = netcdf.date2num(time[it], tunits)

        # for roms agrif:
        #u=netcdf.use(nc0,'u',time=it,s_rho=-1)
        #v=netcdf.use(nc0,'v',time=it,s_rho=-1)
        u = netcdf.use(nc0, 'u', ocean_time=it, s_rho=-1)
        v = netcdf.use(nc0, 'v', ocean_time=it, s_rho=-1)

        # mask extrap:
        print('mask extrap...')

        u = calc.mask_extrap(x0, y0, np.ma.masked_where(u == 0, u))
        v = calc.mask_extrap(x0, y0, np.ma.masked_where(v == 0, v))

        U[:, 1:-1] = 0.5 * (u[:, :-1] + u[:, 1:])
        U[:, 0] = u[:, 0]
        U[:, -1] = u[:, -1]

        V[1:-1, :] = 0.5 * (v[:-1, :] + v[1:, :])
        V[0, :] = v[0, :]
        V[-1, :] = v[-1, :]

        U = U[j1:j2:deta, i1:i2:dxi]
        V = V[j1:j2:deta, i1:i2:dxi]

        # rotate uv:
        print('rotating ...')
        U, V = calc.rot2d(U, V, -ang)

        print('filling uv', n, time[it])
        nc.vars['u'][n, ...] = U
        nc.vars['v'][n, ...] = V

    nc.close()
    nc0.close()
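# Minimal sketch of the two steps applied above before writing the GNOME file:
# average the Arakawa-C staggered u,v onto rho points, then rotate from grid
# (xi,eta) components to east/north using the grid angle. Synthetic arrays
# only; calc.rot2d is the okean helper already used in the listing above:
import numpy as np
from okean import calc

eta, xi = 6, 8
u = np.ones((eta, xi - 1))             # u lives on u-points
v = np.ones((eta - 1, xi))             # v lives on v-points
ang = np.full((eta, xi), np.pi / 6)    # grid angle [rad]

U = np.zeros((eta, xi), 'f')
V = np.zeros((eta, xi), 'f')
U[:, 1:-1] = 0.5 * (u[:, :-1] + u[:, 1:]); U[:, 0] = u[:, 0]; U[:, -1] = u[:, -1]
V[1:-1, :] = 0.5 * (v[:-1, :] + v[1:, :]); V[0, :] = v[0, :]; V[-1, :] = v[-1, :]

Ue, Vn = calc.rot2d(U, V, -ang)        # grid components -> east/north
print(Ue.shape, Vn.shape)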
Code example #46
0
File: era5.py Project: sciencewiki/okean
def era5_file_data(files, quiet=False):
    '''
  ECMWF ERA5 data for ROMS

  variables:
    # radiation:
      msdwlwrf  -  mean_surface_downward_long_wave_radiation_flux
      #msdwswrf -  mean_surface_downward_short_wave_radiation_flux -- not needed
      msnlwrf   -  mean_surface_net_long_wave_radiation_flux
      msnswrf   -  mean_surface_net_short_wave_radiation_flux
    # rain:
      mtpr      - mean_total_precipitation_rate
    # wind:
      u10       - 10m_u_component_of_wind
      v10       - 10m_v_component_of_wind
    # temp:
      t2m       - 2m_temperature
      d2m       - 2m_dewpoint_temperature (for relative humidity)
    # pres:
      sp        - surface_pressure
    # clouds:
      tcc       - total_cloud_cover
  '''

    ###  # some variables may have different names!
    ###  Vars={}
    ###  Vars['v10u']='v10u','u10'
    ###  Vars['v10v']='v10v','v10'
    ###  Vars['v2t']='v2t','t2m'
    ###  Vars['v2d']='v2d','d2m'
    ###
    ####  def find_v(name):
    ###    if name in Vars.keys():
    ###      for v in Vars[name]:
    ###        if varfile(v): return v
    ###    else: return name

    def varfile(var):
        for f in files:
            if var in netcdf.varnames(f): return f


###  def check_var_type(var):
###    # new interim dataserver provides forec+analysis vars with extra dim
###    # 'type', 0 or 1
###    if var.ndim==4:
###      if not quiet: print('      dealing with var type... '),
###      v=np.zeros(var.shape[1:],var.dtype)
###      v[::2]=var[0,::2,...]
###      v[1::2]=var[1,1::2,...]
###      var=v
###      if not quiet: print('done.')
###
###    return var

    out = {}

    # time:
    File = varfile('t2m')  # air temp, for instance
    if not quiet: print(' reading  time from file %s' % File)
    time = netcdf.nctime(File, 'time')
    # check if last time at 00h:
    if time[-1].hour != 0:
        dateEnd = datetime.datetime(time[-1].year, time[-1].month,
                                    time[-1].day) + datetime.timedelta(days=1)
        time = np.append(time, dateEnd)
    else:
        dateEnd = 0
    out['time'] = time

    ###  # all times from analysis file, except last ind which will be
    ######  # the last time of forecast file
    ###  aFile=varfile(find_v('v2t')) # air temp, for instance
    ######  fFile=varfile(find_v('ssr')) # sw rad, for instance
    ###  if not quiet: print(' reading "analysis" time from file %s' % aFile)
    ###  aTime=netcdf.nctime(aFile,'time')
    ###  aTime.sort() # analysis+forecast files may not have time sorted!!
    ###  if not quiet: print(' reading "forecast" time from file %s' % fFile)
    ###  fTime=netcdf.nctime(fFile,'time')
    ###  fTime.sort() # this one should be sorted...
    ###  time=np.append(aTime,fTime[-1])
    ###  out['time']=time
    ##
    #  # calc number of forecast steps stored,nforec (used by accum2avg)
    #  if [fTime[i].hour for i in range(8)]==range(3,22,3)+[0]: nforec=4
    #  elif [fTime[i].hour for i in range(4)]==range(6,19,6)+[0]: nforec=2
    #  else:
    #    if not quiet: print('INTERIM WRONG TIME: cannot n forec steps')
    #    return
    #
    ###  if not quiet: print(' ==> n forecast steps = %d' % nforec)

    # x,y:
    if not quiet: print(' reading x,y from file %s' % File)
    x = netcdf.use(File, 'longitude')
    y = netcdf.use(File, 'latitude')
    x[x > 180] = x[x > 180] - 360
    if x.ndim == 1 and y.ndim == 1:
        x, y = np.meshgrid(x, y)

    # tair [K-->C]
    if not quiet: print(' --> T air')
    vname = 't2m'
    f = varfile(vname)
    tair = netcdf.use(f, vname) - 273.15
    if dateEnd:
        if not quiet: print('      fill_tend...')
        tair = fill_tend(tair)

    out['tair'] = Data(x, y, tair, 'Celsius')

    # R humidity [0--1]
    if not quiet: print(' --> R humidity (from T dew)')
    vname = 'd2m'
    f = varfile(vname)
    Td = netcdf.use(f, vname) - 273.15
    if dateEnd:
        if not quiet: print('      fill_tend... (T dew)')
        Td = fill_tend(Td)

    T = tair
    rhum = air_sea.relative_humidity(T, Td)
    rhum[rhum > 1] = 1
    out['rhum'] = Data(x, y, rhum, '0--1')

    # surface pressure [Pa]
    if not quiet: print(' --> Surface pressure')
    vname = 'sp'
    f = varfile(vname)
    pres = netcdf.use(f, vname)
    if dateEnd:
        if not quiet: print('      fill_tend...')
        pres = fill_tend(pres)

    out['pres'] = Data(x, y, pres, 'Pa')

    def avg_fix_time(v, DT):
        '''Fix data to right time in avg rate fields (ie, prev half hour to now)
       See:
       https://confluence.ecmwf.int/display/CKB/ERA5+data+documentation#ERA5datadocumentation-Meanratesandaccumulations
    '''
        DTstep = 1
        a = DTstep / 2.
        b = DT - a
        u = np.zeros_like(v)
        u[:-1] = (v[:-1] * b + v[1:] * a) / (a + b)
        # last one lost, use prev value (from 30 min before)
        u[-1] = v[-1]
        return u

    DT = (time[1] - time[0]).total_seconds() / 3600.  # hours

    # P rate [kg m-2 s-1 --> cm day-1]
    if not quiet: print(' --> P rate')
    vname = 'mtpr'
    f = varfile(vname)
    prate = netcdf.use(f, vname)
    if not quiet: print('      avg_fix_time - DTstep=1h - DT=%.2f h' % DT)
    prate = avg_fix_time(prate, DT)
    if dateEnd:
        if not quiet: print('      fill_tend...')
        prate = fill_tend(prate)

    conv = 100 * 86400 / 1000.  # from kg m-2 s-1 --> cm day-1
    prate = prate * conv  # cm day-1
    prate[prate < 0] = 0
    out['prate'] = Data(x, y, prate, 'cm day-1')

    #  dt=(TIMe[1]-time[0]).total_seconds()/3600.
    #  if not quiet: print('      accum to avg at correct time - DTstep=1h - DT=%.2f h'%DT)
    #  prate2=accum2avg(prate,DT)
    #  prate2=prate2*1000
    #  return prate,prate2
    #  prate=check_var_type(prate)
    #  if not quiet and np.any(sortInds!=range(len(sortInds))): print('      sort DONE')
    #  if not quiet: print('      accum2avg...')
    #  prate=accum2avg(prate,nforec)
    #  conv= 100*86400       # from m s-1      --> cm day-1
    #  #conv= 100*86400/1000. # from kg m-2 s-1 --> cm day-1
    #  prate=prate*conv # cm day-1
    #  if not quiet: print('      fill_t0...')
    #  prate=fill_t0(prate)
    #  prate[prate<0]=0
    #  out['prate']=Data(x,y,prate,'cm day-1')
    #
    #  return out

    # Net shortwave flux  [W m-2 --> W m-2]
    if not quiet: print(' --> Net shortwave flux')
    vname = 'msnswrf'
    f = varfile(vname)
    sw_net = netcdf.use(f, vname)
    if not quiet: print('      avg_fix_time - DTstep=1h - DT=%.2f h' % DT)
    sw_net = avg_fix_time(sw_net, DT)
    if dateEnd:
        if not quiet: print('      fill_tend...')
        sw_net = fill_tend(sw_net)

    out['radsw'] = Data(x, y, sw_net, 'W m-2', info='positive downward')

    # Net longwave flux  [W m-2 --> W m-2]
    if not quiet: print(' --> Net longwave flux')
    vname = 'msnlwrf'
    f = varfile(vname)
    lw_net = netcdf.use(f, vname) * -1  # positive upward (*-1)
    # here vars have roms-agrif signs --> radlw is positive upward!
    # conversion to ROMS is done in surface.py
    if not quiet: print('      avg_fix_time - DTstep=1h - DT=%.2f h' % DT)
    lw_net = avg_fix_time(lw_net, DT)
    if dateEnd:
        if not quiet: print('      fill_tend...')
        lw_net = fill_tend(lw_net)

    out['radlw'] = Data(x, y, lw_net, 'W m-2', info='positive upward')

    # longwave down:
    if not quiet: print(' --> Down longwave flux')
    vname = 'msdwlwrf'
    f = varfile(vname)
    lw_down = netcdf.use(f, vname) * -1  # positive upward (*-1)
    if not quiet: print('      avg_fix_time - DTstep=1h - DT=%.2f h' % DT)
    lw_down = avg_fix_time(lw_down, DT)
    if dateEnd:
        if not quiet: print('      fill_tend...')
        lw_down = fill_tend(lw_down)

    out['dlwrf'] = Data(x, y, lw_down, 'W m-2', info='positive upward')

    # U and V wind speed 10m
    if not quiet: print(' --> U and V wind')
    vname = 'u10'
    f = varfile(vname)
    uwnd = netcdf.use(f, vname)
    vname = 'v10'
    f = varfile(vname)
    vwnd = netcdf.use(f, vname)
    if dateEnd:
        if not quiet: print('      fill_tend...')
        uwnd = fill_tend(uwnd)
        vwnd = fill_tend(vwnd)

    out['uwnd'] = Data(x, y, uwnd, 'm s-1')
    out['vwnd'] = Data(x, y, vwnd, 'm s-1')
    # speed and stress:
    if 0:
        if not quiet: print(' --> calc wind speed and stress')
        speed = np.sqrt(uwnd**2 + vwnd**2)
        taux, tauy = air_sea.wind_stress(uwnd, vwnd)
        out['wspd'] = Data(x, y, speed, 'm s-1')
        out['sustr'] = Data(x, y, taux, 'Pa')
        out['svstr'] = Data(x, y, tauy, 'Pa')

    # Cloud cover [0--1]:
    if not quiet: print(' --> Cloud cover')
    vname = 'tcc'
    f = varfile(vname)
    clouds = netcdf.use(f, vname)
    if dateEnd:
        if not quiet: print('      fill_tend...')
        clouds = fill_tend(clouds)

    out['cloud'] = Data(x, y, clouds, 'fraction (0--1)')

    return out
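# The ERA5 and ERA-Interim readers above derive relative humidity from the 2 m
# temperature and dew point via air_sea.relative_humidity. One common
# approximation (the same one left commented out in the interim listing) is
# sketched below; it is an assumption for illustration, not necessarily what
# okean.air_sea implements:
import numpy as np

def rel_humidity_from_dewpoint(T, Td):
    '''T, Td in Celsius; returns relative humidity in the range 0--1'''
    rhum = ((112 - 0.1 * T + Td) / (112 + 0.9 * T)) ** 8
    return np.clip(rhum, 0., 1.)

print(rel_humidity_from_dewpoint(25., 20.))   # roughly 0.74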
Code example #47
0
File: cfsr.py Project: martalmeida/okean
def cfsr_file_data(files,quiet=False):
  '''
  Returns bulk data from CFSR files
  '''



  def load_time(f):
    time=np.array((),datetime.datetime)
    ff=glob.glob(f)
    ff.sort()
    for f in ff: time=np.append(time,netcdf.nctime(f,'time'))
    return time


  def load_time_main(f):
    time=load_time(f)
    # we want 0,6,12,...; after 2006, results may be at 3,9,15,...
    if time[0].hour in [3,9,15,21]: time=time+datetime.timedelta(hours=3)
    # for 2011 1st time is not 0!
    if time[0].hour==6: time=np.hstack((time[0].replace(hour=0),time))
    return time


  def fix_time(t,var,t0,t1):
    # convert 1h, 7h, ... to 0h, 6h, ...
    if t[0].hour in [1,7,13,19]: # not all! sp analysis starts at 0, 6,...!
      print('     1,7,... to 0,6,...')
      var=(var[1:]*5+var[:-1]*1)/6.
      t=t[1:]-datetime.timedelta(hours=1)
    elif t[0].hour in [3,9,15,21]:
      print('     3,9,... to 0,6,...')
      var=(var[1:]*3+var[:-1]*3)/6.
      t=t[1:]-datetime.timedelta(hours=3)
  
    cond=(t>=t0)&(t<=t1)
    t=t[cond]
    var=var[cond]

    if t[0]>t0:
      dt=t[0]-t0
      dt=dt.days*24+dt.seconds/3600. # hours
      print('missing data at start: %.2d h missing --> repeating 1st data'%dt)
      v=np.zeros((var.shape[0]+1,)+var.shape[1:],var.dtype)
      v[1:]=var
      v[0]=var[0]
      var=v
      t_=np.zeros((t.shape[0]+1,)+t.shape[1:],t.dtype)
      t_[1:]=t
      t_[0]=t0
      t=t_
      

    if t[-1]<t1:
      dt=t1-t[-1]
      dt=dt.days*24+dt.seconds/3600. # hours
      print('missing data at end: %.2d h missing --> repeating last data'%dt)
      v=np.zeros((var.shape[0]+1,)+var.shape[1:],var.dtype)
      v[:-1]=var
      v[-1]=var[-1]
      var=v
      t_=np.zeros((t.shape[0]+1,)+t.shape[1:],t.dtype)
      t_[:-1]=t
      t_[-1]=t1
      t=t_

    return var,t


  out={}

  # time:
  if 0:
    time=netcdf.nctime(files['cc'],'time')
    # files have different units!! so, cannot load all times at once!
    # this result would use only the units of the 1st file!!
  else:
    time=load_time_main(files['cc'])


  out['time']=time

  # T air [K->C]
  if not quiet: print(' --> T air')
  f=files['st']
  tair=netcdf.use(f,'TMP_L103')
  tair=tair-273.15
  x=netcdf.use(f,'lon'); x[x>180]=x[x>180]-360
  y=netcdf.use(f,'lat')
  x,y=np.meshgrid(x,y)
  # check time:
  ttmp=load_time(f)
  if ttmp.size==time.size and np.all(ttmp==time): print('    time ok')
  else:
    print('   time differs !!!!',)
    tair,tfix=fix_time(ttmp,tair,time[0],time[-1])
    if tfix.size==time.size and np.all(tfix==time):
      print(' ...fixed!')
    else:
      print('time is NOT OK. Please check !!')
      return
  out['tair']=Data(x,y,tair,'C')


  # R humidity [%-->0--1]
  if not quiet: print(' --> R humidity')
  f=files['rh']
  rhum=netcdf.use(f,'R_H_L103')
  rhum=rhum/100.
  x=netcdf.use(f,'lon'); x[x>180]=x[x>180]-360
  y=netcdf.use(f,'lat')
  x,y=np.meshgrid(x,y)
  # check time:
  ttmp=load_time(f)
  if ttmp.size==time.size and np.all(ttmp==time): print('    time ok')
  else:
    print('   time differs !!!!'), # should use end=' ' for python3 print continuation
    rhum,tfix=fix_time(ttmp,rhum,time[0],time[-1])
    if tfix.size==time.size and np.all(tfix==time): 
      print(' ...fixed!')
    else:
      print('time is NOT OK. Please check !!')
      return
  out['rhum']=Data(x,y,rhum,'0--1')


  # surface pressure [Pa]
  if not quiet: print(' --> Surface pressure')
  f=files['sp']
  pres=netcdf.use(f,'PRES_L1')
  x=netcdf.use(f,'lon'); x[x>180]=x[x>180]-360
  y=netcdf.use(f,'lat')
  x,y=np.meshgrid(x,y)
  # check time:
  ttmp=load_time(f)
  if ttmp.size==time.size and np.all(ttmp==time): print('    time ok')
  else:
    print('   time differs !!!!'),
    pres,tfix=fix_time(ttmp,pres,time[0],time[-1])
    if tfix.size==time.size and np.all(tfix==time):
      print(' ...fixed!')
    else:
      print('time is NOT OK. Please check !!')
      return
  out['pres']=Data(x,y,pres,'Pa')


  # P rate [kg m-2 s-1 -> cm/d]
  if not quiet: print(' --> P rate')
  f=files['pr']
  if 'PRATE_L1' in netcdf.varnames(f):
    prate=netcdf.use(f,'PRATE_L1')
  else:
    prate=netcdf.use(f,'PRATE_L1_Avg_1')
  x=netcdf.use(f,'lon'); x[x>180]=x[x>180]-360
  y=netcdf.use(f,'lat')
  x,y=np.meshgrid(x,y)
  # Conversion kg m^-2 s^-1  to cm/day
  prate=prate*86400*100/1000.
  prate=np.where(abs(prate)<1.e-4,0,prate)
  # check time:
  ttmp=load_time(f)
  if ttmp.size==time.size and np.all(ttmp==time): print('    time ok')
  else:
    print('   time differs !!!!'),
    prate,tfix=fix_time(ttmp,prate,time[0],time[-1])
    if tfix.size==time.size and np.all(tfix==time):
      print(' ...fixed!')
    else:
      print('time is NOT OK. Please check !!')
      return
  out['prate']=Data(x,y,prate,'cm/d')


  # Net shortwave flux  [W/m^2]
  if not quiet: print(' --> Net shortwave flux')
  if not quiet: print('       SW down')
  f=files['rad']
  sw_down=netcdf.use(f,'DSWRF_L1_Avg_1')
  x=netcdf.use(f,'lon'); x[x>180]=x[x>180]-360
  y=netcdf.use(f,'lat')
  x,y=np.meshgrid(x,y)
  if not quiet: print('       SW up')
  sw_up=netcdf.use(f,'USWRF_L1_Avg_1')
  sw_net=sw_down-sw_up
  sw_net=np.where(sw_net<1.e-10,0,sw_net)
  # check time:
  ttmp=load_time(f)
  if ttmp.size==time.size and np.all(ttmp==time): print('    time ok')
  else:
    print('   time differs !!!!'),
    sw_net,tfix=fix_time(ttmp,sw_net,time[0],time[-1])
    if tfix.size==time.size and np.all(tfix==time):
      print(' ...fixed!')
    else:
      print('time is NOT OK. Please check !!')
      return
  out['radsw']=Data(x,y,sw_net,'W m-2',info='positive downward')


  # Net longwave flux  [W/m^2]
  if not quiet: print(' --> Net longwave flux')
  if not quiet: print('       LW down')
  f=files['rad']
  lw_down=netcdf.use(f,'DLWRF_L1_Avg_1')
  x=netcdf.use(f,'lon'); x[x>180]=x[x>180]-360
  y=netcdf.use(f,'lat')
  x,y=np.meshgrid(x,y)
  if not quiet: print('       LW up')
  lw_up=netcdf.use(f,'ULWRF_L1_Avg_1')
  lw_net=lw_down-lw_up
  lw_net=np.where(np.abs(lw_net)<1.e-10,0,lw_net)
  # check time:
  ttmp=load_time(f)
  if ttmp.size==time.size and np.all(ttmp==time): print('    time ok')
  else:
    print('   time differs !!!!'),
    lw_net,tfix1=fix_time(ttmp,lw_net,time[0],time[-1])
    lw_down,tfix2=fix_time(ttmp,lw_down,time[0],time[-1])
    if  tfix1.size==tfix2.size==time.size and np.all((tfix1==time)&(tfix2==time)):
      print(' ...fixed!')
    else:
      print('time is NOT OK. Please check !!')
      return
  # ROMS (agrif, used to be!) convention: positive upward
  out['radlw']=Data(x,y,-lw_net,'W m-2',info='positive upward')
  # downward lw:
  out['dlwrf']=Data(x,y,-lw_down,'W m-2',info='negative... downward')
  # signs convention is better explained in wrf.py


  # U and V wind speed 10m
  if not quiet: print(' --> U and V wind')
  f=files['uv']
  uwnd=netcdf.use(f,'U_GRD_L103')
  vwnd=netcdf.use(f,'V_GRD_L103')
  x=netcdf.use(f,'lon'); x[x>180]=x[x>180]-360
  y=netcdf.use(f,'lat')
  x,y=np.meshgrid(x,y)
  # check time:
  ttmp=load_time(f)
  if ttmp.size==time.size and np.all(ttmp==time): print('    time ok')
  else:
    print('   time differs !!!!'),
    uwnd,tfix1=fix_time(ttmp,uwnd,time[0],time[-1])
    vwnd,tfix2=fix_time(ttmp,vwnd,time[0],time[-1])
    if  tfix1.size==tfix2.size==time.size and np.all((tfix1==time)&(tfix2==time)):
      print(' ...fixed!')
    else:
      print('time is NOT OK. Please check !!')
      return
  #
  if not quiet: print(' --> calc wind speed and stress')
  speed = np.sqrt(uwnd**2+vwnd**2)
  taux,tauy=air_sea.wind_stress(uwnd,vwnd)

  out['wspd']=Data(x,y,speed,'m s-1')
  out['uwnd']=Data(x,y,uwnd,'m s-1')
  out['vwnd']=Data(x,y,vwnd,'m s-1')
  out['sustr']=Data(x,y,taux,'Pa')
  out['svstr']=Data(x,y,tauy,'Pa')


  # Cloud cover [0--100 --> 0--1]:
  if not quiet: print(' --> Cloud cover')
  f=files['cc']
  if 'T_CDC_L200' in netcdf.varnames(f):
    clouds=netcdf.use(f,'T_CDC_L200')
  else:
    clouds=netcdf.use(f,'T_CDC_L200_Avg_1')
  x=netcdf.use(f,'lon'); x[x>180]=x[x>180]-360
  y=netcdf.use(f,'lat')
  x,y=np.meshgrid(x,y)
  clouds=clouds/100.
  # check time:
  ttmp=load_time(f)
  if ttmp.size==time.size and np.all(ttmp==time): print('    time ok')
  else:
    print('   time differs !!!!'),
    clouds,tfix=fix_time(ttmp,clouds,time[0],time[-1])
    if tfix.size==time.size and np.all(tfix==time):
      print(' ...fixed!')
    else:
      print('time is NOT OK. Please check !!')
      return
  out['cloud']=Data(x,y,clouds,'fraction (0--1)')

  # rhum has a different resolution (0.5, just like dew point!)
  # so, either edit surface.py or simply interpolate rhum here to
  # the resolution of the other vars:
  if out['rhum'].data.shape!=out['uwnd'].data.shape:
    from okean import calc
    print('rhum shape differs!! --> interp:')
    nt,ny,nx=out['uwnd'].data.shape
    x,y=out['uwnd'].x,out['uwnd'].y
    rhum=np.zeros((nt,ny,nx), out['rhum'].data.dtype)
    for it in range(nt):
      if it%100==0: print('  %d of %d'%(it,nt))
      rhum[it]=calc.griddata(out['rhum'].x,out['rhum'].y,out['rhum'].data[it],x,y)

    out['rhum']=Data(x,y,rhum,'0--1')


  return out
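# Toy illustration of the time shift done by fix_time above: CFSR series
# stamped at 03,09,15,21h are moved to 00,06,12,18h by averaging consecutive
# samples (equal weights for the +/-3 h case, 5:1 weights for the +/-1 h case)
# and shifting the stamps back 3 h; the endpoint padding of fix_time is
# omitted here:
import datetime
import numpy as np

t0 = datetime.datetime(2012, 1, 1, 3)
t = np.array([t0 + datetime.timedelta(hours=6 * k) for k in range(4)])  # 03,09,15,21h
var = np.array([1., 3., 5., 7.])

var_fix = (var[1:] * 3 + var[:-1] * 3) / 6.    # -> [2., 4., 6.]
t_fix = t[1:] - datetime.timedelta(hours=3)    # -> 06,12,18h
print(t_fix[0].hour, var_fix)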
Code example #48
0
def wrf_file_data(file, quiet=False):
    '''
  WRF data for ROMS

  '''

    out = {}

    # time:
    if not quiet: print(' --> get time')
    time = read_time(file)

    out['time'] = time

    # lon,lat:
    if not quiet: print(' --> reading x,y')
    x = netcdf.use(file, 'XLONG', Time=0)  #**{'0': 0})
    y = netcdf.use(file, 'XLAT', Time=0)  #**{'0': 0})

    # tair [K-->C]
    if not quiet: print(' --> T air')
    tair = netcdf.use(file, 'T2') - 273.15
    out['tair'] = Data(x, y, tair, 'Celsius')

    # R humidity [kg/kg --> 0--1]
    if not quiet: print(' --> R humidity from QV at 2m')
    wv = netcdf.use(file, 'Q2')  # water vapor mixing ratio at 2m
    rhum = wv / air_sea.qsat(tair)
    rhum[rhum > 1] = 1
    out['rhum'] = Data(x, y, rhum, '0--1')

    # surface pressure [Pa]
    if not quiet: print(' --> Surface pressure')
    pres = netcdf.use(file, 'PSFC')
    out['pres'] = Data(x, y, pres, 'Pa')

    # P rate [mm --> cm day-1]
    if not quiet: print(' --> P rate (rainc+rainnc)')
    rainc = netcdf.use(file, 'RAINC')
    rainnc = netcdf.use(file, 'RAINNC')
    prate = rainc + rainnc
    if not quiet: print('      accum2avg...')
    prate = accum2avg(prate, dt=time[1] - time[0])  # mm s-1
    conv = 0.1 * 86400  # from mm s-1      --> cm day-1
    prate = prate * conv  # cm day-1
    prate[prate < 0] = 0  # interpolation errors may result in negative rain!
    out['prate'] = Data(x, y, prate, 'cm day-1')

    # LW, SW, latent, sensible signs:
    # positive (downward flux, heating) or negative (upward flux, cooling)
    #https://www.myroms.org/forum/viewtopic.php?f=1&t=2621

    # Net shortwave flux  [W m-2]
    if not quiet: print(' --> Net shortwave flux')
    sw_down = netcdf.use(file, 'SWDOWN')
    albedo = netcdf.use(file, 'ALBEDO')
    sw_net = sw_down * (1 - albedo)
    out['radsw'] = Data(x, y, sw_net, 'W m-2', info='positive downward')

    # Net longwave flux  [W m-2]
    if not quiet: print(' --> Net longwave flux')
    lw_down = netcdf.use(file, 'GLW')  # positive
    # sst needed:
    if not quiet: print('     --> SST for LW up')
    sst = netcdf.use(file, 'SST')  # K
    lw_net = air_sea.lwhf(sst, lw_down)  # positive down
    # here vars have roms-agrif signs --> radlw is positive upward!
    #conversion to ROMS is done in surface.py
    out['radlw'] = Data(x, y, -lw_net, 'W m-2', info='positive upward')
    out['dlwrf'] = Data(x, y, -lw_down, 'W m-2', info='positive upward')

    # U and V wind speed 10m
    if not quiet: print(' --> U and V wind')
    uwnd = netcdf.use(file, 'U10')
    vwnd = netcdf.use(file, 'V10')
    if not quiet: print(' --> calc wind speed and stress')
    speed = np.sqrt(uwnd**2 + vwnd**2)
    taux, tauy = air_sea.wind_stress(uwnd, vwnd)

    out['wspd'] = Data(x, y, speed, 'm s-1')
    out['uwnd'] = Data(x, y, uwnd, 'm s-1')
    out['vwnd'] = Data(x, y, vwnd, 'm s-1')
    out['sustr'] = Data(x, y, taux, 'Pa')
    out['svstr'] = Data(x, y, tauy, 'Pa')

    # Cloud cover [0--1]:
    if not quiet:
        print(' --> Cloud cover for LONGWAVE. Use LONGWAVE_OUT instead...')
    if 0:
        pass


# next code is wrong! If cloud cover is really needed, it needs to be calculated using wrfpost.
# See http://www2.mmm.ucar.edu/wrf/users/docs/user_guide/users_guide_chap8.html#_ARWpost_1
#
#  if 'CLDFRA' in netcdf.varnames(file):
#    clouds=netcdf.use(file,'CLDFRA').sum(-3)
#    clouds=np.where(clouds>1,1,clouds)
    else:
        if not quiet:
            print('CLDFRA not found!! Using SST and air_sea.cloud_fraction')
        sst = netcdf.use(file, 'SST') - 273.15
        clouds = air_sea.cloud_fraction(lw_net, sst, tair, rhum, Wtype='net')
        clouds[clouds < 0] = 0
        clouds[clouds > 1] = 1

    out['cloud'] = Data(x, y, clouds, 'fraction (0--1)')

    return out
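# Sketch of the WRF rain handling above: RAINC+RAINNC are rain accumulated (mm)
# since the start of the simulation, so the rate is the forward difference
# divided by the output interval, then converted from mm s-1 to cm day-1
# (factor 0.1*86400). This mirrors what accum2avg is used for above, but it is
# not the okean implementation:
import numpy as np

dt = 3 * 3600.                               # output interval [s], example value
rain_acc = np.array([0., 1.5, 4.5, 4.5])     # accumulated rain [mm]

rate_mm_s = np.diff(rain_acc) / dt           # mm s-1 over each interval
prate = rate_mm_s * 0.1 * 86400              # cm day-1
prate[prate < 0] = 0                         # guard against interpolation noise
print(prate)                                 # [1.2 2.4 0. ]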
Code example #49
0
File: roms.py Project: moghimis/okean
  def use(self,varname,**kargs): return netcdf.use(self.nc,varname,**kargs)

  def hasz(self,v):