Example #1
def load_blkdata_wrf(wrfpath, wrffiles="wrfout*", date0=False, date1=False, quiet=True):
    from okean.datasets import wrf

    a = wrf.WRFData(wrfpath, wrffiles)
    data = a.data(date0, date1, quiet)

    out = cb.odict()
    if not data:
        print "no data found !"
        return out

    time = data["time"]
    for it in range(len(time)):
        # be sure time increases!
        if out.keys() and time[it] <= out.keys()[-1]:
            continue

        out[time[it]] = {}
        for k in data.keys():

            if k in ("time",):
                continue
            elif k.startswith("INFO"):
                out[time[it]][k] = data[k]
            else:
                out[time[it]][k] = data[k].data[it, ...]

                # x and y should be the same, so:
                if not out[time[it]].has_key("x"):
                    out[time[it]]["x"] = data[k].x
                    out[time[it]]["y"] = data[k].y

    return out
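A minimal usage sketch (the WRF path below is a placeholder): the returned odict maps each datetime to a dict holding the bulk variables, any INFO_* entries and the shared 'x'/'y' coordinate arrays.

blk = load_blkdata_wrf('/path/to/wrf', wrffiles='wrfout*', quiet=False)  # hypothetical path
for date in blk.keys():
    print(date)
    print(sorted(blk[date].keys()))   # variables + INFO_* + 'x', 'y'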
Example #2
def load_blkdata_gfs(gfspath, date0, date1=False, nforec=0, quiet=True):
    """
  nforec=0 means not forecast, auto means default from config file
  """
    from okean.datasets import gfs

    a = gfs.GFSData(gfspath)
    if nforec == 0:
        data, miss = a.data_analysis(date0, date1, quiet=quiet)
    else:
        data, miss = a.data_forecast(date0, nforec, quiet=quiet)

    out = cb.odict()
    time = data.keys()
    for t in time:
        out[t] = {}
        for k in data[t].keys():

            if k.startswith("INFO"):
                out[t][k] = data[t][k]
            else:
                out[t][k] = data[t][k].data

                # x and y may change with time, but should be the same for each time (gfs file)
                if not out[t].has_key("x"):
                    out[t]["x"] = data[t][k].x
                    out[t]["y"] = data[t][k].y

    return out, miss
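A hedged sketch of the two branches above (the path, dates and forecast length are placeholders): nforec=0 calls data_analysis between date0 and date1, while a nonzero nforec calls data_forecast starting at date0.

import datetime

d0 = datetime.datetime(2012, 1, 1)    # hypothetical dates
d1 = datetime.datetime(2012, 1, 3)
analysis, miss = load_blkdata_gfs('/path/to/gfs', d0, d1, nforec=0)  # analysis branch
forecast, miss = load_blkdata_gfs('/path/to/gfs', d0, nforec=8)      # forecast branch (placeholder nforec)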
Example #3
def load_blkdata_interim(interimpath,
                         date0=False,
                         date1=False,
                         quiet=True,
                         past=False):
    '''
  If past, then module interim_past will be used to extract data from
  old interim server. Otherwise interim module will be used.
  '''
    if past:
        from okean.datasets import interim_past as interim
    else:
        from okean.datasets import interim

    a = interim.INTERIMData(interimpath)
    data = a.data(date0, date1, quiet)

    out = cb.odict()
    time = data['time']
    for it in range(len(time)):
        out[time[it]] = {}
        for k in data.keys():

            if k in ('time', ): continue
            elif k.startswith('INFO'):
                out[time[it]][k] = data[k]
            else:
                out[time[it]][k] = data[k].data[it, ...]

                # x and y should be the same, so:
                if not out[time[it]].has_key('x'):
                    out[time[it]]['x'] = data[k].x
                    out[time[it]]['y'] = data[k].y

    return out
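A short sketch of the past flag (the ERA-Interim path is a placeholder): past=False uses okean.datasets.interim, while past=True switches to okean.datasets.interim_past for data on the old server.

recent = load_blkdata_interim('/path/to/interim')             # interim module
older = load_blkdata_interim('/path/to/interim', past=True)   # interim_past module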
Example #4
  def __dims(self):
    '''Returns variable dimensions dict'''
    d=cbt.odict()
    if   self._interface=='pycdf': names = self._nc.dimensions()
    elif self._interface=='scientific': names = self._nc.dimensions
    elif self._interface=='netcdf4': names = self._nc.dimensions

    if self._interface in ('pycdf','scientific'):
      for n in names: d[n]=self.parent.dims[n]

    # In netcdf 4, dimensions are scoped such that they can be seen
    # in all descendant groups. That is, dimensions can be shared
    # between variables in different groups, if they are defined in
    # a parent group.

    elif self._interface == 'netcdf4':
      for n in names:
        p=self.parent
        while True:
          if p.dims.has_key(n):
            d[n]=p.dims[n]
            break
          if p.parent: p=p.parent
          else: break

    return d
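The netcdf4 scoping described in the comment can be reproduced with a small standalone netCDF4 sketch (file, group and variable names are illustrative): a dimension created in the root group is visible to variables in child groups, which is why the lookup above walks up through parent groups.

from netCDF4 import Dataset

nc = Dataset('scoping_demo.nc', 'w')   # illustrative file name
nc.createDimension('time', 4)          # defined in the root group
child = nc.createGroup('child')
t2m = child.createVariable('t2m', 'f4', ('time',))  # uses the parent's dimension
print(t2m.dimensions)                  # ('time',)
nc.close()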
Example #5
    def __dims(self):
        '''Returns variable dimensions dict'''
        d = cbt.odict()
        if self._interface == 'pycdf': names = self._nc.dimensions()
        elif self._interface == 'scientific': names = self._nc.dimensions
        elif self._interface == 'netcdf4': names = self._nc.dimensions

        if self._interface in ('pycdf', 'scientific'):
            for n in names:
                d[n] = self.parent.dims[n]

        # In netcdf 4, dimensions are scoped such that they can be seen
        # in all descendant groups. That is, dimensions can be shared
        # between variables in different groups, if they are defined in
        # a parent group.

        elif self._interface == 'netcdf4':
            for n in names:
                p = self.parent
                while True:
                    if p.dims.has_key(n):
                        d[n] = p.dims[n]
                        break
                    if p.parent: p = p.parent
                    else: break

        return d
Example #6
    def __dims(self):
        '''Returns file dimensions dict'''
        dims = cbt.odict()
        if self._interface == 'pycdf':
            for i in range(self._nc.inq_ndims()):
                dims[self._nc.dim(i).inq()[0]] = self._nc.dim(i).inq()[1]
        elif self._interface == 'scientific':
            # get ordered dims:
            if not self._ncdump_info is False:
                odims = self._ncdump_info['dimensions']
            else:
                odims = False

            if not odims: odims = self._nc.dimensions.keys()
            for k in odims:
                dims[k] = self._nc.dimensions[k]
        elif self._interface == 'netcdf4':
            # get ordered dims:
            if not self._ncdump_info is False:
                odims = self._ncdump_info['dimensions']
            else:
                odims = False

            if not odims: odims = self._nc.dimensions.keys()
            for k in odims:
                dims[k] = len(self._nc.dimensions[k])

        return dims
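In the netcdf4 branch above the dimension objects must be passed through len() to get their size, unlike the scientific branch where the mapping values are used directly; a tiny netCDF4 sketch (file and dimension names are illustrative):

from netCDF4 import Dataset

nc = Dataset('dims_demo.nc', 'w')      # illustrative file name
nc.createDimension('lon', 144)
nc.createDimension('lat', 73)
for name in nc.dimensions:
    print('%s = %d' % (name, len(nc.dimensions[name])))
nc.close()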
Example #7
def load_blkdata_interim(interimpath, date0=False, date1=False, quiet=True, past=False):
    """
  If past, then module interim_past will be used to extract data from
  old interim server. Otherwise interim module will be used.
  """
    if past:
        from okean.datasets import interim_past as interim
    else:
        from okean.datasets import interim

    a = interim.INTERIMData(interimpath)
    data = a.data(date0, date1, quiet)

    out = cb.odict()
    time = data["time"]
    for it in range(len(time)):
        out[time[it]] = {}
        for k in data.keys():

            if k in ("time",):
                continue
            elif k.startswith("INFO"):
                out[time[it]][k] = data[k]
            else:
                out[time[it]][k] = data[k].data[it, ...]

                # x and y should be the same, so:
                if not out[time[it]].has_key("x"):
                    out[time[it]]["x"] = data[k].x
                    out[time[it]]["y"] = data[k].y

    return out
Example #8
def load_blkdata_wrf(wrfpath,
                     wrffiles='wrfout*',
                     date0=False,
                     date1=False,
                     quiet=True):
    from okean.datasets import wrf
    a = wrf.WRFData(wrfpath, wrffiles)
    data = a.data(date0, date1, quiet)

    out = cb.odict()
    if not data:
        print('no data found !')
        return out

    time = data['time']
    for it in range(len(time)):
        # be sure time increases!
        if out.keys() and time[it] <= out.keys()[-1]: continue

        out[time[it]] = {}
        for k in data.keys():

            if k in ('time', ): continue
            elif k.startswith('INFO'):
                out[time[it]][k] = data[k]
            else:
                out[time[it]][k] = data[k].data[it, ...]

                # x and y should be the same, so:
                if not out[time[it]].has_key('x'):
                    out[time[it]]['x'] = data[k].x
                    out[time[it]]['y'] = data[k].y

    return out
Example #9
def load_blkdata_cfsr(cfsrpath, date0=False, date1=False, quiet=True):
    from okean.datasets import cfsr

    a = cfsr.CFSRData(cfsrpath)
    data = a.data(date0, date1, quiet)

    out = cb.odict()
    time = data["time"]
    for it in range(len(time)):
        out[time[it]] = {}
        for k in data.keys():

            if k in ("time",):
                continue
            elif k.startswith("INFO"):
                out[time[it]][k] = data[k]
            else:
                out[time[it]][k] = data[k].data[it, ...]

                # x and y should be the same, so:
                if not out[time[it]].has_key("x"):
                    out[time[it]]["x"] = data[k].x
                    out[time[it]]["y"] = data[k].y

    return out
Example #10
def load_blkdata_gfs(gfspath, date0, date1=False, nforec=0, quiet=True):
    '''
  nforec=0 means not forecast, auto means default from config file
  '''
    from okean.datasets import gfs
    a = gfs.GFSData(gfspath)
    if nforec == 0:
        data, miss = a.data_analysis(date0, date1, quiet=quiet)
    else:
        data, miss = a.data_forecast(date0, nforec, quiet=quiet)

    out = cb.odict()
    time = data.keys()
    for t in time:
        out[t] = {}
        for k in data[t].keys():

            if k.startswith('INFO'):
                out[t][k] = data[t][k]
            else:
                out[t][k] = data[t][k].data

                # x and y may change with time, but should be the same for each time (gfs file)
                if not out[t].has_key('x'):
                    out[t]['x'] = data[t][k].x
                    out[t]['y'] = data[t][k].y

    return out, miss
Example #11
  def __groups(self):
    out=cbt.odict()
    if self._interface=='netcdf4':
      gs=self._nc.groups
      try: gnames=gs.keys()
      except: gnames=()
      for k in gnames: out[k]=Pyncgroup(nc=gs[k],name=k,parent=self)
    else:
      print ':: groups only implemented in interface ','netcdf4'

    return out
Example #12
  def read_log(f):
    out=cb.odict()
    if os.path.isfile(f):
      L=open(f).readlines()
      for l in L:
        tmp=l.split(' ',1)
        sdate=tmp[0]
        scontents=tmp[1].rstrip().split(' + ')
        out[tmp[0]]=scontents

    return out
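From the parsing above, each log line is expected to begin with a date token followed by ' + '-separated entries; a hedged illustration of that format (the contents are invented):

line = '20120101 frc + bulk\n'
tmp = line.split(' ', 1)
print(tmp[0])                          # '20120101'
print(tmp[1].rstrip().split(' + '))    # ['frc', 'bulk']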
Example #13
    def read_log(f):
        out = cb.odict()
        if os.path.isfile(f):
            L = open(f).readlines()
            for l in L:
                tmp = l.split(' ', 1)
                sdate = tmp[0]
                scontents = tmp[1].rstrip().split(' + ')
                out[tmp[0]] = scontents

        return out
Example #14
    def __atts(self):
        '''Returns netcdf ordered attributes, with types and values'''
        att = cbt.odict()
        if self._interface == 'pycdf':
            for i in range(self._nc.inq_natts()):
                a = self._nc.attr(i)
                # ps: a.get() produces a segmentation fault for large
                # strings, so:
                L = a.inq_len()
                if L >= 250: attvalue = 'PYCDF ERROR: CANNOT READ !!!!'
                else: attvalue = a.get()

                # find nctype:
                nctype = nctypes.type_pycdf2nc(a.inq_type())

                att[a.inq_name()] = Pyncatt(self._nc,
                                            a.inq_name(),
                                            attvalue,
                                            nctype=nctype,
                                            ncversion=self.ncversion,
                                            interface=self._interface)

        elif self._interface == 'scientific':
            a = self._nc.__dict__
            # get ordered atts:
            if not self._ncdump_info is False:
                oatts = self._ncdump_info['vattributes'][self.varname]
            else:
                oatts = False

            if oatts: keys = oatts
            else: keys = a.keys()

            for k in keys:
                attvalue = a[k]
                att[k] = Pyncatt(self._nc,
                                 k,
                                 attvalue,
                                 ncversion=self.ncversion,
                                 interface=self._interface)

        elif self._interface == 'netcdf4':
            aname = self._nc.ncattrs()  # already ordered!
            for a in aname:
                attvalue = getattr(self._nc, a)
                att[a] = Pyncatt(self._nc,
                                 a,
                                 attvalue,
                                 ncversion=self.ncversion,
                                 interface=self._interface)
        return att
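For the netcdf4 branch, attribute names come from ncattrs() and values from getattr; a minimal standalone sketch (file name and attributes are illustrative):

from netCDF4 import Dataset

nc = Dataset('atts_demo.nc', 'w')          # illustrative file name
nc.title = 'demo'                          # global attributes set for illustration
nc.history = 'created as an example'
for name in nc.ncattrs():                  # names in creation order
    print('%s = %s' % (name, getattr(nc, name)))
nc.close()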
Example #15
    def __groups(self):
        out = cbt.odict()
        if self._interface == 'netcdf4':
            gs = self._nc.groups
            try:
                gnames = gs.keys()
            except:
                gnames = ()
            for k in gnames:
                out[k] = Pyncgroup(nc=gs[k], name=k, parent=self)
        else:
            print ':: groups only implemented in interface ', 'netcdf4'

        return out
Example #16
  def __vars(self):
    '''Returns file variables'''
    var=cbt.odict()
    if self._interface=='pycdf':
      names=[self._nc.var(i).inq_name() for i in range(self._nc.inq_nvars())]
    elif self._interface in ('scientific','netcdf4'):
      # get ordered names:
      if not self._ncdump_info is False:
        onames=self._ncdump_info['variables'].keys()
      else: onames=False

      if onames: names=onames
      else: names=self._nc.variables.keys()

    for vname in names: var[vname]=Pyncvar(self,vname)
    return var
Example #17
def list_files(cf,date1,date2,io,type,FA,nest=0,quiet=True):
  files=cb.odict()
  date=date1
  while date<=date2:
    f=opt.nameof(io,type,date,FA,nest,cf)
    if os.path.isfile(f): files[date]=f
    else: files[date]=False

    date=dateu.next_date(date,1)

  if not quiet:
    for d in files.keys():
      if files[d]:
        f=files[d]
        hs=cb.hsize(os.path.getsize(f))
        print d,' ', f, ' ',hs[0],hs[1]
      else:
        print d,' no file'

  return files
Example #18
def load_blkdata_narr(date0, date1, quiet=True):
    from okean.datasets import narr
    a = narr.NARRData()
    data, miss = a.data(date0, date1, quiet=quiet)

    out = cb.odict()
    time = data.keys()
    for t in time:
        out[t] = {}
        for k in data[t].keys():

            if k.startswith('INFO'):
                out[t][k] = data[t][k]
            else:
                out[t][k] = data[t][k].data
                if not out[t].has_key('x'):
                    out[t]['x'] = data[t][k].x
                    out[t]['y'] = data[t][k].y

    return out, miss
Example #19
def list_files(cf, date1, date2, io, type, FA, nest=0, quiet=True):
    files = cb.odict()
    date = date1
    while date <= date2:
        f = opt.nameof(io, type, date, FA, nest, cf)
        if os.path.isfile(f): files[date] = f
        else: files[date] = False

        date = dateu.next_date(date, 1)

    if not quiet:
        for d in files.keys():
            if files[d]:
                f = files[d]
                hs = cb.hsize(os.path.getsize(f))
                print d, ' ', f, ' ', hs[0], hs[1]
            else:
                print d, ' no file'

    return files
Example #20
    def __vars(self):
        '''Returns file variables'''
        var = cbt.odict()
        if self._interface == 'pycdf':
            names = [
                self._nc.var(i).inq_name() for i in range(self._nc.inq_nvars())
            ]
        elif self._interface in ('scientific', 'netcdf4'):
            # get ordered names:
            if not self._ncdump_info is False:
                onames = self._ncdump_info['variables'].keys()
            else:
                onames = False

            if onames: names = onames
            else: names = self._nc.variables.keys()

        for vname in names:
            var[vname] = Pyncvar(self, vname)
        return var
Example #21
def load_blkdata_narr(date0,date1,quiet=True):
  from okean.datasets import narr
  a=narr.NARRData()
  data,miss=a.data(date0,date1,quiet=quiet)

  out=cb.odict()
  time=data.keys()
  for t in time:
    out[t]={}
    for k in data[t].keys():

      if  k.startswith('INFO'):
        out[t][k]=data[t][k]
      else:
        out[t][k]=data[t][k].data
        if not out[t].has_key('x'):
          out[t]['x']=data[t][k].x
          out[t]['y']=data[t][k].y

  return out, miss
Example #22
  def __atts(self):
    '''Returns netcdf ordered attributes, with types and values'''
    att=cbt.odict()
    if self._interface=='pycdf':
      for i in range(self._nc.inq_natts()):
        a=self._nc.attr(i)
        # ps: a.get() produces a segmentation fault for large
        # strings, so:
        L= a.inq_len()
        if L>=250: attvalue='PYCDF ERROR: CANNOT READ !!!!'
        else: attvalue=a.get()

        # find nctype:
        nctype=nctypes.type_pycdf2nc(a.inq_type())

        att[a.inq_name()]=Pyncatt(self._nc,a.inq_name(),attvalue,
                                  nctype=nctype,ncversion=self.ncversion,
                                  interface=self._interface)

    elif self._interface=='scientific':
      a=self._nc.__dict__
      # get ordered atts:
      if not self._ncdump_info is False:
        oatts=self._ncdump_info['vattributes'][self.varname]
      else: oatts=False

      if oatts: keys=oatts
      else: keys= a.keys()

      for k in keys:
        attvalue=a[k]
        att[k]=Pyncatt(self._nc,k,attvalue,ncversion=self.ncversion,
                       interface=self._interface)

    elif self._interface=='netcdf4':
      aname=self._nc.ncattrs() # already ordered!
      for a in aname:
        attvalue=getattr(self._nc,a)
        att[a]=Pyncatt(self._nc,a,attvalue,ncversion=self.ncversion,
                       interface=self._interface)
    return att
Example #23
  def data(self,date0,date1=False,xlim=False,ylim=False,quiet=True):
    files,time=self.files(date0,date1)
    res=cb.odict()

    for i in range(len(files)):
      if not quiet: print('|-> getting from file %s' % files[i])

      try:
        data=narr_file_data(files[i],xlim,ylim,quiet=quiet)
      except:
        if not quiet: print('CANNOT use data for %s' % time[i].isoformat())
        continue

      if data:
        data['INFO_file']   = files[i]
        res[time[i]]=data
      else:
        if not quiet: print('NO DATA for %s' % time[i].isoformat())

    miss={}
    return res,miss
Example #24
  def add_group(self,groupname):
    '''
    Creates netcdf groups

    See:
    http://netcdf4-python.googlecode.com/svn/trunk/docs/netCDF4-module.html
    '''
    if self._interface=='netcdf4':
      self._nc.createGroup(groupname)
      newgroup=Pyncgroup(groupname,self)

      # update self.groups:
      self.groups[groupname]=newgroup

      # update ncdump info:
      if not self._ncdump_info is False:
        self._ncdump_info['groups'][groupname]=cbt.odict()

      return newgroup

    else:
      print ':: add_group only implemented in interface ','netcdf4'
      return False
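The netcdf4-only behaviour above wraps netCDF4's own createGroup; a minimal standalone sketch (file and group names are illustrative):

from netCDF4 import Dataset

nc = Dataset('groups_demo.nc', 'w')    # illustrative file name
forecast = nc.createGroup('forecast')  # returns the new Group object
print(list(nc.groups.keys()))          # ['forecast']
nc.close()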
Example #25
    def add_group(self, groupname):
        '''
    Creates netcdf groups

    See:
    http://netcdf4-python.googlecode.com/svn/trunk/docs/netCDF4-module.html
    '''
        if self._interface == 'netcdf4':
            self._nc.createGroup(groupname)
            newgroup = Pyncgroup(groupname, self)

            # update self.groups:
            self.groups[groupname] = newgroup

            # update ncdump info:
            if not self._ncdump_info is False:
                self._ncdump_info['groups'][groupname] = cbt.odict()

            return newgroup

        else:
            print ':: add_group only implemented in interface ', 'netcdf4'
            return False
Example #26
def load_blkdata_cfsr(cfsrpath, date0=False, date1=False, quiet=True):
    from okean.datasets import cfsr
    a = cfsr.CFSRData(cfsrpath)
    data = a.data(date0, date1, quiet)

    out = cb.odict()
    time = data['time']
    for it in range(len(time)):
        out[time[it]] = {}
        for k in data.keys():

            if k in ('time', ): continue
            elif k.startswith('INFO'):
                out[time[it]][k] = data[k]
            else:
                out[time[it]][k] = data[k].data[it, ...]

                # x and y should be the same, so:
                if not out[time[it]].has_key('x'):
                    out[time[it]]['x'] = data[k].x
                    out[time[it]]['y'] = data[k].y

    return out
Example #27
  def __dims(self):
    '''Returns file dimensions dict'''
    dims=cbt.odict()
    if self._interface=='pycdf':
      for i in range(self._nc.inq_ndims()):
        dims[self._nc.dim(i).inq()[0]]=self._nc.dim(i).inq()[1]
    elif self._interface=='scientific':
      # get ordered dims:
      if not self._ncdump_info is False:
        odims=self._ncdump_info['dimensions']
      else: odims=False

      if not odims: odims=self._nc.dimensions.keys()
      for k in odims: dims[k]=self._nc.dimensions[k]
    elif self._interface=='netcdf4':
      # get ordered dims:
      if not self._ncdump_info is False:
        odims=self._ncdump_info['dimensions']
      else: odims=False

      if not odims: odims=self._nc.dimensions.keys()
      for k in odims: dims[k]=len(self._nc.dimensions[k])

    return dims
Example #28
def update_wind_blended2(fname, datapaths, **kargs):
    '''
  In days without blended data will try to use quikscat data
  '''
    from okean.datasets import quikscat
    from okean.datasets import blended_wind
    a = blended_wind.WINDData(datapaths[0])
    b = quikscat.WINDData(datapaths[1])

    time = netcdf.nctime(fname, 'time')
    date0 = dts.next_date(time[0], -1)
    date1 = dts.next_date(time[-1], +2)

    data = a.data(date0, date1)

    # limit are... otherwise, quikscat interp will be very slow!
    grd = netcdf.fatt(fname, 'grd_file')
    import os
    if not os.path.isfile(grd): grd = kargs['grd']
    cond, inds = rt.grid_vicinity(grd,
                                  data['x'],
                                  data['y'],
                                  margin=5,
                                  rect=True,
                                  retinds=True)
    i1, i2, j1, j2 = inds
    for d in data.keys():
        if d == 'x': data[d] = data[d][i1:i2]
        elif d == 'y': data[d] = data[d][j1:j2]
        else: data[d] = data[d][j1:j2, i1:i2]

    # check for missing days:
    time0 = data.keys()
    x0 = data['x']
    y0 = data['y']
    x0, y0 = np.meshgrid(x0, y0)
    time0.remove('x')
    time0.remove('y')

    out = cb.odict()
    out['x'] = x0
    out['y'] = y0
    info = ''
    qs_ij_limits_done = False
    for d in dts.drange(date0, date1):
        found = 0
        for t in time0:
            if (t.year, t.month, t.day) == (d.year, d.month, d.day):
                print('==> blended : ', t)
                out[t] = data[t]
                found = 1

        if not found:  # use quikscat:
            print('==> quikscat : ', d.strftime('%Y-%m-%d'))
            tmp = b.data(d, dts.next_date(d))
            if not tmp.has_key('x'): continue
            x, y = tmp['x'], tmp['y']
            x, y = np.meshgrid(x, y)

            # reduce qs data:
            if not qs_ij_limits_done:
                i1, i2, j1, j2 = calc.ij_limits(x, y,
                                                [x0.min(), x0.max()],
                                                [y0.min(), y0.max()])
                qs_ij_limits_done = True

            x = x[j1:j2, i1:i2]
            y = y[j1:j2, i1:i2]
            tmp[tmp.keys()[0]] = tmp[tmp.keys()[0]][j1:j2, i1:i2]

            print('  griddata u')
            u = calc.griddata(x, y, tmp[tmp.keys()[0]].real, x0, y0)
            print('  griddata v')
            v = calc.griddata(x, y, tmp[tmp.keys()[0]].imag, x0, y0)
            out[tmp.keys()[0]] = u + 1.j * v
            info += '#' + d.strftime('%Y%m%d')

    new_wind_info = 'blended+quikscat at days: ' + info
    update_wind(fname, out, new_wind_info, **kargs)
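The wind data above is carried as complex arrays (note the .real/.imag accesses and u + 1.j * v); a small numpy illustration of that convention:

import numpy as np

u = np.array([1.0, 2.0])     # zonal component (dummy values)
v = np.array([0.5, -0.5])    # meridional component (dummy values)
wind = u + 1.j * v           # both components stored in one complex array
print(wind.real)             # recovers u
print(wind.imag)             # recovers v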
Example #29
def data2roms(data,grd,sparams,**kargs):
  '''
  Interpolates data to roms 3D grid.

  The dict data must contain the prognostic variables temp, salt, u,
  v (3d) and ssh (zeta, 2d), as well as lon, lat (2d), depth (1d) and
  time/date info: date (data date), date0 (reference date) and time
  (difference between date and date0). The input data can be provided
  by load_data.

  Parameters
  ----------
  data : dict with prognostic variables
  grd : ROMS netcdf grid file
  sparams : s-coordinates parameters, theta_s,theta_b, hc and NLevels

  **kargs:
  ij : axis for vertical interpolations (*i,j)
  ij_ind : list of i or j index for vertical interpolation, all by
           default (ij_ind=False)
  horizAux : if True, the original data horizontally interpolated is
             returned and can be used for next data2roms call with
             this same karg
  quiet : output messages flag (false by default)
  proj : projection - False, name or basemap proj - lcc by default
         if False, horizontal interpolations will use lonxlat instead of distances
  interp_opts: options for griddata
  rep_surf: repeat surface level (new upper level)
  '''

  ij          = kargs.get('ij','j')
  ij_ind      = kargs.get('ij_ind',False)
  horizAux    = kargs.get('horizAux',False)
  quiet       = kargs.get('quiet',False)
  proj        = kargs.get('proj','lcc') # lonxlat to distance before
                                        # horizontal interpolation
  interp_opts = kargs.get('interp_opts',{})
  rep_surf    = kargs.get('rep_surf',True) # create a surface upper level
                                           # before interpolation

  if not quiet: print 'using grid %s' % grd
  g=roms.Grid(grd)
  xr,yr,hr,mr=g.vars('r')
  xu,yu,hu,mu=g.vars('u')
  xv,yv,hv,mv=g.vars('v')
  ny,nx=hr.shape
  nz=sparams[3]

  if proj:
    print 'projecting coordinates...'
    if isinstance(proj,basestring):
       lonc=(xr.max()+xr.min())/2.
       latc=(yr.max()+yr.min())/2.
       from mpl_toolkits.basemap import Basemap
       proj=Basemap(projection=proj,width=1,height=1,resolution=None,
                    lon_0=lonc,lat_0=latc, lat_1=latc)

    xr,yr=proj(xr,yr)
    xu,yu=proj(xu,yu)
    xv,yv=proj(xv,yv)
    dlon,dlat=proj(data['lon'],data['lat'])
    Rdz=1/100. # distance to depth ratio (300km vs 3000m)
    distance=lambda x,y: np.append(0.,np.sqrt(np.diff(x)**2+np.diff(y)**2).cumsum())
  else:
    dlon,dlat=data['lon'],data['lat']
    distance=calc.distance

  # needed for s_levels and for rep_surf!
  sshr=calc.griddata(dlon,dlat,data['ssh'],xr,yr,extrap=True,**interp_opts)

  # repeat surface:
  if rep_surf:
    # copy data cos dont want to change the original dataset:
    import copy
    data=copy.deepcopy(data)
    for vname in ['temp','salt','u','v','depth']:
      if data[vname].ndim==1: # depth !
        if np.ma.isMA(data[vname]): vstack=np.ma.hstack
        else: vstack=np.hstack
      else:
        if np.ma.isMA(data[vname]): vstack=np.ma.vstack
        else: vstack=np.vstack

      if data['depth'][0]>data['depth'][1]: # surf at ind 0
        data[vname]=vstack((data[vname][0][np.newaxis],data[vname]))
        if vname=='depth': data[vname][0]=sshr.max()
      else:
        data[vname]=vstack((data[vname],data[vname][-1][np.newaxis]))
        if vname=='depth': data[vname][-1]=sshr.max()

    data['NZ']=data['NZ']+1

  NX=data['NX']
  NY=data['NY']
  NZ=data['NZ']

  if not quiet: print 'calc s levels...'
  Zr = g.s_levels(sparams,sshr,hr,'r')
  Zu = g.s_levels(sparams,sshr,hr,'u')
  Zv = g.s_levels(sparams,sshr,hr,'v')

  # interp horiz:
  retHorizAux=horizAux is True
  if horizAux in (True,False):
    TEMP = np.ma.masked_all((NZ,ny,nx),data['temp'].dtype)
    SALT = np.ma.masked_all((NZ,ny,nx),data['salt'].dtype)
    U    = np.ma.masked_all((NZ,ny,nx),data['u'].dtype)
    V    = np.ma.masked_all((NZ,ny,nx),data['v'].dtype)

    if not quiet: print 'horizontal interpolation:'
    for i in range(NZ):
      if not quiet and i%10==0: print '   lev %d of %d' % (i,NZ)
      #import pylab
      #pylab.figure()
      #pylab.pcolormesh(data['lon'],data['lat'],data['temp'][i,...])

      try: TEMP[i,...] = calc.griddata(dlon,dlat,data['temp'][i,...],xr,yr,extrap=True,**interp_opts)
      except: pass

      try: SALT[i,...] = calc.griddata(dlon,dlat,data['salt'][i,...],xr,yr,extrap=True,**interp_opts)
      except: pass

      try: U[i,...] = calc.griddata(dlon,dlat,data['u'][i,...],xr,yr,extrap=True,**interp_opts)
      except: pass

      try: V[i,...] = calc.griddata(dlon,dlat,data['v'][i,...],xr,yr,extrap=True,**interp_opts)
      except: pass

    # rotate U,V:
    if not quiet: print 'rotating U,V to grid angle'
    angle=g.use('angle')  # rad
    U,V=calc.rot2d(U,V,angle)
    U=rt.rho2uvp3d(U,'u')
    V=rt.rho2uvp3d(V,'v')

    horizAux={}
    horizAux['TEMP'] = TEMP
    horizAux['SALT'] = SALT
    horizAux['U']    = U
    horizAux['V']    = V

  else:
    TEMP = horizAux['TEMP']
    SALT = horizAux['SALT']
    U    = horizAux['U']
    V    = horizAux['V']

  # interp vert:
  nxu=nx-1
  nyv=ny-1
  #> -----------------------------------------------------------------
  useInd=not ij_ind is False
  if ij_ind is False:
    if   ij=='j': ij_ind=range(ny)
    elif ij=='i': ij_ind=range(nx)
  else:
    try: iter(ij_ind)
    except: ij_ind=[ij_ind]

    if   ij=='j': ny=nyv=len(ij_ind)
    elif ij=='i': nx=nxu=len(ij_ind)
  # -----------------------------------------------------------------<

  Temp = np.zeros((nz,ny ,nx ),data['temp'].dtype)
  Salt = np.zeros((nz,ny ,nx ),data['salt'].dtype)
  Uvel = np.zeros((nz,ny ,nxu),data['u'].dtype)
  Vvel = np.zeros((nz,nyv,nx ),data['v'].dtype)


  jslice=lambda x,ind: x[:,ind,:]
  islice=lambda x,ind: x[:,:,ind]

  ZZr = np.tile(data['depth'],(nx,ny,1)).T
  ZZu = np.tile(data['depth'],(nxu,ny,1)).T
  ZZv = np.tile(data['depth'],(nx,nyv,1)).T

  if not useInd is False: #>------------------------------------------
    if   ij=='j':
      slice=jslice
      sshr=sshr[ij_ind,:]
      hr  =hr[ij_ind,:]
    elif ij=='i':
      slice=islice
      sshr=sshr[:,ij_ind]
      hr  =hr[:,ij_ind]

    Zr,Zu,Zv,TEMP,SALT,U,V=[slice(k,ij_ind) for k in [Zr,Zu,Zv,TEMP,SALT,U,V]]
  # -----------------------------------------------------------------<

  if useInd: # then store distances for a possible bry file
    dtype=Temp.dtype
    distr=np.zeros((nz,ny, nx ),dtype)
    distu=np.zeros((nz,ny, nxu),dtype)
    distv=np.zeros((nz,nyv,nx ),dtype)

  if not quiet: print 'vertical interpolation:'
  if ij=='j':
    for j in range(ny):
      if not quiet and (ny<10 or (ny>=10 and j%10==0)): print '  j=%3d of %3d' % (j,ny)
      ind=ij_ind[j]
      dr=np.tile(distance(xr[ind,:],yr[ind,:]),(nz,1))
      du=np.tile(distance(xu[ind,:],yu[ind,:]),(nz,1))
      Dr=np.tile(distance(xr[ind,:],yr[ind,:]),(NZ,1))
      Du=np.tile(distance(xu[ind,:],yu[ind,:]),(NZ,1))

      if useInd:
        distr[:,j,:]=dr;
        distu[:,j,:]=du;

      Temp[:,j,:]   = calc.griddata(Rdz*Dr,ZZr[:,j,:],TEMP[:,j,:],Rdz*dr,Zr[:,j,:],extrap=True,**interp_opts)
      Salt[:,j,:]   = calc.griddata(Rdz*Dr,ZZr[:,j,:],SALT[:,j,:],Rdz*dr,Zr[:,j,:],extrap=True,**interp_opts)
      if 0 and j%10==0:
        print Dr.shape, ZZr[:,j,:].shape
        import pylab as pl
        pl.figure(1)
        pl.clf()
        pl.pcolormesh(Dr,ZZr[:,j,:],SALT[:,j,:])
        pl.colorbar()
        clim=pl.gci().get_clim()
      
        pl.figure(2)
        pl.clf()
        pl.pcolormesh(dr,Zr[:,j,:],Salt[:,j,:])
        pl.clim(clim)
        pl.colorbar()
        raw_input()
      
      Uvel[:,j,:]   = calc.griddata(Rdz*Du,ZZu[:,j,:],U[:,j,:],   Rdz*du,Zu[:,j,:],extrap=True,**interp_opts)
      if j<Vvel.shape[1]:
        dv=np.tile(distance(xv[ind,:],yv[ind,:]),(nz,1))
        Dv=np.tile(distance(xv[ind,:],yv[ind,:]),(NZ,1))
        Vvel[:,j,:] = calc.griddata(Rdz*Dv,ZZv[:,j,:],V[:,j,:],   Rdz*dv,Zv[:,j,:],extrap=True,**interp_opts)
        if useInd:
          distv[:,j,:]=dv

      if np.any(np.isnan(Temp[:,j,:])): print 'found nan in temp',j
      if np.any(np.isnan(Salt[:,j,:])): print 'found nan in salt',j
      if np.any(np.isnan(Uvel[:,j,:])): print 'found nan in u',j
      if j<Vvel.shape[1] and np.any(np.isnan(Vvel[:,j,:])): print 'found nan in v',j


  elif ij=='i':
    for i in range(nx):
      if not quiet and (nx<10 or (nx>=10 and i%10==0)): print '  i=%3d of %3d' % (i,nx)
      ind=ij_ind[i]
      dr=np.tile(distance(xr[:,ind],yr[:,ind]),(nz,1))
      dv=np.tile(distance(xv[:,ind],yv[:,ind]),(nz,1))
      Dr=np.tile(distance(xr[:,ind],yr[:,ind]),(NZ,1))
      Dv=np.tile(distance(xv[:,ind],yv[:,ind]),(NZ,1))

      if useInd:
        distr[:,:,i]=dr;
        distv[:,:,i]=dv;

      Temp[:,:,i]   = calc.griddata(Rdz*Dr,ZZr[:,:,i],TEMP[:,:,i],Rdz*dr,Zr[:,:,i],extrap=True,**interp_opts)
      Salt[:,:,i]   = calc.griddata(Rdz*Dr,ZZr[:,:,i],SALT[:,:,i],Rdz*dr,Zr[:,:,i],extrap=True,**interp_opts)
      Vvel[:,:,i]   = calc.griddata(Rdz*Dv,ZZv[:,:,i],V[:,:,i],   Rdz*dv,Zv[:,:,i],extrap=True,**interp_opts)
      if i<Uvel.shape[2]:
        du=np.tile(distance(xu[:,ind],yu[:,ind]),(nz,1))
        Du=np.tile(distance(xu[:,ind],yu[:,ind]),(NZ,1))
        Uvel[:,:,i] = calc.griddata(Rdz*Du,ZZu[:,:,i],U[:,:,i],   Rdz*du,Zu[:,:,i],extrap=True,**interp_opts)
        if useInd:
          distu[:,:,i]=du


  # uv bar:
  if not quiet: print 'calc uvbar'
  if useInd is False:
    ubar,vbar=rt.uvbar(Uvel,Vvel,sshr,hr,sparams)
  else: #>------------------------------------------------------------
    sshu=calc.griddata(dlon,dlat,data['ssh'],xu,yu,extrap=True,**interp_opts)
    sshv=calc.griddata(dlon,dlat,data['ssh'],xv,yv,extrap=True,**interp_opts)

    if ij=='j':
      sshu=sshu[ij_ind,:]
      sshv=sshv[ij_ind,:]
      hu  =hu[ij_ind,:]
      hv  =hv[ij_ind,:]
    elif ij=='i':
      sshu=sshu[:,ij_ind]
      sshv=sshv[:,ij_ind]
      hu  =hu[:,ij_ind]
      hv  =hv[:,ij_ind]

    ubar=rt.barotropic(Uvel,sshu,hu,sparams)
    vbar=rt.barotropic(Vvel,sshv,hv,sparams)
  # -----------------------------------------------------------------<


  Vars=cb.odict()
  Vars['temp'] = Temp
  Vars['salt'] = Salt
  Vars['u']    = Uvel
  Vars['v']    = Vvel
  Vars['zeta'] = sshr
  Vars['ubar'] = ubar
  Vars['vbar'] = vbar

  Vars['date']   = data['date']

  if not useInd is False: #>------------------------------------------
    Vars['depth']  = Zr
    Vars['depthu'] = Zu
    Vars['depthv'] = Zv

    Vars['dist']  = distr
    Vars['distu'] = distu
    Vars['distv'] = distv
  # -----------------------------------------------------------------<


  if retHorizAux: return Vars, horizAux
  else: return Vars
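A hedged sketch of the input dict the docstring asks for (shapes and values are dummies for illustration; in practice load_data builds this dict and the ROMS grid file is a real netcdf file):

import datetime
import numpy as np

NZ, NY, NX = 3, 4, 5                       # dummy sizes for illustration
lon, lat = np.meshgrid(np.linspace(-10., -5., NX), np.linspace(35., 40., NY))
data = dict(
    lon=lon, lat=lat,                      # 2d
    depth=np.array([0., 10., 100.]),       # 1d
    temp=np.zeros((NZ, NY, NX)), salt=np.zeros((NZ, NY, NX)),
    u=np.zeros((NZ, NY, NX)), v=np.zeros((NZ, NY, NX)),
    ssh=np.zeros((NY, NX)),                # 2d
    NX=NX, NY=NY, NZ=NZ,
    date=datetime.datetime(2012, 1, 1),
    date0=datetime.datetime(2012, 1, 1), time=0.)
sparams = (7.0, 0.1, 50.0, 30)             # theta_s, theta_b, hc, NLevels (placeholders)
# Vars = data2roms(data, 'roms_grd.nc', sparams)   # grid file name is a placeholder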
Example #30
def data2roms(data, grd, sparams, **kargs):
    '''
  Interpolates data to roms 3D grid.

  The dict data must contain the prognostic variables temp, salt, u,
  v (3d) and ssh (zeta, 2d), as well as lon, lat (2d), depth (1d) and
  time/date info: date (data date), date0 (reference date) and time
  (difference between date and date0). The input data can be provided
  by load_data.

  Parameters
  ----------
  data : dict with prognostic variables
  grd : ROMS netcdf grid file
  sparams : s-coordinates parameters, theta_s,theta_b, hc and NLevels

  **kargs:
  ij : axis for vertical interpolations (*i,j)
  ij_ind : list of i or j index for vertical interpolation, all by
           default (ij_ind=False)
  horizAux : if True, the original data horizontally interpolated is
             returned and can be used for next data2roms call with
             this same karg
  quiet : output messages flag (false by default)
  proj : projection - False, name or basemap proj - lcc by default
         if False, horizontal interpolations will use lonxlat instead of distances
  interp_opts: options for griddata
  rep_surf: repeat surface level (new upper level)
  '''

    ij = kargs.get('ij', 'j')
    ij_ind = kargs.get('ij_ind', False)
    horizAux = kargs.get('horizAux', False)
    quiet = kargs.get('quiet', False)
    proj = kargs.get('proj', 'lcc')  # lonxlat to distance before
    # horizontal interpolation
    interp_opts = kargs.get('interp_opts', {})
    rep_surf = kargs.get('rep_surf', True)  # create a surface upper level
    # before interpolation

    if not quiet: print 'using grid %s' % grd
    g = roms.Grid(grd)
    xr, yr, hr, mr = g.vars('r')
    xu, yu, hu, mu = g.vars('u')
    xv, yv, hv, mv = g.vars('v')
    ny, nx = hr.shape
    nz = sparams[3]

    if proj:
        print 'projecting coordinates...'
        if isinstance(proj, basestring):
            lonc = (xr.max() + xr.min()) / 2.
            latc = (yr.max() + yr.min()) / 2.
            from mpl_toolkits.basemap import Basemap
            proj = Basemap(projection=proj,
                           width=1,
                           height=1,
                           resolution=None,
                           lon_0=lonc,
                           lat_0=latc,
                           lat_1=latc)

        xr, yr = proj(xr, yr)
        xu, yu = proj(xu, yu)
        xv, yv = proj(xv, yv)
        dlon, dlat = proj(data['lon'], data['lat'])
        Rdz = 1 / 100.  # distance to depth ratio (300km vs 3000m)
        distance = lambda x, y: np.append(
            0.,
            np.sqrt(np.diff(x)**2 + np.diff(y)**2).cumsum())
    else:
        dlon, dlat = data['lon'], data['lat']
        distance = calc.distance

    # needed for s_levels and for rep_surf!
    sshr = calc.griddata(dlon,
                         dlat,
                         data['ssh'],
                         xr,
                         yr,
                         extrap=True,
                         **interp_opts)

    # repeat surface:
    if rep_surf:
        # copy data cos dont want to change the original dataset:
        import copy
        data = copy.deepcopy(data)
        for vname in ['temp', 'salt', 'u', 'v', 'depth']:
            if data[vname].ndim == 1:  # depth !
                if np.ma.isMA(data[vname]): vstack = np.ma.hstack
                else: vstack = np.hstack
            else:
                if np.ma.isMA(data[vname]): vstack = np.ma.vstack
                else: vstack = np.vstack

            if data['depth'][0] > data['depth'][1]:  # surf at ind 0
                data[vname] = vstack((data[vname][0][np.newaxis], data[vname]))
                if vname == 'depth': data[vname][0] = sshr.max()
            else:
                data[vname] = vstack(
                    (data[vname], data[vname][-1][np.newaxis]))
                if vname == 'depth': data[vname][-1] = sshr.max()

        data['NZ'] = data['NZ'] + 1

    NX = data['NX']
    NY = data['NY']
    NZ = data['NZ']

    if not quiet: print 'calc s levels...'
    Zr = g.s_levels(sparams, sshr, hr, 'r')
    Zu = g.s_levels(sparams, sshr, hr, 'u')
    Zv = g.s_levels(sparams, sshr, hr, 'v')

    # interp horiz:
    retHorizAux = horizAux is True
    if horizAux in (True, False):
        TEMP = np.ma.masked_all((NZ, ny, nx), data['temp'].dtype)
        SALT = np.ma.masked_all((NZ, ny, nx), data['salt'].dtype)
        U = np.ma.masked_all((NZ, ny, nx), data['u'].dtype)
        V = np.ma.masked_all((NZ, ny, nx), data['v'].dtype)

        if not quiet: print 'horizontal interpolation:'
        for i in range(NZ):
            if not quiet and i % 10 == 0: print '   lev %d of %d' % (i, NZ)
            #import pylab
            #pylab.figure()
            #pylab.pcolormesh(data['lon'],data['lat'],data['temp'][i,...])

            try:
                TEMP[i, ...] = calc.griddata(dlon,
                                             dlat,
                                             data['temp'][i, ...],
                                             xr,
                                             yr,
                                             extrap=True,
                                             **interp_opts)
            except:
                pass

            try:
                SALT[i, ...] = calc.griddata(dlon,
                                             dlat,
                                             data['salt'][i, ...],
                                             xr,
                                             yr,
                                             extrap=True,
                                             **interp_opts)
            except:
                pass

            try:
                U[i, ...] = calc.griddata(dlon,
                                          dlat,
                                          data['u'][i, ...],
                                          xr,
                                          yr,
                                          extrap=True,
                                          **interp_opts)
            except:
                pass

            try:
                V[i, ...] = calc.griddata(dlon,
                                          dlat,
                                          data['v'][i, ...],
                                          xr,
                                          yr,
                                          extrap=True,
                                          **interp_opts)
            except:
                pass

        # rotate U,V:
        if not quiet: print 'rotating U,V to grid angle'
        angle = g.use('angle')  # rad
        U, V = calc.rot2d(U, V, angle)
        U = rt.rho2uvp3d(U, 'u')
        V = rt.rho2uvp3d(V, 'v')

        horizAux = {}
        horizAux['TEMP'] = TEMP
        horizAux['SALT'] = SALT
        horizAux['U'] = U
        horizAux['V'] = V

    else:
        TEMP = horizAux['TEMP']
        SALT = horizAux['SALT']
        U = horizAux['U']
        V = horizAux['V']

    # interp vert:
    nxu = nx - 1
    nyv = ny - 1
    #> -----------------------------------------------------------------
    useInd = not ij_ind is False
    if ij_ind is False:
        if ij == 'j': ij_ind = range(ny)
        elif ij == 'i': ij_ind = range(nx)
    else:
        try:
            iter(ij_ind)
        except:
            ij_ind = [ij_ind]

        if ij == 'j': ny = nyv = len(ij_ind)
        elif ij == 'i': nx = nxu = len(ij_ind)
    # -----------------------------------------------------------------<

    Temp = np.zeros((nz, ny, nx), data['temp'].dtype)
    Salt = np.zeros((nz, ny, nx), data['salt'].dtype)
    Uvel = np.zeros((nz, ny, nxu), data['u'].dtype)
    Vvel = np.zeros((nz, nyv, nx), data['v'].dtype)

    jslice = lambda x, ind: x[:, ind, :]
    islice = lambda x, ind: x[:, :, ind]

    ZZr = np.tile(data['depth'], (nx, ny, 1)).T
    ZZu = np.tile(data['depth'], (nxu, ny, 1)).T
    ZZv = np.tile(data['depth'], (nx, nyv, 1)).T

    if not useInd is False:  #>------------------------------------------
        if ij == 'j':
            slice = jslice
            sshr = sshr[ij_ind, :]
            hr = hr[ij_ind, :]
        elif ij == 'i':
            slice = islice
            sshr = sshr[:, ij_ind]
            hr = hr[:, ij_ind]

        Zr, Zu, Zv, TEMP, SALT, U, V = [
            slice(k, ij_ind) for k in [Zr, Zu, Zv, TEMP, SALT, U, V]
        ]
    # -----------------------------------------------------------------<

    if useInd:  # then store distances for a possible bry file
        dtype = Temp.dtype
        distr = np.zeros((nz, ny, nx), dtype)
        distu = np.zeros((nz, ny, nxu), dtype)
        distv = np.zeros((nz, nyv, nx), dtype)

    if not quiet: print 'vertical interpolation:'
    if ij == 'j':
        for j in range(ny):
            if not quiet and (ny < 10 or (ny >= 10 and j % 10 == 0)):
                print '  j=%3d of %3d' % (j, ny)
            ind = ij_ind[j]
            dr = np.tile(distance(xr[ind, :], yr[ind, :]), (nz, 1))
            du = np.tile(distance(xu[ind, :], yu[ind, :]), (nz, 1))
            Dr = np.tile(distance(xr[ind, :], yr[ind, :]), (NZ, 1))
            Du = np.tile(distance(xu[ind, :], yu[ind, :]), (NZ, 1))

            if useInd:
                distr[:, j, :] = dr
                distu[:, j, :] = du

            Temp[:, j, :] = calc.griddata(Rdz * Dr,
                                          ZZr[:, j, :],
                                          TEMP[:, j, :],
                                          Rdz * dr,
                                          Zr[:, j, :],
                                          extrap=True,
                                          **interp_opts)
            Salt[:, j, :] = calc.griddata(Rdz * Dr,
                                          ZZr[:, j, :],
                                          SALT[:, j, :],
                                          Rdz * dr,
                                          Zr[:, j, :],
                                          extrap=True,
                                          **interp_opts)
            if 0 and j % 10 == 0:
                print Dr.shape, ZZr[:, j, :].shape
                import pylab as pl
                pl.figure(1)
                pl.clf()
                pl.pcolormesh(Dr, ZZr[:, j, :], SALT[:, j, :])
                pl.colorbar()
                clim = pl.gci().get_clim()

                pl.figure(2)
                pl.clf()
                pl.pcolormesh(dr, Zr[:, j, :], Salt[:, j, :])
                pl.clim(clim)
                pl.colorbar()
                raw_input()

            Uvel[:, j, :] = calc.griddata(Rdz * Du,
                                          ZZu[:, j, :],
                                          U[:, j, :],
                                          Rdz * du,
                                          Zu[:, j, :],
                                          extrap=True,
                                          **interp_opts)
            if j < Vvel.shape[1]:
                dv = np.tile(distance(xv[ind, :], yv[ind, :]), (nz, 1))
                Dv = np.tile(distance(xv[ind, :], yv[ind, :]), (NZ, 1))
                Vvel[:, j, :] = calc.griddata(Rdz * Dv,
                                              ZZv[:, j, :],
                                              V[:, j, :],
                                              Rdz * dv,
                                              Zv[:, j, :],
                                              extrap=True,
                                              **interp_opts)
                if useInd:
                    distv[:, j, :] = dv

            if np.any(np.isnan(Temp[:, j, :])): print 'found nan in temp', j
            if np.any(np.isnan(Salt[:, j, :])): print 'found nan in salt', j
            if np.any(np.isnan(Uvel[:, j, :])): print 'found nan in u', j
            if j < Vvel.shape[1] and np.any(np.isnan(Vvel[:, j, :])):
                print 'found nan in v', j

    elif ij == 'i':
        for i in range(nx):
            if not quiet and (nx < 10 or (nx >= 10 and i % 10 == 0)):
                print '  i=%3d of %3d' % (i, nx)
            ind = ij_ind[i]
            dr = np.tile(distance(xr[:, ind], yr[:, ind]), (nz, 1))
            dv = np.tile(distance(xv[:, ind], yv[:, ind]), (nz, 1))
            Dr = np.tile(distance(xr[:, ind], yr[:, ind]), (NZ, 1))
            Dv = np.tile(distance(xv[:, ind], yv[:, ind]), (NZ, 1))

            if useInd:
                distr[:, :, i] = dr
                distv[:, :, i] = dv

            Temp[:, :, i] = calc.griddata(Rdz * Dr,
                                          ZZr[:, :, i],
                                          TEMP[:, :, i],
                                          Rdz * dr,
                                          Zr[:, :, i],
                                          extrap=True,
                                          **interp_opts)
            Salt[:, :, i] = calc.griddata(Rdz * Dr,
                                          ZZr[:, :, i],
                                          SALT[:, :, i],
                                          Rdz * dr,
                                          Zr[:, :, i],
                                          extrap=True,
                                          **interp_opts)
            Vvel[:, :, i] = calc.griddata(Rdz * Dv,
                                          ZZv[:, :, i],
                                          V[:, :, i],
                                          Rdz * dv,
                                          Zv[:, :, i],
                                          extrap=True,
                                          **interp_opts)
            if i < Uvel.shape[2]:
                du = np.tile(distance(xu[:, ind], yu[:, ind]), (nz, 1))
                Du = np.tile(distance(xu[:, ind], yu[:, ind]), (NZ, 1))
                Uvel[:, :, i] = calc.griddata(Rdz * Du,
                                              ZZu[:, :, i],
                                              U[:, :, i],
                                              Rdz * du,
                                              Zu[:, :, i],
                                              extrap=True,
                                              **interp_opts)
                if useInd:
                    distu[:, :, i] = du

    # uv bar:
    if not quiet: print 'calc uvbar'
    if useInd is False:
        ubar, vbar = rt.uvbar(Uvel, Vvel, sshr, hr, sparams)
    else:  #>------------------------------------------------------------
        sshu = calc.griddata(dlon,
                             dlat,
                             data['ssh'],
                             xu,
                             yu,
                             extrap=True,
                             **interp_opts)
        sshv = calc.griddata(dlon,
                             dlat,
                             data['ssh'],
                             xv,
                             yv,
                             extrap=True,
                             **interp_opts)

        if ij == 'j':
            sshu = sshu[ij_ind, :]
            sshv = sshv[ij_ind, :]
            hu = hu[ij_ind, :]
            hv = hv[ij_ind, :]
        elif ij == 'i':
            sshu = sshu[:, ij_ind]
            sshv = sshv[:, ij_ind]
            hu = hu[:, ij_ind]
            hv = hv[:, ij_ind]

        ubar = rt.barotropic(Uvel, sshu, hu, sparams)
        vbar = rt.barotropic(Vvel, sshv, hv, sparams)
    # -----------------------------------------------------------------<

    Vars = cb.odict()
    Vars['temp'] = Temp
    Vars['salt'] = Salt
    Vars['u'] = Uvel
    Vars['v'] = Vvel
    Vars['zeta'] = sshr
    Vars['ubar'] = ubar
    Vars['vbar'] = vbar

    Vars['date'] = data['date']

    if not useInd is False:  #>------------------------------------------
        Vars['depth'] = Zr
        Vars['depthu'] = Zu
        Vars['depthv'] = Zv

        Vars['dist'] = distr
        Vars['distu'] = distu
        Vars['distv'] = distv
    # -----------------------------------------------------------------<

    if retHorizAux: return Vars, horizAux
    else: return Vars
Example #31
def data2roms(data, grd, sparams, **kargs):
    '''
  Interpolates data to roms 3D grid.

  The dict data must contain the prognostic variables temp, salt, u,
  v (3d) and ssh (zeta, 2d), as well as lon, lat (2d), depth (1d) and
  time/date info: date (data date), date0 (reference date) and time
  (difference between date and date0). The input data can be provided
  by load_data.

  Parameters
  ----------
  data : dict with prognostic variables
  grd : ROMS netcdf grid file
  sparams : s-coordinates parameters, theta_s,theta_b, hc and NLevels

  **kargs:
  ij : axis for vertical interpolations (*i,j)
  ij_ind : list of i or j index for vertical interpolation, all by
           default (ij_ind=False)
  horizAux : if True, the original data horizontally interpolated is
             returned and can be used for next data2roms call with
             this same karg
  quiet : output messages flag (false by default)
  '''

    ij = 'j'
    ij_ind = False
    horizAux = False
    quiet = False
    if 'ij' in kargs.keys(): ij = kargs['ij']
    if 'ij_ind' in kargs.keys(): ij_ind = kargs['ij_ind']
    if 'horizAux' in kargs.keys(): horizAux = kargs['horizAux']
    if 'quiet' in kargs.keys(): quiet = kargs['quiet']

    if not quiet: print 'using grid %s' % grd
    g = roms.Grid(grd)
    xr, yr, hr, mr = g.vars('r')
    xu, yu, hu, mu = g.vars('u')
    xv, yv, hv, mv = g.vars('v')
    ny, nx = hr.shape
    nz = sparams[3]

    NX = data['NX']
    NY = data['NY']
    NZ = data['NZ']

    if not quiet: print 'calc s levels...'
    sshr = calc.griddata(data['lon'],
                         data['lat'],
                         data['ssh'],
                         xr,
                         yr,
                         extrap=True)

    Zr = g.s_levels(sparams, sshr, hr, 'r')
    Zu = g.s_levels(sparams, sshr, hr, 'u')
    Zv = g.s_levels(sparams, sshr, hr, 'v')

    # interp horiz:
    retHorizAux = horizAux is True
    if horizAux in (True, False):
        TEMP = np.ma.masked_all((NZ, ny, nx), data['temp'].dtype)
        SALT = np.ma.masked_all((NZ, ny, nx), data['salt'].dtype)
        U = np.ma.masked_all((NZ, ny, nx), data['u'].dtype)
        V = np.ma.masked_all((NZ, ny, nx), data['v'].dtype)

        if not quiet: print 'horizontal interpolation:'
        for i in range(NZ):
            if not quiet and i % 10 == 0: print '   lev %d of %d' % (i, NZ)
            #import pylab
            #pylab.figure()
            #pylab.pcolormesh(data['lon'],data['lat'],data['temp'][i,...])

            try:
                TEMP[i, ...] = calc.griddata(data['lon'],
                                             data['lat'],
                                             data['temp'][i, ...],
                                             xr,
                                             yr,
                                             extrap=True)
            except:
                pass

            try:
                SALT[i, ...] = calc.griddata(data['lon'],
                                             data['lat'],
                                             data['salt'][i, ...],
                                             xr,
                                             yr,
                                             extrap=True)
            except:
                pass

            try:
                U[i, ...] = calc.griddata(data['lon'],
                                          data['lat'],
                                          data['u'][i, ...],
                                          xr,
                                          yr,
                                          extrap=True)
            except:
                pass

            try:
                V[i, ...] = calc.griddata(data['lon'],
                                          data['lat'],
                                          data['v'][i, ...],
                                          xr,
                                          yr,
                                          extrap=True)
            except:
                pass

        # rotate U,V:
        if not quiet: print 'rotating U,V to grid angle'
        angle = g.use('angle')  # rad
        U, V = calc.rot2d(U, V, angle)
        U = rt.rho2uvp3d(U, 'u')
        V = rt.rho2uvp3d(V, 'v')

        horizAux = {}
        horizAux['TEMP'] = TEMP
        horizAux['SALT'] = SALT
        horizAux['U'] = U
        horizAux['V'] = V

    else:
        TEMP = horizAux['TEMP']
        SALT = horizAux['SALT']
        U = horizAux['U']
        V = horizAux['V']

    # interp vert:
    nxu = nx - 1
    nyv = ny - 1
    #> -----------------------------------------------------------------
    useInd = not ij_ind is False
    if ij_ind is False:
        if ij == 'j': ij_ind = range(ny)
        elif ij == 'i': ij_ind = range(nx)
    else:
        try:
            iter(ij_ind)
        except:
            ij_ind = [ij_ind]

        if ij == 'j': ny = nyv = len(ij_ind)
        elif ij == 'i': nx = nxu = len(ij_ind)
    # -----------------------------------------------------------------<

    Temp = np.zeros((nz, ny, nx), data['temp'].dtype)
    Salt = np.zeros((nz, ny, nx), data['salt'].dtype)
    Uvel = np.zeros((nz, ny, nxu), data['u'].dtype)
    Vvel = np.zeros((nz, nyv, nx), data['v'].dtype)

    jslice = lambda x, ind: x[:, ind, :]
    islice = lambda x, ind: x[:, :, ind]

    ZZr = np.tile(data['depth'], (nx, ny, 1)).T
    ZZu = np.tile(data['depth'], (nxu, ny, 1)).T
    ZZv = np.tile(data['depth'], (nx, nyv, 1)).T

    if not useInd is False:  #>------------------------------------------
        if ij == 'j':
            slice = jslice
            sshr = sshr[ij_ind, :]
            hr = hr[ij_ind, :]
        elif ij == 'i':
            slice = islice
            sshr = sshr[:, ij_ind]
            hr = hr[:, ij_ind]

        Zr, Zu, Zv, TEMP, SALT, U, V = [
            slice(k, ij_ind) for k in [Zr, Zu, Zv, TEMP, SALT, U, V]
        ]
    # -----------------------------------------------------------------<

    if useInd:  # then store distances for a possible bry file
        dtype = Temp.dtype
        distr = np.zeros((nz, ny, nx), dtype)
        distu = np.zeros((nz, ny, nxu), dtype)
        distv = np.zeros((nz, nyv, nx), dtype)

    if not quiet: print 'vertical interpolation:'
    if ij == 'j':
        for j in range(ny):
            if not quiet and (ny < 10 or (ny >= 10 and j % 10 == 0)):
                print '  j=%3d of %3d' % (j, ny)
            ind = ij_ind[j]
            dr = np.tile(calc.distance(xr[ind, :], yr[ind, :]), (nz, 1))
            du = np.tile(calc.distance(xu[ind, :], yu[ind, :]), (nz, 1))
            Dr = np.tile(calc.distance(xr[ind, :], yr[ind, :]), (NZ, 1))
            Du = np.tile(calc.distance(xu[ind, :], yu[ind, :]), (NZ, 1))

            if useInd:
                distr[:, j, :] = dr
                distu[:, j, :] = du

            Temp[:, j, :] = calc.griddata(Dr,
                                          ZZr[:, j, :],
                                          TEMP[:, j, :],
                                          dr,
                                          Zr[:, j, :],
                                          extrap=True)
            Salt[:, j, :] = calc.griddata(Dr,
                                          ZZr[:, j, :],
                                          SALT[:, j, :],
                                          dr,
                                          Zr[:, j, :],
                                          extrap=True)
            if 0 and j % 10 == 0:
                print Dr.shape, ZZr[:, j, :].shape
                import pylab as pl
                pl.figure(1)
                pl.clf()
                pl.pcolormesh(Dr, ZZr[:, j, :], SALT[:, j, :])
                pl.colorbar()
                clim = pl.gci().get_clim()

                pl.figure(2)
                pl.clf()
                pl.pcolormesh(dr, Zr[:, j, :], Salt[:, j, :])
                pl.clim(clim)
                pl.colorbar()
                raw_input()

            Uvel[:, j, :] = calc.griddata(Du,
                                          ZZu[:, j, :],
                                          U[:, j, :],
                                          du,
                                          Zu[:, j, :],
                                          extrap=True)
            if j < Vvel.shape[1]:
                dv = np.tile(calc.distance(xv[ind, :], yv[ind, :]), (nz, 1))
                Dv = np.tile(calc.distance(xv[ind, :], yv[ind, :]), (NZ, 1))
                Vvel[:, j, :] = calc.griddata(Dv,
                                              ZZv[:, j, :],
                                              V[:, j, :],
                                              dv,
                                              Zv[:, j, :],
                                              extrap=True)
                if useInd:
                    distv[:, j, :] = dv

            if np.any(np.isnan(Temp[:, j, :])): print 'found nan in temp', j
            if np.any(np.isnan(Salt[:, j, :])): print 'found nan in salt', j
            if np.any(np.isnan(Uvel[:, j, :])): print 'found nan in u', j
            if j < Vvel.shape[1] and np.any(np.isnan(Vvel[:, j, :])):
                print 'found nan in v', j

    elif ij == 'i':
        for i in range(nx):
            if not quiet and (nx < 10 or (nx >= 10 and i % 10 == 0)):
                print '  i=%3d of %3d' % (i, nx)
            ind = ij_ind[i]
            dr = np.tile(calc.distance(xr[:, ind], yr[:, ind]), (nz, 1))
            dv = np.tile(calc.distance(xv[:, ind], yv[:, ind]), (nz, 1))
            Dr = np.tile(calc.distance(xr[:, ind], yr[:, ind]), (NZ, 1))
            Dv = np.tile(calc.distance(xv[:, ind], yv[:, ind]), (NZ, 1))

            if useInd:
                distr[:, :, i] = dr
                distv[:, :, i] = dv

            Temp[:, :, i] = calc.griddata(Dr,
                                          ZZr[:, :, i],
                                          TEMP[:, :, i],
                                          dr,
                                          Zr[:, :, i],
                                          extrap=True)
            Salt[:, :, i] = calc.griddata(Dr,
                                          ZZr[:, :, i],
                                          SALT[:, :, i],
                                          dr,
                                          Zr[:, :, i],
                                          extrap=True)
            Vvel[:, :, i] = calc.griddata(Dv,
                                          ZZv[:, :, i],
                                          V[:, :, i],
                                          dv,
                                          Zv[:, :, i],
                                          extrap=True)
            if i < Uvel.shape[2]:
                du = np.tile(calc.distance(xu[:, ind], yu[:, ind]), (nz, 1))
                Du = np.tile(calc.distance(xu[:, ind], yu[:, ind]), (NZ, 1))
                Uvel[:, :, i] = calc.griddata(Du,
                                              ZZu[:, :, i],
                                              U[:, :, i],
                                              du,
                                              Zu[:, :, i],
                                              extrap=True)
                if useInd:
                    distu[:, :, i] = du

    # uv bar:
    if not quiet: print 'calc uvbar'
    if useInd is False:
        ubar, vbar = rt.uvbar(Uvel, Vvel, sshr, hr, sparams)
    else:  #>------------------------------------------------------------
        sshu = calc.griddata(data['lon'],
                             data['lat'],
                             data['ssh'],
                             xu,
                             yu,
                             extrap=True)
        sshv = calc.griddata(data['lon'],
                             data['lat'],
                             data['ssh'],
                             xv,
                             yv,
                             extrap=True)

        if ij == 'j':
            sshu = sshu[ij_ind, :]
            sshv = sshv[ij_ind, :]
            hu = hu[ij_ind, :]
            hv = hv[ij_ind, :]
        elif ij == 'i':
            sshu = sshu[:, ij_ind]
            sshv = sshv[:, ij_ind]
            hu = hu[:, ij_ind]
            hv = hv[:, ij_ind]

        ubar = rt.barotropic(Uvel, sshu, hu, sparams)
        vbar = rt.barotropic(Vvel, sshv, hv, sparams)
    # -----------------------------------------------------------------<

    Vars = cb.odict()
    Vars['temp'] = Temp
    Vars['salt'] = Salt
    Vars['u'] = Uvel
    Vars['v'] = Vvel
    Vars['zeta'] = sshr
    Vars['ubar'] = ubar
    Vars['vbar'] = vbar

    Vars['date'] = data['date']

    if not useInd is False:  #>------------------------------------------
        Vars['depth'] = Zr
        Vars['depthu'] = Zu
        Vars['depthv'] = Zv

        Vars['dist'] = distr
        Vars['distu'] = distu
        Vars['distv'] = distv
    # -----------------------------------------------------------------<

    if retHorizAux: return Vars, horizAux
    else: return Vars
Exemple #32
0
def update_wind_blended2(fname, datapaths, **kargs):
    """
  On days without blended data, quikscat data will be used instead
  """
    from okean.datasets import quikscat
    from okean.datasets import blended_wind

    a = blended_wind.WINDData(datapaths[0])
    b = quikscat.WINDData(datapaths[1])

    time = netcdf.nctime(fname, "time")
    date0 = dts.next_date(time[0], -1)
    date1 = dts.next_date(time[-1], +2)

    data = a.data(date0, date1)

    # limit the area... otherwise, quikscat interp will be very slow!
    grd = netcdf.fatt(fname, "grd_file")
    import os

    if not os.path.isfile(grd):
        grd = kargs["grd"]
    cond, inds = rt.grid_vicinity(grd, data["x"], data["y"], margin=5, rect=True, retinds=True)
    i1, i2, j1, j2 = inds
    for d in data.keys():
        if d == "x":
            data[d] = data[d][i1:i2]
        elif d == "y":
            data[d] = data[d][j1:j2]
        else:
            data[d] = data[d][j1:j2, i1:i2]

    # check for missing days:
    time0 = data.keys()
    x0 = data["x"]
    y0 = data["y"]
    x0, y0 = np.meshgrid(x0, y0)
    time0.remove("x")
    time0.remove("y")

    out = cb.odict()
    out["x"] = x0
    out["y"] = y0
    info = ""
    qs_ij_limits_done = False
    for d in dts.drange(date0, date1):
        found = 0
        for t in time0:
            if (t.year, t.month, t.day) == (d.year, d.month, d.day):
                print "==> blended : ", t
                out[t] = data[t]
                found = 1

        if not found:  # use quikscat:
            print "==> quikscat : ", d.strftime("%Y-%m-%d")
            tmp = b.data(d, dts.next_date(d))
            if not tmp.has_key("x"):
                continue
            x, y = tmp["x"], tmp["y"]
            x, y = np.meshgrid(x, y)

            # reduce qs data:
            if not qs_ij_limits_done:
                i1, i2, j1, j2 = calc.ij_limits(x, y, [x0.min(), x0.max()], [y0.min(), y0.max()])
                qs_ij_limits_done = True

            x = x[j1:j2, i1:i2]
            y = y[j1:j2, i1:i2]
            tmp[tmp.keys()[0]] = tmp[tmp.keys()[0]][j1:j2, i1:i2]

            print "  griddata u"
            u = calc.griddata(x, y, tmp[tmp.keys()[0]].real, x0, y0)
            print "  griddata v"
            v = calc.griddata(x, y, tmp[tmp.keys()[0]].imag, x0, y0)
            out[tmp.keys()[0]] = u + 1.0j * v
            info += "#" + d.strftime("%Y%m%d")

    new_wind_info = "blended+quikscat at days: " + info
    update_wind(fname, out, new_wind_info, **kargs)
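
A minimal usage sketch for this updater (the forcing file, dataset paths and the grd karg below are hypothetical placeholders; only the call signature follows the function above):

# hypothetical paths: blended-wind dataset first, quikscat dataset second
blk_file  = 'roms_blk.nc'                        # bulk forcing file with a time variable
datapaths = ['/data/wind/blended', '/data/wind/quikscat']
# grd is only needed if the file's grd_file attribute does not point to an existing file
update_wind_blended2(blk_file, datapaths, grd='roms_grd.nc')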
Exemple #33
0
def data2roms(data,grd,sparams,**kargs):
  '''
  Interpolates data to roms 3D grid.

  The dict data must contain the prognostic variables temp, salt, u,
  v (3d) and ssh (zeta, 2d), as well as lon, lat (2d), depth (1d) and
  time/date info: date (data date), date0 (reference date) and time
  (difference between date and date0). The input data can be provided
  by load_data.

  Parameters
  ----------
  data : dict with prognostic variables
  grd : ROMS netcdf grid file
  sparams : s-coordinates parameters, theta_s,theta_b, hc and NLevels

  **kargs:
  ij : axis for vertical interpolations ('i' or 'j', default 'j')
  ij_ind : list of i or j indices for vertical interpolation, all by
           default (ij_ind=False)
  horizAux : if True, the horizontally interpolated data is also
             returned and can be passed back in this same karg on the
             next data2roms call
  quiet : output messages flag (false by default)
  '''

  ij='j'
  ij_ind=False
  horizAux=False
  quiet=False
  if 'ij'       in kargs.keys(): ij       = kargs['ij']
  if 'ij_ind'   in kargs.keys(): ij_ind   = kargs['ij_ind']
  if 'horizAux' in kargs.keys(): horizAux = kargs['horizAux']
  if 'quiet'    in kargs.keys(): quiet    = kargs['quiet']

  if not quiet: print 'using grid %s' % grd
  g=roms.Grid(grd)
  xr,yr,hr,mr=g.vars('r')
  xu,yu,hu,mu=g.vars('u')
  xv,yv,hv,mv=g.vars('v')
  ny,nx=hr.shape
  nz=sparams[3]

  NX=data['NX']
  NY=data['NY']
  NZ=data['NZ']

  if not quiet: print 'calc s levels...'
  sshr=calc.griddata(data['lon'],data['lat'],data['ssh'],xr,yr,extrap=True)

  Zr = g.s_levels(sparams,sshr,hr,'r')
  Zu = g.s_levels(sparams,sshr,hr,'u')
  Zv = g.s_levels(sparams,sshr,hr,'v')

  # interp horiz:
  retHorizAux=horizAux is True
  if horizAux in (True,False):
    TEMP = np.ma.masked_all((NZ,ny,nx),data['temp'].dtype)
    SALT = np.ma.masked_all((NZ,ny,nx),data['salt'].dtype)
    U    = np.ma.masked_all((NZ,ny,nx),data['u'].dtype)
    V    = np.ma.masked_all((NZ,ny,nx),data['v'].dtype)

    if not quiet: print 'horizontal interpolation:'
    for i in range(NZ):
      if not quiet and i%10==0: print '   lev %d of %d' % (i,NZ)
      #import pylab
      #pylab.figure()
      #pylab.pcolormesh(data['lon'],data['lat'],data['temp'][i,...])

      try: TEMP[i,...] = calc.griddata(data['lon'],data['lat'],data['temp'][i,...],xr,yr,extrap=True)
      except: pass

      try: SALT[i,...] = calc.griddata(data['lon'],data['lat'],data['salt'][i,...],xr,yr,extrap=True)
      except: pass

      try: U[i,...] = calc.griddata(data['lon'],data['lat'],data['u'][i,...],xr,yr,extrap=True)
      except: pass

      try: V[i,...] = calc.griddata(data['lon'],data['lat'],data['v'][i,...],xr,yr,extrap=True)
      except: pass

    # rotate U,V:
    if not quiet: print 'rotating U,V to grid angle'
    angle=g.use('angle')  # rad
    U,V=calc.rot2d(U,V,angle)
    U=rt.rho2uvp3d(U,'u')
    V=rt.rho2uvp3d(V,'v')

    horizAux={}
    horizAux['TEMP'] = TEMP
    horizAux['SALT'] = SALT
    horizAux['U']    = U
    horizAux['V']    = V

  else:
    TEMP = horizAux['TEMP']
    SALT = horizAux['SALT']
    U    = horizAux['U']
    V    = horizAux['V']

  # interp vert:
  nxu=nx-1
  nyv=ny-1
  #> -----------------------------------------------------------------
  useInd=not ij_ind is False
  if ij_ind is False:
    if   ij=='j': ij_ind=range(ny)
    elif ij=='i': ij_ind=range(nx)
  else:
    try: iter(ij_ind)
    except: ij_ind=[ij_ind]

    if   ij=='j': ny=nyv=len(ij_ind)
    elif ij=='i': nx=nxu=len(ij_ind)
  # -----------------------------------------------------------------<

  Temp = np.zeros((nz,ny ,nx ),data['temp'].dtype)
  Salt = np.zeros((nz,ny ,nx ),data['salt'].dtype)
  Uvel = np.zeros((nz,ny ,nxu),data['u'].dtype)
  Vvel = np.zeros((nz,nyv,nx ),data['v'].dtype)


  jslice=lambda x,ind: x[:,ind,:]
  islice=lambda x,ind: x[:,:,ind]

  ZZr = np.tile(data['depth'],(nx,ny,1)).T
  ZZu = np.tile(data['depth'],(nxu,ny,1)).T
  ZZv = np.tile(data['depth'],(nx,nyv,1)).T

  if not useInd is False: #>------------------------------------------
    if   ij=='j':
      slice=jslice
      sshr=sshr[ij_ind,:]
      hr  =hr[ij_ind,:]
    elif ij=='i':
      slice=islice
      sshr=sshr[:,ij_ind]
      hr  =hr[:,ij_ind]

    Zr,Zu,Zv,TEMP,SALT,U,V=[slice(k,ij_ind) for k in [Zr,Zu,Zv,TEMP,SALT,U,V]]
  # -----------------------------------------------------------------<

  if useInd: # then store distances for a possible bry file
    dtype=Temp.dtype
    distr=np.zeros((nz,ny, nx ),dtype)
    distu=np.zeros((nz,ny, nxu),dtype)
    distv=np.zeros((nz,nyv,nx ),dtype)

  if not quiet: print 'vertical interpolation:'
  if ij=='j':
    for j in range(ny):
      if not quiet and (ny<10 or (ny>=10 and j%10==0)): print '  j=%3d of %3d' % (j,ny)
      ind=ij_ind[j]
      dr=np.tile(calc.distance(xr[ind,:],yr[ind,:]),(nz,1))
      du=np.tile(calc.distance(xu[ind,:],yu[ind,:]),(nz,1))
      Dr=np.tile(calc.distance(xr[ind,:],yr[ind,:]),(NZ,1))
      Du=np.tile(calc.distance(xu[ind,:],yu[ind,:]),(NZ,1))

      if useInd:
        distr[:,j,:]=dr;
        distu[:,j,:]=du;

      Temp[:,j,:]   = calc.griddata(Dr,ZZr[:,j,:],TEMP[:,j,:],dr,Zr[:,j,:],extrap=True)
      Salt[:,j,:]   = calc.griddata(Dr,ZZr[:,j,:],SALT[:,j,:],dr,Zr[:,j,:],extrap=True)
      if 0 and j%10==0:
        print Dr.shape, ZZr[:,j,:].shape
        import pylab as pl
        pl.figure(1)
        pl.clf()
        pl.pcolormesh(Dr,ZZr[:,j,:],SALT[:,j,:])
        pl.colorbar()
        clim=pl.gci().get_clim()
      
        pl.figure(2)
        pl.clf()
        pl.pcolormesh(dr,Zr[:,j,:],Salt[:,j,:])
        pl.clim(clim)
        pl.colorbar()
        raw_input()
      
      Uvel[:,j,:]   = calc.griddata(Du,ZZu[:,j,:],U[:,j,:],   du,Zu[:,j,:],extrap=True)
      if j<Vvel.shape[1]:
        dv=np.tile(calc.distance(xv[ind,:],yv[ind,:]),(nz,1))
        Dv=np.tile(calc.distance(xv[ind,:],yv[ind,:]),(NZ,1))
        Vvel[:,j,:] = calc.griddata(Dv,ZZv[:,j,:],V[:,j,:],   dv,Zv[:,j,:],extrap=True)
        if useInd:
          distv[:,j,:]=dv

      if np.any(np.isnan(Temp[:,j,:])): print 'found nan in temp',j
      if np.any(np.isnan(Salt[:,j,:])): print 'found nan in salt',j
      if np.any(np.isnan(Uvel[:,j,:])): print 'found nan in u',j
      if j<Vvel.shape[1] and np.any(np.isnan(Vvel[:,j,:])): print 'found nan in v',j


  elif ij=='i':
    for i in range(nx):
      if not quiet and (nx<10 or (nx>=10 and i%10==0)): print '  i=%3d of %3d' % (i,nx)
      ind=ij_ind[i]
      dr=np.tile(calc.distance(xr[:,ind],yr[:,ind]),(nz,1))
      dv=np.tile(calc.distance(xv[:,ind],yv[:,ind]),(nz,1))
      Dr=np.tile(calc.distance(xr[:,ind],yr[:,ind]),(NZ,1))
      Dv=np.tile(calc.distance(xv[:,ind],yv[:,ind]),(NZ,1))

      if useInd:
        distr[:,:,i]=dr;
        distv[:,:,i]=dv;

      Temp[:,:,i]   = calc.griddata(Dr,ZZr[:,:,i],TEMP[:,:,i],dr,Zr[:,:,i],extrap=True)
      Salt[:,:,i]   = calc.griddata(Dr,ZZr[:,:,i],SALT[:,:,i],dr,Zr[:,:,i],extrap=True)
      Vvel[:,:,i]   = calc.griddata(Dv,ZZv[:,:,i],V[:,:,i],   dv,Zv[:,:,i],extrap=True)
      if i<Uvel.shape[2]:
        du=np.tile(calc.distance(xu[:,ind],yu[:,ind]),(nz,1))
        Du=np.tile(calc.distance(xu[:,ind],yu[:,ind]),(NZ,1))
        Uvel[:,:,i] = calc.griddata(Du,ZZu[:,:,i],U[:,:,i],   du,Zu[:,:,i],extrap=True)
        if useInd:
          distu[:,:,i]=du


  # uv bar:
  if not quiet: print 'calc uvbar'
  if useInd is False:
    ubar,vbar=rt.uvbar(Uvel,Vvel,sshr,hr,sparams)
  else: #>------------------------------------------------------------
    sshu=calc.griddata(data['lon'],data['lat'],data['ssh'],xu,yu,extrap=True)
    sshv=calc.griddata(data['lon'],data['lat'],data['ssh'],xv,yv,extrap=True)

    if ij=='j':
      sshu=sshu[ij_ind,:]
      sshv=sshv[ij_ind,:]
      hu  =hu[ij_ind,:]
      hv  =hv[ij_ind,:]
    elif ij=='i':
      sshu=sshu[:,ij_ind]
      sshv=sshv[:,ij_ind]
      hu  =hu[:,ij_ind]
      hv  =hv[:,ij_ind]

    ubar=rt.barotropic(Uvel,sshu,hu,sparams)
    vbar=rt.barotropic(Vvel,sshv,hv,sparams)
  # -----------------------------------------------------------------<


  Vars=cb.odict()
  Vars['temp'] = Temp
  Vars['salt'] = Salt
  Vars['u']    = Uvel
  Vars['v']    = Vvel
  Vars['zeta'] = sshr
  Vars['ubar'] = ubar
  Vars['vbar'] = vbar

  Vars['date']   = data['date']

  if not useInd is False: #>------------------------------------------
    Vars['depth']  = Zr
    Vars['depthu'] = Zu
    Vars['depthv'] = Zv

    Vars['dist']  = distr
    Vars['distu'] = distu
    Vars['distv'] = distv
  # -----------------------------------------------------------------<


  if retHorizAux: return Vars, horizAux
  else: return Vars
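
As a hedged illustration of the inputs described in the docstring above, a call could be assembled as follows (the shapes, values and grid file name are hypothetical; only the key names and the sparams layout follow the docstring):

import numpy as np

NZ, NY, NX = 10, 40, 50                                   # hypothetical source-grid size
data = {'lon':  np.zeros((NY, NX)), 'lat': np.zeros((NY, NX)),   # 2d horizontal coords
        'depth': np.linspace(0., 2000., NZ),                     # 1d depths
        'temp': np.zeros((NZ, NY, NX)), 'salt': np.zeros((NZ, NY, NX)),
        'u':    np.zeros((NZ, NY, NX)), 'v':    np.zeros((NZ, NY, NX)),
        'ssh':  np.zeros((NY, NX)),
        'NX': NX, 'NY': NY, 'NZ': NZ,
        'date': '2010-01-01', 'date0': '2000-01-01', 'time': 3653.}

sparams = 5.0, 0.4, 10., 30                               # theta_s, theta_b, hc, NLevels
Vars = data2roms(data, 'roms_grd.nc', sparams, ij='j', quiet=True)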
Exemple #34
0
    def add_var(self, varname, vartype, dimnames, **kargs):
        '''
    Creates netcdf variable

    Ex: add_var('temp','float32',('lon','lat','z'))


    About compression:

    Compression kargs options (available for netcdf4 interface)
      zlib, default True, turn on compression
      lsd (least_significant_digit), default is None: lossy
        compression with lsd precision
      complevel, 1..9, compression level, default 4


    About vartype:

       i) netcdf type names:
         for netcdf3:
           byte,char,short,int,float,double

         for netcdf4:
           + int64, ushort, uint,uint64, string

       ii) Numeric type code: fd1silbwuc

       iii) Numpy dtype or type name

    kargs may be required when vartype is a netcdf type name:
    for the scientific interface see type_2numeric, and for the
    netcdf4 interface see type_2dtype

    No need for kargs when:
     1. interface = pycdf
     2. interface is scientific and vartype is numpy type name or
        dtype, or vartype is a numeric typecode
     3. interface is netcdf4 and vartype is numpy type name or dtype
    '''

        if self._interface == 'pycdf':
            vartype = nctypes.type_2pycdf(vartype)
            self._nc.def_var(varname, vartype, dimnames)
        elif self._interface == 'scientific':
            vartype = type_2numeric(vartype, **kargs)
            self._nc.createVariable(varname, vartype, dimnames)
        elif self._interface == 'netcdf4':
            vartype = nctypes.type_2numpy(vartype, **kargs)

            # NOTE: an error occurs when creating e.g. int64 with netcdf version 3. It may
            # make sense to convert to int when the type is not supported by that version!?

            zlib = kargs.get('zlib', True)
            lsd = kargs.get('lsd', None)
            complevel = kargs.get('complevel', 4)
            fill_value = kargs.get('fill_value', None)

            self._nc.createVariable(varname,
                                    vartype,
                                    dimnames,
                                    zlib=zlib,
                                    complevel=complevel,
                                    least_significant_digit=lsd,
                                    fill_value=fill_value)

            newvar = Pyncvar(self, varname)

            # update self.vars and self.varnames:
            self.vars[varname] = newvar

            # update ncdump_info:
            if not self._ncdump_info is False:
                self._ncdump_info['variables'][varname] = cbt.odict()

        return newvar
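
A hedged usage sketch for this netcdf4-oriented variant, using the compression kargs read above (the file object nc and its dimensions are assumed to already exist; no API beyond the add_var call shown is implied):

# assuming 'nc' is an open file object from this module and the
# dimensions 'lon', 'lat' and 'z' are already defined in it:
v = nc.add_var('temp', 'float32', ('lon', 'lat', 'z'),
               zlib=True, complevel=4, lsd=3, fill_value=-99999.)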
Exemple #35
0
  def add_var(self,varname,vartype,dimnames,**kargs):
    '''
    Creates netcdf variable

    Ex: add_var('temp','float32',('lon','lat','z'))


    About compression:

    Compression kargs options (available for netcdf4 interface)
      gzip, default True: gzip compression
      lsd (least_significant_digit), default is None: lossy
        compression with lsd precision


    About vartype:

       i) netcdf type names:
         for netcdf3:
           byte,char,short,int,float,double

         for netcdf4:
           + int64, ushort, uint,uint64, string

       ii) Numeric type code: fd1silbwuc

       iii) Numpy dtype or type name

    kargs may be required when vartype is a netcdf type name:
    for the scientific interface see type_2numeric, and for the
    netcdf4 interface see type_2dtype

    No need for kargs when:
     1. interface = pycdf
     2. interface is scientific and vartype is numpy type name or
        dtype, or vartype is a numeric typecode
     3. interface is netcdf4 and vartype is numpy type name or dtype
    '''

    if self._interface=='pycdf':
      vartype=nctypes.type_2pycdf(vartype)
      self._nc.def_var(varname,vartype,dimnames)
    elif self._interface=='scientific':
      vartype=type_2numeric(vartype,**kargs)
      self._nc.createVariable(varname,vartype,dimnames)
    elif self._interface=='netcdf4':
      vartype=nctypes.type_2numpy(vartype,**kargs)

      # NOTE: an error occurs when creating e.g. int64 with netcdf version 3. It may
      # make sense to convert to int when the type is not supported by that version!?

      zlib=True
      lsd=None
      fill_value=None
      if 'gzip' in kargs: zlib = kargs['gzip']
      if 'lsd'  in kargs: lsd  = kargs['lsd']
      if 'fill_value'  in kargs: fill_value  = kargs['fill_value']

      self._nc.createVariable(varname,vartype,dimnames,zlib=zlib,
                              least_significant_digit=lsd,fill_value=fill_value)

      newvar=Pyncvar(self,varname)

      # update self.vars and self.varnames:
      self.vars[varname]=newvar

      # update ncdump_info:
      if not self._ncdump_info is False:
        self._ncdump_info['variables'][varname]=cbt.odict()

    return newvar
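
The equivalent call against this older variant, which reads the kargs gzip and lsd instead of zlib and complevel (again assuming nc and its dimensions already exist; the values are hypothetical):

v = nc.add_var('temp', 'float32', ('lon', 'lat', 'z'),
               gzip=True, lsd=3, fill_value=-99999.)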