Example #1
def makeGrid():
    xstart = 0
    xend = 360
    xstep = 0.5
    ystart = -85
    yend = 85
    ystep = 0.5

    lon_bnds = []
    lon = []
    for ii in numpy.arange(xstart, xend, xstep):
        lon_bnds.append([ii, ii + xstep])
        lon.append(ii + 0.5 * xstep)
    lon_bnds = numpy.array(lon_bnds)
    lon = numpy.array(lon)

    lat_bnds = []
    lat = []
    for ii in numpy.arange(ystart, yend, ystep):
        lat_bnds.append([ii, ii + ystep])
        lat.append(ii + 0.5 * ystep)
    lat_bnds = numpy.array(lat_bnds)
    lat = numpy.array(lat)

    latAxis = cdms2.createAxis(lat, lat_bnds)
    latAxis.designateLatitude(True)
    latAxis.units = "degree_north"
    latAxis.long_name = "Latitude"

    lonAxis = cdms2.createAxis(lon, lon_bnds)
    lonAxis.designateLongitude(True, 360.0)

    return cdms2.createGenericGrid(latAxis, lonAxis, lat_bnds, lon_bnds)
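A minimal usage sketch for the grid builder above (cdms2 and numpy are assumed to be imported as in the snippet; the commented regrid call is illustrative and needs an existing cdms2 variable `var`, possibly with the ESMF regridder for generic grids):

grid = makeGrid()                    # 0.5-degree generic lat-lon grid
lat = grid.getLatitude()
lon = grid.getLongitude()
print(len(lat), len(lon))            # 340 latitude and 720 longitude midpoints
# regridded = var.regrid(grid)       # interpolate an existing variable onto it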
Example #2
    def decorate(self,output,ynm=None,xnm=None):
        x=cdms2.createAxis(range(len(xnm)))
        y=cdms2.createAxis(range(len(ynm)))
        
        try:
            del(x.name)
            del(y.name)
            del(output.name)
        except:
            pass

        nm='___'.join(xnm)
        x.id=nm
        dic={}
        for i in range(len(xnm)):
            dic[i]=xnm[i]
        x.names=repr(dic)
        nm='___'.join(ynm)
        y.id=nm
        output.setAxis(0,y)
        dic={}
        for i in range(len(ynm)):
            dic[i]=ynm[i]
        y.names=repr(dic)
        output.setAxis(1,x)
        
        return
Example #3
def map2four(data,target,regridTool='regrid2'):
    lons=target.getLongitude()
    lats=target.getLatitude()
    lonso=cdms2.createAxis(lons[::2])
    lonse=cdms2.createAxis(lons[1::2])
    latso=cdms2.createAxis(lats[::2])
    latse=cdms2.createAxis(lats[1::2])
    
    oo=cdms2.createRectGrid(latso,lonso)
    oe=cdms2.createRectGrid(latso,lonse)
    eo=cdms2.createRectGrid(latse,lonso)
    ee=cdms2.createRectGrid(latse,lonse)
    
    doo = data.regrid(oo,regridTool=regridTool)
    doe = data.regrid(oe,regridTool=regridTool)
    deo = data.regrid(eo,regridTool=regridTool)
    dee = data.regrid(ee,regridTool=regridTool)

    out=MV2.zeros(data.shape,dtype='f')

    out[::2,::2]=doo
    out[::2,1::2]=doe
    out[1::2,::2]=deo
    out[1::2,1::2]=dee

    out.id=data.id
    out.setAxisList((lats,lons))

    return out
Example #4
def coarsenVars( filen, newfilen ):
    """Reads a NetCDF file, filen, and writes a new file, newfilen, containing all its variables,
    but regridded in its horizontal axes to a 12x14 (15 degree) lat-lon grid.
    """
    latbnds = numpy.linspace(-90,90,nlat)
    lat = 0.5*( latbnds[1:]+latbnds[:-1] )
    lataxis=cdms2.createAxis( lat, id='lat' )
    lonbnds = numpy.linspace(0,360,nlon)
    lon = 0.5*( lonbnds[1:]+lonbnds[:-1] )
    lonaxis=cdms2.createAxis( lon, id='lon' )
    newgrid=cdms2.createRectGrid(lataxis, lonaxis)

    f = cdms2.open(filen)
    nf = cdms2.open(newfilen,'w')
    for varn in f.variables.keys():
        print "working on",varn
        var = f(varn)
        if var.getGrid() is None:
            print varn,"has no grid!"
            continue
        newvar = var.regrid(newgrid)   # works even if var has levels
        newvar.id = var.id         # before, id was var.id+'_CdmaRegrid'
        nf.write(newvar)
    for attr in f.attributes.keys():
        setattr(nf,attr,getattr(f,attr))
    if hasattr(nf,'history'):
        nf.history = nf.history + '; some variables deleted'
    f.close()
    nf.close()
Example #5
def reference_solutions(container_type, gridtype):
    """Generate reference solutions in the required container."""
    container_type = container_type.lower()
    if container_type not in ("standard", "iris", "cdms"):
        raise ValueError("unknown container type: " "'{!s}'".format(container_type))
    reference = __read_reference_solutions()
    if container_type == "standard":
        # Reference solution already in numpy arrays.
        return reference
    # Generate coordinate dimensions for meta-data interfaces.
    lons = np.arange(0, 360, 2.5)
    lats = np.linspace(90, -90, 73)
    if container_type == "cdms":
        # Solution in cdms2 variables.
        try:
            londim = cdms2.createAxis(lons, id="longitude")
            londim.designateLongitude()
            latdim = cdms2.createAxis(lats, id="latitude")
            latdim.designateLatitude()
            for name in reference.keys():
                reference[name] = cdms2.createVariable(reference[name], axes=[latdim, londim], id=name)
        except NameError:
            raise ValueError("cannot use container 'cdms' without cdms2")
    elif container_type == "iris":
        # Solution in iris cubes.
        try:
            londim = DimCoord(lons, standard_name="longitude", units="degrees_east")
            latdim = DimCoord(lats, standard_name="latitude", units="degrees_north")
            coords = zip((latdim, londim), (0, 1))
            for name in reference.keys():
                reference[name] = Cube(reference[name], dim_coords_and_dims=coords, long_name=name)
        except NameError:
            raise ValueError("cannot use container 'iris' without iris")
    return reference
Example #6
  def __init__(self,weightFile,toRegularGrid=True):
    if isinstance(weightFile,str):
      if not os.path.exists(weightFile):
        raise Exception("WeightFile %s does not exist" % weightFile)
      wFile=cdms2.open(weightFile)
    else:
      wFile = weightFile
    self.S=wFile("S").filled()
    self.row=wFile("row").filled()-1
    self.col=wFile("col").filled()-1
    self.frac_b=wFile("frac_b").filled()
    self.mask_b=numpy.logical_not(wFile("mask_b").filled())
    self.n_s=self.S.shape[0]
    self.n_b=self.frac_b.shape[0]
    self.method = wFile.map_method
    self.regular=toRegularGrid
    if toRegularGrid:
      self.lats=cdms2.createAxis(sorted(set(wFile("yc_b").tolist())))
      self.lats.designateLatitude()
      self.lats.units="degrees_north"
      self.lats.setBounds(numpy.array(sorted(set(wFile("yv_b").ravel().tolist()))))
      self.lats.id="latitude"
      self.lons=cdms2.createAxis(sorted(set(wFile("xc_b").tolist())))
      self.lons.designateLongitude()
      self.lons.units="degrees_east"
      self.lons.setBounds(numpy.array(sorted(set(wFile("xv_b").ravel().tolist()))))
      self.lons.id="longitude"
    else:
      self.yc_b=wFile("yc_b")
      self.xc_b=wFile("xc_b")
      self.yv_b=wFile("yv_b")
      self.xv_b=wFile("xv_b")

    if isinstance(weightFile,str):
        wFile.close()
Example #7
def makeGrid(xstart=0, xend=359, xstep=1, ystart=-90, yend=89, ystep=1):

    lon_bnds=[]
    lon=[]
    for ii in numpy.arange(xstart, xend, xstep):
        lon_bnds.append( [ii, ii+xstep] )
        lon.append(ii+0.5*xstep)
    lon_bnds=numpy.array(lon_bnds)
    lon=numpy.array(lon)

    lat_bnds=[]
    lat=[]
    for ii in numpy.arange(ystart, yend, ystep):
        lat_bnds.append([ii, ii+ystep])
        lat.append(ii+0.5*ystep)
    lat_bnds=numpy.array(lat_bnds)
    lat=numpy.array(lat)

    latAxis = cdms2.createAxis(lat, lat_bnds)
    latAxis.designateLatitude(True)
    latAxis.units='degrees_north'
    latAxis.long_name='Latitude'
    latAxis.id='latitude'

    lonAxis = cdms2.createAxis(lon, lon_bnds)
    lonAxis.designateLongitude(True, 360.0)
    lonAxis.units='degrees_east'
    lonAxis.id='longitude'
    lonAxis.long_name='Longitude'

    return((cdms2.createGenericGrid(latAxis, lonAxis, lat_bnds, lon_bnds), latAxis, lonAxis, lat_bnds, lon_bnds))
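For reference, a hypothetical call unpacking the tuple returned above (the step arguments are illustrative; numpy and cdms2 are assumed to be imported as in the snippet):

grid, latAxis, lonAxis, lat_bnds, lon_bnds = makeGrid(xstep=2.0, ystep=2.0)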
Example #8
def _extremum_(func,ctime,i0,i,var,spline):
    """Extremum possibly using splines"""
    nt = len(var)
    if spline and nt >= 4: # and i != 0 and i != (nt-1)
        if i == 0:
            ifirst, ilast = 0, 4
        elif i == nt-1:
            ifirst, ilast = nt-4, nt
        else:
            icenter = i - int(var[i-1] > var[i+1])
            ifirst = max(icenter-1, 0)
            ilast = ifirst + 4
            if ilast > nt:
                ilast -= 1
                ifirst -= 1
        mn_units = 'minutes since %s'%ctime[i0+ifirst]
        old_rts = cdms2.createAxis(N.asarray([ct.torel(mn_units).value for ct in ctime[i0+ifirst:i0+ilast]],dtype='d'))
        old_var = MV2.array(var[ifirst:ilast], axes=[old_rts], copyaxes=0)
        mn_rts =  cdms2.createAxis(N.arange(int(old_rts[-1]+1),dtype='d'))
        mn_var = interp1d(old_var, mn_rts, method='cubic')
        del old_var, old_rts
#       mn_var = spline_interpolate(old_rts,var[i-1:i+2],mn_rts)
#       mn_var = splev(mn_rts, splrep(old_rts,var[ifirst:ilast]))
        mn_i = func(mn_var)
        val = mn_var[mn_i]
        del mn_var
        this_ctime = cdtime.reltime(mn_i,mn_units).tocomp()
    else:
        this_ctime = ctime[i0+i]
        val = var[i]
    return val,this_ctime
Example #9
def reference_solution(container_type):
    """Generate a reference field and a corresponding EOF solution.
    
    **Argument:**
    
    *container_type*
        The type of the solution containers. Either 'numpy' for
        :py:mod:`numpy` arrays or 'cdms2' for :py:mod:`cdms2`.
    
    """
    sf, eofs, pcs = _construct_reference()
    if container_type.lower() == 'numpy':
        # Return the solution as-is for numpy containers.
        return sf, eofs, pcs
    # Create meta-data for cdms2 containers.
    try:
        time = cdms2.createAxis(np.arange(100), id='time')
        time.designateTime()
        time.units = 'days since 2011-01-01 00:00:0.0'
        longitude = cdms2.createAxis(np.arange(0., 360., 360./225.),
                id='longitude')
        longitude.designateLongitude()
        eof = cdms2.createAxis(range(2), id='eof')
        eof.long_name = 'eof number'
        sf = cdms2.createVariable(sf, axes=[time,longitude], id='sf')
        eofs = cdms2.createVariable(eofs, axes=[eof,longitude], id='eofs')
        pcs = cdms2.createVariable(pcs, axes=[time,eof], id='pcs')
    except NameError:
        raise ValueError("can't create cdms2 containers without cdms2")
    return sf, eofs, pcs
Example #10
def makeDummyNC(filename):
    ''' This makes some dummy data for use in test routines '''
    f=cdms2.open(filename,'w')
    nx,ny,nz,nt=30,12,4,12
    lat=np.linspace(-30,30,ny)
    lon=np.linspace(0,330,nx)
    z=np.linspace(1,4,nz)
    t=np.linspace(1,14,nt)
    tb=np.array([[ti,ti+1] for ti in t])
    tax=cdms2.createAxis(t)
    tax.id='time'
    tax.units='days since 2013-1-1'
    tax.setBounds(tb)
    xax=cdms2.createAxis(lon)
    xax.id='longitude'
    xax.units='degrees_east'
    yax=cdms2.createAxis(lat)
    yax.id='latitude'
    yax.units='degrees_north'
    zax=cdms2.createAxis(z)
    zax.id='levels'
    d=np.outer(np.sin(lon),np.cos(lat))
    data=np.resize(d,(nx,ny,nz,nt))
    dv=cdms2.createVariable(data,axes=[xax,yax,zax,tax],fill_value=-999.)
    dv.long_name='Dummy Data'
    dv.units='Kelvin'
    shape=dv.shape
    f.write(dv)
    f.close()
    return shape
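A short usage sketch for the helper above, assuming cdms2 is importable; the file name is arbitrary:

import cdms2
shape = makeDummyNC('dummy_test.nc')        # writes the dummy variable to disk
f = cdms2.open('dummy_test.nc')
print(list(f.variables.keys()), shape)      # inspect what was written
f.close()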
Example #11
    def decorate(self, output, ynm, xnm):
        x = cdms2.createAxis(list(range(len(xnm))))
        y = cdms2.createAxis(list(range(len(ynm))))

        try:
            del(x.name)
            del(y.name)
            del(output.name)
        except Exception:
            pass

        nm = '___'.join(xnm)
        x.id = nm
        dic = {}
        for i in range(len(xnm)):
            dic[i] = xnm[i]
        x.names = repr(dic)
        nm = '___'.join(ynm)
        y.id = nm
        y.original_id = output.getAxis(0,).id
        output.setAxis(0, y)
        dic = {}
        for i in range(len(ynm)):
            dic[i] = ynm[i]
        y.names = repr(dic)
        x.original_id = output.getAxis(1,).id
        output.setAxis(1, x)

        return
Example #12
    def post(self,fetched,slab,axes,specifications,confined_by,aux,axismap):
        ''' Post processing retouches the bounds and later will deal with the mask'''
        import cdms2 as cdms
        fetched=cdms.createVariable(fetched,copy=1)
        faxes=fetched.getAxisList()
        a=None
        for i in range(len(faxes)):
            if confined_by[i] is self:
                newaxvals=[]
                bounds=[]
                a=None
                sh=list(fetched.shape)
                sh[i]=1
                for l in self.aux[i]:
                    try:
                        tmp=fetched(**{faxes[i].id:(l,l)})
                        ax=tmp.getAxis(i)
                        #print ax
                        newaxvals.append(ax[0])
                        if ax.getBounds() is not None:
                            bounds.append(ax.getBounds()[0])
                        else:
                            bounds=None
                    except Exception,err:
                        #print 'err:',err,'match:',self.match
                        if self.match==1:
                            raise Exception,'Error axis value :'+str(l)+' was requested but is not present in slab\n(more missing might exists)'
                        elif self.match==0:
                            tmp=MV2.ones(sh,typecode=MV2.float)
                            tmp=MV2.masked_equal(tmp,1)
                            if type(l)==type(cdtime.comptime(1999)) or type(l)==type(cdtime.reltime(0,'days since 1999')) or type(l)==type(''):
                                if type(l)!=type(''):
                                    newaxvals.append(l.torel(faxes[i].units).value)
                                else:
                                    newaxvals.append(cdtime.s2r(l,faxes[i].units).value)
                            else:
                                newaxvals.append(l)
                            if bounds is not None:
                                bounds.append([ax[-1]-1.,ax[-1]+1])
                        else:
                            tmp=None
                    if not tmp is None:
                        if a is None:
                            a=tmp
                        elif not tmp is None:
                            a=MV2.concatenate((a,tmp),i)
                if bounds is not None:
                    newax=cdms.createAxis(numpy.array(newaxvals),bounds=numpy.array(bounds),id=ax.id)
                else:
                    newax=cdms.createAxis(numpy.array(newaxvals),id=ax.id)
                for att in faxes[i].attributes.keys():
                    setattr(newax,att,faxes[i].attributes.get(att))
                for j in range(len(fetched.shape)):
                    if j==i:
                        a.setAxis(i,newax)
                    else:
                        a.setAxis(j,faxes[j])
                fetched=a.astype(fetched.dtype.char)
                faxes=fetched.getAxisList()
Example #13
    def __init__(self,stat,components,components_names,time_domain,time_domain_names,id=None):
        # Set the id
        if id is None:
            self.id=""
        else:
            self.id=id

        # Check components
        if not isinstance(components,(list,tuple)):
            raise StatisticError,"components argument must be a list"
        else:
            if  isinstance(components_names[0],str):
                self.components={}
                for i in range(len(components)):
                    self.components[components[i]]=components_names[i]
            elif isinstance(components_names,dict):
                self.components=components
            else:
               raise StatisticError,"components_names argument must be a list"

        # Check time_domain
        if not isinstance(time_domain,(list,tuple)):
            raise StatisticError,"time_domain argument must be a list"
        else:
            if isinstance(time_domain_names[0],str):
                self.time_domain={}
                for i in range(len(time_domain)):
                    self.time_domain[time_domain[i]]=time_domain_names[i]
            elif isinstance(time_domain_names,dict):
                self.time_domain=time_domain
            else:
               raise StatisticError,"time_domain_names argument must be a list" 

        # check stat array
        if isinstance (stat,numpy.ndarray ) or numpy.ma.isMA(stat):
            s=stat.shape
            if len(s)!=2:
                raise StatisticError,"stat argument must be 2D"
            nc=len(components)
            nt=len(time_domain)
            if nc!=s[0]:
                raise StatisticError,"You claim "+str(nc)+" components but your stat shows:"+str(s[0])
            if nt!=s[1]:
                raise StatisticError,"You claim "+str(nt)+" time_domain but your stat shows:"+str(s[1])
            self.stat=cdms2.createVariable(stat,copy=0,id=str(self.id))
            autobounds=cdms2.getAutoBounds()
            cdms2.setAutoBounds('off')
            compaxis=cdms2.createAxis(components)
            compaxis.id='component'
            timaxis=cdms2.createAxis(time_domain)
            timaxis.id='time_domain'
            self.stat.setAxis(0,compaxis)
            self.stat.setAxis(1,timaxis)
            self.stat.components=repr(self.components)
            self.stat.time_domain=repr(self.time_domain)
            cdms2.setAutoBounds(autobounds)
        else:
            raise StatisticError,"stat argument must be A numpy array a MA or a MV2"
Example #14
    def setUp(self):
        sortedArray    = N.array([-2.45, 0.45, 1.345, 8.443, 20.55, 40.33])
        self.unsortedArray  = N.array([10, 40, 30, 20])
        longitudeArray = N.array([280.5, 290.995, 310.2, 340.3,
                                       357.0, 3.4, 5.53 , 18.63, 25.4])

        self.sortedAxis       = cdms.createAxis(sortedArray)
        self.unsortedAxis     = cdms.createAxis(self.unsortedArray)
        self.longitudeAxis    = cdms.createAxis(longitudeArray)
Example #15
 def setUp(self):
     a = N.array([[[17, 18, 19, 20],[21,22,23,24],[25, 26, 27, 28],[29, 30, 31, 32]]])
     self.temp = cdms.createVariable(a,id='temp')
     self.axisX = cdms.createAxis(N.array([10,20,30,40]))
     self.axisX.id = 'x'
     self.axisY = cdms.createAxis(N.array([45,55,65,75]))
     self.axisY.id = 'y'
     self.axisZ = cdms.createAxis(N.array([1]))
     self.axisZ.id = 'z'
     self.temp.setAxisList([self.axisZ, self.axisX, self.axisY])
Example #16
 def setUp(self):
     a = N.array([[[29, 30, 31, 32]]])
     self.temp = cdms.createVariable(a,id='temp')
     self.axisX = cdms.createAxis(N.array([10,20,30,40]))
     self.axisX.id = 'x'
     self.axisY = cdms.createAxis(N.array([45]))
     self.axisY.id = 'y'
     self.axisZ = cdms.createAxis(N.array([1]))
     self.axisZ.id = 'z'
     self.temp.setAxisList([self.axisZ, self.axisY, self.axisX])
Example #17
def _wrap_cdms(solution, neofs, time_units):
    try:
        import cdms2
    except ImportError:
        raise ValueError("cannot use container 'cdms' without "
                         "the cdms2 module")
    time_dim = cdms2.createAxis(solution['time'], id='time')
    time_dim.designateTime()
    time_dim.units = time_units
    lat_dim = cdms2.createAxis(solution['latitude'], id='latitude')
    lat_dim.designateLatitude()
    lon_dim = cdms2.createAxis(solution['longitude'], id='longitude')
    lon_dim.designateLongitude()
    eof_dim = cdms2.createAxis(np.arange(1, neofs+1), id='eof')
    eof_dim.long_name = 'eof_number'
    solution['sst'] = cdms2.createVariable(
        solution['sst'],
        axes=[time_dim, lat_dim, lon_dim],
        id='sst')
    solution['eigenvalues'] = cdms2.createVariable(
        solution['eigenvalues'],
        axes=[eof_dim],
        id='eigenvalues')
    solution['eofs'] = cdms2.createVariable(
        solution['eofs'],
        axes=[eof_dim, lat_dim, lon_dim],
        id='eofs')
    solution['pcs'] = cdms2.createVariable(
        solution['pcs'],
        axes=[time_dim, eof_dim],
        id='pcs')
    solution['variance'] = cdms2.createVariable(
        solution['variance'],
        axes=[eof_dim],
        id='variance')
    solution['eofscor'] = cdms2.createVariable(
        solution['eofscor'],
        axes=[eof_dim, lat_dim, lon_dim],
        id='eofscor')
    solution['eofscov'] = cdms2.createVariable(
        solution['eofscov'],
        axes=[eof_dim, lat_dim, lon_dim],
        id='eofscov')
    solution['errors'] = cdms2.createVariable(
        solution['errors'],
        axes=[eof_dim],
        id='errors')
    solution['scaled_errors'] = cdms2.createVariable(
        solution['scaled_errors'],
        axes=[eof_dim],
        id='scaled_errors')
    solution['rcon'] = cdms2.createVariable(
        solution['rcon'],
        axes=[time_dim, lat_dim, lon_dim],
        id='reconstructed_sst')
Example #18
def test_isAxisRegularlySpacedSubsetOf():
    axisX = cdms.createAxis(N.array([10, 20, 30, 40]))
    axisX.id = 'x'
    axisX.units = 'm'

    axisX2 = cdms.createAxis(N.array([10, 20]))
    axisX2.units = 'm'
    axisX2.id = 'x2'

    result = axis_utils.isAxisRegularlySpacedSubsetOf(axisX,axisX2)
    assert(result == len(axisX2)/ len(axisX))
Example #19
def test_isUniformlySpaced():
    axisX = cdms.createAxis(N.array([10, 20, 30, 40]))
    assert(axis_utils.isUniformlySpaced(axisX) == True)

    axisX = cdms.createAxis(N.array([10]))
    assert(axis_utils.isUniformlySpaced(axisX) == False)

    axisX = cdms.createAxis(N.array([10, 11, 13]))
    assert(axis_utils.isUniformlySpaced(axisX) == False)

    axisX = cdms.createAxis(N.array([10, 20.0, 30, 40.0]))
    assert(axis_utils.isUniformlySpaced(axisX) == True)
Example #20
def _wrap_cdms(reference, lats, lons):
    try:
        import cdms2
    except ImportError:
        raise ValueError("cannot use container 'cdms' without cdms2")
    londim = cdms2.createAxis(lons, id='longitude')
    londim.designateLongitude()
    latdim = cdms2.createAxis(lats, id='latitude')
    latdim.designateLatitude()
    for name in reference.keys():
        reference[name] = cdms2.createVariable(reference[name],
                                               axes=[latdim, londim],
                                               id=name)
Example #21
    def eeofs(self, eeofscaling=0, neeofs=None):
        """Extended Emipirical orthogonal functions (EEOFs).

        **Optional arguments:**

        *eeofscaling*
            Sets the scaling of the EEOFs. The following values are
            accepted:

            * *0* : Un-scaled EOFs (default).
            * *1* : EEOFs are divided by the square-root of their
              eigenvalues.
            * *2* : EEOFs are multiplied by the square-root of their
              eigenvalues.

        *neeofs*
            Number of EEOFs to return. Defaults to all EEOFs. If the
            number of EEOFs requested is more than the number that are
            available, then all available EEOFs will be returned.

        **Returns:**

        *eeofs*
           A `cdms2` variable containing the ordered EEOFs. The EEOFs are
           numbered from 0 to *neeofs* - 1.

        **Examples:**

        All EEOFs with no scaling::

            eeofs = solver.eeofs()

        First 3 EEOFs with scaling applied::

            eeofs = solver.eeofs(neeofs=3, eeofscaling=1)

        """
        eeofs = self._solver.eeofs(eeofscaling, neeofs)
        eeofs.fill_value = self._missing_value
        eeofax = cdms2.createAxis(range(len(eeofs)), id='eeof')
        eeofax.long_name = 'eeof_number'

        lagax = cdms2.createAxis(range(self.window), id='lag')

        axlist = [eeofax, lagax] + self._channels
        eeofs = cdms2.createVariable(eeofs,
                                    id='eeofs',
                                    axes=axlist,
                                    fill_value=self._missing_value)
        eeofs.long_name = 'extended_empirical_orthogonal_functions'
        return eeofs
Example #22
    def setupVariableAxes(self):
        """ Iterate through the variable's axes and create and initialize an Axis
        object for each axis.
        """
        if self.var is None:
            return
        
        if (self.axisList is None):
            if self.cdmsFile is None:
                self.axisList = self.var.getAxisList()
                self.grid=self.var.getGrid()
            else:
                try:
                    self.axisList = self.cdmsFile[self.var].getAxisList()
                    self.grid=self.cdmsFile[self.var].getGrid()
                except:
                    ## Ok this is probably a simple axis
                    self.axisList=[self.cdmsFile[self.var],]
                    self.grid=None
            self.axisOrder = range(len(self.axisList))

        self.clear()            
        self.setAxesNames()
        
        # Iterate through the variables axes & init each axis widget
        axisIndex = 0
        didVirtual=False
        for axis, axisName in zip(self.axisList, self.axesNames):
            virtual = 0
            if  isinstance(self.grid,(cdms2.hgrid.AbstractHorizontalGrid,cdms2.gengrid.AbstractHorizontalGrid)) and axis in self.grid.getLatitude().getAxisList():
                    virtual = -1
                    if didVirtual is False:
                        didVirtual=True
                        minLat,maxLat=genutil.minmax(self.grid.getLatitude())
                        minLon,maxLon=genutil.minmax(self.grid.getLongitude())
                        vLat=cdms2.createAxis(numpy.arange(numpy.floor(minLat),numpy.ceil(maxLat)+.1,.1),id="latitude")
                        vLon=cdms2.createAxis(numpy.arange(numpy.floor(minLon),numpy.ceil(maxLon)+.1,.1),id="longitude")
                        virtualLatWidget=QAxis(vLat, "latitude", axisIndex, self,virtual=1)
                        virtualLonWidget=QAxis(vLon, "longitude", axisIndex, self,virtual=2)
                        self.axisWidgets.append(virtualLatWidget)
                        self.axisWidgets.append(virtualLonWidget)
            # Create the axis widget
            # Virtual: 0: Not virtual
            #         -1: options to switch to virtual
            #          1: is latitude virtual needs option to switch back
            #          2: is longitude virtual needs option to switch back
            axisWidget = QAxis(axis, axisName, axisIndex, self,virtual=virtual)
            self.axisWidgets.append(axisWidget)
            axisIndex += 1
        self.gridLayout.setRowStretch(self.gridLayout.rowCount(), 1)
Example #23
def fixInterpAxis(var):
    """
    Documentation for fixInterpAxis(var):
    -------
    The fixInterpAxis(var) function corrects temporal axis so that genutil.statistics.linearregression
    returns coefficients which are unscaled by the time axis

    Author: Paul J. Durack : [email protected]

    Usage:
    ------
        >>> from durolib import fixInterpAxis
        >>> (slope),(slope_err) = linearregression(fixInterpAxis(var),error=1,nointercept=1)

    Notes:
    -----
        ...
    """
    tind = range(shape(var)[0]) ; # Assume time axis is dimension 0
    t = cdm.createAxis(tind,id='time')
    t.units = 'years since 0-01-01 0:0:0.0'
    t.calendar = var.getTime().calendar
    cdu.times.setTimeBoundsYearly(t) ; # Explicitly set time bounds to yearly
    var.setAxis(0,t)
    return var
Example #24
    def eigenvalues(self, neigs=None):
        """Eigenvalues (decreasing variances) associated with each EOF.

        Returns the ordered eigenvalues in a :py:mod:`cdms2` variable.

        **Optional argument:**
        
        *neigs*
            Number of eigenvalues to return. Defaults to all
            eigenvalues.
        
        **Examples:**

        All eigenvalues:

        >>> lambdas = eofobj.eigenvalues()

        The first eigenvalue:

        >>> lambda1 = eofobj.eigenvalues(neigs=1)
        
        """
        lambdas = self._solver.eigenvalues(neigs=neigs)
        eofax = cdms2.createAxis(range(len(lambdas)), id="eigenvalue")
        axlist = [eofax]
        lambdas = cdms2.createVariable(lambdas, id="eigenvalues", axes=axlist)
        lambdas.name = "eigenvalues"
        lambdas.long_name = "eigenvalues"
        return lambdas
Example #25
 def _create_data(self):
     a1 = numpy.arange(10) * 18.0 - 90.
     a2 = numpy.arange(10) * 36.0 - 180.
     d = numpy.outer(numpy.sin(a1*numpy.pi/360.),
                     numpy.cos(a2*numpy.pi/360.))
     d.shape=(1,1,10,10)
     a1a = cdms.createAxis(a1)
     a1a.designateLatitude()
     a2a = cdms.createAxis(a2)
     a2a.designateLongitude()
     a3a = cdms.createAxis([1979.])
     a3a.designateTime()
     a4a = cdms.createAxis([0.5])
     a4a.designateLevel()
     data = cdms.MV2.array(d, axes=[a3a, a4a, a1a ,a2a])
     self.data = data
Example #26
    def eofsAsCovariance(self, neofs=None, pcscaling=1):
        """
        Empirical orthogonal functions (EOFs) expressed as the
        covariance between the principal component time series (PCs)
        and the time series of the `Eof` input *dataset* at each grid
        point.

        **Optional arguments:**

        *neofs*
            Number of EOFs to return. Defaults to all EOFs. If the
            number of EOFs requested is more than the number that are
            available, then all available EOFs will be returned.

        *pcscaling*
            Set the scaling of the PCs used to compute covariance. The
            following values are accepted:

            * *0* : Un-scaled PCs.
            * *1* : PCs are scaled to unit variance (divided by the
              square-root of their eigenvalue) (default).
            * *2* : PCs are multiplied by the square-root of their
              eigenvalue.

            The default is to divide PCs by the square-root of their
            eigenvalue so that the PCs are scaled to unit variance
            (option 1).

        **Returns:**

        *eofs*
           A `cdms2` variable containing the ordered EOFs. The EOFs are
           numbered from 0 to *neofs* - 1.

        **Examples:**

        All EOFs::

            eofs = solver.eofsAsCovariance()

        The leading EOF::

            eof1 = solver.eofsAsCovariance(neofs=1)

        The leading EOF using un-scaled PCs::

            eof1 = solver.eofsAsCovariance(neofs=1, pcscaling=0)

        """
        eofs = self._solver.eofsAsCovariance(neofs, pcscaling)
        eofs.fill_value = self._missing_value
        eofax = cdms2.createAxis(range(len(eofs)), id='eof')
        axlist = [eofax] + self._channels
        eofs = cdms2.createVariable(eofs,
                                    id='eofs_cov',
                                    axes=axlist,
                                    fill_value=self._missing_value)
        eofs.long_name = 'covariance_between_pcs_and_{:s}'.format(
            self._dataset_name)
        return eofs
Example #27
def scrap(data, axis=0):
    originalOrder = data.getOrder(ids=True)
    if axis not in ['x', 'y', 'z', 't'] and not isinstance(axis, int):
        order = "({})...".format(axis)
    else:
        order = "{}...".format(axis)
    new = data(order=order)
    axes = new.getAxisList()  # Save for later
    new = MV2.array(new.asma())  # lose dims
    for i in range(new.shape[0] - 1, -1, -1):
        tmp = new[i]
        if not isinstance(tmp, (float, numpy.floating)) and tmp.mask.all():
            a = new[:i]
            b = new[i + 1:]
            if b.shape[0] == 0:
                new = a
            else:
                new = MV2.concatenate((a, b))
    newAxis = []
    for v in new.getAxis(0):
        newAxis.append(axes[0][int(v)])
    ax = cdms2.createAxis(newAxis, id=axes[0].id)
    axes[0] = ax
    new.setAxisList(axes)
    return new(order=originalOrder)
Example #28
    def eigenvalues(self, neigs=None):
        """Eigenvalues (decreasing variances) associated with each EOF.

        **Optional argument:**

        *neigs*
            Number of eigenvalues to return. Defaults to all
            eigenvalues.If the number of eigenvalues requested is more
            than the number that are available, then all available
            eigenvalues will be returned.

        **Returns:**

        *eigenvalues*
            A `cdms2` variable containing the eigenvalues arranged
            largest to smallest.

        **Examples:**

        All eigenvalues::

            eigenvalues = solver.eigenvalues()

        The first eigenvalue::

            eigenvalue1 = solver.eigenvalues(neigs=1)

        """
        lambdas = self._solver.eigenvalues(neigs=neigs)
        eofax = cdms2.createAxis(range(len(lambdas)), id='eigenvalue')
        eofax.long_name = 'eigenvalue_number'
        axlist = [eofax]
        lambdas = cdms2.createVariable(lambdas, id='eigenvalues', axes=axlist)
        lambdas.long_name = 'eigenvalues'
        return lambdas
Example #29
def create_axis(values, atype='-', **atts):
    """Quickly create a :mod:`cdms2` axis

    :Params:

        - **values**: Numerical values.
        - **atype**, optional: axis type within 'x','y','z','t','-' [default: '-']
        - Other keywords are passed as attributes to the axis.

    :Example:

        >>> lon = create_axis(N.arange(-10., 0, 2), 'x')
        >>> lon = create_axis((-10., 0, 2), 't', id='temps', units='seconds since 2000')
        >>>
    """
    from vacumm.misc import cp_atts
    if isinstance(values, tuple) and len(values) < 4:
        values = N.arange(*values, **{'dtype':'d'})
    if cdms2.isVariable(values):
        for item in values.attributes.items():
            atts.setdefault(*item)
        values = values.asma()
    if not isaxis(values):
        axis = cdms2.createAxis(values)
    else:
        axis = values
    for att,val in atts.items():
        setattr(axis, att, val)
    axis.axis = atype.upper()
    check_axis(axis)
    if axis.axis == '-':
        del axis.axis
    return axis
Example #30
    def varianceFraction(self, neigs=None):
        """Fractional EOF variances.
        
        The fraction of the total variance explained by each EOF, a
        value between 0 and 1 inclusive, in a :py:mod:`cdms2` variable.

        **Optional argument:**

        *neigs*
            Number of eigenvalues to return the fractional variance for.
            Defaults to all eigenvalues.
        
        **Examples:**

        The fractional variance represented by each eigenvalue:

        >>> varfrac = eofobj.varianceFraction()

        The fractional variance represented by the first 3 eigenvalues:

        >>> varfrac = eofobj.varianceFraction(neigs=3)
        
        """
        vfrac = self._solver.varianceFraction(neigs=neigs)
        eofax = cdms2.createAxis(range(len(vfrac)), id="eigenvalue")
        axlist = [eofax]
        vfrac = cdms2.createVariable(vfrac, id="variance", axes=axlist)
        vfrac.name = "variance_fraction"
        vfrac.long_name = "variance fraction"
        return vfrac
Example #31
binmids = x1 + width / 2.
cutoff = int(len(binmids) / 2)  # [:cutoff] = ascent; [cutoff:-1] = descent; [-1] = land

# Load in the Zelinka et al 2012 kernels:
f = cdms.open(datadir + 'cloud_kernels2.nc')
LWkernel = f('LWkernel')
SWkernel = f('SWkernel')
f.close()
LWkernel = MV.masked_where(np.isnan(LWkernel), LWkernel)
SWkernel = MV.masked_where(np.isnan(SWkernel), SWkernel)
albcs = np.arange(
    0.0, 1.5, 0.5)  # the clear-sky albedos over which the kernel is computed

# Define the cloud kernel axis attributes
lats = cdms.createAxis(LWkernel.getLatitude()[:])
lats.id = "lat"
lats.units = "degrees_N"
lats.designateLatitude()
lons = cdms.createAxis(np.arange(1.25, 360, 2.5))
lons.id = "lon"
lons.units = "degrees_E"
lons.designateLongitude()
kern_grid = cdms.createGenericGrid(lats, lons)
kern_grid.getLatitude().id = 'lat'
kern_grid.getLongitude().id = 'lon'

##########################################################
##### Load in ISCCP HGG clisccp climo annual cycle  ######
##########################################################
f = cdms.open(datadir + 'AC_clisccp_ISCCP_HGG_198301-200812.nc', 'r')
Example #32
grid.setBounds(latbounds, lonbounds)
nlatb, nlonb = grid.getBounds()
if not numpy.ma.allequal(latbounds, nlatb): markError('Grid setBounds')
grid.setType('uniform')
if grid.getType() != 'uniform': markError('Grid setType', grid.getType())

yy = numpy.ma.reshape(numpy.ma.arange(272.0), tv.shape)
tv.assignValue(yy)
if not numpy.ma.allequal(tv, yy): markError('TV assignValue')
tv3 = tv[0:-1]
if tv3.shape != (1, 8, 17): markError('TV slice, negative index', tv3.shape)

# Create a transient variable from scratch
oldlat = tv.getLatitude()
oldBounds = oldlat.getBounds()
newlat = cdms2.createAxis(numpy.ma.array(oldlat[:]), numpy.ma.array(oldBounds))
b = newlat.getBounds()
b[0, 0] = -48.
newlat.setBounds(b)

tv4 = cdms2.createVariable(tv[:], copy=1, fill_value=255.)
tv4[0, 1:4] = 20.0

if tv[:, ::-1, :].shape != tv.shape: markError("Reversing axis direction")

# Test asVariable
www = cdms2.asVariable(tv4)
if www is not tv4: markError("asVariable failed, transient case.")
www = cdms2.asVariable(v, 0)
if www is not v: markError("asVariable failed, transient case.")
www = cdms2.asVariable([1., 2., 3.])
Example #33
    def testAxisDetection(self):
        val = [1, 2, 3]
        a = cdms2.createAxis(val)

        # First let's make sure it does not detect anything
        self.assertFalse(a.isLatitude())
        self.assertFalse(a.isLongitude())
        self.assertFalse(a.isLevel())
        self.assertFalse(a.isTime())

        # Now quick tests for making it latitude
        for u in [
                "DEGREESN", "  deGREEn  ", "degrees_north", "degree_north",
                "degree_n", "degrees_n", "degreen", "degreesn"
        ]:
            a.units = u
            self.assertTrue(a.isLatitude())
            a.units = ""
            self.assertFalse(a.isLatitude())
        for i in ["lat", "LAT", "latitude", "latituDE"]:
            a.id = i
            self.assertTrue(a.isLatitude())
            a.id = "axis"
            self.assertFalse(a.isLatitude())
        a.axis = "Y"
        self.assertTrue(a.isLatitude())
        del (a.axis)
        self.assertFalse(a.isLatitude())
        # Now quick tests for making it longitude
        for u in [
                "DEGREESe", "  deGREEe  ", "degrees_east", "degree_east",
                "degree_e", "degrees_e", "degreee", "degreese"
        ]:
            a.units = u
            self.assertTrue(a.isLongitude())
            a.units = ""
            self.assertFalse(a.isLongitude())
        for i in ["lon", "LON", "longitude", "lOngituDE"]:
            a.id = i
            self.assertTrue(a.isLongitude())
            a.id = "axis"
            self.assertFalse(a.isLongitude())
        a.axis = "X"
        self.assertTrue(a.isLongitude())
        del (a.axis)
        self.assertFalse(a.isLongitude())
        # Now quick tests for making it level
        try:
            import genutil
            has_genutil = True
        except BaseException:
            has_genutil = False
        if has_genutil:
            for u in [
                    "Pa", "hPa", "psi", "N/m2", "N*m-2", "kg*m-1*s-2", "atm",
                    "bar", "torr"
            ]:
                a.units = u
                self.assertTrue(a.isLevel())
                a.units = ""
                self.assertFalse(a.isLevel())
        for i in ["lev", "LEV", "level", "lEvEL", "depth", "  depth"]:
            a.id = i
            self.assertTrue(a.isLevel())
            a.id = "axis"
            self.assertFalse(a.isLevel())
        a.axis = "Z"
        self.assertTrue(a.isLevel())
        del (a.axis)
        self.assertFalse(a.isLevel())
        a.positive = "up"
        self.assertTrue(a.isLevel())
        a.positive = "positive"
        self.assertFalse(a.isLevel())
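As a complement to the heuristic checks exercised above, a minimal sketch (assuming cdms2 is importable) showing that explicit designation marks the axis regardless of its id or units:

import cdms2
lat = cdms2.createAxis([-30., 0., 30.])
lat.designateLatitude()        # explicit designation, no unit/id guessing needed
assert lat.isLatitude()
lat.units = 'degrees_north'    # conventional units, as in the test above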
Example #34
            time = d.getTime() ; # Assumes variable is named 'time', for the demo file this is named 'months'
            #time = d.getAxis(0) ; # Rather use a file dimension-based load

            #%% Initialize and run CMOR
            print 'Start CMORizing..'
            # For more information see https://cmor.llnl.gov/mydoc_cmor3_api/
            cmor.setup(inpath='./',netcdf_file_action=cmor.CMOR_REPLACE_4) #,logfile='cmorLog.txt')
            cmor.dataset_json('tmp.json')
            cmor.load_table(cmorTable)
            #cmor.set_cur_dataset_attribute('history',f.history) ; # Force input file attribute as history
            # Create axes based on variable
            if var in ['q_10','t_10']:
                # Reset height2m coordinate value to 10m
                heightAx = {'table_entry': 'height2m',
                            'units': 'm',
                            'coord_vals': cdm.createAxis([10.],id='height')
                            }
            elif var in ['u_10','v_10']:
                # Use height10m coordinate entry
                heightAx = {'table_entry': 'height10m',
                            'units': 'm',
                            'coord_vals': cdm.createAxis([10.],id='height')
                            }
            else:
                # Use height2m default value
                heightAx = {'table_entry': 'height2m',
                            'units': 'm',
                            'coord_vals': cdm.createAxis([2.],id='height')
                            }
            # Create time based on table
            if key == 'A3hrPt':
Example #35
def readzfile(fileT, fileS, compute_gamma, targetGridFile, outdir):
    """ Reads a netcdf file in so and in thetao, computes gamma neutral at each point and each time step, makes annual means, interpolates horizonally to rregular grid, makes zonal means and writes to new file
    inputs:
    - fileT: netcdf file for thetao variable (function of time, depth, latitude, longitude)
    - fileS: netcdf file for so variable, same dimensions as fileT
    - compute_gamma: boolean (e.g. don't compute gamma for historicalNat)
    - targetGridFile: netcdf file with target horizontal grid for interpolation + basinmask
    - outdir: directory to write new file in
    """

    # == Prepare work ==

    t0 = timc.time()
    # Open files to read
    ft = cdm.open(fileT)
    fs = cdm.open(fileS)
    thetao_h = ft('thetao', time=slice(1, 2))
    so_h = fs('so', time=slice(1, 10))
    print('Opening files :\n - ' + os.path.basename(fileT) + '\n - ' +
          os.path.basename(fileS))

    # Horizontal grid
    ingrid = thetao_h.getGrid()
    # Get grid objects
    axesList = thetao_h.getAxisList()

    depth = thetao_h.getLevel()
    # Define dimensions
    lonN = thetao_h.shape[3]
    latN = thetao_h.shape[2]
    depthN = thetao_h.shape[1]
    timeax = ft.getAxis('time')
    t1 = timc.time()
    #print(t1-t0)

    thetaoLongName = thetao_h.long_name
    soLongName = so_h.long_name
    soUnits = so_h.units

    # Target horizontal grid for interp
    gridFile_f = cdm.open(targetGridFile)
    maskg = gridFile_f('basinmask3')
    outgrid = maskg.getGrid()
    maski = maskg.mask
    # Global mask
    # Regional masks
    maskAtl = maski * 1
    maskAtl[...] = True
    idxa = np.argwhere(maskg == 1).transpose()
    maskAtl[idxa[0], idxa[1]] = False
    maskPac = maski * 1
    maskPac[...] = True
    idxp = np.argwhere(maskg == 2).transpose()
    maskPac[idxp[0], idxp[1]] = False
    maskInd = maski * 1
    maskInd[...] = True
    idxi = np.argwhere(maskg == 3).transpose()
    maskInd[idxi[0], idxi[1]] = False
    loni = maskg.getLongitude()
    lati = maskg.getLatitude()
    Nii = len(loni)
    Nji = len(lati)

    gridFile_f.close()

    t2 = timc.time()
    #print(t2-t1)

    # Outfile
    fname = os.path.basename(fileS)
    split = fname.split('_')
    split[0] = 'so_thetao_gamma'
    split[1] = 'Oan'
    fname_yearly = '_'.join(split)
    outFile = outdir + fname_yearly
    outFile_f = cdm.open(outFile, 'w')

    # Valmask
    valmask = 1.e20

    # Define time chunks
    tmin = 0
    tmax = timeax.shape[
        0]  # Dimension of total time axis of infiles (total number of months)
    nyrtc = 5  # nb of years per time chunk
    tcdel = 5 * 12  # nb of months per time chunk
    tcmax = (tmax - tmin) / tcdel
    # number of time chunks

    # Initialize interpolated arrays on target grid - global and basin zonal means
    so_interp = np.ma.masked_all((nyrtc, depthN, Nji, Nii), dtype='float32')
    thetao_interp = np.ma.masked_all((nyrtc, depthN, Nji, Nii),
                                     dtype='float32')
    soa_interp, sop_interp, soi_interp, thetaoa_interp, thetaop_interp, thetaoi_interp = [
        np.ma.masked_all((nyrtc, depthN, Nji, Nii), dtype='float32')
        for _ in range(6)
    ]
    if compute_gamma:
        rhon_interp = np.ma.masked_all((nyrtc, depthN, Nji, Nii),
                                       dtype='float32')
        rhona_interp, rhonp_interp, rhoni_interp = [
            np.ma.masked_all((nyrtc, depthN, Nji, Nii), dtype='float32')
            for _ in range(3)
        ]

    # Interpolation init (regrid)
    t = timc.time()
    ESMP.ESMP_Initialize()
    regridObj = CdmsRegrid(ingrid,
                           outgrid,
                           so_interp.dtype,
                           missing=valmask,
                           regridMethod='distwgt',
                           regridTool='esmf',
                           coordSys='deg',
                           diag={},
                           periodicity=1)
    #print(timc.time()-t)

    # Define basin output axis
    basinAxis = cdm.createAxis([0, 1, 2, 3], bounds=None, id='basin')
    basinAxis.long_name = 'ocean basin index'
    basinAxis.standard_name = 'basin'
    basinAxis.units = 'basin index'
    basinAxis.units_long = '0: global_ocean 1: atlantic_ocean; 2: pacific_ocean; 3: indian_ocean'
    basinAxis.axis = 'B'

    # Create basin-zonal axes lists
    basinTimeList = [axesList[0], basinAxis]
    # time, basin
    basinAxesList = [axesList[0], basinAxis, axesList[2]]
    # time, basin, lat
    basinZAxesList = [axesList[0], basinAxis, axesList[1], axesList[2]]
    # time, basin, depth, lat

    # ====
    # == Start loop on time chunks ==
    # ====

    for tc in range(tcmax):  # Loop on time chunks
        #tc=0
        print('- time chunk: ' + str(tc))
        # read tcdel month by tcdel month to optimise memory
        trmin = tmin + tc * tcdel
        # define as function of tc and tcdel
        trmax = tmin + (tc + 1) * tcdel
        # define as function of tc and tcdel
        print('  months: ' + str(trmin) + ' ' + str(trmax))
        thetao = ft('thetao', time=slice(trmin, trmax)) - 273.15
        so = fs('so', time=slice(trmin, trmax))

        if compute_gamma:
            # Compute neutral density
            tbfrho = timc.time()
            rhon = eosNeutral(thetao, so) - 1000.
            print('  Neutral density computed in ' + str(timc.time() - tbfrho))

        # Turn into cdms variable
        time = thetao.getTime()
        newAxesList = [axesList[0], axesList[1], axesList[2], axesList[3]]
        newAxesList[0] = time
        # replace time axis
        So = cdm.createVariable(so, axes=newAxesList)
        Thetao = cdm.createVariable(thetao, axes=newAxesList)
        if compute_gamma:
            Rhon = cdm.createVariable(rhon, axes=newAxesList)

        # == Compute annual mean ==
        so_temp = np.ma.reshape(So, (nyrtc, 12, depthN, latN, lonN))
        thetao_temp = np.ma.reshape(Thetao, (nyrtc, 12, depthN, latN, lonN))
        so_yearly = np.ma.average(so_temp, axis=1)
        thetao_yearly = np.ma.average(thetao_temp, axis=1)
        if compute_gamma:
            rhon_temp = np.ma.reshape(Rhon, (nyrtc, 12, depthN, latN, lonN))
            rhon_yearly = np.ma.average(rhon_temp, axis=1)

        # Turn into cdms variable for horizontal interpolation
        so_yearly = cdm.createVariable(so_yearly)
        thetao_yearly = cdm.createVariable(thetao_yearly)
        if compute_gamma:
            rhon_yearly = cdm.createVariable(rhon_yearly)

        # Create annual time axis
        timeyr = cdm.createAxis(np.arange(trmin / 12, trmax / 12),
                                bounds=None,
                                id='time')
        timeyr.units = 'years'
        timeyr.designateTime()
        newAxesList[0] = timeyr  # replace time axis

        so_yearly.setAxisList(newAxesList)
        thetao_yearly.setAxisList(newAxesList)
        if compute_gamma:
            rhon_yearly.setAxisList(newAxesList)

        # == Interpolate onto regular grid ==
        for t in range(nyrtc):
            for ks in range(depthN):
                # Global
                so_interp[t, ks, :, :] = regridObj(so_yearly[t, ks, :, :])
                so_interp[t, ks, :, :].mask = maski
                thetao_interp[t, ks, :, :] = regridObj(thetao_yearly[t,
                                                                     ks, :, :])
                thetao_interp[t, ks, :, :].mask = maski
                if compute_gamma:
                    rhon_interp[t, ks, :, :] = regridObj(rhon_yearly[t,
                                                                     ks, :, :])
                    rhon_interp[t, ks, :, :].mask = maski

                # Atl
                soa_interp[t, ks, :, :] = so_interp[t, ks, :, :] * 1.
                soa_interp[t, ks, :, :].mask = maskAtl
                thetaoa_interp[t, ks, :, :] = thetao_interp[t, ks, :, :] * 1.
                thetaoa_interp[t, ks, :, :].mask = maskAtl
                if compute_gamma:
                    rhona_interp[t, ks, :, :] = rhon_interp[t, ks, :, :] * 1.
                    rhona_interp[t, ks, :, :].mask = maskAtl

                # Pac
                sop_interp[t, ks, :, :] = so_interp[t, ks, :, :] * 1.
                sop_interp[t, ks, :, :].mask = maskPac
                thetaop_interp[t, ks, :, :] = thetao_interp[t, ks, :, :] * 1.
                thetaop_interp[t, ks, :, :].mask = maskPac
                if compute_gamma:
                    rhonp_interp[t, ks, :, :] = rhon_interp[t, ks, :, :] * 1.
                    rhonp_interp[t, ks, :, :].mask = maskPac

                # Ind
                soi_interp[t, ks, :, :] = so_interp[t, ks, :, :] * 1.
                soi_interp[t, ks, :, :].mask = maskInd
                thetaoi_interp[t, ks, :, :] = thetao_interp[t, ks, :, :] * 1.
                thetaoi_interp[t, ks, :, :].mask = maskInd
                if compute_gamma:
                    rhoni_interp[t, ks, :, :] = rhon_interp[t, ks, :, :] * 1.
                    rhoni_interp[t, ks, :, :].mask = maskInd

        # Mask after interpolation
        so_interp = maskVal(so_interp, valmask)
        soa_interp = maskVal(soa_interp, valmask)
        sop_interp = maskVal(sop_interp, valmask)
        soi_interp = maskVal(soi_interp, valmask)
        thetao_interp = maskVal(thetao_interp, valmask)
        thetaoa_interp = maskVal(thetaoa_interp, valmask)
        thetaop_interp = maskVal(thetaop_interp, valmask)
        thetaoi_interp = maskVal(thetaoi_interp, valmask)
        if compute_gamma:
            rhon_interp = maskVal(rhon_interp, valmask)
            rhona_interp = maskVal(rhona_interp, valmask)
            rhonp_interp = maskVal(rhonp_interp, valmask)
            rhoni_interp = maskVal(rhoni_interp, valmask)

        # == Compute zonal means ==
        soz = np.ma.average(so_interp, axis=3)  # Global
        soza = np.ma.average(soa_interp, axis=3)  # Atlantic
        sozp = np.ma.average(sop_interp, axis=3)  # Pacific
        sozi = np.ma.average(soi_interp, axis=3)  # Indian
        thetaoz = np.ma.average(thetao_interp, axis=3)
        thetaoza = np.ma.average(thetaoa_interp, axis=3)
        thetaozp = np.ma.average(thetaop_interp, axis=3)
        thetaozi = np.ma.average(thetaoi_interp, axis=3)
        if compute_gamma:
            rhonz = np.ma.average(rhon_interp, axis=3)
            rhonza = np.ma.average(rhona_interp, axis=3)
            rhonzp = np.ma.average(rhonp_interp, axis=3)
            rhonzi = np.ma.average(rhoni_interp, axis=3)

        # == Write zonal means to outfile ==
        # Prepare axis
        timeBasinZAxesList = basinZAxesList
        timeBasinZAxesList[0] = timeyr
        # Replace monthly with annual
        timeBasinZAxesList[3] = lati
        # Replace lat with regrid target

        # Collapse onto basin axis
        Sz = np.ma.stack([soz, soza, sozp, sozi], axis=1)
        del (soz, soza, sozp, sozi)
        Sz = cdm.createVariable(Sz, axes=timeBasinZAxesList, id='salinity')
        Tz = np.ma.stack([thetaoz, thetaoza, thetaozp, thetaozi], axis=1)
        del (thetaoz, thetaoza, thetaozp, thetaozi)
        Tz = cdm.createVariable(Tz, axes=timeBasinZAxesList, id='temperature')
        if compute_gamma:
            Rz = np.ma.stack([rhonz, rhonza, rhonzp, rhonzi], axis=1)
            del (rhonz, rhonza, rhonzp, rhonzi)
            Rz = cdm.createVariable(Rz, axes=timeBasinZAxesList, id='density')

        if tc == 0:
            # Global attributes
            Sz.long_name = soLongName
            Sz.units = soUnits
            Tz.long_name = thetaoLongName
            Tz.units = 'degrees_C'
            if compute_gamma:
                Rz.long_name = 'Neutral density'
                Rz.units = 'kg.m-3'

        # Write & append
        outFile_f.write(Sz.astype('float32'),
                        extend=1,
                        index=(trmin - tmin) / 12)
        outFile_f.write(Tz.astype('float32'),
                        extend=1,
                        index=(trmin - tmin) / 12)
        del (Sz, Tz)
        if compute_gamma:
            outFile_f.write(Rz.astype('float32'),
                            extend=1,
                            index=(trmin - tmin) / 12)
            del (Rz)

        outFile_f.sync()

    # == End of loop on time chunks ==
    ft.close()
    fs.close()
    outFile_f.close()
    tf = timc.time() - t0
    print('Wrote file: ' + outFile)
    print('Total time :' + str(tf) + 's (' + str(tf / 60) + 'mn)')
Example #36
    writeToLog(logFile,climInterp3Str)

    # Redress WOA grid
    #pdb.set_trace()
    print('climInterp3.shape:',climInterp3.shape)
    #timeAx = cdm.createAxis(np.mean([startYrCt.absvalue,endYrCt.absvalue]),[startYrCt,endYrCt],id='time')
    # TypeError: len() of unsized object
    startYrCtYear = startYrCt.year
    startYrCtMonth = startYrCt.month
    startYrCtDay = startYrCt.day
    #pdb.set_trace()
    calStr = ' '.join(['days since','-'.join([str(startYrCtYear),str(startYrCtMonth),str(startYrCtDay)])])
    timeMean = np.mean([startYrCt.torel(calStr).value,endYrCt.torel(calStr).value])
    #timeMean = cdt.relativetime(timeMean,calStr)
    timeBounds = np.array([startYrCt.torel(calStr).value,endYrCt.torel(calStr).value])
    timeAx = cdm.createAxis((timeMean,),bounds=timeBounds,id='time')
    timeAx.units = calStr ; # Assign units to ndarray type NOT reltime type
    #print(timeAx)
    #pdb.set_trace()
    climInterp3.setAxis(0,timeAx)
    climInterp3.setAxis(1,woaLvls)
    climInterp3.setAxis(2,woaLat)
    climInterp3.setAxis(3,woaLon)

    # Write out data
    modId = '.'.join(['.'.join(filePath.split('/')[-1].split('.')[:-3]),'-'.join([str(startYr),str(endYr-1),'clim']),'nc'])
    outFMod = os.path.join(workDir,'ncs',dateNow,mipEra,experimentIdStartEndYrs,'modGrid')
    outFModId = os.path.join(outFMod,modId)
    woaId = '.'.join(['.'.join(filePath.split('/')[-1].split('.')[:-3]),'-'.join([str(startYr),str(endYr-1),'woaClim']),'nc'])
    outFWoa = os.path.join(workDir,'ncs',dateNow,mipEra,experimentIdStartEndYrs,'woaGrid')
    outFWoaId = os.path.join(outFWoa,woaId)
Example #37
def main():
    # User options ---
    #mip = 'cmip5'
    mip = 'cmip6'
    #mip = 'cmip5+6'
    #stat = 'rmse'
    #stat = 'rmsc'
    stat = 'stdv_ratio'
    SideBySide = True
    #OrgInPaper = True
    OrgInPaper = False

    #modes = ['SAM', 'NAM', 'NAO', 'PNA', 'PDO']
    modes = ['SAM', 'NAM', 'NAO', 'PNA']
    # ----------------
    if stat == 'rmse':
        Normalize = True
        plotTitle = 'RMS using CBF approach with 20CR'
    elif stat == 'rmsc':
        Normalize = True
        plotTitle = 'Centered RMS using CBF approach with 20CR'
    elif stat == 'stdv_ratio':
        Normalize = False
        plotTitle = 'Ratio of Model CBF and Obs PC std with 20CR'
    # ----------------
    #imgName = 'PortraitPlot_'+mip+'_'+stat
    imgName = 'PortraitPlot_' + mip + '_' + stat + '_E3SM'
    if OrgInPaper:
        imgName = imgName + '_OrgInPaper'
    # ----------------
    if mip in ['cmip5', 'cmip6']:
        #stat_xy = getData(mip, stat, modes, OrgInPaper)
        stat_xy_pi = getData(mip, stat, modes, OrgInPaper, piControl=True)
        stat_xy_his = getData(mip, stat, modes, OrgInPaper)
        # Merge into one array
        stat_xy = np.concatenate((stat_xy_pi, stat_xy_his), axis=1)
        xaxis_label = list(stat_xy_pi.getAxis(1)) + list(
            stat_xy_his.getAxis(1))
        # Reduce text for x-axis label on plot
        xaxis_label = reduce_text(xaxis_label)
        # Decorate axes
        X = cdms2.createAxis(xaxis_label)
        Y = cdms2.createAxis(stat_xy_pi.getAxis(0)[:])
        stat_xy = MV2.array(stat_xy, axes=(Y, X), id=stat)
        stat_xy.id = stat
    elif mip in ['cmip5+6']:
        stat_xy_0 = getData('cmip5', stat, modes, OrgInPaper)
        stat_xy_0_pi = getData('cmip5',
                               stat,
                               modes,
                               OrgInPaper,
                               piControl=True)
        stat_xy_1_pi = getData('cmip6',
                               stat,
                               modes,
                               OrgInPaper,
                               piControl=True)
        # Merge into one array
        stat_xy = np.concatenate((stat_xy_0, stat_xy_0_pi, stat_xy_1_pi),
                                 axis=1)
        xaxis_label = list(stat_xy_0.getAxis(1)) + list(
            stat_xy_0_pi.getAxis(1)) + list(stat_xy_1_pi.getAxis(1))
        # Reduce text for x-axis label on plot
        xaxis_label = reduce_text(xaxis_label)
        # Decorate axes
        X = cdms2.createAxis(xaxis_label)
        Y = cdms2.createAxis(stat_xy_0.getAxis(0)[:])
        stat_xy = MV2.array(stat_xy, axes=(Y, X), id=stat)
    else:
        sys.exit('Error: mip ' + mip + ' not defined')
    # Normalize rows by its median
    if Normalize:
        # Normalize by median value
        stat_xy = normalize_by_median(stat_xy)
        # Revise image file name
        imgName = imgName + '_normalized'
    # Colormap to be used
    if stat in ['rmse', 'rmsc']:
        colormap = "bl_to_darkred"
        clevels = [
            -1.e20, -.5, -.4, -.3, -.2, -.1, 0, .1, .2, .3, .4, .5, 1.e20
        ]
        ccolors = vcs.getcolors(clevels, split=0, colors=range(16, 240))
    elif stat == 'stdv_ratio':
        colormap = "bl_to_darkred"
        clevels = [r / 10. for r in list(range(5, 16, 1))]
        clevels.insert(0, -1.e20)
        clevels.append(1.e20)
        ccolors = vcs.getcolors(clevels, split=0, colors=range(16, 240))
        ccolors[4:8] = ['lightgreen', 'green', 'green', 'darkgreen']
        #ccolors[4:8] = [1,2,2,3]
        #print('ccolors', ccolors)
    #
    # Portrait plot
    #
    plot_portrait(
        stat_xy,
        imgName,
        plotTitle=plotTitle,
        colormap=colormap,
        clevels=clevels,
        ccolors=ccolors,
        parea=(.05, .88, .25, .9),
        img_length=2600,
        img_height=800,
        xtic_textsize=10,
        ytic_textsize=10,
        missing_color='grey',
        logo=False,
        #Annotate=True,
    )
    try:
        xaxis_label_2 = reduce_text(xaxis_label_2)
        X2 = cdms2.createAxis(xaxis_label_2)
        stat_xy_2 = MV2.array(stat_xy_2, axes=(Y, X2), id=stat)
        imgName2 = imgName + '_2'
        plot_portrait(
            stat_xy_2,
            imgName2,
            plotTitle=plotTitle,
            colormap=colormap,
            clevels=clevels,
            ccolors=ccolors,
            parea=(.05, .88, .25, .9),
            img_length=1300,
            img_height=800,
            xtic_textsize=10,
            ytic_textsize=10,
            missing_color='grey',
            logo=False,
        )
    except:
        pass
Example No. 38
0
    def test0(self):
        """
        Test from Stephane Raynaud with a few modifications
        """

        toolsAndMethods = {
            'libcf': ['linear'],
            'esmf': ['linear', 'patch', 'conserve'],
        }

        f = cdms2.open(sys.prefix + '/sample_data/swan.four.nc')
        vari = f('HS')
        f.close()
        gridi = vari.getGrid()

        # add bounds to input grid
        lati = vari.getLatitude()
        loni = vari.getLongitude()
        xib, yib = bounds2d(loni, lati)
        loni.setBounds(xib)
        lati.setBounds(yib)
        self.assertNotEqual(gridi.getLatitude().getBounds(), None)
        self.assertNotEqual(gridi.getLongitude().getBounds(), None)

        # output grid
        nyo, nxo = 100, 200
        ymin, ymax = lati.min(), lati.max()
        xmin, xmax = loni.min(), loni.max()
        dy, dx = (ymax - ymin) / float(nyo), (xmax - xmin) / float(nxo)
        yo = numpy.array([ymin + dy * (j + 0.5) for j in range(nyo)])
        xo = numpy.array([xmin + dx * (i + 0.5) for i in range(nxo)])
        lato = cdms2.createAxis(yo)
        lato.designateLatitude()
        lato.units = 'degrees_north'
        lono = cdms2.createAxis(xo)
        lono.designateLongitude()
        lono.units = 'degrees_east'
        grido = cdms2.createRectGrid(lato, lono)
        self.assertNotEqual(grido.getLatitude().getBounds(), None)
        self.assertNotEqual(grido.getLongitude().getBounds(), None)

        for tool in toolsAndMethods:
            for met in toolsAndMethods[tool]:
                t0 = time()
                print(tool.upper(), met, ':')
                diag = {}
                varo = vari.regrid(grido,
                                   regridMethod=met,
                                   regridTool=tool,
                                   coordSys='cart',
                                   nitermax=10,
                                   diag=diag)
                print('diag = ', diag)
                met2 = diag['regridMethod']
                tool2 = diag['regridTool']
                self.assertEqual(met, met2)
                self.assertEqual(tool2, tool)
                self.assertGreater(varo.min(), -0.01)
                dt = time() - t0
                print(tool.upper(), met, ':', dt, 'seconds')

                if PLOT:
                    pylab.figure(figsize=(12, 6))
                    pylab.subplots_adjust(right=0.9)
                    pylab.subplot(121)
                    pylab.pcolor(loni[:],
                                 lati[:],
                                 vari[0].asma(),
                                 vmin=0,
                                 vmax=2.5)
                    pylab.axis([xmin, xmax, ymin, ymax])
                    pylab.colorbar()
                    pylab.title('Original')
                    pylab.subplot(122)
                    pylab.pcolor(lono[:],
                                 lato[:],
                                 varo[0].asma(),
                                 vmin=0,
                                 vmax=2.5)
                    pylab.axis([xmin, xmax, ymin, ymax])
                    pylab.title(tool.upper() + ' / ' + met.upper())
                    pylab.colorbar()  #cax=pylab.axes([0.92, 0.3, 0.02, 0.6]))
                    pylab.savefig('testRaynaud.%(tool)s.%(met)s.png' % vars())
Example No. 39
0
f.listdimension()
f.listvariables()

# MANUALLY PULL DATES

CAM5_PR = f('P',longitude=(0,360), latitude = (-90., 90.))#, time=(start_time,end_time))
#cdutil.setTimeBoundsMonthly(CAM5_PR)
CAM5_PR=CAM5_PR[1560:-120,:,:]
#======================================================================

CAM5_dD_pr = f('dD',longitude=(0,360), latitude = (-90., 90.))#, time=(start_time,end_time))
CAM5_dD_pr=CAM5_dD_pr[1560:-120,:,:]

#======================================================================
times=np.arange(1560.0,1860.0,1)
newTimeAxis = cdms2.createAxis(times, id='time')
newTimeAxis.units = 'months since 1850'
newTimeAxis.designateTime()
CAM5_PR.setAxis(0,newTimeAxis)

cdutil.setTimeBoundsMonthly(CAM5_PR)

#======================================================================
times=np.arange(1560.0,1860.0,1)
newTimeAxis = cdms2.createAxis(times, id='time')
newTimeAxis.units = 'months since 1850'
newTimeAxis.designateTime()
CAM5_dD_pr.setAxis(0,newTimeAxis)

cdutil.setTimeBoundsMonthly(CAM5_dD_pr)
#======================================================================
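# The same five lines are applied above to both CAM5_PR and CAM5_dD_pr; a small helper
# that factors out the pattern is sketched below. The helper name and the synthetic
# variable are illustrative assumptions, not part of the snippet above.
import numpy as np
import cdms2
import cdutil
import MV2

def override_monthly_time(var, start, n_months, units='months since 1850'):
    """Attach a fresh monthly time axis (with monthly bounds) to axis 0 of var."""
    t = cdms2.createAxis(np.arange(float(start), float(start) + n_months), id='time')
    t.units = units
    t.designateTime()
    var.setAxis(0, t)
    cdutil.setTimeBoundsMonthly(var)
    return var

# usage on a synthetic (time, lat, lon) variable
dummy = MV2.zeros((300, 4, 5))
dummy = override_monthly_time(dummy, 1560, 300)
print(len(dummy.getTime()), dummy.getTime().units)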
Example No. 40
0
bg = not args.show

x = vcs.init()
x.setantialiasing(0)
x.drawlogooff()
if bg:
    x.setbgoutputdimensions(1200, 1091, units="pixels")
x.setcolormap("rainbow")
gm = vcs.createvector()
gm.scale = args.scale
nm_xtra = ""
xtra = {}
#Creates 4x5 grid
dlat = 180. / args.nlat
lats = cdms2.createAxis(numpy.arange(-90 + dlat / 2., 90, dlat))
lats.id = "latitude"
lats.units = "degrees_north"
lons = cdms2.createAxis(numpy.arange(0, 360, 360. / args.nlon))
lons.id = "longitude"
lons.units = "degrees_east"
print(len(lats), len(lons))
if args.angle in [-45, 0, 45]:
    u = MV2.ones((args.nlat, args.nlon))
elif args.angle in [-135, -180, 135]:
    u = -MV2.ones((args.nlat, args.nlon))
else:
    u = MV2.zeros((args.nlat, args.nlon))
if args.angle in [45, 90, 135]:
    v = MV2.ones((args.nlat, args.nlon))
elif args.angle in [-45, -90, -135]:
Example No. 41
0
f.listvariables()

# MANUALLY PULL DATES

OIPC = f('oipc_global10_v2',longitude=(0,360), latitude = (-90., 90.))#, time=(start_time,end_time))
OIPC_dD_ANN=OIPC[12,:,:]
lon=OIPC.getLongitude()
lat=OIPC.getLatitude()
#======================================================================
# 3. REGRID ALL DATA TO CAM GRID
#======================================================================
#======================================================================
#======================================================================
# numpy array -> axis -> grid
lonNp = np.linspace(0.0, 360.0, num=145)
lonAxis = cdms2.createAxis(lonNp)
latNp = np.linspace(-90., 90.0, num=96)
latAxis = cdms2.createAxis(latNp)
#======================================================================
grid3 = cdms2.createRectGrid(latAxis, lonAxis, 'yx', type="generic")
print(grid3)
#======================================================================

# HAVE TO USE THE ORIGINAL CDMS VARIABLE HERE, THEN RE-SAVE IF NECESSARY:

OIPC_r = OIPC_dD_ANN.regrid(grid3, regridTool='esmf', regridMethod='conserve')

#======================================================================
# EPSILON VALUES
#======================================================================
Example No. 42
0
        # find ARs
        mask_list, axis_list, armask, axismask = findARs(
            slabano, quano, qvano, areamap, costhetas, sinthetas, PARAM_DICT)

        # skip if none
        if armask.sum() == 0:
            continue

        # fetch AR related data
        labels, angles, crossfluxes, ardf = getARData(
            slab, quslab, qvslab, slabano, quano, qvano, areamap, mask_list,
            axis_list, timett_str, PARAM_DICT, SHIFT_LON, False, OUTPUTDIR)

        # prepare nc output
        timeaxii = cdms.createAxis([timett.torel('days since 1900-1-1').value])
        timeaxii.designateTime()
        timeaxii.id = 'time'
        timeaxii.units = 'days since 1900-1-1'

        labels = funcs.addExtraAxis(labels, timeaxii)
        angles = funcs.addExtraAxis(angles, timeaxii)
        crossfluxes = funcs.addExtraAxis(crossfluxes, timeaxii)

        # save to disk
        ncfout.write(labels, typecode='f')
        ncfout.write(angles, typecode='f')
        ncfout.write(crossfluxes, typecode='f')

        result_dict[timett_str] = ardf
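# funcs.addExtraAxis above prepends the one-point time axis to each 2-D field before
# writing. A generic equivalent using plain cdms2/MV2 (not the project's funcs module)
# is sketched below; the field, axes, and helper name are illustrative assumptions.
import numpy
import cdms2 as cdms
import MV2

def add_time_axis(var2d, timeax):
    """Prepend a length-1 time axis to a (lat, lon) variable."""
    out = MV2.reshape(var2d, (1,) + var2d.shape)
    out.setAxisList([timeax] + var2d.getAxisList())
    out.id = var2d.id
    return out

timeaxii = cdms.createAxis([36524.])    # e.g. a value in days since 1900-1-1
timeaxii.designateTime()
timeaxii.id = 'time'
timeaxii.units = 'days since 1900-1-1'

lat = cdms.createAxis(numpy.linspace(-90., 90., 19), id='lat')
lat.designateLatitude()
lon = cdms.createAxis(numpy.linspace(0., 350., 36), id='lon')
lon.designateLongitude()
field = MV2.array(numpy.random.rand(19, 36), id='labels')
field.setAxisList((lat, lon))

field3d = add_time_axis(field, timeaxii)
print(field3d.shape, field3d.getTime().units)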
def main():
    # User options ---    
    exp = 'cmip5'
    #exp = 'cmip6'
    #exp = 'cmip5+6'
    stat = 'rmse'
    #stat = 'stdv_ratio'
    # ----------------
    if stat == 'rmse':
        Normalize = True
        plotTitle = 'RMS using CBF approach with 20CR'
    elif stat == 'stdv_ratio':
        Normalize = False
        plotTitle = 'Ratio of Model CBF and Obs PC std with 20CR'
    # ----------------
    imgName = 'PortraitPlot_'+exp+'_'+stat
    # ----------------
    if exp in ['cmip5', 'cmip6']:
        stat_xy = getData(exp, stat)
        stat_xy.id = stat
    elif exp in ['cmip5+6']:
        stat_xy_0 = getData('cmip5', stat)
        stat_xy_1 = getData('cmip6', stat)
        # Add empty column for clear separation btw cmip 5 and 6
        empty_col = np.empty((len(stat_xy_0.getAxis(0)),3))
        empty_col[:] = np.nan
        stat_xy = np.concatenate((stat_xy_0, empty_col, stat_xy_1), axis=1)
        # Customize x-axis label
        xaxis_label = ( 
            ['[CMIP5] '+stat_xy_0.getAxis(1)[0]]  # cmip5, 1st model
            + [r for r in list(stat_xy_0.getAxis(1)[1:])]  # cmip5, rest
            + [' '] * 3  # separation
            + ['[CMIP6] '+stat_xy_1.getAxis(1)[0]]
            + [r for r in list(stat_xy_1.getAxis(1)[1:])])
        # Decorate axes
        X = cdms2.createAxis(xaxis_label)
        Y = cdms2.createAxis(stat_xy_0.getAxis(0)[:])
        stat_xy = MV2.array(stat_xy, axes=(Y,X), id=stat)
    else:
        sys.exit('Error: exp '+exp+' not defined')
    # Normalize rows by its median
    if Normalize:
        # Normalize by median value
        stat_xy = normalize_by_median(stat_xy)
        # Revise image file name
        imgName = imgName+'_normalized'
    # Colormap to be used 
    if stat == 'rmse':
        colormap = "bl_to_darkred"
        clevels = [-1.e20, -.5, -.4, -.3, -.2, -.1, 0, .1, .2, .3, .4, .5, 1.e20]
        ccolors = vcs.getcolors(clevels, split=0, colors=range(16,240))
    elif stat == 'stdv_ratio':
        colormap = "bl_to_darkred"
        clevels = [r/10. for r in list(range(5,16,1))]
        clevels.insert(0, -1.e20) 
        clevels.append(1.e20) 
        ccolors = vcs.getcolors(clevels, split=0, colors=range(16,240))
        ccolors[4:8] = ['lightgreen', 'green', 'green', 'darkgreen']
    #
    # Portrait plot
    #
    plot_portrait(stat_xy, imgName, plotTitle=plotTitle,
        colormap=colormap, clevels=clevels, ccolors=ccolors,
        parea=(.05, .88, .25, .9),
        img_length=2600, img_height=800, xtic_textsize=10, ytic_textsize=10, 
        missing_color='grey', logo=False,
        )
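# The xaxis_label construction above inserts a few all-NaN columns as a visual gap
# between the CMIP5 and CMIP6 blocks and then re-attaches labelled axes with MV2.
# A standalone sketch of that pattern follows; the toy statistics and model names
# are made up for illustration.
import numpy as np
import cdms2
import MV2

cmip5_stat = np.array([[0.3, 0.5], [0.2, 0.4], [0.6, 0.1], [0.3, 0.2]])
cmip6_stat = np.array([[0.2, 0.4], [0.1, 0.3], [0.5, 0.2], [0.4, 0.1]])

gap = np.empty((cmip5_stat.shape[0], 3))
gap[:] = np.nan
stat_xy = np.concatenate((cmip5_stat, gap, cmip6_stat), axis=1)

labels = (['[CMIP5] MODEL-A', 'MODEL-B'] + [' '] * 3
          + ['[CMIP6] MODEL-C', 'MODEL-D'])
modes = ['SAM', 'NAM', 'NAO', 'PNA']

X = cdms2.createAxis(labels)   # string-valued axis, as used above
Y = cdms2.createAxis(modes)
stat_xy = MV2.array(stat_xy, axes=(Y, X), id='rmse')
print(stat_xy.shape, list(stat_xy.getAxis(1)))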
Example No. 44
0
        else:
            contact = 'BLANK'

    # Create component time from piControl time:units - need parent_experiment_id, parent_experiment_rip and resulting time:units
    # http://www2-pcmdi.llnl.gov/cdat/tutorials/cdatbasics/cdms-basics/createaxes
    if 'piControl' in header2:
        branch_time_comp1   = 'BLANK'
        piC_startyr         = 'BLANK'
        piC_endyr           = 'BLANK'
        greg_year           = 'BLANK'
        #piControl_info += [[filename_pad,tracking_id,creation_date,calendar,time_units,yearfirst,yearlast,branch_time,branch_time_comp,
        #                    branch_time_validated,parent_experiment_id,parent_experiment_rip,contact]]
        piControl_info += [[filename_pad,tracking_id,creation_date,calendar,time_units,firstyear,lastyear,branch_time,branch_time_comp1,
                            branch_time_valid,parent_experiment_id,parent_experiment_rip,contact]]
    else:
        tmp = cdms.createAxis(float32([f.branch_time]))
        tmp.id = 'time'
        # Determine dob attributes to apply to look up
        model = filename_trim[0:filename_trim.index('.')]
        # Create piControl string to search
        if 'BLANK' in parent_experiment_rip:
            parent_experiment_rip_s = 'r1i1p1'
        else:
            parent_experiment_rip_s = parent_experiment_rip
        piControl_search = "".join([model,'.',parent_experiment_id,'.',parent_experiment_rip_s])
        print "".join(['1: ',piControl_search])
        # Overwrite piControl info with valid info
        for piFix in valid:
            if (filename_trim == piFix[0]):
                branch_time_comp2 = piFix[1]
                parent_experiment_id = piFix[4].split('.')[0]
Example No. 45
0
import vcs, numpy, cdms2, MV2, os, sys
src = sys.argv[1]
pth = os.path.join(os.path.dirname(__file__), "..")
sys.path.append(pth)
import checkimage
x = vcs.init()
x.setantialiasing(0)

x.setbgoutputdimensions(1200, 1091, units="pixels")

t = cdms2.createAxis(numpy.arange(120))
t.designateTime()
t.id = "time"
t.units = "months since 2014"
data = MV2.arange(120, 0, -1)
data.id = "data"
data.setAxis(0, t)
x = vcs.init()
x.setantialiasing(0)
x.plot(data, bg=1)
fnm = 'test_vcs_monotonic_decreasing_yxvsx_default.png'

x.png(fnm)
print "fnm:", fnm
print "src:", src
ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
sys.exit(ret)
Example No. 46
0
                 'clt.nc'))
s = f("clt")
cdutil.setTimeBoundsMonthly(s)

print('Getting JJA, which should be nonexistent in the data')

if cdutil.JJA(s[:5]) is not None:
    raise RuntimeError("data w/o season did not return None")

## Create a year worth of data w/o JJA
s1 = s[:5]
s2 = s[8:12]

s3 = MV2.concatenate((s1, s2))
t = MV2.concatenate((s1.getTime()[:], s2.getTime()[:]))
t = cdms2.createAxis(t, id='time')
t.units = s.getTime().units
t.designateTime()

s3.setAxis(0, t)
cdutil.setTimeBoundsMonthly(s3)
if cdutil.JJA(s3) is not None:
    raise RuntimeError("data w/o season did not return None")
if cdutil.JJA.departures(s3) is not None:
    raise RuntimeError("data w/o season did not return None for dep")
if cdutil.JJA.climatology(s3) is not None:
    raise RuntimeError("data w/o season did not return None for clim")

# Now gets seasonal cycle, should have JJA all missing
print('Testing seasonal cycle on 1 year data w/o JJA should work')
a = cdutil.SEASONALCYCLE(s3)
Example No. 47
0
def runClim(A):

    print("OK SO START IS:", A.start)
    # season dictionary
    season_function = {
        "djf": cdutil.times.DJF,
        "mam": cdutil.times.MAM,
        "jja": cdutil.times.JJA,
        "son": cdutil.times.SON,
        "ann": cdutil.times.ANNUALCYCLE,
        "year": cdutil.times.YEAR,
    }

    print("BEFORE RPOCESEED:", A.results_dir)
    #   print("A VAR:", A.variable)
    #   print("A REF:", A.reference)
    results_dir = A.process_templated_argument("results_dir")
    print("RESDIR:", results_dir.template)
    A.results_dir = results_dir()

    print("HERE?", os.path.join(A.modpath, A.filename_template))
    print("A.variable", A.variable, A.model)
    filename_in = A.process_templated_argument(
        os.path.join(A.modpath, A.filename_template))

    if A.verbose:
        print("filename in after templating:", filename_in())
    filename = glob.glob(filename_in())[0]

    if not os.path.exists(filename):
        raise RuntimeError("file '{}' doe not exits".format(filename))

    filein = cdms2.open(filename)

    fvars = list(filein.variables.keys())
    v = A.variable
    if v not in fvars:
        raise RuntimeError("Variable '%s' is not contained in input file(s)" %
                           v)
    V = filein[v]
    tim = V.getTime().clone()
    # "monthly"
    if A.bounds:
        cdutil.times.setTimeBoundsMonthly(tim)
    # Now make sure we can get the requested period
    if A.start is None:
        i0 = 0
    else:  # Ok user specified a start time
        if A.index == "index":  # index-based slicing
            if int(A.start) >= len(tim):
                raise RuntimeError(
                    "For variable %s you requested start time to be at index: %i but the file only has %i time steps"
                    % (v, int(A.start), len(tim)))
            i0 = int(A.start)
        elif A.index == "value":  # actual value used for slicing
            v0 = float(A.start)
            try:
                i0, tmp = tim.mapInterval((v0, v0), 'cob')
            except Exception:
                raise RuntimeError(
                    "Could not find value %s for start time for variable %s" %
                    (A.start, v))
        elif A.index == "date":
            v0 = A.start
            # When the value is too close to the bounds, mapInterval can fail; adding a minute seems to help
            v0 = cdtime.s2c(A.start)
            v0 = v0.add(1, cdtime.Minute)
            try:
                i0, tmp = tim.mapInterval((v0, v0), 'cob')
            except Exception:
                raise RuntimeError(
                    "Could not find start time %s for variable: %s" %
                    (A.start, v))

    if A.end is None:
        i1 = None
    else:  # Ok user specified a end time
        if A.index == "index":  # index-based slicing
            if int(A.end) >= len(tim):
                raise RuntimeError(
                    "For variable %s you requested end time to be at index: %i but the file only has %i time steps"
                    % (v, int(A.end), len(tim)))
            i1 = int(A.end)
        elif A.index == "value":  # actual value used for slicing
            v0 = float(A.end)
            try:
                tmp, i1 = tim.mapInterval((v0, v0), 'cob')
            except Exception:
                raise RuntimeError(
                    "Could not find value %s for end time for variable %s" %
                    (A.end, v))
        elif A.index == "date":
            v0 = A.end
            # When the value is too close to the bounds, mapInterval can fail; adding a minute seems to help
            v0 = cdtime.s2c(A.end)
            v0 = v0.add(1, cdtime.Minute)
            try:
                tmp, i1 = tim.mapInterval((v0, v0), 'cob')
            except Exception:
                raise RuntimeError(
                    "Could not find end time %s for variable: %s" % (A.end, v))
    # Read in data
    data = V(time=slice(i0, i1))
    if A.verbose:
        print("DATA:", data.shape,
              data.getTime().asComponentTime()[0],
              data.getTime().asComponentTime()[-1])
    if A.bounds:
        cdutil.times.setTimeBoundsMonthly(data)
    # Now we can actually read and compute the climo
    seasons = [s.lower() for s in A.seasons]
    if "all" in seasons:
        seasons = ["djf", "mam", "jja", "son", "year", "ann"]

    for season in seasons:
        s = season_function[season].climatology(
            data, criteriaarg=[A.threshold, None])
        g = season_function[season].get(data, criteriaarg=[A.threshold, None])
        # Ok we know we have monthly data
        # We want to tweak bounds
        T = data.getTime()
        Tg = g.getTime()
        istart = 0
        while numpy.ma.allequal(g[istart].mask, True):
            istart += 1
        iend = -1
        while numpy.ma.allequal(g[iend].mask, True):
            iend -= 1
        if iend == -1:
            iend = None
        else:
            iend += 1
        if iend is None:
            iend = len(Tg)
        Tg = Tg.subAxis(istart, iend)

        cal = T.getCalendar()
        cal_name = getCalendarName(cal)
        Tunits = T.units
        bnds = T.getBounds()
        tc = T.asComponentTime()

        if A.verbose:
            print("TG:", Tg.asComponentTime()[0])
            print("START END THRESHOLD:", istart, iend, A.threshold, len(Tg))
            # print "SEASON:", season, "ORIGINAL:", T.asComponentTime()
        b1 = cdtime.reltime(Tg.getBounds()[0][0], Tg.units)
        b2 = cdtime.reltime(Tg.getBounds()[-1][1], Tg.units)

        # First and last time points
        y1 = cdtime.reltime(Tg[0], T.units)
        y2 = cdtime.reltime(Tg[-1], T.units)

        # Mid year is:
        yr = (y2.value + y1.value) / 2.
        y = cdtime.reltime(yr, T.units).tocomp(cal).year

        if A.verbose:
            print("We found data from ", y1.tocomp(cal), "to", y2.tocomp(cal),
                  "MID YEAR:", y)
            print("bounds:", b1.tocomp(cal), b2.tocomp(cal))

        values = []
        bounds = []

        # Loop thru clim month and set value and bounds appropriately
        ts = s.getTime().asComponentTime()
        for ii in range(s.shape[0]):
            t = ts[ii]
            t.year = y
            values.append(t.torel(Tunits, cal).value)
            if (s.shape[0] > 1):
                B1 = b1.tocomp(cal).add(ii, cdtime.Month)
                B2 = b2.tocomp(cal).add(ii - s.shape[0] + 1, cdtime.Month)
            else:
                B1 = b1
                B2 = b2
            # b2.year = y
            # b1.year = y
            #  if b1.cmp(b2) > 0:  # ooops
            #    if b1.month>b2.month and b1.month-b2.month!=11:
            #        b1.year -= 1
            #    else:
            #        b2.year += 1
            #  if b1.month == b2.month:
            #    b2.year = b1.year+1
            if A.verbose:
                print(B1.tocomp(cal), "<", t, "<", B2.tocomp(cal))
            bounds.append(
                [B1.torel(Tunits, cal).value,
                 B2.torel(Tunits, cal).value])

    fnmout = genutil.StringConstructor(A.output_filename_template)

    if "model_id" in fnmout.keys():
        model_id = checkCMORAttribute("model_id")
    if "experiment_id" in fnmout.keys():
        experiment_id = checkCMORAttribute("experiment_id")
    if "realization" in fnmout.keys():
        realization = checkCMORAttribute("realization")
    if "initialization_method" in fnmout.keys():
        initialization = checkCMORAttribute("initialization_method")
    if "physics_version" in fnmout.keys():
        physics_version = checkCMORAttribute("physics_version")
    if A.cmor and hasCMOR:
        dump_cmor(A, s, values, bounds, season)
    else:
        if A.cmor and not hasCMOR:
            print(
                "Your Python does not have CMOR, using regular cdms to write out files"
            )
        if not os.path.exists(A.results_dir):
            os.makedirs(A.results_dir)
        end_tc = tc[-1].add(1, cdtime.Month)

        # Populate fout template with values
        start = "{}{:02d}".format(tc[0].year, tc[0].month)
        end = "{}{:02d}".format(end_tc.year, end_tc.month)
        for k in fnmout.keys():
            try:
                setattr(fnmout, k, getattr(A, k))
            except Exception:
                pass
            # overwrite with locals
            try:
                setattr(fnmout, k, locals()[k])
            except Exception:
                pass
        nm = os.path.join(A.results_dir, fnmout())
        f = cdms2.open(nm, "w")
        # Global attributes copied
        for att, value in store_globals(filein).items():
            setattr(f, att, value)
        t = cdms2.createAxis(values)
        t.setBounds(numpy.array(bounds))
        t.designateTime()
        t.id = "time"
        s.setAxis(0, t)
        # copy original attributes
        for att, value in store_attributes(V).items():
            setattr(s, att, value)
        f.write(s, dtype=data.dtype)
        f.close()
        if A.verbose:
            print("Results out to:", nm)
Example No. 48
0
    options.along = 'auto'
if options.along == 'auto':
    options.along = 'lon' if xx.max() - xx.min() > yy.max() - yy.min() else 'lat'
if options.along == 'lon':
    taxis = create_lon(lons)
elif options.along == 'lat':
    taxis = create_lat(lats)
else:
    xy = N.sqrt((xx - xx[0])**2 + (yy - yy[0])**2)
    if xy.max() - xy.min() > 2500 or options.along == 'km':
        units = 'km'
        xy *= 1e-3
    else:
        units = 'm'
    taxis = cdms2.createAxis(xy)
    taxis.units = units
    taxis.long_name = 'Distance'
tbathy.setAxis(0, taxis)

# Plot
c = curve2(tbathy,
           'k',
           bgcolor=(.95, .95, 1),
           order='d-',
           show=False,
           yfmt='%gm',
           yunits=False,
           bottom=0.12,
           zorder=10,
           figsize=eval(options.figsize),
Example No. 49
0
def create_amount_freq_PDF(mv,
                           binedges,
                           binwidthtype=None,
                           bincentertype=None,
                           vid=None,
                           vid2=None,
                           vid3=None):
    """Takes in geospatial data (mv) with dimensions of lat, lon, and time, and 
    creates a PDF of the mv based on binedges at each lat/lon grid point. 
    binedges defines the edges of the bin, except for the bin with maximum value,
    where it is open. 
    
    The binwidth option allows the user to define whether the PDF is scaled by the
    'arithmetic' bin width or the 'logarithmic' bin width (dN/dx vs. dN/dlogx).
    default: 'logarithmic'
    
    The bincenter option allows one to use the 'geometric' or the 'arithmetic' mean
    of the adjacent bin edges to define each bin center. The bin with the maximum value
    has its center placed beyond the last bin edge by the same distance that separates
    that edge from the previous bin's center.
    
    PDFs are normalized over all available data rather than over the histogram alone;
    for example, data lying below the minimum bin edge is still counted in the
    normalization even though it falls outside the plotted bins.
    
    vid = variable ID, which will typically be the variable name used when the data are
    written out as a netCDF
    """

    #Step 1 input data and figure out the time dimensions in the data
    if vid is None:
        vid = mv.id
        vid2 = ''.join([mv.id, '2'])

    #Do get domain and find which axis corresponds to time, lat and lon
    time_index = mv.getAxisIndex('time')
    lat_index = mv.getAxisIndex('lat')
    lon_index = mv.getAxisIndex('lon')
    #obtain long_name, standard_name, typecode of the variable to eventually feed into output variables
    var_long_name = mv.long_name
    var_standard_name = mv.standard_name
    mv_typecode = mv.typecode()
    mv_lat = mv.getAxis(lat_index)
    mv_lon = mv.getAxis(lon_index)
    mv_att = mv.attributes
    mv_grid = mv.getGrid()

    #Step 2 loop over the bin widths and add up the number of data points in each bin

    #Create an array with the shape of (lat,lon,binedges,and corresponding bincenter)
    mapped_precip_freqpdf = numpy.zeros(
        (mv.shape[lat_index], mv.shape[lon_index], len(binedges)))
    mapped_precip_amntpdf = numpy.zeros(
        (mv.shape[lat_index], mv.shape[lon_index], len(binedges)))
    bincenter = numpy.zeros(len(binedges))

    #Count up total first
    counts_index = MV2.greater_equal(mv, 0.)
    print(counts_index.shape)
    data_counts = numpy.zeros((mv.shape))
    data_counts[counts_index] = 1.
    counts_total = numpy.sum(data_counts, axis=time_index)

    #specify what the binmean, bincenter, and binwidths are based on log and arith scaling
    binwidth = numpy.zeros(len(binedges))
    bincenter = numpy.zeros(len(binedges))
    binmean = numpy.zeros(len(binedges))

    #Calculate bin mean for amount PDF
    binmean[:-1] = (binedges[1:] + binedges[:-1]) / 2
    binmean[-1] = binedges[-1] + (binedges[-1] - binedges[-2]) / 2
    #Calculate bin width based on type
    if binwidthtype == 'arithmetic':
        binwidth[:-1] = binedges[1:] - binedges[:-1]
        binwidth[-1] = binedges[-1] - binedges[-2]
    elif binwidthtype == 'logarithmic' or binwidthtype is None:
        binwidth[:-1] = numpy.log10(binedges[1:] / binedges[:-1])
        binwidth[-1] = numpy.log10(binedges[-1] / binedges[-2])
    #Calculate bin center based on type
    if bincentertype == 'arithmetic' or bincentertype is None:
        bincenter = binmean
    elif bincentertype == 'geometric':
        bincenter[:-1] = numpy.sqrt(binedges[1:] * binedges[:-1])
        bincenter[-1] = binedges[-1] + (binedges[-1] - binedges[-2]) / 2

    #Count up the number of days of precip in each precip bin **Most work done here
    for i in range(len(binedges)):
        precip_index = numpy.ones(
            mv.shape
        )  #locate the index where precip rate is between the bin edges
        toolow_index = mv < binedges[i]
        precip_index[toolow_index] = 0
        if i != (len(binedges) - 1):
            toohigh_index = mv >= binedges[i + 1]
            precip_index[toohigh_index] = 0
        precip_total = numpy.sum(precip_index, axis=time_index)
        precip_fraction = numpy.divide(precip_total, counts_total)

        precip_freqpdf = precip_fraction / binwidth[i]
        precip_amntpdf = precip_fraction / binwidth[i] * binmean[i]
        mapped_precip_freqpdf[:, :, i] = precip_freqpdf
        mapped_precip_amntpdf[:, :, i] = precip_amntpdf
        precip_freqpdf = None
        precip_amntpdf = None

    #Step 3 attach all necessary attributes to data (create data as a transient variable)
    #First, specify a new axis for the PDF
    binbound_all = numpy.append(binedges, numpy.max(mv))
    binbounds = numpy.zeros((len(binedges), 2))
    binbounds[:, 0] = binbound_all[:-1]
    binbounds[:, 1] = binbound_all[1:]
    mv_hist = cdms2.createAxis(
        bincenter, bounds=binbounds,
        id='binvalue')  #mv_hist is the axes for precip rate
    ouput_mapped_precip_freqpdf = cdms2.createVariable(
        mapped_precip_freqpdf,
        typecode=mv_typecode,
        grid=mv_grid,
        axes=[mv_lat, mv_lon, mv_hist],
        attributes=mv_att,
        id=vid)

    ouput_mapped_precip_amntpdf = cdms2.createVariable(
        mapped_precip_amntpdf,
        typecode=mv_typecode,
        grid=mv_grid,
        axes=[mv_lat, mv_lon, mv_hist],
        attributes=mv_att,
        id=vid2)
    bincenter = cdms2.createVariable(bincenter,
                                     typecode=mv_typecode,
                                     axes=[mv_hist],
                                     attributes=mv_att,
                                     id=vid3)

    ouput_mapped_precip_freqpdf.units = 'frequency'
    ouput_mapped_precip_amntpdf.units = 'amount'
    ouput_mapped_precip_freqpdf.long_name = ''.join(
        ['Frequency as a function of ', var_long_name])
    ouput_mapped_precip_amntpdf.long_name = ''.join(
        ['Amount as a function of ', var_long_name])
    ouput_mapped_precip_freqpdf.standard_name = ''.join(
        [var_standard_name, '_frequency'])
    ouput_mapped_precip_amntpdf.standard_name = ''.join(
        [var_standard_name, '_amount'])
    #Step 4 output data
    return bincenter, ouput_mapped_precip_freqpdf, ouput_mapped_precip_amntpdf
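# A minimal usage sketch for create_amount_freq_PDF, assuming the function above is
# importable; the synthetic precipitation field and the logarithmically spaced bin
# edges are illustrative only.
import numpy
import cdms2
import MV2

nt, nlat, nlon = 365, 10, 20
time = cdms2.createAxis(numpy.arange(nt, dtype='d'), id='time')
time.units = 'days since 2000-1-1'
time.designateTime()
lat = cdms2.createAxis(numpy.linspace(-45., 45., nlat), id='lat')
lat.designateLatitude()
lon = cdms2.createAxis(numpy.linspace(0., 342., nlon), id='lon')
lon.designateLongitude()

pr = MV2.array(numpy.random.lognormal(0., 1., (nt, nlat, nlon)), id='pr')
pr.long_name = 'precipitation rate'
pr.standard_name = 'precipitation_flux'
pr.setAxisList((time, lat, lon))

binedges = numpy.logspace(-1, 2, 20)   # mm/day, open-ended at the top

centers, freq_pdf, amnt_pdf = create_amount_freq_PDF(
    pr, binedges, binwidthtype='logarithmic', bincentertype='geometric',
    vid='pr_freq', vid2='pr_amount', vid3='bin_center')
print(freq_pdf.shape, amnt_pdf.shape)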
Example No. 50
0
    def process(self, data):
        # if self.symetric:
        #             data = symetric(data)
        # Make sure we have an even number of time steps
        t = data.getTime()

        # length of time axis
        nt = len(t)
        if nt % 2 != 0:
            print("Warning time wasn't even, removed last time step")
            data = data[:-1]
            t = data.getTime()  # get the new time axis
            nt = len(t)

        if len(t) < self._NTSub:
            msg = "Error your data must have at least %i time steps, adjust frequency (currently: %i/day)"
            msg += " or number_of_days (currently: %i processed at once) to reach that limit, or get more data"
            raise Exception(msg %
                            (self._NTSub, self.frequency, self.number_of_days))
        # Computes PP, number of sub-domain
        PP = float(nt - self._NTSub) / self._NShift + 1
        PP = int(PP)

        # Number of longitudes
        lons = data.getLongitude()
        NL = len(lons)

        # Should redo that with just an arange (eventually...)!!!
        # Frequencies in cycles/day
        ff = numpy.arange(0, self._NTSub + 1, 1, numpy.float)
        for i in range(1, self._NTSub + 2):
            ff[i - 1] = float(i - 1 - self._NTSub / 2.) * \
                self.frequency / float(self._NTSub)

        # Should redo that with just an arange (eventually...)!!!
        # Wave numbers
        ss = numpy.arange(0, NL + 1, 1, numpy.float)
        for i in range(1, NL + 2):
            ss[i - 1] = float(i - 1 - NL / 2.)
        # print 'Frequencies:',ff
        # print 'Wave numbers:',ss
        # Ok, we now do the real stuff
        # Creates the array of powers (Number of
        # subtimes, fqcy, wave_numbers, latitudes)
        lats = data.getLatitude()
        Power = numpy.zeros((PP, self._NTSub + 1, NL + 1, len(lats)),
                            numpy.float)

        # LOOP through time sub domains
        prev = 0  # initialize the scrolling bar
        for Pcount in range(PP):
            if PP > 1:
                prev = genutil.statusbar(float(Pcount),
                                         PP - 1,
                                         prev=prev,
                                         tk=self.tkbar)

            # Get the time subdomain
            EEo = data[Pcount * self._NShift:Pcount * self._NShift +
                       self._NTSub](order='tx...')
            # First does the symetric/antisymetric thing if needed
            if self.symetric:
                EEo = symetrick(EEo)

            # Now detrending
            # Step 1- Get the slope and intercept
            slope, intercept = genutil.statistics.linearregression(
                EEo, nointercept=0)
            # Step 2- remove the trend
            # Step 2a: Create an array with the time values
            a = EEo.getTime()
            A = MV2.array(a[:], typecode='d')
            A.setAxis(0, a)
            # Step 2b: "Grows" it so it has the same shape than data
            A, EEo = genutil.grower(A, EEo)
            # Step 2c: Actually remove the trend
            EE = EEo - A * slope - intercept

            # we don't need A,EEo,slope,intercept anymore
            del (EEo)
            del (slope)
            del (intercept)
            del (A)

            # Remove the time mean
            mean = MV2.average(EE, 0)
            EE = EE - mean
            del (mean)  # could be big in memory

            # Tapering time...
            tapertozero(EE, 1, len(EE) - 1, 5 * self.frequency)

            # OK here Wheeler has some windowing on longitude, but it's commented out
            # I'll pass it for now

            # Ok, the actual FFT work
            EE = numpy.fft.fft2(EE, axes=(1, 0)) / NL / self._NTSub

            # OK NOW THE LITTLE MAGIC WITH REORDERING !
            A = numpy.absolute(EE[0:self._NTSub // 2 + 1, 1:NL // 2 + 1])**2
            B = numpy.absolute(EE[self._NTSub // 2:self._NTSub,
                                  1:NL // 2 + 1])**2
            C = numpy.absolute(EE[self._NTSub // 2:self._NTSub,
                                  0:NL // 2 + 1])**2
            D = numpy.absolute(EE[0:self._NTSub // 2 + 1, 0:NL // 2 + 1])**2
            Power[Pcount, self._NTSub // 2:, :NL // 2] = A[:, ::-1]
            Power[Pcount, :self._NTSub // 2, :NL // 2] = B[:, ::-1]
            Power[Pcount, self._NTSub // 2 + 1:, NL // 2:] = C[::-1, :]
            Power[Pcount, :self._NTSub // 2 + 1, NL // 2:] = D[::-1, :]
        # End of Pcount loop
        if self.tkbar and PP > 1:
            prev[1].destroy()
            prev[0].destroy()
        # Now generates the decorations

        # first the time axis (subdomains)
        vals = []
        bounds = []
        pp = 0
        for i in range(0, len(t) - self._NShift, self._NShift):
            st = t.subAxis(i, i + self._NTSub)
            if len(st[:]) == self._NTSub:
                pp += 1
                vals.append((st[0] + st[-1]) / 2.)
                bds = st.getBounds()
                # print 'Bounds:',bds
                if bds is None:
                    raise ValueError(
                        "Data need to have bounds on time dimension")
                else:
                    bounds.append([bds[0][0], bds[-1][1]])
        # Convert lists to arrays
        vals = numpy.array(vals)
        bounds = numpy.array(bounds)
        # Creates the time axis
        dumt = cdms2.createAxis(vals, bounds=bounds)
        dumt.id = 'time'
        dumt.units = t.units
        dumt.designateTime()
        dumt.setCalendar(t.getCalendar())

        # Create the frequencies axis
        T = cdms2.createAxis(ff)
        T.id = 'frequency'
        T.units = 'cycles per day'

        # Create the wave numbers axis
        S = cdms2.createAxis(ss)
        S.id = 'planetaryzonalwavenumber'
        S.units = '-'

        # Makes it an MV2 with axes and id (the id comes from the original data id)
        Power = MV2.array(Power,
                          axes=(dumt, T, S, lats),
                          id=data.id + '_' + 'power')
        # Adds a long name attribute
        Power.longname = 'Real power spectrum for the many different parts (i.e. over separate time divisions)'
        # And return the whole thing ordered 'time', 'latitude',
        # 'frequencies','wavenumbers'
        return Power(order='ty...')
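# The heart of process() above is: detrend each longitude series in time, remove the
# time mean, taper, FFT in time and longitude, and keep |coefficients|^2 with the
# quadrants rearranged. A stripped-down numpy sketch of the detrend-and-power step on
# a synthetic (time, longitude) slab follows; it omits the tapering and the quadrant
# bookkeeping of the full method.
import numpy as np

nt, nlon = 128, 64
t = np.arange(nt, dtype=float)
lon = np.arange(nlon) * 2. * np.pi / nlon

# synthetic slab: an eastward-propagating wave plus a linear trend and noise
slab = (np.sin(4 * lon[None, :] - 2. * np.pi * t[:, None] / 16.)
        + 0.01 * t[:, None]
        + 0.1 * np.random.randn(nt, nlon))

# least-squares detrend in time, one fit per longitude
slope, intercept = np.polyfit(t, slab, 1)
detrended = slab - (t[:, None] * slope[None, :] + intercept[None, :])

# remove the time mean, FFT over (longitude, time), and form the power
detrended = detrended - detrended.mean(axis=0)
coeffs = np.fft.fft2(detrended, axes=(1, 0)) / (nlon * nt)
power = np.abs(coeffs) ** 2

# shift so frequency (axis 0) and zonal wavenumber (axis 1) run negative to positive
power = np.fft.fftshift(power, axes=(0, 1))
print(power.shape, power.sum())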
Example No. 51
0
    def testRegrid2(self):
        outgrid = cdms2.createGaussianGrid(32)

        pth = os.path.dirname(os.path.abspath(__file__))
        f = self.getDataFile('readonly.nc')
        u = f.variables['u']
        ingrid = u.getGrid()

        sh = ingrid.shape

        regridf = Horizontal(ingrid, outgrid)
        newu = regridf(u)

        self.assertLess(abs(newu[0, 0, -1] - 488.4763488), 1.e-3)
        newu = u.regrid(outgrid, regridTool='regrid2')
        self.assertLess(abs(newu[0, 0, -1] - 488.4763488), 1.e-3)

        # Regrid TV
        tv = u.subSlice(0)
        newtv = regridf(tv)
        self.assertLess(abs(newtv[0, 0, -1] - 488.4763488), 1.e-3)
        newtv = tv.regrid(outgrid, regridTool='regrid2')
        self.assertLess(abs(newtv[0, 0, -1] - 488.4763488), 1.e-3)

        # Regrid numpy.ma
        ma = u[0]
        newma = regridf(ma)
        # Force slice result to be a scalar
        self.assertLess(abs(newma[0][-1] - 488.4763488), 1.e-3)

        # Regrid numpy
        numar = numpy.ma.filled(u[0])
        newar = regridf(numar)
        self.assertLess(abs(newar[0][-1] - 488.4763488), 1.e-3)

        # Regrid masked Variable
        umasked = f.variables['umasked']
        newum = regridf(umasked)
        self.assertLess(abs(newum[0, 0, -1] - 488.4763488), 1.e-3)

        # Set explicit missing variable
        numar = numpy.ma.filled(umasked[0])
        newar = regridf(numar, missing=-99.9)
        self.assertLess(abs(newar[0][-1] - 488.4763488), 1.e-3)

        # Set explicit mask
        mask = umasked.subRegion().mask[0]
        newar = regridf(numar, mask=mask)
        self.assertLess(abs(newar[0][-1] - 488.4763488), 1.e-3)

        # Set the input grid mask
        ingrid.setMask(mask)
        regridf2 = Horizontal(ingrid, outgrid)
        newar = regridf2(numar)
        self.assertLess(abs(newar[0][-1] - 488.4763488), 1.e-3)

        # Dataset
        g = self.getDataFile('test.xml')
        u = g.variables['u']
        outgrid = cdms2.createGaussianGrid(24)
        regridf3 = Horizontal(u.getGrid(), outgrid)
        try:
            unew = regridf3(u)
        except BaseException:
            markError('regrid dataset variable')

        lon2 = numpy.ma.array([
            90.,
            101.25,
            112.5,
            123.75,
            135.,
            146.25,
            157.5,
            168.75,
            180.,
            191.25,
            202.5,
            213.75,
            225.,
            236.25,
            247.5,
            258.75,
        ])
        lat2 = numpy.ma.array([
            -42.,
            -30.,
            -18.,
            -6.,
            6.,
            18.,
            30.,
            42.,
        ])
        grid2 = cdms2.createGenericGrid(lat2, lon2)
        b1, b2 = grid2.getBounds()
        grid2.setBounds(b1, b2)
        latw, lonw = grid2.getWeights()

        g = cdms2.createGaussianGrid(16)
        levs = numpy.array([1.0, 3.0, 5.0])
        lev = cdms2.createAxis(levs, id='level')
        levsout = numpy.array([2.0, 4.0])
        levout = cdms2.createAxis(levsout, id='level')
        dat = numpy.zeros((3, 16, 32), numpy.float32)
        dat2 = numpy.zeros((2, 16, 32), numpy.float32)
        dat[0] = 2.0
        dat[1] = 4.0
        dat[2] = 6.0
        var = cdms2.createVariable(dat,
                                   axes=(lev, g),
                                   attributes={'units': 'N/A'},
                                   id='test')
        result = var.pressureRegrid(levout)

        self.assertLess(abs(result[0, 0, 0] - 3.26185), 1.e-4)
        # Test cross-section regridder --------------------------------
        latin = cdms2.createGaussianAxis(16)
        latout = cdms2.createGaussianAxis(24)
        levsin = numpy.array([1.0, 3.0, 5.0])
        lev = cdms2.createAxis(levsin, id='level')
        levsout = numpy.array([2.0, 4.0])
        levout = cdms2.createAxis(levsout, id='level')
        dat = numpy.zeros((3, 16), numpy.float32)
        dat[0] = 2.0
        dat[1] = 4.0
        dat[2] = 6.0
        var = cdms2.createVariable(dat,
                                   axes=(lev, latin),
                                   attributes={'units': 'N/A'},
                                   id='test')
        dat2 = var.crossSectionRegrid(levout, latout)
        self.assertLess(abs(dat2[0, 0] - 3.26185), 1.e-4)
s2 = MV2.reshape(s2, sh)
s2.setAxis(1, s.getLatitude())
s2.setAxis(2, s.getLongitude())
f = cdms2.open("Test/crap.nc", "w")
for i in range(ntimes):
    # print 'Time:',i
    cmor.write(myvars[0], s.filled(), 1)
    c = time.time()
    # print 'cmor write time:',c-c0
    totcmor += c - c0
    if maxcmor < c - c0:
        maxcmor = c - c0
    if mincmor > c - c0:
        mincmor = c - c0
    c0 = c
    t = cdms2.createAxis([i])
    t.id = 'time'
    t.units = 'months since 1980'
    t.designateTime()
    s2.setAxis(0, t)
    f.write(s2, id='tas')
    c = time.time()
    # print 'cdms time:',c-c0
    totcdms += c - c0
    if maxcdms < c - c0:
        maxcdms = c - c0
    if mincdms > c - c0:
        mincdms = c - c0
    c0 = c
f.close()
cmor.close()
#lst = os.popen('ls xmls/f.e11.FAMIPC5.f19_f19.topo_2d_control.001.xml')

vars = ['psl', 'tas', 'ua', 'va', 'pr', 'rlut', 'prw', 'zg', 'tauu', 'tauv']
vars = ['rsutcs']
#vars = ['rlutcs','rsdtcs','rsds','rlds']

# CLIMATOLOGY TIME MODEL

timel = [
    15.5, 45.5, 75.5, 106, 136.5, 167, 197.5, 228.5, 259, 289.5, 320, 350.5
]
timelbds = [(0, 31), (31, 60), (60, 91), (91, 121), (121, 152), (
    152,
    182,
), (182, 213), (213, 244), (244, 274), (274, 305), (305, 335), (335, 366)]
ta = cdms.createAxis(timel, id='time')
tb = MV.array(timelbds)
tb = tb.astype('float64')
ta.setBounds(tb)
ta.climatology = "climatology_bnds"
ta.units = "days since 0"
ta.calendar = 'gregorian'
ta.axis = 'T'
ta.long_name = 'time'
ta.standard_name = 'time'

print('BEGIN PROCESSING...')

for l in lst:
    l = l[:-1]
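# The climatology time axis above hard-codes the mid-month values and month-boundary
# pairs; the same axis can be built from the month lengths of a 366-day year, as
# sketched below. Only the construction differs; the attributes match those set above.
import numpy
import cdms2 as cdms
import MV2 as MV

month_lengths = [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
edges = numpy.concatenate(([0.], numpy.cumsum(month_lengths, dtype='float64')))
bounds = numpy.column_stack((edges[:-1], edges[1:]))
mid = bounds.mean(axis=1)

ta = cdms.createAxis(mid, id='time')
tb = MV.array(bounds).astype('float64')
ta.setBounds(tb)
ta.climatology = "climatology_bnds"
ta.units = "days since 0"
ta.calendar = 'gregorian'
ta.axis = 'T'
ta.long_name = 'time'
ta.standard_name = 'time'
print(ta[:])
print(ta.getBounds())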
Example No. 54
0
            print(count)
        tmp = line.split(',')
        tmp = list(filter(None, tmp))
        tmp_len = len(tmp) - 2
        lat_ind = np.where(np.equal(latitude, np.float32(tmp[0])))[0][0]
        lon_ind = np.where(np.equal(longitude, np.float32(tmp[1])))[0][0]
        #if np.uint8(tmp[2:]) < 0:
        #    print tmp,np.uint8(tmp[2:])
        landsea[lat_ind, lon_ind] = np.int16(tmp[2:])
    del (asc, count, line, tmp, tmp_len, lat_ind, lon_ind)
    gc.collect()
    # Convert to masked array
    landsea = np.ma.masked_equal(landsea, 0)

    # Write numpy to cdms2 objects
    latitude = cdm.createAxis(latitude, id='latitude')
    latitude.long_name = 'latitude'
    latitude.standard_name = 'latitude'
    longitude = cdm.createAxis(longitude, id='longitude')
    longitude.long_name = 'longitude'
    longitude.standard_name = 'longitude'
    depth = cdm.createAxis(depth, id='depth')
    depth.long_name = 'depth'
    depth.standard_name = 'depth'
    basinmask = cdm.createVariable(basinmask,
                                   id='basinmask',
                                   axes=[depth, latitude, longitude])
    mixmask = cdm.createVariable(mixmask,
                                 id='mixmask',
                                 axes=[depth, latitude, longitude])
    landsea = cdm.createVariable(landsea,
                                 id='landsea',
                                 axes=[depth, latitude, longitude])
    dump_cmor(A, s, values, bounds)
else:
    if A.cmor and not hasCMOR:
        print(
            "Your Python does not have CMOR, using regular cdms to write out files"
        )
    print("MODEL ID:", model_id)
    if not os.path.exists(A.results_dir):
        os.makedirs(A.results_dir)
    end_tc = tc[-1].add(1, cdtime.Month)
    nm = os.path.join(
        A.results_dir,
        "{}_PMP_{}_{}_r{}i{}p{}_{}{:02d}-{}{:02d}-clim-{}.nc".format(
            v, model_id, exp, r, i, p, tc[0].year, tc[0].month, end_tc.year,
            end_tc.month, season))
    f = cdms2.open(nm, "w")
    # Global attributes copied
    for att, value in store_globals(filein).items():
        setattr(f, att, value)
    t = cdms2.createAxis(values)
    t.setBounds(numpy.array(bounds))
    t.designateTime()
    t.id = "time"
    s.setAxis(0, t)
    # copy original attributes
    for att, value in store_attributes(V).items():
        setattr(s, att, value)
    f.write(s, dtype=data.dtype)
    f.close()
    print("Results out to:", nm)
Example No. 56
0
def main():
    # User options ---
    mip = 'cmip5'
    mip = 'cmip6'
    mip = 'cmip5+6'
    #stat = 'rmse'
    #stat = 'rmsc'
    stat = 'stdv_ratio'
    SideBySide = True
    #OrgInPaper = True
    OrgInPaper = False

    #modes = ['SAM', 'NAM', 'NAO', 'PNA', 'PDO']
    modes = ['SAM', 'NAM', 'NAO', 'PNA']
    # ----------------
    if stat == 'rmse':
        Normalize = True
        plotTitle = 'RMS using CBF approach with 20CR'
    elif stat == 'rmsc':
        Normalize = True
        plotTitle = 'Centered RMS using CBF approach with 20CR'
    elif stat == 'stdv_ratio':
        Normalize = False
        plotTitle = 'Ratio of Model CBF and Obs PC std with 20CR'
    # ----------------
    imgName = 'PortraitPlot_' + mip + '_' + stat + '_include_GFDL-CM4_piControl'
    if OrgInPaper:
        imgName = imgName + '_OrgInPaper'
    # ----------------
    if mip in ['cmip5', 'cmip6']:
        stat_xy = getData(mip, stat, modes, OrgInPaper)
        stat_xy.id = stat
    elif mip in ['cmip5+6']:
        stat_xy_0 = getData('cmip5', stat, modes, OrgInPaper)
        stat_xy_1 = getData('cmip6', stat, modes, OrgInPaper)
        stat_xy_1_pi = getData('cmip6',
                               stat,
                               modes,
                               OrgInPaper,
                               piControl=True)
        if SideBySide:
            # CMIP5 and CMIP6 side by side if from same model family
            # Merge x-axis
            model_run_cmip5 = list(stat_xy_0.getAxis(1)[:])
            model_run_cmip6 = list(stat_xy_1.getAxis(1)[:])
            model_run_cmip6_pi = list(stat_xy_1_pi.getAxis(1)[:])
            model_run_all = model_run_cmip5 + model_run_cmip6 + model_run_cmip6_pi
            model_run_all = sort_human(model_run_all)
            model_run_selected = []
            model_run_selected_2 = []
            for c, model_run in enumerate(model_run_all):
                model_header = ''.join([
                    i for i in model_run.split('_')[0].split('-')[0]
                    if not i.isdigit()
                ])
                model = ''.join(
                    [i for i in model_run.split('_')[0] if not i.isdigit()])
                run = ''.join(
                    [i for i in model_run.split('_')[-1] if not i.isdigit()])
                print(model_header, model, run)
                # cmip5 models
                if model_run in model_run_cmip5:
                    # in case cmip5 model has corresponding cmip6 model
                    idx = model_run_cmip5.index(model_run)
                    #if any(model_header in s for s in model_run_cmip6) and model_header != 'GFDL':
                    if any(model_header in s
                           for s in model_run_cmip6) and model != 'GFDL-CMp':
                        try:
                            empty_array = np.concatenate(
                                (empty_array, stat_xy_0[:, idx]))
                        except:
                            empty_array = stat_xy_0[:, idx].copy()
                        model_run_selected.append(model_run)
                    # in case corresponding cmip6 model is not available
                    else:
                        if ((OrgInPaper and model_run
                             not in ['HadGEM2-CC_r2i1p1', 'HadGEM2-CC_r3i1p1'])
                                or (not OrgInPaper
                                    and not np.isnan(stat_xy_0[:, idx]).any())
                            ):  # exclude missing column
                            try:
                                empty_array2 = np.concatenate(
                                    (empty_array2, stat_xy_0[:, idx]))
                            except:
                                empty_array2 = stat_xy_0[:, idx].copy()
                            model_run_selected_2.append(model_run)
                # cmip6 models
                elif (model_run in model_run_cmip6 and model_header != 'GFDL'):
                    idx = model_run_cmip6.index(model_run)
                    try:
                        empty_array = np.concatenate(
                            (empty_array, stat_xy_1[:, idx]))
                    except:
                        empty_array = stat_xy_1[:, idx].copy()
                    model_run_selected.append(
                        '* ' + model_run)  # add star mark for x-axis labels
                elif (model_run in model_run_cmip6_pi
                      and model_header == 'GFDL'):
                    idx = model_run_cmip6_pi.index(model_run)
                    try:
                        empty_array = np.concatenate(
                            (empty_array, stat_xy_1_pi[:, idx]))
                    except:
                        empty_array = stat_xy_1_pi[:, idx].copy()
                    model_run_selected.append(
                        'p* ' + model_run)  # add star mark for x-axis labels
                else:
                    print('Skip ' + model_run)
            imgName = imgName + '_SideBySide_selected'
            xaxis_label = model_run_selected
            xaxis_label_2 = model_run_selected_2
            stat_xy = np.transpose(
                np.reshape(
                    empty_array,
                    (len(model_run_selected), len(stat_xy_0.getAxis(0)))))
            stat_xy_2 = np.transpose(
                np.reshape(
                    empty_array2,
                    (len(model_run_selected_2), len(stat_xy_0.getAxis(0)))))
        else:
            # Add empty column for clear separation btw cmip 5 and 6
            empty_col = np.empty((len(stat_xy_0.getAxis(0)), 3))
            empty_col[:] = np.nan
            stat_xy = np.concatenate((stat_xy_0, empty_col, stat_xy_1), axis=1)
            # Customize x-axis label
            xaxis_label = (
                ['[CMIP5] ' + stat_xy_0.getAxis(1)[0]]  # cmip5, 1st model
                + [r for r in list(stat_xy_0.getAxis(1)[1:])]  # cmip5, rest
                + [' '] * 3  # separation
                + ['[CMIP6] ' + stat_xy_1.getAxis(1)[0]] +
                [r for r in list(stat_xy_1.getAxis(1)[1:])])
        # Reduce text for x-axis label on plot
        xaxis_label = reduce_text(xaxis_label)
        # Decorate axes
        X = cdms2.createAxis(xaxis_label)
        Y = cdms2.createAxis(stat_xy_0.getAxis(0)[:])
        stat_xy = MV2.array(stat_xy, axes=(Y, X), id=stat)
    else:
        sys.exit('Error: mip ' + mip + ' not defined')
    # Normalize rows by its median
    if Normalize:
        # Normalize by median value
        stat_xy = normalize_by_median(stat_xy)
        # Revise image file name
        imgName = imgName + '_normalized'
    # Colormap to be used
    if stat in ['rmse', 'rmsc']:
        colormap = "bl_to_darkred"
        clevels = [
            -1.e20, -.5, -.4, -.3, -.2, -.1, 0, .1, .2, .3, .4, .5, 1.e20
        ]
        ccolors = vcs.getcolors(clevels, split=0, colors=range(16, 240))
    elif stat == 'stdv_ratio':
        colormap = "bl_to_darkred"
        clevels = [r / 10. for r in list(range(5, 16, 1))]
        clevels.insert(0, -1.e20)
        clevels.append(1.e20)
        ccolors = vcs.getcolors(clevels, split=0, colors=range(16, 240))
        ccolors[4:8] = ['lightgreen', 'green', 'green', 'darkgreen']
    #
    # Portrait plot
    #
    plot_portrait(
        stat_xy,
        imgName,
        plotTitle=plotTitle,
        colormap=colormap,
        clevels=clevels,
        ccolors=ccolors,
        parea=(.05, .88, .25, .9),
        img_length=2600,
        img_height=800,
        xtic_textsize=10,
        ytic_textsize=10,
        missing_color='grey',
        logo=False,
    )
    try:
        xaxis_label_2 = reduce_text(xaxis_label_2)
        X2 = cdms2.createAxis(xaxis_label_2)
        stat_xy_2 = MV2.array(stat_xy_2, axes=(Y, X2), id=stat)
        imgName2 = imgName + '_2'
        plot_portrait(
            stat_xy_2,
            imgName2,
            plotTitle=plotTitle,
            colormap=colormap,
            clevels=clevels,
            ccolors=ccolors,
            parea=(.05, .88, .25, .9),
            img_length=1300,
            img_height=800,
            xtic_textsize=10,
            ytic_textsize=10,
            missing_color='grey',
            logo=False,
        )
    except:
        pass
Example No. 57
0
def makeHeatContent(salt, temp, destMask, thetao, pressure):
    """
    The makeHeatContent() function takes 3D (not temporal) arguments and creates
    heat content which is then mapped to a destination grid and written to a
    specified variable

    Author: Paul J. Durack : [email protected] : @durack1.
    Created on Tue Nov 24 15:34:30 2015.

    Inputs:
    ------
    - salt(lev,lat,lon) - 3D array.
    - temp(lev,lat,lon) - 3D array either in-situ or potential temperature.
    - destGridArray(str) - 2D array with valid grid and mask.
    - thetao(bool) - boolean value specifying either in-situ or potential temperature arrays provided.
    - pressure(bool) - boolean value specifying whether lev-coordinate is pressure (dbar) or depth (m).

    Usage:
    ------
        >>> from oceanLib import makeHeatContent
        >>> makeHeatContent(salt,temp,destGridArray,thetao=True,pressure=False)

    Notes:
    -----
    - PJD 24 Nov 2015 - Migrated into new oceanLib from heatContentLib
    - TODO: Better deal with insitu vs thetao variables
    - TODO:
    """

    # Remap variables to short names
    #print salt.getAxisIds()
    s = salt(squeeze=1)
    # Trim off singleton time dimension
    #print s.getAxisIds()
    t = temp(squeeze=1)
    mask = destMask
    #print mask.getAxisIds()
    del (salt, temp, destMask)
    gc.collect()
    depthInd = 0
    # Set depth coordinate index

    #print 's:    ',s.min(),s.max()
    #print 't:    ',t.min(),t.max()

    # Clamp unphysically low temperatures (out-of-range or fill values) to -2.6 degC
    t = mv.where(t < -2.6, -2.6, t)

    # Calculate pressure - inputs depth & lat
    # Create z-coordinate from salinity input
    if not pressure:
        zCoord = s.getAxis(depthInd)
        # Assume (depth, latitude, longitude) ordering after the time squeeze
        yCoord = s.getAxis(depthInd + 1)
        yCoord = tile(yCoord, (s.shape[depthInd + 2], 1)).transpose()
        depthLevels = tile(
            zCoord.getValue(),
            (s.shape[depthInd + 2], s.shape[depthInd + 1], 1)).transpose()
        pressureLevels = sw.pres(np.array(depthLevels), np.array(yCoord))
        del (zCoord, yCoord, depthLevels)
        gc.collect()
    else:
        pressureLevels = s.getAxis(depthInd)
        #print pressureLevels.getValue()
        pressureLevels = transpose(
            tile(pressureLevels,
                 (s.shape[depthInd + 2], s.shape[depthInd + 1], 1)))
    pressureLevels = cdm.createVariable(pressureLevels, id='pressureLevels')
    pressureLevels.setAxis(0, s.getAxis(depthInd))
    pressureLevels.setAxis(1, s.getAxis(depthInd + 1))
    pressureLevels.setAxis(2, s.getAxis(depthInd + 2))
    pressureLevels.units_long = 'decibar (pressure)'
    pressureLevels.positive = 'down'
    pressureLevels.long_name = 'sea_water_pressure'
    pressureLevels.standard_name = 'sea_water_pressure'
    pressureLevels.units = 'decibar'
    pressureLevels.axis = 'Z'

    #print 'pres: ',pressureLevels.min(),pressureLevels.max()
    #print pressureLevels.shape
    #print s.shape
    #print t.shape
    #print mask.shape

    # Calculate temp,rho,cp - inputs temp,salt,pressure
    if thetao:
        # Convert potential temperature to in-situ temperature (degrees C)
        temp = sw.temp(np.array(s), np.array(t), np.array(pressureLevels))
    else:
        # Input temperature is already in-situ
        temp = t
    rho = sw.dens(np.array(s), np.array(temp), np.array(pressureLevels))
    # units kg m-3
    cp = sw.cp(np.array(s), np.array(temp), np.array(pressureLevels))
    # units J kg-1 C-1

    # Correct instances of NaN values and fix masks - applied before cdms variables are created otherwise names/ids/attributes are reset
    temp = scrubNaNAndMask(temp, s)
    rho = scrubNaNAndMask(rho, s)
    cp = scrubNaNAndMask(cp, s)

    #print 'temp: ',temp.min(),temp.max()
    #print 'rho:  ',rho.min(),rho.max()
    #print 'cp:   ',cp.min(),cp.max()

    # Calculate heatContent - inputs temp,rho,cp
    heatContent = np.array(temp) * np.array(rho) * np.array(cp)
    # units J m-3 (volumetric heat content)

    # Correct instances of NaN values and fix masks - applied before cdms variables are created otherwise names/ids/attributes are reset
    heatContent = scrubNaNAndMask(heatContent, s)
    #print 'hc:   ',heatContent.min(),heatContent.max()

    # Interpolate to standard levels - inputs heatContent,levels
    newDepth = np.array([
        5, 10, 20, 30, 40, 50, 75, 100, 125, 150, 200, 300, 500, 700, 1000,
        1500, 1800, 2000
    ]).astype('f')
    newDepth_bounds = np.array([[0, 5], [5, 10], [10, 20], [20, 30], [30, 40],
                                [40, 50], [50, 75], [75, 100], [100, 125],
                                [125, 150], [150, 200], [200, 300], [300, 500],
                                [500, 700], [700, 1000], [1000, 1500],
                                [1500, 1800], [1800, 2000]]).astype('f')
    # Interpolate to standard levels
    #print heatContent.shape
    #print heatContent.getAxisIds()
    #print pressureLevels.shape
    #print pressureLevels.getAxisIds()

    # Reset variable axes
    heatContent.setAxis(0, s.getAxis(0))
    #heatContent.setAxis(1,s.getAxis(1))
    #heatContent.setAxis(2,s.getAxis(2))

    # pdb.set_trace()  # debug breakpoint (left disabled)

    heatContent.setGrid(s.getGrid())

    #print heatContent.shape
    #print heatContent.getAxisIds()
    #print pressureLevels.shape
    #print pressureLevels.getAxisIds()

    heatContent_depthInterp = cdu.linearInterpolation(heatContent,
                                                      pressureLevels,
                                                      levels=newDepth)
    # Fix bounds
    newDepth = heatContent_depthInterp.getAxis(0)
    newDepth.setBounds(newDepth_bounds)
    del (newDepth_bounds)
    newDepth.id = 'depth2'
    newDepth.units_long = 'decibar (pressure)'
    newDepth.positive = 'down'
    newDepth.long_name = 'sea_water_pressure'
    newDepth.standard_name = 'sea_water_pressure'
    newDepth.units = 'decibar'
    newDepth.axis = 'Z'

    #print 'hc_interp:',heatContent_depthInterp.min(),heatContent_depthInterp.max()

    # Integrate to 700 dbar - inputs heatContent

    heatContent_depthInteg = cdu.averager(
        heatContent_depthInterp[0:14, ...],
        axis=0,
        weights='weighted',
        action='sum')(squeeze=1)  # Depth-weighted integral of heat content over the upper 700 dbar (first 14 levels)
    # Assign all axis info

    #print heatContent_depthInteg.shape
    # pdb.set_trace()  # debug breakpoint (left disabled)
    # Interpolate in x,y - inputs heatContent
    #tmp1 = heatContent_depthInteg.regrid(mask.getGrid(),regridTool='esmf',regridMethod='linear') ; # Use defaults - ,coordSys='deg',diag = {},periodicity=1)
    tmp1 = heatContent_depthInteg.regrid(mask,
                                         regridTool='esmf',
                                         regridMethod='linear')
    # Use defaults - ,coordSys='deg',diag = {},periodicity=1)
    #print tmp1.shape

    tmp1 = mv.where(tmp1 < 0, 0, tmp1)
    # Fix for negative values

    # Infill - inputs heatContent
    # Create inputs for interpolation
    points = np.zeros([(mask.shape[0] * mask.shape[1]), 2])
    # Build one (lat, lon) pair per destination grid cell
    latcounter = 0
    loncounter = 0
    for count, data in enumerate(points):
        if not np.mod(count, mask.shape[1]) and not count == 0:
            latcounter = latcounter + 1
            loncounter = 0
        points[count, 0] = mask.getLatitude().getValue()[latcounter]
        points[count, 1] = mask.getLongitude().getValue()[loncounter]
        loncounter = loncounter + 1
    del (count, data, latcounter, loncounter)
    gc.collect()
    valid = np.logical_not(tmp1.mask)
    # Get inverted-logic boolean mask from variable
    if valid.size == 1:
        print '** No valid mask found, skipping **'
        return
    valid = valid.flatten()
    # Flatten 2D to 1D

    #maskFilled  = mask(tmp,points,valid)
    interpolant = interpolate.LinearNDInterpolator(
        points[valid, :],
        np.array(tmp1.flatten())[valid])
    # Create interpolant
    maskFill = interpolant(points[:, 0].squeeze(), points[:, 1].squeeze())
    # Use interpolant to create filled matrix
    maskFill = np.reshape(maskFill, mask.shape)
    # Resize to original dimensions

    # Fix issues with interpolant
    tmp2 = mv.where(np.isnan(maskFill), 1e+20, maskFill)
    # Fix for NaN values
    tmp2 = mv.where(tmp2 > tmp1.max(), 0, tmp2)
    # Fix for max values
    tmp2 = mv.where(tmp2 < tmp1.min(), 0, tmp2)
    # Fix for min values
    tmp = mv.masked_where(mask.mask, tmp2)
    #print tmp.shape

    # Redress variable
    heatContent = cdm.createVariable([tmp], id='heatContent')
    depthInt = cdm.createAxis([350], id='depth')
    depthInt.setBounds(np.array([0, 700]))
    depthInt.units_long = 'decibar (pressure)'
    depthInt.positive = 'down'
    depthInt.long_name = 'sea_water_pressure'
    depthInt.standard_name = 'sea_water_pressure'
    depthInt.units = 'decibar'
    depthInt.axis = 'Z'
    heatContent.setAxis(0, depthInt)
    heatContent.setAxis(1, mask.getAxis(0))
    heatContent.setAxis(2, mask.getAxis(1))
    heatContent.units_long = 'Joules'
    heatContent.long_name = 'sea_water_heat_content'
    heatContent.standard_name = 'sea_water_heat_content'
    heatContent.units = 'J'

    return heatContent
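A hedged usage sketch for makeHeatContent(), mirroring the docstring example above; the file names and variable ids are placeholders rather than part of the original code.

# Hypothetical call to makeHeatContent(); file names and variable ids are placeholders.
import cdms2 as cdm

fS = cdm.open('so_example.nc')      # salinity file (placeholder name)
fT = cdm.open('thetao_example.nc')  # potential-temperature file (placeholder name)
fM = cdm.open('target_grid.nc')     # file carrying the masked destination grid

salt = fS('so')              # (time, lev, lat, lon); the singleton time is squeezed inside
temp = fT('thetao')          # potential temperature, degrees C
destMask = fM('basin_mask')  # 2D masked array with the destination grid and mask

hc700 = makeHeatContent(salt, temp, destMask, thetao=True, pressure=False)
# hc700 is a cdms2 variable of upper-700-dbar heat content on the destination grid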
Exemplo n.º 58
0
def set_time_axis(length):
    Time = cdms.createAxis(np.arange(length) + 1)
    Time.id = 'time'
    return Time
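A short usage sketch for the helper above; the data array is hypothetical.

# Hypothetical usage of set_time_axis(); the data array is made up for illustration.
import numpy as np
import MV2

monthly = MV2.array(np.random.rand(12, 4, 5))        # e.g. 12 time steps of a small (lat, lon) field
monthly.setAxis(0, set_time_axis(monthly.shape[0]))
print(monthly.getAxis(0).id)                         # -> 'time'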
Exemplo n.º 59
0
# Adapted for numpy/ma/cdms2 by convertcdms.py
import vcs
import cdms2 as cdms
import MV2
import support
import os
bg = support.bg
f = cdms.open(os.path.join(vcs.sample_data, 'clt.nc'))
s = f('clt', slice(0, 5), latitude=(0., 0., 'cob'), squeeze=1)

nt = s.shape[0]

ax = MV2.arange(nt, typecode='d')
bounds = MV2.zeros((nt, 2), typecode='d')

for i in range(nt):
    bounds[i, 0] = ax[i]
    bounds[i, 1] = ax[i] + 1.
ax = cdms.createAxis(ax)  # no bounds attached; cdms will generate default halfway bounds
s.setAxis(0, ax)
x = vcs.init()
x.plot(s, bg=bg)  # plotted with the default halfway bounds
support.check_plot(x)
y = vcs.init()
# instead of being halfway, bounds are from node to node
y.plot(s, ybounds=bounds, bg=bg)
support.check_plot(y)
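As an alternative to passing ybounds= at plot time, the node-to-node bounds could presumably be attached to the axis itself with setBounds (the same call used in the heat-content example above); this is a sketch and has not been verified against this particular plot path.

# Sketch: attach the node-to-node bounds directly to the axis instead of passing ybounds=.
# Assumes nt, bounds, s, bg and support are defined as in the example above.
ax2 = cdms.createAxis(MV2.arange(nt, typecode='d'))
ax2.setBounds(bounds.filled())  # setBounds expects an (n, 2) array
s.setAxis(0, ax2)
z = vcs.init()
z.plot(s, bg=bg)  # bounds now come from the axis itself
support.check_plot(z)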
Exemplo n.º 60
0
    def testBadCalendar(self):
        t = cdms2.createAxis([1, 2, 3, 4])
        t.designateTime()
        t.setCalendar(cdtime.ClimCalendar)
        with self.assertRaises(cdms2.CDMSError):
            t.setCalendar(3421)
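For contrast with the failing case above, a companion sketch of a valid calendar assignment; the units string is illustrative and not part of the original test.

    def testGoodCalendar(self):
        # Companion sketch: a valid cdtime calendar constant is accepted without error.
        t = cdms2.createAxis([1, 2, 3, 4])
        t.designateTime()
        t.units = 'months since 1980-01-01'   # illustrative units, not from the original test
        t.setCalendar(cdtime.NoLeapCalendar)
        self.assertEqual(t.getCalendar(), cdtime.NoLeapCalendar)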