def find_time_axis(self):
        """Return the dataset's time variable, resolving and caching it in self._tvar.

        Resolution order:
        1. If the configured temporal dimension name is a dimension of the
           dataset and also a variable, use that variable directly.
        2. Otherwise use the first variable whose outermost dimension is the
           temporal dimension.
        3. If the temporal dimension is not a dataset dimension at all, open
           the dataset with cdms2 and ask it for the standard 'time' axis.

        Returns None when no time axis can be identified.
        """
        if self._tvar is None:
            # Name of the temporal dimension as declared in the external
            # dataset resource description.
            tdim = self._ext_dataset_res.dataset_description.parameters["temporal_dimension"]
            if tdim in self._ds.dimensions:
                if tdim in self._ds.variables:
                    self._tvar = self._ds.variables[tdim]
                else:
                    # find the first variable that has the tdim as the outer dimension
                    for vk in self._ds.variables:
                        var = self._ds.variables[vk]
                        if tdim in var.dimensions and var.dimensions.index(tdim) == 0:
                            self._tvar = var
                            break
            else:
                # try to figure out which is the time axis based on standard attributes
                # Lazily open (and cache) the cdms2 view of the same dataset.
                if self._cdms_ds is None:
                    self._cdms_ds = cdms2.openDataset(self._ds_url)

                taxis = self._cdms_ds.getAxis('time')
                if taxis is not None:
                    self._tvar = self._ds.variables[taxis.id]
                else:
                    self._tvar = None

        return self._tvar
    def find_time_axis(self):
        if self._tvar is None:
            tdim = self._ext_dataset_res.dataset_description.parameters["temporal_dimension"]
            if tdim in self._ds.dimensions:
                if tdim in self._ds.variables:
                    self._tvar = self._ds.variables[tdim]
                else:
                    # find the first variable that has the tdim as the outer dimension
                    for vk in self._ds.variables:
                        var = self._ds.variables[vk]
                        if tdim in var.dimensions and var.dimensions.index(tdim) == 0:
                            self._tvar = var
                            break
            else:
                # try to figure out which is the time axis based on standard attributes
                if self._cdms_ds is None:
                    self._cdms_ds = cdms2.openDataset(self._ds_url)

                taxis = self._cdms_ds.getAxis("time")
                if taxis is not None:
                    self._tvar = self._ds.variables[taxis.id]
                else:
                    self._tvar = None

        return self._tvar
Esempio n. 3
0
def writenetcdf(slab, filename, mode="a"):
    """Write *slab* to a netCDF file.

    slab     -- anything cdms2.asVariable will accept
    filename -- target netCDF file
    mode     -- 'a' append, 'w' replace ('r' is a deprecated legacy alias
                for 'w')
    """
    if mode == 'r':
        mode = 'w'
    slab = cdms2.asVariable(slab, 0)
    f = cdms2.openDataset(filename, mode)
    try:
        f.write(slab)
    finally:
        # BUG FIX: the original leaked the open dataset if write() raised.
        f.close()
Esempio n. 4
0
    def __init__(self, dataset_file, forecast_times, path="."):
        """Creates a set of forecasts.  Normally you do it by something like
        f = forecasts( 'file.xml', (min_time, max_time) )
        or
        f = forecasts( 'file.xml', (min_time, max_time), '/home/me/data/' )
        or
        f = forecasts( 'file.xml', [ time1, time2, time3, time4, time5 ] )

        where the two or three arguments are::

        1. the name of a dataset xml file generated by "cdscan --forecast ..."

        2. Times here are the times when the forecasts began (tau=0, aka reference time).
        (i) If you use a 2-item tuple, forecasts will be chosen which start at a time
        t between the min and max times, e.g. min_time <= t < max_time .
        (ii) If you use a list, it will be the exact start (tau=0) times for the
        forecasts to be included.
        (iii) If you use a 3-item tuple, the first items are (min_time,max_time)
        as in a 2-item tuple.  The third component of the tuple is the
        open-closed string.  This determines whether endpoints are included
        The first character should be 'o' or 'c' depending on whether you want t with
        min_time<t or min_time<=t.  Similarly the second character should be 'o' or c'
        for t<max_time or t<=max_time .  Any other characters will be ignored.
        Thus ( min_time, max_time, 'co' ) is equivalent to ( min_time, max_time ).
        (iv) The string 'All' means to use all available forecasts.

        Times can be specified either as 13-digit long integers, e.g.
        2006012300000 for the first second of January 23, 2006, or as
        component times (comptime) in the cdtime module, or as
        a string in the format "2010-08-25 15:26:00".

        3. An optional path for the data files; use this if the xml file
        contains filenames without complete paths.

        As for the forecast class, this opens files when initiated, so when you
        are finished with the forecasts, you should close the files by calling
        forecasts.close() .
        """

        # Create dataset_list to get a forecast file from each forecast time.
        self.dataset = cdms2.openDataset(dataset_file, dpath=path)
        # cdms_filemap records, per variable, the files that make up the
        # dataset; element [4] of each file entry is used below as the
        # forecast start time (presumably — confirm against cdscan output).
        fm = cdms2.dataset.parseFileMap(self.dataset.cdms_filemap)
        self.alltimesl = [f[4] for f in fm[0][1]]  # 64-bit (long) integers
        dataset_list = fm[0][1]
        for f in fm[1:]:
            dataset_list.extend(f[1])

        # Normalize the caller's time specification (tuple range, explicit
        # list, or 'All') into a concrete list of start times.
        mytimesl = self.forecast_times_to_list(forecast_times)
        if mytimesl == []:
            raise CDMSError(
                "bad forecast_times argument to forecasts.__init__")
        # One forecast object per requested start time.
        self.fcs = [forecast(t, dataset_list, path) for t in mytimesl]
Esempio n. 5
0
def available_forecasts(dataset_file, path="."):
    """Return the generating times of all forecasts reachable through the
    given cdscan-generated dataset xml file.

    The times are 64-bit integers; they can be converted to component times
    with the function two_times_from_one.  This helps in choosing arguments
    for initializing a "forecasts" (forecast set) object.
    """
    ds = cdms2.openDataset(dataset_file, dpath=path)
    filemap = cdms2.dataset.parseFileMap(ds.cdms_filemap)
    start_times = [entry[4] for entry in filemap[0][1]]  # 64-bit (long) integers
    ds.close()
    return start_times
Esempio n. 6
0
 def print_Mdata(self, dataPath):
     """Poll for up to ~30 seconds until *dataPath* exists, then log metadata
     (id, shape, dimension order) and a first-element sample for every
     variable in the file — returning after the first variable.
     """
     for k in range(0, 30):
         if (os.path.isfile(dataPath)):
             f = cdms2.openDataset(dataPath)
             for variable in f.variables.values():
                 self.logger.info("Produced result " + variable.id +
                                  ", shape: " + str(variable.shape) +
                                  ", dims: " + variable.getOrder() +
                                  " from file: " + dataPath)
                 self.logger.info("Data Sample: " + str(variable[0]))
                 # Only the first variable is reported; then we're done.
                 return
         else:
             # File not there yet — wait a second and retry.
             time.sleep(1)
Esempio n. 7
0
def available_forecasts(dataset_file, path="."):
    """List the start (generating) times of every forecast described by a
    cdscan-generated dataset xml file.

    Returned values are 64-bit integers, convertible to component times via
    two_times_from_one; useful for picking arguments to the "forecasts"
    (forecast set) constructor.
    """
    handle = cdms2.openDataset(dataset_file, dpath=path)
    parsed_map = cdms2.dataset.parseFileMap(handle.cdms_filemap)
    times = [record[4] for record in parsed_map[0][1]]  # 64-bit (long) integers
    handle.close()
    return times
Esempio n. 8
0
    def __init__(self, dataset_file, forecast_times, path="."):
        """Creates a set of forecasts.  Normally you do it by something like
        f = forecasts( 'file.xml', (min_time, max_time) )
        or
        f = forecasts( 'file.xml', (min_time, max_time), '/home/me/data/' )
        or
        f = forecasts( 'file.xml', [ time1, time2, time3, time4, time5 ] )

        where the two or three arguments are::

        1. the name of a dataset xml file generated by "cdscan --forecast ..."

        2. Times here are the times when the forecasts began (tau=0, aka reference time).
        (i) If you use a 2-item tuple, forecasts will be chosen which start at a time
        t between the min and max times, e.g. min_time <= t < max_time .
        (ii) If you use a list, it will be the exact start (tau=0) times for the
        forecasts to be included.
        (iii) If you use a 3-item tuple, the first items are (min_time,max_time)
        as in a 2-item tuple.  The third component of the tuple is the
        open-closed string.  This determines whether endpoints are included
        The first character should be 'o' or 'c' depending on whether you want t with
        min_time<t or min_time<=t.  Similarly the second character should be 'o' or c'
        for t<max_time or t<=max_time .  Any other characters will be ignored.
        Thus ( min_time, max_time, 'co' ) is equivalent to ( min_time, max_time ).
        (iv) The string 'All' means to use all available forecasts.

        Times can be specified either as 13-digit long integers, e.g.
        2006012300000 for the first second of January 23, 2006, or as
        component times (comptime) in the cdtime module, or as
        a string in the format "2010-08-25 15:26:00".

        3. An optional path for the data files; use this if the xml file
        contains filenames without complete paths.

        As for the forecast class, this opens files when initiated, so when you
        are finished with the forecasts, you should close the files by calling
        forecasts.close() .
        """

        # Create dataset_list to get a forecast file from each forecast time.
        self.dataset = cdms2.openDataset(dataset_file, dpath=path)
        fm = cdms2.dataset.parseFileMap(self.dataset.cdms_filemap)
        self.alltimesl = [f[4] for f in fm[0][1]]  # 64-bit (long) integers
        dataset_list = fm[0][1]
        for f in fm[1:]:
            dataset_list.extend(f[1])

        mytimesl = self.forecast_times_to_list(forecast_times)
        if mytimesl == []:
            # FIX: use the parenthesized raise form — the original
            # 'raise CDMSError, "..."' comma syntax is a SyntaxError on
            # Python 3 (and the call form works on Python 2 as well).
            raise CDMSError(
                "bad forecast_times argument to forecasts.__init__")
        self.fcs = [forecast(t, dataset_list, path) for t in mytimesl]
Esempio n. 9
0
def _make_scrip(src_dir, src_file, src_var, src_grid_title):
    """Write a SCRIP grid file describing the grid of *src_var*.

    See file header.  The output file is named '<src_grid_title>_scrip.nc'.
    """
    out_name = '{0}_scrip.nc'.format(src_grid_title)

    source = cdms2.openDataset('{0}/{1}'.format(src_dir, src_file))
    grid = source.variables[src_var].getGrid()

    cdms2.writeScripGrid(out_name, grid, src_grid_title)

    return
Esempio n. 10
0
def writenetcdf(slab, filename, mode="a"):
    """
    writenetcdf(slab, filename, mode="a") writes slab to the file.
    :param mode: One of 'a' append, or 'w'  replace ('r' is a deprecated
        legacy alias for 'w')
    :type mode: str

    :param slab: Anything :py:func:`cdms2.asVariable` will accept
    :type: see :py:func:`cdms2.asVariable`
    """
    if mode == 'r':
        mode = 'w'
    slab = cdms2.asVariable(slab, 0)
    f = cdms2.openDataset(filename, mode)
    try:
        f.write(slab)
    finally:
        # BUG FIX: guarantee the dataset is closed even if write() raises.
        f.close()
Esempio n. 11
0
def _check_endtime(filename):
    """Print the last entry of the 'sst' variable's time axis as a component
    time.  See file header.
    """
    dataset = cdms2.openDataset(filename)
    sst_var = dataset.variables['sst']
    taxis = sst_var.getTime()

    # Keep the exact print form of the original so output is unchanged.
    print ('Final time: ', cdtime.reltime(taxis[-1], taxis.units).tocomp())

    dataset.close()
    return
Esempio n. 12
0
    def __init__(self, dataset_file, forecast_times, path="."):
        """Create a set of forecasts from a cdscan-generated dataset xml file.

        dataset_file   -- xml file produced by ``cdscan --forecast ...``
        forecast_times -- selection of forecast start (tau=0) times; handed to
                          forecast_times_to_list for normalization
        path           -- optional directory for data files referenced by the
                          xml file without complete paths

        Raises CDMSError if forecast_times selects no forecasts.
        """

        # Create dataset_list to get a forecast file from each forecast time.
        self.dataset = cdms2.openDataset(dataset_file, dpath=path)
        fm = cdms2.dataset.parseFileMap(self.dataset.cdms_filemap)
        self.alltimesl = [f[4] for f in fm[0][1]]  # 64-bit (long) integers
        dataset_list = fm[0][1]
        for f in fm[1:]:
            dataset_list.extend(f[1])

        # Build one forecast object per requested start time.
        mytimesl = self.forecast_times_to_list(forecast_times)
        if mytimesl == []:
            raise CDMSError(
                "bad forecast_times argument to forecasts.__init__")
        self.fcs = [forecast(t, dataset_list, path) for t in mytimesl]
Esempio n. 13
0
 def print_data(self, dataPath):
     """Poll for up to ~30 seconds until *dataPath* exists, then log the
     shape, dimension order and flattened values of its first variable.
     Errors while reading/printing are logged and swallowed.
     """
     for k in range(0, 30):
         if (os.path.isfile(dataPath)):
             try:
                 f = cdms2.openDataset(
                     dataPath)  # """:type : cdms2.CdmsFile """
                 # NOTE(review): .values()[0] indexes a list — Python 2 only;
                 # on Python 3 dict_values is not subscriptable.
                 varName = f.variables.values()[0].id
                 spatialData = f(
                     varName)  # """:type : cdms2.FileVariable """
                 self.logger.info("Produced result, shape: " +
                                  str(spatialData.shape) + ", dims: " +
                                  spatialData.getOrder())
                 #            self.logger.info( "Data: \n" + ', '.join( str(x) for x in spatialData.getValue() ) )
                 self.logger.info(
                     "Data: \n" +
                     str(spatialData.squeeze().flatten().getValue()))
             except Exception:
                 self.logger.error(" ** Error printing result data ***")
             return
         else:
             # File not there yet — wait a second and retry.
             time.sleep(1)
Esempio n. 14
0
import os, random, string, cdms2, vcs

def random_id(length):
    """Return a random alphanumeric string of *length* characters."""
    # BUG FIX: the original body was a bare expression with no 'return',
    # so random_id always returned None.
    return ''.join(random.choice(string.ascii_lowercase +
                                 string.ascii_uppercase +
                                 string.digits) for _ in range(length))

def nbDisplay(x):
    """Render vcs canvas *x* inline (IPython) by round-tripping via a PNG."""
    from IPython.display import Image, display
    outFile = '/tmp/vcsnb-{0}.png'.format(random_id(8))
    x.png(outFile)
    display(Image(outFile))

dataPath = "/home/tpmaxwel/.cdas/cache/cdscan/merra2_mon_ua.xml"
varName = "ua"
f = cdms2.openDataset(dataPath)
var = f(varName, time=slice(0, 1), level=slice(10, 11))
s = var[0]
x = vcs.init()
x.plot(s, variable=var, bg=True)
Esempio n. 15
0
def process_file(ifile,suffix,average=False,forcedaily=False,mask=True,xlist=[]):
    """Split each STASH-coded variable in UM file *ifile* into its own netCDF
    file, appending to an existing output file or creating a new one.

    ifile      -- input file readable by cdms2
    suffix     -- optional suffix appended to each output file name
    average    -- write the time average of each variable instead of every step
    forcedaily -- round time values to 1 decimal place (cdms workaround)
    mask       -- apply the Heavyside-function correction to P LEV/UV fields
                  (item codes 30201..30303)
    xlist      -- item codes to skip

    Python 2 code: uses print statements and the 'raise error, msg' form.
    NOTE(review): mutable default xlist=[] is shared across calls; it is only
    read here, so this is harmless, but None would be safer.
    """

    try:
        d = cdms2.open(ifile)
    except:
        print "Error opening file", ifile
        usage()
        sys.exit(1)

    hcrit = 0.5 # Critical value of Heavyside function for inclusion.
    ofilelist = []

    for vn in d.variables:
        var = d.variables[vn]
        # Need to check whether it really has a stash_item to skip coordinate variables

        # Note: need to match both item and section number
        if not hasattr(var,'stash_item'):
            continue
        item_code = var.stash_section[0]*1000 + var.stash_item[0]
        if item_code in xlist:
            print "Skipping", item_code
            continue

        grid = var.getGrid()
        time = var.getTime()
        timevals = np.array(time[:])
        if forcedaily:
            # Work around cdms error in times
            for k in range(len(time)):
                timevals[k] = round(timevals[k],1)

        umvar = stashvar.StashVar(item_code,var.stash_model[0])
        vname = umvar.name
        print vname, var[0,0,0,0]

        # Create filename from variable name and cell_methods,
        # checking for name collisions
        if suffix:
            ofile = "%s_%s.nc" % (umvar.uniquename, suffix)
        else:
            ofile = "%s.nc" % umvar.uniquename
        if ofile in ofilelist:
            raise Exception("Duplicate file name %s" % ofile)
        ofilelist.append(ofile)

    #  If output file exists then append to it, otherwise create a new file
        # NOTE(review): 'file' shadows the Python 2 builtin of the same name.
        try:
            file = cdms2.openDataset(ofile, 'r+')
            newv = file.variables[vname]
            newtime = newv.getTime()
        except cdms2.error.CDMSError:
            file = cdms2.createDataset(ofile)
        # Stop it creating the bounds_latitude, bounds_longitude variables
            cdms2.setAutoBounds("off")

            # By default get names like latitude0, longitude1
            # Need this awkwardness to get the variable/dimension name set correctly
            # Is there a way to change the name cdms uses after 
            # newlat = newgrid.getLatitude() ????
            newlat = file.createAxis('lat', grid.getLatitude()[:])
            newlat.standard_name = "latitude"
            newlat.axis = "Y"
            newlat.units = 'degrees_north'
            newlon = file.createAxis('lon', grid.getLongitude()[:])
            newlon.standard_name = "longitude"
            newlon.axis = "X"
            newlon.units = 'degrees_east'

            order = var.getOrder()
            if order[1] == 'z':
                lev = var.getLevel()
                if len(lev) > 1:
                    newlev = file.createAxis('lev', lev[:])
                    for attr in ('standard_name', 'units', 'positive', 'axis'):
                        if hasattr(lev,attr):
                            setattr(newlev, attr, getattr(lev,attr))
                else:
                    newlev = None
            else:
                # Pseudo-dimension
                pdim = var.getAxis(1)
                if len(pdim) > 1:
                    newlev = file.createAxis('pseudo', pdim[:])
                else:
                    newlev = None

            newtime = file.createAxis('time', None, cdms2.Unlimited)
            newtime.standard_name = "time"
            newtime.units = time.units # "days since " + `baseyear` + "-01-01 00:00"
            newtime.setCalendar(time.getCalendar())
            newtime.axis = "T"

            if var.dtype == np.dtype('int32'):
                vtype = cdms2.CdInt
                missval = -2147483647
            else:
                vtype = cdms2.CdFloat
                missval = 1.e20

            if newlev:
                newv = file.createVariable(vname, vtype, (newtime, newlev, newlat, newlon))
            else:
                newv = file.createVariable(vname, vtype, (newtime, newlat, newlon))
            for attr in ("standard_name", "long_name", "units"):
                if hasattr(umvar, attr):
                    newv.setattribute(attr, getattr(umvar,attr))
            newv.missing_value = missval
            newv.stash_section=var.stash_section[0] 
            newv.stash_item=var.stash_item[0] 
            newv._FillValue = missval

            try:
                newv.units = var.units
            except AttributeError:
                pass

        # Get appropriate file position
        # Uses 360 day calendar, all with same base time so must be 30 days on.
        k = len(newtime)
        # float needed here to get the later logical tests to work properly
        avetime = float(MV.average(timevals[:])) # Works in either case
        if k>0:
            if average:
                # if newtime[-1] != (avetime - 30):
                # For Gregorian calendar relax this a bit
                # Sometimes get differences slightly > 31
                if not 28 <= avetime - newtime[-1] <= 31.5:
                    raise error, "Times not consecutive %f %f %f" % (newtime[-1], avetime, timevals[0])
            else:
                if k > 1:
                    # Need a better test that works when k = 1. This is just a
                    # temporary workaround
                    if not np.allclose( newtime[-1] + (newtime[-1]-newtime[-2]), timevals[0] ):
                        raise error, "Times not consecutive %f %f " % (newtime[-1], timevals[0])

        if (30201 <= item_code <= 30303) and mask:
            # P LEV/UV GRID with missing values treated as zero.
            # Needs to be corrected by Heavyside fn
            heavyside = d.variables['psag']
            # Check variable code as well as the name.
            if heavyside.stash_item[0] != 301 or heavyside.stash_section[0] != 30:
                raise error, "Heavyside variable code mismatch"

        if average:
            newtime[k] = avetime
            if var.shape[1] > 1:
                # multiple levels
                newv[k] = MV.average(var[:],axis=0).astype(np.float32)
            else:
                # single level
                newv[k] = MV.average(var[:],axis=0)[0].astype(np.float32)
        else:
            for i in range(len(timevals)):
                if var.shape[1] > 1:
                    # Multi-level
                    if (30201 <= item_code <= 30303) and mask:
                        # NOTE(review): tests heavyside[i] but divides by
                        # heavyside[0] — possibly intended heavyside[i]; confirm.
                        newv[k+i] = np.where( np.greater(heavyside[i], hcrit), var[i]/heavyside[0], newv.getMissing())
                    else:
                        newv[k+i] = var[i]
                else:
                    newv[k+i] = var[i,0]

                newtime[k+i] = timevals[i]

        file.close()
Esempio n. 16
0
    for k in range(len(time)):
        timevals[k] = round(timevals[k],1)

item_code = var.stash_section[0]*1000 + var.stash_item[0]
umvar = stashvar.StashVar(item_code,var.stash_model[0])
if not vname:
    vname = umvar.name
print vname, var[0,0,0,0]

hcrit = 0.5 # Critical value of Heavyside function for inclusion.
 
# print "LEN(TIME)", len(time)

#  If output file exists then append to it, otherwise create a new file
try:
    file = cdms2.openDataset(ofile, 'r+')
    newv = file.variables[vname]
    newtime = newv.getTime()
except cdms2.error.CDMSError:
    if not usenc4:
        # Force netCDF3 output
        cdms2.setNetcdfShuffleFlag(0)
        cdms2.setNetcdfDeflateFlag(0)
        cdms2.setNetcdfDeflateLevelFlag(0)
    file = cdms2.createDataset(ofile)
    file.history = "Created by um2netcdf.py."
    # Stop it creating the bounds_latitude, bounds_longitude variables
    cdms2.setAutoBounds("off")

    # By default get names like latitude0, longitude1
    # Need this awkwardness to get the variable/dimension name set correctly
Esempio n. 17
0
#!/usr/bin/env python

# Python 2 regression test: regrids the 'u' variable from readonly.nc onto a
# 32-latitude Gaussian grid and checks one regridded value two ways
# (regrid2.Regridder and the variable's own .regrid method).

import cdms2, numpy.ma, regrid2 as regrid, os, sys
from regrid2 import Regridder
from markError import clearError, markError, reportError
clearError()

print 'Test 8: Regridding ...',

## lat = cdms2.createGaussianAxis(32)
## lon = cdms2.createUniformLongitudeAxis(0.0,64,360.0/64.)
## outgrid = cdms2.createRectGrid(lat,lon,'yx','gaussian')
outgrid = cdms2.createGaussianGrid(32)

f = cdms2.openDataset(os.path.join(sys.prefix, 'sample_data', 'readonly.nc'))
u = f.variables['u']
ingrid = u.getGrid()
try:
    sh = ingrid.shape
except:
    markError('Grid shape')

regridf = Regridder(ingrid, outgrid)
newu = regridf(u)

# Compare one regridded value against a known-good reference.
if (abs(newu[0, 0, -1] - 488.4763488) > 1.e-3):
    markError('regrid', newu[0, 0, -1])
newu = u.regrid(outgrid)
if (abs(newu[0, 0, -1] - 488.4763488) > 1.e-3):
    markError('regrid', newu[0, 0, -1])
Esempio n. 18
0
def writenetcdf(slab, filename, mode="a"):
    """writenetcdf(slab, filename, mode="a") writes slab to the file.
       modes: 'a'  append
              'w'  replace
              'r'  replace (for legacy code only, deprecated)
       slab can be anything asVariable will accept
    """
    if mode == 'r':
        mode = 'w'
    slab = cdms2.asVariable(slab, 0)
    f = cdms2.openDataset(filename, mode)
    try:
        f.write(slab)
    finally:
        # BUG FIX: close the dataset even when write() raises (resource leak).
        f.close()

if __name__ == '__main__':
    # Smoke test: write a variable and a small axis-less array to test.nc,
    # read the variable back, and check it round-trips unchanged.
    from numpy.ma import allclose
    import pcmdi
    g = cdms2.openDataset('clt.nc','r')
    c = g.variables['clt']
    t = cdms2.asVariable([1.,2.,3.])
    t.id = 't'
    writenetcdf(c, 'test.nc', 'w')
    writenetcdf(t, 'test.nc', 'a')
    f = cdms2.open('test.nc')
    d = f.variables['clt']
    assert allclose(c,d)
    # Print slab info for both written variables.
    for name in ['clt', 't']:
        pcmdi.slabinfo(f.variables[name])


Esempio n. 19
0
# Fragment of a read/write regression test.  Relies on names defined earlier
# in the original file: f (open dataset), var, latobj, lat, NLAT, vlat,
# markError.  NLAT/2 as an index suggests Python 2 integer division —
# TODO confirm.
var.param = -99
if var.param!=-99: markError("R/W param: "+var.param)

f.Conventions = 'CF1.0'

latobj[NLAT/2] = 6.5
if latobj[NLAT/2]==lat[NLAT/2]: markError("Rewrite axis: %f"%vlat[NLAT/2])
lat[NLAT/2] = 6.5
latobj.standard_name = 'Latitude'

# Scalar (0-dimensional) variable written as a double.
p0 = f.createVariable('p0',cdms2.CdDouble,())
p0.assignValue(-99.9)

f.close()
#-----------------------------------------------------------
# Reopen in read/write mode and verify attributes persisted.
g = cdms2.openDataset('readwrite3.nc','r+')
con = g.Conventions
try:
    con = '<not read>'
    con = g.Conventions
except:
    markError("R/W global attr: "+con)
else:
    if g.Conventions!='CF1.0': markError("R/W global attr: "+con)

var = g.variables['u']
try:
    ln = '<not read'
    ln = var.long_name
except:
    markError("R/W long_name: "+ln)
Esempio n. 20
0
import os, random, string, cdms2, vcs

def random_id(length):
    """Return a random alphanumeric string of *length* characters."""
    # BUG FIX: the original body was a bare expression with no 'return',
    # so random_id always returned None.
    return ''.join(random.choice(string.ascii_lowercase +
                                 string.ascii_uppercase +
                                 string.digits) for _ in range(length))

def nbDisplay(x):
    """Render vcs canvas *x* inline (IPython) by round-tripping via a PNG."""
    from IPython.display import Image, display
    outFile = '/tmp/vcsnb-{0}.png'.format(random_id(8))
    x.png(outFile)
    display(Image(outFile))

dataPath = "https://dataserver.nccs.nasa.gov/thredds/fileServer/bypass/edas/publish/x589hnZJ.nc"
varName = "tas"
f = cdms2.openDataset(dataPath)
var = f(varName)  # , time=slice(0,1),level=slice(10,11) )
x = vcs.init()
# BUG FIX: the original called x.plot(s, ...) but 's' was never defined
# (NameError); plot the variable that was actually read.
x.plot(var, bg=True)
Esempio n. 21
0
 def mpl_plot(self, dataPath, nCols=2):
     """Plot the first variable in *dataPath*: a time plot when it has one
     axis, otherwise an EOF grid plot with *nCols* columns.
     """
     f = cdms2.openDataset(dataPath)
     # NOTE(review): .values()[0] indexes a list — Python 2 only; on
     # Python 3 dict_values is not subscriptable.
     var = f.variables.values()[0]
     naxes = self.getNAxes(var.shape)
     if (naxes == 1): self.mpl_timeplot(dataPath)
     else: self.vcs_plot_eofs(dataPath, nCols)
Esempio n. 22
0
#!/usr/bin/env python

# Python 2 regression test: regrids the 'u' variable from readonly.nc onto a
# 32-latitude Gaussian grid and checks one regridded value two ways
# (regrid2.Regridder and the variable's own .regrid method).

import cdms2,numpy.ma, regrid2 as regrid, os, sys
from regrid2 import Regridder
from markError import clearError,markError,reportError
clearError()

print 'Test 8: Regridding ...',

## lat = cdms2.createGaussianAxis(32)
## lon = cdms2.createUniformLongitudeAxis(0.0,64,360.0/64.)
## outgrid = cdms2.createRectGrid(lat,lon,'yx','gaussian')
outgrid = cdms2.createGaussianGrid(32)

f = cdms2.openDataset(os.path.join(sys.prefix,'sample_data','readonly.nc'))
u = f.variables['u']
ingrid = u.getGrid()
try:
    sh = ingrid.shape
except:
    markError('Grid shape')

regridf = Regridder(ingrid, outgrid)
newu = regridf(u)

# Compare one regridded value against a known-good reference.
if (abs(newu[0,0,-1]-488.4763488) > 1.e-3): markError('regrid',newu[0,0,-1])
newu = u.regrid(outgrid)
if (abs(newu[0,0,-1]-488.4763488) > 1.e-3): markError('regrid',newu[0,0,-1])

# Regrid TV
Esempio n. 23
0
# Python 2 regression test: regrids the 'u' variable from readonly.nc onto a
# 32-latitude Gaussian grid and checks one regridded value two ways
# (regrid2.Regridder and the variable's own .regrid method).

import cdms2, numpy.ma, regrid2 as regrid, os, sys
from regrid2 import Regridder
from markError import clearError, markError, reportError
from markError import get_sample_data_dir

clearError()

print 'Test 8: Regridding ...',

## lat = cdms2.createGaussianAxis(32)
## lon = cdms2.createUniformLongitudeAxis(0.0,64,360.0/64.)
## outgrid = cdms2.createRectGrid(lat,lon,'yx','gaussian')
outgrid = cdms2.createGaussianGrid(32)

f = cdms2.openDataset(os.path.join(get_sample_data_dir(), 'readonly.nc'))
u = f.variables['u']
ingrid = u.getGrid()
try:
    sh = ingrid.shape
except:
    markError('Grid shape')

regridf = Regridder(ingrid, outgrid)
newu = regridf(u)

# Compare one regridded value against a known-good reference.
if (abs(newu[0, 0, -1] - 488.4763488) > 1.e-3):
    markError('regrid', newu[0, 0, -1])
newu = u.regrid(outgrid)
if (abs(newu[0, 0, -1] - 488.4763488) > 1.e-3):
    markError('regrid', newu[0, 0, -1])
Esempio n. 24
0
#!/usr/bin/env python

# Python 2 regression test: regrids the 'u' variable from readonly.nc onto a
# 32-latitude Gaussian grid and checks one regridded value two ways
# (regrid2.Regridder and the variable's own .regrid method).

import cdms2,numpy.ma, regrid2 as regrid, os, sys
from regrid2 import Regridder
from markError import clearError,markError,reportError
from markError import get_sample_data_dir
clearError()

print 'Test 8: Regridding ...',

## lat = cdms2.createGaussianAxis(32)
## lon = cdms2.createUniformLongitudeAxis(0.0,64,360.0/64.)
## outgrid = cdms2.createRectGrid(lat,lon,'yx','gaussian')
outgrid = cdms2.createGaussianGrid(32)

f = cdms2.openDataset(os.path.join(get_sample_data_dir(),'readonly.nc'))
u = f.variables['u']
ingrid = u.getGrid()
try:
    sh = ingrid.shape
except:
    markError('Grid shape')

regridf = Regridder(ingrid, outgrid)
newu = regridf(u)

# Compare one regridded value against a known-good reference.
if (abs(newu[0,0,-1]-488.4763488) > 1.e-3): markError('regrid',newu[0,0,-1])
newu = u.regrid(outgrid)
if (abs(newu[0,0,-1]-488.4763488) > 1.e-3): markError('regrid',newu[0,0,-1])

# Regrid TV
Esempio n. 25
0
    def mpl_spaceplot(self, dataPath, numCols=4, timeIndex=0, smooth=False):
        """Poll for *dataPath* (up to ~30 s), then draw a Basemap pcolormesh
        panel for every multi-dimensional variable in the file at time step
        *timeIndex*, laid out in at most *numCols* columns.

        smooth -- use 'gouraud' shading instead of 'flat'.
        Python 2 code (print statements).  Variables that fail to plot are
        skipped with a message.
        """
        if dataPath:
            for k in range(0, 30):
                if (os.path.isfile(dataPath)):
                    self.logger.info("Plotting file: " + dataPath)
                    f = cdms2.openDataset(
                        dataPath)  # type: cdms2.dataset.CdmsFile
                    vars = f.variables.values()
                    axes = f.axes.values()
                    lons = self.getAxis(axes, "X")
                    lats = self.getAxis(axes, "Y")
                    fig = plt.figure()
                    # Sorted variable ids determine the subplot order.
                    varNames = list(map(lambda v: v.id, vars))
                    varNames.sort()
                    nCols = min(len(varNames), numCols)
                    nRows = math.ceil(len(varNames) / float(nCols))
                    iplot = 1
                    for varName in varNames:
                        # Skip bounds variables; they are not plottable fields.
                        if not varName.endswith("_bnds"):
                            try:
                                variable = f(varName)
                                if len(variable.shape) > 1:
                                    # Map extent spans the full lat/lon axes.
                                    m = Basemap(llcrnrlon=lons[0],
                                                llcrnrlat=lats[0],
                                                urcrnrlon=lons[len(lons) - 1],
                                                urcrnrlat=lats[len(lats) - 1],
                                                epsg='4326',
                                                lat_0=lats.mean(),
                                                lon_0=lons.mean())
                                    ax = fig.add_subplot(nRows, nCols, iplot)
                                    ax.set_title(varName)
                                    lon, lat = np.meshgrid(lons, lats)
                                    xi, yi = m(lon, lat)
                                    smoothing = 'gouraud' if smooth else 'flat'
                                    spatialData = variable(time=slice(
                                        timeIndex, timeIndex + 1),
                                                           squeeze=1)
                                    cs2 = m.pcolormesh(xi,
                                                       yi,
                                                       spatialData,
                                                       cmap='jet',
                                                       shading=smoothing)
                                    # Grid lines every ~1/5 of the axis span.
                                    lats_space = abs(lats[0]) + abs(
                                        lats[len(lats) - 1])
                                    m.drawparallels(np.arange(
                                        lats[0], lats[len(lats) - 1],
                                        round(lats_space / 5, 0)),
                                                    labels=[1, 0, 0, 0],
                                                    dashes=[6, 900])
                                    lons_space = abs(lons[0]) + abs(
                                        lons[len(lons) - 1])
                                    m.drawmeridians(np.arange(
                                        lons[0], lons[len(lons) - 1],
                                        round(lons_space / 5, 0)),
                                                    labels=[0, 0, 0, 1],
                                                    dashes=[6, 900])
                                    m.drawcoastlines()
                                    m.drawstates()
                                    m.drawcountries()
                                    cbar = m.colorbar(cs2,
                                                      location='bottom',
                                                      pad="10%")
                                    print "Plotting variable: " + varName
                                    iplot = iplot + 1
                            except:
                                print "Skipping variable: " + varName

                    fig.subplots_adjust(wspace=0.1,
                                        hspace=0.1,
                                        top=0.95,
                                        bottom=0.05)
                    plt.show()
                    return
                else:
                    # File not there yet — wait a second and retry.
                    time.sleep(1)