Example #1
import cdms2
import genutil

# Monthly 6-hourly date ranges for 1980. The list name and the first two
# entries (January, February) are reconstructed here; the original snippet
# starts mid-list.
dates = ['1980010100-1980013118',
         '1980020100-1980022918',
         '1980030100-1980033118',
         '1980040100-1980043018',
         '1980050100-1980053118',
         '1980060100-1980063018',
         '1980070100-1980073118',
         '1980080100-1980083118',
         '1980090100-1980093018',
         '1980100100-1980103118',
         '1980110100-1980113018',
         '1980120100-1980123118']
#get the desired resolution from JRA-55 (1.25x1.25)
f1=cdms2.open('/att/dataportal01/CREATE-IP/reanalysis/JMA/JRA-55/6hr/atmos/ta/ta_6hr_reanalysis_JRA-55_1980010100-1980013118.nc')


ta2=f1('ta',genutil.picker(plev=(100000, 97500, 95000, 92500, 90000,
87500, 85000, 82500, 80000, 77500, 75000, 70000, 65000, 60000, 55000,
50000, 45000, 40000, 35000, 30000, 25000, 20000, 15000, 10000)))
ta2=ta2(time=('1980-01-00','cob'))
outfixlev=cdms2.open('/att/dmznsd/nobackup01/projects/ESGF/glpotter/CFSR/ensemble/level_test.nc','w')

ta2.id='ta'
outfixlev.write(ta2)
outfixlev.close()
f1=cdms2.open('/att/dmznsd/nobackup01/projects/ESGF/glpotter/CFSR/ensemble/level_test.nc')
ta1=f1('ta')
outgrid=ta1.getGrid()
lat=f1.getAxis('lat')
lon=f1.getAxis('lon')
plev=f1.getAxis('plev')
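Example #1 stops after pulling out the target grid and axes; the typical follow-up step (performed in Examples #3 and #4) is to interpolate another dataset onto that grid. A minimal sketch, assuming a hypothetical second file 'other_reanalysis_ta.nc' containing a 'ta' variable:

# regrid a second dataset onto the 1.25x1.25 JRA-55 grid extracted above
# (file name and variable below are placeholders, not from the original example)
f2 = cdms2.open('other_reanalysis_ta.nc')
ta_other = f2('ta', genutil.picker(plev=(100000, 85000, 50000, 25000, 10000)))
ta_on_jra = ta_other.regrid(outgrid)   # interpolate onto the target grid
f2.close()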

Example #2
#!/usr/bin/env python
# Adapted for numpy/ma/cdms2 by convertcdms.py

import cdms2 as cdms,genutil,vcs,cdtime,os,sys
import vcs.test.support
bg=vcs.test.support.bg
cdms.setAutoBounds('on')
#f=cdms.open('/pcmdi/obs/mo/ta/rnl_ncep/ta.rnl_ncep.ctl')
f=cdms.open(os.path.join(vcs.sample_data,'ta_ncep_87-6-88-4.nc'))

levels = [1000,700,800]
try:
    s=f('ta',slice(0,1),genutil.picker(level=levels,match=1))
    error = False
except Exception:
    print 'Ok, failed getting the missing levels, as expected!'
    error = True
if error == False:
    raise Exception("Error: it should have failed here!")


s=f('ta',slice(0,1),genutil.picker(level=levels,match=0))

if s.shape[1]!=3:
    raise Exception("Error: did not return 3 levels!")
if (s.getLevel()[:]!=levels).any():
    raise Exception("Error: did not retrieve the right levels!")

print "folowing plot should show all missing values, since 800 does not exisits!"
x=vcs.init()
x.plot(s[0,-1],bg=bg)
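A quick non-graphical way to confirm the same behaviour: with match=0 the level that is absent from the file (800 hPa here) comes back entirely masked. A small sketch using the masked-array machinery that cdms2 variables already carry:

import numpy.ma as MA
missing_slice = s[0, -1]                       # the 800 hPa slice plotted above
print(MA.getmaskarray(missing_slice).all())    # expected: True, i.e. all values missing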
Example #3
import time
import numpy as np
import numpy.ma as ma
import cdms2
import genutil

dataportal_data_path = '/att/dataportal01/CREATE-IP/reanalysis'
dass_data_path = '/dass/pubrepo/CREATE-IP/data/reanalysis'
data_path = dass_data_path

target_grid_dset = cdms2.open(
    data_path +
    '/JMA/JRA-55/6hr/atmos/ta/ta_6hr_reanalysis_JRA-55_1958010100-1958013118.nc'
)
target_grid_var = target_grid_dset['ta']
""":type : FileVariable """

dset_address = data_path + "/NASA-GMAO/GEOS-5/MERRA2/6hr/atmos/ta/ta_6hr_reanalysis_MERRA2_1980010100-1980013118.nc"
vname = "ta"
levs = (100000, 97500, 95000, 92500, 90000, 87500, 85000, 82500, 80000, 77500,
        75000, 70000, 65000, 60000, 55000, 50000, 45000, 40000, 35000, 30000,
        25000, 20000, 15000, 10000)

t0 = time.time()
dset = cdms2.open(dset_address)
var0 = dset[vname]
var1 = dset(vname, genutil.picker(level=levs))
""":type : AbstractVariable """
# newvar = var.regrid( target_grid_var.getGrid() )
t1 = time.time()

print "Completed test in time {0}, shape0 = {1}, shape1 = {2}".format(
    str(t1 - t0), str(var0.shape), str(var1.shape))
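The regrid step in this example is left commented out; a minimal sketch of enabling it, assuming the regridder bundled with cdms2 is available:

# interpolate the picked MERRA2 levels onto the JRA-55 target grid
newvar = var1.regrid(target_grid_var.getGrid())
print(newvar.shape)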
Example #4
    def executeOperations(self, task, _inputs):
        """
        :type task: Task
        :type _inputs: dict[str,npArray]
        """
        cdms2.setAutoBounds(2)
        start = time.time()
        mdata = task.metadata
        """:type : dict[str,str] """
        self.logger.info(" Execute Ensemble Task with metadata: " +
                         str(task.metadata))
        target = str(mdata.get("target", ""))
        method = str(mdata.get("method", "linear")).lower()
        res = sa2f(self.getListParm(mdata, "res"))
        shape = sa2i(self.getListParm(mdata, "shape"))
        plevs = sa2f(self.getListParm(mdata, "plev"))
        resultDir = task.metadata.get("resultDir")

        target_input = _inputs.get(target, None)
        if (target_input is None):
            raise Exception(
                "Must set the 'target' parameter in EnsembleWorkflowKernel to the id of the input that determines the output grid"
            )

        outgrid = target_input.getGrid()
        arrays = []
        for (id, _input) in _inputs.iteritems():
            dset_address = _input.metadata.get("uri",
                                               _input.metadata.get("dataPath"))
            vname = _input.metadata.get("name")
            dset = cdms2.open(dset_address)
            var = dset(vname, genutil.picker(
                plev=plevs)) if (plevs) else dset(vname)
            """:type : AbstractVariable """
            if (id == target):
                lat = var.getLatitude()
                """:type : AbstractCoordinateAxis """
                lon = var.getLongitude()
                """:type : AbstractCoordinateAxis """
                plev = var.getLevel()
                """:type : AbstractCoordinateAxis """
                t = var.getTime()
                """:type : AbstractCoordinateAxis """
                units = var.getattribute("units")
                varname = vname
                arrays.append(var)
            else:
                arrays.append(var.regrid(outgrid))

        times = t.asComponentTime()
        trange = str(times[0]) + "-" + str(times[-1])
        concatenate = ma.masked_array(arrays)
        average = cdms2.MV2.average(concatenate, axis=0)
        ave = cdms2.createVariable(average, axes=(t, plev, lat, lon))
        ave.units = units
        ave.id = varname
        ave.long_name = 'ensemble average'
        outfile = cdms2.open(
            resultDir + "/" + varname + '_6hr_reanalysis_ensemble_ave_' +
            trange + '.nc', 'w')
        outfile.write(ave)
        outfile.close()
        stdv = genutil.statistics.std(concatenate, axis=0)
        stdvave = cdms2.createVariable(stdv, axes=(t, plev, lat, lon))
        stdvave.id = varname + '_stdv'
        stdvave.long_name = 'standard deviation'
        stdvave.units = units

        outfile_std = cdms2.open(
            resultDir + "/" + varname + '_6hr_reanalysis_ensemble_std_' +
            trange + '.nc', 'w')
        outfile_std.write(stdvave)
        outfile_std.close()
        #newgrid.shape

        end = time.time()
        print "Completed EnsembleWorkflowKernel in " + str(
            (end - start) / 60.) + " min "
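Stripped of the workflow framework, the ensemble computation above reduces to an average and a standard deviation over a new leading "member" axis. A self-contained sketch (the function name and the 'members' argument are illustrative, not part of the original kernel):

import cdms2
import genutil
import numpy.ma as ma

def ensemble_stats(members):
    """Given a list of same-shaped cdms2 variables already on a common grid,
    return (ensemble mean, ensemble standard deviation)."""
    stacked = ma.masked_array(members)                  # shape: (n_members, ...)
    ens_mean = cdms2.MV2.average(stacked, axis=0)       # same average call as the kernel above
    ens_std = genutil.statistics.std(stacked, axis=0)   # ensemble spread
    return ens_mean, ens_std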
Example #5
import cdms2, cdtime
import numpy as N
# 'u' (a variable read earlier in this tutorial) and 'equator' (a selector defined there) are assumed to exist
select = cdms2.selectors.Selector(lon=slice(0, 3), time=('1950', cdtime.comptime(1960)))
print u(equator)(select).shape
#  -> apply the selector directly when reading from the file






# The genutil module: generic utilities

# - contents
import genutil
print dir(genutil)

# - standard statistics
print dir(genutil.statistics)
ustd = genutil.statistics.std(u, axis=0)                # try the different options
ustd.info()
print N.ma.allclose(ustd, u.std(axis=0))

# - salstat
print dir(genutil.salstat)
print genutil.salstat.kurtosis(u, axis='t')             # try the other salstat functions

# - miscellaneous
_, uu = genutil.grower(u, u[0])                         # try other slices
print uu.shape
print genutil.minmax(u, u**2)
print u[:, 0, 0](genutil.picker(time=['1960-01-03', '1965-05-03'])) # try the match option (see the sketch below)
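A short sketch of the match option suggested in the last line: with match=0 a requested time that is absent from the axis comes back as missing values instead of raising an error (the second date below is only an illustration and may not lie on u's time axis):

sel = genutil.picker(time=['1960-01-03', '1999-12-31'], match=0)
print(u[:, 0, 0](sel).shape)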