Exemple #1
0
def scrap(data, axis=0):
    """Remove fully-masked hyperslabs along *axis* from a cdms2 variable.

    :param data: cdms2 transient variable to clean up.
    :param axis: axis name ('x','y','z','t'), axis index (int), or an
        axis id string; hyperslabs along it that are entirely masked
        are dropped.
    :return: the trimmed variable, reordered back to the input's
        original axis order.
    """
    originalOrder = data.getOrder(ids=True)
    # Axis ids that are not one of the canonical letters must be wrapped
    # in parentheses for the cdms2 order syntax.
    if axis not in ['x', 'y', 'z', 't'] and not isinstance(axis, int):
        order = "({})...".format(axis)
    else:
        order = "{}...".format(axis)
    new = data(order=order)
    axes = new.getAxisList()  # Save for later
    new = MV2.array(new.asma())  # lose dims
    # Walk backwards so deleting slab i does not shift the slabs still
    # to be examined.
    for i in range(new.shape[0] - 1, -1, -1):
        tmp = new[i]
        # numpy.floating covers every numpy float scalar type; the old
        # numpy.float alias was removed in NumPy >= 1.24 and raised
        # AttributeError there.
        if not isinstance(tmp, (float, numpy.floating)) and tmp.mask.all():
            a = new[:i]
            b = new[i + 1:]
            if b.shape[0] == 0:
                new = a
            else:
                new = MV2.concatenate((a, b))
    # Rebuild the leading axis from the surviving index values.
    newAxis = []
    for v in new.getAxis(0):
        newAxis.append(axes[0][int(v)])
    ax = cdms2.createAxis(newAxis, id=axes[0].id)
    axes[0] = ax
    new.setAxisList(axes)
    return new(order=originalOrder)
Exemple #2
0
    def post(self,fetched,slab,axes,specifications,confined_by,aux,axismap):
        '''Post processing retouches the bounds and later will deal with the mask.

        Rebuilds, for every axis this selector confined, the axis values
        (and bounds, when present) of the slabs matched one-by-one from
        self.aux; missing values are filled with a fully-masked slab when
        self.match == 0, raise when self.match == 1, and are skipped
        otherwise.
        '''
        import cdms2 as cdms
        fetched=cdms.createVariable(fetched,copy=1)
        faxes=fetched.getAxisList()
        a=None
        for i in range(len(faxes)):
            if confined_by[i] is self:
                newaxvals=[]
                bounds=[]
                a=None
                sh=list(fetched.shape)
                sh[i]=1  # shape of a single placeholder slab along axis i
                for l in self.aux[i]:
                    try:
                        tmp=fetched(**{faxes[i].id:(l,l)})
                        ax=tmp.getAxis(i)
                        newaxvals.append(ax[0])
                        # 'is not None' avoids an elementwise numpy
                        # comparison of the bounds array against None.
                        if ax.getBounds() is not None:
                            bounds.append(ax.getBounds()[0])
                        else:
                            bounds=None
                    except Exception as err:
                        if self.match==1:
                            raise Exception('Error axis value :'+str(l)+' was requested but is not present in slab\n(more missing might exists)')
                        elif self.match==0:
                            # Insert a fully-masked placeholder slab.
                            tmp=MV2.ones(sh,typecode=MV2.float)
                            tmp=MV2.masked_equal(tmp,1)
                            # Component/relative times and strings are
                            # converted to the axis units first.
                            if type(l)==type(cdtime.comptime(1999)) or type(l)==type(cdtime.reltime(0,'days since 1999')) or type(l)==type(''):
                                if type(l)!=type(''):
                                    newaxvals.append(l.torel(faxes[i].units).value)
                                else:
                                    newaxvals.append(cdtime.s2r(l,faxes[i].units).value)
                            else:
                                newaxvals.append(l)
                            if bounds is not None:
                                bounds.append([ax[-1]-1.,ax[-1]+1])
                        else:
                            tmp=None  # match==-1: silently skip missing values
                    if tmp is not None:
                        if a is None:
                            a=tmp
                        else:
                            a=MV2.concatenate((a,tmp),i)
                # Rebuild the confined axis, copying the original attributes.
                if bounds is not None:
                    newax=cdms.createAxis(numpy.array(newaxvals),bounds=numpy.array(bounds),id=ax.id)
                else:
                    newax=cdms.createAxis(numpy.array(newaxvals),id=ax.id)
                for att in faxes[i].attributes.keys():
                    setattr(newax,att,faxes[i].attributes.get(att))
                for j in range(len(fetched.shape)):
                    if j==i:
                        a.setAxis(i,newax)
                    else:
                        a.setAxis(j,faxes[j])
                fetched=a.astype(fetched.dtype.char)
                faxes=fetched.getAxisList()
Exemple #3
0
    def _get(self):
        """Return the metric field, converted to percentile ranks when a
        'relative' portrait type is active; otherwise defer to self.__get().

        NOTE(review): assumes self.__get() returns a 2-D MV2 array of the
        same shape for every candidate value of the 'relative' parameter --
        confirm against callers (the sh[1]*sh[2] reshapes below require it).
        """
        if 'relative' in self.portrait_types.keys():
            d=self.portrait_types['relative']
            vals=d[1]
            # Remember the currently selected parameter value so it can be
            # restored before returning.
            real_value=getattr(self,d[0])
            real=self.__get()
            # Stack the field obtained for each candidate value along a new
            # leading axis.
            setattr(self,d[0],vals[0])
            a0=self.__get()
            sh=list(a0.shape)
            sh.insert(0,1)
            a0=MV2.reshape(a0,sh)
            for v in vals[1:]:
                setattr(self,d[0],v)
                tmp=self.__get()
                tmp=MV2.reshape(tmp,sh)
                a0=MV2.concatenate((a0,tmp))
            # Sort the sample along the stacking axis, then locate the real
            # field's rank within it, point by point.
            a0=MV2.sort(a0,0).filled()
            real2=real.filled()
            a0=MV2.reshape(a0,(a0.shape[0],sh[1]*sh[2]))
            real2=MV2.reshape(real2,(sh[1]*sh[2],))
            a0=MV2.transpose(a0)
            indices=[]
            for i in range(len(real2)):
                indices.append(MV2.searchsorted(a0[i],real2[i]))
            indices=MV2.array(indices)
            indices=MV2.reshape(indices,(sh[1],sh[2]))
            # Carry the real field's mask over onto the rank field.
            if not ((real.mask is None) or (real.mask is MV2.nomask)):
                indices=MV2.masked_where(real.mask,indices)
            # Count valid sample members per point (1.e20 marks fill values)
            # and turn the rank into a percentage of that count.
            a=MV2.masked_equal(a0,1.e20)
            a=MV2.count(a,1)
            a=MV2.reshape(a,indices.shape)
            indices=indices/a*100
            # Restore the parameter value that was active on entry.
            setattr(self,d[0],real_value)
            indices.setAxisList(real.getAxisList())
##             print indices.shape
            return indices
        else:
            return self.__get()
Exemple #4
0
    # NOTE(review): this fragment runs inside a loop over models (the
    # 'continue' below and counter 'i' belong to an enclosing for-loop that
    # is outside this view) -- confirm against the full script.
    cdutil.setTimeBoundsMonthly(th)
    #ref = cdutil.YEAR.climatology(th(time=('1951-1-1','1980-12-31')))

    # Annual means of the historical series, truncated to end of 2005.
    #dep_h = cdutil.YEAR.departures(th,ref=ref)(time=(truestart,'2005-12-31'))
    dep_h = cdutil.YEAR(th)(time=(truestart, '2005-12-31'))

    # Area-averaged tas from the projection file.
    tr = cdutil.averager(fr("tas"), axis='xy')
    start_r = cmip5.start_time(tr)
    stop_r = cmip5.stop_time(tr)

    cdutil.setTimeBoundsMonthly(tr)

    # Annual means of the projection series, truncated to end of 2099.
    #dep_r = cdutil.YEAR.departures(tr,ref=ref)(time=(start_r,'2099-12-31'))
    dep_r = cdutil.YEAR(tr)(time=(start_r, '2099-12-31'))
    try:
        # Join historical + projection into row i of the ensemble array.
        BIG[i] = MV.concatenate((dep_h, dep_r))
    except:
        # Shape mismatch: report the offending run and skip it.
        print rip
        print MV.concatenate((dep_h, dep_r)).shape
        continue
    i += 1

# Common time axis for the concatenated 1900-2099 annual series.
T = np.arange(1900, 2100)
tax = cdms.createAxis(T)

tax.units = 'years since 0001-1-1'
tax.designateTime()

# Mask fill values left in rows that were skipped or short.
BIG2 = MV.masked_where(np.abs(BIG) > 1.e10, BIG)
modax = cdms.createAxis(np.arange(L))
modax.models = str(overlap.tolist())
Exemple #5
0
                                temporary[region] = d_sub_aave(time=(start_t,
                                                                     end_t))
                                continue
                            else:
                                # n-1 year 7/1~12/31
                                part1 = copy.copy(temporary[region])
                                # n year 1/1~6/30
                                part2 = d_sub_aave(time=(
                                    cdtime.comptime(year),
                                    cdtime.comptime(year, 6, 30, 23, 59, 59)))
                                start_t = cdtime.comptime(year, 7, 1)
                                end_t = cdtime.comptime(
                                    year, 12, 31, 23, 59, 59)
                                temporary[region] = d_sub_aave(time=(start_t,
                                                                     end_t))
                                d_sub_aave = MV2.concatenate([part1, part2],
                                                             axis=0)
                                if debug:
                                    print(
                                        'debug: ', region, year,
                                        d_sub_aave.getTime().asComponentTime())

                        # get pentad time series
                        list_d_sub_aave_chunks = list(
                            divide_chunks_advanced(d_sub_aave, n, debug=debug))
                        pentad_time_series = []
                        for d_sub_aave_chunk in list_d_sub_aave_chunks:
                            # ignore when chunk length is shorter than defined
                            if d_sub_aave_chunk.shape[0] >= n:
                                ave_chunk = MV2.average(d_sub_aave_chunk,
                                                        axis=0)
                                pentad_time_series.append(float(ave_chunk))
Exemple #6
0
 idi = "i"
 idj = "j"
 lat_units = 'degrees_north'
 lon_units = 'degrees_east'
 iaxis = TransientVirtualAxis(idi,ni)
 jaxis = TransientVirtualAxis(idj,nj)
 lataxis = TransientAxis2D(lat, axes=(iaxis, jaxis), 
                        attributes={'units':lat_units}, id="latitude")
 lonaxis = TransientAxis2D(lon, axes=(iaxis, jaxis), 
                        attributes={'units':lon_units}, id="longitude")
 curvegrid = TransientGenericGrid(lataxis, lonaxis, tempmask=None)
 attributs = None ; vid = None
 if hasattr(v,'attributes') : attributs= v.attributes
 if hasattr(v,'id') : vid= v.id
 axis0 = v.getAxis(0)
 return cdms2.createVariable(v, axes=[axis0,iaxis,jaxis], grid=curvegrid, \
                            attributes=attributs, id=v.id)


# Build a tiny 1x5 curvilinear test grid and two identical variables on it,
# then concatenate them along the leading axis.
lat = MV2.array([[-20,-10,0,-15,-5]],'f')
lon = MV2.array([[0,10,20,50,60]],'f')

data1 = MV2.array([[[2,3,1,6,2]]],'f')
data2 = MV2.array([[[2,3,1,6,2]]],'f')


# CurveGrid attaches the 2-D lat/lon axes to the data (defined above in the
# original file).
data1 = CurveGrid(data1, lat, lon)
data2 = CurveGrid(data2, lat, lon)

result = MV2.concatenate([data1,data2],axis=0)
Exemple #7
0
    def read(self, cfg=None):
        """Read SST NetCDF files and concatenate them into self.data.

        NOTE(review): despite living next to NAR readers, this one reads
        the 'Seviri SST' config section and labels the result SEVIRI --
        confirm against the enclosing class.
        """
        import cdms2, sys, os, glob
        import numpy, MV2
        import cdtime
        from vacumm.misc.axes import create_lon
        from vacumm.misc.grid import create_grid, set_grid
        from vacumm.misc.atime import create_time
        from vacumm.misc.phys.units import kel2degc

        # NOTE(review): the leading-zero literals (01) below are Python 2
        # only syntax.
        if self.ctdeb >= cdtime.comptime(2012, 01, 01, 0, 0, 0):
            # -- Build a cdms object self.data; dates come from the file names
            self.data = ()  # Initialize a tuple
            # =============== WARNING ====================
            # Initialize self.data so memory is not duplicated!
            # ============================================

            # -- Improved method
            # Create a list
            files = []
            # Build the list of files matching the requested period
            ctest = self.ctdeb
            while ctest <= self.ctfin:
                for iH in numpy.arange(24):
                    flnme_only = "%04d%02d%02d%02d*.nc" % (ctest.year, ctest.month, ctest.day, iH)
                    files.extend(glob.glob(os.path.join(self.OBSDIR, flnme_only)))
                ctest = ctest.add(1, cdtime.Days)
                # --

            # Domain bounds from config file or the cfg dict.
            if cfg is None:
                config = ConfigParser.RawConfigParser()
                config.read(os.path.join(self.SCRIPT_DIR, "config.cfg"))
                lomin = float(config.get("Domain", "lomin"))
                lomax = float(config.get("Domain", "lomax"))
                lamin = float(config.get("Domain", "lamin"))
                lamax = float(config.get("Domain", "lamax"))
            else:
                lomin = cfg["Domain"]["lomin"]
                lomax = cfg["Domain"]["lomax"]
                lamin = cfg["Domain"]["lamin"]
                lamax = cfg["Domain"]["lamax"]

            if cfg is None:
                try:
                    timerange = config.get("Seviri SST", "timerange")
                    # timerange = 'midnight' for midnight-only data
                    # timerange = 'all' for data at every hour
                except ConfigParser.NoOptionError:
                    # print 'No Time Range'
                    timerange = "all"  # By default, read every hour
            else:
                timerange = cfg["Seviri SST"]["timerange"]

            if files == []:
                print "No data file to read ..."
            else:
                for ifile, filename in enumerate(files):
                    # -- Read file filename
                    f = cdms2.open(filename)
                    temp = f("sea_surface_temperature", lon=(lomin, lomax), lat=(lamin, lamax))
                    f.close()

                    # -- Transfer temp into the cdat object self.data (concatenation)
                    self.data += (temp,)

                    # -- Build the time axis
                    # taxis = create_time(cumt)

                    # -- MV2.concatenate joins the pieces in self.data (and builds the time axis)
                self.data = MV2.concatenate(self.data)

                # -- Dataset metadata
                # self.data.name = "SEVIRI_SST"
                self.data.units = "degree_Celsius"
                self.data.standard_name = "satellite_sea_surface_temperature"
                self.data.long_name = "Satellite Sea Surface Temperature - SEVIRI"

                # -- Change unit
                self.data = kel2degc(self.data)
# Test fragment: cdutil.JJA must fail on data that contains no JJA months.
s=f("clt")
cdutil.setTimeBoundsMonthly(s)

print 'Getting JJA, which should be inexistant in data'

# First 5 months only (Jan-May): extracting JJA is expected to raise.
try:
 cdutil.JJA(s[:5]) 
 raise RuntimeError( "data w/o season did not fail")
except:
  pass

## Create a year worth of data w/o JJA
s1 = s[:5]
s2 = s[8:12]

# Stitch Jan-May + Sep-Dec together and rebuild a proper time axis from
# the two pieces' time values.
s3 = MV2.concatenate((s1,s2))
t = MV2.concatenate((s1.getTime()[:],s2.getTime()[:]))
t = cdms2.createAxis(t,id='time')
t.units=s.getTime().units
t.designateTime()

s3.setAxis(0,t)
cdutil.setTimeBoundsMonthly(s3)
# JJA on the stitched series must also fail (Python 2 raise syntax below).
try:
  cdutil.JJA(s3)
  raise RuntimeError, "data w/o season did not return None"
except:
  pass
try:
  cdutil.JJA.departures(s3)
  raise RuntimeError, "data w/o season did not return None for dep"
# Deal with plev17 to plev19 conversion
# NOTE(review): the appended values 500., 100. look like hPa while the axis
# units below are 'Pa' -- confirm the factor against the rest of the script.
plev19 = np.append(plev17, [500., 100.])
# Add missing upper two values
plev19 = cdm.createAxis(plev19, id='plev')
plev19.designateLevel()
plev19.axis = 'Z'
plev19.long_name = 'pressure'
plev19.positive = 'down'
plev19.realtopology = 'linear'
plev19.standard_name = 'air_pressure'
plev19.units = 'Pa'

# Pad data array with missing values
# (axis 1 of d1 is the level axis: two fully-masked levels are appended)
d2 = np.ma.array(np.ma.ones([d1.shape[0], 2, d1.shape[2], d1.shape[3]]),
                 mask=True) * 1e20
d = mv.concatenate((d1, d2), axis=1)

del (d1, d2, plev17)
# Cleanup

#%% Initialize and run CMOR
# For more information see https://cmor.llnl.gov/mydoc_cmor3_api/
cmor.setup(inpath='./',
           netcdf_file_action=cmor.CMOR_REPLACE_4)  #,logfile='cmorLog.txt')
cmor.dataset_json(inputJson)
cmor.load_table(cmorTable)
#cmor.set_cur_dataset_attribute('history',f.history) ; # Force input file attribute as history
axes = [
    {
        'table_entry': 'time',
        'units': time.units,  # 'days since 1870-01-01',
Exemple #10
0
# Test fragment for MV2.choose: choices[value] is selected element-wise.
ct1 = MV2.TransientVariable([1,1,2,0,1])
ctr = MV2.choose(ct1, [numpy.ma.masked, 10,20,30,40])
# Index 0 selects the masked choice; allclose treats the masked element as
# matching, so the 100 there is arbitrary.
if not MV2.allclose(ctr, [10, 10, 20, 100, 10]): markError('choose error 1')
ctx = MV2.TransientVariable([1,2,3,150,4])
cty = -MV2.TransientVariable([1,2,3,150,4])
# Clamp values above 100 to the scalar 100 / to the negated variable.
ctr = MV2.choose(MV2.greater(ctx,100), (ctx, 100))
if not MV2.allclose(ctr, [1,2,3,100,4]): markError('choose error 2')
ctr = MV2.choose(MV2.greater(ctx,100), (ctx, cty))
if not MV2.allclose(ctr, [1,2,3,-150,4]): markError('choose error 3')

## concatenate(arrays, axis=0, axisid=None, axisattributes=None) 
##   Concatenate the arrays along the given axis. Give the extended axis the id and
##   attributes provided - by default, those of the first array.

try:
    xcon = MV2.concatenate((ud,vd))
except:
    markError('Concatenate error')

## isMaskedVariable(x) 
##   Is x a masked variable, that is, an instance of AbstractVariable?
im1 = MV2.isMaskedVariable(xones)
im2 = MV2.isMaskedVariable(xmasked)

## outerproduct(a, b) 
##   outerproduct(a,b) = {a[i]*b[j]}, has shape (len(a),len(b))
xouter = MV2.outerproduct(MV2.arange(16.),MV2.arange(32.))
# Attach real lat/lon axes from uf to the 16x32 outer product.
lat = uf.getLatitude()
lon = uf.getLongitude()
xouter.setAxis(0,lat)
xouter.setAxis(1,lon)
Exemple #11
0
    def read(self):
        """Read NAR SST NetCDF files, concatenate them into self.data,
        attach the grid read from the zone's grid file, and convert the
        result from Kelvin to degrees Celsius."""
        import cdms2,sys,os, glob
        import numpy,MV2
        import cdtime
        from vacumm.misc.axes import create_lon
        from vacumm.misc.grid import create_grid,  set_grid
        from vacumm.misc.atime import create_time
        from vacumm.misc.phys.units import kel2degc

        # -- For NAR SST the lon/lat grid is read from a separate grid file
        # -- Read the grid
        znar=self.ZONE_NAR
        gridfile="grid_%(znar)s.nc"%vars()
        f=cdms2.open(gridfile)
        la = f('latitude')
        lo = f('longitude')
        f.close()

        # -- Create the longitude and latitude axes
        lat_axis = create_lon(la,id='latitude')
        lon_axis = create_lon(lo,id='longitude')
        # -- Create the grid
        grid = create_grid(lon_axis, lat_axis)

        # -- Build a cdms object self.data; dates come from the file names
        self.data = () #Initialize a tuple
        # =============== WARNING ====================
        # Initialize self.data so memory is not duplicated!
        # ============================================

        #cumt = [] # Initialize an array

        # -- Loop over the files present in the WORKDIR

        #url_file_def="%(YYYY)s%(MM)s%(DD)s%(HH)s%(ext)s"%vars()

        #self.ctdeb

        # -- Old method
        #files = glob.glob(os.path.join(self.WORKDIR, '2*.nc'))
        #files.sort()
        # --

        # -- Improved method
        # Create a list
        files = []
        # Build the list of files matching the requested period
        ctest = self.ctdeb
        while ctest <= self.ctfin:
            flnme_only = '%(#)04d%(##)02d%(###)02d*.nc'%{'#':ctest.year, '##':ctest.month, '###':ctest.day}
            files.extend(glob.glob(os.path.join(self.WORKDIR, flnme_only)))
            ctest=ctest.add(1,cdtime.Days)
        # --

        for filename in files:
            # -- Read file filename
            f = cdms2.open(filename)
            temp = f('sea_surface_temperature')
            # =============== WARNING ==================================
            # Check that temp always reuses the same memory ... see cdms
            # ==========================================================
            f.close()

            # -- Extract the date and hour from the file name
            #ty = numpy.int(filename[-15:-11])
            #tm = numpy.int(filename[-11:-9])
            #tj = numpy.int(filename[-9:-7])
            #th = numpy.int(filename[-7:-5])
            #tt = cdtime.comptime(ty,tm,tj,th)
            #cumt.append(tt)

            # -- Transfer temp into the cdat object self.data (concatenation)
            # =============== WARNING ====================
            # Use an intermediate pre-allocated variable to avoid
            # too many copies in memory!
            # ============================================
            self.data += temp,


        # -- Build the time axis
        #taxis = create_time(cumt)

        # -- MV2.concatenate joins the pieces in self.data (and builds the time axis)
        self.data = MV2.concatenate(self.data)



        # -- Attach the grid to self.data
        set_grid(self.data, grid, axes=True)

        # -- Dataset metadata
        self.data.name = "NAR_SST"
        self.data.units = "degree_Celsius"
        self.data.standard_name = "satellite_sea_surface_temperature"
        self.data.long_name = "Satellite Sea Surface Temperature - NAR"

        # -- Change unit
        self.data = kel2degc(self.data)
    plev19 = np.append(plev19, plev3)
    plev19 = np.append(plev19, plev4)
    # Add missing upper two values
    plev19[:] = plev19[:] * 100.
    plev19 = cdm.createAxis(plev19, id='plev')
    plev19.designateLevel()
    plev19.axis = 'Z'
    plev19.long_name = 'pressure'
    plev19.positive = 'down'
    plev19.realtopology = 'linear'
    plev19.standard_name = 'air_pressure'
    plev19.units = 'Pa'

    # Pad data array with missing values
    #  d2 = np.ma.array(np.ma.ones([d1.shape[0],2,d1.shape[2],d1.shape[3]]),mask=True)*1e20
    d = mv.concatenate((d1, d2, d3, d4), axis=1)

    #del(d1,d2,d3,d4,plev1,plev2,plev3,plev4) ; # Cleanup

    #%% Initialize and run CMOR
    # For more information see https://cmor.llnl.gov/mydoc_cmor3_api/
    cmor.setup(
        inpath='./',
        netcdf_file_action=cmor.CMOR_REPLACE_4)  #,logfile='cmorLog.txt')
    cmor.dataset_json(inputJson)
    cmor.load_table(cmorTable)
    #cmor.set_cur_dataset_attribute('history',f.history) ; # Force input file attribute as history
    axes = [
        {
            'table_entry': 'time',
            'units':
Exemple #13
0
    def read(self, verbose=False, cfg=None):
        """Read NAR SST NetCDF files, mask low-confidence pixels using the
        proximity_confidence index, concatenate them into self.data and
        convert from Kelvin to degrees Celsius.

        verbose: when True, print memory-usage checkpoints around each file.
        cfg: optional config dict; when None, config.cfg is read instead.
        """
        import cdms2, sys, os, glob
        import numpy, MV2
        import cdtime
        from vacumm.misc.axes import create_lon, set_order
        from vacumm.misc.grid import create_grid, set_grid
        from vacumm.misc.atime import create_time
        from vacumm.misc.phys.units import kel2degc
        import gc

        # Get the configuration file information
        #cfg = self.get_config()
        #print cfg

        # -- Build a cdms object self.data; dates come from the file names
        self.data = ()  #Initialize a tuple
        # =============== WARNING ====================
        # Initialize self.data so memory is not duplicated!
        # ============================================

        # -- Improved method
        # Create a list
        files = []
        # Build the list of files matching the requested period
        if cfg is None:
            config = ConfigParser.RawConfigParser()
            config.read(os.path.join(self.SCRIPT_DIR, 'config.cfg'))
            hr_satellites = config.get('Nar SST', 'hr_satellites')
        else:
            hr_satellites = cfg['Nar SST']['hr_satellites']

        hr_satellites = hr_satellites.split(',')
        #hr_satellites = cfg['hr_satellites']

        #print hr_satellites

        ctest = self.ctdeb
        while ctest <= self.ctfin:
            for isat, s_name in enumerate(hr_satellites):

                flnme_only = '%04d%02d%02d*%s*.nc' % (ctest.year, ctest.month,
                                                      ctest.day, s_name)
                files.extend(glob.glob(os.path.join(flnme_only)))

            ctest = ctest.add(1, cdtime.Days)
        # --

        # Domain bounds from config file or the cfg dict.
        if cfg is None:
            lomin = float(config.get('Domain', 'lomin'))
            lomax = float(config.get('Domain', 'lomax'))
            lamin = float(config.get('Domain', 'lamin'))
            lamax = float(config.get('Domain', 'lamax'))
        else:
            lomin = cfg['Domain']['lomin']
            lomax = cfg['Domain']['lomax']
            lamin = cfg['Domain']['lamin']
            lamax = cfg['Domain']['lamax']

        #print files

        if files == []:
            print 'No data file to read ...'
        else:

            # ---- Read and create the grid ----
            #
            # -- Read file filename
            f = cdms2.open(files[0])
            lo = f.getVariable('lon')
            la = f.getVariable('lat')
            # -- Create the longitude and latitude axes
            #            lat_axis = create_lon(la,id='latitude')
            #            lon_axis = create_lon(lo,id='longitude')
            # -- Create the grid
            grid = create_grid(lo, la)

            del lo, la

            for ifile, filename in enumerate(files):

                # -- Read file filename
                f = cdms2.open(filename)

                temp2 = f('sea_surface_temperature')
                set_order(temp2, 'tyx')  # so averager.py works

                # J.Gatti change: use the quality index (0:unprocessed 1:cloudy 2:bad 3:suspect 4:acceptable 5:excellent)
                temp2.set_fill_value(temp2._FillValue)
                conf = f('proximity_confidence')
                MV2.putmask(temp2.data, conf.data < 3,
                            temp2._FillValue)  #  only changes data
                MV2.putmask(temp2.mask, conf.data < 3,
                            True)  # only changes mask
                #alternative method
                #--------------------
                #temp2=MV2.masked_where(conf.data<3,temp2)          # only changes mask
                #oldmask=temp2.mask
                #temp2[:]=temp2.filled()                                                  # changes data but sets mask to False everywhere
                #temp2.mask=oldmask                                                     # restores the proper mask, unlinked
                del conf
                # end J.Gatti change: quality index use

                # -- Attach the grid to the variable
                set_grid(temp2, grid, axes=True)

                temp = temp2(lon=(lomin, lomax), lat=(lamin, lamax))

                if verbose:
                    # == MEMORY USAGE TEST ===
                    print ctest, 'Avant'
                    #print psutil.Process(os.getpid()).get_memory_percent()
                    #print psutil.Process(os.getpid()).get_memory_info()
                    #print 'CPU percent: ', psutil.cpu_percent(interval=0.1)
                    #print 'Used phymem: ', psutil.used_phymem()
                    #print 'Used virtmem: ', psutil.used_virtmem()

                del temp2

                if verbose:
                    # == MEMORY USAGE TEST ===
                    print ctest, 'Apres del'
                    #print psutil.Process(os.getpid()).get_memory_percent()
                    #print psutil.Process(os.getpid()).get_memory_info()
                    #print 'CPU percent: ', psutil.cpu_percent(interval=0.1)
                    #print 'Used phymem: ', psutil.used_phymem()
                    #print 'Used virtmem: ', psutil.used_virtmem()

                # =============== WARNING ==================================
                # Check that temp always reuses the same memory ... see cdms
                # ==========================================================
                f.close()

                # -- Transfer temp into the cdat object self.data (concatenation)
                # =============== WARNING ====================
                # Use an intermediate pre-allocated variable to avoid
                # too many copies in memory!
                # ============================================
                #self.data += temp,
                if ifile == 0:
                    self.data = temp
                else:
                    self.data = MV2.concatenate((self.data, temp))

                if verbose:
                    # == MEMORY USAGE TEST ===
                    print ctest, 'Avant gccollect'
                    #print psutil.Process(os.getpid()).get_memory_percent()
                    #print psutil.Process(os.getpid()).get_memory_info()
                    #print 'CPU percent: ', psutil.cpu_percent(interval=0.1)
                    #print 'Used phymem: ', psutil.used_phymem()
                    #print 'Used virtmem: ', psutil.used_virtmem()
                    print gc.collect()
                gc.collect()

                if verbose:
                    # == MEMORY USAGE TEST ===
                    print ctest, 'Apres gccollect'
                    #print psutil.Process(os.getpid()).get_memory_percent()
                    #print psutil.Process(os.getpid()).get_memory_info()
                    #print 'CPU percent: ', psutil.cpu_percent(interval=0.1)
                    #print 'Used phymem: ', psutil.used_phymem()
                    #print 'Used virtmem: ', psutil.used_virtmem()

            # -- Build the time axis
            #taxis = create_time(cumt)

            # -- MV2.concatenate joins the pieces in self.data (and builds the time axis)
            #self.data = MV2.concatenate(self.data)

        # -- Dataset metadata
        #self.data.name = "NAR_SST"
        self.data.units = "degree_Celsius"
        self.data.standard_name = "satellite_sea_surface_temperature"
        self.data.long_name = "Satellite Sea Surface Temperature - NAR"

        # -- Change unit
        self.data = kel2degc(self.data)
Exemple #14
0
def cat(var1,var2,axis=0,verbose=False):
    '''Concatenate 2 variables along axis.

    <var1>,<var2>: Variables to be concatenated, in the order of \
            <var1>, <var2>;
    <axis>: int, index of axis to be concatenated along.

    Return <result>
    '''

    import MV2 as MV
    import numpy

    try:
        order=var1.getAxisListIndex()
    except AttributeError:
        # var1 is a bare numpy.ndarray: fall back to the natural order.
        order=numpy.arange(var1.ndim)

    var1=MV.array(var1)
    var2=MV.array(var2)

    # Best effort: harvest attributes so they can be re-attached to the
    # result; if harvesting fails, just skip that step at the end.
    try:
        attdict=attribute_obj2dict(var1)
        hasatt=True
    except Exception:
        hasatt=False

    # MV.concatenate() needs a 'units' attribute on the joined axis.
    if not hasattr(var1.getAxis(axis),'units'):
        ax=var1.getAxis(axis)
        ax.units=''
        var1.setAxis(axis,ax)
    if not hasattr(var2.getAxis(axis),'units'):
        ax=var2.getAxis(axis)
        ax.units=''
        var2.setAxis(axis,ax)

    if verbose:
        print('# <cat>: Original order:',order)

    if axis!=0:
        #----Switch order------
        # MV.concatenate() is unreliable when axis != 0 (the reason this
        # helper exists), so bring <axis> to the front, concatenate along
        # axis 0, then swap back. The swap order is its own inverse, so
        # re-applying it restores the original layout.
        order[axis]=0
        order[0]=axis
        if verbose:
            print('# <cat>: New order:',order)

        var1=var1(order=order)
        var2=var2(order=order)

        result=MV.concatenate((var1,var2))
        #result=numpy.concatenate((var1,var2),axis=0)

        #-------Switch back----------
        result=result(order=order)
    else:
        result=MV.concatenate((var1,var2))

    if hasatt:
        result=attribute_dict2obj(attdict,result)

    return result
Exemple #15
0
def precip_variability_across_timescale(file, syr, eyr, dfrq, mip, dat, var,
                                        fac, nperseg, noverlap, outdir, cmec):
    """
    Regridding -> Anomaly -> Power spectra -> Domain&Frequency average -> Write

    Reads `var` from `file` one calendar year at a time for syr..eyr, regrids
    each year, computes power spectra of the total and anomaly series,
    averages them per domain and frequency band, and writes NetCDF + JSON
    results under `outdir`.
    """
    psdmfm = {"RESULTS": {}}

    fin = cdms.open(file)
    calendar = fin[var].getTime().calendar
    # 360-day calendars end December on the 30th; all others on the 31st.
    last_day = 30 if "360" in calendar else 31
    print(dat, calendar)
    print("syr, eyr:", syr, eyr)

    drg = None
    for year in range(syr, eyr + 1):
        print(year)
        chunk = fin(
            var,
            time=(
                str(year) + "-1-1 0:0:0",
                str(year) + "-12-" + str(last_day) + " 23:59:59",
            ),
        ) * float(fac)

        # Regrid each year, then grow the concatenated series along time.
        rgtmp = Regrid2deg(chunk)
        if drg is None:
            drg = copy.deepcopy(rgtmp)
        else:
            drg = MV.concatenate((drg, rgtmp))
        print(year, drg.shape)

    fin.close()

    # Time steps per day, needed for the climatology/anomaly split.
    steps_per_day = {"day": 1, "3hr": 8}
    if dfrq not in steps_per_day:
        sys.exit("ERROR: dfrq " + dfrq + " is not defined!")
    ntd = steps_per_day[dfrq]
    clim, anom = ClimAnom(drg, ntd, syr, eyr)

    def _write_spectra(outfilename, freqs, ps, rn, sig95):
        # Save one set of spectra to the diagnostic-results directory.
        with cdms.open(
                os.path.join(outdir(output_type="diagnostic_results"),
                             outfilename), "w") as out:
            out.write(freqs, id="freqs")
            out.write(ps, id="power")
            out.write(rn, id="rednoise")
            out.write(sig95, id="sig95")

    # Power spectrum of the full series ("forced" variability).
    freqs, ps, rn, sig95 = Powerspectrum(drg, nperseg, noverlap)
    psdmfm_forced = Avg_PS_DomFrq(ps, freqs, ntd, dat, mip, "forced")
    _write_spectra("PS_pr." + str(dfrq) + "_regrid.180x90_" + dat + ".nc",
                   freqs, ps, rn, sig95)

    # Power spectrum of the anomaly series ("unforced" variability).
    freqs, ps, rn, sig95 = Powerspectrum(anom, nperseg, noverlap)
    psdmfm_unforced = Avg_PS_DomFrq(ps, freqs, ntd, dat, mip, "unforced")
    _write_spectra(
        "PS_pr." + str(dfrq) + "_regrid.180x90_" + dat + "_unforced.nc",
        freqs, ps, rn, sig95)

    # Domain/frequency-averaged results go into one JSON per dataset.
    psdmfm["RESULTS"][dat] = {
        "forced": psdmfm_forced,
        "unforced": psdmfm_unforced,
    }

    outfilename = ("PS_pr." + str(dfrq) + "_regrid.180x90_area.freq.mean_" +
                   dat + ".json")
    JSON = pcmdi_metrics.io.base.Base(outdir(output_type="metrics_results"),
                                      outfilename)
    JSON.write(
        psdmfm,
        json_structure=[
            "model+realization", "variability type", "domain", "frequency"
        ],
        sort_keys=True,
        indent=4,
        separators=(",", ": "),
    )
    if cmec:
        JSON.write_cmec(indent=4, separators=(",", ": "))
Exemple #16
0
# Exercise MV2.choose: values in ct1 index into the list of choices.  Index 0
# selects numpy.ma.masked, so the 100 at that slot of the expected array is a
# placeholder.  NOTE(review): this relies on MV2.allclose treating masked
# elements as "close" to anything — confirm against the MV2 docs.
ct1 = MV2.TransientVariable([1, 1, 2, 0, 1])
ctr = MV2.choose(ct1, [numpy.ma.masked, 10, 20, 30, 40])
if not MV2.allclose(ctr, [10, 10, 20, 100, 10]): markError('choose error 1')
# Clip values above 100 by choosing between the data and the constant 100.
ctx = MV2.TransientVariable([1, 2, 3, 150, 4])
cty = -MV2.TransientVariable([1, 2, 3, 150, 4])
ctr = MV2.choose(MV2.greater(ctx, 100), (ctx, 100))
if not MV2.allclose(ctr, [1, 2, 3, 100, 4]): markError('choose error 2')
# Where ctx > 100 take the corresponding value from cty (its negation).
ctr = MV2.choose(MV2.greater(ctx, 100), (ctx, cty))
if not MV2.allclose(ctr, [1, 2, 3, -150, 4]): markError('choose error 3')

## concatenate(arrays, axis=0, axisid=None, axisattributes=None)
##   Concatenate the arrays along the given axis. Give the extended axis the id and
##   attributes provided - by default, those of the first array.

# ud and vd are defined earlier in the file; a failure to join them along
# axis 0 is recorded via markError rather than raised.
try:
    xcon = MV2.concatenate((ud, vd))
except:
    markError('Concatenate error')

## isMaskedVariable(x)
##   Is x a masked variable, that is, an instance of AbstractVariable?
im1 = MV2.isMaskedVariable(xones)
im2 = MV2.isMaskedVariable(xmasked)

## outerproduct(a, b)
##   outerproduct(a,b) = {a[i]*b[j]}, has shape (len(a),len(b))
xouter = MV2.outerproduct(MV2.arange(16.), MV2.arange(32.))
# Attach the latitude/longitude axes of uf (read earlier) to the product.
lat = uf.getLatitude()
lon = uf.getLongitude()
xouter.setAxis(0, lat)
xouter.setAxis(1, lon)
Exemple #17
0
    pth + 'u_2002.nc',
]

# Loop over the yearly files: `accumulation` grows along the existing time
# axis, while `newdim` stacks each year under a brand-new leading axis so
# per-year statistics can be taken across it.
for file in files:
    f = cdms.open(file)
    u = f('u')

    if file == files[0]:  # First file
        sh = list(u.shape)  # Create a list with the shape of the data
        sh.insert(0, 1)  # Insert value 1 in front of the list
        accumulation = u
        newdim = MV.reshape(u, sh)  # Create a new 1D dimension

    else:
        # Append u at the end of accumulation on dimension 0
        accumulation = MV.concatenate((accumulation, u))
        tmp = MV.reshape(u, sh)  # Create a new 1D dimension
        newdim = MV.concatenate(
            (newdim, tmp))  # Add u to the newdim over the new dimension

    f.close()

print accumulation.shape  # All time steps appended over the same dimension
print newdim.shape  # Has a new leading dimension for years

avg = MV.average(accumulation)  # average over the (default) first axis
std = genutil.statistics.std(newdim)  # spread across years via the new axis

print avg.shape
print std.shape
Exemple #18
0
    def read(self):
        """Read NAR SST NetCDF files into ``self.data``.

        Builds the lon/lat grid from the zone's grid file, collects every
        data file dated between ``self.ctdeb`` and ``self.ctfin`` from
        ``self.WORKDIR``, concatenates the SST fields along time and
        converts them from Kelvin to degrees Celsius.
        """
        import cdms2, sys, os, glob
        import numpy, MV2
        import cdtime
        from vacumm.misc.axes import create_lon
        from vacumm.misc.grid import create_grid, set_grid
        from vacumm.misc.atime import create_time
        from vacumm.misc.phys.units import kel2degc

        # For NAR SST the lon/lat grid lives in a separate grid file.
        # -- Read the grid
        znar = self.ZONE_NAR
        gridfile = "grid_%(znar)s.nc" % vars()
        f = cdms2.open(gridfile)
        la = f('latitude')
        lo = f('longitude')
        f.close()

        # -- Build the longitude and latitude axes
        lat_axis = create_lon(la, id='latitude')
        lon_axis = create_lon(lo, id='longitude')
        # -- Build the grid
        grid = create_grid(lon_axis, lat_axis)

        # -- self.data accumulates the fields; dates come from the file names
        self.data = ()  # Initialize a tuple
        # =============== WARNING ====================
        # Initialize self.data so the data is not duplicated in memory!
        # ============================================

        #cumt = [] # Initialize an array

        # -- Loop over the files present in WORKDIR

        #url_file_def="%(YYYY)s%(MM)s%(DD)s%(HH)s%(ext)s"%vars()

        #self.ctdeb

        # -- Old method
        #files = glob.glob(os.path.join(self.WORKDIR, '2*.nc'))
        #files.sort()
        # --

        # -- Improved method
        # Build a list
        files = []
        # List the files matching the requested period
        ctest = self.ctdeb
        while ctest <= self.ctfin:
            flnme_only = '%(#)04d%(##)02d%(###)02d*.nc' % {
                '#': ctest.year,
                '##': ctest.month,
                '###': ctest.day
            }
            files.extend(glob.glob(os.path.join(self.WORKDIR, flnme_only)))
            ctest = ctest.add(1, cdtime.Days)
        # --

        for filename in files:
            # -- Read file `filename`
            f = cdms2.open(filename)
            temp = f('sea_surface_temperature')
            # =============== WARNING ==================================
            # Check that temp always reuses the same memory ... see cdms
            # ==========================================================
            f.close()

            # -- Date/time extraction from the file name (currently disabled)
            #ty = numpy.int(filename[-15:-11])
            #tm = numpy.int(filename[-11:-9])
            #tj = numpy.int(filename[-9:-7])
            #th = numpy.int(filename[-7:-5])
            #tt = cdtime.comptime(ty,tm,tj,th)
            #cumt.append(tt)

            # -- Append temp to the cdat object self.data (concatenation)
            # =============== WARNING ====================
            # Use an intermediate, pre-allocated variable to avoid
            # too many copies in memory!
            # ============================================
            self.data += temp,

        # -- Build the time axis (currently left to MV2.concatenate)
        #taxis = create_time(cumt)

        # -- MV2.concatenate joins the pieces of self.data (and builds the time axis)
        self.data = MV2.concatenate(self.data)

        # -- Attach the grid to self.data
        set_grid(self.data, grid, axes=True)

        # -- Dataset metadata
        self.data.name = "NAR_SST"
        self.data.units = "degree_Celsius"
        self.data.standard_name = "satellite_sea_surface_temperature"
        self.data.long_name = "Satellite Sea Surface Temperature - NAR"

        # -- Convert from Kelvin to Celsius
        self.data = kel2degc(self.data)
Exemple #19
0
# Open the sample clt dataset and give its time axis monthly bounds.
f = cdms2.open(
    os.path.join(cdms2.__path__[0], '..', '..', '..', '..', 'sample_data',
                 'clt.nc'))
s = f("clt")
cdutil.setTimeBoundsMonthly(s)

print 'Getting JJA, which should be inexistant in data'

# The first five months contain no June/July/August, so JJA must be None.
if cdutil.JJA(s[:5]) is not None:
    raise RuntimeError, "data w/o season did not return None"

## Create a year worth of data w/o JJA
s1 = s[:5]
s2 = s[8:12]

# Stitch the two pieces (and their time values) back into one variable
# with a proper time axis carrying the original units.
s3 = MV2.concatenate((s1, s2))
t = MV2.concatenate((s1.getTime()[:], s2.getTime()[:]))
t = cdms2.createAxis(t, id='time')
t.units = s.getTime().units
t.designateTime()

s3.setAxis(0, t)
cdutil.setTimeBoundsMonthly(s3)
# JJA extraction, departures and climatology must all report "no season".
if cdutil.JJA(s3) is not None:
    raise RuntimeError, "data w/o season did not return None"
if cdutil.JJA.departures(s3) is not None:
    raise RuntimeError, "data w/o season did not return None for dep"
if cdutil.JJA.climatology(s3) is not None:
    raise RuntimeError, "data w/o season did not return None for clim"

# Now gets seasonal cycle, should have JJA all missing
def regrid_models(variable, experiment):
    """Stack every OK "SW" model's regridded `variable` into one array.

    For each model the first available realization's yearly files are read,
    weighted by the model's land fraction, regridded onto the CESM2 grid and
    concatenated in time.  Models with a missing land-fraction file or too
    few data files are filled with 1.e20 and masked in the result.

    Returns a transient variable with axes (model, time, lat, lon).
    """

    # Get the target grid (and the per-model shape) from CESM2.
    model = "CESM2"

    rip = "r1i1p1f1"
    allfiles = sorted(
        glob.glob("/home/kdm2144/DROUGHT/DOWNLOADED_RAW/" + "/" + variable +
                  "/" + model + "/*." + experiment + ".*." + rip + ".*"))

    f = cdms.open(allfiles[0])
    data = f(variable)

    grid = data.getGrid()
    nyears = 86  # runs span 2015-2100: 86 years of monthly data
    model_shape = (12 * nyears, ) + grid.shape

    f.close()

    models = dh.get_ok_models("SW")
    nmodels = len(models)
    bigshape = (nmodels, ) + model_shape

    allmodels = MV.zeros(bigshape)
    for modeli in range(nmodels):
        model = models[modeli]
        print(model)

        # Get the land fraction used to weight the data.
        fixedvardirec = external_drive + "DROUGHT/fixedvar/"
        landfiles = glob.glob(fixedvardirec + "sftlf*" + model + ".*")

        if len(landfiles) == 1:
            fland = cdms.open(landfiles[0])
            landfrac = fland("sftlf")
            fland.close()
        else:
            print("can't find land fraction file for", model)
            print(landfiles)
            continue

        # Use the first (sorted) realization label that has files.
        allfiles_rips = sorted(
            glob.glob("/home/kdm2144/DROUGHT/DOWNLOADED_RAW/" + "/" +
                      variable + "/" + model + "/*." + experiment + ".*"))
        rips = sorted(np.unique([x.split(".")[3] for x in allfiles_rips]))
        if len(rips) > 0:
            rip = rips[0]
        else:
            continue
        allfiles = sorted(
            glob.glob("/home/kdm2144/DROUGHT/DOWNLOADED_RAW/" + "/" +
                      variable + "/" + model + "/*." + experiment + ".*." +
                      rip + ".*"))
        if len(allfiles) >= nyears:
            for i in range(nyears):
                f = cdms.open(allfiles[i])
                data = f(variable)
                # Weight by land fraction (percent -> fraction), repeated
                # over the 12 months of each yearly file.
                landdata = cmip5.cdms_clone(
                    np.repeat(.01 * landfrac.asma()[np.newaxis], 12, axis=0) *
                    data, data)
                data_regrid = landdata.regrid(grid, regridTool='regrid2')
                if i == 0:
                    bigdata = data_regrid
                else:
                    bigdata = MV.concatenate((bigdata, data_regrid))
                f.close()
            allmodels[modeli] = bigdata
        else:
            allmodels[modeli] = 1.e20
    # Mask the fill value used for models with no usable data.
    allmodels = MV.masked_where(np.abs(allmodels) > 1.e10, allmodels)
    modax = cmip5.make_model_axis(models)
    # Fix: the time-axis length was hard-coded as 12 * 86; derive it from
    # nyears so the axis always matches model_shape.
    tax = cdms.createAxis(np.arange(12 * nyears))
    tax.units = "months since 2015-1-1"
    tax.designateTime()
    tax.id = 'time'
    allmodels.setAxis(0, modax)
    allmodels.setAxis(1, tax)
    cdutil.setTimeBoundsMonthly(allmodels)
    # NOTE(review): lat/lon axes come from the *last* successfully regridded
    # model; this raises NameError if no model was processed at all.
    allmodels.setAxis(2, data_regrid.getLatitude())
    allmodels.setAxis(3, data_regrid.getLongitude())
    allmodels.name = variable
    allmodels.id = variable
    return allmodels
Exemple #21
0
                        # Verifier que temp utilise tout le temps le meme espace memoire ... voir du cote de cdms
                        # ==========================================================
                    f.close()

                    # -- Transfert de temp dans l'objet cdat self.data (concatenation)
                    # =============== ATTENTION ====================
                    # Faire une variable intermediaire qui sera au prealable allouee en memoire pour eviter
                    # trop de copies en memoire !!!!!!!!!!!!!!!!
                    # ============================================
                    self.data += (temp,)

                    # -- Creation de l'axe temporel
                    # taxis = create_time(cumt)

                    # -- MV2.concatenate pour concatener les informations dans self.data (entre autre construit l'axe temporel)
                self.data = MV2.concatenate(self.data)

                # -- Informations sur le dataset
                # self.data.name = "SEVIRI_SST"
                self.data.units = "degree_Celsius"
                self.data.standard_name = "satellite_sea_surface_temperature"
                self.data.long_name = "Satellite Sea Surface Temperature - SEVIRI"

                # -- Change unit
                self.data = kel2degc(self.data)

                # -- Fin de lecture des donnees
                # ----------------------------------------------------

    def read_assim(self, cfg=None):
        """ Lecture des fichiers NetCDF de SST SEVIRI formatte pour l'assimilation """
Exemple #22
0
    def read(self, verbose=False, cfg=None):
        """ Read NAR SST NetCDF files (quality-masked) into ``self.data`` """
        import cdms2,sys,os, glob
        import numpy,MV2
        import cdtime
        from vacumm.misc.axes import create_lon, set_order
        from vacumm.misc.grid import create_grid,  set_grid
        from vacumm.misc.atime import create_time
        from vacumm.misc.phys.units import kel2degc
        import gc

        # Get the configuration file information
        #cfg = self.get_config()
        #print cfg

        # -- self.data accumulates the fields; dates come from the file names
        self.data = () # Initialize a tuple
        # =============== WARNING ====================
        # Initialize self.data so the data is not duplicated in memory!
        # ============================================

        # -- Improved method
        # Build a list
        files = []
        # List the files matching the requested period
        if cfg is None:
	  config = ConfigParser.RawConfigParser()
	  config.read(os.path.join(self.SCRIPT_DIR,'config.cfg'))
	  hr_satellites = config.get('Nar SST', 'hr_satellites')
	else:
	  hr_satellites = cfg['Nar SST']['hr_satellites']

	hr_satellites = hr_satellites.split(',')
	#hr_satellites = cfg['hr_satellites']


        #print hr_satellites

        ctest = self.ctdeb
        while ctest <= self.ctfin:
            for isat, s_name in enumerate(hr_satellites):

                flnme_only = '%04d%02d%02d*%s*.nc'%(ctest.year, ctest.month, ctest.day, s_name)
                files.extend(glob.glob(os.path.join(flnme_only)))

            ctest=ctest.add(1,cdtime.Days)
        # --

	if cfg is None:
	  lomin = float(config.get('Domain', 'lomin') )
	  lomax = float(config.get('Domain', 'lomax')      )
	  lamin = float(config.get('Domain', 'lamin')     )
	  lamax = float(config.get('Domain', 'lamax')     )
	else:
	  lomin = cfg['Domain']['lomin']
	  lomax = cfg['Domain']['lomax']
	  lamin = cfg['Domain']['lamin']
	  lamax = cfg['Domain']['lamax']


        #print files

        if files == []:
            print 'No data file to read ...'
        else:

            # ---- Read the first file and build the grid ----
            #
            # -- Read the file
            f = cdms2.open(files[0])
            lo = f.getVariable('lon')
            la = f.getVariable('lat')
            # -- Build the longitude and latitude axes
#            lat_axis = create_lon(la,id='latitude')
#            lon_axis = create_lon(lo,id='longitude')
            # -- Build the grid
            grid = create_grid(lo, la)

            del lo,  la

            for ifile, filename in enumerate(files):


                # -- Read file `filename`
                f = cdms2.open(filename)

                temp2 = f('sea_surface_temperature')
                set_order(temp2, 'tyx') # so that averager.py works

                # J.Gatti change: use the quality index (0:unprocessed 1:cloudy 2:bad 3:suspect 4:acceptable 5:excellent)
                temp2.set_fill_value(temp2._FillValue)
                conf=f('proximity_confidence')
                MV2.putmask(temp2.data,conf.data<3,temp2._FillValue)   # changes only data
                MV2.putmask(temp2.mask,conf.data<3,True)                    # changes only mask
                # alternative method
                #--------------------
                #temp2=MV2.masked_where(conf.data<3,temp2)          # changes only mask
                #oldmask=temp2.mask
                #temp2[:]=temp2.filled()                                                  # changes data but sets mask to false everywhere
                #temp2.mask=oldmask                                                     # restores the right mask, unlinked
                del conf
                # end of J.Gatti change: quality-index masking

                # -- Attach the grid to the variable
                set_grid(temp2, grid, axes=True)

                temp = temp2(lon=(lomin, lomax), lat=(lamin, lamax))

                if verbose:
                    # == MEMORY USAGE CHECK ===
                    print ctest,  'Avant'
                    #print psutil.Process(os.getpid()).get_memory_percent()
                    #print psutil.Process(os.getpid()).get_memory_info()
                    #print 'CPU percent: ', psutil.cpu_percent(interval=0.1)
                    #print 'Used phymem: ', psutil.used_phymem()
                    #print 'Used virtmem: ', psutil.used_virtmem()

                del temp2

                if verbose:
                    # == MEMORY USAGE CHECK ===
                    print ctest,  'Apres del'
                    #print psutil.Process(os.getpid()).get_memory_percent()
                    #print psutil.Process(os.getpid()).get_memory_info()
                    #print 'CPU percent: ', psutil.cpu_percent(interval=0.1)
                    #print 'Used phymem: ', psutil.used_phymem()
                    #print 'Used virtmem: ', psutil.used_virtmem()

                # =============== WARNING ==================================
                # Check that temp always reuses the same memory ... see cdms
                # ==========================================================
                f.close()




                # -- Append temp to the cdat object self.data (concatenation)
                # =============== WARNING ====================
                # Use an intermediate, pre-allocated variable to avoid
                # too many copies in memory!
                # ============================================
                #self.data += temp,
                if ifile == 0:
                    self.data = temp
                else:
                    self.data = MV2.concatenate((self.data, temp))

                if verbose:
                    # == MEMORY USAGE CHECK ===
                    print ctest,  'Avant gccollect'
                    #print psutil.Process(os.getpid()).get_memory_percent()
                    #print psutil.Process(os.getpid()).get_memory_info()
                    #print 'CPU percent: ', psutil.cpu_percent(interval=0.1)
                    #print 'Used phymem: ', psutil.used_phymem()
                    #print 'Used virtmem: ', psutil.used_virtmem()
                    print gc.collect()
                gc.collect()

                if verbose:
                    # == MEMORY USAGE CHECK ===
                    print ctest,  'Apres gccollect'
                    #print psutil.Process(os.getpid()).get_memory_percent()
                    #print psutil.Process(os.getpid()).get_memory_info()
                    #print 'CPU percent: ', psutil.cpu_percent(interval=0.1)
                    #print 'Used phymem: ', psutil.used_phymem()
                    #print 'Used virtmem: ', psutil.used_virtmem()

            # -- Build the time axis (currently left to the concatenation)
            #taxis = create_time(cumt)

            # -- MV2.concatenate would join the pieces in self.data (unused here)
            #self.data = MV2.concatenate(self.data)




        # -- Dataset metadata
        #self.data.name = "NAR_SST"
        self.data.units = "degree_Celsius"
        self.data.standard_name = "satellite_sea_surface_temperature"
        self.data.long_name = "Satellite Sea Surface Temperature - NAR"

        # -- Convert from Kelvin to Celsius
        self.data = kel2degc(self.data)
Exemple #23
0
    iaxis = TransientVirtualAxis(idi, ni)
    jaxis = TransientVirtualAxis(idj, nj)
    lataxis = TransientAxis2D(lat,
                              axes=(iaxis, jaxis),
                              attributes={'units': lat_units},
                              id="latitude")
    lonaxis = TransientAxis2D(lon,
                              axes=(iaxis, jaxis),
                              attributes={'units': lon_units},
                              id="longitude")
    curvegrid = TransientGenericGrid(lataxis, lonaxis, tempmask=None)
    attributs = None
    vid = None
    if hasattr(v, 'attributes'): attributs = v.attributes
    if hasattr(v, 'id'): vid = v.id
    axis0 = v.getAxis(0)
    return cdms2.createVariable(v, axes=[axis0,iaxis,jaxis], grid=curvegrid, \
                               attributes=attributs, id=v.id)


# Build two identical (1, 1, 5) variables on a one-row curvilinear grid
# (CurveGrid is defined above) and join them along the leading axis.
lat = MV2.array([[-20, -10, 0, -15, -5]], 'f')
lon = MV2.array([[0, 10, 20, 50, 60]], 'f')

data1 = MV2.array([[[2, 3, 1, 6, 2]]], 'f')
data2 = MV2.array([[[2, 3, 1, 6, 2]]], 'f')

data1 = CurveGrid(data1, lat, lon)
data2 = CurveGrid(data2, lat, lon)

# Concatenate along axis 0; result should be shape (2, 1, 5) on the same grid.
result = MV2.concatenate([data1, data2], axis=0)
Exemple #24
0
 def testConcatenate(self):
     """Concatenating u and v stacks them along the leading (time) axis."""
     u_shape = self.u_file.shape
     v_shape = self.v_file.shape
     joined = MV2.concatenate((self.u_file, self.v_file))
     expected = (u_shape[0] + v_shape[0], u_shape[1], u_shape[2])
     self.assertEqual(joined.shape, expected)
Exemple #25
0
          pth+'u_2002.nc',
          ]

# Loop over the yearly files: `accumulation` grows along the existing time
# axis, while `newdim` stacks each year under a brand-new leading axis so
# per-year statistics can be taken across it.
for file in files:
    f=cdms.open(file)
    u=f('u')

    if file == files[0]:          # First file
        sh=list(u.shape)          # Create a list with the shape of the data
        sh.insert(0,1)            # Insert value 1 in front of the list
        accumulation = u
        newdim = MV.reshape(u,sh) # Create a new 1D dimension

    else:
        # Append u at the end of accumulation on dimension 0
        accumulation = MV.concatenate((accumulation,u))
        tmp = MV.reshape(u,sh)                # Create a new 1D dimension
        newdim = MV.concatenate((newdim,tmp)) # Add u to the newdim over the new dimension

    f.close()


print accumulation.shape   # All time steps appended over the same dimension
print newdim.shape         # Has a new leading dimension for years

avg = MV.average(accumulation)         # average over the (default) first axis
std = genutil.statistics.std(newdim)   # spread across years via the new axis

print avg.shape
print std.shape
                                temporary[region] = d_sub_aave(
                                    time=(start_t, end_t))
                                continue
                            else:
                                # n-1 year 7/1~12/31
                                part1 = copy.copy(temporary[region])
                                # n year 1/1~6/30
                                part2 = d_sub_aave(time=(cdtime.comptime(
                                    year), cdtime.comptime(year, 6, 30, 23,
                                                           59, 59)))
                                start_t = cdtime.comptime(year, 7, 1)
                                end_t = cdtime.comptime(
                                    year, 12, 31, 23, 59, 59)
                                temporary[region] = d_sub_aave(
                                    time=(start_t, end_t))
                                d_sub_aave = MV2.concatenate(
                                    [part1, part2], axis=0)
                                if debug:
                                    print('debug: ', region, year,
                                          d_sub_aave.getTime().asComponentTime())

                        # get pentad time series
                        list_d_sub_aave_chunks = list(
                            divide_chunks_advanced(d_sub_aave, n, debug=debug))
                        pentad_time_series = []
                        for d_sub_aave_chunk in list_d_sub_aave_chunks:
                            # ignore when chunk length is shorter than defined
                            if d_sub_aave_chunk.shape[0] >= n:
                                ave_chunk = MV2.average(
                                    d_sub_aave_chunk, axis=0)
                                pentad_time_series.append(float(ave_chunk))
                        if debug: