Example no. 1
def compute_rmse(mv1, mv2):
    """compute rmse and correlations """
    import genutil.statistics, numpy, cdutil
    RMSE = -numpy.infty
    CORR = -numpy.infty
    try:
        weights = cdutil.area_weights(mv1)
        RMSE = float( genutil.statistics.rms(mv1, mv2, axis='xy', weights=weights) )
        CORR = float( genutil.statistics.correlation( mv1, mv2, axis='xy', weights=weights) )
    except Exception, err:
        pass
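A minimal, hypothetical driver for the snippet above (not part of the original project), using the CDAT sample file clt.nc and assuming the truncated function ends with "return RMSE, CORR":

import os, sys, cdms2

f = cdms2.open(os.path.join(sys.prefix, "sample_data", "clt.nc"))
mv1 = f("clt", slice(0, 1), squeeze=1)   # first time step, a 2-D (lat, lon) field
mv2 = f("clt", slice(1, 2), squeeze=1)   # second time step, same grid
f.close()
rmse, corr = compute_rmse(mv1, mv2)      # assumes the excerpt's missing "return RMSE, CORR"
print 'RMSE:', rmse, 'correlation:', corr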
Example no. 2
def area_average_biome(fname):
    #Get the appropriate grid
    fland = cdms.open(cmip5.landfrac(fname))
    landfrac = fland("sftlf")
    fland.close()
    f = open(fname)
    K = pickle.load(f)
    f.close()
    climates = np.unique(K.compressed())
    d={}
    weights = cdutil.area_weights(landfrac)
    for climate in climates:
        d[climate]=float(MV.sum(MV.masked_where(K!=climate,landfrac*weights)))
    return d
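The per-class bookkeeping above reduces to summing area weights over the cells that satisfy a condition. A hypothetical stand-in using the CDAT sample clt.nc instead of the project-specific landfrac and pickle inputs:

import os, sys, cdms2, cdutil, MV2

f = cdms2.open(os.path.join(sys.prefix, "sample_data", "clt.nc"))
field = f("clt", slice(0, 1), squeeze=1)      # any 2-D (lat, lon) field
f.close()
weights = cdutil.area_weights(field)          # cell area fractions, summing to 1
# area fraction of the globe where cloud cover is at least 50%
frac = float(MV2.sum(MV2.masked_where(MV2.less(field, 50.), weights)))
print 'area fraction with clt >= 50:', frac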
Example no. 3
def compute_rmse(mv1, mv2):
    """compute rmse and correlations """
    import genutil.statistics, numpy, cdutil
    RMSE = -numpy.infty
    CORR = -numpy.infty
    try:
        weights = cdutil.area_weights(mv1)
        RMSE = float(
            genutil.statistics.rms(mv1, mv2, axis='xy', weights=weights))
        CORR = float(
            genutil.statistics.correlation(mv1,
                                           mv2,
                                           axis='xy',
                                           weights=weights))
    except Exception, err:
        pass
Example no. 4
def fingerprint_agreement_percentages(D,SM=None):
    pdsi_eof=da.get_orientation(D.ALL.solver)*D.ALL.solver.eofs()[0]
    mask = D.ALL.obs[0].mask
    if SM is None:
        SM = b.SoilMoisture(mask)
    SM30_eof=da.get_orientation(SM.solvers["30cm"] )*SM.solvers["30cm"].eofs()[0]
    SM2m_eof=da.get_orientation(SM.solvers["2m"] )*SM.solvers["2m"].eofs()[0]
    samesign=cmip5.cdms_clone(np.sign(pdsi_eof)*np.sign(SM30_eof),pdsi_eof)
    aw=cdutil.area_weights(pdsi_eof)
    test_area=np.ma.sum(MV.absolute(samesign)*aw)
    samesign_area=np.ma.sum(MV.masked_where(samesign<1,samesign)*aw)
    print "PDSI and 30cm have same sign in "+str(samesign_area/test_area*100)+"% of area"

    samesign=cmip5.cdms_clone(np.sign(pdsi_eof)*np.sign(SM2m_eof),pdsi_eof)
    samesign_area=np.ma.sum(MV.masked_where(samesign<1,samesign)*aw)
    print "PDSI and 2m have same sign in "+str(samesign_area/test_area*100)+"% of area"

    samesign=cmip5.cdms_clone(np.sign(SM30_eof)*np.sign(SM2m_eof),pdsi_eof)
    samesign_area=np.ma.sum(MV.masked_where(samesign<1,samesign)*aw)
    print "30cm and 2m have same sign in "+str(samesign_area/test_area*100)+"% of area"
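The area-fraction arithmetic repeated three times above boils down to this pattern (illustrative numbers only, plain numpy):

import numpy as np

a  = np.array([[ 1.0, -2.0], [ 0.5, 3.0]])   # field 1 (e.g. a PDSI EOF)
b  = np.array([[ 2.0,  1.0], [-0.5, 4.0]])   # field 2 (e.g. a soil-moisture EOF)
aw = np.array([[ 0.3,  0.3], [ 0.2, 0.2]])   # area weights (sum to 1)

samesign = np.sign(a) * np.sign(b)           # +1 where signs agree, -1 where they differ
test_area = np.sum(np.abs(samesign) * aw)    # total unmasked area
samesign_area = np.sum(np.where(samesign > 0, aw, 0.))
print "same sign in " + str(samesign_area / test_area * 100) + "% of area"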
Example no. 5
def calcSTDmap(a):
    # Calculate spatial standard deviation from 2D map field
    # a: cdms 2d (xy) variables
    wts = cdutil.area_weights(a)
    std = genutil.statistics.std(a, axis='xy', weights=wts)
    return float(std)
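Hypothetical usage of the helper above (it assumes cdutil and genutil are imported in the defining module), again with the CDAT sample data:

import os, sys, cdms2

f = cdms2.open(os.path.join(sys.prefix, "sample_data", "clt.nc"))
clt0 = f("clt", slice(0, 1), squeeze=1)   # one 2-D (lat, lon) time slice
f.close()
print 'area-weighted spatial std of clt:', calcSTDmap(clt0)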
Example no. 6
    result = averager(x, axis='tx', weight=['equal', 'equal'])
except AveragerError:
    print 'Failure! 29'


try:
    result = averager(x, axis='2t', weight=['generate', 'equal'])
except AveragerError:
    print 'Failure! 30'


#**********************************************************************
#
# Create the area weights 
#
aw = area_weights(x)
#
#
#**********************************************************************


try:
    result = averager(x, axis='x', weight=aw)
except AveragerError:
    print 'Failure! 31'


try:
    result = averager(x, axis='xy', weight=aw) 
except AveragerError:
    print 'Failure! 32'
Example no. 7
            basinmask3[yInd,xInd] = 1 ; # Correct to Atlantic
            basinmask4[yInd,xInd] = 1 ; # Correct to Atlantic
        print 'Split mask fix complete'

    basinmask3 = cdm.createVariable(np.int16(basinmask3),id='basinmask3',axes=[latitude,longitude])
    basinmask3.index = '1: Atlantic Ocean; 2: Pacific Ocean; 3: Indian Ocean;'
    basinmask4 = cdm.createVariable(np.int16(basinmask4),id='basinmask4',axes=[latitude,longitude])
    basinmask4.index = '1: Atlantic Ocean; 2: Pacific Ocean; 3: Indian Ocean; 4: Arctic Ocean;'

    # Add area weights
    earthSurfaceAreaKm2 = 510.1e6 ; # Corrected from km2^6 to km2 as below
    earthWaterAreaKm2   = 361.132e6
    earthLandAreaKm2    = 148.94e6
    #grid = cdm.createGenericGrid(latitude,longitude)
    grid = cdm.createVariable(np.ma.zeros([len(latitude),len(longitude)],),id='mask',axes=[latitude,longitude])
    frac = cdu.area_weights(grid)
    area = frac*(earthSurfaceAreaKm2*1e6) # ; Area m2
    area = mv.masked_where(basinmask3.mask,area) # ; Masked area 344.3 km^2
    basinmask3_area = cdm.createVariable(np.float32(area),id='basinmask3_area',axes=[latitude,longitude])
    basinmask3_area.earthSurfaceAreaM2  = earthSurfaceAreaKm2*1e6 ; # Corrected inflation km2 -> 1000x1000 -> m2
    basinmask3_area.earthWaterAreaM2    = earthWaterAreaKm2*1e6
    basinmask3_area.earthLandAreaM2     = earthLandAreaKm2*1e6
    basinmask3_area.oceanSurfaceAreaM2  = area.sum()
    basinmask3_area.units = 'm^2'

    # Plot to check
    #import vcs as vc
    #v1 = vc.init()
    #v1.plot(area)

    # Fix missing value within range - useful in uint8 case
Example no. 8
def generateSurfaceTypeByRegionMask(mask,sftbyrgn=None,sftbyrgnmask=215,regions=range(201,223),maximum_regions_per_cell=4,extend_up_to=3,verbose=True):
    """
    Maps a "regions" dataset onto a user provided land/sea mask or grid
    
    Usage:
    -----
    mapped,found = generateSurfaceTypeByRegionMask(mask,sftbyrgn=None,sftbyrgnmask=None,regions=None,maximum_regions_per_cell=4,extend_up_to=3,verbose=True)

    Input:
    -----
    mask                        User provided land/sea mask (100/0) or grid (a land/sea mask will then be generated automatically) onto which the internal "sftbyrgn" dataset will be mapped
    sftbyrgn                    Mask you wish to map onto your grid (if None uses internal "sftbyrgn" dataset (old ezget type))
    sftbyrgnmask                Land/sea mask for sftbyrgn, or a number giving the land/sea threshold value for sftbyrgn (greater values are land) - see the URL below for the integer region map
    regions                     Numbers from sftbyrgn array that you want to map onto mask (integers from 201-222)
    maximum_regions_per_cell    Maximum number of regions considered for a single cell
    extend_up_to                How many grid cells around a cell can we extend to identify a guess
    verbose                     Prints to the screen what's going on (default is True)

    Output:
    -----
    mapped                      Mapped input grid/mask using provided (or default) regions - sftbyrgn -> user provided grid/mask
    found                       Matrix containing number of regions matched for each output cell
    
    Notes:
    -----
    - More detailed information, including a region map and tabulated region numbers, is available from http://www-pcmdi.llnl.gov/publications/pdf/34.pdf
    """
    
    cdat_info.pingPCMDIdb("cdat","cdutil.generateSurfaceTypeByRegionMask")
    ## OK first determine which regions are available
    ## Must be integer values
    if isinstance(mask,cdms2.grid.TransientRectGrid):
        mask = cdutil.generateLandSeaMask(mask)*100.

    if sftbyrgn is None:
        sftbyrgn = cdms2.open(os.path.join(cdat_info.get_prefix(),'share','cdutil','sftbyrgn.nc'))('sftbyrgn')
        
    if regions is None:
        if verbose: print 'Preparing regions'
        #regions = range(201,223)

        regions = []
        for i in range(0,10000):
            genutil.statusbar(i,9999)
            c = float(MV2.sum(MV2.ravel(MV2.equal(sftbyrgn,i)),0))
            if c != 0: regions.append(i)

    if verbose: print 'Regions:',regions
    ## If no mask is passed for sftbyrgn, assume everything greater than 5000 is land
    if isinstance(sftbyrgnmask,int):
        split           = sftbyrgnmask
        n               = MV2.maximum(mask)
        sftbyrgnmask    = MV2.greater_equal(sftbyrgn,sftbyrgnmask)*n
    else:
        split           = MV2.maximum(sftbyrgnmask)/2.
    ## Now guess the type for each regions
    keys = {}
    ## ## Nice way to do it
    ##     for r in regions:
    ##         c=MV2.not_equal(sftbyrgn,r)
    ##         c=MV2.masked_where(c,sftbyrgnmask)
    ##         n=MV2.count(c)
    ##         c=float(MV2.sum(MV2.ravel(c),0)/n)
    ##         print r,c,n
    ##         keys[r]=c
    ## Fast but not so "general" way to do it
    for r in regions:
        if r< split:
            keys[r] = 0.
        else:
            keys[r] = 100.
    sh              = list(mask.shape)
    sh.insert(0,maximum_regions_per_cell)
    potential       = MV2.ones(sh,dtype='d')*-999
    potential_reg   = MV2.ones(sh,dtype='d')*-999

    g1  = sftbyrgn.getGrid()
    g2  = mask.getGrid()
    r1  = regrid2.Horizontal(g1,g2)
    w   = cdutil.area_weights(sftbyrgn)

    if verbose: print 'First pass'
    itmp = 0.
    for ireg in keys.keys():
        genutil.statusbar(itmp,len(keys.keys())-1)
        itmp += 1.
        c       = MV2.equal(sftbyrgn,ireg)
        w2      = 1.-c*w
        s2,w3   = r1(sftbyrgn,mask=w2.filled(),returnTuple=1)
        c2      = MV2.equal(mask,keys[ireg])
        loop(potential,potential_reg,c2,w3,ireg)

    found = MV2.zeros(sh[1:],typecode='f')
    for i in range(maximum_regions_per_cell):
        found = found+MV2.not_equal(potential[i],-999)
    sh2 = list(sh)
    for k in range(extend_up_to):
        sh2[1] = sh[1]+2*(k+1)
        sh2[2] = sh[2]+2*(k+1)
        ## Form the possible i/j couples !
        s = MV2.sum(MV2.ravel(MV2.equal(potential[0],-999)),0)
        if verbose: print 'Expanding up to',k+1,'cells while trying to fix',s,'cells'
            #if dump:
                #f=cdms2.open('tmp_'+str(k)+'.nc','w')
                #f.write(sumregions(potential_reg,potential).astype('f'),id='sftbyrgn',axes=mask.getAxisList())
                #f.close()
                #g=sumregions(potential_reg,potential).astype('d')
                #g=MV2.masked_equal(g,-999)
                #g=MV2.greater(g,4999)*100.
                #g=MV2.absolute(mask-g)
                #g=MV2.masked_equal(g,0.)
                #print 'Number of differences:',MV2.count(g)

        if float(s) != 0:
            c0 = MV2.equal(potential[0],-999)
            couples = []
            sft2 = MV2.zeros(sh2[1:],dtype='d')-888.
            sft2[k+1:-k-1,k+1:-k-1] = mask
            for i in range(-k-1,k+2):
                for j in range(-k-1,k+2):
                    if abs(i)>k or abs(j)>k: couples.append([i,j])
            ntot = len(keys.keys())*len(couples)-1
            itmp = 0
            for ireg in keys.keys():
                c = MV2.equal(sftbyrgn,ireg)
                w2 = 1.-c*w
                s2,w3 = r1(sftbyrgn,mask=w2.filled(),returnTuple=1)
                w4 = MV2.zeros(sh2[1:],typecode='d')
                w4[k+1:-k-1,k+1:-k-1] = w3
                for i,j in couples:
                    if verbose: genutil.statusbar(itmp,ntot)
                    itmp += 1.
                    c2 = MV2.equal(sft2[j+k+1:j+k+1+sh[1],i+k+1:i+k+1+sh[2]],keys[ireg])
                    c3 = MV2.equal(sft2[j+k+1:j+k+1+sh[1],i+k+1:i+k+1+sh[2]],mask)
                    c2 = MV2.logical_and(c2,c3)
                    c2 = MV2.logical_and(c2,c0)
                    loop(potential,potential_reg,c2,w4[j+k+1:j+k+1+sh[1],i+k+1:i+k+1+sh[2]],ireg)
           
        found = MV2.where(MV2.equal(potential[0],-999),found-1,found)

    out = sumregions(potential_reg,potential)
    out.setAxisList(mask.getAxisList())
    out.id = 'sftbyrgn'
    out = out.astype('i')
    out.missing_value = -999
    found.setAxisList(mask.getAxisList())
    found.id = 'found'
    found = found.astype('i')
    found.missing_value = -999

    del(out.name)
    del(found.name)
    return out,found
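A hypothetical call following the docstring above: pass either a 100/0 land/sea mask or a target grid (here a coarse uniform grid built with cdms2.createUniformGrid) and keep the default internal sftbyrgn dataset:

import cdms2

# 36 x 72 uniform 5-degree target grid (latitude centres from -87.5, longitudes from 2.5)
target_grid = cdms2.createUniformGrid(-87.5, 36, 5.0, 2.5, 72, 5.0)
mapped, found = generateSurfaceTypeByRegionMask(target_grid, verbose=False)
print mapped.shape, found.shape           # both on the 36 x 72 target grid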
Example no. 9
    def _compute(self,icall,ref=None,test=None,output=None,returnTuple=1):
        idoclim=self.idoclim

        testin=test
        refin=ref
        # Test passed ?
        if test is None or ref is None:
            ref,test=self.get(returnTuple=returnTuple)
            testin=test
            refin=ref

        if isinstance(test,(list,tuple)):
            testfrac=test[1]
            test=test[0]
        else:
            testfrac=test.mask
            if not testfrac is None:
                testfrac=MV2.array(1.-testfrac)
            else:
                testfrac=MV2.ones(test.shape,numpy.float32)
            testfrac.setAxisList(test.getAxisList())

        if isinstance(ref,(list,tuple)):
            reffrac=ref[1]
            ref=ref[0]
        else:
            reffrac=ref.mask
            if not reffrac is None:
                reffrac=MV2.array(1.-reffrac)
            else:
                reffrac=MV2.ones(ref.shape,numpy.float32)
            reffrac.setAxisList(ref.getAxisList())

        if test.shape != ref.shape:
            raise ValueError, "Input arrays have different shape:"+str(test.shape)+" "+str(ref.shape)

##         # Calendar 360 ?
##         try:
##             icalndr=test.getTime().getCalendar()
##             if icalndr == cdtime.Calendar360:
##                 icalndr = 2
##             else:
##                 icalndr = 1
##         except Exception,err:
##             icalndr = 1



##         sh=ref.shape
##         tmpw=cdutil.area_weights(reffrac(squeeze=1)[0])
##         lenreg=sh[-1]*sh[-2]
##         tmpw=MV2.reshape(tmpw,(lenreg,))
##         tmpw=MV2.resize(tmpw,(sh[0],lenreg))
##         tmpw=MV2.reshape(tmpw,sh)

##         a=reffrac*tmpw
##         print 'Right inside::',sh[0],MA.sum(a.ravel())/sh[0],MA.sum(MA.ravel(reffrac))/sh[0]



        icalndr = 1

        # lon lat time only ?
        if test.rank()!=3:
            test=test(squeeze=1)
            testfrac=testfrac(squeeze=1)
            if test.rank()!=3:
                raise ComparisonStatisticsError," Rank of test data is not 3 !"
        if ref.rank()!=3:
            ref=ref(squeeze=1)
            reffrac=reffrac(squeeze=1)
            if ref.rank()!=3:
                raise ComparisonStatisticsError," Rank of ref data is not 3 !"
        # Time
        tim=test.getTime()

        # Dimensions lengths
        nlat = len(test.getLatitude())
        nlon = len(test.getLongitude())
        lenreg =  nlat*nlon
        lentime = len(tim)

        # Input Frequency
        inptfreq=self.inptfreq
        if inptfreq!=12:
            raise ComparisonStatisticsError,'Frequency can only be 12 at this time\n(mosea1 is not treated right if not 1)'


        # First month
        mosea1 = tim.asComponentTime()[0].month
        ## Makes sure the reference starts the same month
        testrefvalid=ref.getTime().asComponentTime()[0].month
        if testrefvalid!=mosea1:
            raise ComparisonStatisticsError," Reference and Test do not start at the same month: ref is "+str(testrefvalid)+", Test is: "+str(mosea1)
##         Commented out on 7/10 by Karl, he thinks we don't need it
##         if 12 < icall < 17:
##             if mosea1 < 3:
##                 mosea1 = 1
##             elif mosea1 < 6:
##                 mosea1 = 2
##             elif mosea1 < 9:
##                 mosea1 = 3
##             elif mosea1 < 12:
##                 mosea1 = 4
##             else:
##                 mosea1 = 1
##   Commented by C Doutriaux on 7/8/2003,
##   seemed to create offset in ref starting at 2
## So why was there an else? A complete mystery!
##         else:
##             mosea1 = 1

        # Reshape test (time,space)
        tmptest=MV2.reshape(test,(lentime,lenreg))
        # TestMask stuff
        # First makes weights 1 for time dim
        sh=test.shape
        a=MV2.ones(tuple(sh[1:]))
        a.setAxisList(test.getAxisList()[1:])
        tmpw=cdutil.area_weights(a)
        tmpw=MV2.reshape(tmpw,(lenreg,))
        tmpw=MV2.resize(tmpw,(lentime,lenreg))
        tmpw=MV2.reshape(tmpw,test.shape)
        testmask=tmpw*testfrac
        testmask=MV2.reshape(testmask,(lentime,lenreg))
        # Reshape reference (time,space)
        tmpref=MV2.reshape(ref,(lentime,lenreg))
        # RefMask stuff
        refmask=tmpw*reffrac
        refmask=MV2.reshape(refmask,(lentime,lenreg))
        # Fill the MV2s
        tmptest=tmptest.filled(0.)
        tmpref=tmpref.filled(0.)
        testmask=testmask.filled(0.).astype(numpy.float32)
        refmask=refmask.filled(0.).astype(numpy.float32)
        # Max year
        maxyr= (lentime -1) / inptfreq + 1

        # Number of times per year
        if icall <= 17:
            itimpyr=1
##         elif icall<=16:
##             itimpyr=1
##             # Now make sure it's a multiple of 4 !
##             if lentime % 4 != 0:
##                 nadd = 4 - (lentime % 4) # number of time steps to add
##         elif icall==17:
##             itimpyr=1
##             # Now make sure it's a multiple of 12 !
##             if lentime % 12 != 0:
##                 nadd = 12 - (lentime % 12) # number of time steps to add
        elif icall == 18:
            itimpyr = 4
        elif icall == 19:
            itimpyr = inptfreq

        # Now adds the missing months at the end if necessary
##         if nadd != 0:
##             sh=tmptest.shape
##             zadd=Numeric.zeros((nadd,sh[1]),Numeric.Float32)
##             tmptest=Numeric.concatenate((tmptest,zadd))
##             tmpref=Numeric.concatenate((tmpref,zadd))
##             testmask=Numeric.concatenate((testmask,zadd))
##             refmask=Numeric.concatenate((refmask,zadd))
##             lentime += nadd # New length of time dimension

        # determine nmx
        nmx = itimpyr * maxyr
        fracmin=self.fracmin
        minyr=self.minyr

        # Additional masking where ref is masked
        testmask=numpy.where(numpy.equal(refmask,0.),0.,testmask).astype(numpy.float32)




        if icall == 19:
##             mosea1=numpy.array(mosea1,copy=1) # make sure it's contiguous
            testmask = testmask.transpose()
            compall.mkmask(icalndr, itimpyr, mosea1, minyr, fracmin, testmask, refmask.transpose(),lenreg, lentime)
            testmask = testmask.transpose()
            tmptest=test.filled(1.E20)
            testmask=numpy.reshape(testmask,(lentime,nlat,nlon))
            tmpref=ref.filled(1.E20)

            
            
            # temporary arrays
            wt6=numpy.zeros((nlon,nlat,itimpyr))
            a2=numpy.zeros((itimpyr))
            a3=numpy.zeros((nlat))
            a4=numpy.zeros((nlat,itimpyr))
            a5=numpy.zeros((nlon,nlat))
            a6=numpy.zeros((nlon,nlat,itimpyr))
            b2=numpy.zeros((itimpyr))
            b3=numpy.zeros((nlat))
            b4=numpy.zeros((nlat,itimpyr))
            b5=numpy.zeros((nlon,nlat))
            b6=numpy.zeros((nlon,nlat,itimpyr))
            wt2=numpy.zeros((itimpyr))
            wt3=numpy.zeros((nlat))
            wt4=numpy.zeros((nlat,itimpyr))
            wt5=numpy.zeros((nlon,nlat))
            ai1=numpy.zeros((maxyr))
            ai2=numpy.zeros((lentime))
            ai3=numpy.zeros((nlat,maxyr))
            ai4=numpy.zeros((nlat,lentime))
            ai5=numpy.zeros((nlon,nlat,maxyr))
            bi1=numpy.zeros((maxyr))
            bi2=numpy.zeros((lentime))
            bi3=numpy.zeros((nlat,maxyr))
            bi4=numpy.zeros((nlat,lentime))
            bi5=numpy.zeros((nlon,nlat,maxyr))
            wi1=numpy.zeros((maxyr))
            wi2=numpy.zeros((lentime))
            wi3=numpy.zeros((nlat,maxyr))
            wi4=numpy.zeros((nlat,lentime))
            wi5=numpy.zeros((nlon,nlat,maxyr))
            siwyr=numpy.zeros((maxyr*2))
            siayr=numpy.zeros((maxyr*2))
            sibyr=numpy.zeros((maxyr*2))

            testmask=testmask.transpose()
            wts, avga, avgb, vara, varb, correl, rms = compall.resolve(idoclim, tmptest.transpose(), tmpref.transpose(), testmask,
                                                                       a2,a3,a4,a5,a6,
                                                                       b2,b3,b4,b5,b6,
                                                                       wt2,wt3,wt4,wt5,wt6,
                                                                       ai1,ai2,ai3,ai4,ai5,
                                                                       bi1,bi2,bi3,bi4,bi5,
                                                                       wi1,wi2,wi3,wi4,wi5,
                                                                       siwyr,siayr,sibyr,
                                                                       nlon, nlat, itimpyr, maxyr, lentime,
                                                                       )
            testmask=testmask.transpose()
        else:
##             mosea1=numpy.array(mosea1,copy=1) # make sure it's contiguous
##             testmask = numpy.array(testmask,copy=1)
##             tmptest = numpy.array(tmptest,copy=1)
##             refmask = numpy.array(refmask,copy=1)
##             tmpref = numpy.array(tmpref,copy=1)
            testmask = numpy.transpose(testmask)
            tmptest = numpy.transpose(tmptest)
            refmask = numpy.transpose(refmask)
            tmpref = numpy.transpose(tmpref)
##             testmask = numpy.array(testmask,copy=1)
##             tmptest = numpy.array(tmptest,copy=1)
##             refmask = numpy.array(refmask,copy=1)
##             tmpref = numpy.array(tmpref,copy=1)

            a1,awt1,a2,awt2,lentime=compall.mksubset(icall,icalndr,idoclim,
                                                     inptfreq,mosea1,testmask, tmptest,refmask,tmpref,nmx,lenreg,lentime)
            compall.mkmask(icalndr, itimpyr, mosea1, minyr, fracmin, awt1[:,:lentime], awt2[:,:lentime],lenreg, lentime)
            a1=numpy.reshape(numpy.transpose(a1),(nmx,nlat,nlon))
            awt1=numpy.reshape(numpy.transpose(awt1),(nmx,nlat,nlon))
            a2=numpy.reshape(numpy.transpose(a2),(nmx,nlat,nlon))
            # temporary arrays
            wt6=numpy.zeros((nlon,nlat,itimpyr))
            A2=numpy.zeros((itimpyr))
            a3=numpy.zeros((nlat))
            a4=numpy.zeros((nlat,itimpyr))
            a5=numpy.zeros((nlon,nlat))
            a6=numpy.zeros((nlon,nlat,itimpyr))
            b2=numpy.zeros((itimpyr))
            b3=numpy.zeros((nlat))
            b4=numpy.zeros((nlat,itimpyr))
            b5=numpy.zeros((nlon,nlat))
            b6=numpy.zeros((nlon,nlat,itimpyr))
            wt2=numpy.zeros((itimpyr))
            wt3=numpy.zeros((nlat))
            wt4=numpy.zeros((nlat,itimpyr))
            wt5=numpy.zeros((nlon,nlat))
            ai1=numpy.zeros((maxyr))
            ai2=numpy.zeros((lentime))
            ai3=numpy.zeros((nlat,maxyr))
            ai4=numpy.zeros((nlat,lentime))
            ai5=numpy.zeros((nlon,nlat,maxyr))
            bi1=numpy.zeros((maxyr))
            bi2=numpy.zeros((lentime))
            bi3=numpy.zeros((nlat,maxyr))
            bi4=numpy.zeros((nlat,lentime))
            bi5=numpy.zeros((nlon,nlat,maxyr))
            wi1=numpy.zeros((maxyr))
            wi2=numpy.zeros((lentime))
            wi3=numpy.zeros((nlat,maxyr))
            wi4=numpy.zeros((nlat,lentime))
            wi5=numpy.zeros((nlon,nlat,maxyr))
            siwyr=numpy.zeros((maxyr*2))
            siayr=numpy.zeros((maxyr*2))
            sibyr=numpy.zeros((maxyr*2))

            awt1=awt1[:lentime].transpose()
            wts, avga, avgb, vara, varb, correl, rms = compall.resolve(idoclim,
                                                                       numpy.transpose(a1[:lentime]),
                                                                       numpy.transpose(a2[:lentime]),
                                                                       awt1,
                                                                       A2,a3,a4,a5,a6,
                                                                       b2,b3,b4,b5,b6,
                                                                       wt2,wt3,wt4,wt5,wt6,
                                                                       ai1,ai2,ai3,ai4,ai5,
                                                                       bi1,bi2,bi3,bi4,bi5,
                                                                       wi1,wi2,wi3,wi4,wi5,
                                                                       siwyr,siayr,sibyr,
                                                                       nlon, nlat, itimpyr, maxyr, lentime,
                                                                       )
            awt1=awt1.transpose()
        wts    = wts.transpose()
        avga   = avga.transpose()
        avgb   = avgb.transpose()
        vara   = vara.transpose()
        varb   = varb.transpose()
        correl = correl.transpose()
        rms = rms.transpose()
        
        if not refin is None:
            ref=refin
            if isinstance(ref,(list,tuple)):
                ref,reffrac=refin
        if not testin is None:
            test=testin
            if isinstance(test,(list,tuple)):
                test,testfrac=testin
        if returnTuple:
            return (ref,reffrac),(test,testfrac), wts, avga, avgb, vara, varb, correl, rms
        else:
            return ref, test, wts, avga, avgb, vara, varb, correl, rms
Example no. 10
 if m=='FGOALS-gl':
     if string.split(timereg(t11)[0],'-')[0]!='1000':
         raise 
 else:
     if string.split(timereg(t11)[0],'-')[0]!='850':
         raise
 if string.split(timereg(t11)[1],'-')[0]!='1699': raise
 #if string.split(timereg(t11)[1],'-')[0]!='1805': raise
 cdutil.times.setTimeBoundsMonthly(t11)
 print t11.shape
 t1=MV.zeros((t11.shape[0]),typecode=MV.float32)
 t2=MV.zeros((t11.shape[0]),typecode=MV.float32)
 t1z=MV.zeros((t11.shape[0],t11.shape[1]),typecode=MV.float32)
 for i in range(t1.shape[0]):
     print i
     wgt = cdutil.area_weights(t11[i,:,:])
     t1[i] = cdutil.averager(t11[i,:,:],axis='xy',weight=wgt)
     t1z[i] = cdutil.averager(t11[i,:,:],axis='x',weight=wgt)
 t1.setAxis(0,t11.getAxis(0))
 t2.setAxis(0,t11.getAxis(0))
 t1z.setAxis(0,t11.getAxis(0))
 t1z.setAxis(1,t11.getAxis(1))
     
 ##############
 ##############
 ##############
 ##############
 # comment one or the other    
 ##############
 #MOY GLOBAL
 ##############
Example no. 11
    basinmask4 = cdm.createVariable(np.int16(basinmask4),
                                    id='basinmask4',
                                    axes=[latitude, longitude])
    basinmask4.index = '1: Atlantic Ocean; 2: Pacific Ocean; 3: Indian Ocean; 4: Arctic Ocean;'

    # Add area weights
    earthSurfaceAreaKm2 = 510.1e6
    # Corrected from km2^6 to km2 as below
    earthWaterAreaKm2 = 361.132e6
    earthLandAreaKm2 = 148.94e6
    #grid = cdm.createGenericGrid(latitude,longitude)
    grid = cdm.createVariable(np.ma.zeros(
        [len(latitude), len(longitude)], ),
                              id='mask',
                              axes=[latitude, longitude])
    frac = cdu.area_weights(grid)
    area = frac * (earthSurfaceAreaKm2 * 1e6)  # ; Area m2
    area = mv.masked_where(basinmask3.mask, area)  # ; Masked area 344.3 km^2
    basinmask3_area = cdm.createVariable(np.float32(area),
                                         id='basinmask3_area',
                                         axes=[latitude, longitude])
    basinmask3_area.earthSurfaceAreaM2 = earthSurfaceAreaKm2 * 1e6
    # Corrected inflation km2 -> 1000x1000 -> m2
    basinmask3_area.earthWaterAreaM2 = earthWaterAreaKm2 * 1e6
    basinmask3_area.earthLandAreaM2 = earthLandAreaKm2 * 1e6
    basinmask3_area.oceanSurfaceAreaM2 = area.sum()
    basinmask3_area.units = 'm^2'

    # Plot to check
    #import vcs as vc
    #v1 = vc.init()
Example no. 12
        latitude.long_name      = 'latitude'
        latitude.axis           = 'Y'
        delattr(latitude,'realtopology')
        # longitude
        longitude               = var.getAxis(2)
        longitude.id            = 'lon'
        longitude.standard_name = 'longitude'
        longitude.long_name     = 'longitude'
        longitude.axis          = 'X'
        delattr(longitude,'realtopology')

        # Create areacello and sftof variables
        fxFiles = ['areacello','sftof']
        if 'amipbc_sic' in filePath:
            # areacello
            areacello                   = cdu.area_weights(var[0,]) ; # areacello.sum() = 1.0
            earthSurfaceArea            = 510.1 ; # million km2
            earthSurfaceAreaM2          = earthSurfaceArea*1e12 ; # m2
            areacelloM2                 = areacello*earthSurfaceAreaM2
            areacelloM2.standard_name   = 'cell_area'
            areacelloM2.long_name       = 'Ocean Grid-Cell Area'
            areacelloM2.units           = 'm2'
            areacelloM2.id              = 'areacello'
            areacello                   = areacelloM2 ; del(areacelloM2)
            # sftlf
            maskFile                    = '/work/durack1/Shared/obs_data/WOD13/170425_WOD13_masks_1deg.nc'
            fMask                       = cdm.open(maskFile)
            landSea1deg                 = fMask('landsea')
            # Fix longitude
            a                           = landSea1deg[:,0:180]
            b                           = landSea1deg[:,180:]
Example no. 13
def generateSurfaceTypeByRegionMask(mask,
                                    sftbyrgn=None,
                                    sftbyrgnmask=215,
                                    regions=range(201, 223),
                                    maximum_regions_per_cell=4,
                                    extend_up_to=3,
                                    verbose=True):
    """ Maps a "types" dataset onto a landsea mask
    Usage:
    mapped,found = generateSurfaceTypeByRegionMask(mask,sftbyrgn,sftbyrgnmask=None,regions=None,maximum_regions_per_cell=4,extend_up_to=3,verbose=True)
    Input:
    mask : land/sea mask (100/0) or grid onto which you wish to map the regions (a land/sea mask will be generated for you)
    sftbyrgn: mask you wish to map
              if None, uses the internal "sftbyrgn" dataset (old ezget type)
    sftbyrgnmask: land/sea mask for sftbyrgn,
                  or a number giving the threshold value in sftbyrgn
                  that separates land from sea (greater values are land)
    regions: numbers from the sftbyrgn array that you want to map onto mask
    maximum_regions_per_cell: maximum number of regions considered in a cell
    extend_up_to : how many grid cells away from a cell we may extend to identify a guess
    verbose: prints to the screen what's going on (default is True)

    Output:
     mapped : mapped input mask
     found  : number of regions matched for each output cell
    """
    ## OK first determine which regions are available
    ## Must be integer values
    if isinstance(mask, cdms2.grid.TransientRectGrid):
        mask = cdutil.generateLandSeaMask(mask) * 100.

    if sftbyrgn is None:
        sftbyrgn = cdms2.open(
            os.path.join(sys.prefix, 'sample_data', 'sftbyrgn.nc'))('sftbyrgn')

    if regions is None:
        if verbose: print 'Preparing regions'
        ##         regions = range(201,223)

        regions = []
        for i in range(0, 10000):
            genutil.statusbar(i, 9999)
            c = float(MV2.sum(MV2.ravel(MV2.equal(sftbyrgn, i)), 0))
            if c != 0: regions.append(i)

    if verbose: print 'Regions:', regions
    ## If no mask is passed for sftbyrgn, assume everything greater than 5000 is land
    if isinstance(sftbyrgnmask, int):
        split = sftbyrgnmask
        n = MV2.maximum(mask)
        sftbyrgnmask = MV2.greater_equal(sftbyrgn, sftbyrgnmask) * n
    else:
        split = MV2.maximum(sftbyrgnmask) / 2.
    ## Now guess the type for each regions
    keys = {}
    ## ## Nice way to do it
    ##     for r in regions:
    ##         c=MV2.not_equal(sftbyrgn,r)
    ##         c=MV2.masked_where(c,sftbyrgnmask)
    ##         n=MV2.count(c)
    ##         c=float(MV2.sum(MV2.ravel(c),0)/n)
    ##         print r,c,n
    ##         keys[r]=c
    ## Fast but not so "general" way to do it
    for r in regions:
        if r < split:
            keys[r] = 0.
        else:
            keys[r] = 100.
    sh = list(mask.shape)
    sh.insert(0, maximum_regions_per_cell)
    potential = MV2.ones(sh, dtype='d') * -999
    potential_reg = MV2.ones(sh, dtype='d') * -999

    g1 = sftbyrgn.getGrid()
    g2 = mask.getGrid()
    r1 = regrid2.Regridder(g1, g2)
    w = cdutil.area_weights(sftbyrgn)

    if verbose: print 'First pass'
    itmp = 0.
    for ireg in keys.keys():
        genutil.statusbar(itmp, len(keys.keys()) - 1)
        itmp += 1.
        c = MV2.equal(sftbyrgn, ireg)
        w2 = 1. - c * w
        s2, w3 = r1(sftbyrgn, mask=w2.filled(), returnTuple=1)
        c2 = MV2.equal(mask, keys[ireg])
        loop(potential, potential_reg, c2, w3, ireg)

    found = MV2.zeros(sh[1:], typecode='f')
    for i in range(maximum_regions_per_cell):
        found = found + MV2.not_equal(potential[i], -999)
    sh2 = list(sh)
    for k in range(extend_up_to):
        sh2[1] = sh[1] + 2 * (k + 1)
        sh2[2] = sh[2] + 2 * (k + 1)
        ## Form the possible i/j couples !
        s = MV2.sum(MV2.ravel(MV2.equal(potential[0], -999)), 0)
        if verbose:
            print 'Expanding up to', k + 1, 'cells while trying to fix', s, 'cells'
        ##         if dump:
        ##             f=cdms2.open('tmp_'+str(k)+'.nc','w')
        ##             f.write(sumregions(potential_reg,potential).astype('f'),id='sftbyrgn',axes=mask.getAxisList())
        ##             f.close()
        ##         g=sumregions(potential_reg,potential).astype('d')
        ##         g=MV2.masked_equal(g,-999)
        ##         g=MV2.greater(g,4999)*100.
        ##         g=MV2.absolute(mask-g)
        ##         g=MV2.masked_equal(g,0.)
        ##         print 'Number of differences:',MV2.count(g)

        if float(s) != 0:
            c0 = MV2.equal(potential[0], -999)
            couples = []
            sft2 = MV2.zeros(sh2[1:], dtype='d') - 888.
            sft2[k + 1:-k - 1, k + 1:-k - 1] = mask
            for i in range(-k - 1, k + 2):
                for j in range(-k - 1, k + 2):
                    if abs(i) > k or abs(j) > k: couples.append([i, j])
            ntot = len(keys.keys()) * len(couples) - 1
            itmp = 0
            for ireg in keys.keys():
                c = MV2.equal(sftbyrgn, ireg)
                w2 = 1. - c * w
                s2, w3 = r1(sftbyrgn, mask=w2.filled(), returnTuple=1)
                w4 = MV2.zeros(sh2[1:], typecode='d')
                w4[k + 1:-k - 1, k + 1:-k - 1] = w3
                for i, j in couples:
                    if verbose: genutil.statusbar(itmp, ntot)
                    itmp += 1.
                    c2 = MV2.equal(
                        sft2[j + k + 1:j + k + 1 + sh[1],
                             i + k + 1:i + k + 1 + sh[2]], keys[ireg])
                    c3 = MV2.equal(
                        sft2[j + k + 1:j + k + 1 + sh[1],
                             i + k + 1:i + k + 1 + sh[2]], mask)
                    c2 = MV2.logical_and(c2, c3)
                    c2 = MV2.logical_and(c2, c0)
                    loop(
                        potential, potential_reg, c2,
                        w4[j + k + 1:j + k + 1 + sh[1],
                           i + k + 1:i + k + 1 + sh[2]], ireg)

        found = MV2.where(MV2.equal(potential[0], -999), found - 1, found)

    out = sumregions(potential_reg, potential)
    out.setAxisList(mask.getAxisList())
    found.setAxisList(mask.getAxisList())
    found = found.astype('i')
    found.missing_value = -999
    found.id = 'found'
    out.id = 'sftbyrgn'
    out = out.astype('i')
    out.missing_value = -999
    del (out.name)
    del (found.name)
    return out, found
Example no. 14
def generateSurfaceTypeByRegionMask(mask,sftbyrgn=None,sftbyrgnmask=215,regions = range(201,223), maximum_regions_per_cell=4,extend_up_to=3,verbose=True):
    """ Maps a "types" dataset onto a landsea mask
    Usage:
    mapped,found = generateSurfaceTypeByRegionMask(mask,sftbyrgn,sftbyrgnmask=None,regions=None,maximum_regions_per_cell=4,extend_up_to=3,verbose=True)
    Input:
    mask : land/sea mask (100/0) or grid onto which you wish to map the regions (a land/sea mask will be generated for you)
    sftbyrgn: mask you wish to map
              if None, uses the internal "sftbyrgn" dataset (old ezget type)
    sftbyrgnmask: land/sea mask for sftbyrgn,
                  or a number giving the threshold value in sftbyrgn
                  that separates land from sea (greater values are land)
    regions: numbers from the sftbyrgn array that you want to map onto mask
    maximum_regions_per_cell: maximum number of regions considered in a cell
    extend_up_to : how many grid cells away from a cell we may extend to identify a guess
    verbose: prints to the screen what's going on (default is True)

    Output:
     mapped : mapped input mask
     found  : number of regions matched for each output cell
    """
    ## OK first determine which regions are available
    ## Must be integer values
    if isinstance(mask, cdms2.grid.TransientRectGrid):
        mask = cdutil.generateLandSeaMask(mask)*100.

    if sftbyrgn is None:
        sftbyrgn = cdms2.open(os.path.join(sys.prefix,'sample_data','sftbyrgn.nc'))('sftbyrgn')
        
    if regions is None:
        if verbose: print 'Preparing regions'
##         regions = range(201,223)

        regions=[]
        for i in range(0,10000):
            genutil.statusbar(i,9999)
            c=float(MV2.sum(MV2.ravel(MV2.equal(sftbyrgn,i)),0))
            if c!=0: regions.append(i)

    if verbose: print 'Regions:',regions
    ## If no mask is passed for sftbyrgn, assume everything greater than 5000 is land
    if isinstance(sftbyrgnmask,int):
        split = sftbyrgnmask
        n=MV2.maximum(mask)
        sftbyrgnmask=MV2.greater_equal(sftbyrgn,sftbyrgnmask)*n
    else:
        split = MV2.maximum(sftbyrgnmask)/2.
    ## Now guess the type for each regions
    keys={}
## ## Nice way to do it
##     for r in regions:
##         c=MV2.not_equal(sftbyrgn,r)
##         c=MV2.masked_where(c,sftbyrgnmask)
##         n=MV2.count(c)
##         c=float(MV2.sum(MV2.ravel(c),0)/n)
##         print r,c,n
##         keys[r]=c
## Fast but not so "general" way to do it
    for r in regions:
        if r< split:
            keys[r]=0.
        else:
            keys[r]=100.
    sh=list(mask.shape)
    sh.insert(0,maximum_regions_per_cell)
    potential=MV2.ones(sh,dtype='d')*-999
    potential_reg=MV2.ones(sh,dtype='d')*-999

    g1=sftbyrgn.getGrid()
    g2=mask.getGrid()
    r1=regrid2.Regridder(g1,g2)
    w=cdutil.area_weights(sftbyrgn)

    if verbose: print 'First pass'
    itmp=0.
    for ireg in keys.keys():
        genutil.statusbar(itmp,len(keys.keys())-1)
        itmp+=1.
        c=MV2.equal(sftbyrgn,ireg)
        w2=1.-c*w
        s2,w3=r1(sftbyrgn,mask=w2.filled(),returnTuple=1)
        c2=MV2.equal(mask,keys[ireg])
        loop(potential,potential_reg,c2,w3,ireg)

    found=MV2.zeros(sh[1:],typecode='f')
    for i in range(maximum_regions_per_cell):
        found=found+MV2.not_equal(potential[i],-999)
    sh2=list(sh)
    for k in range(extend_up_to):
        sh2[1]=sh[1]+2*(k+1)
        sh2[2]=sh[2]+2*(k+1)
        ## Form the possible i/j couples !
        s=MV2.sum(MV2.ravel(MV2.equal(potential[0],-999)),0)
        if verbose: print 'Expanding up to',k+1,'cells while trying to fix',s,'cells'
##         if dump:
##             f=cdms2.open('tmp_'+str(k)+'.nc','w')
##             f.write(sumregions(potential_reg,potential).astype('f'),id='sftbyrgn',axes=mask.getAxisList())
##             f.close()
##         g=sumregions(potential_reg,potential).astype('d')
##         g=MV2.masked_equal(g,-999)
##         g=MV2.greater(g,4999)*100.
##         g=MV2.absolute(mask-g)
##         g=MV2.masked_equal(g,0.)
##         print 'Number of differences:',MV2.count(g)

        if float(s)!=0:
            c0=MV2.equal(potential[0],-999)
            couples=[]
            sft2=MV2.zeros(sh2[1:],dtype='d')-888.
            sft2[k+1:-k-1,k+1:-k-1]=mask
            for i in range(-k-1,k+2):
                for j in range(-k-1,k+2):
                    if abs(i)>k or abs(j)>k: couples.append([i,j])
            ntot=len(keys.keys())*len(couples)-1
            itmp=0
            for ireg in keys.keys():
                c=MV2.equal(sftbyrgn,ireg)
                w2=1.-c*w
                s2,w3=r1(sftbyrgn,mask=w2.filled(),returnTuple=1)
                w4=MV2.zeros(sh2[1:],typecode='d')
                w4[k+1:-k-1,k+1:-k-1]=w3
                for i,j in couples:
                    if verbose: genutil.statusbar(itmp,ntot)
                    itmp+=1.
                    c2=MV2.equal(sft2[j+k+1:j+k+1+sh[1],i+k+1:i+k+1+sh[2]],keys[ireg])
                    c3=MV2.equal(sft2[j+k+1:j+k+1+sh[1],i+k+1:i+k+1+sh[2]],mask)
                    c2=MV2.logical_and(c2,c3)
                    c2=MV2.logical_and(c2,c0)
                    loop(potential,potential_reg,c2,w4[j+k+1:j+k+1+sh[1],i+k+1:i+k+1+sh[2]],ireg)
           
        found=MV2.where(MV2.equal(potential[0],-999),found-1,found)

    out=sumregions(potential_reg,potential)
    out.setAxisList(mask.getAxisList())
    found.setAxisList(mask.getAxisList())
    found=found.astype('i')
    found.missing_value=-999
    found.id='found'
    out.id='sftbyrgn'
    out=out.astype('i')
    out.missing_value=-999
    del(out.name)
    del(found.name)
    return out,found
Example no. 15
             for x in dat2.getLatitude().getBounds()
         ]
         V2 = fo["gw"]
         V2[:] = wgts[:]
         if dat2.ndim > 3:
             dat2 = dat2[0, 0]
         else:
             dat2 = dat2[0]
         if not args.quiet: print "Computing area weights"
         fw = cdms2.open(args.weights)
         area = fw("area_b")
         fw.close()
         if numpy.allclose(area, 0.):
             if not args.quiet:
                 print "area is all zeroes computing it for you"
             area = cdutil.area_weights(dat2) * numpy.pi * 4.
         area = MV2.reshape(area, dat2.shape[-2:])
         V2 = fo["area"]
         V2[:] = area[:]
 else:
     if not args.quiet: print i, NVARS, "Rewriting as is:", V.id
     try:
         V2 = fo[V.id]
         if V2.rank() == 0:
             V2[:] = V.getValue()
         elif V2.id == "time_written":
             d = datetime.datetime.utcnow()
             time_written = "%.2i:%.2i:%.2i" % \
                            (d.hour, d.minute, d.second)
             V2[:] = numpy.array([x for x in time_written])
         elif V2.id == "date_written":
Example no. 16
fnm = os.path.join(data_pth,"ps_ne120.nc")
fnm_grid = os.path.join(data_pth,"ne120np4_pentagons_100310.nc")
f = cdms2.open(fnm)
dat = f('PS')
f.close()

f = cdms2.open(fnm_grid)
area_in = f("grid_area")
area_in.info()

print "SUM IN WTS:",area_in.sum()

f=cdms2.open(os.path.join(sys.prefix,"sample_data","clt.nc"))
out=f("clt",slice(0,1),squeeze=1)

area_out = cdutil.area_weights(out)*5.112E14
area_out.units = "m**2"
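# Note: cdutil.area_weights returns grid-cell fractions of the sphere that sum to 1, so the
# scaling above by ~5.1e14 m**2 (the Earth's surface area) turns the fractions into cell
# areas in m**2; the same trick is used with earthSurfaceAreaKm2 in the basinmask examples.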
print area_out.sum()


import ESMP
mthd = ESMP.ESMP_REGRIDMETHOD_CONSERVE
diags = {
        "srcGridshape":area_in.shape,
        "dstGridshape":area_out.shape,
        "srcAreaFractions" : area_in,
        "dstAreaFractions" : area_out,
        "regridMethod":mthd,
        "staggerLoc" : "center",
        "periodicity":0,
        "coordSys":"deg",
Example no. 17
from vcmq import *
f = cdms2.open(data_sample('uv_pacific.nc'))
u = f('uwnd')
f.close()

# - build a monthly climatology and anomalies
cdutil.setTimeBoundsMonthly(u)                          # the bounds are important (others?)
uclim = cdutil.ANNUALCYCLE.climatology(u)               # climatology
uanom = cdutil.ANNUALCYCLE.departures(u, ref=uclim)     # anomalies
print uclim.std(), uanom.std()
djf = cdutil.times.Seasons('DJF')                       # create a season
udjf = djf(u)                                           # extraction
dfj = cdutil.DJF                                        # predefined seasons already exist

# - averager
ut = cdutil.averager(u, axis='yx',  weights=cdutil.area_weights(u)) # spatial average
help(cdutil.averager)
#  -> try the temporal average
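# a possible answer to the prompt above (the monthly time bounds set earlier make the
# default time weighting work):
ut_time = cdutil.averager(u, axis='t')                  # temporal mean at each grid point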

# - regions and selectors
equator = cdutil.region.domain(lat=(-2, 2))
select = cdms2.selectors.Selector(lon=slice(0, 3), time=('1950', cdtime.comptime(1960)))
print u(equator)(select).shape
#  -> apply this directly when reading the file






# The genutil module: generic utilities
Example no. 18
    def __init__(self, data, weights=None, norm=None, keep_invalids=False,
        minvalid=None, clean_weights=True,
        logger=None, loglevel=None, zerofill=False, **kwargs):

        # Logger
        Logger.__init__(self, logger=logger, loglevel=loglevel, **dict_filter(kwargs, 'log_'))

        # Guess data type and copy
        if cdms2_isVariable(data):
            self.array_type = 'MV2'
            self.array_mod = MV2
            data = data.clone()
        elif npy.ma.isMA(data):
            self.array_type = 'numpy.ma'
            self.array_mod = numpy.ma
            data = data.copy()
        else:
            self.array_type = 'numpy'
            data = data.copy()
            self.array_mod = numpy
        self.data = data
        self.dtype = data.dtype
        data = data.astype('d')


         # Shape
        self.shape = data.shape
        self.ndim = data.ndim
        self.nt = self.shape[0]
        self.nstot = data.size/self.nt
        self.nsdim = data.ndim-1

        # Check time axis
        if cdms2_isVariable(data) and data.getTime() is not None:
            order = data.getOrder()
            if not order.startswith('t'):
                warn('Time axis is not the first axis of input variable (order="%s")'%order)

        # Weights ?
        if weights is None or weights is False:
            if False and weights is not False and data.ndim == 3 and \
                cdms2_isVariable(data) and \
                'x' in data.getOrder() and 'y' in data.getOrder():
                import cdutil# FIXME: WARNING FALSE
                weights = cdutil.area_weights(data[0]).data.astype('d') # Geographic weights
            elif self.nstot==1:
                weights = npy.ones(1)
            else:
                weights = npy.ones(self.shape[1:])
        elif npy.ma.isMA(weights):
            weights = weights.astype('d').filled(0.)
        else:
            weights = npy.asarray(weights, dtype='d')
        if data.ndim>1 and self.shape[1:] != weights.shape:
            self.error('Weights must be of shape %s (instead of %s)'
                %(self.shape[1:],  weights.shape))

        # Store some info
        # - time
        if not cdms2_isVariable(data):
            self.taxis = data.shape[0]
        else:
            self.taxis = data.getAxis(0)
        # - others axes and attributes
        if cdms2_isVariable(data): # cdms -> ids
            self.saxes = data.getAxisList()[1:]
            self.id = data.id
            self.atts =  {}
            for att in data.listattributes():
                self.atts[att] = data.attributes[att]
            self.grid = data.getGrid()
            data = data.asma()
        else: # numpy -> length
            self.saxes = data.shape[1:]
            self.id = None
            self.atts = None
            self.grid = None
        # - missing value
        if npy.ma.isMA(data):
            self.missing_value = data.get_fill_value()
        else:
            self.missing_value = 1.e20
        # - special cases
        for att in 'long_name', 'units':
            if hasattr(data, att):
                setattr(self, att, data.attributes[att])


        # Masking nans
        nans = npy.isnan(data)
        if nans.any():
            self.warning("Masking %i NaNs"%nans.sum())
            if self.array_type == 'numpy':
                self.array_type = 'numpy.ma'
                self.array_mod = numpy.ma
                data = npy.ma.array(data, mask=nans, copy=False)
            else:
                data[nans] = npy.ma.masked
            self.data = data

        # Mask (1 means good)
        # - real good values
        bmask = npy.ma.getmaskarray(data)
        good = 1-bmask.astype('l')
        # - first from data (integrate) => 1D
        count = npy.atleast_1d(good.sum(axis=0))
        del good
        # - now remove channels where weight is zero
        if clean_weights:
            count[npy.atleast_1d(weights==0.)] = 0
        # - check number of valid data along time
        minvalid = kwargs.pop('nvalid', minvalid)
        if minvalid is not None and minvalid < 0:
            minvalid = -int(round(npy.clip(minvalid, -100., 0)*self.nt/100))
        minvalid = npy.clip(int(minvalid), 1, self.nt) if minvalid is not None else 1
        count[count<minvalid] = 0 # <minvalid -> 0
        count = npy.clip(count, 0, 1)
        # - save as 0/1
        self.ns = long(count.sum())
        self.compress = count.size != self.ns
        self.good = count>0 # points in space where there are enough data in time
        self.minvalid = self.nvalid = minvalid

        # Scale unpacked data
        if not self.good.any():
            self.warning('No valid data')
            self.norm = 1.
            self.mean = 0
        else:
            # - mean
            self.mean = data.mean(axis=0)
            # - normalisation factor
            if norm is True or norm is None:
                norm = self.data.std() # Standard norm
            elif norm is not False:
                if norm <0: # Relative norm, else strict norm
                    norm = abs(norm)*self.data.std()
            else:
                norm = 1.
            self.norm = norm
            # - apply
            self.scale(data)

        # Fill data
        # - fill with missing value or mean (0.) where possible
        if minvalid != self.nt:
#            invalids = bmask & self.good # invalids = masked data that will be analyzed
#            data[invalids] = 0. if zerofill else default_missing_value
#            data[invalids] = default_missing_value
            data[:, ~self.good] = default_missing_value
            if keep_invalids:
                self.invalids = bmask & self.good # invalids = masked data that will be analyzed
            else:
                self.invalids = None
                #del invalids
        else:
            self.invalids = None
        # - finally fill with missing values at zero
        if npy.ma.isMA(data):
            data_num = data.filled(default_missing_value)
        else:
            data_num = data

        # Pack
        # - data
        self.packed_data = self.core_pack(data_num, force2d=True)
        self.masked = npy.isclose(self.packed_data, default_missing_value).any()
        # - weights
        self.packed_weights = self.core_pack(weights)
Example no. 19
                raise
            # if m=='FGOALS-gl':
            #    if string.split(timereg(t11)[0],'-')[0]!='1000':
            #        raise
            # else:
            #    if string.split(timereg(t11)[0],'-')[0]!='850':
            #        raise
            # if string.split(timereg(t11)[1],'-')[0]!='1699': raise
            # if string.split(timereg(t11)[1],'-')[0]!='1805': raise
            cdutil.times.setTimeBoundsMonthly(t11)
            print t11.shape
            t1 = MV.zeros((t11.shape[0]), typecode=MV.float32)
            t2 = MV.zeros((t11.shape[0]), typecode=MV.float32)
            # t1z=MV.zeros((t11.shape[0],t11.shape[1]),typecode=MV.float32)
            # wgt = cdutil.area_weights(t11[0,:,:])
            wgt2 = cdutil.area_weights(t11)
            # for i in range(t1.shape[0]):
            #    #print i
            #    t1[i] = cdutil.averager(t11[i,:,:],axis='xy',weight=wgt)
            #    #t1z[i] = cdutil.averager(t11[i,:,:],axis='x',weight=wgt)
            t1 = cdutil.averager(t11, axis="xy", weight=wgt2)
            # raise
            # t1.setAxis(0,t11.getAxis(0))
            t2.setAxis(0, t11.getAxis(0))
            # t1z.setAxis(0,t11.getAxis(0))
            # t1z.setAxis(1,t11.getAxis(1))

            ##############
            ##############
            ##############
            ##############
Example no. 20
try:
    b = numpy.ma.array(a)
    averager(x, axis='tx', weight=['equal', b])
    raise RuntimeError("Test did not fail as it should.")
except AveragerError:
    pass

result = averager(x, axis='tx', weight=['equal', 'equal'])

result = averager(x, axis='2t', weight=['generate', 'equal'])

#**********************************************************************
#
# Create the area weights
#
aw = area_weights(x)
#
#
#**********************************************************************

result = averager(x, axis='x', weight=aw)

result = averager(x, axis='xy', weight=aw)

#
# Now I want the Longitude axis to be area weighted (including any missing data)
# but the latitude axis to be equally weighted
#
result, newwts = averager(x, axis='x', weight=aw, returned=1)
new_result = averager(result, axis='y', weight='equal')
result = averager(x, axis='21', weight=aw)
Example no. 21
         wgts = [numpy.sin(x[1]*numpy.pi/180.) -
                 numpy.sin(x[0]*numpy.pi/180.)
                 for x in dat2.getLatitude().getBounds()]
         V2 = fo["gw"]
         V2[:] = wgts[:]
         if dat2.ndim > 3:
             dat2 = dat2[0, 0]
         else:
             dat2 = dat2[0]
         if not args.quiet: print "Computing area weights"
         fw = cdms2.open(args.weights)
         area = fw("area_b")
         fw.close()
         if numpy.allclose(area,0.):
           if not args.quiet: print "area is all zeroes computing it for you"
           area = cdutil.area_weights(dat2)*numpy.pi*4.
         area = MV2.reshape(area, dat2.shape[-2:])
         V2 = fo["area"]
         V2[:] = area[:]
 else:
     if not args.quiet: print i, NVARS, "Rewriting as is:", V.id
     try:
         V2 = fo[V.id]
         if V2.rank() == 0:
             V2[:] = V.getValue()
         elif V2.id == "time_written":
             d = datetime.datetime.utcnow()
             time_written = "%.2i:%.2i:%.2i" % \
                            (d.hour, d.minute, d.second)
             V2[:] = numpy.array([x for x in time_written])
         elif V2.id == "date_written":