Example #1
 def regrid(self, input):
   axes=input.getAxisList()
   input_id=input.id
   input=input.filled()
   sh=input.shape
   #dest_field=numpy.zeros((n,self.n_b,))
   dest_field = acme_regridder._regrid.apply_weights(input,self.S,self.row,self.col,self.frac_b)
   print "DEST FIELD",dest_field.shape
   dest_field = dest_field.astype(input.dtype)
   dest_field=numpy.ma.masked_where(self.mask_b,dest_field)
   if self.regular:
     sh2=list(sh[:-1])#+[len(self.lats),len(self.lons)]
     sh2.append(len(self.lats))
     sh2.append(len(self.lons))
     dest_field.shape=sh2
     # 'attributes' is a property of the CdmsObj class.  It holds 'external', i.e.
     # 'persistent' attributes, those included when the object is written to a file.
     # An example is 'units'.
     # Note that MV2.array is normally TransientVariable.  So the following line
     # is equivalent to a call of createVariable():
     dest_field=MV2.array(dest_field,id=input_id,attributes=input.attributes)
     dest_field.setAxis(-1,self.lons)
     dest_field.setAxis(-2,self.lats)
     for i in range(len(sh2)-2):
       dest_field.setAxis(i,axes[i])
   else:
     print "The regridder has failed to create a cdms2 object.  It will return",
     print "the regridded data in a raw form."
   return dest_field
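A minimal, self-contained sketch of the point made in the comments above: MV2.array builds a TransientVariable directly, so an id and persistent attributes can be attached at construction time (the field name and units below are made up for illustration).

import numpy
import MV2

# Hypothetical field: the id and the persistent 'attributes' dictionary are passed
# straight to MV2.array, as in the regrid() example above.
field = MV2.array(numpy.zeros((2, 3), dtype="f"), id="tas", attributes={"units": "K"})
print(field.id, field.shape)  # tas (2, 3)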
Example #2
def make_var(lap, id=None, shape=None):
    lap = MV2.array(lap)
    if shape is not None:
        lap = MV2.reshape(lap, shape)
    if id is not None:
        lap.id = id
    return lap
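A short usage sketch for make_var above; the values and the id "tas_anom" are hypothetical, and make_var is assumed to be in scope.

import numpy

# Wrap six values into a 2x3 TransientVariable with a chosen id.
raw = numpy.arange(6, dtype="f")
var = make_var(raw, id="tas_anom", shape=(2, 3))
print(var.id, var.shape)  # tas_anom (2, 3)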
Example #3
 def regrid(self, input):
     axes = input.getAxisList()
     input_id = input.id
     M = input.getMissing()
     if input.mask is numpy.ma.nomask:
         isMasked = False
         input = input.data
     else:
         isMasked = True
         input = input.filled(float(M))
     sh = input.shape
     if isMasked:
         dest_field = \
             metrics.packages.acme_regridder._regrid.apply_weights_masked(
                 input, self.S, self.row, self.col, self.nb, float(M))
     else:
         dest_field = metrics.packages.acme_regridder._regrid.apply_weights(
             input, self.S, self.row, self.col, self.nb)
     if self.mask_b is not False:
         dest_field = numpy.ma.masked_where(self.mask_b, dest_field)
     if self.regular:
         sh2 = list(sh[:-1])
         sh2.append(len(self.lats))
         sh2.append(len(self.lons))
         dest_field.shape = sh2
         dest_field = MV2.array(dest_field, id=input_id)
         dest_field.setAxis(-1, self.lons)
         dest_field.setAxis(-2, self.lats)
         for i in range(len(sh2)-2):
             dest_field.setAxis(i, axes[i])
         if isMasked:
             dest_field.setMissing(M)
     return dest_field
Example #4
def _extremum_(func,ctime,i0,i,var,spline):
    """Extremum possibly using splines"""
    nt = len(var)
    if spline and nt >= 4: # and i != 0 and i != (nt-1)
        if i == 0:
            ifirst, ilast = 0, 4
        elif i == nt-1:
            ifirst, ilast = nt-4, nt
        else:
            icenter = i - int(var[i-1] > var[i+1])
            ifirst = max(icenter-1, 0)
            ilast = ifirst + 4
            if ilast > nt:
                ilast -= 1
                ifirst -= 1
        mn_units = 'minutes since %s'%ctime[i0+ifirst]
        old_rts = cdms2.createAxis(N.asarray([ct.torel(mn_units).value for ct in ctime[i0+ifirst:i0+ilast]],dtype='d'))
        old_var = MV2.array(var[ifirst:ilast], axes=[old_rts], copyaxes=0)
        mn_rts =  cdms2.createAxis(N.arange(int(old_rts[-1]+1),dtype='d'))
        mn_var = interp1d(old_var, mn_rts, method='cubic')
        del old_var, old_rts
#       mn_var = spline_interpolate(old_rts,var[i-1:i+2],mn_rts)
#       mn_var = splev(mn_rts, splrep(old_rts,var[ifirst:ilast]))
        mn_i = func(mn_var)
        val = mn_var[mn_i]
        del mn_var
        this_ctime = cdtime.reltime(mn_i,mn_units).tocomp()
    else:
        this_ctime = ctime[i0+i]
        val = var[i]
    return val,this_ctime
Example #5
def scrap(data, axis=0):
    originalOrder = data.getOrder(ids=True)
    if axis not in ['x', 'y', 'z', 't'] and not isinstance(axis, int):
        order = "({})...".format(axis)
    else:
        order = "{}...".format(axis)
    new = data(order=order)
    axes = new.getAxisList()  # Save for later
    new = MV2.array(new.asma())  # lose dims
    for i in range(new.shape[0] - 1, -1, -1):
        tmp = new[i]
        if not isinstance(tmp, (float, numpy.floating)) and tmp.mask.all():
            a = new[:i]
            b = new[i + 1:]
            if b.shape[0] == 0:
                new = a
            else:
                new = MV2.concatenate((a, b))
    newAxis = []
    for v in new.getAxis(0):
        newAxis.append(axes[0][int(v)])
    ax = cdms2.createAxis(newAxis, id=axes[0].id)
    axes[0] = ax
    new.setAxisList(axes)
    return new(order=originalOrder)
Example #6
def mk_time(offset=0,len=120,units="months since 1800"):
    t=cdms2.createAxis(numpy.arange(offset,offset+len))
    t.designateTime()
    t.id='time'
    t.units=units
    data= MV2.array(numpy.random.random((len)))
    data.setAxis(0,t)
    cdutil.setTimeBoundsMonthly(t)
    return data,t,t.asComponentTime()
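A brief usage sketch of mk_time above; the 24-month length is arbitrary and mk_time is assumed to be in scope.

# 24 months of random data on a monthly time axis with the default
# "months since 1800" units; the third return value is the component-time list.
data, taxis, ctimes = mk_time(offset=0, len=24)
print(data.shape, len(ctimes))  # (24,) 24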
Example #7
def LiftWet(To, Po, Pend, deltaP=1000.0, method=None):
    """
    Lift a parcel moist adiabatically from Po to Pend.
    Initial temperature is To in K.
    To in K
    Po in Pa
    detail # number to increment P slowly
    returns T at all P (every deltaP Pa), and P
    """
    if method is None:
        method = 5  # MUCH faster
    temp = [To]
    pres = [Po]
    while pres[-1] - deltaP >= Pend:
        tmp = temp[-1] - deltaP * gammaw(temp[-1], 100, pres[-1] - deltaP / 2, method=method)
        temp.append(tmp)
        pres.append(pres[-1] - deltaP)
    return MV2.array(temp, id="T"), MV2.array(pres, id="P")
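A hedged usage sketch for LiftWet above; the numbers are arbitrary and the gammaw helper it calls is assumed to be defined in the same module.

# Lift a 300 K parcel from 100000 Pa down to 20000 Pa in 1000 Pa steps.
T_profile, P_profile = LiftWet(300.0, 100000.0, 20000.0, deltaP=1000.0)
print(len(T_profile), float(P_profile[0]), float(P_profile[-1]))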
Example #8
def compute_season(data, season_indices, weights):
    out = numpy.ma.zeros(data.shape[1:], dtype=data.dtype)
    N = 0
    for i in season_indices:
        out += data[i] * weights[i]
        N += weights[i]
    out = MV2.array(out)
    out.id = data.id
    out.setAxisList(data.getAxisList()[1:])
    return out / N
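A plain-NumPy numeric sketch of the weighted average that compute_season builds, using hypothetical DJF indices and month-length weights.

import numpy as np

# Weighted mean of time steps 11, 0, 1 (a DJF season) from a (12, ny, nx) array.
data = np.random.random((12, 3, 4))
weights = np.array([31., 28., 31., 30., 31., 30., 31., 31., 30., 31., 30., 31.])
season_indices = [11, 0, 1]

out = np.zeros(data.shape[1:], dtype=data.dtype)
N = 0.0
for i in season_indices:
    out += data[i] * weights[i]
    N += weights[i]
djf_mean = out / N
print(djf_mean.shape)  # (3, 4)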
Example #9
 def get_mask_from_var(self, var):
     try:
         o_mask = self.file_mask_template.get('sftlf')
     except Exception:
         o_mask = cdutil.generateLandSeaMask(
             var, regridTool=self.regrid_tool).filled(1.) * 100.
         o_mask = MV2.array(o_mask)
         o_mask.setAxis(-1, var.getLongitude())
         o_mask.setAxis(-2, var.getLatitude())
     return o_mask
Example #10
    def rank_nD(self, data, axis=0):
        if axis not in [0, 1]:
            if not isinstance(axis, str):
                raise ValueError('Ranking error, axis can only be 0 or 1 or an axis name')
            else:
                nms = data.getAxisIds()
                for i in range(len(nms)):
                    nm = nms[i]
                    if axis in nm.split('___'):
                        axis = i
                if axis not in [0, 1]:
                    raise ValueError('Ranking error, axis can only be 0 or 1 or an axis name')

        if axis != 0:
            data = data(order=(str(axis) + '...'))
        a0 = MV2.argsort(data.filled(1.E20), axis=0)
        n = a0.shape[0]
        b = MV2.zeros(a0.shape, MV2.float)
        sh = a0[1].shape
        for i in range(n):
            Indx = MV2.ones(sh) * i
            c = MV2.array(a0[i].filled(n - 1))
            b = genutil.arrayindexing.set(b, c, Indx)
        m = data.mask
        if m is not None:
            b = MV2.masked_where(m, b)
        else:
            b = MV2.array(b)
        n = MV2.count(b, 0)
        n.setAxisList(b.getAxisList()[1:])
        b, n = genutil.grower(b, n)
        b = 100. * b / (n - 1)
        b.setAxisList(data.getAxisList())
        if axis != 0:
            st = ''
            for i in range(axis):
                st += str(i + 1)
            st += '0...'
            data = data(order=st)
            b = b(order=st)
        return b
Example #11
def symetrick(slab,axis='y'):
    """ Breaks a field into the symetric and antisymetric components
    symetric component goes into first half,
    antisymetric into the second half
    """
    if type(axis)==type(1):
        ax=slab.getAxis(axis)
    else:
        if axis=='x':
            ax=slab.getLongitude()
        elif axis=='y':
            ax=slab.getLatitude()
        elif axis=='z':
            ax=slab.getLevel()
        elif axis=='t':
            ax=slab.getTime()
        else:
            axs=slab.getAxisIndex(axis)
            if axs==-1:
                raise ValueError('Error: could not find axis: '+axis)
            ax=slab.getAxis(axs)
    if ax is None:
        raise ValueError('Error Axis: '+str(axis)+' does not exist (getAxis returned None)')

    n=len(ax)
    ax1=ax[0]
    ax2=ax[-1]
    ## Gets first and second half of the dataset
    tmp=MV2.array(slab*1.,copy=1)
    tmp.setAxisList(slab.getAxisList())
    tmp=tmp(order=str(axis)+'...')
    if n%2==0:
        H1=tmp[:n/2]
    else:
        H1=tmp[:n/2+1]
    H2=tmp[n/2:]
    H1=H1[::-1]
    sym=(H1+H2)/2.
    anti=(H2-H1)/2.
    if n%2==0:
        tmp[:n/2]=sym[::-1]
        tmp[n/2:]=anti
    else:
        tmp[:n/2+1]=sym[::-1]
        tmp[n/2+1:]=anti[1:]
    sh=tmp.shape
    for i in range(1,len(sh)):
        tmp.setAxis(i,H1.getAxis(i))
    tmp.setAxis(0,ax)
    tmp.id=slab.id
    for a in slab.attributes.keys():
        setattr(tmp,a,getattr(slab,a))
    return tmp(order=slab.getOrder(ids=1))
Example #12
    def rank(self, data, axis=0):
        if axis not in [0, 1]:
            if not isinstance(axis, str):
                raise ValueError('Ranking error, axis can only be 0 or 1 or an axis name')
            else:
                nms = data.getAxisIds()
                for i in range(len(nms)):
                    nm = nms[i]
                    if axis in nm.split('___'):
                        axis = i
                if axis not in [0, 1]:
                    raise ValueError('Ranking error, axis can only be 0 or 1 or an axis name')
        if data.ndim > 2:
            raise "Ranking error, array can only be 2D"

        if axis == 1:
            data = MV2.transpose(data)
        a0 = MV2.argsort(data.filled(1.E20), axis=0)
        n = a0.shape[0]
        b = MV2.zeros(a0.shape, MV2.float)
        sh = a0[1].shape
        for i in range(n):
            Indx = MV2.ones(sh) * i
            c = MV2.array(a0[i].filled(n - 1))
            b = genutil.arrayindexing.set(b, c, Indx)

        m = data.mask
        if m is not None:
            b = MV2.masked_where(m, b)
        else:
            b = MV2.array(b)
        n = MV2.count(b, 0)
        n.setAxis(0, b.getAxis(1))
        b, n = genutil.grower(b, n)
        b = 100. * b / (n - 1)
        b.setAxisList(data.getAxisList())
        if axis == 1:
            b = MV2.transpose(b)
            data = MV2.transpose(data)
        return b
Example #13
def doMyNcea(variable, dataMin, dataMax, path, infile, outdir, outfile):
    
    # initialise
    thisfile=cdms2.open(path+infile[0],'r')
    if not (variable in thisfile.variables.keys()):
        exitMessage('Key {0} not found in keys list: {1}. Exit(20).'.format(variable, thisfile.variables.keys()), 20)
    dimensions=thisfile.variables[variable][:].shape
    accum = numpy.zeros( (dimensions[0], dimensions[1]*dimensions[2] ))
    counter = numpy.zeros( (dimensions[1] * dimensions[2]) )
    minimum = numpy.ones((dimensions[0], dimensions[1]*dimensions[2] ) ) * dataMax
    maximum = numpy.ones((dimensions[0], dimensions[1]*dimensions[2] ) ) * dataMin

    refGrid=thisfile.variables[variable].getGrid()
    refAxis=thisfile.variables[variable].getAxisList()
    thisfile = None

    for ii in range(0,1): #range(0, len(infile)):
        print 'processing file {0}'.format(ii)
        thisfile = cdms2.open(path+infile[ii],'r')
        data = numpy.array(thisfile.variables[variable][:])
        print '\tmask'
        wtk = numpy.ravel( ( numpy.ravel(data[0,:]) > dataMin ) * ( numpy.ravel(data[0,:]) < dataMax ))
        if wtk.any():
            print '\tsumming'
            data = numpy.reshape(data, (dimensions[0], dimensions[1]*dimensions[2] )) 
            accum[ :, wtk ] = accum[ :, wtk ] + data[ :, wtk ]
            print '\tcounting'
            counter[ wtk ] = counter[ wtk ] + 1
            print '\tminimum detection'
            minimum[ :, wtk ] = numpy.minimum( minimum[:, wtk], data[:, wtk] )
            print '\tmaximum detection'
            maximum[ :, wtk ] = numpy.maximum( maximum[:, wtk], data[:, wtk] )
        thisfile=None
        

    wta = counter>0
    if wta.any():
        print accum.shape,counter.shape, wta.shape
        for ii in range(dimensions[0]):
            accum[ii, wta] = accum[ii, wta] / counter[wta]

        outfile = cdms2.open(outdir+'avg_'+outfile,'w')
        accum = accum.reshape(dimensions)
 
        myvar=MV2.array(accum).astype(numpy.float32)
        myvar.id='avg_{0}'.format(variable)
        myvar.setAxisList((refAxis,))
        outfile.write(myvar)

        
        outfile.close()
Example #14
 def _dump_array_(self, f, var, id, axis):
     """Dump an array or a list of them in a nctdf file"""
     istime = isinstance(var, list)
     ishist = 'hist' in id
     var = self._asarray_(var)
     var = MV2.array(var, copy=0, id=id)
     var.setAxis(int(not istime and ishist), axis)
     if ishist:
         var.setAxis(istime, self._baxis)
     if var.dtype.char in 'fd':
         var.set_fill_value(1e20)
     else:
         var.set_fill_value(-1)
     return f.write(var, extend=0)
Example #15
def zeros(var, ref='mean',mean=None, getref=True, **kwargs):
    """Get the zeros of a tidal signal

    :Returns: A :mod:`cdms2` variable of signs (-1,1) with a time axis

    :Usage:

    >>> tidal_zeros = zeros(sea_level,ref='demerliac')
    >>> print tidal_zeros[0:1]
    >>> print tidal_zeros[0:1].getTime().asComponentTime()
    """
    # Get anomaly
    ref = kwargs.pop('reference', ref)
    vara, varref = _get_anomaly_(var, ref=ref,mean=mean)
    taxis = vara.getTime()
    vara = vara.filled()
    longref = hasattr(varref, '__len__')

    # Find indices
    sign = N.sign(vara)
    izeros =  N.arange(len(vara)-1).compress(sign[:-1]!=sign[1:])

    # Interpolate
    units = taxis.units
    times = taxis.getValue()
    zeros = N.zeros((len(izeros), ))
    if getref:
        ret = MV2.zeros(len(zeros), id='zeros')
        if not longref:
            ret[:] = varref
    for i, i0 in enumerate(izeros):
        dv = vara[i0+1]-vara[i0]
        zeros[i] = times[i0]*vara[i0+1]/dv - times[i0+1]*vara[i0]/dv
        if getref and longref:
            dt = times[i0+1]-times[i0]
            ret[i] = varref[i0]*vara[i0+1]/dv - varref[i0+1]*vara[i0]/dv

    # Format
    if not getref:
        ret = MV2.array(sign[izeros], id='zeros')
        ret.units = '1 up and -1 down'
    else:
        cp_atts(var, ret)
    ret.long_name = 'Zeros'
    zeros = create_time(zeros, units)
    ret.setAxis(0, zeros)
    return ret
Example #16
    def compute(self):
        # *** IMPORTANT ***
        # Once someone figures out how to pass the tvariable object to this
        # module, none of the computation in this method is necessary.
        
        # Check ports
        if not self.hasInputFromPort('cdmsfile'):
            raise ModuleError(self, "'cdmsfile' is mandatory.")
        if not self.hasInputFromPort('id'):
            raise ModuleError(self, "'id' is mandatory.")

        # Get input from ports
        cdmsfile = self.getInputFromPort('cdmsfile')
        id = self.getInputFromPort('id')
        axes = self.forceGetInputFromPort('axes') # None if no input
        axesOperations = self.forceGetInputFromPort('axesOperations') # None if no input

        # Get the variable
        varType = self.getVarType(id, cdmsfile)
        if (varType == 'variable'):
            var = cdmsfile.__call__(id)
        elif (varType == 'axis'):
            varID = self.getAxisID(id)            
            axis = getattr(cdmsfile, 'axes')[varID]
            var = MV2.array(axis)
            var.setAxis(0, axis)
        elif (varType == 'weighted-axis'):
            varID, axisID = self.getVarAndAxisID(id)
            var = cdmsfile.__call__(varID)            
            var = genutil.getAxisWeightByName(var, axisID)
            var.id = varID +'_' + axisID + '_weight'
        else:
            var = None

        # Eval the variable with the axes
        if axes is not None and var is not None:
            try:
                kwargs = eval(axes)
                var = var(**kwargs)
            except:
                raise ModuleError(self, "Invalid 'axes' specification", axes)

        # Apply axes ops to the variable
        if axesOperations is not None:
            var = self.applyAxesOperations(var, axesOperations)

        self.setResult('variable', var)
Example #17
def get_EOFs_predictors(vORG, choice, num_eof, flag_mask_MOD, selec):
    if choice == 'HIS_v' or choice == 'SSP_v':
        totnum = int(vORG.shape[0])
        for i in range(totnum):
            if i == 0:
                vORG_new = vORG[i]
            else:
                vORG_new = np.r_[vORG_new, vORG[i]]
        vORG_new = MV.array(vORG_new)
        vORG_new.setAxis(0, set_time_axis(vORG_new.shape[0]))
        vORG_new.setAxis(1, vORG.getAxis(2))
        vORG_new.setAxis(2, vORG.getAxis(3))
    else:
        vORG_new = vORG
    EOFs_2D, EOFs_spat = get_eofs(vORG_new, num_eof, scaling_factor,
                                  flag_mask_MOD, selec)
    return EOFs_2D, EOFs_spat
Example #18
    def compute(self):
        # *** IMPORTANT ***
        # Once someone figures out how to pass the tvariable object to this
        # module, none of the computation in this method is necessary.
        
        # Check ports
#        if not self.hasInputFromPort('cdmsfile'):
#            raise ModuleError(self, "'cdmsfile' is mandatory.")
#        if not self.hasInputFromPort('id'):
#            raise ModuleError(self, "'id' is mandatory.")

        # Get input from ports
        if self.hasInputFromPort('inputVariable'):
            var = self.getInputFromPort('inputVariable')
        else:    
            if self.hasInputFromPort('cdmsfile'):
                cdmsfile = self.getInputFromPort('cdmsfile')
            if self.hasInputFromPort('id'):
                id = self.getInputFromPort('id')
            # Get the variable
            varType = self.getVarType(id, cdmsfile)
            if (varType == 'variable'):
                var = cdmsfile.__call__(id)
            elif (varType == 'axis'):
                varID = self.getAxisID(id)            
                axis = getattr(cdmsfile, 'axes')[varID]
                var = MV2.array(axis)
                var.setAxis(0, axis)
            elif (varType == 'weighted-axis'):
                varID, axisID = self.getVarAndAxisID(id)
                var = cdmsfile.__call__(varID)            
                var = genutil.getAxisWeightByName(var, axisID)
                var.id = varID +'_' + axisID + '_weight'
            else:
                var = None


        axes = self.forceGetInputFromPort('axes') # None if no input
        axesOperations = self.forceGetInputFromPort('axesOperations') # None if no input
        # Eval the variable with the axes
        if axes is not None and var is not None:
            try:
                var = eval("var(%s)"%axes)
            except Exception, e:
                raise ModuleError(self, "Invalid 'axes' specification: %s"%str(e))
Example #19
 def class_cal_loo(self, X_idx, Y, tot, add_const):
     loo = LeaveOneOut()
     test_Y, stds_Y, real_Y = [], [], []
     vORG = self.vORG
     HISm_mean = self.HISm_mean
     HISm_spat = self.HISm_spat
     lat, lon = vORG.getAxis(2), vORG.getAxis(3)
     if tot == 16:
         # Remove NESM3
         vORG = np.delete(vORG, 15, axis=0)
         vORG = MV.array(vORG)
         vORG.setAxis(2, lat)
         vORG.setAxis(3, lon)
         HISm_mean = np.delete(HISm_mean, 15, axis=0)
         HISm_spat = np.delete(HISm_spat, 15, axis=0)
     for train_index, test_index in loo.split(np.arange(tot)):
         num = train_index.shape[0]
         if X_idx == 'STD_only': X = np.std(HISm_mean, axis=1)
         if X_idx == 'STD_EOFs':
             EOFreturn = self.class_cal_eofs(vORG, HISm_mean, HISm_spat,
                                             test_index)
             std_HISm = EOFreturn['std_HISm']
             std_EOFsOnHISm = EOFreturn['std_EOFsOnHISm']
             X = np.c_[std_HISm, std_EOFsOnHISm[:, 1:-1]]
         train_X, train_Y = X[train_index], Y[train_index]
         tests_X, tests_Y = X[test_index], Y[test_index]
         if add_const == 0: train_X, tests_X = train_X, tests_X
         if add_const == 1:
             train_X, tests_X = np.c_[np.ones(num), train_X], np.c_[1,
                                                                    tests_X]
         central_regressions = sm.OLS(train_Y, train_X).fit()
         central_predictions = central_regressions.get_prediction(tests_X)
         predicted_val = central_predictions.predicted_mean
         predicted_std = central_predictions.se_obs
         test_Y.append(float(predicted_val))
         stds_Y.append(float(predicted_std))
         real_Y.append(float(tests_Y))
     test_Y2, real_Y2 = np.array(test_Y), np.array(real_Y)
     mean_Y = np.mean(real_Y)
     mse_REA = np.sum((real_Y2 - mean_Y)**2) / real_Y2.shape[0]
     mse_TES = np.sum((real_Y2 - test_Y2)**2) / test_Y2.shape[0]
     SR = mse_TES / mse_REA
     R2 = stats.pearsonr(real_Y2, test_Y2)[0]**2
     return SR, R2, test_Y2, stds_Y
Example #20
def all_clim_sens():
    """ get the climate sensitivity"""
    curdir=__file__.split("CMIP5_tools.py")[0]
    cs = open(curdir+"clim_sens.txt")
    lns = cs.readlines()
    cs.close()
    models = np.array([x.split("\t")[0].replace("_","-") for x in lns[2:]])
    newmodels = []
    sens = [x.split("\t")[2].split("\n")[0] for x in lns[2:]]
    thesens = []
    for i in range(len(sens)):
        ecs = sens[i]
        if ecs != "":
            
            thesens += [float(ecs)/2.]
            newmodels += [models[i]]
    thesens = MV.array(thesens)
    thesens.setAxis(0,make_model_axis(newmodels))        
    return thesens
Example #21
def pressure_to_plevs(var, plev):
    """Convert from pressure coordinate to desired pressure level(s)."""
    # Construct pressure level for interpolation
    var_plv = var.getLevel()
    levels_orig = MV2.array(var_plv[:])
    levels_orig.setAxis(0, var_plv)
    # grow 1d levels_orig to the variable's dimensions
    var, levels_orig = genutil.grower(var, levels_orig)
    # levels_orig.info()
    # logLinearInterpolation only takes positive down plevel:
    # "I :      interpolation field (usually Pressure or depth)
    # from TOP (level 0) to BOTTOM (last level), i.e P value
    # going up with each level"
    if var.getLevel()[0] > var.getLevel()[-1]:
        var = var(lev=slice(-1, None, -1))
        levels_orig = levels_orig(lev=slice(-1, None, -1))
    var_p = cdutil.vertical.logLinearInterpolation(
        var(squeeze=1), levels_orig(squeeze=1), plev)
    return var_p
Example #22
def correct_phase(P,reference = None):
    if reference is None:
        amp_solar, phase_solar = get_insolation()
        if phase_solar.shape != P.shape:
            grid = P.getGrid()
            phase_solar = phase_solar.regrid(grid,regridTool='regrid2')
        reference = phase_solar
    
    #print "got insolation"
    Convert2Day = np.vectorize(subtract_months)
    #print "vectorized"
    forward,backward = Convert2Day(P,reference)
    #print "converted to days"
    Merged = merge(forward,backward)
    #print "Merged"
    Pnew = fix_all_bad(Merged)
    #print "fixed discontinuities"
    Pnew = MV.array(Pnew)
    Pnew.setAxisList(P.getAxisList())
    return Pnew
Example #23
    def noise(self, season, start_time=None, init=10):
        sigmas = []
        if start_time is None:
            start_time = cdtime.comptime(1980, 1, 1)
        end_time = cdtime.comptime(2100, 12, 31)
        trend_end = start_time.add(init, cdtime.Years)

        nsig = end_time.year - trend_end.year + 1
        concatenated = self.concatenate_piControl(season)
        Ls = np.arange(nsig) + init
        for L in Ls:
            distr = bootstrap_slopes(concatenated, L)
            #distr=fast_slopes(concatenated,L)
            sigmas += [np.std(distr)]
        sigmas = MV.array(sigmas)
        tax = cdms.createAxis(np.arange(nsig) + init + start_time.year)
        tax.id = 'time'
        tax.units = 'years since ' + str(start_time)
        sigmas.setAxis(0, tax)
        return sigmas
Example #24
    def testVCS1DBoxfill(self):
        data = MV2.array([4, 5, 6, 7, 1, 3, 7, 9, ]) + 230.

        p = cdms2.createAxis([2, 5, 100, 200, 500, 800, 850, 1000])

        data.setAxis(0, p)

        data.id = "jim"

        gm = self.x.create1d()

        gm.linewidth = 0
        gm.datawc_x1 = 1000
        gm.datawc_x2 = 0

        gm.markersize = 30

        self.x.plot(data, gm, bg=self.bg)

        fnm = "test_vcs_1d_marker_not_shown_if_xaxis_flipped.png"
        self.checkImage(fnm)
Example #25
 def test2D(self):
     a = MV2.array(range(6))
     a = MV2.resize(a, (2, 3))
     ax1 = cdms2.createAxis(["A", "B"], id="UPPER")
     ax2 = cdms2.createAxis(["a", "b", "c"], id="lower")
     a.setAxis(0, ax1)
     a.setAxis(1, ax2)
     jsn, struct = MV2Json(a)
     self.assertEqual(jsn, {
         'A': {
             'a': 0,
             'b': 1,
             'c': 2
         },
         'B': {
             'a': 3,
             'b': 4,
             'c': 5
         }
     })
     self.assertEqual(struct, ['UPPER', 'lower'])
Example #26
    def plot_model_trends(self, ax=None, legend=False, change_units=False):
        months = [
            "JAN", "FEB", "MAR", "APR", "MAY", "JUN", "JUL", "AUG", "SEP",
            "OCT", "NOV", "DEC"
        ]
        if ax is None:
            fig = plt.figure()
            ax = plt.subplot(111)

        test = self.ensemble_average("ssp585")
        if change_units:
            if self.variable in ["mrso", "mrsos"]:
                test = self.standardize_zscore(test)
            else:
                test = self.convert_to_percentage(test)
        nmod = test.shape[0]
        trends = np.zeros((nmod, 12))
        models = cmip5.models(test)
        for i in range(12):
            month = months[i]
            mdat = getattr(cdutil, month).departures(test)

            trends[:, i] = cmip5.get_linear_trends(mdat)
        trends = MV.array(trends,
                          mask=cdutil.ANNUALCYCLE.climatology(test).mask)

        trends.setAxis(0, test.getAxis(0))
        for i in range(nmod):
            model = models[i]
            c = get_model_colors(model)
            ls = get_model_ls(model)
            ax.plot(np.arange(12), trends[i].asma(), c=c, ls=ls, label=model)

        ax.set_xticks(np.arange(12))
        ax.set_xticklabels(months)
        ax.set_ylabel(self.variable)
        #ax.set_title(region)
        ax.axhline(0, ls=":", lw=.5, c="k")
        if legend:
            plt.legend(fontsize=6, ncol=2)
Example #27
def reconstructPressureFromHybrid(ps, A, B, Po):
    """
    Reconstruct the Pressure field on sigma levels, from the surface pressure
    
    Input
    Ps      : surface pressure
    A, B, Po: hybrid conversion coefficients, such that p = B*Ps + A*Po
    A and B are 1D (sigma levels); Po and Ps must have the same units
    
    Output
    Pressure field
    Such as P=B*Ps+A*Po

    Example
    P=reconstructPressureFromHybrid(ps,A,B,Po)
    """
    # Compute the pressure for the sigma levels
    ax1 = ps.getAxisList()
    ax2 = A.getAxisList()
    sh = list(ps.shape)
    sh.insert(1, len(A))
    dummy = numpy.zeros(sh)

    p = MV2.array(
        numbaReconstruct4D(ps.filled(), A.filled(), B.filled(), Po, dummy))
    p.setAxisList((ax1[0], ax2, ax1[1], ax1[3]))

    p.id = 'P'
    try:
        p.units = ps.units
    except:
        pass
    t = ps.getTime()
    if not t is None:
        p = p(order='tz...')
    else:
        p = p(order='z...')
    return p
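A self-contained NumPy sketch of the hybrid relation from the docstring, P = B*Ps + A*Po, with made-up coefficients and grid sizes; it only illustrates the broadcasting, not the numbaReconstruct4D path used above.

import numpy

A = numpy.linspace(0.0, 0.2, 17)        # hybrid A coefficients, one per level (hypothetical)
B = numpy.linspace(1.0, 0.0, 17)        # hybrid B coefficients, one per level (hypothetical)
Po = 100000.0                           # reference pressure, same units as ps
ps = numpy.full((10, 20), 101325.0)     # surface pressure on a 10 x 20 grid

# P = B*ps + A*Po, broadcast so the level axis comes first: (lev, lat, lon)
p = B[:, None, None] * ps[None, :, :] + A[:, None, None] * Po
print(p.shape)  # (17, 10, 20)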
Example #28
def read_json_and_merge_axes(stat, modes):
    model_run_list = []
    mode_season_list = []
    a = []
    for mode in modes:
        input_file = 'var_mode_' + mode + '_EOF1_stat_cmip5_historical_mo_atm_1900-2005_adjust_based_tcor_pseudo_vs_model_pcs.json'
        # open json
        with open(os.path.join('json_files', input_file)) as f:
            d = json.load(f)
        # Get potential x-axis first
        if mode == modes[0]:
            models_list = sorted(list(d["RESULTS"].keys()))
            for model in models_list:
                runs_list = sort_human(list(d["RESULTS"][model].keys()))
                for run in runs_list:
                    model_run_list.append(model + '_' + run)
            print(model_run_list)
        # season depending on mode
        if mode == 'PDO':
            seasons = ['monthly']
        else:
            seasons = ['DJF', 'MAM', 'JJA', 'SON']
        # season loop
        for season in seasons:
            mode_season_list.append(mode + '_' + season)
            for model_run in model_run_list:
                model = model_run.split('_')[0]
                run = model_run.split('_')[-1]
                try:
                    a.append(d["RESULTS"][model][run]["defaultReference"][mode]
                             [season][stat])
                except:
                    #a.append(-1.e20)
                    a.append(np.nan)
    # convert to array and decorate axes
    a = np.array(a).reshape(len(mode_season_list), len(model_run_list))
    X = cdms2.createAxis(model_run_list)
    Y = cdms2.createAxis(mode_season_list)
    a = MV2.array(a, axes=(Y, X), id=stat)
    return a
Example #29
def reconstructPressureFromHybrid(ps,A,B,Po):
    """
    Reconstruct the Pressure field on sigma levels, from the surface pressure
    
    Input
    Ps      : surface pressure
    A, B, Po: hybrid conversion coefficients, such that p = B*Ps + A*Po
    A and B are 1D (sigma levels); Po and Ps must have the same units
    
    Output
    Pressure field
    Such as P=B*Ps+A*Po

    Example
    P=reconstructPressureFromHybrid(ps,A,B,Po)
    """
    # Compute the pressure for the sigma levels
    ax1 = ps.getAxisList()
    ax2 = A.getAxisList()
    sh = list(ps.shape)
    sh.insert(1,len(A))
    dummy= numpy.zeros(sh)


    p = MV2.array(numbaReconstruct4D(ps.filled(),A.filled(),B.filled(),Po,dummy))
    p.setAxisList((ax1[0],ax2,ax1[1],ax1[3]))

    p.id='P'
    try:
      p.units=ps.units
    except:
      pass
    t=ps.getTime()
    if not t is None:
      p=p(order='tz...')
    else:
      p=p(order='z...')
    return p
Example #30
def dLatitude(var, R=6371000, verbose=True):
    '''Return a slab of latitudinal increment (meter) delta_y.

    Args:
        var (cdms.TransientVariable): variable from which latitude axis is
            obtained;

    Kwargs:
        R (float): radius of Earth;

    Returns:
        delta_y (ndarray): 2d-array, latitudinal grid lengths.
    '''

    latax = var.getLatitude()
    lonax = var.getLongitude()

    if latax is None:
        raise Exception("<var> has no latitude axis.")
    if lonax is None:
        raise Exception("<var> has no longitude axis.")

    #---------Get axes and bounds-------------------
    latax_bounds = latax.getBounds()

    delta_y = []

    for ii in range(len(latax)):
        d_theta = abs(latax_bounds[ii][0] - latax_bounds[ii][1]) * np.pi / 180.
        dy = R * d_theta
        delta_y.append(dy)

    #-------Repeat array to get slab---------------
    delta_y = MV.array(delta_y)
    delta_y = MV.reshape(delta_y, (len(latax), 1))
    delta_y = MV.repeat(delta_y, len(lonax), axis=1)
    delta_y.setAxisList((latax, lonax))

    return delta_y
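A quick numeric check of the dy = R*d_theta step above: a 1-degree latitude increment on the default Earth radius is roughly 111 km.

import numpy as np

R = 6371000.0                    # Earth radius used by dLatitude, in meters
d_theta = 1.0 * np.pi / 180.0    # a 1-degree latitude cell, in radians
dy = R * d_theta
print(round(dy / 1000.0, 1))     # ~111.2 (km)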
Example #31
def by_month(X):
    time_i = X.getAxisIds().index('time')
    nm = X.shape[time_i]
    nyears = int(nm / 12)
    newtime = (nyears, 12)
    d = {}
    for i in range(len(X.shape)):
        d[i] = X.shape[i]
    d[time_i] = newtime
    #now make the new shape
    newshape = ()
    for i in range(len(X.shape)):
        x = d[i]
        if type(x) == type(()):
            newshape += x
        else:
            newshape += (x, )
    Xr = MV.array(X.asma().reshape(newshape))
    axlist = range(len(X.shape))
    for i in axlist:
        if i != time_i:
            Xr.setAxis(i, X.getAxis(i))
    monthax = cdms.createAxis(np.arange(12) + 1)
    monthax.id = "months"
    monthax.months = str([
        "JAN", "FEB", "MAR", "APR", "MAY", "JUN", "JUL", "AUG", "SEP", "OCT",
        "NOV", "DEC"
    ])
    Xr.setAxis(time_i + 1, monthax)

    yearax = cdms.createAxis(X.getTime()[6::12])
    for att in X.getTime().attributes:
        setattr(yearax, att, X.getTime().attributes[att])

    yearax.id = "time"
    yearax.designateTime()
    Xr.setAxis(time_i, yearax)

    return Xr
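A plain-NumPy illustration of the reshape by_month applies to the time axis: a 24-month series becomes (2 years, 12 months), so it can then be indexed as [year, month].

import numpy as np

monthly = np.arange(24.0)                # two years of monthly values
by_year_month = monthly.reshape((2, 12))
print(by_year_month[1, 0])               # JAN of the second year -> 12.0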
Example #32
 def _get(self):
     if 'relative' in self.portrait_types.keys():
         d = self.portrait_types['relative']
         vals = d[1]
         real_value = getattr(self, d[0])
         real = self.__get()
         setattr(self, d[0], vals[0])
         a0 = self.__get()
         sh = list(a0.shape)
         sh.insert(0, 1)
         a0 = MV2.reshape(a0, sh)
         for v in vals[1:]:
             setattr(self, d[0], v)
             tmp = self.__get()
             tmp = MV2.reshape(tmp, sh)
             a0 = MV2.concatenate((a0, tmp))
         a0 = MV2.sort(a0, 0).filled()
         real2 = real.filled()
         a0 = MV2.reshape(a0, (a0.shape[0], sh[1] * sh[2]))
         real2 = MV2.reshape(real2, (sh[1] * sh[2], ))
         a0 = MV2.transpose(a0)
         indices = []
         for i in range(len(real2)):
             indices.append(MV2.searchsorted(a0[i], real2[i]))
         indices = MV2.array(indices)
         indices = MV2.reshape(indices, (sh[1], sh[2]))
         if not ((real.mask is None) or (real.mask is MV2.nomask)):
             indices = MV2.masked_where(real.mask, indices)
         a = MV2.masked_equal(a0, 1.e20)
         a = MV2.count(a, 1)
         a = MV2.reshape(a, indices.shape)
         indices = indices / a * 100
         setattr(self, d[0], real_value)
         indices.setAxisList(real.getAxisList())
         ##             print indices.shape
         return indices
     else:
         return self.__get()
Example #33
    def gather(self,array,pos=None,root=0):

        lst = comm.gather(array)
        lst2 = comm.gather(self)
        if pos is None:
            pos=self.pos
            
        if self.n == root:
            sh=[]
            axes=[]
            j=0
            for i,a in enumerate(self.axes):
                if i in pos:
                    sh.append(len(a))
                    axes.append(self.axes[i])
                else:
                    sh.append(array.shape[i])
                    axes.append(array.getAxis(i))
                
                    
            out=numpy.zeros(sh)
            for i in range(self.N):
                slices=lst2[i].getSlices()
                print i,slices
                Slices=[]
                k=0
                for j in range(len(self.axes)):
                    print k,j,pos
                    if j in pos:
                        Slices.append(slices[k])
                        k+=1
                    else:
                        Slices.append(slice(0,None))
                out[tuple(Slices)]=lst[i]
            out=MV2.array(out)
            out.setAxisList(axes)
            out.id=array.id
            return out
Example #34
    def ensemble_average(self, experiment):
        if type(experiment) == type("string"):
            self.get_ensemble(experiment)
            data = getattr(self, experiment)
        else:
            data = experiment
        nens, ntime = data.shape
        #models=sorted(self.ensemble_dict.keys())
        models = get_ok_models(self.region)

        nmod = len(models)
        # print("Number of models is", nmod)
        EnsembleAverage = np.ma.zeros((nmod, ntime)) + 1.e20
        fnames = np.array(
            get_ensemble_filenames(self.variable, self.region, experiment))
        counter = 0
        for model in models:
            #fnames=np.array(get_ensemble_filenames(self.variable,self.region,experiment))
            I = np.where([x.split(".")[2] == model for x in fnames])[0]
            if len(I) > 0:
                EnsembleAverage[counter] = np.ma.average(data.asma()[I],
                                                         axis=0)
            else:
                if self.verbose:
                    print("missing data for " + model + " " + self.variable +
                          " " + experiment)
            counter += 1
        EnsembleAverage = MV.masked_where(
            np.abs(EnsembleAverage) > 1.e10, EnsembleAverage)
        EnsembleAverage = MV.masked_where(np.isnan(EnsembleAverage),
                                          EnsembleAverage)
        EnsembleAverage = MV.array(EnsembleAverage)

        EnsembleAverage.setAxis(1, data.getTime())
        modax = cmip5.make_model_axis(models)
        EnsembleAverage.setAxis(0, modax)
        cdutil.setTimeBoundsMonthly(EnsembleAverage)
        return EnsembleAverage
Example #35
    def _get(self):
        if 'relative' in self.portrait_types.keys():
            d=self.portrait_types['relative']
            vals=d[1]
            real_value=getattr(self,d[0])
            real=self.__get()
            setattr(self,d[0],vals[0])
            a0=self.__get()
            sh=list(a0.shape)
            sh.insert(0,1)
            a0=MV2.reshape(a0,sh)
            for v in vals[1:]:
                setattr(self,d[0],v)
                tmp=self.__get()
                tmp=MV2.reshape(tmp,sh)
                a0=MV2.concatenate((a0,tmp))
            a0=MV2.sort(a0,0).filled()
            real2=real.filled()
            a0=MV2.reshape(a0,(a0.shape[0],sh[1]*sh[2]))
            real2=MV2.reshape(real2,(sh[1]*sh[2],))
            a0=MV2.transpose(a0)
            indices=[]
            for i in range(len(real2)):
                indices.append(MV2.searchsorted(a0[i],real2[i]))
            indices=MV2.array(indices)
            indices=MV2.reshape(indices,(sh[1],sh[2]))
            if not ((real.mask is None) or (real.mask is MV2.nomask)):
                indices=MV2.masked_where(real.mask,indices)
            a=MV2.masked_equal(a0,1.e20)
            a=MV2.count(a,1)
            a=MV2.reshape(a,indices.shape)
            indices=indices/a*100
            setattr(self,d[0],real_value)
            indices.setAxisList(real.getAxisList())
##             print indices.shape
            return indices
        else:
            return self.__get()
Example #36
    def setUp(self):
        super(VCSTaylorBaseTest, self).setUp()
        # Create 7 dummy data points
        corr = [.2, .5, .7, .85, .9, .95, .99]
        std = [1.6, 1.7, 1.5, 1.2, .8, .9, .98]
        self.Npoints = len(std)
        self.data = MV2.array(list(zip(std, corr)))
        self.data.id = "My Taylor Diagram Data"
        # Markers attributes for later
        self.ids = ["A1", "A2", "A3", "B", "C1", "C2", "C3"]
        self.sizes = [3., 1., 2., 2., 2., 2., 2., ]
        self.symbols = [
            "square",
            "dot",
            "circle",
            "triangle_right",
            "triangle_left",
            "triangle_up",
            "triangle_down"]
        self.colors = [
            "red",
            "black",
            "black",
            "black",
            "black",
            "black",
            "blue"]
        self.id_sizes = [20., 15., 15., 15., 15., 15., 15., ]
        self.id_colors = [
            "orange",
            "grey",
            "grey",
            "grey",
            "grey",
            "grey",
            "cyan"]

        self.taylor = self.x.createtaylordiagram()
Example #37
def running_mean(data, N):

    # Compute N-yr running mean
    back = N / 2
    forward = N / 2 + 1
    avgdata = np.empty(data.shape)
    avgdata[:] = np.nan
    for month in range(12):
        subset = data[month::12, :]
        avgsubset = np.empty(subset.shape)
        avgsubset[:] = np.nan
        LS = len(subset)
        for ii in range(LS):
            start = np.max((ii - back, 0))
            finish = np.min((ii + forward, LS))
            avgsubset[ii, :] = np.ma.average(subset[int(start):int(finish), :],
                                             0)
        avgdata[month::12, :] = avgsubset

    avgdata = MV2.array(avgdata)  # convert to a cdms2 transient variable
    avgdata.setAxisList(data.getAxisList())

    return avgdata
Example #38
    def gather(self, array, pos=None, root=0):

        lst = comm.gather(array)
        lst2 = comm.gather(self)
        if pos is None:
            pos = self.pos

        if self.n == root:
            sh = []
            axes = []
            j = 0
            for i, a in enumerate(self.axes):
                if i in pos:
                    sh.append(len(a))
                    axes.append(self.axes[i])
                else:
                    sh.append(array.shape[i])
                    axes.append(array.getAxis(i))

            out = numpy.zeros(sh)
            for i in range(self.N):
                slices = lst2[i].getSlices()
                print i, slices
                Slices = []
                k = 0
                for j in range(len(self.axes)):
                    print k, j, pos
                    if j in pos:
                        Slices.append(slices[k])
                        k += 1
                    else:
                        Slices.append(slice(0, None))
                out[tuple(Slices)] = lst[i]
            out = MV2.array(out)
            out.setAxisList(axes)
            out.id = array.id
            return out
Example #39
def _extremum_(func, ctime, i0, i, var, spline):
    """Extremum possibly using splines"""
    nt = len(var)
    if spline and nt >= 4:  # and i != 0 and i != (nt-1)
        if i == 0:
            ifirst, ilast = 0, 4
        elif i == nt - 1:
            ifirst, ilast = nt - 4, nt
        else:
            icenter = i - int(var[i - 1] > var[i + 1])
            ifirst = max(icenter - 1, 0)
            ilast = ifirst + 4
            if ilast > nt:
                ilast -= 1
                ifirst -= 1
        mn_units = 'minutes since %s' % ctime[i0 + ifirst]
        old_rts = cdms2.createAxis(
            N.asarray([
                ct.torel(mn_units).value
                for ct in ctime[i0 + ifirst:i0 + ilast]
            ],
                      dtype='d'))
        old_var = MV2.array(var[ifirst:ilast], axes=[old_rts], copyaxes=0)
        mn_rts = cdms2.createAxis(N.arange(int(old_rts[-1] + 1), dtype='d'))
        mn_var = interp1d(old_var, mn_rts, method='cubic')
        del old_var, old_rts
        #       mn_var = spline_interpolate(old_rts,var[i-1:i+2],mn_rts)
        #       mn_var = splev(mn_rts, splrep(old_rts,var[ifirst:ilast]))
        mn_i = func(mn_var)
        val = mn_var[mn_i]
        del mn_var
        this_ctime = cdtime.reltime(mn_i, mn_units).tocomp()
    else:
        this_ctime = ctime[i0 + i]
        val = var[i]
    return val, this_ctime
Example #40
def get_variance_maps_models(variable="pr",models=None,cmip_dir = None,period=12):
    """ find latitudes in each model where the annual cycle is not dominant"""
    if models is None:
        f = cdms.open("/work/marvel1/SEASONAL/MMA/cmip5.ZONALMMA.historical-rcp85.rip.mo.atm.Amon.pr.ver-1.AmpPhase.nc")
        phase = f("phase")
        models = eval(phase.getAxis(0).models)
        f.close()
    if cmip_dir is None:
        cmip_dir = "/work/cmip5/historical-rcp85/atm/mo/"+variable+"/"
    fobs = cdms.open("/work/marvel1/SEASONAL/OBS/GPCP.precip.mon.mean.nc")
    the_grid = fobs("precip").getGrid()
    nlat,nlon=the_grid.shape
    fobs.close()
    VarianceMaps = np.zeros((len(models),nlat))+1.e20
    counter=0
    for model in models:
        print model
        try:
            fname = sorted(glob.glob(cmip_dir+"*."+model+".*"))[0]
            fp = cdms.open(fname)
            prtest = fp(variable,time=("1979-1-1","2014-12-31")).regrid(the_grid,regridTool='regrid2')
            przonal = cdutil.averager(prtest,axis='x')
            dom = variance_map(przonal,period=period)
            VarianceMaps[counter]=dom
            fp.close()
            counter+=1
        except:
            continue
        
    modax = cdms.createAxis(range(len(models)))
    modax.id = "model"
    modax.models = str(models)
    VarianceMaps = MV.array(VarianceMaps)
    VarianceMaps.setAxis(0,modax)
    VarianceMaps.setAxis(1,the_grid.getLatitude())
    return MV.masked_where(VarianceMaps>1.e10,VarianceMaps)
Example #41
def applyCropIdx(slab, cropidx):
    '''Cut out a bounding box from given 2d slab given corner indices

    Args:
        slab (ndarray): 2D array to cut a box from.
        cropidx (tuple): (y, x) coordinate indices, output from cropMask().

    Returns:
        cropslab (ndarray): 2D sub array cut from <slab> using <cropidx> as
                            boundary indices.
    '''

    cropslab = np.array(slab)[np.ix_(*cropidx)]
    try:
        croplat = slab.getLatitude()[:][cropidx[0]]
        croplon = slab.getLongitude()[:][cropidx[1]]

        croplat = cdms.createAxis(croplat)
        croplat.designateLatitude()
        croplat.id = 'y'
        croplat.units = 'degree'
        croplat.name = 'latitude'

        croplon = cdms.createAxis(croplon)
        croplon.designateLongitude()
        croplon.id = 'x'
        croplon.units = 'degree'
        croplon.name = 'longitude'

        cropslab = MV.array(cropslab)
        cropslab.setAxis(0, croplat)
        cropslab.setAxis(1, croplon)
    except:
        pass

    return cropslab
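A small usage sketch for applyCropIdx above with a plain ndarray; since a bare array has no latitude/longitude axes, the try/except falls back to returning the cropped ndarray. The index tuple stands in for a hypothetical cropMask() result.

import numpy as np

slab = np.arange(25).reshape(5, 5)
cropidx = (np.array([1, 2, 3]), np.array([0, 1]))   # hypothetical cropMask() output
box = applyCropIdx(slab, cropidx)
print(box.shape)  # (3, 2)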
Example #42
def get_eofs(v_HIS, num_eof, scaling_eof, flag_mask_MOD, selec):
    TotGriNum = v_HIS.shape[1] * v_HIS.shape[2]
    if flag_mask_MOD == True:
        fmask = cdms.open(data_path + 'mask_subregions.nc')
        if selec == 1: vMask = fmask('vHadCRUT_mask')
        if selec == 2: vMask = fmask('vBerkEar_mask')
        fmask.close()
    else:
        vMask = 1
    v_rem_mean = v_HIS - np.array(cdutil.averager(v_HIS, axis='yx'))[:, None,
                                                                     None]
    v_HIS_spat = v_rem_mean - cdutil.averager(
        v_rem_mean, axis=0, weights='equal')
    eofs2, pc2, vari2, eigv2 = get_EOFs(v_HIS_spat,
                                        num=num_eof,
                                        scaling=scaling_eof)
    eofs2 = eofs2 * np.sqrt(
        TotGriNum) * EOFsSignAdjust[:, None, None]  # Normalized by RMS
    eofs2_MV = MV.array(eofs2)
    eofs2_MV.setAxis(1, v_HIS.getAxis(1))
    eofs2_MV.setAxis(2, v_HIS.getAxis(2))
    predictor_OnlyPattern = get_constant_term(eofs2)
    return_spat = np.ma.filled(predictor_OnlyPattern, 0)
    return eofs2_MV, return_spat
Example #43
def annual_avg(data):
    """
    Compute annual means without forcing it to be Jan through Dec
    """

    A = data.shape[0]
    anndata0 = np.empty(data.shape)
    anndata0[:] = np.nan
    cnt = -1
    for i in np.arange(0, A, 12):
        try:
            LD = len(data[i:i + 12])
        except:
            continue
        if LD == 12:  # only take full 12-month periods
            cnt += 1
            anndata0[cnt] = np.ma.average(data[i:i + 12], 0)

    B = cnt + 1
    anndata = MV2.array(anndata0[:B])  # convert to a cdms2 transient variable
    if type(anndata) != float:
        anndata.setAxisList(data[:B * 12:12].getAxisList())

    return anndata
Example #44
                    dict_head = result_dict['RESULTS'][model][run][
                        'defaultReference'][mode][season]['cbf']
                    debug_print('CBF approach start', debug)

                    # Regrid (interpolation, model grid to ref grid)
                    model_timeseries_season_regrid = model_timeseries_season.regrid(
                        ref_grid_global, regridTool='regrid2', mkCyclic=True)
                    model_timeseries_season_regrid_subdomain = model_timeseries_season_regrid(
                        region_subdomain)

                    # Matching model's missing value location to that of observation
                    # Save axes for preserving
                    axes = model_timeseries_season_regrid_subdomain.getAxisList(
                    )
                    # 1) Replace the model's masked grid points with 0, so theoretically they won't affect the result
                    model_timeseries_season_regrid_subdomain = MV2.array(
                        model_timeseries_season_regrid_subdomain.filled(0.))
                    # 2) Give the obs mask to the model field, to enable the projectField functionality below
                    model_timeseries_season_regrid_subdomain.mask = eof_obs[
                        season].mask
                    # Preserve axes
                    model_timeseries_season_regrid_subdomain.setAxisList(axes)

                    # CBF PC time series
                    cbf_pc = gain_pseudo_pcs(
                        solver_obs[season],
                        model_timeseries_season_regrid_subdomain,
                        eofn_obs,
                        reverse_sign_obs[season],
                        EofScaling=EofScaling)

                    # Calculate stdv of cbf pc time series
Example #45
    def draw_values(self, raveled, mesh, meshfill, template):
        # Values to use (data or user passed)
        if self.PLOT_SETTINGS.values.array is None:
            data = MV2.array(raveled)
        else:
            data = MV2.ravel(self.PLOT_SETTINGS.values.array)
        if isinstance(raveled, numpy.ma.core.MaskedArray):
            data.mask = data.mask + raveled.mask

        # Now remove masked values
        if data.mask is not numpy.ma.nomask:  # we have missing
            indices = numpy.argwhere(numpy.ma.logical_not(data.mask))
            data = data.take(indices).filled(0)[:, 0]
            M = mesh.filled()[indices][:, 0]
            raveled = raveled.take(indices).filled(0.)[:, 0]
        else:
            M = mesh.filled()

        # Barycenters
        xcenters = numpy.average(M[:, 1], axis=-1)
        ycenters = numpy.average(M[:, 0], axis=-1)
        self.PLOT_SETTINGS.values.text.viewport = [template.data.x1, template.data.x2,
                                                   template.data.y1, template.data.y2]
        if not numpy.allclose(meshfill.datawc_x1, 1.e20):
            self.PLOT_SETTINGS.values.text.worldcoordinate = [meshfill.datawc_x1,
                                                              meshfill.datawc_x2,
                                                              meshfill.datawc_y1,
                                                              meshfill.datawc_y2]
        else:
            self.PLOT_SETTINGS.values.text.worldcoordinate = [M[:, 1].min(),
                                                              M[:, 1].max(),
                                                              M[:, 0].min(),
                                                              M[:, 0].max()]

        self.PLOT_SETTINGS.values.text.string = [
            self.PLOT_SETTINGS.values.format.format(value) for value in data]

        # Now that we have the formatted values we need get the longest string
        lengths = [len(txt) for txt in self.PLOT_SETTINGS.values.text.string]
        longest = max(lengths)
        index = lengths.index(longest)

        tmptxt = vcs.createtext()
        tmptxt.string = self.PLOT_SETTINGS.values.text.string[index]
        tmptxt.x = xcenters[index]
        tmptxt.y = ycenters[index]
        smallY = M[index, 0, :].min()
        bigY = M[index, 0, :].max()
        smallX = M[index, 1, :].min()
        bigX = M[index, 1, :].max()
        tmptxt.worldcoordinate = self.PLOT_SETTINGS.values.text.worldcoordinate
        tmptxt.viewport = self.PLOT_SETTINGS.values.text.viewport
        # Now try to shrink until it fits
        extent = self.x.gettextextent(tmptxt)[0]
        while ((extent[1] - extent[0]) / (bigX - smallX) > 1.01 or
               (extent[3] - extent[2]) / (bigY - smallY) > 1.01) and \
                tmptxt.height >= 1:
            tmptxt.height -= 1
            extent = self.x.gettextextent(tmptxt)[0]
        self.PLOT_SETTINGS.values.text.height = tmptxt.height

        # Finally we need to split into two text objects for dark and light background
        # Step 1: figure out each bin color type (dark/light)
        colormap = self.x.colormap
        if colormap is None:
            colormap = vcs._colorMap
        cmap = vcs.getcolormap(colormap)
        colors = meshfill.fillareacolors
        dark_bins = [
            is_dark_color_type(
                *cmap.getcolorcell(color)) for color in colors]

        # Step 2: put values into bin (color where they land)
        bins = meshfill.levels[1:-1]
        binned = numpy.digitize(raveled, bins)
        isdark = [dark_bins[indx] for indx in binned]
        tmptxt = vcs.createtext(
            Tt_source=self.PLOT_SETTINGS.values.text.Tt_name,
            To_source=self.PLOT_SETTINGS.values.text.To_name)
        for pick, color in [(numpy.argwhere(isdark), self.PLOT_SETTINGS.values.lightcolor),
                            (numpy.argwhere(numpy.logical_not(isdark)), self.PLOT_SETTINGS.values.darkcolor)]:
            tmptxt.x = xcenters.take(pick)[:, 0].tolist()
            tmptxt.y = ycenters.take(pick)[:, 0].tolist()
            tmptxt.string = numpy.array(
                self.PLOT_SETTINGS.values.text.string).take(pick)[
                :, 0].tolist()
            tmptxt.color = color
            self.x.plot(tmptxt, bg=self.bg)
Example #46
def logLinearInterpolation(A,P,levels=[100000, 92500, 85000, 70000, 60000, 50000, 40000, 30000, 25000, 20000, 15000, 10000, 7000, 5000, 3000, 2000, 1000],status=None):
    """
    Log-linear interpolation
    to convert a field from sigma levels to pressure levels
    Values below the surface are masked
    
    Input
    A :    array on sigma levels
    P :    pressure field from TOP (level 0) to BOTTOM (last level)
    levels : pressure levels to interpolate to (same units as P), default levels are: [100000, 92500, 85000, 70000, 60000, 50000, 40000, 30000, 25000, 20000, 15000, 10000, 7000, 5000, 3000, 2000, 1000]

    P and levels must have same units

    Output
    array on pressure levels (levels)
    
    Examples:
    A=logLinearInterpolation(A,P,levels=[100000, 92500, 85000, 70000, 60000, 50000, 40000, 30000, 25000, 20000, 15000, 10000, 7000, 5000, 3000, 2000, 1000])
    """
    
    try:
        nlev=len(levels)  # Number of pressure levels
    except:
        nlev=1  # if only one level, len(levels) would break
        levels=[levels,]
    order=A.getOrder()
    A=A(order='z...')
    P=P(order='z...')
    sh=list(P.shape)
    nsigma=sh[0] #number of sigma levels
    sh[0]=nlev
    t=MV2.zeros(sh,typecode=MV2.float32)
    sh2=P[0].shape
    prev=-1
    for ilev in range(nlev): # loop through pressure levels
        if status is not None:
            prev=genutil.statusbar(ilev,nlev-1.,prev)
        lev=levels[ilev] # get value for the level
        Pabv=MV2.ones(sh2,MV2.float)
        Aabv=-1*Pabv # Array on sigma level Above
        Abel=-1*Pabv # Array on sigma level Below
        Pbel=-1*Pabv # Pressure on sigma level Below
        Pabv=-1*Pabv # Pressure on sigma level Above
        Peq=MV2.masked_equal(Pabv,-1) # Area where Pressure == levels
        for i in range(1,nsigma): # loop from second sigma level to last one
            a=MV2.greater_equal(P[i],  lev) # Where is the pressure greater than lev
            b=   MV2.less_equal(P[i-1],lev) # Where is the pressure less than lev
            # Now looks if the pressure level is in between the 2 sigma levels
            # If yes, sets Pabv, Pbel and Aabv, Abel
            a=MV2.logical_and(a,b)
            Pabv=MV2.where(a,P[i],Pabv) # Pressure on sigma level Above
            Aabv=MV2.where(a,A[i],Aabv) # Array on sigma level Above
            Pbel=MV2.where(a,P[i-1],Pbel) # Pressure on sigma level Below
            Abel=MV2.where(a,A[i-1],Abel) # Array on sigma level Below
            Peq= MV2.where(MV2.equal(P[i],lev),A[i],Peq)

        val=MV2.masked_where(MV2.equal(Pbel,-1),numpy.ones(Pbel.shape)*lev) # mask points where there is no data below lev
        
        tl=MV2.log(val/Pbel)/MV2.log(Pabv/Pbel)*(Aabv-Abel)+Abel # Interpolation
        if ((Peq.mask is None) or (Peq.mask is MV2.nomask)):
            tl=Peq
        else:
            tl=MV2.where(1-Peq.mask,Peq,tl)
        t[ilev]=tl.astype(MV2.float32)
        
    ax=A.getAxisList()
    autobnds=cdms2.getAutoBounds()
    cdms2.setAutoBounds('off')
    lvl=cdms2.createAxis(MV2.array(levels).filled())
    cdms2.setAutoBounds(autobnds)
    try:
        lvl.units=P.units
    except:
        pass
    lvl.id='plev'
    
    try:
      t.units=P.units
    except:
      pass
  
    ax[0]=lvl
    t.setAxisList(ax)
    t.id=A.id
    for att in A.listattributes():
        setattr(t,att,getattr(A,att))
    return t(order=order)
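For reference, the weighting applied in the loop above reduces, for a single column and a single target level, to linear interpolation in log(pressure). A minimal sketch with plain numpy follows; the arrays p_col and a_col and the value target are illustrative only, not part of the function above.

import numpy

# Sketch only: log-linear interpolation of one column to one target pressure,
# mirroring the log(lev/Pbel)/log(Pabv/Pbel)*(Aabv-Abel)+Abel step above.
p_col = numpy.array([10000., 30000., 70000., 100000.])  # pressure, TOP to BOTTOM
a_col = numpy.array([210., 230., 270., 290.])            # field on those levels
target = 85000.                                          # level to interpolate to

# find the bracketing levels such that p_col[i-1] <= target <= p_col[i]
i = numpy.searchsorted(p_col, target)
p_bel, p_abv = p_col[i - 1], p_col[i]
a_bel, a_abv = a_col[i - 1], a_col[i]

# interpolate linearly in log(pressure)
w = numpy.log(target / p_bel) / numpy.log(p_abv / p_bel)
value = a_bel + w * (a_abv - a_bel)
print(value)  # about 280.9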
Ejemplo n.º 47
0
    def concatenate_piControl(self, season=None, compressed=False):
        experiment = "piControl"
        fnames = sorted(
            get_ensemble_filenames(self.variable, self.region, experiment))
        #models=sorted(self.ensemble_dict.keys())
        models = get_ok_models(self.region)

        nmod = len(models)
        ntimes = []
        model_names = []
        #Loop over models without loading data to find the length of the shortest control run
        for model in models:
            # print(model)
            I = np.where([x.split(".")[2] == model for x in fnames])[0]
            if len(I) > 0:
                first_member = int(I[0])

                fname = fnames[first_member]

                model_names += [fname]
                f = cdms.open(fname)
                ntimes += [int(f[self.variable].shape[0])]
                f.close()
        L = np.min(ntimes)
        #Set the time axis to be the time axis of the shortest control run
        imin = np.argmin(ntimes)

        fshortest = model_names[imin]
        f = cdms.open(fshortest)
        tax = f(self.variable).getTime()
        tax.id = 'time'
        tax.designateTime()
        f.close()
        #Load data
        #SingleMember=np.ma.zeros((len(model_names),L))+1.e20
        SingleMember = np.ma.zeros((nmod, L)) + 1.e20
        i = 0
        for model in models:
            I = np.where([x.split(".")[2] == model for x in fnames])[0]
            if len(I) > 0:
                first_member = I[0]
                fname = fnames[first_member]
                f = cdms.open(fname)
                vdata = f(self.variable)

                SingleMember[i] = vdata[:L]
                i += 1
                f.close()
            else:
                if self.verbose:
                    print("No piControl data for " + model + " " +
                          self.variable)

        #Historical units are already converted; need to convert piControl from
        #kg m-2 s-1 to mm day-1
        #if self.variable in ["pr","evspsbl","prsn","mrros","mrro"]:
        #   SingleMember = SingleMember*86400.
        SingleMember = MV.masked_where(
            np.abs(SingleMember) > 1.e10, SingleMember)
        SingleMember = MV.array(SingleMember)
        SingleMember.setAxis(1, tax)

        SingleMember.setAxis(0, cmip5.make_model_axis(models))
        ###KLUDGE: FIRST YEAR IS ZERO- FIX THIS IN DOWNLOADER
        SingleMember = MV.masked_where(SingleMember == 0, SingleMember)
        # if self.variable in ["mrsos","mrso"]:
        #     if not raw:
        #         SingleMember=self.standardize_zscore(SingleMember)
        # else:
        #     if not raw:
        #         SingleMember=self.convert_to_percentage(SingleMember)
        if season is None:
            return SingleMember
        cdutil.setTimeBoundsMonthly(SingleMember)
        seasonal = getattr(cdutil, season).departures(SingleMember)

        return DA_tools.concatenate_this(seasonal, compressed=compressed)
Ejemplo n.º 48
0
def linearInterpolation(A,I,levels=[100000, 92500, 85000, 70000, 60000, 50000, 40000, 30000, 25000, 20000, 15000, 10000, 7000, 5000, 3000, 2000, 1000], status=None):
    """
    Linear interpolation
    to interpolate a field from some levels to another set of levels.
    Values below the "surface" are masked.

    Input
    A :      array to interpolate
    I :      interpolation field (usually pressure or depth) from TOP (level 0) to BOTTOM (last level), i.e. values increasing with each level
    levels : levels to interpolate to (same units as I), default levels are:
             [100000, 92500, 85000, 70000, 60000, 50000, 40000, 30000, 25000, 20000, 15000, 10000, 7000, 5000, 3000, 2000, 1000]

    I and levels must have the same units

    Output
    array on new levels (levels)

    Examples:
    A=linearInterpolation(A,I,levels=[100000, 92500, 85000, 70000, 60000, 50000, 40000, 30000, 25000, 20000, 15000, 10000, 7000, 5000, 3000, 2000, 1000])
    """
    
    try:
        nlev=len(levels)  # Number of pressure levels
    except:
        nlev=1  # if only one level, len(levels) would break
        levels=[levels,]
    order=A.getOrder()
    A=A(order='z...')
    I=I(order='z...')
    sh=list(I.shape)
    nsigma=sh[0] #number of sigma levels
    sh[0]=nlev
    t=MV2.zeros(sh,typecode=MV2.float32)
    sh2=I[0].shape
    prev=-1
    for ilev in range(nlev): # loop through pressure levels
        if status is not None:
            prev=genutil.statusbar(ilev,nlev-1.,prev)
        lev=levels[ilev] # get value for the level
        Iabv=MV2.ones(sh2,MV2.float)
        Aabv=-1*Iabv # Array on sigma level Above
        Abel=-1*Iabv # Array on sigma level Below
        Ibel=-1*Iabv # Pressure on sigma level Below
        Iabv=-1*Iabv # Pressure on sigma level Above
        Ieq=MV2.masked_equal(Iabv,-1) # Area where Pressure == levels
        for i in range(1,nsigma): # loop from second sigma level to last one
            a = MV2.greater_equal(I[i],  lev) # Where is the pressure greater than lev
            b =    MV2.less_equal(I[i-1],lev) # Where is the pressure less than lev
            # Now check whether the target level lies between the two input levels
            # If so, set Iabv, Ibel and Aabv, Abel
            a=MV2.logical_and(a,b)
            Iabv=MV2.where(a,I[i],Iabv) # Pressure on sigma level Above
            Aabv=MV2.where(a,A[i],Aabv) # Array on sigma level Above
            Ibel=MV2.where(a,I[i-1],Ibel) # Pressure on sigma level Below
            Abel=MV2.where(a,A[i-1],Abel) # Array on sigma level Below
            Ieq= MV2.where(MV2.equal(I[i],lev),A[i],Ieq)

        val=MV2.masked_where(MV2.equal(Ibel,-1.),numpy.ones(Ibel.shape)*lev) # set to missing where there is no data below lev
        
        tl=(val-Ibel)/(Iabv-Ibel)*(Aabv-Abel)+Abel # Interpolation
        if ((Ieq.mask is None) or (Ieq.mask is MV2.nomask)):
            tl=Ieq
        else:
            tl=MV2.where(1-Ieq.mask,Ieq,tl)
        t[ilev]=tl.astype(MV2.float32)

    ax=A.getAxisList()
    autobnds=cdms2.getAutoBounds()
    cdms2.setAutoBounds('off')
    lvl=cdms2.createAxis(MV2.array(levels).filled())
    cdms2.setAutoBounds(autobnds)
    try:
        lvl.units=I.units
    except:
        pass
    lvl.id='plev'
    
    try:
      t.units=I.units
    except:
      pass
  
    ax[0]=lvl
    t.setAxisList(ax)
    t.id=A.id
    for att in A.listattributes():
        setattr(t,att,getattr(A,att))
    return t(order=order)
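For a single column the weighting above is ordinary linear interpolation, so numpy.interp can serve as a quick cross-check. A minimal sketch with illustrative values follows; note that, unlike linearInterpolation, numpy.interp clamps rather than masks values outside the input range.

import numpy

# Sketch only: per-column equivalent of the (val-Ibel)/(Iabv-Ibel) weighting above.
i_col = numpy.array([10000., 30000., 70000., 100000.])  # interpolation field, TOP to BOTTOM
a_col = numpy.array([210., 230., 270., 290.])            # field on those levels
levels = numpy.array([85000., 50000.])                   # target levels

# numpy.interp expects increasing x, which matches the TOP-to-BOTTOM ordering here
result = numpy.interp(levels, i_col, a_col)
print(result)  # [280. 250.]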
Ejemplo n.º 49
0
    def __call__(self, merge=[], **kargs):
        """ Returns the array of values"""
        # First clean up kargs
        if "merge" in kargs:
            merge = kargs["merge"]
            del (kargs["merge"])
        order = None
        axes_ids = self.getAxisIds()
        if "order" in kargs:
            # If it's an actual axis assume that it's what user wants
            # Otherwise it's an out order keyword
            if "order" not in axes_ids:
                order = kargs["order"]
                del (kargs["order"])
        ab = cdms2.getAutoBounds()
        cdms2.setAutoBounds("off")
        axes = self.getAxisList()
        if merge != []:
            if isinstance(merge[0], str):
                merge = [
                    merge,
                ]
        if merge != []:
            for merger in merge:
                for merge_axis_id in merger:
                    if merge_axis_id not in axes_ids:
                        raise RuntimeError(
                            "You requested to merge axis '{}' which is not valid. Axes: {}"
                            .format(merge_axis_id, axes_ids))
        sh = []
        ids = []
        used_ids = []
        for a in axes:
            # Regular axis not a merged one
            sh.append(len(a))  # store length to construct array shape
            ids.append(a.id)  # store ids

            used_ids.append(a.id)

        # first let's see which vars are actually asked for
        # for now assume all keys means restriction on dims
        if not isinstance(merge, (list, tuple)):
            raise RuntimeError(
                "merge keyword must be a list of dimensions to merge together")

        if len(merge) > 0 and not isinstance(merge[0], (list, tuple)):
            merge = [
                merge,
            ]

        for axis_id in kargs:
            if axis_id not in ids:
                raise ValueError("Invalid axis '%s'" % axis_id)
            index = ids.index(axis_id)
            value = kargs[axis_id]
            if isinstance(value, basestring):
                value = [value]
            if not isinstance(value, (list, tuple, slice)):
                raise TypeError(
                    "Invalid subsetting type for axis '%s', axes can only be subsetted by string, list or slice"
                    % axis_id)
            if isinstance(value, slice):
                axes[index] = axes[index].subAxis(value.start, value.stop,
                                                  value.step)
                sh[index] = len(axes[index])
            else:  # ok it's a list
                for v in value:
                    if v not in axes[index][:]:
                        raise ValueError("Unkwown value '%s' for axis '%s'" %
                                         (v, axis_id))
                axis = cdms2.createAxis(value, id=axes[index].id)
                axes[index] = axis
                sh[index] = len(axis)

        array = numpy.ma.ones(sh, dtype=numpy.float)
        # Now let's fill this array
        self.get_array_values_from_dict_recursive(array, [], [], [], axes)

        # Ok at this point we need to take care of merged axes
        # First let's create the merged axes
        axes_to_group = []
        for merger in merge:
            merged_axes = []
            for axid in merger:
                for ax in axes:
                    if ax.id == axid:
                        merged_axes.append(ax)
            axes_to_group.append(merged_axes)
        new_axes = [groupAxes(grp_axes) for grp_axes in axes_to_group]
        sh2 = list(sh)
        for merger in merge:  # loop through all possible merging
            merged_indices = []
            for id in merger:
                merged_indices.append(axes_ids.index(id))
            for indx in merged_indices:
                sh2[indx] = 1
            smallest = min(merged_indices)
            for indx in merged_indices:
                sh2[smallest] *= sh[indx]

        myorder = []
        for index in range(len(sh)):
            if index in myorder:
                continue
            for merger in merge:
                merger = [axes_ids.index(x) for x in merger]
                if index in merger and index not in myorder:
                    for indx in merger:
                        myorder.append(indx)
            if index not in myorder:  # ok did not find this one anywhere
                myorder.append(index)

        outData = numpy.transpose(array, myorder)
        outData = numpy.reshape(outData, sh2)

        yank = []
        for merger in merge:
            merger = [axes_ids.index(x) for x in merger]
            mn = min(merger)
            merger.remove(mn)
            yank += merger
        yank = sorted(yank, reverse=True)
        for yk in yank:
            extract = (slice(0, None), ) * yk
            extract += (0, )
            outData = outData[extract]
        # Ok now let's apply the newaxes
        sub = 0
        outData = MV2.array(outData)
        merged_axis_done = []
        for index in range(len(array.shape)):
            foundInMerge = False
            for imerge, merger in enumerate(merge):
                merger = [axes_ids.index(x) for x in merger]
                if index in merger:
                    foundInMerge = True
                    if imerge not in merged_axis_done:
                        merged_axis_done.append(imerge)
                        setMergedAxis = imerge
                    else:
                        setMergedAxis = -1
            if not foundInMerge:
                outData.setAxis(index - sub, axes[index])
            else:
                if setMergedAxis == -1:
                    sub += 1
                else:
                    outData.setAxis(index - sub, new_axes[setMergedAxis])
        outData = MV2.masked_greater(outData, 9.98e20)
        outData.id = "pmp"
        if order is not None:
            myorder = "".join(["({})".format(nm) for nm in order])
            outData = outData(order=myorder)
        # Merge needs cleaning for extra dims created
        if merge != []:
            for i in range(outData.ndim):
                outData = scrap(outData, axis=i)
        outData = MV2.masked_greater(outData, 9.9e19)
        cdms2.setAutoBounds(ab)
        return outData
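The merged-axis handling above comes down to a transpose-then-reshape step that fuses the requested axes into one and drops the collapsed dimensions. A stripped-down sketch of that step with plain numpy; the array and shapes are illustrative only.

import numpy

# Sketch only: fuse axes 0 and 2 of a (2, 3, 4) array into a single axis of length 8.
arr = numpy.arange(24).reshape(2, 3, 4)

# bring the axes to be merged next to each other, merged axes first
moved = numpy.transpose(arr, (0, 2, 1))    # shape (2, 4, 3)

# collapse the two leading axes into one
merged = moved.reshape(2 * 4, 3)           # shape (8, 3)
print(merged.shape)  # (8, 3)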
Ejemplo n.º 50
0
#pdb.set_trace()
for fn in fns:
    f = cdms2.open(old_dir + '/' + fn)
    g = cdms2.open(new_dir + '/' + fn, 'w')
    store_provenance(g)
    
    for key, value in f.attributes.iteritems():
        setattr(g, key, value)

    for varid in vars:
        #print varid

        var = f(varid)
        #print varid, type(var)
        if isinstance(var,numpy.float64):
            var = MV2.array(var, id=varid)
        elif hasattr(var, 'getLatitude'):
            l = var.getLatitude()
            if hasattr(l, '_bounds_'):
                l._bounds_ = None
            L=var.getLongitude()
            if hasattr(L, '_bounds_'):
                L._bounds_ = None
        #else:
        #    pass
        g.write(var)
    
    
    #special processing
    #create a variable so that isccp_tau and isccp_prs are stored
    tau = f['isccp_tau']
Ejemplo n.º 51
0
data_patch = '/lustre/scratch/leiduan/MERRA2_data/'
case_name = get_prefix_name(int(year)) + str(year) + '_wcf100m031225.nc'
isleap = calendar.isleap(int(year))
leap_year = 1 if isleap else 0

f_mask = cdms.open('SWGDN.nc')
v = f_mask('SWGDN')
lat = v.getAxis(1)
lon = v.getAxis(2)
f_mask.close()

fw = cdms.open(data_patch + case_name)
wcf = MV.array(fw('wcf', squeeze=1))
wcf[wcf < 0] = 0.
wcf[wcf > 1] = 1.
fw.close()

# use NetCDF3 Classic format
cdms.setNetcdfShuffleFlag(0)  # netcdf3 classic...
cdms.setNetcdfDeflateFlag(0)  # netcdf3 classic...
cdms.setNetcdfDeflateLevelFlag(0)  # netcdf3 classic...

fm = cdms.open('selected_mask_NYS.nc')
region_mask_list = {0: 'wmask_NYS'}
mask_idx = MV.array(fm(region_mask_list[idx]))
# Pre-define the output variable and output file
g = cdms.open('averaged_NYS_wcf' + str(year) + '.nc', 'w')
new_data = MV.array(np.zeros(len_axis))
Ejemplo n.º 52
0
           pass
    
    med_rms1 = np.ma.median(out1_rel[vn,:])
    med_rms2 = np.ma.median(out2_rel[vn,:])
    med_rms3 = np.ma.median(out3_rel[vn,:])
   
    out1_rel[vn,:]=(out1_rel[vn,:]-med_rms1)/med_rms1
    out2_rel[vn,:]=(out2_rel[vn,:]-med_rms2)/med_rms2
    out3_rel[vn,:]=(out3_rel[vn,:]-med_rms3)/med_rms3

# ADD SPACES FOR LABELS TO ALIGN AXIS LABELS WITH PLOT
yax = [ m.encode('utf-8')+" " for m in mods ]
xax = [ v+" " for v in vars ]
  
# Convert to MV so we can decorate
out1_rel = MV2.array(out1_rel)
out2_rel = MV2.array(out2_rel)
out3_rel = MV2.array(out3_rel)

# GENERATE PLOT 
P.decorate(out1_rel,xax,yax)
P.decorate(out2_rel,xax,yax)
P.decorate(out3_rel,xax,yax)

# PLOT
P.plot(out1_rel,x=x,multiple=1.4,bg=1)
P.plot(out2_rel,x=x,multiple=2.4,bg=1)
P.plot(out3_rel,x=x,multiple=3.4,bg=1)
# END OF PLOTTING

# SAVE PLOT
Ejemplo n.º 53
0
#             axis[kp] = parent.panelDM.dim[p].new_axis[k].astype(axis.dtype.char)
#             kp += 1
#             if kp == axis_len: break
   else:
       ## Charles changed it from apply varobj.subRegion to varobj, as curvilinear and
       ## generic grid don't work with subRegion
##       slab = apply(var.subRegion, tuple(dargs), kpargs)
##       print 'getting it',var,tuple(dargs), kpargs
      if var.shape!=():
        slab = apply(var, tuple(dargs), kpargs)
      else:
          slab=var

   #Make sure it's an MV, for 0D var
   if not MV2.isMaskedVariable(slab):
       slab=MV2.array(slab)
   # Make sure to change the Id
   if d_name is not None: slab.id = slab.name = d_name

   #
   # record subRegion command
   r=''
   if from_file == 1:
      var_name = var
      if d_name is not None: var_name = d_name
      s_index = "%s=fid2( '%s', " % ( var_name, var )
      s_coord = "%s=fid2( '%s', " % ( var_name, var )
   else:
      var_name = var.id
      if new_var is not None: var_name = new_var
      s_index = "%s=%s( " % ( var_name, var.id )
Ejemplo n.º 54
0
data = [1,]*12+[2,]*12

print data

months = range(24)

t=cdms2.createAxis(months)
t.designateTime()
t.units="months since 2014"

import cdutil

cdutil.setTimeBoundsMonthly(t)
import MV2,numpy
data = numpy.array(data)
data=MV2.array(data)
data.setAxis(0,t)
print t.asComponentTime()
djf = cdutil.times.DJF(data)
djfc = cdutil.times.DJF.climatology(data)
print djf
assert(numpy.allclose(djf[0],1.) and numpy.allclose(djf[1],1.6666667) and numpy.allclose(djf[2],2.))
print djfc
assert(numpy.allclose(djfc,1.625))
djf = cdutil.times.DJF(data,criteriaarg=[.5,None])
djfc = cdutil.times.DJF.climatology(data,criteriaarg=[.5,None])

print djf
assert(numpy.ma.allclose(djf[0],1.) and numpy.ma.allclose(djf[1],1.6666667) and numpy.ma.allclose(djf[2],numpy.ma.masked))
print djfc
assert(numpy.allclose(djfc,1.4))
Ejemplo n.º 55
0
def mixed_layer_depth(data, depth=None, lat=None, zaxis=None,
    mode=None, deltatemp=.2, deltadens=.01, kzmax=0.0005,
    potential=True, format_axes=False):
    """Get mixed layer depth from temperature and salinity

    :Params:

        - **data**: Insitu or potential temperature, density, kz, or a
          ``(temp, sal)`` tuple.
        - **depth**, optional: Depth at temperature and salinity points.
        - **lat**, optional: Latitude.
        - **mode**, optional: ``"deltatemp"``, ``"deltadens"``, ``"kz"``
          or ``"twolayers"``.


    :Raise: :class:`~vacumm.VACUMMError` if depth (and latitude, for density) cannot be retrieved.
    """

    # TODO: positive up

    # Inspection
    if isinstance(data, tuple): # data = temp,sal

        temp, sal=data

        # Get density
        if mode!='deltatemp':

            res = density(temp, sal, depth=depth, lat=lat,
                format_axes=False, potential=potential, getdepth=True)
            if isinstance(res, tuple):
                dens, depth = res
            else:
                dens = res
            dens = dens.asma()
            if mode is None:
                mode = 'deltadens'

        else:

            temp = data[0]

        # Check mode
        if mode == 'kz':
            warn("Switching MLD computation mode to 'deltadens'")
            mode = "deltadens"

    elif match_var(data, 'temp', mode='nslu'):

        if mode is not None and mode!='deltatemp':
            warn("Switching MLD computation mode to 'deltatemp'")
        mode = 'deltatemp'
        temp = data

    elif match_var(data, 'dens', mode='nslu'):

        if mode in ['kz', 'deltatemp']:
            warn("Switching MLD computation mode to 'deltadens'")
            mode = None
        if mode is None:
            mode = "deltadens"
        dens = data

    elif match_var(data, 'kz', mode='nslu'):

        if mode is None:
            mode = "kz"
        if mode != "kz":
            warn("Switching MLD computation mode to 'kz'")
        kz = data

    else:

        if mode in ['deltadens', 'twolayers']:
            dens = data
        elif mode == "deltatemp":
            temp = data
        elif mode == "kz":
            kz = data
        elif mode is not None:
            raise VACUMMError("Invalid MLD computation mode : '%s'"%mode)
        else:
            raise VACUMMError("Can't guess MLD computation mode")

    # Find Z dim
    data0 = data[0] if isinstance(data, tuple) else data
    depth = grow_depth(data0, depth, mode='raise', getvar=False)
    zaxis = get_zdim(data0, axis=zaxis)
    if zaxis is None:
        raise VACUMMError("Can't guess zaxis")
    slices = get_axis_slices(data0, zaxis)

    # Init MLD
    axes = data0.getAxisList()
    del axes[zaxis]
    mld = MV2.array(data0.asma()[slices['first']], copy=1, axes=axes, copyaxes=False)
    set_grid(mld, get_grid(data0))
    format_var(mld, 'mld', format_axes=format_axes)
    mld[:] = MV2.masked

    # Two-layers
    if mode=='twolayers':

        densbot = dens[slices['first']]
        denstop = dens[slices['last']]
        del dens
        H = 1.5*depth[slices['first']] - 0.5*depth[slices['firstp1']]
        H = -1.5*depth[slices['last']] + 0.5*depth[slices['lastm1']]
        mld[:] = -H*(densbot-denstop)/(densbot-denstop)
        del H

    elif mode=='deltadens':

        denscrit = dens[slices['last']]+deltadens
        mld[:] = -_val2z_(dens, depth, denscrit, zaxis, -1)
        del dens

    elif mode=='deltatemp':

        tempcrit = temp[slices['last']]-deltatemp
        mld[:] = -_val2z_(temp, depth, tempcrit, zaxis, 1)

    elif mode=='kz':

        mld[:] = -_valmin2z_(kz, depth, kzmax, zaxis, 1)

    else:

        raise VACUMMError("Invalid mode for computing MLD (%s)."%mode +
            "Please choose one of: deltadens, twolayers")

    # Mask zeros
    mld[:] = MV2.masked_values(mld, 0., copy=0)

    return mld
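As an illustration of the deltatemp criterion only (not of the _val2z_ helper used above), here is a crude, non-interpolating sketch for a single profile with plain numpy, assuming depth is negative downward and the surface is the last index along z.

import numpy

# Sketch only: depth at which temperature first drops more than deltatemp
# below the surface value, for one profile (surface = last index, no interpolation).
depth = numpy.array([-200., -100., -50., -20., -5.])  # negative downward, surface last
temp = numpy.array([10.5, 11.0, 12.2, 12.9, 13.0])    # temperature at those depths
deltatemp = 0.2

tempcrit = temp[-1] - deltatemp            # 12.8
colder = numpy.where(temp < tempcrit)[0]   # levels colder than the criterion
ishallowest = colder.max()                 # shallowest such level
mld = -depth[ishallowest]                  # mixed layer depth as a positive number
print(mld)  # 50.0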
Ejemplo n.º 56
0
# Now make the template grid appear and extend thru
# the whole grid
t.ytic2.x1=t.data.x1
t.ytic2.x2=t.data.x2
t.ytic2.line='gl'

## Same for the min tics
t.ymintic2.x1=t.data.x1
t.ymintic2.x2=t.data.x2
t.ymintic2.line='gdl'

# Same thing the circles
t.xtic2.x1=t.data.x1
t.xtic2.x2=t.data.x2
t.xtic2.line='gl'
# Same thing the mintic circles
t.xmintic2.x1=t.data.x1
t.xmintic2.x2=t.data.x2
t.xmintic2.line='gdl'

# turn the circles on
t.xtic2.priority=1
t.xmintic2.priority=1

# Create some dummy data for display purposes
data=MV.array([[1.52,.52,],[.83,.84]])

x.plot(data,t,td,bg=bg)
support.check_plot(x)

Ejemplo n.º 57
0
    u=MV2.ones((args.nlat,args.nlon))
elif args.angle in [-135,-180,135]:
    u=-MV2.ones((args.nlat,args.nlon))
else:
    u=MV2.zeros((args.nlat,args.nlon))
if args.angle in [45,90,135]:
    v=MV2.ones((args.nlat,args.nlon))
elif args.angle in [-45,-90,-135]:
    v=-MV2.ones((args.nlat,args.nlon))
else:
    v=MV2.zeros((args.nlat,args.nlon))
if args.amplitude:
  nm_xtra="_amplitude"
  U=numpy.cos(lons[:])
  V=numpy.sin(lats[:])
  A=3+MV2.array(V[:,numpy.newaxis]*U[numpy.newaxis,:])
  A.setAxis(0,lats)
  A.setAxis(1,lons)
  u*=A
  v*=A
  #Now plots the amplitude underneath the data
  b=x.createboxfill()
  b.xticlabels1=vcs.elements["list"]["Lon30"]
  b.yticlabels1=vcs.elements["list"]["Lat20"]
  x.plot(A,b,bg=bg)
u.setAxis(0,lats)
u.setAxis(1,lons)
v.setAxis(0,lats)
v.setAxis(1,lons)
x.plot(u,v,gm,bg=bg)
ret=0
Ejemplo n.º 58
0
data = """-11.14902417  -9.17390922  -7.29515002  
-7.51774549  -8.63608171
  -10.4827395   -9.93859485  -7.3394366   -5.39241468  -5.74825567
     -6.74967902  -7.09622319  -5.93836983  -4.04592997  -2.65591499
        -1.68180032  -0.86935245  -0.40114047  -0.54273785  -1.36178957
           -2.67488251  -3.87524401  -4.84708491  -5.49186142  -5.28618944
              -4.30557389  -2.89804038  -1.53825408  -1.84771029  -2.74948361
                 -2.23517037  -1.73306118  -0.71200646   0.76416785   1.51511193
                    -0.04018418  -1.54564706  -1.88664877  -0.43751604   0.89988184
                        0.33437949  -1.70341844  -3.79880014  -4.03570169  -4.7740073
                           -5.04626101  -3.77609961  -3.18667176  -2.21038272  -1.3666902
                              -0.54267951  -0.16472441  -0.52871418  -0.83520848  -0.90315403
                                 -0.21747426   0.01922666   0.89621996   1.75691927   3.12657503
                                     4.55749531   6.04921304   7.20744489   7.65294958""".split()
data = numpy.array(data,dtype=numpy.float)
data = MV2.array(data)

data=MV2.masked_where(MV2.logical_and(data>-4,data<-2),data)

#yx.datawc_x1 = 0
#yx.datawc_x2 = 80
##yx.datawc_y1 =-12 
#yx.datawc_y2 = 12 


x.plot(data,yx,bg=1)
fnm = "test_vcs_1d_missing.png"
x.png(fnm)

print "fnm:",fnm
print "src:",src
Ejemplo n.º 59
0
    med_rms1 = np.ma.median(out1_rel[vn, :])
    med_rms2 = np.ma.median(out2_rel[vn, :])
    med_rms3 = np.ma.median(out3_rel[vn, :])
    med_rms4 = np.ma.median(out4_rel[vn, :])

    out1_rel[vn, :] = (out1_rel[vn, :] - med_rms1) / med_rms1
    out2_rel[vn, :] = (out2_rel[vn, :] - med_rms2) / med_rms2
    out3_rel[vn, :] = (out3_rel[vn, :] - med_rms3) / med_rms3
    out4_rel[vn, :] = (out4_rel[vn, :] - med_rms4) / med_rms4

# ADD SPACES FOR LABELS TO ALIGN AXIS LABELS WITH PLOT
yax = [m.encode("utf-8") + " " for m in mods]
xax = [v + " " for v in vars]

# Convert to MV so we can decorate
out1_rel = MV2.array(out1_rel)
out2_rel = MV2.array(out2_rel)
out3_rel = MV2.array(out3_rel)
out4_rel = MV2.array(out4_rel)

# GENERATE PLOT
P.decorate(out1_rel, xax, yax)
P.decorate(out2_rel, xax, yax)
P.decorate(out3_rel, xax, yax)
P.decorate(out4_rel, xax, yax)

# PLOT
P.plot(out1_rel, x=x, multiple=1.4, bg=1)
P.plot(out2_rel, x=x, multiple=2.4, bg=1)
P.plot(out3_rel, x=x, multiple=3.4, bg=1)
P.plot(out4_rel, x=x, multiple=4.4, bg=1)
Ejemplo n.º 60
0
    def test_portrait(self):
        try:
            import vcs
        except:
            raise RuntimeError(
                "Sorry, your Python was not built with VCS support; cannot generate portrait plots")

        import json
        # CDAT MODULES
        import pcmdi_metrics
        import pcmdi_metrics.graphics.portraits
        import MV2
        import numpy
        from genutil import statistics
        import os
        import sys
        import glob

        # CREATES VCS OBJECT AS A PORTAIT PLOT AND LOADS PLOT SETTINGS FOR
        # EXAMPLE
        x = vcs.init()
        x.portrait()
        # Turn off antialiasing for test suite
        x.setantialiasing(0)

        # PARAMETERS STUFF
        P = pcmdi_metrics.graphics.portraits.Portrait()

        # Turn off verbosity
        P.verbose = False

        P.PLOT_SETTINGS.levels = [-1.e20, -.5, -.4, -.3, -.2, -.1,
                                  0., .1, .2, .3, .4, .5, 1.e20]

        P.PLOT_SETTINGS.x1 = .1
        P.PLOT_SETTINGS.x2 = .85
        P.PLOT_SETTINGS.y1 = .12
        P.PLOT_SETTINGS.y2 = .95

        P.PLOT_SETTINGS.xtic2y1 = P.PLOT_SETTINGS.y1
        P.PLOT_SETTINGS.xtic2y2 = P.PLOT_SETTINGS.y2
        P.PLOT_SETTINGS.ytic2x1 = P.PLOT_SETTINGS.x1
        P.PLOT_SETTINGS.ytic2x2 = P.PLOT_SETTINGS.x2

        # P.PLOT_SETTINGS.missing_color = 3
        # P.PLOT_SETTINGS.logo = None
        P.PLOT_SETTINGS.time_stamp = None
        P.PLOT_SETTINGS.draw_mesh = 'n'
        # P.PLOT_SETTINGS.tictable.font = 3

        x.scriptrun(
            os.path.join(
                pcmdi_metrics.__path__[0],
                "..",
                "..",
                "..",
                "..",
                "share",
                "graphics",
                'vcs',
                'portraits.scr'))
        P.PLOT_SETTINGS.colormap = 'bl_rd_12'
        # cols=vcs.getcolors(P.PLOT_SETTINGS.levels,range(16,40),split=1)
        cols = vcs.getcolors(P.PLOT_SETTINGS.levels, range(144, 156), split=1)
        P.PLOT_SETTINGS.fillareacolors = cols

        P.PLOT_SETTINGS.parametertable.expansion = 100

        # LIST OF VARIABLES TO BE USED IN PORTRAIT PLOT
        vars = [
            'pr',
            'rsut',
            'rsutcs',
            'rlutcs',
            'tas',
            'tos',
            'sos',
            'zos',
            'ua-850',
            'ua-200',
            'zg-500']
        vars = []

        # LOAD METRICS DICTIONARIES FROM JSON FILES FOR EACH VAR AND STORE AS A
        # SINGLE DICTIONARY
        var_cmip5_dics = {}
        mods = set()
        json_files = glob.glob(
            os.path.join(
                pcmdi_metrics.__path__[0],
                "..",
                "..",
                "..",
                "..",
                "share",
                "CMIP_metrics_results",
                "CMIP5",
                "amip",
                "*.json"))

        for fnm in json_files:
            f = open(fnm)
            d = json.load(f)
            var = os.path.basename(fnm).split("_")[0]
            vars.append(var)
            for m in d.keys():
                mods.add(m)
            if var in var_cmip5_dics:
                var_cmip5_dics[var].update(d)
            else:
                var_cmip5_dics[var] = d

        vars.sort()
        mods = sorted(list(mods))
        print "Models:",mods
        for bad in ["GridInfo", "References", "RegionalMasking",
                    "metrics_git_sha1", "uvcdat_version"]:
            if bad in mods:
                mods.remove(bad)
            else:
                print "Not removing column %s (not present)" % bad

        # ORGANIZE METRICS INTO A VARIABLES X MODELS MATRIX

        out1_rel = MV2.zeros(
            (len(vars),
             len(mods)),
            MV2.float32)  # DEFINE ARRAY

        vn = -1  # VARIABLE INDEX
        for var in vars:  # LOOP OVER VARIABLE
            vn = vn + 1

            vals = []
            for mod in mods:  # LOOP OVER MODEL
                try:
                    rms = var_cmip5_dics[var][mod]["defaultReference"][
                        "r1i1p1"]["global"]['rms_xyt_ann_GLB']
                    if P.verbose:
                        print var, ' ', mod, '  ', repr(rms), ' WITH global'
                except:
                    rms = 1.e20
                    if P.verbose:
                        print var, ' ', mod, '  ', repr(rms), ' missing'

                rms = float(rms)
                vals.append(rms)

            vars_ar = MV2.array(vals)
            # COMPUTE MEDIAN RESULT FOR PORTRAIT NORMALIZATION
            med_rms = statistics.median(vars_ar)[0]

            mn = -1  # MODEL INDEX
            for mod in mods:
                mn = mn + 1
                try:
                    out1_rel[vn, mn] = (float(var_cmip5_dics[var][mod]["defaultReference"][
                                        "r1i1p1"]["global"]['rms_xyt_ann_GLB']) - med_rms) / med_rms  # RELATIVE ERROR
                except:
                    out1_rel[vn, mn] = numpy.ma.masked

        # ADD SPACES FOR LABELS TO ALIGN AXIS LABELS WITH PLOT
        modsAxis = mods
        varsAxis = vars

        # LOOP THROUGH LISTS TO ADD SPACES
        for i in range(len(modsAxis)):
            modsAxis[i] = modsAxis[i] + '  '
        for i in range(len(varsAxis)):
            varsAxis[i] = varsAxis[i] + '  '

        yax = [s.encode('utf-8')
               for s in mods]  # CHANGE FROM UNICODE TO BYTE STRINGS
        xax = vars

        # GENERATE PLOT
        P.decorate(out1_rel, xax, yax)
        # P.plot(out1_rel,x=x,multiple=1.1,bg=0)  # FOR PLOTTING TRIANGLES WHEN
        # USING TWO OR MORE REFERENCE DATA SETS
        P.plot(out1_rel, bg=1, x=x)
        # x.backend.renWin.Render()

        # END OF PLOTTING

        # SAVE PLOT
        src = os.path.join(os.path.dirname(__file__), "testPortrait.png")
        print src
        fnm = os.path.join(os.getcwd(), "testPortrait.png")
        x.png(fnm)
        ret = checkimage.check_result_image(
            fnm,
            src,
            checkimage.defaultThreshold)
        if ret != 0:
            sys.exit(ret)
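The normalization used in the last few examples is the same in each case: within every variable row, the RMS errors are turned into relative errors about the row median. A minimal sketch of that step with plain numpy; the values are illustrative only.

import numpy

# Sketch only: row-wise portrait normalization,
# relative error = (rms - median(rms)) / median(rms) for one variable row.
rms_row = numpy.ma.array([1.2, 0.8, 1.0, 1.e20], mask=[0, 0, 0, 1])  # one missing model masked
med = numpy.ma.median(rms_row)        # 1.0
rel_row = (rms_row - med) / med
print(rel_row)  # roughly [0.2 -0.2 0.0 --]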