def getLevs(dat, zmin=0, zmax=100000, convertPressureUnit=None):
    """Return the vertical levels of CliMAF object DAT lying in [zmin, zmax]
    as a comma-separated string of integer values, or None when no recognized
    vertical axis is present or no level falls in the range.

    convertPressureUnit may be 'hPaToPa' or 'PaTohPa' to rescale each
    selected level before formatting.  Note that the [zmin, zmax] selection
    is applied BEFORE the conversion, i.e. in the file's native unit.
    """
    from Scientific.IO.NetCDF import NetCDFFile as ncf
    filename = cfile(dat)  # materialize DAT as a NetCDF file
    fileobj = ncf(filename)
    try:
        # Identify the vertical axis among common names; as in the original
        # scan, the last matching variable wins.
        levname = None
        for varname in fileobj.variables:
            if varname in ['level', 'levels', 'lev', 'levs', 'depth',
                           'deptht', 'DEPTH', 'DEPTHT', 'plev']:
                levname = varname
        if levname is None:
            # Fix: previously fell through and crashed on variables[None]
            return None
        selected = []
        for lev in fileobj.variables[levname].getValue():
            if zmin <= lev <= zmax:
                if convertPressureUnit == 'hPaToPa':
                    lev = lev * 100
                elif convertPressureUnit == 'PaTohPa':
                    lev = lev / 100
                selected.append(str(int(lev)))
        # Fix: join instead of quadratic string concatenation; empty -> None
        return ','.join(selected) if selected else None
    finally:
        # Fix: the file handle was never closed
        fileobj.close()
def getLevs(dat, zmin, zmax, convertPressureUnit=None):
    """Return the vertical levels of DAT lying in [zmin, zmax] as a
    comma-separated string of integer values, or None when no recognized
    vertical axis is present or no level falls in the range.

    This variant reads the file name from dat.baseFiles().
    convertPressureUnit may be 'hPaToPa' or 'PaTohPa'; the [zmin, zmax]
    selection is applied BEFORE the conversion, in the file's native unit.
    """
    from Scientific.IO.NetCDF import NetCDFFile as ncf
    filename = dat.baseFiles()
    fileobj = ncf(filename)
    try:
        # Identify the vertical axis among common names; the last matching
        # variable wins, as in the original scan order.
        levname = None
        for varname in fileobj.variables:
            if varname in ["level", "levels", "lev", "levs", "depth",
                           "deptht", "DEPTH", "DEPTHT", "plev"]:
                levname = varname
        if levname is None:
            # Fix: previously fell through and crashed on variables[None]
            return None
        selected = []
        for lev in fileobj.variables[levname].getValue():
            if zmin <= lev <= zmax:
                if convertPressureUnit == "hPaToPa":
                    lev = lev * 100
                elif convertPressureUnit == "PaTohPa":
                    lev = lev / 100
                selected.append(str(int(lev)))
        # Fix: join instead of quadratic string concatenation; empty -> None
        return ",".join(selected) if selected else None
    finally:
        # Fix: the file handle was never closed
        fileobj.close()
def fileHasVar(filename, varname):
    """ returns True if FILENAME has variable VARNAME """
    from Scientific.IO.NetCDF import NetCDFFile as ncf
    clogger.debug("opening " + filename)
    try:
        fileobj = ncf(filename)
    except Exception:
        # Fix: was a bare 'except:', which also swallowed SystemExit and
        # KeyboardInterrupt
        clogger.error("Issue opening file " + filename)
        return False
    try:
        # Membership test on the variables mapping replaces the manual loop
        return varname in fileobj.variables
    finally:
        # Fix: close on every path (was leaked if an error occurred after
        # the open succeeded)
        fileobj.close()
def varOfFile(filename):
    """ returns the name of the unique non-dimension variable in NetCDF file
    FILENAME, or None if it is not unique """
    from Scientific.IO.NetCDF import NetCDFFile as ncf
    varname = None
    fileobj = ncf(filename)
    try:
        for filevar in fileobj.variables:
            # Skip dimension variables, time helper variables and *_bnds
            if (filevar in fileobj.dimensions or
                    re.findall("^time_", filevar) or
                    re.findall("_bnds$", filevar)):
                continue
            if varname is None:
                varname = filevar
            else:
                clogger.debug("Got at least two variables (%s and %s) "
                              "and no direction to choose - File is %s" %
                              (varname, filevar, filename))
                return None
    finally:
        # Fix: the original leaked the file handle when returning early
        # on the 'two variables' path
        fileobj.close()
    return varname
def cread(datafile, varname=None):
    """Display DATAFILE if it is a PNG, or read variable VARNAME (defaulting
    to the file's unique variable) from a NetCDF DATAFILE and return it as a
    numpy masked array.  Returns None for an empty path, an unresolvable
    variable name, or an unsupported file type.
    """
    import re
    if not datafile:
        return None
    if re.findall(r"\.png$", datafile):
        # Fix: "&" was passed to 'display' as a bogus extra file argument;
        # Popen without a shell is already asynchronous, so no backgrounding
        # token is needed.  Also escape the '.' so e.g. "foo_png" no longer
        # matches.
        subprocess.Popen(["display", datafile])
    elif re.findall(r"\.nc$", datafile):
        clogger.debug("reading NetCDF file %s" % datafile)
        if varname is None:
            varname = varOfFile(datafile)
        if varname is None:
            return None
        from Scientific.IO.NetCDF import NetCDFFile as ncf
        fileobj = ncf(datafile)
        import numpy.ma
        try:
            var = fileobj.variables[varname]
            # Note taken from the CDOpy developper : .data is not backwards
            # compatible to old scipy versions, [:] is
            data = var[:]
            # Fix: guard the _FillValue access - not every variable has one
            if hasattr(var, '_FillValue'):
                rep = numpy.ma.array(data, mask=data == var._FillValue)
            else:
                rep = numpy.ma.array(data)
        finally:
            fileobj.close()
        return rep
    else:
        clogger.error("cannot yet handle %s" % datafile)
        return None
# NOTE(review): this chunk appears to be the body of a loop over file indices
# 'fnum' (defined outside this view); 'rcmre', 'labels', 'omitcru', 'varname',
# 'ncf' and 'ma' also come from earlier in the script.
fname = sys.argv[4+fnum]
m = rcmre.search(fname)
if m:
    # RCM file: model label and year range are encoded in the file name
    label = m.group(1)
    year0 = int(m.group(2))  # NOTE(review): year0 is unused here - presumably used later
    year1 = int(m.group(3))
elif fname.startswith('ERAINT'):
    label = 'ERAINT'
else:
    # Anything else is taken to be the CRU observational data set;
    # remember its position for later reference
    label = 'CRU'
    CRUnum = fnum
if not omitcru or label != 'CRU':
    labels.append(label)
    file = ncf(fname,'r')  # NOTE(review): 'file' shadows the builtin name
    var = file.variables[varname]
    vardata = var.getValue()
    if label == 'regcm411' and varname!='RT':
        # regcm411 output carries an extra axis: keep only the first level
        vardata = var[:,0]
    else:
        # NOTE(review): redundant - vardata was already read two lines above
        vardata = var.getValue()
    # Mask fill values, whichever attribute convention the file uses
    if hasattr(var,'missing_value'):
        fillval = var.missing_value
        maskeddata = ma.masked_where(vardata==fillval,vardata)
    elif hasattr(var,'_FillValue'):
        fillval = var._FillValue
        maskeddata = ma.masked_where(vardata==fillval,vardata)
    # NOTE(review): if the variable has neither attribute, 'maskeddata' stays
    # undefined (or stale from a previous iteration) - TODO confirm all
    # inputs carry a fill attribute
    if label == 'CRU':
        # The CRU mask is kept as the common mask for all data sets
        globalmask = maskeddata.mask
# Region-dependent plot settings, selected by the region name passed as the
# first command-line argument ('eur' or 'ce'): contour levels per variable.
levels={ 'eur': {'t2avg':np.arange(-5,6,0.5), 't2max':np.arange(-5,6,0.5), 't2min':np.arange(-5,6,0.5),'pcpavg':np.arange(-5,6,0.5)},
# 'eur': {'t2avg':np.arange(-2.5,2.6,0.5), 't2max':np.arange(-2.5,2.6,0.5), 't2min':np.arange(-2.5,2.6,0.5),'pcpavg':np.arange(-5,6,0.5)},
         'ce': {'t2avg':np.arange(-5,6,0.5), 't2max':np.arange(-5,6,0.5), 't2min':np.arange(-5,6,0.5),'pcpavg':np.arange(-5,6,0.5)}
       }[sys.argv[1]]
bufferzonewidth={'ce':55,'eur':20}[sys.argv[1]]  # edge zone to exclude (presumably in grid points) - TODO confirm
outpostfix=sys.argv[1]
parallelsdis={'ce':5.,'eur':10}[sys.argv[1]]  # spacing of drawn parallels (degrees)
meridiansdis={'ce':5.,'eur':10}[sys.argv[1]]  # spacing of drawn meridians (degrees)
# Build the name of a 'header' file from which projection metadata is read
# ('inprefix', 'year1', 'year2', 'ncvars', 'timespan' come from earlier code).
if year1==year2:
    headerfilename='%s.%d.%s.%s.nc' % (inprefix, year1, ncvars[0], timespan)
else:
    headerfilename='%s.%d-%d.%s.%s.nc' % (inprefix, year1, year2, ncvars[0], timespan)
ff = ncf(headerfilename,'r')
# Disabled alternative: cylindrical Basemap straight from the lon/lat axes
"""
xlon = headerfile.variables['longitude'].getValue()
xlat = headerfile.variables['latitude'].getValue()
m = Basemap(projection='cyl',llcrnrlat=xlat[0], urcrnrlat=xlat[-1], llcrnrlon=xlon[0], urcrnrlon=xlon[-1], resolution='l')
"""
# Projection parameters stored as global attributes of the header file
lat_0 = float(ff.latitude_of_projection_origin)
lon_0 = float(ff.longitude_of_projection_origin)
lat_1 = float(ff.standard_parallel[0])
lat_2 = float(ff.standard_parallel[1])
grdis = float(ff.grid_size_in_meters)  # grid spacing in meters
nx = int(ff.dimensions['x'])
# Configuration for the CORDEX-Africa seasonal-average plots:
# input file name prefix, fixed year range, CDO time-span operator name,
# and the seasons in plotting order.
inprefix='CORDEX-Africa-sec-noemiss-CRU-af'
year1=1990
year2=1990
timespan='yseasavg'
seasons = ('DJF','MAM','JJA','SON')
# Region-dependent contour levels, selected by the first command-line argument
levels={ 'eur': {'t2avg':range(-10,11), 't2max':range(-10,11), 't2min':range(-10,11),'pcpavg':range(-10,10)},
# 'eur': {'t2avg':np.arange(-2.5,2.6,0.5), 't2max':np.arange(-2.5,2.6,0.5), 't2min':np.arange(-2.5,2.6,0.5),'pcpavg':np.arange(-5,6,0.5)},
         'ce': {'t2avg':range(-5,6), 't2max':range(-8,9), 't2min':range(-5,6),'pcpavg':range(-3,4)}
       }[sys.argv[1]]
bufferzonewidth={'ce':55,'eur':12}[sys.argv[1]]  # edge zone to exclude (presumably in grid points) - TODO confirm
outpostfix=sys.argv[1]
parallelsdis={'ce':5.,'eur':10}[sys.argv[1]]  # spacing of drawn parallels (degrees)
meridiansdis={'ce':5.,'eur':10}[sys.argv[1]]  # spacing of drawn meridians (degrees)
# Read lon/lat axes from the header file (second command-line argument) and
# set up a cylindrical Basemap covering the data domain
headerfile = ncf(sys.argv[2],'r')
xlon = headerfile.variables['lon'].getValue()
xlat = headerfile.variables['lat'].getValue()
m = Basemap(projection='cyl',llcrnrlat=xlat[0], urcrnrlat=xlat[-1], llcrnrlon=xlon[0], urcrnrlon=xlon[-1], resolution='l')
x,y = m(xlon,xlat)  # map coordinates of the grid
# Build one input file name per variable ('ncvars' comes from earlier code);
# NOTE(review): the loop body continues beyond this chunk.
for v in ncvars:
    if year1==year2:
        infilename='%s.%d.%s.%s' % (inprefix, year1, v, timespan)
    else:
        infilename='%s.%d-%d.%s.%s' % (inprefix, year1, year2, v, timespan)
    # 'RT' files carry a '.perc' (percentage) suffix before the extension
    if v == 'RT':
        infilename += '.perc.nc'
    else:
        infilename += '.nc'
#!/usr/bin/env python from Scientific.IO.NetCDF import NetCDFFile as ncf import sys from datetime import date refdate = date(1949,12,1) year1=int(sys.argv[2]) year2=int(sys.argv[3]) print year1, year2 ff = ncf(sys.argv[1],'a') ff.createDimension('bnds',2) tb = ff.createVariable('time_bnds','d',('time','bnds')) t = ff.variables['time'] tbdata = [] tdata = [] for y in range(year1,year2+1): for m in range(1,13): mp = (m+1)%12 yp = y if mp == 0: mp = 12 if mp == 1: yp += 1 _date1 = date(y,m,1) _date2 = date(yp,mp,1)
def zonmean_interpolation(dat1, dat2=None, vertical_levels=None, cdo_horizontal_grid='r1x90'):
    """ Interpolates the zonal mean field dat1 via two possible ways:
    - either by providing a target zonal field dat2 => dat1 is regridded both horizontally
      and vertically on dat2
    - or by providing a list of vertical levels => dat1 is regridded horizontally on the
      cdo_horizontal_grid (default='r1x90'), and vertically on the list of vertical levels

    The user can provide the vertical levels (in Pa) like this:
       vertical_levels=[100000,85000,50000,20000,...]
       # or
       vertical_levels='100000,85000,50000,20000'

    Before the computations, the function checks the unit of the vertical axis;
    it is converted to Pa if necessary directly in the netcdf file(s) corresponding
    to dat1(2).

    >>> dat = ds(project='CMIP5',model='IPSL-CM5A-LR',variable='ua',period='1980-1985', experiment='historical',table='Amon')
    >>> ref = ds(project='ref_pcmdi',variable='ua',product='ERAINT')
    >>> zonmean_dat = zonmean(time_average(dat))
    >>> zonmean_ref = zonmean(time_average(ref))
    >>> dat_interpolated_on_ref = zonmean_interpolation(zonmean_dat,zonmean_ref)
    >>> dat_interpolated_on_list_of_levels = zonmean_interpolation(zonmean_dat,vertical_levels='100000,85000,50000,20000,10000,5000,2000,1000')
    """
    from climaf.anynetcdf import ncf
    file1 = cfile(dat1)  # materialize dat1 as a NetCDF file
    ncfile1 = ncf(file1)
    # -- First, we check the unit of the vertical dimension of file1
    levname1 = None
    for varname in ncfile1.variables:
        if varname.lower() in [ 'level', 'levels', 'lev', 'levs', 'depth', 'deptht', 'plev' ]:
            levname1 = varname
    if not levname1:
        print 'Name of the vertical axis not found for dat1'
    # NOTE(review): execution continues even when levname1 is None, so the
    # next line would fail with a KeyError - TODO confirm intended behavior
    levunits1 = ncfile1.variables[levname1].units
    if levunits1.lower() in ['hpa', 'millibar', 'mbar', 'hectopascal']:
        # -- Multiply by 100: rewrite the axis in Pa, in place, with NCO tools
        cmd1 = 'ncap2 -As "' + levname1 + '=' + levname1 + '*100" ' + file1 + ' ' + file1
        cmd2 = 'ncatted -O -a units,' + levname1 + ',o,c,Pa ' + file1
        print cmd1
        print cmd2
        os.system(cmd1)
        os.system(cmd2)
        # -> The vertical axis of file1 is now set to Pa
    #
    # -- Second, we check the unit of the vertical dimension of file2
    if dat2:
        file2 = cfile(dat2)  # materialize dat2 as a NetCDF file
        ncfile2 = ncf(file2)
        levname2 = None
        for varname in ncfile2.variables:
            if varname.lower() in [ 'level', 'levels', 'lev', 'levs', 'depth', 'deptht', 'plev' ]:
                levname2 = varname
        if not levname2:
            print 'Name of the vertical axis not found for dat2'
        # NOTE(review): same un-guarded access as for dat1 when levname2 is None
        levunits2 = ncfile2.variables[levname2].units
        levValues2 = ncfile2.variables[levname2].getValue()
        if levunits2.lower() in ['hpa', 'millibar', 'mbar', 'hectopascal']:
            # -- Multiply by 100: rewrite the axis in Pa, in place, with NCO tools
            cmd1 = 'ncap2 -As "' + levname2 + '=' + levname2 + '*100" ' + file2 + ' ' + file2
            cmd2 = 'ncatted -O -a units,' + levname2 + ',o,c,Pa ' + file2
            print cmd1
            print cmd2
            os.system(cmd1)
            os.system(cmd2)
            # -> The vertical axis of file2 is now set to Pa in the netcdf file
            scale = 100.0
        else:
            scale = 1.0
        #
        # --> We get the values of the vertical levels of dat2 (from the original file, that's why we apply a scale)
        levels = ''
        for lev in levValues2:
            levels = levels + ',' + str(lev * scale)
        #
        # --> We can now interpolate dat1 on dat2 verticaly and horizontally
        print levels
        regridded_dat1 = ccdo(regrid(dat1, dat2), operator='intlevel' + levels)
    else:
        if vertical_levels:
            # Accept either a python list or an already comma-separated string
            if isinstance(vertical_levels, list):
                levels = ''
                for lev in vertical_levels:
                    levels = levels + ',' + str(lev)
            else:
                levels = ',' + vertical_levels
            regridded_dat1 = ccdo(regridn(dat1, cdogrid=cdo_horizontal_grid), operator='intlevel' + levels)
        else:
            print '--> Provide a list of vertical levels with vertical_levels'
            # NOTE(review): on this path regridded_dat1 is never assigned and
            # the return below raises NameError - TODO confirm intended
    return regridded_dat1