def dailyAve():
    """Build a day-of-year climatology from TRMM 3B42 daily files.

    For each day-of-year d (1..365), averages the matching daily file
    across the years in ``range(startY, endY)`` with ``ncra``, then
    concatenates the 365 per-day averages and collapses the time axis
    with ``ncwa`` into ``3B42_MAP.nc``.

    Reads ``startY``/``endY`` from the enclosing (module) scope.
    Returns None; output is written as side effect to the CWD.
    """
    import os  # was used via os.path.isfile but never imported in this function
    import datetime
    from nco import Nco

    nco = Nco()
    for d in range(365):
        dp = datetime.date(startY, 1, 1) + datetime.timedelta(d)
        # parenthesized print: identical output on Python 2, valid on Python 3
        print("Averaging TRMM 3B42 for day " + dp.strftime('%j') + "...")
        # one input file per year; NOTE(review): range(startY, endY) excludes
        # endY itself — confirm whether the final year should be included
        ifile = ' '.join("3B42_daily." + str(year) + "." + dp.strftime('%m') +
                         "." + dp.strftime('%d') + ".7.nc"
                         for year in range(startY, endY))
        ofile = "3B42_aver." + dp.strftime('%j') + ".nc"
        # skip days that were already averaged on a previous run
        if not os.path.isfile(ofile):
            nco.ncra(input=ifile, output=ofile)
    # stitch the 365 daily averages together, then average over time
    nco.ncrcat(input="3B42_aver.*.nc", output="3B42_cat.nc",
               options="-d time,1,365")
    nco.ncwa(input="3B42_cat.nc", output="3B42_MAP.nc", options='-N -a time')
    return None
def dailyAve():
    """Compute a 365-day TRMM 3B42 climatology.

    Step 1: for every day of the year, run ``ncra`` over that calendar
    day's files from all years in ``range(startY, endY)``.
    Step 2: ``ncrcat`` the 365 per-day means into one file.
    Step 3: ``ncwa`` the time dimension away to get ``3B42_MAP.nc``.

    ``startY`` and ``endY`` are taken from the enclosing scope.
    """
    from nco import Nco
    import datetime

    nco = Nco()
    jan1 = datetime.date(startY, 1, 1)  # reference date is loop-invariant
    for offset in range(365):
        day = jan1 + datetime.timedelta(offset)
        print("Averaging TRMM 3B42 for day " + day.strftime('%j') + "...")
        # collect one daily file per year, space-separated for ncra
        per_year = []
        for year in range(startY, endY):
            per_year.append("3B42_daily." + str(year) + "." +
                            day.strftime('%m') + "." + day.strftime('%d') +
                            ".7.nc")
        ifile = ' '.join(per_year)
        ofile = "3B42_aver." + day.strftime('%j') + ".nc"
        # only average days that have not been produced yet
        if not os.path.isfile(ofile):
            nco.ncra(input=ifile, output=ofile)
    nco.ncrcat(input="3B42_aver.*.nc", output="3B42_cat.nc",
               options="-d time,1,365")
    nco.ncwa(input="3B42_cat.nc", output="3B42_MAP.nc", options='-N -a time')
    return None
def run(self, latidx, lonidx):
    """Extract one (latidx, lonidx) grid cell from soil tile file(s).

    For each configured input file: spatially aggregate the cell with
    ncwa, add a degenerate 'profile' dimension plus a soil_id variable,
    and rewrite lat/lon to the cell-center coordinates.

    Returns True on success, False on any error (traceback is printed).
    """
    try:
        # configuration lookups; 'delta' is given in arc-minutes
        inputfile_dir = self.config.get_dict(self.translator_type, 'inputfile_dir',
                                             default=os.path.join('..', '..'))
        inputfiles = self.config.get_dict(self.translator_type, 'inputfile',
                                          default='1.soil.tile.nc4')
        inputfiles = param_to_list(inputfiles)
        latdelta, londelta = [double(d) / 60 for d in self.config.get('delta').split(',')]  # convert to degrees
        nco = Nco()
        outputfiles = self.config.get_dict(self.translator_type, 'outputfile',
                                           default=inputs_to_outputs(inputfiles))
        outputfiles = param_to_list(outputfiles)
        inputfiles = apply_prefix(inputfiles, inputfile_dir)
        # process each input/output pair in lockstep
        for i in range(len(inputfiles)):
            inputfile = inputfiles[i]
            outputfile = outputfiles[i]
            # peek at variable names and the soil_id global attribute
            with nc(inputfile) as f:
                variables = f.variables.keys()
                soil_id = f.getncattr('soil_id')
            # get latitude, longitude limits
            # latidx counts down from 90N; lonidx counts up from 180W (1-based)
            minlat = 90 - latdelta * latidx
            maxlat = minlat + latdelta
            minlon = -180 + londelta * (lonidx - 1)
            maxlon = minlon + londelta
            # additional options: average lat/lon within the cell's bounding box
            options = '-h -a lat,lon -d lat,%f,%f -d lon,%f,%f --rd' % (
                minlat, maxlat, minlon, maxlon)
            if 'cropland' in variables:
                # weight the spatial average by the cropland variable when present
                options += ' -w cropland'
            # perform aggregation
            nco.ncwa(input=inputfile, output=outputfile, options=options)
            # add degenerate profile dimension
            nco.ncecat(input=outputfile, output=outputfile,
                       options='-O -h -u profile')
            nco.ncap2(input=outputfile, output=outputfile,
                      options='-O -h -s profile[profile]=1')
            # add soil_id variable (constant 1) carrying the mapping in attributes
            nco.ncap2(input=outputfile, output=outputfile,
                      options='-O -h -s soil_id[profile,lat,lon]=1')
            nco.ncatted(input=outputfile, output=outputfile,
                        options='-O -h -a units,soil_id,c,c,"mapping"')
            nco.ncatted(input=outputfile, output=outputfile,
                        options='-O -h -a long_name,soil_id,c,c,"%s"' % str(soil_id))
            # drop the now-redundant global soil_id attribute
            nco.ncatted(input=outputfile, output=outputfile,
                        options='-O -h -a soil_id,global,d,c,""')
            # change latitude, longitude to simulated point (cell center)
            with nc(outputfile, 'a') as f:
                latv = f.variables['lat']
                latv[:] = 90 - latdelta * (latidx - 0.5)
                lonv = f.variables['lon']
                lonv[:] = -180 + londelta * (lonidx - 0.5)
        return True
    except:
        # NOTE(review): bare except deliberately converts any failure into a
        # False return for the pipeline driver; traceback goes to stdout
        print "[%s]: %s" % (os.path.basename(__file__), traceback.format_exc())
        return False
def run(self, latidx, lonidx):
    """Extract one (latidx, lonidx) cell from climate tile file(s).

    Optionally slices the time axis of the first file to the configured
    reference-year window, selects a subset of variables, and spatially
    averages the cell with ncwa before rewriting lat/lon to the cell
    center. Returns True on success, False on any error.
    """
    try:
        # configuration; 'delta' is in arc-minutes
        inputfile_dir = self.config.get_dict(self.translator_type, 'inputfile_dir',
                                             default=os.path.join('..', '..'))
        inputfiles = self.config.get_dict(self.translator_type, 'inputfile')
        variables = self.config.get_dict(self.translator_type, 'variables',
                                         default=None)
        slicefirst = self.config.get_dict(self.translator_type, 'slicefirst',
                                          default=False)
        latdelta, londelta = [double(d) / 60 for d in self.config.get('delta').split(',')]  # convert to degrees
        refyear = self.config.get('ref_year')
        numyears = self.config.get('num_years')
        # explicit inputs take precedence; otherwise glob the tile directory
        if inputfiles:
            inputfiles = param_to_list(inputfiles)
            inputfiles = apply_prefix(inputfiles, inputfile_dir)
        else:
            inputfiles = list_tiles(os.path.join(inputfile_dir, '*.clim.tile.nc4'))
        outputfiles = self.config.get_dict(self.translator_type, 'outputfile',
                                           default=inputs_to_outputs(inputfiles))
        outputfiles = param_to_list(outputfiles)
        if variables:
            variables = variables.split(',')
        nco = Nco()
        for i in range(len(inputfiles)):
            inputfile = inputfiles[i]
            outputfile = outputfiles[i]
            with nc(inputfile) as f:
                lats, lons = f.variables['lat'][:], f.variables['lon'][:]
                varkeys = array(f.variables.keys())
                flatdelta = abs(diff(lats)[0])  # assume uniform grid
                flondelta = abs(diff(lons)[0])
                # only the FIRST file's time axis is sliced (slicefirst flag)
                if slicefirst and not i:
                    time = f.variables['time'][:].astype(int)  # convert to integers
                    # reference year parsed from the time units string,
                    # e.g. "days since YYYY-..."
                    refyeari = int(findall(r'\d+', f.variables['time'].units)[0])
                    ndays0 = (datetime(refyear, 1, 1) -
                              datetime(refyeari, 1, 1)).days
                    ndays1 = (datetime(refyear + numyears, 12, 31) -
                              datetime(refyeari, 1, 1)).days
                    # fall back to full range if the day offsets are absent
                    tidx0 = where(time == ndays0)[0][0] if ndays0 in time else 0
                    tidx1 = where(time == ndays1)[0][0] if ndays1 in time else len(time) - 1
            # if the file grid is coarser than the simulation grid, sample the
            # cell-center point; otherwise take the cell's bounding box
            if flatdelta > latdelta:
                minlat = 90 - latdelta * (latidx - 0.5)
                maxlat = minlat
            else:
                minlat = 90 - latdelta * latidx
                maxlat = minlat + latdelta
            if flondelta > londelta:
                minlon = -180 + londelta * (lonidx - 0.5)
                maxlon = minlon
            else:
                minlon = -180 + londelta * (lonidx - 1)
                maxlon = minlon + londelta
            # lookup variables: map each requested name to actual file variable(s)
            if variables:
                varnames = [0] * len(variables)
                for j in range(len(variables)):
                    idx = foundVar(varkeys, variables[j])
                    if not len(idx):
                        raise Exception('Variable %s not found in file %s' % (
                            variables[j], inputfile))
                    varnames[j] = str(','.join(varkeys[idx]))
            # basic options
            options = '-h -a lat,lon -d lat,%f,%f -d lon,%f,%f --rd' % (
                minlat, maxlat, minlon, maxlon)
            # select time (only when the window differs from the full range)
            if slicefirst and not i and (tidx0 != 0 or tidx1 != len(time) - 1):
                options += ' -d time,%d,%d' % (tidx0, tidx1)
            # select variables
            if variables:
                options += ' -v %s' % ','.join(varnames)
            # average over space: weight by cropland when the box spans
            # more than one grid point in either direction
            if logical_and(lats >= minlat, lats <= maxlat).sum() > 1 or logical_and(
                    lons >= minlon, lons <= maxlon).sum() > 1:
                options += ' -w cropland'
            # average over latitude, longitude
            nco.ncwa(input=inputfile, output=outputfile, options=options)
            # change latitude, longitude to simulated point (cell center)
            with nc(outputfile, 'a') as f:
                latv = f.variables['lat']
                latv[:] = 90 - latdelta * (latidx - 0.5)
                lonv = f.variables['lon']
                lonv[:] = -180 + londelta * (lonidx - 0.5)
        return True
    except:
        # NOTE(review): bare except converts any failure into a False return
        # for the pipeline driver; traceback is printed to stdout
        print "[%s]: %s" % (os.path.basename(__file__), traceback.format_exc())
        return False
from cdo import Cdo
from nco import Nco
import cartopy.crs as ccrs
import cartopy.feature as cfeature
import matplotlib.pyplot as plt
import numpy as np
import xarray as xr
import sys
import metpy.calc as mpcalc
from metpy.interpolate import cross_section

# Pipeline: time-mean each input with CDO, then use NCO to strip the
# 'bnds' dimension and drop the time_bnds variable from the result.
cdo = Cdo()
cdo.timmean(input='PFile.nc', output='AFile.nc')
nco = Nco()
nco.ncwa(input='AFile.nc', output='BFile.nc', options=['-a', 'bnds'])
nco.ncks(input='BFile.nc', output='CFile.nc',
         options=['-C', '-O', '-x', '-v', 'time_bnds'])
# Same three steps for the relative-humidity file.
cdo.timmean(input='rhumFile.nc', output='dFile.nc')
nco.ncwa(input='dFile.nc', output='eFile.nc', options=['-a', 'bnds'])
nco.ncks(input='eFile.nc', output='fFile.nc',
         options=['-C', '-O', '-x', '-v', 'time_bnds'])
# NOTE(review): Dataset is not imported anywhere in this file's visible
# imports — presumably netCDF4.Dataset; confirm and add the import.
fFile = Dataset('fFile.nc', 'r')
rhum = fFile.variables['rhum'][:]
# Pad the level axis (axis=1) with 9 extra all-zero levels.
# NOTE(review): the (1, 9, 73, 144) shape assumes a fixed
# time/level/lat/lon grid — verify against the input data.
z = np.zeros((1, 9, 73, 144), dtype=rhum.dtype)
c = np.concatenate((rhum, z), axis=1)
logger.info("merge PR, TAS, and SMB") nco.ncks(input=smb_merged_file_time_mean, output=merged_file_time_mean, overwrite=True) nco.ncks(input=pr_merged_file_time_mean, output=merged_file_time_mean, append=True) nco.ncks(input=tas_merged_file_time_mean, output=merged_file_time_mean, append=True) # add topo file logger.info("removing height dimension of topo file") topo_file_tmp_1 = "tmp1_topo_geog.nc" topo_file_tmp_2 = "tmp2_topo_geog.nc" nco.ncwa(input=topo_file, output=topo_file_tmp_1, average="height") nco.ncwa(input=topo_file_tmp_1, output=topo_file_tmp_2, average="time") logger.info("renaming variables and dimensions") rDict = {"var6": "usurf"} dDict = {"x": "rlon", "y": "rlat"} nco.ncrename( input=topo_file_tmp_2, options=[c.Rename("variable", rDict), c.Rename("dimension", dDict)]) logger.info("add topo file {} to {}".format(topo_file, merged_file_time_mean)) nco.ncks(input=topo_file_tmp_2, output=merged_file_time_mean, append=True, variable="usurf") opt = [