def splice_historical(self, ssp, single_member=True):
    # Concatenate historical and future (SSP) runs along the time axis
    if single_member:
        func = self.single_member_ensemble
        hist = func("historical")
        all_data = MV.concatenate((hist, func(ssp)), axis=1)
        tax = cdms.createAxis(np.arange(all_data.shape[1]))
        tax.units = hist.getTime().units
        tax.id = 'time'
        tax.designateTime()
        all_data.setAxis(1, tax)
        cdutil.setTimeBoundsMonthly(all_data)
        all_data.setAxis(0, hist.getAxis(0))
        all_data.id = hist.id
    else:
        self.get_ensemble("historical")
        self.get_ensemble(ssp)
        hdata = getattr(self, "historical")
        sspdata = getattr(self, ssp)
        all_data = splice_data(hdata, sspdata)
        cdutil.setTimeBoundsMonthly(all_data)
    return all_data
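A minimal usage sketch for the splicer above; the `TimeSeries` class name, its constructor arguments, and the "ssp585" label are hypothetical stand-ins for whatever ensemble wrapper actually defines splice_historical:

# Hypothetical usage sketch: `TimeSeries` and its arguments are illustrative only.
ens = TimeSeries(variable="pr", region="global")
spliced = ens.splice_historical("ssp585", single_member=True)
print(spliced.shape)  # (nmodels, n_historical_months + n_ssp_months)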
def testContiguousRegridNANIssue(self):
    a = MV2.reshape(MV2.sin(MV2.arange(20000)), (2, 1, 100, 100))
    lon = cdms2.createAxis(MV2.arange(100) * 3.6)
    lon.designateLongitude()
    lon.units = "degrees_east"
    lon.id = "longitude"
    lat = cdms2.createAxis(MV2.arange(100) * 1.8 - 90.)
    lat.id = "latitude"
    lat.designateLatitude()
    lat.units = "degrees_north"
    lev = cdms2.createAxis([1000.])
    lev.id = "plev"
    lev.designateLevel()
    lev.units = "hPa"
    t = cdms2.createAxis([0, 31.])
    t.id = "time"
    t.designateTime()
    t.units = "days since 2014"
    cdutil.setTimeBoundsMonthly(t)
    a.setAxisList((t, lev, lat, lon))
    a = MV2.masked_less(a, .5)
    grd = cdms2.createGaussianGrid(64)
    a = a.ascontiguous()
    a = a.regrid(grd, regridTool="regrid2")
    a = cdutil.averager(a, axis='txy')
    self.assertEqual(a[0], 0.7921019540305255)
def get_obs_tas_and_precip(from_scratch=False):
    if from_scratch:
        fpr = cdms.open("OBS/gpcc.precip.mon.total.v7.nc")
        pro = fpr("precip", time=("1981-1-1", "2010-12-31"))
        cdutil.setTimeBoundsMonthly(pro)
        proa = cdutil.ANNUALCYCLE.climatology(pro)
        f = cdms.open("OBS/air.mon.ltm.v401.nc")
        taso = f("air")
        f.close()
        fpr.close()
        # Mask each field where the other is missing so the two climatologies agree
        tasoa = MV.masked_where(proa.mask, taso)
        proa = MV.masked_where(tasoa.mask, proa)
        proa.id = "pr"
        tasoa.id = "tas"
        fw = cdms.open("OBS/UDel_GPCC_climatologies_1981_2010.nc", "w")
        tasoa.setAxis(0, proa.getTime())
        fw.write(tasoa)
        fw.write(proa)
        fw.close()
    else:
        fr = cdms.open("OBS/UDel_GPCC_climatologies_1981_2010.nc")
        tasoa = fr("tas")
        proa = fr("pr")
        fr.close()
    return tasoa, proa
def dictionary_ensemble_average(d, grid=None):
    # Find the model with the coarsest grid and use it as the common target
    if grid is None:
        shape = 1.e20
        for m in d.keys():
            gridsize = d[m].shape[-1] * d[m].shape[-2]
            if gridsize < shape:
                shape = gridsize
                themodel = m
        coarsest_grid = d[themodel].getGrid()
    else:
        coarsest_grid = grid
    # Common time period shared by all models
    allstop = str(np.min([cmip5.stop_time(d[m]).year for m in d.keys()])) + "-12-31"
    allstart = str(np.max([cmip5.start_time(d[m]).year for m in d.keys()])) + "-1-1"
    standardize = lambda data: data(time=(allstart, allstop)).regrid(
        coarsest_grid, regridTool='regrid2')
    counter = 0
    goodmodels = list(d)
    L = len(goodmodels)
    for m in d.keys():
        # Average over individual ensemble members, then put on the common grid
        modeldata = standardize(MV.average(d[m], axis=0))
        if counter == 0:
            MME = MV.zeros((L, ) + modeldata.shape)
        MME[counter] = modeldata
        counter += 1
    modax = cmip5.make_model_axis(list(d))
    axlist = [modax] + modeldata.getAxisList()
    MME.setAxisList(axlist)
    cdutil.setTimeBoundsMonthly(MME)
    #MME.id=variable
    return MME
def standardize_zscore(self, alldata):
    # Compute monthly means and standard deviations from the preindustrial control run
    self.get_ensemble("piControl")
    piC = self.piControl
    cdutil.setTimeBoundsMonthly(piC)
    npiCmodels, npiCt = piC.shape
    mu = np.ma.zeros((npiCmodels, 12))
    sigma = np.ma.zeros((npiCmodels, 12))
    for i in range(12):
        mu[:, i] = np.ma.average(piC[:, i::12], axis=1)
        sigma[:, i] = np.ma.std(piC[:, i::12], axis=1)
    pmodels = [x.split(".")[-3] for x in cmip5.models(piC)]
    if len(cmip5.models(alldata)[0].split(".")) == 1:
        emodels = cmip5.models(alldata)
    else:
        emodels = [x.split(".")[-3] for x in cmip5.models(alldata)]
    nmembers = len(emodels)
    Z = np.zeros_like(alldata)
    for ens_i in range(nmembers):
        # Standardize each member against its own model's piControl statistics
        model = emodels[ens_i]
        corr_piC = pmodels.index(model)
        mu_piC = mu[corr_piC]
        sigma_piC = sigma[corr_piC]
        for month_i in range(12):
            Z[ens_i, month_i::12] = (alldata[ens_i, month_i::12] -
                                     mu_piC[month_i]) / sigma_piC[month_i]
    Z = cmip5.cdms_clone(Z, alldata)
    Z.id = alldata.id
    return Z
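The month-wise standardization above amounts to Z[t] = (x[t] - mu[month(t)]) / sigma[month(t)], with mu and sigma taken from the matching model's piControl run. A self-contained numpy sketch of that inner step (synthetic data, no cdms dependency):

import numpy as np

# Sketch of the per-calendar-month z-score; the random series stands in for one
# ensemble member, whose own control run would normally supply mu and sigma.
x = np.random.randn(240)  # 20 years of monthly values
months = np.arange(x.size) % 12
mu = np.array([x[months == m].mean() for m in range(12)])
sigma = np.array([x[months == m].std() for m in range(12)])
z = (x - mu[months]) / sigma[months]  # anomalies in units of control-run sigma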
def convert_to_percentage(self, alldata):
    # Express anomalies as a percentage of the piControl annual-cycle climatology
    self.get_ensemble("piControl")
    piC = self.piControl
    cdutil.setTimeBoundsMonthly(piC)
    npiCmodels, npiCt = piC.shape
    ac = cdutil.ANNUALCYCLE.climatology(piC)
    pmodels = [x.split(".")[-3] for x in cmip5.models(piC)]
    if len(cmip5.models(alldata)[0].split(".")) == 1:
        emodels = cmip5.models(alldata)
    else:
        emodels = [x.split(".")[-3] for x in cmip5.models(alldata)]
    nmembers = len(emodels)
    Z = np.zeros_like(alldata)
    for ens_i in range(nmembers):
        model = emodels[ens_i]
        corr_piC = pmodels.index(model)
        ac_piC = ac[corr_piC]
        for month_i in range(12):
            Z[ens_i, month_i::12] = (alldata[ens_i, month_i::12] -
                                     ac_piC[month_i]) / ac_piC[month_i]
    Z = cmip5.cdms_clone(Z * 100., alldata)
    Z.id = alldata.id
    return Z
def ensemble_average(self, experiment):
    self.get_ensemble(experiment)
    data = getattr(self, experiment)
    nens, ntime = data.shape
    #models=sorted(self.ensemble_dict.keys())
    models = get_ok_models(self.region)
    nmod = len(models)
    # print("Number of models is", nmod)
    EnsembleAverage = np.ma.zeros((nmod, ntime)) + 1.e20
    fnames = np.array(get_ensemble_filenames(self.variable, self.region, experiment))
    counter = 0
    for model in models:
        #fnames=np.array(get_ensemble_filenames(self.variable,self.region,experiment))
        I = np.where([x.split(".")[2] == model for x in fnames])[0]
        if len(I) > 0:
            EnsembleAverage[counter] = np.ma.average(data.asma()[I], axis=0)
        else:
            if self.verbose:
                print("missing data for " + model + " " + self.variable + " " + experiment)
        counter += 1
    EnsembleAverage = MV.masked_where(np.abs(EnsembleAverage) > 1.e10, EnsembleAverage)
    EnsembleAverage = MV.masked_where(np.isnan(EnsembleAverage), EnsembleAverage)
    EnsembleAverage = MV.array(EnsembleAverage)
    EnsembleAverage.setAxis(1, data.getTime())
    modax = cmip5.make_model_axis(models)
    EnsembleAverage.setAxis(0, modax)
    cdutil.setTimeBoundsMonthly(EnsembleAverage)
    return EnsembleAverage
def single_member_ensemble(self, experiment):
    """Get a single member from each ensemble"""
    self.get_ensemble(experiment)
    data = getattr(self, experiment)
    nens, ntime = data.shape
    #models=sorted(self.ensemble_dict.keys())
    models = get_ok_models(self.region)
    nmod = len(models)
    SingleMember = np.ma.zeros((nmod, ntime)) + 1.e20
    fnames = sorted(get_ensemble_filenames(self.variable, self.region, experiment))
    counter = 0
    for model in models:
        fnames = np.array(get_ensemble_filenames(self.variable, self.region, experiment))
        I = np.where([x.split(".")[2] == model for x in fnames])[0]
        if len(I) > 0:
            first_member = I[0]
            SingleMember[counter] = data.asma()[first_member]
        else:
            if self.verbose:
                print("missing data for " + model + " " + self.variable + " " + experiment)
        counter += 1
    SingleMember = MV.masked_where(np.abs(SingleMember) > 1.e10, SingleMember)
    SingleMember = MV.masked_where(np.isnan(SingleMember), SingleMember)
    # SingleMember=MV.array(SingleMember)
    SingleMember.setAxis(1, data.getTime())
    modax = cmip5.make_model_axis(models)
    SingleMember.setAxis(0, modax)
    cdutil.setTimeBoundsMonthly(SingleMember)
    return SingleMember
def opendap_ensemble(model, variable, experiment):
    rips = get_rips_opendap(model, variable, experiment)
    L = len(rips)
    i = 0
    ens_member = opendap_data(model, variable, experiment, rips[i])
    ENS = MV.zeros((L, ) + ens_member.shape) + 1.e20
    ENS[i] = ens_member
    if L > 1:
        for i in range(1, L):
            try:
                ens_member = opendap_data(model, variable, experiment, rips[i])
                ENS[i] = ens_member
            except Exception:
                print("problem downloading ", model + "." + rips[i])
    ENS = MV.masked_where(ENS > 1.e10, ENS)
    fnames_rip = [
        variable + "." + experiment + "." + model + "." + rip for rip in rips
    ]
    modax = cmip5.make_model_axis(fnames_rip)
    axlist = [modax] + ens_member.getAxisList()
    ENS.id = variable
    ENS.setAxisList(axlist)
    cdutil.setTimeBoundsMonthly(ENS)
    return ENS
def testDJFCriteria(self):
    data = [1, ] * 12 + [2, ] * 12
    print(data)
    months = list(range(24))
    t = cdms2.createAxis(months)
    t.designateTime()
    t.units = "months since 2014"
    cdutil.setTimeBoundsMonthly(t)
    data = numpy.array(data)
    data = MV2.array(data)
    data.setAxis(0, t)
    print(t.asComponentTime())
    djf = cdutil.times.DJF(data)
    djfc = cdutil.times.DJF.climatology(data)
    print(djf)
    self.assertTrue(numpy.allclose(djf[0], 1.)
                    and numpy.allclose(djf[1], 1.6666667)
                    and numpy.allclose(djf[2], 2.))
    print(djfc)
    self.assertTrue(numpy.allclose(djfc, 1.625))
    djf = cdutil.times.DJF(data, criteriaarg=[.5, None])
    djfc = cdutil.times.DJF.climatology(data, criteriaarg=[.5, None])
    print(djf)
    self.assertTrue(numpy.ma.allclose(djf[0], 1.)
                    and numpy.ma.allclose(djf[1], 1.6666667)
                    and numpy.ma.allclose(djf[2], numpy.ma.masked))
    print(djfc)
    self.assertTrue(numpy.allclose(djfc, 1.4))
def cru_jja():
    f = cdms.open("../DROUGHT_ATLAS/scPDSI.cru_ts3.26early.bams2018.GLOBAL.1901.2017.nc")
    cru = f("scpdsi")
    cru.getTime().units = 'days since 1900-1-1'
    cdutil.setTimeBoundsMonthly(cru)
    cru_jja = cdutil.JJA(cru)
    cru_jja = MV.masked_where(np.abs(cru_jja) > 1000, cru_jja)
    fgrid = cdms.open("OBS/gpcp.precip.mon.mean.nc")
    gpcp_grid = fgrid("precip").getGrid()
    fgrid.close()
    cru2 = cru_jja.regrid(gpcp_grid, regridTool='regrid2')
    cru2.id = "pdsi"
    for att in cru.attributes.keys():
        setattr(cru2, att, cru.attributes[att])
    fw = cdms.open("../DROUGHT_ATLAS/OBSERVATIONS/CRU_selfcalibrated.nc", "w")
    fw.write(cru2)
    fw.close()
    f.close()
    return cru2
def testAnnualSeasonalAverage(self):
    f = cdms2.open(self.filename, "r")
    # Read in the raw data EXCLUDING a leap year
    obs_timeseries1 = f('obs', time=slice(0, 48))  # 1900.1 to 1903.12
    # Read in the raw data INCLUDING a leap year
    obs_timeseries2 = f('obs', time=slice(0, 60))  # 1900.1 to 1904.12; 1904 is a leap year

    # Truncate the first Jan/Feb and the last Dec before computing the annual-cycle
    # anomaly (so the DJF seasonal means later are over complete seasons)
    obs_timeseries1 = obs_timeseries1[2:-1]
    obs_timeseries2 = obs_timeseries2[2:-1]

    # Set monthly time bounds
    cdutil.setTimeBoundsMonthly(obs_timeseries1)
    cdutil.setTimeBoundsMonthly(obs_timeseries2)

    # Remove the annual cycle
    obs_timeseries_ano1 = cdutil.ANNUALCYCLE.departures(obs_timeseries1)
    obs_timeseries_ano2 = cdutil.ANNUALCYCLE.departures(obs_timeseries2)

    # Calculate the time averages
    obs_timeseries_ano_timeave1 = cdutil.averager(
        obs_timeseries_ano1, axis='t')  # This should be zero, and it is
    obs_timeseries_ano_timeave2 = cdutil.averager(
        obs_timeseries_ano2, axis='t')  # This should be zero BUT it is NOT

    # SEASONAL MEAN TEST
    obs_timeseries_ano1_DJF = cdutil.DJF(obs_timeseries_ano1, criteriaarg=[0.95, None])
    obs_timeseries_ano2_DJF = cdutil.DJF(obs_timeseries_ano2, criteriaarg=[0.95, None])
    obs_timeseries_ano1_JJA = cdutil.JJA(obs_timeseries_ano1, criteriaarg=[0.95, None])
    obs_timeseries_ano2_JJA = cdutil.JJA(obs_timeseries_ano2, criteriaarg=[0.95, None])

    # Calculate the time averages of the seasonal anomalies
    obs_timeseries_ano1_DJF_timeave = cdutil.averager(
        obs_timeseries_ano1_DJF, axis='t')  # This should be zero, and it is
    obs_timeseries_ano2_DJF_timeave = cdutil.averager(
        obs_timeseries_ano2_DJF, axis='t')  # This should be zero BUT it is NOT
    obs_timeseries_ano1_JJA_timeave = cdutil.averager(
        obs_timeseries_ano1_JJA, axis='t')  # This should be zero, and it is
    obs_timeseries_ano2_JJA_timeave = cdutil.averager(
        obs_timeseries_ano2_JJA, axis='t')  # This should be zero, and it is

    numpy.testing.assert_almost_equal(obs_timeseries_ano_timeave2,
                                      obs_timeseries_ano_timeave1, 10)
    numpy.testing.assert_almost_equal(obs_timeseries_ano1_JJA_timeave,
                                      obs_timeseries_ano2_JJA_timeave, 10)
    numpy.testing.assert_almost_equal(obs_timeseries_ano1_DJF_timeave,
                                      obs_timeseries_ano2_DJF_timeave, 10)
def mk_time(offset=0, length=120, units="months since 1800"):
    # `length` (rather than `len`) avoids shadowing the builtin
    t = cdms2.createAxis(numpy.arange(offset, offset + length))
    t.designateTime()
    t.id = 'time'
    t.units = units
    data = MV2.array(numpy.random.random((length)))
    data.setAxis(0, t)
    cdutil.setTimeBoundsMonthly(t)
    return data, t, t.asComponentTime()
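A quick usage sketch of the helper above (the offset shifts the axis start within the calendar year):

# mk_time returns synthetic data, the axis itself, and its component times.
data, t, ctimes = mk_time(offset=6, length=24)  # axis starts at July 1800
print(ctimes[0], ctimes[-1])
print(t.getBounds()[0])  # monthly bounds attached by setTimeBoundsMonthly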
def mk_time(self, offset=0, length=120, units="months since 1800"):
    t = cdms2.createAxis(numpy.arange(offset, offset + length))
    t.designateTime()
    t.id = 'time'
    t.units = units
    data = MV2.array(numpy.random.random((length)))
    data.setAxis(0, t)
    cdutil.setTimeBoundsMonthly(t)
    return data, t, t.asComponentTime()
def read_data_in(
    path,
    var_in_data,
    var_to_consider,
    start_time,
    end_time,
    UnitsAdjust,
    LandMask,
    debug=False,
):
    f = cdms2.open(path)
    data_timeseries = f(var_in_data, time=(start_time, end_time), latitude=(-90, 90))
    cdutil.setTimeBoundsMonthly(data_timeseries)
    # missing data check
    check_missing_data(data_timeseries)
    if UnitsAdjust[0]:
        data_timeseries = getattr(MV2, UnitsAdjust[1])(data_timeseries, UnitsAdjust[2])
    if var_to_consider == "ts" and LandMask:
        # Replace temperature below -1.8 C with -1.8 C (sea ice)
        data_timeseries = sea_ice_adjust(data_timeseries)
    # Check available time window and adjust to whole years if needed
    data_stime = data_timeseries.getTime().asComponentTime()[0]
    data_etime = data_timeseries.getTime().asComponentTime()[-1]
    data_syear = data_stime.year
    data_smonth = data_stime.month
    data_eyear = data_etime.year
    data_emonth = data_etime.month
    if data_smonth > 1:
        data_syear = data_syear + 1
    if data_emonth < 12:
        data_eyear = data_eyear - 1
    debug_print(
        "data_syear: " + str(data_syear) + " data_eyear: " + str(data_eyear), debug)
    data_timeseries = data_timeseries(time=(
        cdtime.comptime(data_syear, 1, 1, 0, 0, 0),
        cdtime.comptime(data_eyear, 12, 31, 23, 59, 59),
    ))
    f.close()
    return data_timeseries, data_syear, data_eyear
def computer(name, start_lon, end_lon, start_lat, end_lat,
             Nb=200, Lb=24, windows=[20, 30, 40, 50, 75, 100]):
    '''Compute ENSO variance and seasonal amplitude of the pseudocoral field
    averaged over a lat/lon box, from block-bootstrap samples over several
    window lengths (in years).'''
    # filtering parameters
    fs = 1
    f_hi = 1 / (12 * 2.0)
    f_lo = fs / (12 * 7.0)
    # open file
    f = cdms2.open(name, 'r')
    start_time = f.getAxis('time').asRelativeTime()[0]
    end_time = f.getAxis('time').asRelativeTime()[-1]
    # extract variable of interest in east Pacific area
    coral = f('pseudocoral', latitude=(start_lat, end_lat), longitude=(start_lon, end_lon))
    # print 'coral'
    # print coral
    f.close()
    # compute spatial mean
    cdutil.setTimeBoundsMonthly(coral, stored=0)
    spatial_mean = cdutil.averager(coral, axis='xy')
    # generate bootstrap samples
    Xb = bootstrap.block_bootstrap_ET(spatial_mean, Lb, Nb)
    #print 'spatial_mean_bootstrap'
    #print spatial_mean_bootstrap
    nw = len(windows)  # number of windows
    seasonal_amp = np.empty((nw, Nb))
    variance = np.empty((nw, Nb))
    index = 0
    # loop over windows
    for i in windows:
        Xw = Xb[:, :i * 12]  # sample over window
        clim, anom = seasonal_cycle(Xw)  # isolate seasonal cycle
        # compute seasonal amplitude
        smax = np.nanmax(clim, axis=1)
        smin = np.nanmin(clim, axis=1)
        seasonal_amp[index, :] = smax - smin
        # compute ENSO variance
        anom2_7 = np.empty(anom.shape)
        for b in range(Nb):
            # apply bandpass filter
            anom2_7[b, :] = bandpass.butter_bandpass_filter(anom[b, :], f_lo, f_hi, fs)
        # compute variance per se
        variance[index, :] = np.var(anom2_7, axis=1)
        index += 1  # update index
    return variance, seasonal_amp
def var_annual_cycle(var, seasons):
    "Calculate annual cycle climatology of each variable"
    cdutil.setTimeBoundsMonthly(var)
    var_season_data = cdutil.ANNUALCYCLE.climatology(var)(squeeze=1)
    # convert units: K -> degC for tas, kg m-2 s-1 -> mm/day for pr
    if var.id == 'tas':
        var_season_data = var_season_data - 273.15
    if var.id == 'pr':
        var_season_data = var_season_data * 3600. * 24.
    return var_season_data
def santerTime(array, calendar=None):
    """
    Documentation for santerTime(array, calendar):
    -------
    The santerTime(array) function converts a known-time array to the
    standard time calendar - if non-gregorian, the source calendar should
    be specified for accurate conversion

    Specified calendars can be one of the 5 calendars available within
    the cdtime module:
        GregorianCalendar
        MixedCalendar
        JulianCalendar
        NoLeapCalendar
        Calendar360

    For more information consult:
    http://uvcdat.llnl.gov/documentation/cdms/cdms_3.html#3.2

    Author: Paul J. Durack : [email protected]

    Usage:
    ------
    >>> from durolib import santerTime
    >>> import cdtime
    >>> newVar = santerTime(var, calendar=cdtime.NoLeapCalendar)

    Notes:
    -----
    """
    # Test calendar
    if calendar:
        cdtCalendar = calendar
    else:
        cdtCalendar = cdt.GregorianCalendar
    # Set time_since - months since 1800-1-1
    time = array.getTime()
    time_new = []
    for tt in time:
        reltime = cdt.reltime(tt, time.units)
        time_new.append(reltime.torel('months since 1800-1-1', cdtCalendar).value)
    time_axis = cdm.createAxis(time_new)
    time_axis.id = 'time'
    time_axis.units = 'months since 1800-1-1'
    time_axis.axis = 'T'
    time_axis.calendar = 'gregorian'
    array.setAxis(0, time_axis)
    cdu.setTimeBoundsMonthly(array)
    return array
def dai_jja():
    f = cdms.open("../DROUGHT_ATLAS/pdsi.mon.mean.selfcalibrated.nc")
    dai = f("pdsi")
    cdutil.setTimeBoundsMonthly(dai)
    dai_jja = cdutil.JJA(dai)
    fgrid = cdms.open("OBS/gpcp.precip.mon.mean.nc")
    gpcp_grid = fgrid("precip").getGrid()
    fgrid.close()
    dai2 = dai_jja.regrid(gpcp_grid, regridTool='regrid2')
    dai2.id = "pdsi"
    for att in dai.attributes.keys():
        setattr(dai2, att, dai.attributes[att])
    fw = cdms.open("../DROUGHT_ATLAS/OBSERVATIONS/DAI_selfcalibrated.nc", "w")
    fw.write(dai2)
    fw.close()
    return dai2
def perform_regression(data, parameter, var, region, land_frac, ocean_frac, nino_index):
    ts_var = data.get_timeseries_variable(var)
    domain = utils.general.select_region(region, ts_var, land_frac, ocean_frac, parameter)
    # Average over selected region, and average
    # over months to get the yearly mean.
    cdutil.setTimeBoundsMonthly(domain)
    # Get anomaly from annual cycle climatology
    if parameter.print_statements:
        print("domain.shape: {}".format(domain.shape))
    anomaly = cdutil.ANNUALCYCLE.departures(domain)
    nlat = len(anomaly.getLatitude())
    nlon = len(anomaly.getLongitude())
    reg_coe = anomaly[0, :, :](squeeze=1)
    confidence_levels = cdutil.ANNUALCYCLE.departures(domain)[0, :, :](squeeze=1)
    # Neither of the following methods work, so we just set values in
    # confidence_levels to be explicitly 0 or 1.
    # confidence_levels = anomaly[0, :, :](squeeze=1).fill(0)
    # confidence_levels = numpy.zeros_like(reg_coe)
    for ilat in range(nlat):
        if parameter.print_statements:
            print("ilat: {}".format(ilat))
        for ilon in range(nlon):
            dependent_var = anomaly[:, ilat, ilon]
            independent_var = nino_index
            # Uncomment the following line to use CDAT/genutil instead
            # (You'll also need to set pvalue)
            # slope, intercept = genutil.statistics.linearregression(dependent_var, x=independent_var)
            slope, _, _, pvalue, _ = scipy.stats.linregress(independent_var, dependent_var)
            reg_coe[ilat, ilon] = slope
            # Set confidence level to 1 if significant and 0 if not
            if pvalue < 0.05:
                # p-value < 5% implies significance at the 95% confidence level
                confidence_levels[ilat, ilon] = 1
            else:
                confidence_levels[ilat, ilon] = 0
    if parameter.print_statements:
        print("confidence in fn:", confidence_levels.shape)
    sst_units = "degC"
    reg_coe.units = "{}/{}".format(ts_var.units, sst_units)
    if parameter.print_statements:
        print("reg_coe.shape: {}".format(reg_coe.shape))
    return domain, reg_coe, confidence_levels
def setTimeBounds(self, var):
    time_axis = var.getTime()
    if time_axis._bounds_ is None:
        try:
            time_unit = time_axis.units.split(' since ')[0].strip()
            if time_unit == 'hours':
                values = time_axis.getValue()
                freq = 24 / (values[1] - values[0])
                cdutil.setTimeBoundsDaily(time_axis, freq)
            elif time_unit == 'days':
                cdutil.setTimeBoundsDaily(time_axis)
            elif time_unit == 'months':
                cdutil.setTimeBoundsMonthly(time_axis)
            elif time_unit == 'years':
                cdutil.setTimeBoundsYearly(time_axis)
        except Exception, err:
            wpsLog.debug("Exception in setTimeBounds:\n " + traceback.format_exc())
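The dispatch above keys off the prefix of the axis units string. A standalone sketch of the monthly branch (assumes only cdms2, cdutil, and numpy):

import numpy, cdms2, cdutil

# Build a monthly time axis; 'months since ...' units select the
# setTimeBoundsMonthly branch in the dispatcher above.
t = cdms2.createAxis(numpy.arange(24.))
t.designateTime()
t.id = 'time'
t.units = 'months since 2000'
cdutil.setTimeBoundsMonthly(t)  # attaches month-start/month-end bounds
print(t.getBounds()[:2])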
def zon_p_plots(var, dsetA, dsetB, title="", nlev=None):
    # Average zonally over the last 20 years
    bt = 30 * 12
    controlV = dsetA(var, time=slice(bt, 600))
    compareV = dsetB(var, time=slice(bt, 600))
    a = cdutil.averager(controlV, axis="x")
    b = cdutil.averager(compareV, axis="x")
    c = b - a
    cdutil.setTimeBoundsMonthly(c)
    plotme = cdutil.DJF.climatology(c)
    lat_pressure_contour_cdms(plotme, tit="%s DJF" % title, nlev=nlev)
    plotme = cdutil.JJA.climatology(c)
    lat_pressure_contour_cdms(plotme, tit="%s JJA" % title, nlev=nlev)
def var_seasons(var, seasons):
    "Calculate seasonal climatology of each variable"
    var_season_data = np.empty([len(seasons)]) * np.nan
    cdutil.setTimeBoundsMonthly(var)
    for k, season in enumerate(seasons):
        if season == 'ANN':
            months = cdutil.times.Seasons('DJFMAMJJASON')
        else:
            months = cdutil.times.Seasons(str(season))
        var_season_data[k] = months.climatology(var)
    # convert units
    if var.id == 'tas':
        var_season_data = var_season_data - 273.15
    if var.id == 'pr':
        var_season_data = var_season_data * 3600. * 24.
    return var_season_data
def get_ensemble(model, variable, experiment):
    rawdir = get_rawdir(variable)
    rips = np.unique([
        x.split(".")[-3]
        for x in glob.glob(rawdir + model + "/*" + experiment + "*")
    ])
    nrips = len(rips)
    fnames = sorted(get_filenames(model, variable, experiment, rips[0]))
    L = len(fnames)
    # get shape
    f = cdms.open(
        glob.glob(rawdir + model + "/" + variable + "." + experiment + "." +
                  model + "." + rips[0] + ".*")[0])
    gridsize = (f[variable].shape)[1:]
    f.close()
    historical = MV.zeros((nrips, L * 12) + gridsize)
    for ripi in range(len(rips)):
        rip = rips[ripi]
        fnames = sorted(
            glob.glob(rawdir + model + "/" + variable + "." + experiment +
                      "." + model + "." + rip + ".*"))
        for timei in range(len(fnames)):
            f = cdms.open(fnames[timei])
            data = f(variable)
            historical[ripi, 12 * timei:12 * (timei + 1)] = data
            f.close()
    fnames_rip = [
        variable + "." + experiment + "." + model + "." + rip for rip in rips
    ]
    modax = cmip5.make_model_axis(fnames_rip)
    tax = get_tax_from_files(fnames)
    latax = data.getLatitude()
    lonax = data.getLongitude()
    axlist = [modax, tax, latax, lonax]
    historical.setAxisList(axlist)
    historical.id = variable
    cdutil.setTimeBoundsMonthly(historical)
    return historical
def region(self, latBounds, lonBounds, i, userkey):
    cdmsVar = userdata[userkey]['var']
    latCoords = userdata[userkey]['latCoords']
    lonCoords = userdata[userkey]['lonCoords']
    clevs = userdata[userkey]['clevs']
    #self.debug("get data for only this region")
    # need to expand bounds by one due to the difference in how
    # basemap and cdms work with bounds
    t = len(latCoords) - 1
    n = len(lonCoords) - 1
    a, b, c, d = latBounds[0], latBounds[1], lonBounds[0], lonBounds[1]
    regiondata = cdmsVar[:,
                         (a - 1 if a > 0 else a):(b + 1 if b < t else b),
                         (c - 1 if c > 0 else c):(d + 1 if d < n else d)]
    #self.debug("perform time average on data")
    cdutil.setTimeBoundsMonthly(regiondata)
    avg = cdutil.averager(regiondata, axis='t')
    # set up the figure to have no borders
    fig = plt.figure(figsize=((d - c) * 0.15, (b - a) * 0.1), frameon=False)
    ax = plt.Axes(fig, [0., 0., 1., 1.])
    ax.set_axis_off()
    fig.add_axes(ax)
    #self.debug("plot using basemap")
    lons, lats = avg.getLongitude()[:], avg.getLatitude()[:]
    m = Basemap(projection='cyl',
                resolution='c',
                llcrnrlon=lonCoords[lonBounds[0]],
                llcrnrlat=latCoords[latBounds[0]],
                urcrnrlon=lonCoords[lonBounds[1]],
                urcrnrlat=latCoords[latBounds[1]],
                fix_aspect=False)
    x, y = m(*np.meshgrid(lons, lats))
    try:
        m.contourf(x, y, avg.asma(), clevs, cmap=plt.cm.RdBu_r, extend='both')
    except Exception, err:
        import traceback
        tb = traceback.format_exc()
        self.debug(tb)
        self.debug("Region lat(%d,%d) lon(%d,%d) failed" %
                   (latBounds[0], latBounds[1], lonBounds[0], lonBounds[1]))
def write_merra2(surf=None, root=None):
    if surf is None:
        surf, root = merra2()
    cdutil.setTimeBoundsMonthly(surf)
    cdutil.setTimeBoundsMonthly(root)
    fw = cdms.open(
        "../DROUGHT_ATLAS/OBSERVATIONS/MERRA2_soilmoisture_summerseason.nc", "w")
    djf_surf = cdutil.DJF(surf, criteriaarg=(1, None))[1:]
    jja_surf = cdutil.JJA(surf, criteriaarg=(1, None))[1:]
    ss_surf = summerseason_GLEAM(jja_surf, djf_surf)
    ss_surf.id = "smsurf"
    fw.write(ss_surf)
    djf_root = cdutil.DJF(root, criteriaarg=(1, None))[1:]
    jja_root = cdutil.JJA(root, criteriaarg=(1, None))[1:]
    ss_root = summerseason_GLEAM(jja_root, djf_root)
    ss_root.id = "smroot"
    fw.write(ss_root)
    fw.close()
def get_firstmember(model, variable, experiment, rip=None):
    rawdir = get_rawdir(variable)
    rips = np.unique([
        x.split(".")[-3]
        for x in glob.glob(rawdir + model + "/*" + experiment + "*")
    ])
    if rip is None:
        rip = rips[0]
    fnames = sorted(get_filenames(model, variable, experiment, rip))
    L = len(fnames)
    # get shape
    f = cdms.open(
        glob.glob(rawdir + model + "/" + variable + "." + experiment + "." +
                  model + "." + rips[0] + ".*")[0])
    gridsize = (f[variable].shape)[1:]
    f.close()
    nmonths = L * 12
    simulationdata = MV.zeros((nmonths, ) + gridsize)
    fnames = sorted(
        glob.glob(rawdir + model + "/" + variable + "." + experiment + "." +
                  model + "." + rip + ".*"))
    for timei in range(len(fnames)):
        f = cdms.open(fnames[timei])
        data = f(variable)
        simulationdata[12 * timei:12 * (timei + 1)] = data
        f.close()
    tax = get_tax_from_files(fnames)
    latax = data.getLatitude()
    lonax = data.getLongitude()
    axlist = [tax, latax, lonax]
    simulationdata.setAxisList(axlist)
    simulationdata.id = variable
    cdutil.setTimeBoundsMonthly(simulationdata)
    return simulationdata
def splice_data(hdata, sspdata):
    cdutil.setTimeBoundsMonthly(hdata)
    cdutil.setTimeBoundsMonthly(sspdata)
    scenario = cmip5.models(sspdata)[0].split("/")[-2]
    sspmodels = [fname.split(".")[2] for fname in cmip5.models(sspdata)]
    ssprips = [fname.split(".")[3] for fname in cmip5.models(sspdata)]
    sspids = []
    for mod, rip in zip(sspmodels, ssprips):
        sspids += [mod + "." + rip]
    hmodels = [fname.split(".")[2] for fname in cmip5.models(hdata)]
    hrips = [fname.split(".")[3] for fname in cmip5.models(hdata)]
    hids = [mod + "." + rip for mod, rip in zip(hmodels, hrips)]
    lenhist = hdata.shape[1]
    lenssp = sspdata.shape[1]
    # Splice only the model/rip pairs present in both experiments
    intersect = np.intersect1d(np.array(hids), np.array(sspids))
    nmod = len(intersect)
    spliced = MV.zeros((nmod, lenhist + lenssp))
    counter = 0
    splicedmods = []
    for pr in intersect:
        i = hids.index(pr)
        j = sspids.index(pr)
        dat = MV.concatenate((hdata[i], sspdata[j]))
        spliced[counter] = dat
        splicedmods += [
            cmip5.models(hdata)[i].replace("historical", "historical_" + scenario)
        ]
        #print(cmip5.models(hdata)[i])
        counter += 1
    modax = cmip5.make_model_axis(splicedmods)
    # Express the SSP times in the historical time units, then append
    units = hdata.getTime().units
    htime = hdata.getTime()[:]
    stime = sspdata.getTime().asComponentTime()
    stime_new = np.array([x.torel(units).value for x in stime])
    tax = cdms.createAxis(np.append(htime, stime_new))
    tax.designateTime()
    tax.id = "time"
    htax = hdata.getTime()
    for att in htax.attributes.keys():
        setattr(tax, att, htax.attributes[att])
    spliced.setAxisList([modax, tax])
    spliced.id = hdata.id
    cdutil.setTimeBoundsMonthly(spliced)
    return spliced
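A usage sketch, assuming two (model, time) variables whose model-axis labels follow the dotted variable.experiment.model.rip convention that the split(".") calls above rely on; the file names here are hypothetical:

# Hypothetical inputs; hdata and sspdata must share the dotted id convention.
f = cdms.open("historical_ensemble.nc")  # hypothetical file
g = cdms.open("ssp585_ensemble.nc")      # hypothetical file
hdata, sspdata = f("pr"), g("pr")
spliced = splice_data(hdata, sspdata)    # (n_common_models, nt_hist + nt_ssp)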
#nt = sst.shape[0]
#lons[0] = 0
#nlat = len(lats)
#nlon = len(lons)
t = sst.getTime().asRelativeTime("months since 1980")
t = np.array([x.value for x in t])
tyears = 1980 + t / 12.
MMstarti = np.where(MMt == tyears[0])[0][0]
MMendi = np.where(MMt == tyears[-1])[0][0]
#tyears = np.arange(np.ceil(t[0]), np.round(t[-1]))

# subtract seasonal cycle from fields
cdutil.setTimeBoundsMonthly(sst)
cdutil.setTimeBoundsMonthly(field)
cdutil.setTimeBoundsMonthly(u)
cdutil.setTimeBoundsMonthly(v)
cdutil.setTimeBoundsMonthly(ps)
field = cdutil.ANNUALCYCLE.departures(field)
sst = cdutil.ANNUALCYCLE.departures(sst)
u = cdutil.ANNUALCYCLE.departures(u)
v = cdutil.ANNUALCYCLE.departures(v)
ps = cdutil.ANNUALCYCLE.departures(ps)

CTIminlati = np.argmin(np.abs(lats - (-6)))
CTImaxlati = np.argmin(np.abs(lats - 6))
CTIminloni = np.argmin(np.abs(lons - 0))
CTImaxloni = np.argmin(np.abs(lons - 90))
#!/usr/bin/env python
# Adapted for numpy/ma/cdms2 by convertcdms.py
import cdms2, cdutil, os, sys, cdat_info

f = cdms2.open(os.path.join(cdat_info.get_sampledata_path(), 'tas_mo.nc'))
s = f('tas')
tc = s.getTime().asComponentTime()
print tc[0], tc[-1]
cdutil.setTimeBoundsMonthly(s)
ref = cdutil.ANNUALCYCLE.climatology(s(time=('1980', '1985', 'co')))
dep = cdutil.ANNUALCYCLE.departures(s)
ref = ref(order='y...')
dep = cdutil.ANNUALCYCLE.departures(s, ref=ref)
# test that an ma in the wrong order fails
try:
    dep = cdutil.ANNUALCYCLE.departures(s, ref=ref(order='t...').filled())
    raise RuntimeError("Should have failed with ma passed as ref (not mv2)")
except:
    pass
print data2.shape  # (48, 73, 144)
grid1 = data1.getGrid()
print grid1
print 'original ERA40 data shape: ', data1.shape  # original ERA40 data shape: (48, 160, 320)
grid2 = data2.getGrid()
print grid2
regridfunc = Regridder(grid1, grid2)
data1 = regridfunc(data1)
print 'new ERA40 data shape: ', data1.shape
cdutil.setTimeBoundsMonthly(data1)
cdutil.setTimeBoundsMonthly(data2)
start_time = cdtime.comptime(1991, 1, 1)
end_time = cdtime.comptime(1993, 12, 1)
ac1 = cdutil.ANNUALCYCLE.climatology(data1(time=(start_time, end_time, 'cob')))
ac2 = cdutil.ANNUALCYCLE.climatology(data2(time=(start_time, end_time, 'cob')))
print ac1
data1 = cdutil.ANNUALCYCLE.departures(data1, ref=ac1)
data2 = cdutil.ANNUALCYCLE.departures(data2, ref=ac2)
print data1.shape, data2.shape
tim = data2.getTime()
("rlut", "toa_net_longwave_flux"), ("rst", "toa_net_shortwave_flux"), ("rls", "surface_net_longwave_flux"), ("rss", "surface_net_shortwave_flux"), ("hfss", "surface_sensible_heat_flux"), ("hfls", "surface_latent_heat_flux"), ) ) # Global Energy Budget#{{{ if False: var = [] for key in var_names: td = {} tt = cdutil.averager(fc02(key), axis="xy", weights="generate") cdutil.setTimeBoundsMonthly(tt) tt = cdutil.YEAR(tt) td = (key, {"val": tt, "name": key, "title": var_names[key]}) var.append(td) var = dict(var) # Top of Atmospher Global Energy Budget if False: toa_lw = var["rlut"]["val"] toa_sw = var["rst"]["val"] net = toa_lw + toa_sw fig = figure(figsize=(15, 5)) fig.suptitle("TOA Energy Budget Warm Sun") subplot(131)
print 'domain decomp: ', npLat, ' x ', npLon
iLatBeg, iLatEnd = slab[0].start, slab[0].stop
iLonBeg, iLonEnd = slab[1].start, slab[1].stop
print '[%d] sub-domain slab: %d:%d, %d:%d dims %d x %d size: %d' % \
      (rk, iLatBeg, iLatEnd, iLonBeg, iLonEnd,
       iLatEnd - iLatBeg, iLonEnd - iLonBeg,
       (iLatEnd - iLatBeg) * (iLonEnd - iLonBeg))

value = 0
cdms2.setNetcdfShuffleFlag(value)       # where value is either 0 or 1
cdms2.setNetcdfDeflateFlag(value)       # where value is either 0 or 1
cdms2.setNetcdfDeflateLevelFlag(value)  # where value is an integer between 0 and 9 inclusive

# read local data
daclt = clt[:, iLatBeg:iLatEnd, iLonBeg:iLonEnd]

# time average
cdutil.setTimeBoundsMonthly(daclt)
mp = cdutil.averager(daclt, axis='t')

if rk == 0:
    print "Gathering results"
lst = MPI.COMM_WORLD.gather(mp, root=0)
if rk == 0:
    print "Gathered"
    out = numpy.zeros(clt.shape[1:], clt.dtype)
    for proc in range(sz):
        print len(lst), proc, lst[proc].shape
    for proc in range(sz):
        slab = decomp.getSlab(proc)
        iLatBeg, iLatEnd = slab[0].start, slab[0].stop
        iLonBeg, iLonEnd = slab[1].start, slab[1].stop
def ensemble_average(basedir, grid=None, func=None):
    models = np.unique(map(lambda x: x.split(".")[1], glob.glob(basedir + "*")))
    # Deal with extremely annoying GISS physics versions
    giss = np.where([x.find("GISS") >= 0 for x in models])[0]
    oldmodels = models
    models = np.delete(models, giss)
    for gissmo in oldmodels[giss]:
        physics_versions = np.unique(
            [x.split(".")[3][-2:] for x in glob.glob(basedir + "*" + gissmo + "*")])
        for pv in physics_versions:
            models = np.append(models, gissmo + " " + pv)
    if grid is None:
        # Get the coarsest grid
        the_file, grid = get_coarsest_grid(basedir)
    if "CESM1-WACCM" in models:
        i = np.argwhere(models == "CESM1-WACCM")
        models = np.delete(models, i)
    if "CanCM4" in models:
        i = np.argwhere(models == "CanCM4")
        models = np.delete(models, i)
    mo = models[0]
    print mo
    ens = get_ensemble(basedir, mo)
    ens0 = ens[0]
    print ens0
    f = cdms.open(ens0)
    variable = ens0.split(".")[-4]
    data = f(variable).regrid(grid, regridTool='regrid2')
    cdutil.setTimeBoundsMonthly(data)
    if func is not None:
        data = func(data)
    f.close()
    time_and_space = data.shape
    realizations = MV.zeros((len(ens),) + time_and_space)
    realizations[0] = data
    if len(ens) > 1:
        for i in range(1, len(ens)):
            f = cdms.open(ens[i])
            print ens[i]
            data = f(variable).regrid(grid, regridTool='regrid2')
            f.close()
            cdutil.setTimeBoundsMonthly(data)
            if func is not None:
                data = func(data)
            realizations[i] = data
    model_average = MV.zeros((len(models),) + time_and_space) + 1.e20
    j = 0
    model_average[j] = MV.average(realizations, axis=0)
    for mo in models[1:]:
        print mo
        j += 1
        ens = get_ensemble(basedir, mo)
        realizations = MV.zeros((len(ens),) + time_and_space)
        for i in range(len(ens)):
            f = cdms.open(ens[i])
            #print ens[i]
            data = f(variable).regrid(grid, regridTool='regrid2')
            f.close()
            cdutil.setTimeBoundsMonthly(data)
            if func is not None:
                data = func(data)
            print data.shape
            print time_and_space
            print data.shape == time_and_space
            if data.shape == time_and_space:
                realizations[i] = data
                masked_ma = False
            else:
                masked_ma = True
        if not masked_ma:
            model_average[j] = MV.average(realizations, axis=0)
        else:
            print "not the right shape: " + mo
            model_average[j] = MV.ones(time_and_space) + 1.e20
    M2 = MV.masked_where(model_average > 1.e10, model_average)
    M = MV.average(M2, axis=0)
    M.setAxisList(data.getAxisList())
    M.id = data.id
    M.name = M.id
    return M
elif d.getAxisIndex('PRESSURE') != -1 and \
        shape(d)[d.getAxisIndex('time')] == 12 and \
        'UCSD' in args.data_source:
    # Case ARGO UCSD - test for 3d and trim off top layer
    if args.target_variable in 'sos':
        d_mean = f_h('ARGO_SALINITY_MEAN')
    elif args.target_variable in 'tos':
        d_mean = f_h('ARGO_TEMPERATURE_MEAN')
    # Create annual cycle from annual mean
    d_ancycle = d_mean + d
    # print d_ancycle.shape
    # print d_ancycle[:,0,...].shape
    # print d_ancycle.getAxisIds()
    d_ancycle = d_ancycle[:, 0, ...]
    # print d_ancycle.getAxisIds()
    cdu.setTimeBoundsMonthly(d_ancycle)
    # clim_ac = cdu.ANNUALCYCLE.climatology(d_ancycle) ; # shape 12,58,260,720
    clim_ac = d_ancycle
    start_month_s = '01'
    end_month_s = '12'
else:
    if args.data_source in 'HadISST':
        boundnodes = 'co'
    elif args.data_source in 'NOAA_OISSTv2':
        boundnodes = 'oob'
    else:
        boundnodes = 'ocn'
    cdu.setTimeBoundsMonthly(d)  # Set bounds before trying to chop up
    clim_ac = cdu.ANNUALCYCLE.climatology(
        d(
            time=(
# now get the variable 'data' attributes and put into another dictionary
list_data = data.attributes.keys()
data_dic = {}
for i in range(0, len(list_data)):
    data_dic[i] = list_data[i], data.attributes[list_data[i]]
# print the list and the dictionary
print list_data
print data_dic
#
# calculate Annual Cycle
#
cdutil.setTimeBoundsMonthly(data)
start_time = data.getTime().asComponentTime()[0]
end_time = data.getTime().asComponentTime()[-1]
# print the time extent of the data:
print 'start_time :', start_time, ' end_time: ', end_time
# calculate annualcycle climatology
ac = cdutil.ANNUALCYCLE.climatology(data(time=(start_time, end_time, 'cob')))
for i in range(0, len(data_dic)):
    dm = data_dic[i]
    setattr(ac, dm[0], dm[1])
#
# write out file and add global attributes to file
cdms2.setNetcdfShuffleFlag(0)
cdms2.setNetcdfDeflateFlag(1)       # was 0 130717
cdms2.setNetcdfDeflateLevelFlag(9)  # was 0 130717
cdms2.setAutoBounds(1)  # Ensure bounds on time and depth axes are generated

filepath = "/p/user_pub/e3sm/zhang40/analysis_data_e3sm_diags/HadISST/original_data/"
filename1 = "HadISST_ice.nc"
filename2 = "HadISST_sst.nc"
fin1 = cdms2.open(filepath + filename1)
fin2 = cdms2.open(filepath + filename2)
ice = fin1("sic")
sst = fin2("sst")

fout = cdms2.open(filepath + "HadISST_sst_ice_masked.nc", "w")
sst_masked = MV2.masked_where(ice > 0, sst, copy=True)
sst_masked.id = "sst"
cdutil.setTimeBoundsMonthly(sst_masked)
# reverse latitude so that latitude is ascending
sst_masked = sst_masked[:, ::-1, :]
fout.write(sst_masked)

att_keys = fin2.attributes.keys()
att_dic = {}
for i in range(len(att_keys)):
    att_dic[i] = att_keys[i], fin2.attributes[att_keys[i]]
    to_out = att_dic[i]
    setattr(fout, to_out[0], to_out[1])
print(fout.attributes)
fout.close()
print "".join(['** Processing annual means for ',str(lb),' to ',str(ub),' **']) print d.shape print d.getTime() t = d.getTime() mon = 1 for ind,val in enumerate(t): if ind == 0: print [format(ind,'03d'),format(mon,'02d'),t.asComponentTime()[ind]] writeToLog(logfile,"".join(['Start: ',str([format(ind,'03d'),format(mon,'02d'),t.asComponentTime()[ind]]),'\n'])) elif ind == d.shape[0]-1: print [format(ind,'03d'),format(mon,'02d'),t.asComponentTime()[ind]] writeToLog(logfile,"".join(['Start: ',str([format(ind,'03d'),format(mon,'02d'),t.asComponentTime()[ind]]),'\n'])) mon = mon + 1 if mon == 13: mon = 1 cdu.setTimeBoundsMonthly(d) ; # Correct CCSM4 bounds # Check units and correct in case of salinity if var == 'so' or var == 'sos': [d,_] = fixVarUnits(d,var,True) dan = cdu.YEAR(d) dan = dan.astype('float32') ; # Recast from float64 back to float32 precision - half output file sizes print "".join(['Start time: ',str(lb),' End time: ',str(ub),' input shape: ',str(d.shape),' output shape: ',str(dan.shape)]) writeToLog(logfile,"".join(['Start time: ',str(lb),' End time: ',str(ub),' input shape: ',str(d.shape),' output shape: ',str(dan.shape),'\n'])) # Open outfile to write g = cdm.open(os.path.join(outdir,mod,run,ver,fout),'w+') # Copy across attributes # Write variable attributes back out to new variable for k in d.attributes.keys(): setattr(dan,k,d.attributes[k]) # Write out file global atts
# The cdutil module: climate-oriented utilities
# - contents
import cdutil
print dir(cdutil)

# - load the data (central Pacific wind over several years)
from vcmq import *
f = cdms2.open(data_sample('uv_pacific.nc'))
u = f('uwnd')
f.close()

# - build a monthly climatology and anomalies
cdutil.setTimeBoundsMonthly(u)  # bounds matter here (what about other frequencies?)
uclim = cdutil.ANNUALCYCLE.climatology(u)            # climatology
uanom = cdutil.ANNUALCYCLE.departures(u, ref=uclim)  # anomalies
print uclim.std(), uanom.std()
djf = cdutil.times.Seasons('DJF')  # create a season
udjf = djf(u)                      # extract it
dfj = cdutil.DJF                   # predefined seasons already exist

# - averager
ut = cdutil.averager(u, axis='yx', weights=cdutil.area_weights(u))  # spatial average
help(cdutil.averager)  # -> try the time average

# - regions and selectors
equator = cdutil.region.domain(lat=(-2, 2))
select = cdms2.selectors.Selector(lon=slice(0, 3), time=('1950', cdtime.comptime(1960)))
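A possible continuation of the tutorial (a hedged sketch, not part of the original): cdms2 variables accept the selector and region objects defined above directly when called:

# Sketch: apply the selector and the region defined above.
u_sel = u(select)   # longitude slice plus the 1950-1960 time range
u_eq = u(equator)   # equatorial band only
print u_sel.shape, u_eq.shape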
    timeEnd = int(timeEnd.torelative(timeUnitsStr).value)
    if 'dayStep' in locals() and calendarStep == 'days':
        times = np.float32(range(timeStart, timeEnd + 1, dayStep))  # range requires +1 to reach end points
    else:
        #times = np.float32(range(timeStart,(timeEnd)))
        times = np.float32(range(timeStart, timeEnd + 1))  # range requires +1 to reach end points
    times = cdm.createAxis(times)
    times.designateTime()
    times.id = 'time'
    times.units = timeUnitsStr
    times.long_name = 'time'
    times.standard_name = 'time'
    times.calendar = 'gregorian'
    times.axis = 'T'
    if calendarStep == 'months':
        cdu.setTimeBoundsMonthly(times)
    elif calendarStep == 'days':
        #cdu.setTimeBoundsDaily(times,frequency=(1./dayStep))
        pass
    times.toRelativeTime(''.join(['days since ', str(times.asComponentTime()[0].year), '-1-1']))
    timeBounds = times.getBounds()
    times[:] = (timeBounds[:, 0] + timeBounds[:, 1]) / 2.
    return times


#%%
def matchAndTrimBlanks(varList, listFilesList, newVarId):
    """
    Documentation for matchAndTrimBlanks():
    -------
    The matchAndTrimBlanks() function takes a nested list of files, a
#lats = sst.getLatitude()[:]
#lons = sst.getLongitude()[:]
#nt = sst.shape[0]
#lons[0] = 0
#nlat = len(lats)
#nlon = len(lons)
t = field.getTime().asRelativeTime("months since 1980")
t = np.array([x.value for x in t])
tyears = 1980 + t / 12.
#tyears = np.arange(np.ceil(t[0]), np.round(t[-1]))

# subtract seasonal cycle from fields
cdutil.setTimeBoundsMonthly(sst)
cdutil.setTimeBoundsMonthly(field)
field = cdutil.ANNUALCYCLE.departures(field)
sst = cdutil.ANNUALCYCLE.departures(sst)

CTIminlati = np.argmin(np.abs(lats - (-6)))
CTImaxlati = np.argmin(np.abs(lats - 6))
CTIminloni = np.argmin(np.abs(lons - 0))
CTImaxloni = np.argmin(np.abs(lons - 90))

# CTI filter requirements
order = 5
fs = 1           # sample rate (cycles per month)
Tn = 3.
cutoff = 1 / Tn  # desired cutoff frequency of the filter (cycles per month)
import cdms2, cdutil, sys, MV2, numpy, os, cdat_info

f = cdms2.open(os.path.join(cdat_info.get_prefix(), 'sample_data', 'clt.nc'))
s = f("clt")
cdutil.setTimeBoundsMonthly(s)
print 'Getting JJA, which should be nonexistent in the data'
try:
    cdutil.JJA(s[:5])
    raise RuntimeError("data w/o season did not fail")
except:
    pass

## Create a year's worth of data w/o JJA
s1 = s[:5]
s2 = s[8:12]
s3 = MV2.concatenate((s1, s2))
t = MV2.concatenate((s1.getTime()[:], s2.getTime()[:]))
t = cdms2.createAxis(t, id='time')
t.units = s.getTime().units
t.designateTime()
s3.setAxis(0, t)
cdutil.setTimeBoundsMonthly(s3)
try:
    cdutil.JJA(s3)
    raise RuntimeError("data w/o season did not return None")
except:
    pass
masked_tas = numpy.multiply(tas_new.filled(), land)
x.clear()
x.plot(masked_sst)
y.clear()
y.plot(masked_tas)

# add land and ocean contributions for the merged product
merged = masked_sst + masked_tas

# add metadata to this numeric array
merged = cdms2.createVariable(merged, axes=(tim, lat, lon), typecode="f", id="merged_tas_sst")
merged.id = "merged_tas_sst"
merged.set_fill_value(1e20)
cdutil.setTimeBoundsMonthly(merged)
x.clear()
x.plot(merged)

# write out the total temperature data to a netcdf file
o = cdms.open("era40_merged_tas_sst.nc", "w")
o.write(merged)

# create base period 1991-1993, inclusive
start_time = cdtime.comptime(1991, 1, 1)
end_time = cdtime.comptime(1993, 12, 1)

# the annual cycle
ac = cdutil.ANNUALCYCLE.climatology(merged(time=(start_time, end_time, "co")))
#cdutil.setTimeBoundsMonthly(CAM5_PR)
CAM5_PR = CAM5_PR[1560:-120, :, :]
#======================================================================
CAM5_dD_pr = f('dD', longitude=(0, 360), latitude=(-90., 90.))  #, time=(start_time,end_time))
CAM5_dD_pr = CAM5_dD_pr[1560:-120, :, :]
#======================================================================
times = np.arange(1560.0, 1860.0, 1)
newTimeAxis = cdms2.createAxis(times, id='time')
newTimeAxis.units = 'months since 1850'
newTimeAxis.designateTime()
CAM5_PR.setAxis(0, newTimeAxis)
cdutil.setTimeBoundsMonthly(CAM5_PR)
#======================================================================
times = np.arange(1560.0, 1860.0, 1)
newTimeAxis = cdms2.createAxis(times, id='time')
newTimeAxis.units = 'months since 1850'
newTimeAxis.designateTime()
CAM5_dD_pr.setAxis(0, newTimeAxis)
cdutil.setTimeBoundsMonthly(CAM5_dD_pr)
#======================================================================
#======================================================================
#
# SAVE
fH = cdm.open(filePath)
if (last_month == 6 and last_year in filePath):
    var = fH(varLoad, time=slice(0, 6))
else:
    var = fH(varLoad)
varLen = var.shape[0]

#%% Cleanup coord atts
# time
time = var.getAxis(0)
time.standard_name = 'time'
time.long_name = 'time'
time.calendar = 'gregorian'  # Force Gregorian
time.axis = 'T'
time.toRelativeTime('days since 1870-1-1')  # Fix negative values
cdu.setTimeBoundsMonthly(time)  # Resolve issues with bounds being mid-time values rather than month-end/start values
if BC == 'bcs':
    time._bounds_ = None  # Required to purge bounds created by cdu call above

#%% Write timestep to composite variable
if varLen == 12:
    countUp = count + 12
else:
    countUp = count + varLen
varComp[count:countUp] = var
timeComp[count:countUp] = time
count = countUp

#%% Cleanup coord atts and create areacello
if last_year in filePath:
    # latitude
lon = cdms2.createAxis(MV2.arange(100) * 3.6)
lon.designateLongitude()
lon.units = "degrees_east"
lon.id = "longitude"
lat = cdms2.createAxis(MV2.arange(100) * 1.8 - 90.)
lat.id = "latitude"
lat.designateLatitude()
lat.units = "degrees_north"
lev = cdms2.createAxis([1000.])
lev.id = "plev"
lev.designateLevel()
lev.units = "hPa"
t = cdms2.createAxis([0, 31.])
t.id = "time"
t.designateTime()
t.units = "days since 2014"
cdutil.setTimeBoundsMonthly(t)
a.setAxisList((t, lev, lat, lon))
a = MV2.masked_less(a, .5)
grd = cdms2.createGaussianGrid(64)
a = a.ascontiguous()
a = a.regrid(grd, regridTool="regrid2")
a = cdutil.averager(a, axis='txy')
assert a[0] == 0.7921019540305255
def concatenate_piControl(self, season=None, compressed=False):
    experiment = "piControl"
    fnames = sorted(get_ensemble_filenames(self.variable, self.region, experiment))
    #models=sorted(self.ensemble_dict.keys())
    models = get_ok_models(self.region)
    nmod = len(models)
    ntimes = []
    model_names = []
    # Loop over models without loading data to find the shortest control run
    for model in models:
        # print(model)
        I = np.where([x.split(".")[2] == model for x in fnames])[0]
        if len(I) > 0:
            first_member = int(I[0])
            fname = fnames[first_member]
            model_names += [fname]
            f = cdms.open(fname)
            ntimes += [int(f[self.variable].shape[0])]
            f.close()
    L = np.min(ntimes)
    # Use the time axis of the shortest control run
    imin = np.argmin(ntimes)
    fshortest = model_names[imin]
    f = cdms.open(fshortest)
    tax = f(self.variable).getTime()
    tax.id = 'time'
    tax.designateTime()
    f.close()
    # Load the data
    #SingleMember=np.ma.zeros((len(model_names),L))+1.e20
    SingleMember = np.ma.zeros((nmod, L)) + 1.e20
    i = 0
    for model in models:
        I = np.where([x.split(".")[2] == model for x in fnames])[0]
        if len(I) > 0:
            first_member = I[0]
            fname = fnames[first_member]
            f = cdms.open(fname)
            vdata = f(self.variable)
            SingleMember[i] = vdata[:L]
            f.close()
        else:
            if self.verbose:
                print("No piControl data for " + model + " " + self.variable)
        i += 1
    # Historical units are already converted; need to convert piControl from
    # kg m-2 s-1 to mm day-1
    #if self.variable in ["pr","evspsbl","prsn","mrros","mrro"]:
    #    SingleMember = SingleMember*86400.
    SingleMember = MV.masked_where(np.abs(SingleMember) > 1.e10, SingleMember)
    SingleMember = MV.array(SingleMember)
    SingleMember.setAxis(1, tax)
    SingleMember.setAxis(0, cmip5.make_model_axis(models))
    # KLUDGE: FIRST YEAR IS ZERO - FIX THIS IN THE DOWNLOADER
    SingleMember = MV.masked_where(SingleMember == 0, SingleMember)
    # if self.variable in ["mrsos","mrso"]:
    #     if not raw:
    #         SingleMember=self.standardize_zscore(SingleMember)
    # else:
    #     if not raw:
    #         SingleMember=self.convert_to_percentage(SingleMember)
    if season is None:
        return SingleMember
    cdutil.setTimeBoundsMonthly(SingleMember)
    seasonal = getattr(cdutil, season).departures(SingleMember)
    return DA_tools.concatenate_this(seasonal, compressed=compressed)
try:
    d = f_in(var)
except:
    # Report failure to logfile
    print "** PROBLEM 1 (read var error - ann calc failed) with: " + l + " found and breaking to next loop entry.. **"
    nc_bad1 = nc_bad1 + 1
    if 'logfile' in locals():
        logtime_now = datetime.datetime.now()
        logtime_format = logtime_now.strftime("%y%m%d_%H%M%S")
        time_since_start = time.time() - start_time
        time_since_start_s = '%09.2f' % time_since_start
        err_text = 'PROBLEM 1 (read var error - ann calc failed) creating '
        writeToLog(logfile, "".join(['** ', format(nc_bad1, "07d"), ': ', logtime_format, ' ', time_since_start_s, 's; ', err_text, l, ' **']))
    continue
# Explicitly set timeBounds - problem with cmip5.NorESM1-M.rcp45.r1i1p1.mo.tas.ver-v20110901.xml
cdu.setTimeBoundsMonthly(d)
# Check units and correct in the case of salinity
if var in ['so', 'sos']:
    [d, _] = fixVarUnits(d, var, True, logfile)
# Get the time dimension and convert to component time
dt = d.getTime()
dtc = dt.asComponentTime()
dfirstyr = dtc[0].year
dlastyr = dtc[-1].year
# Use cdutil averager functions to generate annual means
print "** Calculating annual mean **"
time_anncalc_start = time.time()
try:
    # Determine the first January
    for counter, compTime in enumerate(dtc):
def compute(self):
    variable = self.getInputFromPort('tvariable')
    cdutil.setTimeBoundsMonthly(variable.data)
    self.setResult('tvariable', variable)