def project_east_west(self, dataset, experiment, best_fit=True):
    """Project observations onto the east and west fingerprints and plot them.

    Parameters
    ----------
    dataset : str
        Key (case-insensitive) into self.OBS.
    experiment : str
        Attribute name of the fingerprint object on self.
    best_fit : bool
        If True, overlay the least-squares linear trend for each curve.
    """
    # Bug fix: string.upper() was removed in Python 3; str.upper() works in 2 and 3.
    X = self.OBS[dataset.upper()]
    fingerprint = getattr(self, experiment)
    # Compute each projection once and reuse it for the best-fit overlay
    # (originally projectField was called twice per direction).
    projections = {}
    for direction in ["west", "east"]:
        solver = fingerprint.solvers[direction]
        fac = da.get_orientation(solver)
        proj = fac * solver.projectField(X.reshaped[direction])[:, 0]
        projections[direction] = proj
        time_plot(proj, label=direction.upper(), color=get_colors(direction))
    plt.legend()
    plt.ylabel("Projection onto " + fingerprint.experiment + " fingerprint")
    if best_fit:
        for direction in ["west", "east"]:
            y = projections[direction]
            t = cmip5.get_plottable_time(y)
            p = np.polyfit(t, y.asma(), 1)
            plt.plot(t, np.polyval(p, t), "--", color=get_colors(direction))
def plot_eastwest(X):
    """Plot the leading joint (west, east) EOF pattern and its principal component.

    Top panel: the month-resolved EOF loading for each region.
    Bottom panel: the corresponding PC time series.
    """
    west = X.reshaped["west"]
    east = X.reshaped["east"]
    if len(west.shape) > 2:
        # Multiple ensemble members present: average over the multi-model ensemble first.
        fields = [MV.average(cmip5.ensemble2multimodel(west), axis=0),
                  MV.average(cmip5.ensemble2multimodel(east), axis=0)]
    else:
        fields = [west, east]
    solver = MultivariateEof(fields)
    west_eofs, east_eofs = solver.eofs()
    fac = da.get_orientation(solver)
    month_labels = ["JAN", "FEB", "MAR", "APR", "MAY", "JUN",
                    "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"]
    plt.subplot(211)
    plt.plot(fac * west_eofs[0].asma(), label="WEST")
    plt.plot(fac * east_eofs[0].asma(), label="EAST")
    plt.xticks(np.arange(12), month_labels)
    plt.legend()
    plt.subplot(212)
    time_plot(fac * solver.pcs()[:, 0], label="WEST")
def standardize_zscore(self, alldata):
    """Convert alldata to monthly z-scores relative to each model's piControl run.

    For every ensemble member, the matching model's preindustrial-control
    monthly mean and standard deviation are used to standardize that member's
    time series month by month.  Returns a cdms clone of alldata with the
    same id.
    """
    self.get_ensemble("piControl")
    piC = self.piControl
    cdutil.setTimeBoundsMonthly(piC)
    n_ctrl_models, _n_ctrl_times = piC.shape
    # Monthly climatological mean and spread of the control run, per model.
    mu = np.ma.zeros((n_ctrl_models, 12))
    sigma = np.ma.zeros((n_ctrl_models, 12))
    for month in range(12):
        monthly = piC[:, month::12]
        mu[:, month] = np.ma.average(monthly, axis=1)
        sigma[:, month] = np.ma.std(monthly, axis=1)
    pmodels = [fname.split(".")[-3] for fname in cmip5.models(piC)]
    labels = cmip5.models(alldata)
    # Bare model names contain no "."; full CMIP5 filenames do.
    if len(labels[0].split(".")) == 1:
        emodels = labels
    else:
        emodels = [fname.split(".")[-3] for fname in labels]
    Z = np.zeros_like(alldata)
    for member, model in enumerate(emodels):
        ctrl = pmodels.index(model)
        for month in range(12):
            Z[member, month::12] = (alldata[member, month::12]
                                    - mu[ctrl, month]) / sigma[ctrl, month]
    Z = cmip5.cdms_clone(Z, alldata)
    Z.id = alldata.id
    return Z
def plot_obs_trends(self, dataset, **kwargs):
    """Plot month-by-month linear trends of the west and east observations.

    Parameters
    ----------
    dataset : str
        Key (case-insensitive) into self.OBS.
    **kwargs
        Optional ``start``/``stop`` comptimes (popped); remaining kwargs are
        forwarded to plt.plot.
    """
    # Bug fix: string.upper() was removed in Python 3; str.upper() works in 2 and 3.
    X = self.OBS[dataset.upper()]
    west = X.reshaped["west"]
    east = X.reshaped["east"]
    # Idiom: pop with a default instead of membership tests on kwargs.keys().
    start = kwargs.pop("start", None)
    if start is None:
        start = cmip5.start_time(east)
        start = cdtime.comptime(start.year, start.month, 1)
    stop = kwargs.pop("stop", None)
    if stop is None:
        stop = cmip5.stop_time(east)
        stop = cdtime.comptime(stop.year, stop.month, 30)
    west = west(time=(start, stop))
    east = east(time=(start, stop))
    # Axis 0 is years after by_month reshaping; rename so trend code sees "time".
    west.getAxis(0).id = "time"
    east.getAxis(0).id = "time"
    plt.plot(cmip5.get_linear_trends(west).asma(), label="WEST",
             color=get_colors("west"), **kwargs)
    plt.plot(cmip5.get_linear_trends(east).asma(), label="EAST",
             color=get_colors("east"), **kwargs)
    months = ["JAN", "FEB", "MAR", "APR", "MAY", "JUN",
              "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"]
    plt.xticks(np.arange(12), months)
def scatterplot_cmip(X, Y):
    """Scatterplot the arrays X and Y.

    If the model axis holds ensemble averages, plot one point per model.
    Otherwise, group individual ensemble members by model.  X and Y must be
    of the same length and have the same model axes.
    """
    # Ensemble-averaged labels are bare model names with no "." separators;
    # full CMIP5 filenames contain several.
    ensemble_average = len(cmip5.models(X)[0].split(".")) == 1
    markers = model_dictionary()
    if ensemble_average:
        for i, model in enumerate(cmip5.models(X)):
            style = markers[model]
            plt.plot([X[i]], [Y[i]], style["marker"], markersize=10,
                     color=style["color"], label=model)
    else:
        members_of = cmip5.ensemble_dictionary(X)
        for model in sorted(members_of.keys()):
            print(model)
            style = markers[model]
            idx = members_of[model]
            plt.plot(X.asma()[idx], Y.asma()[idx], style["marker"],
                     markersize=10, color=style["color"], label=model)
def for_figure_4(self, start_time, stop_time=None, overlapping=True, include_trees=True, include_dai=False, include_cru=False, include_piControl=False, noisestart=None, solver=None):
    """Assemble the signal/noise quantities plotted in Figure 4.

    Returns a dict with keys "noise", "modslopes", "tree_rings" and,
    depending on the include_* flags, "dai", "cru" and "picontrol".
    NOTE(review): include_trees is accepted but currently unused — the tree-ring
    signal is always computed; confirm against callers before removing it.
    """
    data = {}
    if stop_time is None:
        stop_time = cmip5.stop_time(self.get_tree_ring_projection())
    target_obs = self.get_tree_ring_projection()(time=(start_time, stop_time))
    L = len(target_obs)
    # Bug fix: honor the `overlapping` argument instead of hard-coding True.
    modslopes, noiseterm = self.sn_at_time(start_time, L, overlapping=overlapping,
                                           noisestart=noisestart, solver=solver)
    signal = float(cmip5.get_linear_trends(target_obs))
    data["noise"] = noiseterm
    data["modslopes"] = modslopes
    data["tree_rings"] = signal
    if include_dai:
        dai_proj = self.project_dai_on_solver(start=start_time, solver=solver)
        data["dai"] = cmip5.get_linear_trends(dai_proj(time=(start_time, stop_time)))
    if include_cru:
        cru_proj = self.project_cru_on_solver(start=start_time, solver=solver)
        data["cru"] = cmip5.get_linear_trends(cru_proj(time=(start_time, stop_time)))
    if include_piControl:
        p = self.project_piControl_on_solver(solver=solver)
        data["picontrol"] = bootstrap_slopes(p, L)
    # Bug fix: the assembled dictionary was built but never returned.
    return data
def splice(hist, rcp):
    """Concatenate historical and RCP runs for every model/rip pair present in both.

    Returns an MV array of shape (n_matched_runs, nt_hist + nt_rcp, ...) whose
    model-axis labels record which two files were joined.
    """
    hmodels = cmip5.models(hist)
    rcpmodels = cmip5.models(rcp)
    # A run is identified by its model name (field 1) and rip code (field 3).
    rip_key = lambda fname: fname.split(".")[1] + "." + fname.split(".")[3]
    hrips = [rip_key(x) for x in hmodels]
    rcprips = [rip_key(x) for x in rcpmodels]
    goodrips = np.intersect1d(hrips, rcprips)
    nt = hist.shape[1] + rcp.shape[1]
    H85 = MV.zeros((len(goodrips), nt) + hist.shape[2:])
    labels = []
    tax = None
    for i, rip in enumerate(goodrips):
        hi = hrips.index(rip)
        ri = rcprips.index(rip)
        smoosh = MV.concatenate((hist[hi], rcp[ri]))
        H85[i] = smoosh
        labels.append(hmodels[hi] + " SPLICED WITH " + rcpmodels[ri])
        if tax is None:
            # The concatenated time axis is the same for every run; keep the first.
            tax = smoosh.getTime()
    modax = cmip5.make_model_axis(labels)
    H85.setAxisList([modax, tax] + hist.getAxisList()[2:])
    return H85
def PET_experiment(experiment):
    """Compute PET, VPD and RH for every tasmax/tasmin file of `experiment`
    and write each result to /kate/PET/<experiment>/ as netcdf.

    Files that fail to process are reported and skipped.
    """
    path = "/kate/PET/" + experiment + "/"
    # Idiom: create the directory with os.makedirs instead of shelling out.
    if not os.path.exists(path):
        os.makedirs(path)

    def _process(fnames, variable):
        # Shared body for the tasmax and tasmin passes (previously duplicated).
        for fname in fnames:
            try:
                PET, VPD, RH = PET_from_cmip(fname)
                writefname = fname.split("/")[-1].replace("xml", "nc").replace(
                    variable, "PET_" + variable)
                fw = cdms.open(path + writefname, "w")
                fw.write(PET)
                fw.write(VPD)
                fw.write(RH)
                fw.close()
            # Bug fix: bare except also swallowed KeyboardInterrupt/SystemExit;
            # print() works under both Python 2 and 3.
            except Exception:
                print("bad file: " + fname)

    _process(np.array(cmip5.get_datafiles(experiment, "tasmax")), "tasmax")
    _process(np.array(cmip5.get_datafiles(experiment, "tasmin")), "tasmin")
def dictionary_ensemble_average(d, grid=None):
    """Build a multi-model ensemble mean from a {model: data} dictionary.

    Each model's members are averaged, truncated to the common time span, and
    regridded to `grid` (or, by default, to the coarsest grid found among the
    models).  Returns the stacked (model, time, ...) MV array.
    """
    if grid is None:
        # Find the model with the coarsest (fewest-cell) horizontal grid.
        smallest = None
        for m in d.keys():
            gridsize = d[m].shape[-1] * d[m].shape[-2]
            if smallest is None or gridsize < smallest:
                smallest = gridsize
                themodel = m
        target_grid = d[themodel].getGrid()
    else:
        # Bug fix: a caller-supplied grid was previously ignored, and the code
        # then hit a NameError because `themodel` was never assigned.
        target_grid = grid
    allstop = str(np.min([cmip5.stop_time(d[m]).year for m in d.keys()])) + "-12-31"
    # NOTE(review): "-1-11" (Jan 11) looks like a typo for "-1-1" (Jan 1);
    # preserved as-is — confirm before changing.
    allstart = str(np.max([cmip5.start_time(d[m]).year for m in d.keys()])) + "-1-11"
    standardize = lambda data: data(time=(allstart, allstop)).regrid(
        target_grid, regridTool='regrid2')
    MME = None
    for counter, m in enumerate(d.keys()):
        # Average over individual ensemble members, then truncate/regrid.
        modeldata = standardize(MV.average(d[m], axis=0))
        if MME is None:
            MME = MV.zeros((len(d),) + modeldata.shape)
        MME[counter] = modeldata
    modax = cmip5.make_model_axis(list(d))
    MME.setAxisList([modax] + modeldata.getAxisList())
    cdutil.setTimeBoundsMonthly(MME)
    return MME
def convert_to_percentage(self, alldata):
    """Express alldata as a percentage anomaly relative to each model's
    piControl annual cycle.

    Every ensemble member is compared against the matching model's control-run
    monthly climatology: 100 * (x - clim) / clim.  Returns a cdms clone of
    alldata with the same id.
    """
    self.get_ensemble("piControl")
    piC = self.piControl
    cdutil.setTimeBoundsMonthly(piC)
    _n_ctrl_models, _n_ctrl_times = piC.shape  # implicit 2-D shape check
    ac = cdutil.ANNUALCYCLE.climatology(piC)
    pmodels = [fname.split(".")[-3] for fname in cmip5.models(piC)]
    labels = cmip5.models(alldata)
    # Bare model names contain no "."; full CMIP5 filenames do.
    if len(labels[0].split(".")) == 1:
        emodels = labels
    else:
        emodels = [fname.split(".")[-3] for fname in labels]
    Z = np.zeros_like(alldata)
    for member, model in enumerate(emodels):
        clim = ac[pmodels.index(model)]
        for month in range(12):
            Z[member, month::12] = (alldata[member, month::12]
                                    - clim[month]) / clim[month]
    Z = cmip5.cdms_clone(Z * 100., alldata)
    Z.id = alldata.id
    return Z
def __init__(self, dataset):
    """Load processed west ("pr_W") and central-east ("pr_CE") precip obs.

    Stores the raw series in self.data and year-anomaly (year, month) arrays
    in self.reshaped ("west", "east", and the paired "multi" list).
    """
    f = cdms.open("DATA/OBS/PROCESSED/" + dataset + ".nc")
    self.data = {}

    def _trim_to_full_years(obs):
        # Restrict to complete Jan-Dec years, then reshape to (year, month).
        # (This logic was previously duplicated verbatim for west and east.)
        stop_time = cmip5.stop_time(obs)
        if stop_time.month != 12:
            stop_time = cdtime.comptime(stop_time.year - 1, 12, 31)
        start_time = cmip5.start_time(obs)
        if start_time.month != 1:
            start_time = cdtime.comptime(start_time.year + 1, 1, 1)
        return fp.by_month(obs(time=(start_time, stop_time)))

    obs_w = f("pr_W")
    self.data["west"] = obs_w
    obs_w = _trim_to_full_years(obs_w)
    obs_e = f("pr_CE")
    self.data["east"] = obs_e
    obs_e = _trim_to_full_years(obs_e)
    # Bug fix: the input file was never closed.
    f.close()
    self.reshaped = {}
    # Remove the climatological annual mean to form anomalies.
    self.reshaped["east"] = obs_e - MV.average(obs_e, axis=0)
    self.reshaped["west"] = obs_w - MV.average(obs_w, axis=0)
    self.reshaped["multi"] = [self.reshaped["west"], self.reshaped["east"]]
    self.dataset = dataset
def pdsi_SN_figure(D, start_time=None, stop_time=None, use_dai=True):
    """Plot signal-to-noise ratios of PDSI trends versus trend end year.

    For trends beginning at `start_time` and ending at each successive year up
    to `stop_time`, computes the observed trend in the CRU, Dai and tree-ring
    projections divided by the preindustrial-noise standard deviation for the
    same trend length, and plots each S/N curve against end year.
    """
    noise_cru = []
    noise_dai = []
    noise_tree = []
    noise = []
    signal_cru = []
    signal_dai = []
    signal_tree = []
    if start_time is None:
        start_time = cdtime.comptime(1981, 1, 1)
    if stop_time is None:
        stop_time = cdtime.comptime(2017, 12, 31)
    # Each observational dataset has its own period of validity.
    stop_cru = cdtime.comptime(2017, 12, 31)
    stop_dai = cdtime.comptime(2014, 12, 31)
    start_cru = cdtime.comptime(1901, 1, 1)
    start_dai = cdtime.comptime(1901, 1, 1)
    pcru = D.ALL.project_cru_on_solver(start=start_cru)
    pdai = D.ALL.project_dai_on_solver(start=start_dai)
    start_tree = cdtime.comptime(1400, 1, 1)
    stop_tree = cdtime.comptime(1975, 12, 31)
    nt = stop_cru.year - start_time.year
    nmodel = D.ALL.P.shape[0]
    # Model trend distributions per trend length (retained for the commented
    # per-model plot below).
    H85 = np.ma.zeros((nmodel, nt))
    t = start_time.add(1, cdtime.Years)
    i = 0
    cru_time = []
    tree_time = []
    dai_time = []
    # March the trend end year forward one year at a time.
    while t.cmp(stop_time) < 0:
        L = t.year - start_time.year + 1
        modslopes, noiseterm = D.ALL.sn_at_time(start_time, L)
        H85[:, i] = modslopes
        noise += [np.std(noiseterm)]
        # Record each dataset's signal only inside its own valid window.
        if (t.cmp(stop_cru) <= 0) and (t.cmp(start_cru) > 0):
            signal_cru += [float(cmip5.get_linear_trends(pcru(time=(start_time, t))))]
            cru_time += [t.year]
            noise_cru += [np.std(noiseterm)]
        if (t.cmp(stop_dai) <= 0) and (t.cmp(start_dai) > 0):
            signal_dai += [float(cmip5.get_linear_trends(pdai(time=(start_time, t))))]
            dai_time += [t.year]
            noise_dai += [np.std(noiseterm)]
        if t.cmp(stop_tree) <= 0:
            signal_tree += [float(cmip5.get_linear_trends(D.ALL.projection(time=(start_time, t))))]
            tree_time += [t.year]
            noise_tree += [np.std(noiseterm)]
        t = t.add(1, cdtime.Years)
        i += 1
    timex = np.arange(start_time.year + 1, start_time.year + 1 + nt)
    # Optional: overlay individual model S/N trajectories.
    #for i in range(nmodel):
    #    plt.plot(timex,H85[i]/np.array(noise),c="k",lw=1,alpha=.2)
    plt.plot(cru_time, np.array(signal_cru) / np.array(noise_cru), label="CRU",
             color=get_dataset_color("cru"), lw=3)
    if use_dai:
        plt.plot(dai_time, np.array(signal_dai) / np.array(noise_dai), label="Dai",
                 color=get_dataset_color("dai"), lw=3)
    plt.plot(tree_time, np.array(signal_tree) / np.array(noise_tree), label="Tree Rings",
             color=get_dataset_color("tree"), lw=3)
def get_LAI_and_GPP(experiment="1pctCO2"):
    """Read ESM LAI and GPP ensembles, regrid to the GPCP obs grid,
    write both to /kate/TEST_DATA/ESM_LAI_GPP.nc, and return (LAI, GPP).
    """
    def _read_esm_ensemble(variable):
        # Collect ESM files for `variable`, regrid each member to the GPCP
        # grid, and stack into a (model, time, lat, lon) MV array.
        # (Previously this ~20-line block was duplicated for lai and gpp.)
        fnames = only_ESMS(np.array(
            cmip5.get_datafiles(experiment, variable, realm="land")))
        if experiment == "1pctCO2":
            # GFDL p1 increases CO2 only to doubling so get rid of it
            bad = np.where(np.array([
                x.find(".GFDL-ESM2M.1pctCO2.r1i1p1.") >= 0 for x in fnames]))[0]
            fnames = np.delete(fnames, bad)
        nmods = len(fnames)
        fobs = cdms.open("/work/marvel1/SEASONAL/OBS/GPCP.precip.mon.mean.nc")
        the_grid = fobs["precip"].getGrid()
        nlat, nlon = the_grid.shape
        fobs.close()
        ENS = MV.zeros((nmods, 140 * 12, nlat, nlon))
        Xregrid = None
        for i in range(nmods):
            f = cdms.open(fnames[i])
            Xregrid = PETFUNC(f(variable))
            ENS[i] = Xregrid
            f.close()  # bug fix: per-member files were left open
        # Bug fix: axes now come from this variable's own regridded data
        # (the GPP axes previously reused the stale Xregrid from the LAI loop).
        ENS.setAxisList([cmip5.make_model_axis(fnames)] + Xregrid.getAxisList())
        ENS.id = variable
        return ENS

    LAI = _read_esm_ensemble("lai")
    GPP = _read_esm_ensemble("gpp")
    fw = cdms.open("/kate/TEST_DATA/ESM_LAI_GPP.nc", "w")
    fw.write(LAI)
    fw.write(GPP)
    fw.close()
    return LAI, GPP
def DA_histogram(self, experiment, direction, start=None, stop=None, datasets=None):
    """Histogram model vs. piControl trend distributions for a fingerprint
    projection, and mark the observed trends.

    Parameters
    ----------
    experiment : str
        Fingerprint attribute name on self (also validates it exists).
    direction : str
        "east" or "west" projection (passed through to the projection helpers).
    start, stop : comptime or None
        Trend window; defaults to the GPCP record span.
    datasets : list/str/None
        Observational datasets to project; defaults to gpcp, cmap, precl.
    """
    fingerprint = getattr(self, experiment)  # raises early if experiment is invalid
    if start is None:
        start = cmip5.start_time(self.gpcp.reshaped["east"])
        start = cdtime.comptime(start.year, start.month, 1)
    if stop is None:
        stop = cmip5.stop_time(self.gpcp.reshaped["east"])
        stop = cdtime.comptime(stop.year, stop.month, 30)
    # get the h85 projections over the same time period
    H85m = self.model_projections(experiment, direction)(time=(start, stop))
    H85 = cmip5.cdms_clone(np.ma.mask_rows(H85m), H85m)
    H85_trends = cmip5.get_linear_trends(H85)
    # get the piControl projection time series
    noise = self.noise_projections(experiment, direction)
    L = stop.year - start.year + 1
    noise_trends = da.get_slopes(noise, L)
    # plot the two trend distributions with fitted normals
    plt.hist(H85_trends.compressed(), 25, color=da_colors("h85"), alpha=.5, normed=True)
    plt.hist(noise_trends, 25, color=da_colors("piC"), alpha=.5, normed=True)
    da.fit_normals_to_data(H85_trends, color=da_colors("h85"), lw=3, label="H85")
    da.fit_normals_to_data(noise_trends, color=da_colors("piC"), lw=3, label="piControl")
    # Project the observations
    if datasets is None:
        datasets = ["gpcp", "cmap", "precl"]
    # Idiom: isinstance instead of type(...) != type([]).
    if not isinstance(datasets, list):
        datasets = [datasets]
    for dataset in datasets:
        obs_proj = self.obs_projections(experiment, dataset, direction)(time=(start, stop))
        obs_trend = cmip5.get_linear_trends(obs_proj)
        plt.axvline(obs_trend, label=dataset, color=da_colors(dataset))
        # Bug fix: py2 print statement -> function (works in 2 and 3); the
        # original message also ran the dataset name into "S/N" with no space.
        print(dataset + " S/N is: " + str(obs_trend / np.std(noise_trends)))
def get_P_and_E(experiment="1pctCO2"):
    """Read ESM precipitation (pr) and evaporation (evspsbl) ensembles,
    regrid to the GPCP obs grid, write both to
    /kate/TEST_DATA/ESM_PR_EVSPSBL.nc, and return (PR, EVSPSBL).
    """
    def _read_esm_ensemble(variable):
        # Collect ESM files for `variable`, regrid each member to the GPCP
        # grid, and stack into a (model, time, lat, lon) MV array.
        # (Previously this ~20-line block was duplicated for pr and evspsbl.)
        fnames = only_ESMS(np.array(cmip5.get_datafiles(experiment, variable)))
        if experiment == "1pctCO2":
            # GFDL p1 increases CO2 only to doubling so get rid of it
            bad = np.where(np.array([
                x.find(".GFDL-ESM2M.1pctCO2.r1i1p1.") >= 0 for x in fnames]))[0]
            fnames = np.delete(fnames, bad)
        nmods = len(fnames)
        fobs = cdms.open("/work/marvel1/SEASONAL/OBS/GPCP.precip.mon.mean.nc")
        the_grid = fobs["precip"].getGrid()
        nlat, nlon = the_grid.shape
        fobs.close()
        ENS = MV.zeros((nmods, 140 * 12, nlat, nlon))
        Xregrid = None
        for i in range(nmods):
            f = cdms.open(fnames[i])
            Xregrid = PETFUNC(f(variable))
            ENS[i] = Xregrid
            f.close()  # bug fix: per-member files were left open
        # Bug fix: axes now come from this variable's own regridded data
        # (the EVSPSBL axes previously reused the stale Xregrid from the PR loop).
        ENS.setAxisList([cmip5.make_model_axis(fnames)] + Xregrid.getAxisList())
        ENS.id = variable
        return ENS

    PR = _read_esm_ensemble("pr")
    EVSPSBL = _read_esm_ensemble("evspsbl")
    fw = cdms.open("/kate/TEST_DATA/ESM_PR_EVSPSBL.nc", "w")
    fw.write(PR)
    fw.write(EVSPSBL)
    fw.close()
    # Backward-compatible addition: also return the arrays (the original
    # returned None), matching get_LAI_and_GPP.
    return PR, EVSPSBL
def get_rid_of_bad(h85):
    """Drop the FGOALS and bcc members, which are on the wrong grid
    (FIX THIS LATER???), and rebuild the model axis accordingly.
    """
    models = cmip5.models(h85)
    # Keep everything except indices 3 and 22.
    keep = models[:3] + models[4:22] + models[23:]
    trimmed = MV.concatenate((h85[:3], h85[4:22], h85[23:]))
    trimmed.setAxis(0, cmip5.make_model_axis(keep))
    for axis_i in range(1, len(h85.shape)):
        trimmed.setAxis(axis_i, h85.getAxis(axis_i))
    trimmed.id = "pdsi"
    return trimmed
def convert_to_mm(X):
    """Convert a precipitation field to monthly totals in mm.

    Fields already in "mm" are returned unchanged; fluxes in kg m-2 s-1 are
    first converted to mm/day, then every time step is scaled by the number
    of days in its calendar month.
    """
    if X.units == "mm":
        return X
    if X.units == 'kg m-2 s-1':
        X = X * 60 * 60 * 24  # kg m-2 s-1 is equivalent to mm/s -> mm/day
    month_lengths = np.array(
        [calendar.monthrange(t.year, t.month)[1]
         for t in X.getTime().asComponentTime()])
    rank = len(X.shape)
    # Broadcast day counts along the time axis (axis 0 for 3-D, axis 1 for 4-D).
    if rank == 3:
        scaled = X * month_lengths[:, np.newaxis, np.newaxis]
    elif rank == 4:
        scaled = X * month_lengths[np.newaxis, :, np.newaxis, np.newaxis]
    Xd = cmip5.cdms_clone(scaled, X)
    Xd.units = "mm"
    return Xd
def get_land_ice_mask(fname):
    """Return a boolean mask that is True over ocean and ice sheets.

    Uses the model's land fraction (sftlf) and glacier fraction (sftgif)
    fixed fields located via the cmip5 helpers.
    """
    fland = cdms.open(cmip5.landfrac(fname))
    fglac = cdms.open(cmip5.glacierfrac(fname))
    land_frac = fland("sftlf")
    glacier_frac = fglac("sftgif")
    # Mask cells with no land at all or completely covered by glacier.
    totmask = np.logical_or(land_frac == 0, glacier_frac == 100.)
    fland.close()
    fglac.close()
    return totmask
def SignalToNoise(D, fortalk=False):
    """Two-panel time-of-emergence figure: PDSI (top) and soil moisture (bottom).

    If `fortalk` is True, each panel is drawn on its own full figure instead
    of stacked subplots.
    """
    if fortalk:
        plt.figure()
    else:
        plt.subplot(211)
    # Panel (a): emergence of the PDSI signal, noise estimated from obs start.
    time_of_emergence_figure(D, noisestart=cmip5.start_time(D.ALL.obs))
    plt.title("(a): Time of emergence for PDSI signal")
    plt.xlim(1985, 2050)
    plt.legend(ncol=2)
    if fortalk:
        plt.figure()
        ax = plt.subplot(111)
    else:
        ax = plt.subplot(212)
    # Panel (b): emergence for the 30cm and 2m soil-moisture metrics.
    start_time = cdtime.comptime(1981, 1, 1)
    ALL_SM = soil.SoilMoisture(D.ALL.obs.mask[0])
    ALL_SM.time_of_emergence(start_time, "30cm", ax=ax, color=cm.Set1(1 / 2.))
    ALL_SM.time_of_emergence(start_time, "2m", ax=ax, color=cm.Set1(2 / 2.))
    plt.xlim(1985, 2050)
    plt.title("(b): Times of emergence for soil moisture metrics")
    #noisefigure(D)
    plt.legend()
    # NOTE(review): this second title overwrites the one set just above — it
    # looks like a leftover from the commented-out noisefigure variant; confirm.
    plt.title("(b): Preindustrial \"noise\" terms")
def Smodel_trends(D):
    """Map the multi-model-mean PDSI linear trend over land."""
    trend_map = cmip5.get_linear_trends(D.ALL.mma)
    basemap = b.landplot(trend_map)
    # Gray continents behind the data so masked land is visible.
    basemap.fillcontinents(color="gray", zorder=0)
    plt.colorbar(orientation="horizontal",
                 label="1900-2099 trend (PDSI/decade)")
    plt.ylim(-60, 90)
def opendap_ensemble(model, variable, experiment):
    """Download every available ensemble member of (model, variable, experiment)
    via OpenDAP and stack them on a model axis.

    Members that fail to download are reported and left masked (fill 1.e20).
    """
    rips = get_rips_opendap(model, variable, experiment)
    L = len(rips)
    # First member defines the array shape.
    ens_member = opendap_data(model, variable, experiment, rips[0])
    ENS = MV.zeros((L,) + ens_member.shape) + 1.e20
    ENS[0] = ens_member
    # range(1, L) is empty when L == 1, so no explicit guard is needed.
    for i in range(1, L):
        try:
            ens_member = opendap_data(model, variable, experiment, rips[i])
            ENS[i] = ens_member
        # Bug fix: narrowed from a bare except, which also swallowed
        # KeyboardInterrupt and SystemExit.
        except Exception:
            print("problem downloading ", model + "." + rips[i])
    ENS = MV.masked_where(ENS > 1.e10, ENS)
    fnames_rip = [variable + "." + experiment + "." + model + "." + rip
                  for rip in rips]
    modax = cmip5.make_model_axis(fnames_rip)
    # NOTE(review): axes come from the last successfully downloaded member,
    # as in the original — assumes all members share one grid/time axis.
    axlist = [modax] + ens_member.getAxisList()
    ENS.id = variable
    ENS.setAxisList(axlist)
    cdutil.setTimeBoundsMonthly(ENS)
    return ENS
def concatenated_piControl(models, variable, L=500, grid=None):
    """Stack the first L years of each model's piControl run of `variable`
    on a common grid.

    If no grid is supplied, the CESM2 r1i1p1f1 historical grid is used as
    the regridding target.
    """
    rawdir = get_rawdir(variable)
    if grid is None:
        reference_files = sorted(glob.glob(
            rawdir + variable + "/CESM2/*.historical.*.r1i1p1f1.*"))
        f = cdms.open(reference_files[0])
        grid = f(variable).getGrid()
        f.close()
    nmod = len(models)
    allpiC = MV.zeros((nmod, L * 12) + grid.shape)
    for i in range(nmod):
        print(models[i])
        raw = get_piControl_firstmember(models[i], variable, L=L)
        data_regrid = raw.regrid(grid, regridTool='regrid2')
        allpiC[i] = data_regrid
    allpiC.setAxisList([cmip5.make_model_axis(models)]
                       + data_regrid.getAxisList())
    allpiC.id = variable
    return allpiC
def get_slopes(c, yrs, plot=False):
    """ get non-overlapping trends in concatenated control run

    Splits `c` into consecutive non-overlapping segments of length `yrs`,
    fits a linear trend to each, and returns the trends as a compressed
    (NaN-free) array in per-decade units.
    """
    trends = np.ma.array([])
    start = 0
    end = yrs
    if plot:
        # Number of segments, used to color-code them.
        # NOTE(review): len(c)/yrs is integer division under Python 2 —
        # behavior differs under Python 3; confirm intended interpreter.
        Ntot = float(len(c)/yrs)
    while end < len(c):
        ctrunc = c[start:end]
        # The completeness check below is disabled; every segment is used.
        #if len(ctrunc.compressed()) == len(ctrunc):
        if True:
            # Express in "per decade" units. All control runs are in days.
            slope0, intercept0 = genutil.statistics.linearregression(ctrunc)
            slope = slope0*3650.
            if plot:
                xax = cmip5.get_plottable_time(ctrunc)
                plt.plot(xax, ctrunc.asma(), color=cm.RdYlBu(float(start/yrs)/Ntot))
                # Overlay the fitted line for this segment.
                plt.plot(xax, float(slope0)*ctrunc.getTime()[:]+float(intercept0), color="k", linewidth=3)
            trends = np.append(trends, slope)
        start = end
        end += yrs
    # Drop any NaN slopes before returning.
    trends = MV.masked_where(np.isnan(trends), trends)
    trends = trends.compressed()
    return trends
def concatenate_this(piC, modaxis=0, compressed=False):
    """Concatenate a (model, time, ...) array along time into one long record.

    Collapses the model and time axes into a single synthetic annual time
    axis (365-day spacing from year 1).  If `compressed` is True, models
    containing any masked time steps are dropped first (assumes modaxis == 0
    for that pass).  Raises TypeError if there is no time axis.
    """
    if not ("time" in piC.getAxisIds()):
        print("Need a time axis to concatenate along")
        raise TypeError
    if compressed:
        axlist = piC.getAxisList()
        todel = []
        modax = piC.getAxis(modaxis)
        # NOTE(review): eval() on the axis "models" attribute — trusted
        # metadata only; a parser would be safer.
        allfiles = eval(modax.models)
        if len(allfiles) != piC.shape[modaxis]:
            # The stored list was fused into one string; re-split on ".xml".
            allfiles = [x+".xml" for x in allfiles[0].split(".xml")[:-1]]
        allfiles = np.array(allfiles)
        #assume model axis is 0 for this
        for i in range(len(allfiles)):
            # Drop models whose series contain any masked values.
            if len(piC[i].compressed()) != len(piC[i]):
                todel += [i]
        piC = MV.array(np.delete(piC, todel, axis=modaxis))
        allfiles = np.delete(allfiles, todel).tolist()
        newmodax = cmip5.make_model_axis(allfiles)
        piC.setAxisList([newmodax]+axlist[1:])
    naxes = len(piC.shape)
    timeaxis = piC.getAxisIds().index("time")
    dimensions = piC.shape
    nmodc = dimensions[modaxis]
    ntc = dimensions[timeaxis]
    # New leading dimension: all models' time series laid end to end.
    newdim = (nmodc*ntc,)
    units = 'days since 0001-1-1'
    start = cdtime.comptime(1, 1, 1)
    # Synthetic annual axis: one value per step, 365 days apart, mid-month offset.
    tax = cdms.createAxis(np.arange(0, nmodc*ntc*365, 365)+15.5)
    #tax = cdms.createAxis([start.add(i,cdtime.Months).torel(units).value for i in range(ntc*nmodc)])
    tax.units = units
    tax.id = "time"
    tax.designateTime()
    newaxes = [tax]
    if len(dimensions) > 2:
        # Carry over any remaining (e.g. spatial) axes unchanged.
        for i in range(len(dimensions)):
            if (i != timeaxis) and (i != modaxis):
                newdim += (dimensions[i],)
                newaxes += [piC.getAxis(i)]
    piC_concatenate = piC.reshape(newdim)
    piC_concatenate.setAxisList(newaxes)
    return piC_concatenate
def write_regridded_ensemble(the_grid=None, label="summerseason.ensemble"):
    """write ensemble of summer season PDSI

    Reads every PDSI file under /Volumes/Marvel/FromBen/PDSI/, computes the
    summer-season PDSI, regrids/truncates to `the_grid`, stacks the members
    on a model axis, writes the result to the drought-atlas directory, and
    returns the ensemble.  Members whose shape does not match are left masked.
    """
    direc = "/Volumes/Marvel/FromBen/PDSI/"
    allfiles = glob.glob(direc + "*")
    nf = len(allfiles)

    def _read_member(fname):
        # Summer-season PDSI for one file, regridded to the target grid.
        f = cdms.open(fname)
        X = f("PDSI")
        f.close()  # bug fix: handles were previously left open
        return regrid_and_truncate(summerseason_pdsi(X), the_grid=the_grid)

    Xt = _read_member(allfiles[0])
    ens = MV.zeros((nf,) + Xt.shape) + 1.e20
    ens[0] = Xt
    # Bug fix: take the axes from the first member, so they are defined even
    # when nf == 1 or when every subsequent member fails the shape check
    # (previously `axes` could be referenced before assignment).
    axes = Xt.getAxisList()
    for i in range(1, nf):
        Xt = _read_member(allfiles[i])
        try:
            ens[i] = Xt
        except Exception:
            # Shape mismatch with the target grid: leave this member masked.
            continue
    ens = MV.masked_where(np.abs(ens) > 1.e10, ens)
    ens = MV.masked_where(np.isnan(ens), ens)
    ens.id = "pdsi"
    modax = cmip5.make_model_axis(allfiles)
    ens.setAxisList([modax] + axes)
    fw = cdms.open("../DROUGHT_ATLAS/CMIP5/pdsi." + label + ".hist.rcp85.nc", "w")
    ens.id = 'pdsi'
    fw.write(ens)
    fw.close()
    return ens
def truncated_solver(D, the_start=None, the_stop=None, include_cru=False, include_dai=False):
    """Build an EOF solver on the multi-model mean over a truncated time span.

    If include_cru or include_dai is set, the solver is instead trained on
    D.ALL.mma masked to the grid cells where that observational dataset has
    complete coverage.
    """
    thedata = D.ALL.model(time=(the_start, the_stop))
    the_mma = MV.average(cmip5.ensemble2multimodel(thedata), axis=0)
    if include_cru:
        f = cdms.open("../DROUGHT_ATLAS/OBSERVATIONS/CRU_selfcalibrated.nc")
        cru_jja = f("pdsi")
        f.close()
        # Apply the tree-ring obs mask, then find cells valid at every time step.
        cru_jja_mask = mask_data(cru_jja, D.ALL.obs[0].mask)(time=(the_start, '2018-12-31'))
        newmask = np.prod(~cru_jja_mask.mask, axis=0)
        cru_jja_mask = mask_data(cru_jja_mask, newmask == 0)
        # NOTE(review): the solver uses D.ALL.mma (not the locally computed
        # the_mma), and cru_jja_mask itself is not used further — confirm intent.
        thesolver = Eof(mask_data(D.ALL.mma(time=(the_start, the_stop)), newmask == 0), weights='area')
    elif include_dai:
        f = cdms.open("../DROUGHT_ATLAS/OBSERVATIONS/DAI_selfcalibrated.nc")
        dai_jja = f("pdsi")
        f.close()
        # Same masking procedure as above, for the Dai dataset.
        dai_jja_mask = mask_data(dai_jja, D.ALL.obs[0].mask)(time=(the_start, '2018-12-31'))
        newmask = np.prod(~dai_jja_mask.mask, axis=0)
        dai_jja_mask = mask_data(dai_jja_mask, newmask == 0)
        thesolver = Eof(mask_data(D.ALL.mma(time=(the_start, the_stop)), newmask == 0), weights='area')
    else:
        thesolver = Eof(the_mma, weights="area")
    return thesolver
def ensemble_average(self, experiment):
    """Average the members of each model's ensemble for `experiment`.

    Returns a (model, time) MV array over the region's approved model list;
    models with no members present are masked (and reported when verbose).
    """
    self.get_ensemble(experiment)
    data = getattr(self, experiment)
    nens, ntime = data.shape
    models = get_ok_models(self.region)
    EnsembleAverage = np.ma.zeros((len(models), ntime)) + 1.e20
    fnames = np.array(get_ensemble_filenames(self.variable, self.region, experiment))
    for counter, model in enumerate(models):
        # Members whose filename's model field matches this model.
        members = np.where([fname.split(".")[2] == model for fname in fnames])[0]
        if len(members) > 0:
            EnsembleAverage[counter] = np.ma.average(data.asma()[members], axis=0)
        elif self.verbose:
            print("missing data for " + model + " " + self.variable + " " + experiment)
    # Mask fill values and NaNs before attaching axes.
    EnsembleAverage = MV.masked_where(np.abs(EnsembleAverage) > 1.e10, EnsembleAverage)
    EnsembleAverage = MV.masked_where(np.isnan(EnsembleAverage), EnsembleAverage)
    EnsembleAverage = MV.array(EnsembleAverage)
    EnsembleAverage.setAxis(1, data.getTime())
    EnsembleAverage.setAxis(0, cmip5.make_model_axis(models))
    cdutil.setTimeBoundsMonthly(EnsembleAverage)
    return EnsembleAverage
def single_member_ensemble(self, experiment):
    """Get a single member from each ensemble.

    Returns a (model, time) MV array holding the first available member for
    every approved model in the region; models with no members are masked
    (and reported when verbose).
    """
    self.get_ensemble(experiment)
    data = getattr(self, experiment)
    nens, ntime = data.shape
    models = get_ok_models(self.region)
    nmod = len(models)
    SingleMember = np.ma.zeros((nmod, ntime)) + 1.e20
    # Perf fix: the filename list is loop-invariant; fetch it once.  (The
    # original also built an unused sorted() copy, then shadowed it with a
    # fresh np.array(...) call on every loop iteration.)
    fnames = np.array(get_ensemble_filenames(self.variable, self.region, experiment))
    for counter, model in enumerate(models):
        I = np.where([x.split(".")[2] == model for x in fnames])[0]
        if len(I) > 0:
            # Take the first member found for this model.
            SingleMember[counter] = data.asma()[I[0]]
        else:
            if self.verbose:
                print("missing data for " + model + " " + self.variable + " " + experiment)
    SingleMember = MV.masked_where(np.abs(SingleMember) > 1.e10, SingleMember)
    SingleMember = MV.masked_where(np.isnan(SingleMember), SingleMember)
    SingleMember.setAxis(1, data.getTime())
    modax = cmip5.make_model_axis(models)
    SingleMember.setAxis(0, modax)
    cdutil.setTimeBoundsMonthly(SingleMember)
    return SingleMember
def get_crossing_time(region, variable, scenario, month):
    """Return the year the ensemble-mean S/N stably exceeds the 99% threshold.

    "Stably" means the last dip below the threshold is followed only by
    exceedances through the end of the record.  Returns None when no data
    file exists, the threshold is never exceeded, or the series is not above
    the threshold at its final time step.
    """
    vcert = stats.norm.interval(.99)[1]  # two-sided 99% normal threshold
    mfile = get_file(region, variable, scenario, month)
    if mfile is None:
        return None
    f = cdms.open(mfile)
    # Bug fix: the original leaked the open file handle on its early returns;
    # try/finally guarantees closure on every path.
    try:
        data = f(variable + "_SN")
        avg = MV.average(data, axis=0)
        threshexceed = np.where(np.abs(avg) > vcert)[0]
        # Never exceeds the threshold.
        if len(threshexceed) == 0:
            return None
        # Not above the threshold at the final time step: no stable emergence.
        if len(avg) - 1 not in threshexceed:
            return None
        # Index (within threshexceed) of the first exceedance after the last
        # gap; 0 if the exceedances are one unbroken run.
        gaps = np.where(np.diff(threshexceed) > 1)[0]
        if len(gaps) > 0:
            isnot1 = np.max(gaps) + 1
        else:
            isnot1 = 0
        staysabove = int(threshexceed[isnot1])
        return int(cmip5.get_plottable_time(data)[staysabove])
    finally:
        f.close()
def proj_aerosols(AA, piControl, H85, start=None, stop=None):
    """Project H85 and piControl onto the AA multivariate fingerprint and
    compute trend distributions over [start, stop].

    Returns
    -------
    (hslopes, pslopes) : tuple
        Linear trends of each H85 model's projection, and the distribution of
        same-length trends in the piControl projection.
    """
    if start is None:
        start = cdtime.comptime(1945, 1, 1)
    if stop is None:
        stop = cdtime.comptime(1984, 12, 31)
    nmod, nyears, nmonths = H85.reshaped["west"].shape
    P = MV.zeros((nmod, nyears))
    msolver = AA.solvers["multi"]
    fac = da.get_orientation(msolver)
    # Project each model's (west, east) pair onto the joint fingerprint.
    for i in range(nmod):
        to_proj = [H85.reshaped["west"][i], H85.reshaped["east"][i]]
        P[i] = msolver.projectField(to_proj)[:, 0] * fac
    P.setAxis(0, H85.reshaped["west"].getAxis(0))
    timeax = H85.reshaped["west"].getAxis(1)
    timeax.id = "time"
    P.setAxis(1, timeax)
    # Control-run projection provides the noise trend distribution.
    piCdata = [piControl.reshaped["west"], piControl.reshaped["east"]]
    pc = msolver.projectField(piCdata)[:, 0]
    Pt = P(time=(start, stop))
    nt = len(Pt.getTime())
    hslopes = cmip5.get_linear_trends(Pt)
    pslopes = da.get_slopes(pc, nt)
    # Bug fix: the computed trend distributions were previously discarded.
    return hslopes, pslopes