def plot_obs_trends(self, dataset, **kwargs):
    X = self.OBS[dataset.upper()]
    west = X.reshaped["west"]
    east = X.reshaped["east"]
    if "start" not in kwargs:
        start = cmip5.start_time(east)
        start = cdtime.comptime(start.year, start.month, 1)
    else:
        start = kwargs.pop("start")
    if "stop" not in kwargs:
        stop = cmip5.stop_time(east)
        stop = cdtime.comptime(stop.year, stop.month, 30)
    else:
        stop = kwargs.pop("stop")
    west = west(time=(start, stop))
    east = east(time=(start, stop))
    west.getAxis(0).id = "time"
    east.getAxis(0).id = "time"
    plt.plot(cmip5.get_linear_trends(west).asma(), label="WEST",
             color=get_colors("west"), **kwargs)
    plt.plot(cmip5.get_linear_trends(east).asma(), label="EAST",
             color=get_colors("east"), **kwargs)
    months = ["JAN", "FEB", "MAR", "APR", "MAY", "JUN",
              "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"]
    plt.xticks(np.arange(12), months)
def for_figure_4(self, start_time, stop_time=None, overlapping=True,
                 include_trees=True, include_dai=False, include_cru=False,
                 include_piControl=False, noisestart=None, solver=None):
    data = {}
    if stop_time is None:
        stop_time = cmip5.stop_time(self.get_tree_ring_projection())
    target_obs = self.get_tree_ring_projection()(time=(start_time, stop_time))
    L = len(target_obs)
    modslopes, noiseterm = self.sn_at_time(start_time, L, overlapping=True,
                                           noisestart=noisestart, solver=solver)
    ns = np.std(noiseterm)
    signal = float(cmip5.get_linear_trends(target_obs))
    data["noise"] = noiseterm
    data["modslopes"] = modslopes
    data["tree_rings"] = signal
    if include_dai:
        dai_proj = self.project_dai_on_solver(start=start_time, solver=solver)
        daitrend = cmip5.get_linear_trends(dai_proj(time=(start_time, stop_time)))
        data["dai"] = daitrend
    if include_cru:
        cru_proj = self.project_cru_on_solver(start=start_time, solver=solver)
        crutrend = cmip5.get_linear_trends(cru_proj(time=(start_time, stop_time)))
        data["cru"] = crutrend
    if include_piControl:
        p = self.project_piControl_on_solver(solver=solver)
        noiseterm_mod = bootstrap_slopes(p, L)
        data["picontrol"] = noiseterm_mod
    return data
def pdsi_SN_figure(D, start_time=None, stop_time=None, use_dai=True):
    noise_cru = []
    noise_dai = []
    noise_tree = []
    noise = []
    signal_cru = []
    signal_dai = []
    signal_tree = []
    if start_time is None:
        start_time = cdtime.comptime(1981, 1, 1)
    if stop_time is None:
        stop_time = cdtime.comptime(2017, 12, 31)
    stop_cru = cdtime.comptime(2017, 12, 31)
    stop_dai = cdtime.comptime(2014, 12, 31)
    start_cru = cdtime.comptime(1901, 1, 1)
    start_dai = cdtime.comptime(1901, 1, 1)
    pcru = D.ALL.project_cru_on_solver(start=start_cru)
    pdai = D.ALL.project_dai_on_solver(start=start_dai)
    start_tree = cdtime.comptime(1400, 1, 1)
    stop_tree = cdtime.comptime(1975, 12, 31)
    nt = stop_cru.year - start_time.year
    nmodel = D.ALL.P.shape[0]
    H85 = np.ma.zeros((nmodel, nt))
    t = start_time.add(1, cdtime.Years)
    i = 0
    cru_time = []
    tree_time = []
    dai_time = []
    while t.cmp(stop_time) < 0:
        L = t.year - start_time.year + 1
        modslopes, noiseterm = D.ALL.sn_at_time(start_time, L)
        H85[:, i] = modslopes
        noise += [np.std(noiseterm)]
        if (t.cmp(stop_cru) <= 0) and (t.cmp(start_cru) > 0):
            signal_cru += [float(cmip5.get_linear_trends(pcru(time=(start_time, t))))]
            cru_time += [t.year]
            noise_cru += [np.std(noiseterm)]
        if (t.cmp(stop_dai) <= 0) and (t.cmp(start_dai) > 0):
            signal_dai += [float(cmip5.get_linear_trends(pdai(time=(start_time, t))))]
            dai_time += [t.year]
            noise_dai += [np.std(noiseterm)]
        if t.cmp(stop_tree) <= 0:
            signal_tree += [float(cmip5.get_linear_trends(D.ALL.projection(time=(start_time, t))))]
            tree_time += [t.year]
            noise_tree += [np.std(noiseterm)]
        t = t.add(1, cdtime.Years)
        i += 1
    timex = np.arange(start_time.year + 1, start_time.year + 1 + nt)
    #for i in range(nmodel):
    #    plt.plot(timex, H85[i] / np.array(noise), c="k", lw=1, alpha=.2)
    plt.plot(cru_time, np.array(signal_cru) / np.array(noise_cru),
             label="CRU", color=get_dataset_color("cru"), lw=3)
    if use_dai:
        plt.plot(dai_time, np.array(signal_dai) / np.array(noise_dai),
                 label="Dai", color=get_dataset_color("dai"), lw=3)
    plt.plot(tree_time, np.array(signal_tree) / np.array(noise_tree),
             label="Tree Rings", color=get_dataset_color("tree"), lw=3)
def DA_histogram(self, experiment, direction, start=None, stop=None, datasets=None):
    fingerprint = getattr(self, experiment)
    if start is None:
        start = cmip5.start_time(self.gpcp.reshaped["east"])
        start = cdtime.comptime(start.year, start.month, 1)
    if stop is None:
        stop = cmip5.stop_time(self.gpcp.reshaped["east"])
        stop = cdtime.comptime(stop.year, stop.month, 30)
    #get the h85 projections over the same time period
    H85m = self.model_projections(experiment, direction)(time=(start, stop))
    H85 = cmip5.cdms_clone(np.ma.mask_rows(H85m), H85m)
    H85_trends = cmip5.get_linear_trends(H85)
    #get the piControl projection time series
    noise = self.noise_projections(experiment, direction)
    L = stop.year - start.year + 1
    noise_trends = da.get_slopes(noise, L)
    #plot
    plt.hist(H85_trends.compressed(), 25, color=da_colors("h85"), alpha=.5, normed=True)
    plt.hist(noise_trends, 25, color=da_colors("piC"), alpha=.5, normed=True)
    da.fit_normals_to_data(H85_trends, color=da_colors("h85"), lw=3, label="H85")
    da.fit_normals_to_data(noise_trends, color=da_colors("piC"), lw=3, label="piControl")
    #plt.axvline(obs_trend, label=obs.dataset, color=da_colors(obs.dataset))
    #Project the observations
    if datasets is None:
        datasets = ["gpcp", "cmap", "precl"]
    if type(datasets) != type([]):
        datasets = [datasets]
    for dataset in datasets:
        obs_proj = self.obs_projections(experiment, dataset, direction)(time=(start, stop))
        obs_trend = cmip5.get_linear_trends(obs_proj)
        plt.axvline(obs_trend, label=dataset, color=da_colors(dataset))
        print dataset + " S/N is: " + str(obs_trend / np.std(noise_trends))
def DA_histogram(fingerprint, obslist, h85, piC, direction, start=None, stop=None):
    if type(obslist) == type([]):
        obs = obslist[0]
    else:
        obs = obslist
    if start is None:
        start = cmip5.start_time(obs.reshaped["east"])
        start = cdtime.comptime(start.year, start.month, 1)
    if stop is None:
        stop = cmip5.stop_time(obs.reshaped["east"])
        stop = cdtime.comptime(stop.year, stop.month, 30)
    #project the observations onto the fingerprint
    obs_proj = obs_projections(fingerprint, obs, direction)(time=(start, stop))
    obs_trend = cmip5.get_linear_trends(obs_proj)
    #get the h85 projections over the same time period
    H85m = model_projections(fingerprint, h85, direction)(time=(start, stop))
    H85 = cmip5.cdms_clone(np.ma.mask_rows(H85m), H85m)
    H85_trends = cmip5.get_linear_trends(H85)
    #get the piControl projection time series
    noise = noise_projections(fingerprint, piC, direction)
    L = len(obs_proj)
    noise_trends = da.get_slopes(noise, L)
    #plot
    plt.hist(H85_trends.compressed(), 25, color=da_colors("h85"), alpha=.5, normed=True)
    plt.hist(noise_trends, 25, color=da_colors("piC"), alpha=.5, normed=True)
    da.fit_normals_to_data(H85_trends, color=da_colors("h85"), lw=3, label="H85")
    da.fit_normals_to_data(noise_trends, color=da_colors("piC"), lw=3, label="piControl")
    plt.axvline(obs_trend, label=obs.dataset, color=da_colors(obs.dataset))
    if type(obslist) == type([]):
        for obs in obslist[1:]:
            obs_proj = obs_projections(fingerprint, obs, direction)(time=(start, stop))
            obs_trend = cmip5.get_linear_trends(obs_proj)
            plt.axvline(obs_trend, label=obs.dataset, color=da_colors(obs.dataset))
    return H85, noise, obs_proj
def Smodel_trends(D):
    m = b.landplot(cmip5.get_linear_trends(D.ALL.mma))
    m.fillcontinents(color="gray", zorder=0)
    plt.colorbar(orientation="horizontal", label="1900-2099 trend (PDSI/decade)")
    plt.ylim(-60, 90)
def proj_aerosols(AA, piControl, H85, start=None, stop=None):
    if start is None:
        start = cdtime.comptime(1945, 1, 1)
    if stop is None:
        stop = cdtime.comptime(1984, 12, 31)
    data = [H85.reshaped["west"], H85.reshaped["east"]]
    nmod, nyears, nmonths = H85.reshaped["west"].shape
    P = MV.zeros((nmod, nyears))
    msolver = AA.solvers["multi"]
    fac = da.get_orientation(msolver)
    #project each model onto the multivariate fingerprint
    for i in range(nmod):
        to_proj = [H85.reshaped["west"][i], H85.reshaped["east"][i]]
        P[i] = msolver.projectField(to_proj)[:, 0] * fac
    P.setAxis(0, H85.reshaped["west"].getAxis(0))
    timeax = H85.reshaped["west"].getAxis(1)
    timeax.id = "time"
    P.setAxis(1, timeax)
    piCdata = [piControl.reshaped["west"], piControl.reshaped["east"]]
    pc = msolver.projectField(piCdata)[:, 0]
    Pt = P(time=(start, stop))
    nt = len(Pt.getTime())
    hslopes = cmip5.get_linear_trends(Pt)
    pslopes = da.get_slopes(pc, nt)
    return hslopes, pslopes
def bootstrap_slopes(noise, L):
    """Overlapping-window trends: slide a window of length L through the noise
    time series and compute the linear trend in each window."""
    nt = noise.shape[0] - L
    test = MV.zeros((nt, L))
    for i in range(nt):
        test[i] = noise[i:L + i]
    test.setAxis(1, noise[:L].getAxis(0))
    return cmip5.get_linear_trends(test)
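
# Illustrative, standalone sketch (not called by anything in this module): the same
# overlapping-window slope bootstrap as bootstrap_slopes above, written with plain
# numpy so the logic can be checked without the cdms2/cmip5 stack. The function
# name and the per-timestep slope units are assumptions for illustration only.
def numpy_bootstrap_slopes(noise, L):
    import numpy as np
    noise = np.asarray(noise, dtype=float)
    nwindows = noise.shape[0] - L
    x = np.arange(L)
    # least-squares slope in each overlapping window of length L
    return np.array([np.polyfit(x, noise[i:i + L], 1)[0] for i in range(nwindows)])

# Example (illustrative): white noise gives a slope distribution centered on zero
# whose spread shrinks as the window length L grows.
# slopes = numpy_bootstrap_slopes(np.random.randn(500), L=50)
# print(np.mean(slopes), np.std(slopes))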
def fast_slopes(noise, L):
    """Non-overlapping version: chop the noise series into int(len(noise)/L)
    consecutive segments of length L and compute the trend in each."""
    ntrends = int(noise.shape[0] / L)
    trunc = noise[:L * ntrends]
    tax = noise[:L].getTime()
    fast = trunc.reshape(ntrends, L)
    fast.setAxis(1, tax)
    return cmip5.get_linear_trends(fast)
def plot_model_trends(self, i, start=None, stop=None):
    if i != "avg":
        west = self.h85.reshaped["west"][i]
        east = self.h85.reshaped["east"][i]
    else:
        west = MV.average(self.h85.reshaped["west"], axis=0)
        east = MV.average(self.h85.reshaped["east"], axis=0)
    if start is not None:
        west = west(time=(start, stop))
        east = east(time=(start, stop))
    plt.plot(cmip5.get_linear_trends(west).asma(), label="WEST")
    plt.plot(cmip5.get_linear_trends(east).asma(), label="EAST")
    months = ["JAN", "FEB", "MAR", "APR", "MAY", "JUN",
              "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"]
    plt.xticks(np.arange(12), months)
def get_signal_to_noise(self, season, ssp, start_time, stop_time):
    data = self.splice_historical(ssp)(time=(start_time, stop_time))
    seasdata = getattr(cdutil, season).departures(data)
    piCdata = self.concatenate_piControl(season)
    signals = cmip5.get_linear_trends(seasdata)
    L = len(seasdata.getTime())
    noise = bootstrap_slopes(piCdata, L)
    return signals, noise
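
# Hypothetical usage sketch for get_signal_to_noise: "signals" holds the per-model
# trends over the analysis window and "noise" is the distribution of same-length
# trends in the spliced piControl, so a common detection metric is the ratio of
# each signal to the standard deviation of the noise. The object name "X" and the
# 2-sigma threshold below are illustrative, not part of the code above.
# signals, noise = X.get_signal_to_noise("DJF", "ssp585",
#                                        cdtime.comptime(1980, 1, 1),
#                                        cdtime.comptime(2020, 12, 31))
# sn = signals / np.std(noise)
# detected = sn > 2.  # nominal ~95% threshold if the noise trends are roughly Gaussian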
def obs_SN(self, start_time, stop_time, depth, overlapping=True):
    self.project_soilmoisture("MERRA2")
    self.project_soilmoisture("GLEAM")
    L = stop_time.year - start_time.year + 1
    modslopes, noiseterm = self.sn_at_time(start_time, L, depth, overlapping=overlapping)
    ns = np.std(noiseterm)
    plt.hist(modslopes / ns, 20, normed=True, color=cm.Oranges(.8), alpha=.5)
    lab = str(start_time.year) + "-" + str(stop_time.year)
    da.fit_normals_to_data(modslopes / ns, color=cm.Oranges(.9), lw=3,
                           label=lab + " trends in H85 projections onto fingerprint")
    plt.hist(noiseterm / ns, 20, normed=True, color=cm.Purples(.8), alpha=.5)
    da.fit_normals_to_data(noiseterm / ns, color=cm.Purples(.9), lw=3,
                           label=str(L) + "-year trends in piControl projection onto fingerprint")
    plt.xlabel("S/N")
    plt.ylabel("Normalized Frequency")
    merra = self.OBS_PROJECTIONS["MERRA2"][depth](time=(start_time, stop_time))
    gleam = self.OBS_PROJECTIONS["GLEAM"][depth](time=(start_time, stop_time))
    merrasig = cmip5.get_linear_trends(merra) / ns
    plt.axvline(merrasig, label="MERRA2", c="b", lw=3)
    gleamsig = cmip5.get_linear_trends(gleam) / ns
    plt.axvline(gleamsig, label="GLEAM", c="r", lw=3)
    plt.legend()
def sn_at_time(self, start_time, L, overlapping=True):
    if not hasattr(self, "P"):
        self.model_projections()
    stop_time = start_time.add(L, cdtime.Years)
    modslopes = cmip5.get_linear_trends(self.P(time=(start_time, stop_time)))
    if overlapping:
        noiseterm = bootstrap_slopes(self.noise, L)
    else:
        noiseterm = da.get_slopes(self.noise, L) / 365.
    return modslopes, noiseterm
def sn_at_time(self, start_time, L, depth, overlapping=True):
    self.project_piControl_on_solver(depth)
    self.model_projections(depth)
    stop_time = start_time.add(L, cdtime.Years)
    modslopes = cmip5.get_linear_trends(self.P[depth](time=(start_time, stop_time)))
    if overlapping:
        noiseterm = b.bootstrap_slopes(self.noise[depth], L)
    else:
        noiseterm = da.get_slopes(self.noise[depth], L) / 365.
    return modslopes, noiseterm
def obs_SN(self, start_time, stop_time=None, overlapping=True, include_dai=False):
    if stop_time is None:
        stop_time = cmip5.stop_time(self.projection)
    target_obs = self.projection(time=(start_time, stop_time))
    L = len(target_obs)
    modslopes, noiseterm = self.sn_at_time(start_time, L, overlapping=True)
    ns = np.std(noiseterm)
    signal = float(cmip5.get_linear_trends(target_obs)) / ns
    plt.hist(modslopes / ns, 20, normed=True, color=cm.Oranges(.8), alpha=.5)
    lab = str(start_time.year) + "-" + str(stop_time.year)
    da.fit_normals_to_data(modslopes / ns, color=cm.Oranges(.9),
                           label=lab + " Model projections")
    plt.hist(noiseterm / ns, 20, normed=True, color=cm.Greens(.8), alpha=.5)
    da.fit_normals_to_data(noiseterm / ns, color=cm.Greens(.9),
                           label="Pre-1850 tree-ring reconstructions")
    plt.axvline(signal, color=cm.Blues(.8), lw=3, label=lab + " Tree-ring reconstructions")
    print signal
    if include_dai:
        dai_proj = self.project_dai_on_solver(start=start_time)
        daitrend = cmip5.get_linear_trends(dai_proj(time=(start_time, stop_time)))
    plt.legend(loc=0)
def signal_to_noise_map(drought_atlas, start_year=1100, modern_start=1979):
    preindustrial = drought_atlas[start_year:1850]
    modern = drought_atlas[modern_start:]
    modern_times = modern.shape[0]
    nt, nlat, nlon = drought_atlas.shape
    SN = MV.zeros(drought_atlas.shape[1:]) + 1.e20
    signals = cmip5.get_linear_trends(modern)
    for i in range(nlat):
        for j in range(nlon):
            if not drought_atlas.mask[-1, i, j]:
                #get_slopes assumes time units are days; these are years
                noise = da.get_slopes(preindustrial[:, i, j], modern_times) / 365.
                width = np.ma.std(noise)
                signal = signals[i, j]
                SN[i, j] = signal / width
    return SN
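
# Minimal numpy-only sketch of the per-gridcell S/N idea in signal_to_noise_map,
# assuming the "signal" is the modern-era trend at each cell and the "noise" is the
# spread of equal-length trends drawn from the preindustrial record at the same cell.
# Everything here (function name, array shapes) is illustrative, not part of the module.
def sn_map_sketch(preindustrial, modern):
    import numpy as np
    # preindustrial: (nt_pre, nlat, nlon); modern: (nt_mod, nlat, nlon)
    nt_mod, nlat, nlon = modern.shape
    x_mod = np.arange(nt_mod)
    sn = np.full((nlat, nlon), np.nan)
    for i in range(nlat):
        for j in range(nlon):
            signal = np.polyfit(x_mod, modern[:, i, j], 1)[0]
            # overlapping preindustrial windows, same length as the modern record
            slopes = [np.polyfit(x_mod, preindustrial[k:k + nt_mod, i, j], 1)[0]
                      for k in range(preindustrial.shape[0] - nt_mod)]
            sn[i, j] = signal / np.std(slopes)
    return sn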
def signals(self, season, ssp, single_member=True, start_time=None, init=10):
    if start_time is None:
        start_time = cdtime.comptime(1980, 1, 1)
    end_time = cdtime.comptime(2100, 12, 31)
    trend_end = start_time.add(init, cdtime.Years)
    if single_member:
        func = self.single_member_ensemble
    else:
        func = self.ensemble_average
    all_data = self.splice_historical(ssp, single_member=single_member)
    hist = func("historical")
    #define the anomaly base period
    clim_start_time = cdtime.comptime(1951, 1, 1)
    clim_end_time = cdtime.comptime(1980, 12, 31)
    clim = getattr(cdutil, season).climatology(
        hist(time=(clim_start_time, clim_end_time, 'co')))
    #get seasonal anomalies
    seasonal_data = getattr(cdutil, season).departures(all_data, ref=clim)
    #set up array for signals
    nmod, nyears = seasonal_data.shape
    tax = seasonal_data(time=(trend_end, end_time)).getTime()
    nsig = len(tax)
    signals = MV.zeros((nmod, nsig))
    missing_data = all_data[:, -1].mask
    #calculate signals
    counter = 0
    while trend_end.cmp(end_time) < 0:
        signals[:, counter] = MV.masked_where(
            missing_data,
            cmip5.get_linear_trends(seasonal_data(time=(start_time, trend_end))))
        trend_end = trend_end.add(1, cdtime.Years)
        counter += 1
    signals.setAxis(0, hist.getAxis(0))
    signals.setAxis(1, tax)
    return signals
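
# Illustrative numpy-only sketch of the expanding-window trend calculation that
# signals() performs on cdms objects: the start year stays fixed and the end year
# grows by one each iteration, so each column holds trends over a longer window.
# The function name and inputs below are hypothetical.
def expanding_window_trends(anomalies, init=10):
    import numpy as np
    # anomalies: (nmod, nyears) array of seasonal anomalies
    nmod, nyears = anomalies.shape
    ends = np.arange(init, nyears + 1)   # window lengths: init, init+1, ..., nyears
    out = np.empty((nmod, ends.size))
    for k, end in enumerate(ends):
        x = np.arange(end)
        # least-squares slope for each model over years [0, end)
        out[:, k] = np.polyfit(x, anomalies[:, :end].T, 1)[0]
    return out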
def regional_DA(OWDA, region, start_time=None, typ='fingerprint', return_noise=False):
    if start_time is None:
        start_time = cdtime.comptime(2000, 1, 1)
    times = np.arange(10, 76)
    modeldata = mask_data(OWDA.model(region), OWDA.obs(region)[0].mask)
    if typ == 'fingerprint':
        mma = MV.average(modeldata, axis=0)
        solver = Eof(mma, weights='area')
        to_proj = mask_data(modeldata, solver.eofs()[0].mask)
        P = MV.zeros(to_proj.shape[:2])
        for i in range(to_proj.shape[0]):
            tp = to_proj[i]
            mma_mask = mask_data(mma, tp[0].mask)
            solver = Eof(mma_mask, weights='area')
            fac = da.get_orientation(solver)
            P[i] = solver.projectField(tp)[:, 0] * fac
        P.setAxisList(to_proj.getAxisList()[:2])
        noise = solver.projectField(OWDA.obs(region))[:, 0]
    else:
        P = cdutil.averager(modeldata, axis='xy')
        noise = cdutil.averager(OWDA.obs(region), axis='xy')
    if return_noise:
        return P, noise
    else:
        nmod, nyears = P.shape
        TOE = MV.zeros((nmod, len(times)))
        for i in range(len(times)):
            L = times[i]
            stop_time = start_time.add(L, cdtime.Years)
            modslopes = cmip5.get_linear_trends(P(time=(start_time, stop_time)))
            noiseterm = np.std(bootstrap_slopes(noise, L))
            TOE[:, i] = modslopes / noiseterm
        TOE.setAxis(0, P.getAxis(0))
        return TOE
def sn_at_time(self, start_time, L, overlapping=True, noisestart=None, solver=None):
    if noisestart is None:
        noisestart = cmip5.start_time(self.obs)
    noisestop = cmip5.stop_time(self.get_noise(solver=solver))
    stop_time = start_time.add(L, cdtime.Years)
    modslopes = cmip5.get_linear_trends(
        self.get_forced(solver=solver)(time=(start_time, stop_time)))
    if overlapping:
        noiseterm = bootstrap_slopes(
            self.get_noise(solver=solver)(time=(noisestart, noisestop)), L)
    else:
        noiseterm = da.get_slopes(
            self.get_noise(solver=solver)(time=(noisestart, noisestop)), L) / 365.
    return modslopes, noiseterm
def plot_model_trends(self, ax=None, legend=False, change_units=False):
    months = ["JAN", "FEB", "MAR", "APR", "MAY", "JUN",
              "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"]
    if ax is None:
        fig = plt.figure()
        ax = plt.subplot(111)
    test = self.ensemble_average("ssp585")
    if change_units:
        if self.variable in ["mrso", "mrsos"]:
            test = self.standardize_zscore(test)
        else:
            test = self.convert_to_percentage(test)
    nmod = test.shape[0]
    trends = np.zeros((nmod, 12))
    models = cmip5.models(test)
    for i in range(12):
        month = months[i]
        mdat = getattr(cdutil, month).departures(test)
        trends[:, i] = cmip5.get_linear_trends(mdat)
    trends = MV.array(trends, mask=cdutil.ANNUALCYCLE.climatology(test).mask)
    trends.setAxis(0, test.getAxis(0))
    for i in range(nmod):
        model = models[i]
        c = get_model_colors(model)
        ls = get_model_ls(model)
        ax.plot(np.arange(12), trends[i].asma(), c=c, ls=ls, label=model)
    ax.set_xticks(np.arange(12))
    ax.set_xticklabels(months)
    ax.set_ylabel(self.variable)
    #ax.set_title(region)
    ax.axhline(0, ls=":", lw=.5, c="k")
    if legend:
        plt.legend(fontsize=6, ncol=2)
def projection_figure(D, fortalk=False):
    start = cdtime.comptime(1975, 8, 1)
    trends = cmip5.get_linear_trends(D.ALL.obs(time=(start, '2005-12-31')))
    if fortalk:
        plt.figure()
    else:
        plt.subplot(211)
    m = b.landplot(trends, vmin=-2, vmax=2)
    m.drawcoastlines(color="gray")
    plt.colorbar(orientation='horizontal', label="Trend (PDSI/decade)")
    plt.title("(a): 1975-2005 GDA trends")
    if fortalk:
        plt.figure()
    else:
        plt.subplot(212)
    Plotting.Plotting.time_plot(D.ALL.projection(time=('1100-1-1', '1850-12-31')),
                                color=cm.Greens(.9), lw=3, label="pre-industrial noise")
    Plotting.Plotting.time_plot(D.ALL.projection(time=('1851-1-1', '1974-12-31')),
                                c=cm.Greys(.5), lw=3)
    Plotting.Plotting.time_plot(D.ALL.projection(time=('1975-1-1', '2005-12-31')),
                                c=cm.Blues(.8), label="Target period", lw=3)
    plt.xlabel("Year")
    plt.ylabel("Projection")
    plt.title("(b): GDA projection on fingerprint")
    plt.xlim(1400, 2010)
    plt.legend()
def TOE(OnePct, piControl, H85, start=None):
    #Calculate the time of emergence:
    data = [H85.reshaped["west"], H85.reshaped["east"]]
    nmod, nyears, nmonths = H85.reshaped["west"].shape
    P = MV.zeros((nmod, nyears))
    msolver = OnePct.solvers["multi"]
    fac = da.get_orientation(msolver)
    for i in range(nmod):
        to_proj = [H85.reshaped["west"][i], H85.reshaped["east"][i]]
        P[i] = msolver.projectField(to_proj)[:, 0] * fac
    P.setAxis(0, H85.reshaped["west"].getAxis(0))
    timeax = H85.reshaped["west"].getAxis(1)
    timeax.id = "time"
    P.setAxis(1, timeax)
    piCdata = [piControl.reshaped["west"], piControl.reshaped["east"]]
    pc = msolver.projectField(piCdata)[:, 0]
    if start is None:
        start = cdtime.comptime(2000, 1, 1)
    stop = start.add(1, cdtime.Years)
    final_year = cdtime.comptime(2099, 12, 31)
    tl = final_year.year - start.year + 1
    NOISE = MV.zeros(tl)
    SIGNAL = MV.zeros((nmod, tl))
    i = 0
    while stop.cmp(final_year) < 0:
        modelproj = P(time=(start, stop))
        L = modelproj.shape[1]
        slopes = da.get_slopes(pc, L)
        SIGNAL[:, i] = cmip5.get_linear_trends(modelproj)
        NOISE[i] = np.ma.std(slopes)
        stop = stop.add(1, cdtime.Years)
        i += 1
    return SIGNAL, NOISE
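
# Hypothetical follow-on to TOE: given the SIGNAL (nmod x nyears) and NOISE (nyears)
# arrays it returns, a simple time of emergence per model is the first year in which
# the signal-to-noise ratio exceeds a chosen threshold. Names, the start year, and
# the 2-sigma threshold below are illustrative assumptions, not part of the module.
def first_crossing_year(SIGNAL, NOISE, start_year=2001, threshold=2.0):
    import numpy as np
    sn = np.asarray(SIGNAL) / np.asarray(NOISE)[None, :]
    years = start_year + np.arange(sn.shape[1])
    toe = np.full(sn.shape[0], np.nan)   # NaN where the threshold is never crossed
    for m in range(sn.shape[0]):
        above = np.where(sn[m] > threshold)[0]
        if above.size:
            toe[m] = years[above[0]]
    return toe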
def average_histogram(self, direction, start=None, stop=None, months="JJ", datasets=None):
    #monthly averaging functions: mmean acts on a single (year x month) array,
    #bigmmean on a (model x year x month) array
    if months == "JJ":
        mmean = lambda x: MV.average(x[:, 5:7], axis=1)
        bigmmean = lambda X: MV.average(X[:, :, 5:7], axis=2)
    elif months == "SO":
        mmean = lambda x: MV.average(x[:, 8:10], axis=1)
        bigmmean = lambda X: MV.average(X[:, :, 8:10], axis=2)
    elif months == "JJA":
        mmean = lambda x: MV.average(x[:, 5:8], axis=1)
        bigmmean = lambda X: MV.average(X[:, :, 5:8], axis=2)
    elif months == "JAS":
        mmean = lambda x: MV.average(x[:, 6:9], axis=1)
        bigmmean = lambda X: MV.average(X[:, :, 6:9], axis=2)
    elif months == "Jun":
        mmean = lambda x: x[:, 5]
        bigmmean = lambda X: X[:, :, 5]
    elif months == "YEAR":
        mmean = lambda x: MV.average(x, axis=1)
        bigmmean = lambda X: MV.average(X, axis=2)
    if start is None:
        start = cmip5.start_time(self.gpcp.reshaped["east"])
        start = cdtime.comptime(start.year, start.month, 1)
    if stop is None:
        stop = cmip5.stop_time(self.gpcp.reshaped["east"])
        stop = cdtime.comptime(stop.year, stop.month, 30)
    #get the h85 trends over the same time period
    H85m = bigmmean(self.h85.reshaped[direction])(time=(start, stop))
    H85 = cmip5.cdms_clone(np.ma.mask_rows(H85m), H85m)
    H85_trends = cmip5.get_linear_trends(H85)
    #get the piControl projection time series
    noise = mmean(self.piC.reshaped[direction])
    L = stop.year - start.year + 1
    noise_trends = da.get_slopes(noise, L)
    #plot
    plt.hist(H85_trends.compressed(), 25, color=da_colors("h85"), alpha=.5, normed=True)
    plt.hist(noise_trends, 25, color=da_colors("piC"), alpha=.5, normed=True)
    da.fit_normals_to_data(H85_trends, color=da_colors("h85"), lw=3, label="H85")
    da.fit_normals_to_data(noise_trends, color=da_colors("piC"), lw=3, label="piControl")
    #calculate the trend in the observations
    if datasets is None:
        datasets = ["gpcp", "cmap", "precl"]
    if type(datasets) != type([]):
        datasets = [datasets]
    for dataset in datasets:
        X = self.OBS[dataset.upper()]
        obs_avg = mmean(X.reshaped[direction](time=(start, stop)))
        obs_trend = cmip5.get_linear_trends(obs_avg)
        plt.axvline(obs_trend, label=dataset, color=da_colors(dataset))
    plt.xlabel("S/N")
    plt.ylabel("Frequency")
    plt.legend(loc=0)
def average_histogram(obslist, h85, piC, direction, start=None, stop=None, months="JJ"):
    if months == "JJ":
        mmean = lambda x: MV.average(x[:, 5:7], axis=1)
        bigmmean = lambda X: MV.average(X[:, :, 5:7], axis=2)
    elif months == "SO":
        mmean = lambda x: MV.average(x[:, 8:10], axis=1)
        bigmmean = lambda X: MV.average(X[:, :, 8:10], axis=2)
    elif months == "JJA":
        mmean = lambda x: MV.average(x[:, 5:8], axis=1)
        bigmmean = lambda X: MV.average(X[:, :, 5:8], axis=2)
    elif months == "Jun":
        mmean = lambda x: x[:, 5]
        bigmmean = lambda X: X[:, :, 5]
    if type(obslist) == type([]):
        obs = obslist[0]
    else:
        obs = obslist
    if start is None:
        start = cmip5.start_time(obs.reshaped["east"])
        start = cdtime.comptime(start.year, start.month, 1)
    if stop is None:
        stop = cmip5.stop_time(obs.reshaped["west"])
        stop = cdtime.comptime(stop.year, stop.month, 30)
    #calculate the trend in the observations
    obs_avg = mmean(obs.reshaped[direction](time=(start, stop)))
    obs_trend = cmip5.get_linear_trends(obs_avg)
    #get the h85 trends over the same time period
    H85m = bigmmean(h85.reshaped[direction])(time=(start, stop))
    H85 = cmip5.cdms_clone(np.ma.mask_rows(H85m), H85m)
    H85_trends = cmip5.get_linear_trends(H85)
    #get the piControl projection time series
    noise = mmean(piC.reshaped[direction])
    L = len(obs_avg)
    noise_trends = da.get_slopes(noise, L)
    #plot
    plt.hist(H85_trends.compressed(), 25, color=da_colors("h85"), alpha=.5, normed=True)
    plt.hist(noise_trends, 25, color=da_colors("piC"), alpha=.5, normed=True)
    da.fit_normals_to_data(H85_trends, color=da_colors("h85"), lw=3, label="H85")
    da.fit_normals_to_data(noise_trends, color=da_colors("piC"), lw=3, label="piControl")
    plt.axvline(obs_trend, label=obs.dataset, color=da_colors(obs.dataset))
    if type(obslist) == type([]):
        for obs in obslist[1:]:
            obs_avg = mmean(obs.reshaped[direction](time=(start, stop)))
            obs_trend = cmip5.get_linear_trends(obs_avg)
            plt.axvline(obs_trend, label=obs.dataset, color=da_colors(obs.dataset))
    plt.xlabel("S/N")
    plt.ylabel("Frequency")
    plt.legend(loc=0)
def plot_correlations(region, season=None, significance_level=None):
    scenario = "ssp585"
    droughtdirec = "DroughtSN/"
    TRENDS = {}
    CLIM = {}
    MAXSIG = {}
    mysort = ["tas", "evspsbl", "mrros", "mrro", "mrsos", "mrso", "pr", "prsn"]
    snfiles = glob.glob(rootdirec + droughtdirec + region + "/*")
    variables_match = sorted(
        np.unique([x.split(".")[0].split("/")[-1] for x in snfiles]))
    for variable in variables_match:
        #find the season of max signal
        vfiles = glob.glob(rootdirec + droughtdirec + region + "/*" + variable + ".*")
        months = np.unique([x.split(".")[3] for x in vfiles])
        if len(months) == 1:
            month_maxsig = months[0]
        else:
            cts = []
            for month in months:
                ct = get_crossing_time(region, variable, "ssp585", month)
                if ct is None:
                    ct = 2101
                cts += [ct]
            month_maxsig = months[np.argmin(np.array(cts))]
        if season is not None:
            month_maxsig = season
        X = TOE(variable, region)
        ssp585 = X.ensemble_average("ssp585")
        historical = X.ensemble_average("historical")
        trends = cmip5.get_linear_trends(
            getattr(cdutil, month_maxsig).departures(ssp585))
        climatology = getattr(cdutil, month_maxsig).climatology(
            historical(time=('1980-1-1', '2014-12-31')))[:, 0]
        TRENDS[variable] = trends
        CLIM[variable] = climatology
        MAXSIG[variable] = month_maxsig
    nvars = len(TRENDS.keys())
    ok_variables = sorted(TRENDS.keys())
    CLIM_CORRS = np.zeros((nvars, nvars)) + 1.e20
    CLIM_PVALS = np.zeros((nvars, nvars)) + 1.e20
    TREND_CORRS = np.zeros((nvars, nvars)) + 1.e20
    TREND_PVALS = np.zeros((nvars, nvars)) + 1.e20
    CROSS_CORRS = np.zeros((nvars, nvars)) + 1.e20
    CROSS_PVALS = np.zeros((nvars, nvars)) + 1.e20
    #correlate climatologies, trends, and climatologies vs trends across models
    for variable in ok_variables:
        i = ok_variables.index(variable)
        for v2 in ok_variables:
            j = ok_variables.index(v2)
            CLIM_CORRS[i, j], CLIM_PVALS[i, j] = stats.pearsonr(CLIM[variable], CLIM[v2])
            TREND_CORRS[i, j], TREND_PVALS[i, j] = stats.pearsonr(TRENDS[variable], TRENDS[v2])
            CROSS_CORRS[i, j], CROSS_PVALS[i, j] = stats.pearsonr(CLIM[variable], TRENDS[v2])
    xlabels = []
    for variable in TRENDS.keys():
        if variable in MAXSIG.keys():
            xlabels += [MAXSIG[variable] + " " + variable]
        else:
            xlabels += [variable]
    diag_mask = np.tri(TREND_CORRS.shape[0], k=-1)
    CLIM_CORRS = np.ma.masked_where(CLIM_CORRS > 1.e10, CLIM_CORRS)
    if significance_level is not None:
        CLIM_CORRS = np.ma.masked_where(CLIM_PVALS > significance_level, CLIM_CORRS)
    CROSS_CORRS = np.ma.masked_where(CROSS_CORRS > 1.e10, CROSS_CORRS)
    if significance_level is not None:
        CROSS_CORRS = np.ma.masked_where(CROSS_PVALS > significance_level, CROSS_CORRS)
    TREND_CORRS = np.ma.masked_where(TREND_CORRS > 1.e10, TREND_CORRS)
    if significance_level is not None:
        TREND_CORRS = np.ma.masked_where(TREND_PVALS > significance_level, TREND_CORRS)
    fig = plt.figure(figsize=(12, 5))
    ax1 = plt.subplot(1, 3, 1)
    plt.pcolor(np.ma.array(CLIM_CORRS, mask=diag_mask), vmin=-1, vmax=1, cmap=cm.RdBu)
    plt.xticks(np.arange(nvars) + .5, xlabels, fontsize=8, rotation="vertical")
    plt.yticks(np.arange(nvars) + .5, xlabels, fontsize=8)
    plt.xlabel("climatology")
    plt.ylabel("climatology")
    plt.colorbar(orientation="horizontal")
    ax1.xaxis.tick_top()
    ax2 = plt.subplot(1, 3, 2)
    plt.pcolor(np.ma.array(TREND_CORRS, mask=diag_mask), vmin=-1, vmax=1, cmap=cm.RdBu)
    plt.xticks(np.arange(nvars) + .5, xlabels, fontsize=8, rotation="vertical")
    plt.yticks(np.arange(nvars) + .5, xlabels, fontsize=8)
    plt.xlabel("trend")
    plt.ylabel("trend")
    plt.colorbar(orientation="horizontal")
    ax2.xaxis.tick_top()
    ax3 = plt.subplot(1, 3, 3)
    plt.pcolor(np.ma.array(CROSS_CORRS, mask=diag_mask), vmin=-1, vmax=1, cmap=cm.RdBu)
    plt.xticks(np.arange(nvars) + .5, xlabels, fontsize=8, rotation="vertical")
    plt.yticks(np.arange(nvars) + .5, xlabels, fontsize=8)
    plt.xlabel("climatology")
    plt.ylabel("trend")
    plt.colorbar(orientation="horizontal")
    ax3.xaxis.tick_top()
    plt.tight_layout()
def obs_SN(self, start_time, stop_time=None, overlapping=True, include_trees=True,
           include_dai=False, include_cru=False, include_piControl=False,
           noisestart=None, solver=None, plot=True):
    to_return = {}
    if stop_time is None:
        stop_time = cmip5.stop_time(self.get_tree_ring_projection())
    target_obs = self.get_tree_ring_projection(solver=solver)(time=(start_time, stop_time))
    L = len(target_obs)
    modslopes, noiseterm = self.sn_at_time(start_time, L, overlapping=True,
                                           noisestart=noisestart, solver=solver)
    ns = np.std(noiseterm)
    signal = float(cmip5.get_linear_trends(target_obs))
    if plot:
        plt.hist(modslopes / ns, 20, normed=True, color=get_dataset_color("h85"), alpha=.5)
        lab = str(start_time.year) + "-" + str(stop_time.year)
        da.fit_normals_to_data(modslopes / ns, color=get_dataset_color("h85"),
                               lw=1, label="H85")
        plt.hist(noiseterm / ns, 20, normed=True,
                 color=get_dataset_color("tree_noise"), alpha=.5)
        da.fit_normals_to_data(noiseterm / ns, color=get_dataset_color("tree_noise"),
                               lw=1, label="Pre-1850 tree rings")
    if include_trees:
        percentiles = []
        if plot:
            plt.axvline(signal / ns, color=get_dataset_color("tree"), lw=1,
                        label=lab + " GDA trend")
            print signal / ns
        noise_percentile = stats.percentileofscore(noiseterm.tolist(), signal)
        h85_percentile = stats.percentileofscore(modslopes.tolist(), signal)
        percentiles += [noise_percentile, h85_percentile]
        to_return["trees"] = [signal / ns] + percentiles
    if include_dai:
        daipercentiles = []
        dai_proj = self.project_dai_on_solver(start=start_time, solver=solver)
        daitrend = float(cmip5.get_linear_trends(dai_proj(time=(start_time, stop_time))))
        daisignal = daitrend / ns
        noise_percentile = stats.percentileofscore(noiseterm.tolist(), daitrend)
        h85_percentile = stats.percentileofscore(modslopes.tolist(), daitrend)
        daipercentiles += [noise_percentile, h85_percentile]
        if plot:
            plt.axvline(daisignal, color=get_dataset_color("dai"), lw=1, label="Dai")
            print "DAI signal/noise is " + str(daisignal)
        to_return["dai"] = [daitrend / ns] + daipercentiles
    if include_cru:
        crupercentiles = []
        cru_proj = self.project_cru_on_solver(start=start_time, solver=solver)
        crutrend = float(cmip5.get_linear_trends(cru_proj(time=(start_time, stop_time))))
        noise_percentile = stats.percentileofscore(noiseterm.tolist(), crutrend)
        h85_percentile = stats.percentileofscore(modslopes.tolist(), crutrend)
        crupercentiles += [noise_percentile, h85_percentile]
        crusignal = crutrend / ns
        if plot:
            plt.axvline(crusignal, color=get_dataset_color("cru"), lw=1, label="CRU")
            print "CRU signal/noise is " + str(crusignal)
        to_return["cru"] = [crutrend / ns] + crupercentiles
    if include_piControl:
        p = self.project_piControl_on_solver(solver=solver)
        noiseterm_mod = bootstrap_slopes(p, L)
        if plot:
            plt.hist(noiseterm_mod / ns, 20, normed=True,
                     color=get_dataset_color("picontrol"), alpha=.5)
            da.fit_normals_to_data(noiseterm_mod / ns,
                                   color=get_dataset_color("picontrol"),
                                   lw=1, label="PiControl")
        print "relative to model noise:"
        print float(signal) / np.std(noiseterm_mod)
        #percentiles += [stats.percentileofscore(noiseterm_mod.tolist(), signal)]
    if plot:
        plt.legend(loc=0)
        plt.xlabel("S/N")
        plt.ylabel("Normalized Frequency")
    return to_return
def observations():
    #Read in PET and PR files
    #regridded PET
    f = cdms.open("OBS/cru_ts4.01.1901.2016.pet.dat.REGRID.nc")
    pet = f("pet")
    f.close()
    landmask = pet[0].mask
    #Regridded GPCC
    fpr = cdms.open("OBS/precip.mon.total.2.5x2.5.v7.nc")
    pr = fpr("precip")
    fpr.close()
    pr = pr / 30.  #Approximate mm -> mm/day by assuming 30 days/month
    #Put them both on the same time axis
    startpet = cmip5.start_time(pet)
    stoppet = cmip5.stop_time(pet)
    startpr = cmip5.start_time(pr)
    stoppr = cmip5.stop_time(pr)
    if cdtime.compare(startpet, startpr) > 0:
        start = startpet
    else:
        start = startpr
    if cdtime.compare(stoppet, stoppr) > 0:
        stop = stoppr
    else:
        stop = stoppet
    start = cdtime.comptime(start.year, start.month, 1)
    stop = cdtime.comptime(stop.year, stop.month, 31)
    pr = pr(time=(start, stop))
    pet = pet(time=(start, stop))
    #Calculate annual-cycle amplitude (R) and phase (P)
    Rpr, Ppr = sc.fast_annual_cycle(pr)
    Rpet, Ppet = sc.fast_annual_cycle(pet)
    #Convert phase to month of maximum (VECTORIZE THIS?)
    #How to handle "month of maximum" if it's fluctuating between 1 and 12?
    #(Physically, what does this mean when our timesteps are every year?)
    #Should we modify the phase-detection code to start at phase 0, i.e. start in a
    #month such that the maximum is 6 months away?
    #Calculate variance maps for pr and pet
    test_period = ('1979-1-1', '2004-12-31')  #for overlap with CMIP5 historical
    pet_vmap = sc.variance_map(pet(time=test_period))
    pr_vmap = sc.variance_map(pr(time=test_period))
    #make phase maps
    variance_threshold = 0.25  #Can we come up with a physically meaningful threshold here?
    #Null hypothesis of no correlation ruled out at 99% confidence?
    Ppet_clim = sc.phase_climatology(Ppet)
    Ppet_clim_month = sc.mask_data(sc.phase_to_month(Ppet_clim), landmask)
    Ppr_clim = sc.phase_climatology(Ppr)
    Ppr_clim_month = sc.mask_data(sc.phase_to_month(Ppr_clim), landmask)
    months = ["JAN", "FEB", "MAR", "APR", "MAY", "JUN",
              "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"]
    from land_seasonal_cycle import landplot
    plt.subplot(211)
    m = landplot(MV.masked_where(pet_vmap < variance_threshold, Ppet_clim_month),
                 cmap=cm.hsv, vmin=0, vmax=12)
    m.drawcoastlines(color='gray')
    cbar = plt.colorbar(orientation="horizontal")
    cbar.set_ticks(np.arange(12))
    cbar.set_ticklabels(months)
    plt.title("PET phase")
    plt.subplot(212)
    m = landplot(MV.masked_where(pr_vmap < variance_threshold, Ppr_clim_month),
                 cmap=cm.hsv, vmin=0, vmax=12)
    m.drawcoastlines(color='gray')
    cbar = plt.colorbar(orientation="horizontal")
    cbar.set_ticks(np.arange(12))
    cbar.set_ticklabels(months)
    plt.title("PR phase")
    #phase trends
    Pa_pet = sc.get_phase_anomalies(Ppet)
    Pa_pet_trends = cmip5.get_linear_trends(Pa_pet)
    Pa_pr = sc.get_phase_anomalies(Ppr)
    Pa_pr_trends = cmip5.get_linear_trends(Pa_pr)
def NatureRevisions_Figure5(D):
    aerosol_start = cdtime.comptime(1950, 1, 1)
    aerosol_stop = cdtime.comptime(1975, 12, 31)
    aerosolsolver = Eof(D.ALL.mma(time=(aerosol_start, aerosol_stop)), weights='area')
    fac = da.get_orientation(aerosolsolver)
    plt.subplot(221)
    m = b.landplot(fac * aerosolsolver.eofs()[0], vmin=-.1, vmax=.1)
    m.fillcontinents(color="gray", zorder=0)
    varex = str(int(100 * np.round(aerosolsolver.varianceFraction()[0], 2)))
    plt.title("(a)")  #: 1950-1975 historical fingerprint (varex % of variance explained)
    m.drawcoastlines(color='gray')
    plt.ylim(-60, 90)
    plt.colorbar(orientation='horizontal', label='EOF loading')
    plt.subplot(222)
    Plotting.time_plot(fac * aerosolsolver.pcs()[:, 0], color=cm.Greys(.8), lw=1)
    plt.title("(b)")  #: Associated PC
    plt.ylabel("Temporal amplitude")
    plt.subplot(223)
    target_obs, cru_proj, dai_proj = pdsi_time_series(D, aerosol_start, aerosol_stop,
                                                      aerosols=True)
    plt.legend(fontsize=6)
    plt.title("(c)")  #: Projections on fingerprint
    plt.subplot(224)
    #target_obs = D.ALL.get_tree_ring_projection(solver=aerosolsolver)(time=(aerosol_start, aerosol_stop))
    L = len(target_obs)
    modslopes, noiseterm = D.ALL.sn_at_time(aerosol_start, L, overlapping=True,
                                            solver=aerosolsolver)
    ns = np.std(noiseterm)
    signal = float(cmip5.get_linear_trends(target_obs))
    plt.hist(modslopes / ns, 20, normed=True, color=get_dataset_color("h85"), alpha=.5)
    lab = str(aerosol_start.year) + "-" + str(aerosol_stop.year)
    da.fit_normals_to_data(modslopes / ns, color=get_dataset_color("h85"), lw=1, label="H85")
    plt.hist(noiseterm / ns, 20, normed=True, color=get_dataset_color("tree_noise"), alpha=.5)
    da.fit_normals_to_data(noiseterm / ns, color=get_dataset_color("tree_noise"),
                           lw=1, label="Pre-1850 tree rings")
    percentiles = []
    plt.axvline(signal / ns, color=get_dataset_color("tree"), lw=1, label=lab + " GDA trend")
    noise_percentile = stats.percentileofscore(noiseterm.tolist(), signal)
    h85_percentile = stats.percentileofscore(modslopes.tolist(), signal)
    percentiles += [noise_percentile, h85_percentile]
    daitrend = cmip5.get_linear_trends(dai_proj)
    print "DAI slope is " + str(daitrend)
    daisignal = daitrend / ns
    plt.axvline(daisignal, color=get_dataset_color("dai"), lw=1, label="Dai")
    print "DAI signal/noise is " + str(daisignal)
    crutrend = cmip5.get_linear_trends(cru_proj)
    print "CRU slope is " + str(crutrend)
    crusignal = crutrend / ns
    plt.axvline(crusignal, color=get_dataset_color("cru"), lw=1, label="CRU")
    print "CRU signal/noise is " + str(crusignal)
    plt.legend(loc=0, fontsize=8)
    plt.xlabel("S/N")
    plt.ylabel("Normalized Frequency")
    plt.title("(d)")  #: Detection and Attribution Results
    fig = plt.gcf()
    for ax in fig.axes:
        plt.setp(ax.xaxis.get_label(), fontsize=6)
        plt.setp(ax.yaxis.get_label(), fontsize=6)
        plt.setp(ax.get_xticklabels(), fontsize=6)
        plt.setp(ax.get_yticklabels(), fontsize=6)
    ax = fig.axes[0]
    ax.set_title("(a)", fontsize=6)
    ax = fig.axes[2]
    ax.set_title("(b)", fontsize=6)
    ax = fig.axes[3]
    ax.set_title("(c)", fontsize=6)
    ax = fig.axes[4]
    ax.set_title("(d)", fontsize=6)
    leg = ax.legend(fontsize=6, ncol=1, loc=2)
    leg.set_frame_on(False)
    cax = fig.axes[1]
    ticklabels = ["-0.1", "", "-0.05", "", "0", "", "0.05", "", "0.1"]
    cax.set_xticklabels(ticklabels)
    plt.setp(cax.xaxis.get_ticklabels(), fontsize=6)
    plt.setp(cax.xaxis.get_label(), fontsize=6)