def stageData(self, m):
    y0 = float(self.keywords.get("y0", 1994.))  # [yr] beginning year to include in analysis
    yf = float(self.keywords.get("yf", 2007.))  # [yr] end year to include in analysis
    obs = Variable(filename=self.source,
                   variable_name=self.variable,
                   alternate_vars=self.alternate_vars)
    #if obs.time is None: raise il.NotTemporalVariable()
    self.pruneRegions(obs)
    mod = m.extractTimeSeries(self.variable,
                              alt_vars=self.alternate_vars,
                              expression=self.derived,
                              initial_time=(y0 - 1850) * 365,
                              final_time=(yf - 1850) * 365,
                              lats=None if obs.spatial else obs.lat,
                              lons=None if obs.spatial else obs.lon)
    # remove the time dimension
    obs = obs.integrateInTime(mean=True)
    mod = mod.integrateInTime(mean=True)
    return obs, mod
def stageData(self, m):
    obs = Variable(filename=self.source,
                   variable_name=self.variable,
                   alternate_vars=self.alternate_vars)
    t0 = obs.time_bnds[0, 0]
    tf = obs.time_bnds[-1, 1]
    climatology = obs.cbounds is not None
    if climatology:
        t0 = (obs.cbounds[0] - 1850) * 365.
        tf = (obs.cbounds[1] + 1 - 1850) * 365.
    mod = m.extractTimeSeries(self.variable,
                              alt_vars=self.alternate_vars,
                              expression=self.derived,
                              initial_time=t0,
                              final_time=tf,
                              lats=None if obs.spatial else obs.lat,
                              lons=None if obs.spatial else obs.lon)
    if obs.layered and not mod.layered:
        obs = obs.integrateInDepth(z0=0, zf=0.1, mean=True)
    if climatology:
        mod = mod.annualCycle()
        mod.time = obs.time.copy()
        mod.time_bnds = obs.time_bnds.copy()
    obs, mod = il.MakeComparable(obs, mod,
                                 mask_ref=True,
                                 clip_ref=True,
                                 extents=self.extents,
                                 logstring="[%s][%s]" % (self.longname, m.name))
    return obs, mod
def stageData(self, m):
    energy_threshold = float(self.keywords.get("energy_threshold", 20.))
    sh = Variable(filename=os.path.join(os.environ["ILAMB_ROOT"],
                                        "DATA/sh/GBAF/sh_0.5x0.5.nc"),
                  variable_name="sh")
    le = Variable(filename=os.path.join(os.environ["ILAMB_ROOT"],
                                        "DATA/le/GBAF/le_0.5x0.5.nc"),
                  variable_name="le")
    obs = Variable(name=self.variable,
                   unit="1",
                   data=np.ma.masked_array(le.data / (le.data + sh.data),
                                           mask=((le.data < 0) +
                                                 (sh.data < 0) +
                                                 ((le.data + sh.data) < energy_threshold))),
                   lat=sh.lat, lat_bnds=sh.lat_bnds,
                   lon=sh.lon, lon_bnds=sh.lon_bnds,
                   time=sh.time, time_bnds=sh.time_bnds)
    if obs.time is None:
        raise il.NotTemporalVariable()
    self.pruneRegions(obs)
    sh = m.extractTimeSeries("hfss",
                             initial_time=obs.time_bnds[0, 0],
                             final_time=obs.time_bnds[-1, 1],
                             lats=None if obs.spatial else obs.lat,
                             lons=None if obs.spatial else obs.lon)
    le = m.extractTimeSeries("hfls",
                             initial_time=obs.time_bnds[0, 0],
                             final_time=obs.time_bnds[-1, 1],
                             lats=None if obs.spatial else obs.lat,
                             lons=None if obs.spatial else obs.lon)
    mod = Variable(name=self.variable,
                   unit="1",
                   data=np.ma.masked_array(le.data / (le.data + sh.data),
                                           mask=((le.data < 0) +
                                                 (sh.data < 0) +
                                                 ((le.data + sh.data) < energy_threshold))),
                   lat=sh.lat, lat_bnds=sh.lat_bnds,
                   lon=sh.lon, lon_bnds=sh.lon_bnds,
                   time=sh.time, time_bnds=sh.time_bnds)
    obs, mod = il.MakeComparable(obs, mod,
                                 mask_ref=True,
                                 clip_ref=True,
                                 logstring="[%s][%s]" % (self.longname, m.name))
    return obs, mod
def stageData(self, m): """Extracts model data and transforms it to make it comparable to the runoff dataset. Parameters ---------- m : ILAMB.ModelResult.ModelResult the model result context Returns ------- obs : ILAMB.Variable.Variable the variable context associated with the observational dataset mod : ILAMB.Variable.Variable the variable context associated with the model result """ # Extract the observational data for basins obs = Variable(filename=self.source, variable_name=self.variable).convert("mm d-1") # Extract the globally gridded runoff mod = m.extractTimeSeries(self.variable, alt_vars=self.alternate_vars, initial_time=obs.time_bnds[0, 0], final_time=obs.time_bnds[-1, 1]) # We want annual mean, not monthly mean years = np.asarray([obs.time_bnds[::12, 0], obs.time_bnds[11::12, 1]]).T obs = obs.coarsenInTime(years) mod = mod.coarsenInTime(years) obs.name = "runoff" mod.name = "runoff" # Operate on model data to compute mean runoff values in each basin. data = np.ma.zeros(obs.data.shape) for i, basin in enumerate(self.basins): b = il.ClipTime(mod.integrateInSpace(region=basin, mean=True), obs.time_bnds[0, 0], obs.time_bnds[-1, 1]).convert(obs.unit) data[:, i] = b.data # Create a variable to return for the model mod = Variable(name=obs.name, unit=obs.unit, data=np.ma.masked_array(data, mask=obs.data.mask), time=obs.time, time_bnds=obs.time_bnds, ndata=obs.ndata, lat=obs.lat, lat_bnds=obs.lat_bnds, lon=obs.lon, lon_bnds=obs.lon_bnds) return obs, mod
def GetSlope(v):
    # loop over unmasked cells and compute the slope with respect to depth

    def _dindx(v, S0=200, Sc=1000):
        # find the indices of the levels nearest to 200 and 1000 m
        dZ = np.asarray(getattr(v, 'depth'))
        i0 = np.argmin(abs(dZ - S0))
        ic = np.argmin(abs(dZ - Sc))
        vdiff = dZ[ic] - dZ[i0]
        print('level {:2d}-{:2d} in {:6.2f}-{:6.2f}m time is {}'.format(
            i0, ic, dZ[i0], dZ[ic], v.time))
        return i0, ic, vdiff

    i0, ic, vdiff = _dindx(v)
    with np.errstate(under='ignore'):
        # finite-difference slope between the two levels, in 1e-3 degC/m
        dTdZ = (v.data[i0, ...] - v.data[ic, ...]) / vdiff * 1e3
    dTdZ.shape = (1,) + dTdZ.shape
    slope = Variable(time=np.asarray([0.5]),
                     time_bnds=np.asarray([[0., 1.]]),
                     data=dTdZ,
                     lat=v.lat, lat_bnds=v.lat_bnds,
                     lon=v.lon, lon_bnds=v.lon_bnds,
                     unit='1e-3 degC/m')
    return slope
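# Minimal sketch (hypothetical depths and temperatures, not ILAMB data) of the
# slope computation in GetSlope above: locate the levels nearest 200 m and
# 1000 m, then difference the field across them, scaled to 1e-3 degC/m.
import numpy as np
depth = np.array([5., 50., 195., 500., 1010.])  # hypothetical level depths [m]
temp = np.array([20., 18., 15., 8., 4.])        # hypothetical temperatures [degC]
i0 = np.argmin(np.abs(depth - 200.))            # -> 2
ic = np.argmin(np.abs(depth - 1000.))           # -> 4
dTdZ = (temp[i0] - temp[ic]) / (depth[ic] - depth[i0]) * 1e3
print(dTdZ)                                     # ~13.5 [1e-3 degC/m]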
def _extendSitesToMap(self, var):
    """A local function to extend site data to the basins.

    Parameters
    ----------
    var : ILAMB.Variable.Variable
        the site-based variable we wish to extend to basins

    Returns
    -------
    extended : ILAMB.Variable.Variable
        the spatial variable which is the extended version of the input variable
    """
    # determine the global mask
    global_mask = None
    global_data = None
    for i, basin in enumerate(self.basins):
        name, lat, lon, mask = Regions._regions[basin]
        keep = (mask == False)
        if global_mask is None:
            global_mask = np.copy(mask)
            global_data = keep * var.data[i]
        else:
            global_mask *= mask
            global_data += keep * var.data[i]
    return Variable(name=var.name,
                    unit=var.unit,
                    data=np.ma.masked_array(global_data, mask=global_mask),
                    lat=lat, lon=lon)
def stageData(self, m):
    obs = Variable(filename=self.source,
                   variable_name=self.variable,
                   alternate_vars=self.alternate_vars,
                   convert_calendar=False)
    if obs.time is None:
        raise il.NotTemporalVariable()
    self.pruneRegions(obs)
    # Try to extract a commensurate quantity from the model
    mod = m.extractTimeSeries(self.variable,
                              alt_vars=self.alternate_vars,
                              expression=self.derived,
                              convert_calendar=False,
                              lats=None if obs.spatial else obs.lat,
                              lons=None if obs.spatial else obs.lon)
    # When we make things comparable, sites can get pruned; we
    # also need to prune the site labels
    lat = np.copy(obs.lat)
    lon = np.copy(obs.lon)
    obs, mod = il.MakeComparable(obs, mod,
                                 clip_ref=False,
                                 prune_sites=True,
                                 allow_diff_times=True)
    # Some datasets / models return data in UTC, others in local
    # time. Try to correct by looking at the time of maximum
    # incident radiation.
    try:
        inc = Variable(filename=self.source,
                       variable_name="LW_IN",
                       convert_calendar=False)
        obs.tmax = getTimeOfDailyMaximum(inc)
    except:
        obs.tmax = 12.
    try:
        inc = m.extractTimeSeries("FSDS",
                                  convert_calendar=False,
                                  lats=None if obs.spatial else obs.lat,
                                  lons=None if obs.spatial else obs.lon)
        mod.tmax = getTimeOfDailyMaximum(inc)
    except:
        mod.tmax = 12.
    return obs, mod
def modelPlots(self, m):
    # some of the plots can be generated using the standard
    # routine, with some modifications
    super(ConfRunoff, self).modelPlots(m)
    bname = os.path.join(self.output_path, "%s_Benchmark.nc" % (self.name))
    fname = os.path.join(self.output_path, "%s_%s.nc" % (self.name, m.name))
    # get the HTML page
    page = [page for page in self.layout.pages if "MeanState" in page.name][0]
    if not os.path.isfile(bname):
        return
    if not os.path.isfile(fname):
        return
    obs = Variable(filename=bname, variable_name="runoff", groupname="MeanState")
    mod = Variable(filename=fname, variable_name="runoff", groupname="MeanState")
    for i, basin in enumerate(self.basins):
        page.addFigure("Spatially integrated regional mean",
                       basin,
                       "MNAME_global_%s.png" % basin,
                       basin, False,
                       longname=basin)
        fig, ax = plt.subplots(figsize=(6.8, 2.8), tight_layout=True)
        ax.plot(obs.time / 365 + 1850, obs.data[:, i],
                lw=2, color='k', alpha=0.5)
        ax.plot(mod.time / 365 + 1850, mod.data[:, i],
                lw=2, color=m.color)
        ax.grid()
        ax.set_ylabel(post.UnitStringToMatplotlib(obs.unit))
        fig.savefig(os.path.join(self.output_path,
                                 "%s_global_%s.png" % (m.name, basin)))
        plt.close()
def ReadBenchmark(filename, variable_name, alternate_vars="", study_limits=[]):
    obs = Variable(filename=filename,
                   variable_name=variable_name,
                   alternate_vars=alternate_vars,
                   t0=None if len(study_limits) != 2 else study_limits[0],
                   tf=None if len(study_limits) != 2 else study_limits[1])
    if obs.time is None:
        raise il.NotTemporalVariable()
    return obs
def _profileScore(ref, com, region):
    # merge the reference and comparison depth bounds into a single grid
    db = np.unique(np.hstack([np.unique(ref.depth_bnds),
                              np.unique(com.depth_bnds)]))
    d = 0.5 * (db[:-1] + db[1:])
    w = np.diff(db)
    # nearest-depth interpolation of both profiles onto the merged grid
    r = ref.data[np.argmin(np.abs(d[:, np.newaxis] - ref.depth), axis=1)]
    c = com.data[np.argmin(np.abs(d[:, np.newaxis] - com.depth), axis=1)]
    err = np.sqrt((((r - c) ** 2) * w).sum() / ((r ** 2) * w).sum())  # relative L2 error
    return Variable(name="Profile Score %s" % region,
                    unit="1",
                    data=np.exp(-err))
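# Worked sketch (synthetic profiles, not ILAMB data) of the score above: it is
# exp(-e), where e is the depth-weighted relative L2 error between the
# reference and comparison profiles placed on the merged depth grid.
import numpy as np
db = np.array([0., 10., 50., 100.])   # merged depth bounds [m]
d = 0.5 * (db[:-1] + db[1:])          # cell midpoints
w = np.diff(db)                       # cell thicknesses used as weights
r = np.array([10., 8., 5.])           # reference profile on the merged grid
c = np.array([9., 8.5, 4.])           # comparison profile on the merged grid
err = np.sqrt((((r - c) ** 2) * w).sum() / ((r ** 2) * w).sum())
print(np.exp(-err))                   # ~0.89, a unitless score in (0, 1]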
def _albedo(dn, up, vname, energy_threshold):
    mask = (dn.data < energy_threshold)
    dn.data = np.ma.masked_array(dn.data, mask=mask)
    up.data = np.ma.masked_array(up.data, mask=mask)
    np.seterr(over='ignore', under='ignore')
    al = np.ma.masked_array(up.data / dn.data, mask=mask)
    np.seterr(over='warn', under='warn')
    al = Variable(name=vname,
                  unit="1",
                  data=al,
                  lat=dn.lat, lat_bnds=dn.lat_bnds,
                  lon=dn.lon, lon_bnds=dn.lon_bnds,
                  time=dn.time, time_bnds=dn.time_bnds)
    return dn, up, al
def stageData(self, m):
    obs = Variable(filename=self.source,
                   variable_name=self.variable,
                   alternate_vars=self.alternate_vars)
    if obs.time is None:
        raise il.NotTemporalVariable()
    self.pruneRegions(obs)
    # Try to extract a commensurate quantity from the model
    mod = m.extractTimeSeries(self.variable,
                              alt_vars=self.alternate_vars,
                              expression=self.derived,
                              initial_time=obs.time_bnds[0, 0],
                              final_time=obs.time_bnds[-1, 1],
                              lats=None if obs.spatial else obs.lat,
                              lons=None if obs.spatial else obs.lon).convert(obs.unit)
    return obs, mod
def _evapfrac(sh, le, vname, energy_threshold):
    mask = ((le.data < 0) +
            (sh.data < 0) +
            ((le.data + sh.data) < energy_threshold))
    sh.data = np.ma.masked_array(sh.data, mask=mask)
    le.data = np.ma.masked_array(le.data, mask=mask)
    np.seterr(over='ignore', under='ignore')
    ef = np.ma.masked_array(le.data / (le.data + sh.data), mask=mask)
    np.seterr(over='warn', under='warn')
    ef = Variable(name=vname,
                  unit="1",
                  data=ef,
                  lat=sh.lat, lat_bnds=sh.lat_bnds,
                  lon=sh.lon, lon_bnds=sh.lon_bnds,
                  time=sh.time, time_bnds=sh.time_bnds)
    return sh, le, ef
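# Minimal numpy sketch (toy values) of the evaporative-fraction masking used in
# _evapfrac above: EF = LE / (LE + SH), masked where either flux is negative or
# the available energy falls below the threshold.
import numpy as np
sh = np.array([40., -5., 5., 60.])   # sensible heat flux [W m-2]
le = np.array([60., 30., 5., -2.])   # latent heat flux [W m-2]
energy_threshold = 20.
mask = (le < 0) | (sh < 0) | ((le + sh) < energy_threshold)
ef = np.ma.masked_array(le / (le + sh), mask=mask)
print(ef)                            # [0.6 -- -- --]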
def __init__(self, **keywords):
    # Calls the regular constructor
    super(ConfPEcAn, self).__init__(**keywords)
    obs = Variable(filename=self.source,
                   variable_name=self.variable,
                   alternate_vars=self.alternate_vars,
                   convert_calendar=False)
    self.years = np.asarray(
        [t.year for t in cftime.num2date(obs.time, "days since 1850-1-1")],
        dtype=int)
    self.years = np.unique(self.years)
    # Setup a html layout for generating web views of the results
    pages = []
    # Mean State page
    pages.append(post.HtmlPage("MeanState", "Mean State"))
    pages[-1].setHeader("CNAME / RNAME / MNAME")
    pages[-1].setSections(["Seasonal Diurnal Cycle", ] +
                          ["%d" % y for y in self.years])
    pages.append(post.HtmlAllModelsPage("AllModels", "All Models"))
    pages[-1].setHeader("CNAME / RNAME")
    pages[-1].setSections([])
    pages[-1].setRegions(self.regions)
    pages.append(post.HtmlPage("DataInformation", "Data Information"))
    pages[-1].setSections([])
    pages[-1].text = "\n"
    with Dataset(self.source) as dset:
        for attr in dset.ncattrs():
            a = dset.getncattr(attr)
            if 'astype' in dir(a):
                a = a.astype('str')
            if 'encode' in dir(a):
                a = a.encode('ascii', 'ignore')
            pages[-1].text += "<p><b> %s: </b>%s</p>\n" % (attr, a)
    self.layout = post.HtmlLayout(pages, self.longname)
def modelPlots(self, m):
    bname = "%s/%s_Benchmark.nc" % (self.output_path, self.name)
    fname = "%s/%s_%s.nc" % (self.output_path, self.name, m.name)
    if not os.path.isfile(bname):
        return
    if not os.path.isfile(fname):
        return
    # get the HTML page
    page = [page for page in self.layout.pages if "MeanState" in page.name][0]
    page.priority = ["Beginning", "Ending", "Strength", "Score", "Overall"]
    for y in self.years:
        # ------------------------------------------------------------------ #
        plt.figure(figsize=(5, 5), tight_layout=True)
        has_data = False
        for name, color, alpha in zip([bname, fname],
                                      ['k', m.color],
                                      [0.6, 1.0]):
            try:
                v = Variable(filename=name,
                             variable_name="mag%d" % y,
                             groupname="MeanState")
                has_data = True
            except:
                continue
            plt.polar(v.time / 365. * 2 * np.pi, v.data, '-',
                      color=color, alpha=alpha, lw=2)
        if has_data:
            plt.xticks(bnd_months[:-1] / 365. * 2 * np.pi, lbl_months)
            plt.ylim(0, self.limits["mag"])
            plt.savefig("%s/%s_mag%d.png" % (self.output_path, m.name, y))
            page.addFigure("%d" % y,
                           "mag%d" % y,
                           "MNAME_mag%d.png" % y,
                           side="DIURNAL MAGNITUDE",
                           legend=False)
        plt.close()
        # ------------------------------------------------------------------ #
        fig, ax = plt.subplots(figsize=(8, 5), tight_layout=True)
        has_data = False
        unit = ""
        for name, color, alpha, lbl in zip([bname, fname],
                                           ['k', m.color],
                                           [0.6, 1.0],
                                           ['Benchmark', m.name]):
            try:
                v = Variable(filename=name,
                             variable_name="cycle%d" % y,
                             groupname="MeanState")
                has_data = True
                unit = v.unit
            except:
                continue
            v.plot(ax, color=color, alpha=alpha, lw=2, label=lbl)
        if has_data:
            ax.set_xticks(np.linspace(0, 1, 9) / 365 + 1850)
            ax.set_xticklabels(["%2d:00" % t for t in np.linspace(0, 24, 9)])
            ax.set_ylim(self.limits['cycle']['min'], self.limits['cycle']['max'])
            ax.grid(True)
            ax.set_ylabel(post.UnitStringToMatplotlib(unit))
            ax.set_xlabel("local time")
            ax.legend(bbox_to_anchor=(0, 1.005, 1, 0.25),
                      loc='lower left',
                      mode='expand',
                      ncol=2,
                      borderaxespad=0,
                      frameon=False)
            plt.savefig("%s/%s_cycle%d.png" % (self.output_path, m.name, y))
            page.addFigure("%d" % y,
                           "cycle%d" % y,
                           "MNAME_cycle%d.png" % y,
                           side="SEASONAL DIURNAL CYCLE",
                           legend=False)
        plt.close()
def confront(self, m):
    # get the HTML page
    page = [page for page in self.layout.pages if "MeanState" in page.name][0]
    # Grab the data
    obs, mod = self.stageData(m)
    odata, ot, otb = DiurnalReshape(obs)
    mdata, mt, mtb = DiurnalReshape(mod)
    n = len(self.lbls)
    obs_amp = np.zeros(n)
    mod_amp = np.zeros(n)
    amp_score = np.zeros(n)
    obs_phase = np.zeros(n)
    mod_phase = np.zeros(n)
    phase_score = np.zeros(n)
    for site in range(n):
        # Site name
        lbl = self.lbls[site]
        skip = False
        # Observational diurnal cycle
        tobs = ot + obs.lon[site] / 360
        vobs = odata[..., site]
        vobs = np.roll(vobs, -tobs.searchsorted(0), axis=1)
        tobs = np.roll(tobs, -tobs.searchsorted(0))
        tobs += (tobs < 0)
        aobs = (vobs.max(axis=1) - vobs.min(axis=1)).mean()
        vobs = vobs.mean(axis=0)
        if vobs.size == vobs.mask.sum():
            skip = True
        if not skip:
            acyc = CubicSpline(np.hstack([tobs, tobs[0] + 1.]),
                               np.hstack([vobs, vobs[0]]),
                               bc_type="periodic")
            troot = acyc.derivative().solve()
            troot = troot[(troot >= 0) * (troot <= 1.)]
            otmx = troot[acyc(troot).argmax()]
        # Model diurnal cycle
        tmod = mt + mod.lon[site] / 360
        vmod = mdata[..., site]
        vmod = np.roll(vmod, -tmod.searchsorted(0), axis=1)
        tmod = np.roll(tmod, -tmod.searchsorted(0))
        tmod += (tmod < 0)
        amod = (vmod.max(axis=1) - vmod.min(axis=1)).mean()
        vmod = vmod.mean(axis=0)
        mcyc = CubicSpline(np.hstack([tmod, tmod[0] + 1.]),
                           np.hstack([vmod, vmod[0]]),
                           bc_type="periodic")
        troot = mcyc.derivative().solve()
        troot = troot[(troot >= 0) * (troot <= 1.)]
        mtmx = troot[mcyc(troot).argmax()]
        # Scalars and scores
        if skip:
            obs_amp[site] = np.nan
            obs_phase[site] = np.nan
            amp_score[site] = np.nan
            phase_score[site] = np.nan
        else:
            obs_amp[site] = aobs
            obs_phase[site] = otmx
            amp_score[site] = np.exp(-np.abs(amod - aobs) / aobs)
            phase_score[site] = 1 - np.abs(mtmx - otmx) / 0.5
        mod_amp[site] = amod
        mod_phase[site] = mtmx
        # Plot
        ts = np.linspace(0, 1, 100)
        fig, ax = plt.subplots(figsize=(6.8, 2.8), tight_layout=True)
        if not skip:
            ax.plot(tobs, vobs, 'o', mew=0, markersize=3, color='k')
            ax.plot(ts, acyc(ts), '-', color='k')
            ax.plot(otmx, acyc(otmx), 'o', mew=0, markersize=5, color='k')
        ax.plot(tmod, vmod, 'o', mew=0, markersize=3, color=m.color)
        ax.plot(ts, mcyc(ts), '-', color=m.color)
        ax.plot(mtmx, mcyc(mtmx), 'o', mew=0, markersize=5, color=m.color)
        xt = np.arange(25)[::3]
        xtl = ["%02d:00" % xx for xx in xt]
        ax.set_xticks(xt / 24.)
        ax.set_xticklabels(xtl)
        ax.grid(True)
        ax.set_xlabel("Mean solar time")
        ax.set_ylabel("[%s]" % obs.unit)
        plt.savefig(os.path.join(self.output_path,
                                 "%s_diurnal_%s.png" % (m.name, lbl)))
        plt.close()
    obs_amp = np.ma.masked_invalid(obs_amp)
    obs_phase = np.ma.masked_invalid(obs_phase)
    amp_score = np.ma.masked_invalid(amp_score)
    phase_score = np.ma.masked_invalid(phase_score)
    results = Dataset(os.path.join(self.output_path,
                                   "%s_%s.nc" % (self.name, m.name)),
                      mode="w")
    results.setncatts({"name": m.name, "color": m.color})
    Variable(name="Amplitude global",
             unit=obs.unit,
             data=mod_amp.mean()).toNetCDF4(results, group="MeanState")
    Variable(name="Max time global",
             unit="h",
             data=24 * mod_phase.mean()).toNetCDF4(results, group="MeanState")
    Variable(name="Amplitude Score global",
             unit="1",
             data=amp_score.mean()).toNetCDF4(results, group="MeanState")
    Variable(name="Phase Score global",
             unit="1",
             data=phase_score.mean()).toNetCDF4(results, group="MeanState")
    results.close()
    if self.master:
        results = Dataset(os.path.join(self.output_path,
                                       "%s_Benchmark.nc" % self.name),
                          mode="w")
        results.setncatts({"name": "Benchmark",
                           "color": np.asarray([0.5, 0.5, 0.5])})
        Variable(name="Amplitude global",
                 unit=obs.unit,
                 data=obs_amp.mean()).toNetCDF4(results, group="MeanState")
        Variable(name="Max time global",
                 unit="h",
                 data=24 * obs_phase.mean()).toNetCDF4(results, group="MeanState")
        results.close()
def compositePlots(self):
    if not self.master:
        return
    # get the HTML page
    page = [page for page in self.layout.pages if "MeanState" in page.name][0]
    # composite profile plot
    f1 = {}
    a1 = {}
    u1 = None
    for fname in glob.glob("%s/*.nc" % self.output_path):
        with Dataset(fname) as dset:
            if "MeanState" not in dset.groups:
                continue
            group = dset.groups["MeanState"]
            variables = getVariableList(group)
            for region in self.regions:
                vname = "profile_of_%s_over_%s" % (self.variable, region)
                if vname in variables:
                    if region not in f1:
                        f1[region], a1[region] = plt.subplots(figsize=(5, 5),
                                                              tight_layout=True)
                    var = Variable(filename=fname,
                                   variable_name=vname,
                                   groupname="MeanState")
                    u1 = var.unit
                    page.addFigure("Mean regional depth profiles",
                                   "profile",
                                   "RNAME_profile.png",
                                   side="REGIONAL MEAN PROFILE",
                                   legend=False)
                    a1[region].plot(var.data, var.depth, '-',
                                    color=dset.getncattr("color"))
    for key in f1.keys():
        a1[key].set_xlabel("%s [%s]" % (self.variable, u1))
        a1[key].set_ylabel("depth [m]")
        a1[key].invert_yaxis()
        f1[key].savefig("%s/%s_profile.png" % (self.output_path, key))
    plt.close()
    # spatial distribution Taylor plot
    models = []
    colors = []
    corr = {}
    std = {}
    has_std = False
    for fname in glob.glob("%s/*.nc" % self.output_path):
        with Dataset(fname) as dset:
            models.append(dset.getncattr("name"))
            colors.append(dset.getncattr("color"))
            if "MeanState" not in dset.groups:
                continue
            dset = dset.groups["MeanState"]
            for region in self.regions:
                if region not in std:
                    std[region] = []
                if region not in corr:
                    corr[region] = []
                key = []
                if "scalars" in dset.groups:
                    key = [v for v in dset.groups["scalars"].variables.keys()
                           if ("Spatial Distribution Score" in v and region in v)]
                if len(key) > 0:
                    has_std = True
                    sds = dset.groups["scalars"].variables[key[0]]
                    corr[region].append(sds.getncattr("R"))
                    std[region].append(sds.getncattr("std"))
    if has_std:
        # Legends
        def _alphabeticalBenchmarkFirst(key):
            key = key[0].upper()
            if key == "BENCHMARK":
                return ""  # empty string sorts before any model name
            return key
        tmp = sorted(zip(models, colors), key=_alphabeticalBenchmarkFirst)
        fig, ax = plt.subplots()
        for model, color in tmp:
            ax.plot(0, 0, 'o', mew=0, ms=8, color=color, label=model)
        handles, labels = ax.get_legend_handles_labels()
        plt.close()
        ncol = np.ceil(float(len(models)) / 11.).astype(int)
        fig, ax = plt.subplots(figsize=(3. * ncol, 2.8), tight_layout=True)
        ax.legend(handles, labels,
                  loc="upper right",
                  ncol=ncol,
                  fontsize=10,
                  numpoints=1)
        ax.axis('off')
        fig.savefig("%s/legend_spatial_variance.png" % self.output_path)
        plt.close()
        page.addFigure("Period mean at surface",
                       "spatial_variance",
                       "RNAME_spatial_variance.png",
                       side="SPATIAL TAYLOR DIAGRAM",
                       legend=False)
        page.addFigure("Period mean at surface",
                       "legend_spatial_variance",
                       "legend_spatial_variance.png",
                       side="MODEL COLORS",
                       legend=False)
        if "Benchmark" in models:
            colors.pop(models.index("Benchmark"))
        for region in self.regions:
            if not (region in std and region in corr):
                continue
            if len(std[region]) != len(corr[region]):
                continue
            if len(std[region]) == 0:
                continue
            fig = plt.figure(figsize=(6.0, 6.0))
            post.TaylorDiagram(np.asarray(std[region]),
                               np.asarray(corr[region]),
                               1.0, fig, colors)
            fig.savefig("%s/%s_spatial_variance.png" % (self.output_path, region))
            plt.close()
def modelPlots(self, m):

    def _fheight(region):
        if region in ["arctic", "southern"]:
            return 6.8
        return 2.8

    bname = "%s/%s_Benchmark.nc" % (self.output_path, self.name)
    fname = "%s/%s_%s.nc" % (self.output_path, self.name, m.name)
    if not os.path.isfile(bname):
        return
    if not os.path.isfile(fname):
        return
    # get the HTML page
    page = [page for page in self.layout.pages if "MeanState" in page.name][0]
    with Dataset(fname) as dataset:
        group = dataset.groups["MeanState"]
        variables = getVariableList(group)
        color = dataset.getncattr("color")
        vname = "timeint_surface_%s" % self.variable
        if vname in variables:
            var = Variable(filename=fname, variable_name=vname, groupname="MeanState")
            page.addFigure("Period mean at surface",
                           "timeint",
                           "MNAME_RNAME_timeint.png",
                           side="MODEL SURFACE MEAN",
                           legend=True)
            for region in self.regions:
                fig = plt.figure()
                ax = fig.add_axes([0.06, 0.025, 0.88, 0.965])
                var.plot(ax,
                         region=region,
                         vmin=self.limits["timeint"]["min"],
                         vmax=self.limits["timeint"]["max"],
                         cmap=self.cmap,
                         land=0.750,
                         water=0.875)
                fig.savefig("%s/%s_%s_timeint.png" % (self.output_path, m.name, region))
                plt.close()
        vname = "bias_surface_%s" % self.variable
        if vname in variables:
            var = Variable(filename=fname, variable_name=vname, groupname="MeanState")
            page.addFigure("Period mean at surface",
                           "bias",
                           "MNAME_RNAME_bias.png",
                           side="SURFACE MEAN BIAS",
                           legend=True)
            for region in self.regions:
                fig = plt.figure()
                ax = fig.add_axes([0.06, 0.025, 0.88, 0.965])
                var.plot(ax,
                         region=region,
                         vmin=self.limits["bias"]["min"],
                         vmax=self.limits["bias"]["max"],
                         cmap="seismic",
                         land=0.750,
                         water=0.875)
                fig.savefig("%s/%s_%s_bias.png" % (self.output_path, m.name, region))
                plt.close()
        vname = "biasscore_surface_%s" % self.variable
        if vname in variables:
            var = Variable(filename=fname, variable_name=vname, groupname="MeanState")
            page.addFigure("Period mean at surface",
                           "biasscore",
                           "MNAME_RNAME_biasscore.png",
                           side="SURFACE MEAN BIAS SCORE",
                           legend=True)
            for region in self.regions:
                fig = plt.figure()
                ax = fig.add_axes([0.06, 0.025, 0.88, 0.965])
                var.plot(ax, region=region, vmin=0, vmax=1,
                         cmap="RdYlGn", land=0.750, water=0.875)
                fig.savefig("%s/%s_%s_biasscore.png" % (self.output_path, m.name, region))
                plt.close()
        vname = "rmse_surface_%s" % self.variable
        if vname in variables:
            var = Variable(filename=fname, variable_name=vname, groupname="MeanState")
            page.addFigure("Period mean at surface",
                           "rmse",
                           "MNAME_RNAME_rmse.png",
                           side="SURFACE MEAN RMSE",
                           legend=True)
            for region in self.regions:
                fig = plt.figure()
                ax = fig.add_axes([0.06, 0.025, 0.88, 0.965])
                var.plot(ax,
                         region=region,
                         vmin=self.limits["rmse"]["min"],
                         vmax=self.limits["rmse"]["max"],
                         cmap="YlOrRd",
                         land=0.750,
                         water=0.875)
                fig.savefig("%s/%s_%s_rmse.png" % (self.output_path, m.name, region))
                plt.close()
        vname = "rmsescore_surface_%s" % self.variable
        if vname in variables:
            var = Variable(filename=fname, variable_name=vname, groupname="MeanState")
            page.addFigure("Period mean at surface",
                           "rmsescore",
                           "MNAME_RNAME_rmsescore.png",
                           side="SURFACE MEAN RMSE SCORE",
                           legend=True)
            for region in self.regions:
                fig = plt.figure()
                ax = fig.add_axes([0.06, 0.025, 0.88, 0.965])
                var.plot(ax, region=region, vmin=0, vmax=1,
                         cmap="RdYlGn", land=0.750, water=0.875)
                fig.savefig("%s/%s_%s_rmsescore.png" % (self.output_path, m.name, region))
                plt.close()
        for region in self.regions:
            vname = "timelonint_of_%s_over_%s" % (self.variable, region)
            if vname in variables:
                var = Variable(filename=fname, variable_name=vname, groupname="MeanState")
                if region == "global":
                    page.addFigure("Mean regional depth profiles",
                                   "timelonint",
                                   "MNAME_RNAME_timelonint.png",
                                   side="MODEL DEPTH PROFILE",
                                   legend=True,
                                   longname="Time/longitude averaged profile")
                fig, ax = plt.subplots(figsize=(6.8, 2.8), tight_layout=True)
                l = np.hstack([var.lat_bnds[:, 0], var.lat_bnds[-1, 1]])
                d = np.hstack([var.depth_bnds[:, 0], var.depth_bnds[-1, 1]])
                ind = np.all(var.data.mask, axis=0)
                ind = np.ma.masked_array(range(ind.size), mask=ind, dtype=int)
                b = ind.min()
                e = ind.max() + 1
                ax.pcolormesh(l[b:(e + 1)], d, var.data[:, b:e],
                              vmin=self.limits["timelonint"]["global"]["min"],
                              vmax=self.limits["timelonint"]["global"]["max"],
                              cmap=self.cmap)
                ax.set_xlabel("latitude")
                ax.set_ylim((d.max(), d.min()))
                ax.set_ylabel("depth [m]")
                fig.savefig("%s/%s_%s_timelonint.png" % (self.output_path, m.name, region))
                plt.close()
    if not self.master:
        return
    with Dataset(bname) as dataset:
        group = dataset.groups["MeanState"]
        variables = getVariableList(group)
        color = dataset.getncattr("color")
        vname = "timeint_surface_%s" % self.variable
        if vname in variables:
            var = Variable(filename=bname, variable_name=vname, groupname="MeanState")
            page.addFigure("Period mean at surface",
                           "benchmark_timeint",
                           "Benchmark_RNAME_timeint.png",
                           side="BENCHMARK SURFACE MEAN",
                           legend=True)
            for region in self.regions:
                fig = plt.figure()
                ax = fig.add_axes([0.06, 0.025, 0.88, 0.965])
                var.plot(ax,
                         region=region,
                         vmin=self.limits["timeint"]["min"],
                         vmax=self.limits["timeint"]["max"],
                         cmap=self.cmap,
                         land=0.750,
                         water=0.875)
                fig.savefig("%s/Benchmark_%s_timeint.png" % (self.output_path, region))
                plt.close()
        for region in self.regions:
            vname = "timelonint_of_%s_over_%s" % (self.variable, region)
            if vname in variables:
                var = Variable(filename=bname, variable_name=vname, groupname="MeanState")
                if region == "global":
                    page.addFigure("Mean regional depth profiles",
                                   "benchmark_timelonint",
                                   "Benchmark_RNAME_timelonint.png",
                                   side="BENCHMARK DEPTH PROFILE",
                                   legend=True,
                                   longname="Time/longitude averaged profile")
                fig, ax = plt.subplots(figsize=(6.8, 2.8), tight_layout=True)
                l = np.hstack([var.lat_bnds[:, 0], var.lat_bnds[-1, 1]])
                d = np.hstack([var.depth_bnds[:, 0], var.depth_bnds[-1, 1]])
                ind = np.all(var.data.mask, axis=0)
                ind = np.ma.masked_array(range(ind.size), mask=ind, dtype=int)
                b = ind.min()
                e = ind.max() + 1
                ax.pcolormesh(l[b:(e + 1)], d, var.data[:, b:e],
                              vmin=self.limits["timelonint"]["global"]["min"],
                              vmax=self.limits["timelonint"]["global"]["max"],
                              cmap=self.cmap)
                ax.set_xlabel("latitude")
                ax.set_ylim((d.max(), d.min()))
                ax.set_ylabel("depth [m]")
                fig.savefig("%s/Benchmark_%s_timelonint.png" % (self.output_path, region))
                plt.close()
def modelPlots(self, m):
    # Check that the required intermediate files are present
    bname = "%s/%s_Benchmark.nc" % (self.output_path, self.name)
    fname = "%s/%s_%s.nc" % (self.output_path, self.name, m.name)
    if not os.path.isfile(bname):
        return
    if not os.path.isfile(fname):
        return
    # Get the HTML page
    page = [page for page in self.layout.pages if "MeanState" in page.name][0]
    # Read variables from the datafiles
    obs = Variable(filename=bname, variable_name="co2", groupname="MeanState")
    mod = Variable(filename=fname, variable_name="co2", groupname="MeanState")
    ocyc = Variable(filename=bname, variable_name="cycle", groupname="MeanState")
    mcyc = Variable(filename=fname, variable_name="cycle", groupname="MeanState")
    oiav = Variable(filename=bname, variable_name="iav", groupname="MeanState")
    miav = Variable(filename=fname, variable_name="iav", groupname="MeanState")
    ocycf = Variable(filename=bname, variable_name="cycle_fine", groupname="MeanState")
    mcycf = Variable(filename=fname, variable_name="cycle_fine", groupname="MeanState")
    omaxp = Variable(filename=bname, variable_name="maxp", groupname="MeanState")
    ominp = Variable(filename=bname, variable_name="minp", groupname="MeanState")
    oamp = Variable(filename=bname, variable_name="amp", groupname="MeanState")
    mmaxp = Variable(filename=fname, variable_name="maxp", groupname="MeanState")
    mminp = Variable(filename=fname, variable_name="minp", groupname="MeanState")
    mamp = Variable(filename=fname, variable_name="amp", groupname="MeanState")
    t = np.linspace(0, 365, 366)
    # Create an index for ordering sites by descending latitude
    sord = np.argsort(obs.lat)[::-1]
    inds = np.asarray(range(len(self.lbls)), dtype=int)[sord]
    lbls = np.asarray(self.lbls)[sord]
    # Create sparkline plots of each site
    fig_height = 1.
    width_per_year = 5. / 28
    fig_dpi = 300.
    lw = 1.
    bndmonths = np.asarray(bnd_months, dtype=float) / 365.
    for site_id, site in zip(inds, lbls):
        # Initialize site info
        band = self.lat_bands.searchsorted(obs.lat[site_id])
        section = "Latitude Band %d to %d [ppm]" % (self.lat_bands[band - 1],
                                                    self.lat_bands[band])
        vmin = min(obs.data[:, site_id].min(), mod.data[:, site_id].min())
        vmax = max(obs.data[:, site_id].max(), mod.data[:, site_id].max())
        tick = max(int(np.floor(min(vmax, abs(vmin)))), 1)
        yticks = [-tick, 0, tick]
        # How many years of data do we have?
        t0, tf = mod.time_bnds[(np.where(
            (mod.data[:, site_id] * mod.time).mask == False)[0])[[0, -1]]] / 365. + 1850
        t0 = np.floor(t0[0])
        tf = np.ceil(tf[-1])
        xticks = [i for i in range(int(t0), int(tf) + 1) if str(i)[-1] == "0"]
        # Plot setup
        fig_width0 = (5.) * width_per_year
        fig_width1 = (tf - t0) * width_per_year
        fig_width2 = (tf - t0) * width_per_year
        fig_width3 = (10.) * width_per_year
        fig, ax = plt.subplots(
            ncols=4,
            figsize=(fig_width0 + fig_width1 + fig_width2 + fig_width3, fig_height),
            gridspec_kw={'width_ratios': [fig_width0, fig_width3,
                                          fig_width1, fig_width2]},
            tight_layout=True,
            dpi=fig_dpi)
        # Text only plot with the name and location of the site
        ax[0].text(0.5, 0.5,
                   "%s\n%d,%d" % (site, obs.lat[site_id], obs.lon[site_id]),
                   horizontalalignment='center',
                   verticalalignment='center',
                   transform=ax[0].transAxes)
        ax[0].set_xticks([])
        ax[0].set_yticks([])
        ax[0].axis('off')
        # Plot the finely interpolated annual cycle, shade JFM and JJA
        ax[1].fill_between(bndmonths[[0, 3]], [vmin, vmin], [vmax, vmax],
                           color='k', alpha=0.05, lw=0)
        ax[1].fill_between(bndmonths[[6, 9]], [vmin, vmin], [vmax, vmax],
                           color='k', alpha=0.05, lw=0)
        ax[1].plot(ocyc.time / 365, ocyc.data[:, site_id],
                   lw=1.5 * lw, color='k', alpha=0.35)
        ax[1].plot(mcyc.time / 365, mcyc.data[:, site_id], lw=lw, color=m.color)
        ax[1].set_ylim(vmin, vmax)
        ax[1].spines['top'].set_visible(False)
        ax[1].spines['right'].set_visible(False)
        ax[1].spines['bottom'].set_position('zero')
        ax[1].set_xticks([])
        ax[1].set_yticks(yticks)
        ax[1].set_xticklabels([])
        ax[1].set_ylabel('cycle')
        # Plot the variability in co2, shade every other decade
        shade = [t0, ] + xticks + [tf, ]
        alf = 0.15
        bot = vmin + 0.02 * (vmax - vmin)
        for i in range(1, len(shade) - 1):
            if i % 2 == 0:
                ax[2].text(shade[i], bot, shade[i], color='k', alpha=alf, size=12)
                ax[3].text(shade[i], bot, shade[i], color='k', alpha=alf, size=12)
            else:
                ax[2].fill_between(shade[i:(i + 2)], [vmin, vmin], [vmax, vmax],
                                   color='k', alpha=0.05, lw=0)
                ax[2].text(shade[i], bot, shade[i], color='k', alpha=alf, size=12)
                ax[3].fill_between(shade[i:(i + 2)], [vmin, vmin], [vmax, vmax],
                                   color='k', alpha=0.05, lw=0)
                ax[3].text(shade[i], bot, shade[i], color='k', alpha=alf, size=12)
        ax[2].plot(obs.time / 365 + 1850, obs.data[:, site_id],
                   lw=1.5 * lw, color='k', alpha=0.35)
        ax[2].plot(mod.time / 365 + 1850, mod.data[:, site_id], lw=lw, color=m.color)
        ax[2].set_ylim(vmin, vmax)
        ax[2].spines['top'].set_visible(False)
        ax[2].spines['right'].set_visible(False)
        ax[2].spines['bottom'].set_position('zero')
        ax[2].set_yticks(yticks)
        ax[2].set_xticklabels([])
        ax[2].set_xticks([])
        ax[2].set_ylabel('var')
        # Plot the interannual variability in co2, shade every other decade
        ax[3].plot(oiav.time / 365 + 1850, oiav.data[:, site_id],
                   lw=1.5 * lw, color='k', alpha=0.35)
        ax[3].plot(miav.time / 365 + 1850, miav.data[:, site_id], lw=lw, color=m.color)
        ax[3].set_ylim(vmin, vmax)
        ax[3].spines['top'].set_visible(False)
        ax[3].spines['right'].set_visible(False)
        ax[3].spines['bottom'].set_position('zero')
        ax[3].set_xticks([])
        ax[3].set_yticks(yticks)
        ax[3].tick_params(axis='x', direction='inout', length=10)
        ax[3].set_ylabel('iav')
        # Save the figure
        fig.savefig(os.path.join(self.output_path,
                                 "%s_global_%s.png" % (m.name, site)))
        page.addFigure(section, site,
                       "MNAME_global_%s.png" % site,
                       side="",
                       legend=False,
                       width=fig.get_size_inches()[0] * fig.dpi * 0.25,
                       br=True,
                       longname="Site %s" % site)
        plt.close()
    # Compute mean amplitude, max and min phase over latitude bands
    lat_bnds = self.lat_bands
    lat = 0.5 * (lat_bnds[:-1] + lat_bnds[1:])
    nb = lat_bnds.size - 1
    o_band_min = np.zeros(nb)
    o_band_max = np.zeros(nb)
    o_band_amp = np.zeros(nb)
    o_band_iav = np.zeros(nb)
    m_band_min = np.zeros(nb)
    m_band_max = np.zeros(nb)
    m_band_amp = np.zeros(nb)
    m_band_iav = np.zeros(nb)
    with np.errstate(under='ignore'):
        for i in range(o_band_min.size):
            ind = np.where((obs.lat > lat_bnds[i]) *
                           (obs.lat <= lat_bnds[i + 1]))[0]
            o_band_min[i] = _meanDay(ominp.data[ind])
            o_band_max[i] = _meanDay(omaxp.data[ind])
            o_band_amp[i] = oamp.data[ind].mean()
            o_band_iav[i] = oiav.data.std(axis=0)[ind].mean()
            m_band_min[i] = _meanDay(mminp.data[ind])
            m_band_max[i] = _meanDay(mmaxp.data[ind])
            m_band_amp[i] = mamp.data[ind].mean()
            m_band_iav[i] = miav.data.std(axis=0)[ind].mean()
    # To plot the mean values over latitude bands superimposed on
    # the globe, we have to transform the phase and amplitude
    # values to [-180,180], as if they were longitudes.
    o_band_min = o_band_min / 365. * 360 - 180
    o_band_max = o_band_max / 365. * 360 - 180
    m_band_min = m_band_min / 365. * 360 - 180
    m_band_max = m_band_max / 365. * 360 - 180
    max_amp = o_band_amp.max()
    min_amp = o_band_amp.min()
    amp_ticks = np.linspace(min_amp, max_amp, 6)
    amp_ticklabels = ["%.2f" % t for t in amp_ticks]
    damp = 0.1 * (max_amp - min_amp)
    max_amp += damp
    min_amp -= damp
    o_band_amp = (o_band_amp - min_amp) / (max_amp - min_amp) * 360 - 180
    m_band_amp = (m_band_amp - min_amp) / (max_amp - min_amp) * 360 - 180
    amp_ticks = (amp_ticks - min_amp) / (max_amp - min_amp) * 360 - 180
    max_iav = max(o_band_iav.max(), m_band_iav.max())
    min_iav = 0.
    iav_ticks = np.linspace(min_iav, max_iav, 6)
    iav_ticklabels = ["%.2f" % t for t in iav_ticks]
    diav = 0.1 * (max_iav - min_iav)
    max_iav += diav
    min_iav -= diav
    o_band_iav = (o_band_iav - min_iav) / (max_iav - min_iav) * 360. - 180.
    m_band_iav = (m_band_iav - min_iav) / (max_iav - min_iav) * 360. - 180.
    iav_ticks = (iav_ticks - min_iav) / (max_iav - min_iav) * 360. - 180.
    # Plot mean latitude band amplitude where amplitude is on the longitude axis
    fig, ax = plt.subplots(figsize=(8, 4.5), tight_layout=True)
    bmap = Basemap(projection='cyl',
                   llcrnrlon=-180, llcrnrlat=-90,
                   urcrnrlon=+180, urcrnrlat=+90,
                   ax=ax, resolution='c')
    bmap.drawlsmask(land_color="0.875", ocean_color="1.000", lakes=True)
    ms = 8
    bmap.scatter(obs.lon, obs.lat, 8, color="0.60", latlon=True, label="Sites", ax=ax)
    ax.plot(o_band_amp, lat, '--o',
            color=np.asarray([0.5, 0.5, 0.5]),
            label="%s amplitude" % self.name, mew=0, markersize=ms)
    ax.plot(m_band_amp, lat, '-o',
            color=m.color,
            label="%s amplitude" % m.name, mew=0, markersize=ms)
    ax.yaxis.grid(color="0.875", linestyle="-")
    ax.legend(bbox_to_anchor=(0, 1.005, 1, 0.25),
              loc='lower left', mode='expand', ncol=5,
              borderaxespad=0, frameon=False)
    ax.set_xlim(-180, 180)
    ax.set_ylim(-90, 90)
    ax.set_xlabel(obs.unit)
    ax.set_xticks(amp_ticks)
    ax.set_xticklabels(amp_ticklabels)
    ax.set_yticks(lat_bnds)
    fig.savefig(os.path.join(self.output_path, "%s_amp.png" % m.name))
    page.addFigure("Summary", "amp", "MNAME_amp.png",
                   side="AMPLITUDE",
                   width=fig.get_size_inches()[0] * fig.dpi * 0.75,
                   legend=False)
    # Plot mean latitude band iav where iav is on the longitude axis
    fig, ax = plt.subplots(figsize=(8, 4.5), tight_layout=True)
    bmap = Basemap(projection='cyl',
                   llcrnrlon=-180, llcrnrlat=-90,
                   urcrnrlon=+180, urcrnrlat=+90,
                   ax=ax, resolution='c')
    bmap.drawlsmask(land_color="0.875", ocean_color="1.000", lakes=True)
    ms = 8
    bmap.scatter(obs.lon, obs.lat, 8, color="0.60", latlon=True, label="Sites", ax=ax)
    ax.plot(o_band_iav, lat, '--o',
            color=np.asarray([0.5, 0.5, 0.5]),
            label="%s variability" % self.name, mew=0, markersize=ms)
    ax.plot(m_band_iav, lat, '-o',
            color=m.color,
            label="%s variability" % m.name, mew=0, markersize=ms)
    ax.yaxis.grid(color="0.875", linestyle="-")
    ax.legend(bbox_to_anchor=(0, 1.005, 1, 0.25),
              loc='lower left', mode='expand', ncol=5,
              borderaxespad=0, frameon=False)
    ax.set_xlim(-180, 180)
    ax.set_ylim(-90, 90)
    ax.set_xlabel(obs.unit)
    ax.set_xticks(iav_ticks)
    ax.set_xticklabels(iav_ticklabels)
    ax.set_yticks(lat_bnds)
    fig.savefig(os.path.join(self.output_path, "%s_iav.png" % m.name))
    page.addFigure("Summary", "iav", "MNAME_iav.png",
                   side="INTERANNUAL VARIABILITY",
                   width=fig.get_size_inches()[0] * fig.dpi * 0.75,
                   legend=False)
    # Plot mean latitude band max phase where the phase is on the longitude axis
    fig, ax = plt.subplots(figsize=(8, 4.5), tight_layout=True)
    bmap = Basemap(projection='cyl',
                   llcrnrlon=-180, llcrnrlat=-90,
                   urcrnrlon=+180, urcrnrlat=+90,
                   ax=ax, resolution='c')
    bmap.drawlsmask(land_color="0.875", ocean_color="1.000", lakes=True)
    bmap.scatter(obs.lon, obs.lat, 8, color="0.60", latlon=True, label="Sites", ax=ax)
    ax.plot(o_band_max, lat, '--o',
            color=np.asarray([0.5, 0.5, 0.5]),
            label="%s maximum" % self.name, mew=0, markersize=ms)
    ax.plot(m_band_max, lat, '-o',
            color=m.color,
            label="%s maximum" % m.name, mew=0, markersize=ms)
    ax.yaxis.grid(color="0.875", linestyle="-")
    ax.legend(bbox_to_anchor=(0, 1.005, 1, 0.25),
              loc='lower left', mode='expand', ncol=3,
              borderaxespad=0, frameon=False)
    ax.set_xlim(-180, 180)
    ax.set_ylim(-90, 90)
    ax.set_xticks(mid_months / 365. * 360. - 180)
    ax.set_xticklabels(lbl_months)
    ax.set_yticks(lat_bnds)
    fig.savefig(os.path.join(self.output_path, "%s_maxphase.png" % m.name))
    page.addFigure("Summary", "maxphase", "MNAME_maxphase.png",
                   side="TIMING OF MAXIMUM",
                   width=fig.get_size_inches()[0] * fig.dpi * 0.75,
                   legend=False)
    # Plot mean latitude band min phase where the phase is on the longitude axis
    fig, ax = plt.subplots(figsize=(8, 4.5), tight_layout=True)
    bmap = Basemap(projection='cyl',
                   llcrnrlon=-180, llcrnrlat=-90,
                   urcrnrlon=+180, urcrnrlat=+90,
                   ax=ax, resolution='c')
    bmap.drawlsmask(land_color="0.875", ocean_color="1.000", lakes=True)
    bmap.scatter(obs.lon, obs.lat, 8, color="0.60", latlon=True, label="Sites", ax=ax)
    ax.plot(o_band_min, lat, '--o',
            color=np.asarray([0.5, 0.5, 0.5]),
            label="%s minimum" % self.name, mew=0, markersize=ms)
    ax.plot(m_band_min, lat, '-o',
            color=m.color,
            label="%s minimum" % m.name, mew=0, markersize=ms)
    ax.yaxis.grid(color="0.875", linestyle="-")
    ax.legend(bbox_to_anchor=(0, 1.005, 1, 0.25),
              loc='lower left', mode='expand', ncol=3,
              borderaxespad=0, frameon=False)
    ax.set_xlim(-180, 180)
    ax.set_ylim(-90, 90)
    ax.set_xticks(mid_months / 365. * 360. - 180)
    ax.set_xticklabels(lbl_months)
    ax.set_yticks(lat_bnds)
    fig.savefig(os.path.join(self.output_path, "%s_minphase.png" % m.name))
    page.addFigure("Summary", "minphase", "MNAME_minphase.png",
                   side="TIMING OF MINIMUM",
                   width=fig.get_size_inches()[0] * fig.dpi * 0.75,
                   legend=False)
def getDiurnalDataForGivenYear(var, year):
    """ """
    # Get this year's data, make sure there is enough
    spd = int(round(1 / np.diff(var.time_bnds, axis=1).mean()))  # samples per day
    datum = cftime.date2num(cftime.datetime(year, 1, 1), "days since 1850-1-1")
    ind = np.where(year == var.year)[0]
    t = var.time[ind] - datum
    tb = var.time_bnds[ind] - datum
    data = var.data[ind, 0]
    # Reshape the data
    begin = np.argmin(tb[:(spd - 1), 0] % 1)
    end = begin + int(t[begin:].size / float(spd) - 1) * spd
    shift = int(round((var.tmax - 12) / (var.dt * 24)))
    begin += shift
    end += shift
    shp = (-1, spd) + data.shape[1:]
    data = data[begin:end].reshape(shp)
    t = t[begin:end].reshape(shp).mean(axis=1)
    # Diurnal magnitude
    mag = Variable(name="mag%d" % year,
                   unit=var.unit,
                   time=t,
                   data=data.max(axis=1) - data.min(axis=1))
    # Some of the tower data is 'intelligently' masked, which leads to
    # too much of the data being removed to use my change-detection
    # algorithm to determine the season begin/end.
    mag.skip = False
    if mag.data.mask.all():
        raise NotEnoughDataInYear  # the year is entirely masked
    dmag = (mag.data.max() - mag.data.min())
    if dmag < 1e-14:
        raise NotEnoughDataInYear  # the diurnal magnitude has no amplitude
    # Some datasets mask out the off-seasons; the season is then taken to
    # be all of the unmasked data.
    begin_day, end_day = mag.time[mag.data.mask == False][[0, -1]]  # begin/end of the unmasked data
    if ((begin_day < 2 and end_day < 363) or (begin_day > 2 and end_day > 363)):
        # this is likely a dataset which is a partial year
        raise NotEnoughDataInYear
    elif (begin_day > 2 and end_day < 363):
        # this is likely a dataset that masks out off-seasons
        season = np.asarray([begin_day, end_day])
    else:
        season = findSeasonalTiming(mag.time, mag.data)
    centroid = findSeasonalCentroid(mag.time, mag.data)
    mag.season = season
    mag.centroid = centroid
    # Mask out the off-season
    mask = (t < season[0]) + (t > season[1])
    data = np.ma.masked_array(data,
                              mask=mask[:, np.newaxis] * np.ones(data.shape[1], dtype=bool))
    # Mean seasonal diurnal cycle
    uncert = np.zeros((data.shape[1], 2))
    for i in range(data.shape[1]):
        d = data[:, i].compressed()
        if d.size == 0:
            continue
        uncert[i, :] = np.percentile(d, [10, 90])
    day = np.linspace(0, 1, spd + 1)
    day = 0.5 * (day[:-1] + day[1:])
    with np.errstate(under='ignore', over='ignore'):
        cycle = Variable(name="cycle%d" % year,
                         unit=var.unit,
                         time=day,
                         data=data.mean(axis=0),
                         data_bnds=uncert)
    # Mean seasonal uptake
    uptake = Variable(unit=var.unit,
                      time=var.time[ind] - datum,
                      time_bnds=var.time_bnds[ind] - datum,
                      data=var.data[ind, 0])
    uptake.data = np.ma.masked_array(uptake.data,
                                     mask=((uptake.time < season[0]) +
                                           (uptake.time > season[1])))
    uptake = uptake.integrateInTime(mean=True)
    cycle.uptake = uptake.data
    # Timing of the peak of the seasonal cycle, which could be a maximum or
    # a minimum; check the second derivative of a best-fit parabola to the
    # daytime data.
    begin = int(spd / 4)
    end = int(spd * 3 / 4)
    p = np.polyfit(cycle.time[begin:end], cycle.data[begin:end], 2)
    if p[0] < 0:
        cycle.peak = day[cycle.data.argmax()] * 24
    else:
        cycle.peak = day[cycle.data.argmin()] * 24
    return mag, cycle
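# Illustrative sketch (synthetic half-hourly series, not tower data) of the
# reshape at the heart of getDiurnalDataForGivenYear: fold a flat sub-daily
# series into a (days, samples-per-day) array so each row is one day; the
# diurnal magnitude is then simply the per-row max minus min.
import numpy as np
spd = 48                                    # samples per day (30-minute data)
t = np.arange(3 * spd) / float(spd)         # 3 days of time [d]
data = np.sin(2 * np.pi * t)                # toy diurnal signal
daily = data.reshape(-1, spd)               # shape (3, 48), one day per row
mag = daily.max(axis=1) - daily.min(axis=1) # diurnal magnitude for each day
print(mag)                                  # [2. 2. 2.]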
def stageData(self, m):
    # Get the observational data
    obs = Variable(filename=self.source,
                   variable_name=self.variable,
                   alternate_vars=self.alternate_vars)
    # Reduce the sites
    if self.map:
        obs.lat = obs.lat[self.map]
        obs.lon = obs.lon[self.map]
        obs.depth = obs.depth[self.map]
        obs.data = obs.data[:, self.map]
        obs.ndata = len(self.map)
    # Get the model result
    force_emulation = self.keywords.get("force_emulation", "False").lower() == "true"
    never_emulation = self.keywords.get("never_emulation", "False").lower() == "true"
    no_co2 = False
    mod = None
    if not force_emulation:
        try:
            #print("Trying to get co2 from %s" % m.name)
            mod = m.extractTimeSeries(self.variable,
                                      alt_vars=self.alternate_vars,
                                      initial_time=obs.time_bnds[0, 0],
                                      final_time=obs.time_bnds[-1, 1],
                                      lats=None if obs.spatial else obs.lat,
                                      lons=None if obs.spatial else obs.lon)
        except il.VarNotInModel:
            #print("co2 not in %s" % m.name)
            no_co2 = True
    if ((mod is None) or no_co2) and (not never_emulation):
        #print("Emulating co2 in %s" % m.name)
        mod = self.emulatedModelResult(m, obs)
    if mod is None:
        raise il.VarNotInModel()
    # Get the right layering: the one closest to the observed layer
    # elevation where the data are not mostly masked
    if mod.layered:
        ind = (np.abs(obs.depth[:, np.newaxis] - mod.depth)).argmin(axis=1)
        for i in range(ind.size):
            while (mod.data[:, ind[i], i].mask.sum() > 0.5 * mod.data.shape[0]):
                ind[i] += 1
        data = []
        for i in range(ind.size):
            data.append(mod.data[:, ind[i], i])
        mod.data = np.ma.masked_array(data).T
        mod.depth = None
        mod.depth_bnds = None
        mod.layered = False
    obs, mod = il.MakeComparable(obs, mod, mask_ref=True, clip_ref=True)
    mod.data.mask += obs.data.mask
    # Remove the trend via a quadratic polynomial
    obs = _detrend(obs)
    mod = _detrend(mod)
    return obs, mod
def confront(self, m):
    # Grab the data
    obs, mod = self.stageData(m)
    # Compute amplitude, min and max phase, and annual cycle as numpy data arrays
    ocyc, ot, otb = _cycleShape(obs)
    mcyc, mt, mtb = _cycleShape(mod)
    n = len(self.lbls)
    obs_amp = np.zeros(n)
    obs_maxp = np.zeros(n)
    obs_minp = np.zeros(n)
    mod_amp = np.zeros(n)
    mod_maxp = np.zeros(n)
    mod_minp = np.zeros(n)
    obs_cyc = np.zeros((366, n))
    mod_cyc = np.zeros((366, n))
    well_define = np.zeros(n)
    for i, site in enumerate(self.lbls):
        obs_amp[i], obs_maxp[i], obs_minp[i], obs_cyc[:, i] = \
            _siteCharacteristics(ot, ocyc[..., i])
        mod_amp[i], mod_maxp[i], mod_minp[i], mod_cyc[:, i] = \
            _siteCharacteristics(mt, mcyc[..., i])
        well_define[i] = _phaseWellDefined(obs.time, obs.data[:, i])
    well_define /= well_define.sum()
    with np.errstate(under='ignore'):
        # Write out ILAMB variables for observed quantities
        ocyc = Variable(name="cycle",  # mean annual cycle
                        unit=obs.unit,
                        data=ocyc.mean(axis=0),
                        ndata=obs.ndata,
                        lat=obs.lat, lon=obs.lon,
                        time=ot, time_bnds=otb)
        oiav = Variable(name="iav",  # deseasonalized interannual variability
                        unit=obs.unit,
                        data=obs.data - ocyc.data[ocyc.time.searchsorted(obs.time % 365), ...],
                        time=obs.time,
                        ndata=obs.ndata,
                        lat=obs.lat, lon=obs.lon,
                        time_bnds=obs.time_bnds)
        ocycf = Variable(name="cycle_fine",  # finely sampled cycle from cubic interpolation
                         unit=obs.unit,
                         data=obs_cyc,
                         time=np.linspace(0, 365, 366),
                         ndata=obs.ndata,
                         lat=obs.lat, lon=obs.lon)
        obs_amp = Variable(name="amp",  # mean amplitude over the time period
                           unit=obs.unit,
                           data=obs_amp,
                           ndata=obs.ndata,
                           lat=obs.lat, lon=obs.lon)
        obs_maxp = Variable(name="maxp",  # Julian day of the maximum of the annual cycle
                            unit="d",
                            data=obs_maxp,
                            ndata=obs.ndata,
                            lat=obs.lat, lon=obs.lon)
        obs_minp = Variable(name="minp",  # Julian day of the minimum of the annual cycle
                            unit="d",
                            data=obs_minp,
                            ndata=obs.ndata,
                            lat=obs.lat, lon=obs.lon)
        # Write out ILAMB variables for modeled quantities
        mcyc = Variable(name="cycle",  # mean annual cycle
                        unit=mod.unit,
                        data=mcyc.mean(axis=0),
                        ndata=mod.ndata,
                        lat=mod.lat, lon=mod.lon,
                        time=mt, time_bnds=mtb)
        miav = Variable(name="iav",  # deseasonalized interannual variability
                        unit=mod.unit,
                        data=mod.data - mcyc.data[mcyc.time.searchsorted(mod.time % 365), ...],
                        time=mod.time,
                        ndata=mod.ndata,
                        lat=mod.lat, lon=mod.lon,
                        time_bnds=mod.time_bnds)
        mcycf = Variable(name="cycle_fine",  # finely sampled cycle from cubic interpolation
                         unit=mod.unit,
                         data=mod_cyc,
                         time=np.linspace(0, 365, 366),
                         ndata=mod.ndata,
                         lat=mod.lat, lon=mod.lon)
        mod_amp = Variable(name="amp",  # mean amplitude over the time period
                           unit=mod.unit,
                           data=mod_amp,
                           ndata=mod.ndata,
                           lat=mod.lat, lon=mod.lon)
        mod_maxp = Variable(name="maxp",  # Julian day of the maximum of the annual cycle
                            unit="d",
                            data=mod_maxp,
                            ndata=mod.ndata,
                            lat=mod.lat, lon=mod.lon)
        mod_minp = Variable(name="minp",  # Julian day of the minimum of the annual cycle
                            unit="d",
                            data=mod_minp,
                            ndata=mod.ndata,
                            lat=mod.lat, lon=mod.lon)
        # Amplitude score: for each site we compute the relative error
        # in amplitude and then score each site using the exponential.
        # The score for the model is then the arithmetic mean across sites.
        Samp = Variable(name="Amplitude Score global",
                        unit="1",
                        data=np.exp(-np.abs(mod_amp.data - obs_amp.data) / obs_amp.data).mean())
        # Interannual variability score: similar to the amplitude score,
        # we also score the relative error in the stdev(iav) and report
        # a mean across sites.
        ostd = oiav.data.std(axis=0)
        mstd = miav.data.std(axis=0)
        Siav = Variable(name="Interannual Variability Score global",
                        unit="1",
                        data=np.exp(-np.abs(mstd - ostd) / ostd).mean())
        # Min/Max phase score: for each site we compute the phase shift
        # and normalize it linearly, where a 0 day shift gets a score of
        # 1 and a 365/2 day shift gets a 0. We then compute a weighted
        # mean across sites where sites without a well-defined annual
        # cycle are discarded.
        Smax = Variable(name="Max Phase Score global",
                        unit="1",
                        data=np.average(_computeShift(obs_maxp, mod_maxp),
                                        weights=well_define))
        Smin = Variable(name="Min Phase Score global",
                        unit="1",
                        data=np.average(_computeShift(obs_minp, mod_minp),
                                        weights=well_define))
    # Write out the intermediate variables
    with Dataset(os.path.join(self.output_path,
                              "%s_%s.nc" % (self.name, m.name)),
                 mode="w") as results:
        results.setncatts({"name": m.name, "color": m.color})
        for v in [mod, mcyc, miav, mcycf, mod_maxp, mod_minp, mod_amp,
                  Samp, Siav, Smax, Smin]:
            v.toNetCDF4(results, group="MeanState")
    if not self.master:
        return
    with Dataset(os.path.join(self.output_path,
                              "%s_Benchmark.nc" % self.name),
                 mode="w") as results:
        results.setncatts({"name": "Benchmark",
                           "color": np.asarray([0.5, 0.5, 0.5])})
        for v in [obs, ocyc, oiav, ocycf, obs_maxp, obs_minp, obs_amp]:
            v.toNetCDF4(results, group="MeanState")
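# Worked sketch (toy numbers) of the scoring rules described in the comments
# above: the amplitude score is exp(-relative error), and the phase score
# decreases linearly from 1 at a 0-day shift to 0 at a half-year (365/2 day)
# shift. The year-wrapping of the shift is an assumption here; the actual
# behavior lives in the _computeShift helper.
import numpy as np
obs_amp, mod_amp = 8.0, 6.0
amp_score = np.exp(-np.abs(mod_amp - obs_amp) / obs_amp)  # exp(-0.25) ~ 0.78
obs_maxp, mod_maxp = 200.0, 240.0                         # Julian days of the cycle maximum
shift = np.abs(mod_maxp - obs_maxp)
shift = min(shift, 365.0 - shift)                         # assumed wrap around the year
phase_score = 1.0 - shift / (365.0 / 2.0)                 # ~0.78
print(amp_score, phase_score)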
def emulatedModelResult(self, m, obs):
    # Emulation parameters
    emulated_flux = self.keywords.get("emulated_flux", "nbp")
    spinup = 12
    Ninf = 60
    ilev = 1
    # Get the model result
    mod = m.extractTimeSeries(emulated_flux,
                              initial_time=obs.time_bnds[0, 0] - float(Ninf) / 12 * 365 + 29.,
                              final_time=obs.time_bnds[-1, 1])
    # What if we don't have Ninf lead time?
    tf = min(obs.time_bnds[-1, 1], mod.time_bnds[-1, 1])
    obs.trim(t=[-1e20, tf])
    mod.trim(t=[-1e20, tf])
    # Integrate the emulated flux over each pulse region
    region_int = {}
    for region in self.pulse_regions:
        region_int[region] = mod.integrateInSpace(region=region).convert("Pg yr-1")
    # Load the operator from the files
    lat, lon, H = None, None, None
    for i in range(12):
        # FIX: move pulses into one file to avoid requiring a naming convention
        with Dataset(os.path.join(self.pulse_dir, "Pulse%02d.nc" % (i + 1))) as dset:
            if lat is None:
                lat = dset.variables["lat"][...]
            if lon is None:
                lon = dset.variables["lon"][...]
            if H is None:
                H = np.zeros((22, 12, Ninf + 12, lat.size, lon.size))
            for j in range(22):
                T = dset.variables['T%d' % (j + 1)]
                H[j, i, ...] = T[spinup:, ilev, ...] - T[:spinup, ...].mean()
    # Where are our sites?
    ilat = np.abs(lat[:, np.newaxis] - obs.lat).argmin(axis=0)
    ilon = np.abs(lon[:, np.newaxis] - obs.lon).argmin(axis=0)
    # Apply the operator
    Nyrs = mod.time.size // 12
    Ntot = 12 * Nyrs + Ninf
    eflux = np.zeros((obs.ndata, 22, Ntot))
    for j in range(20):
        for s in range(obs.ndata):
            Htemp = H[j, ..., ilat[s], ilon[s]]
            Htrac = np.zeros((22, Ntot, 12 * Nyrs))
            for i in range(Nyrs):
                pb = 12 * i
                pe = 12 * (i + 1)
                re = pe + Ninf
                Htrac[j, pb:re, pb:pe] = Htemp.T
                Htrac[j, re:, pb:pe] = np.tile(Htemp[:, -1], [12 * (Nyrs - i - 1), 1])
            eflux[s, j, :] = np.dot(Htrac[j, ...],
                                    region_int["pulse_region_%d" % (j + 1)].data) * (-1e-3)
    eflux = eflux.sum(axis=1).T
    eflux = eflux[Ninf:-Ninf]
    eflux = np.ma.masked_array(eflux, mask=obs.data.mask)
    mod = Variable(name="co2",
                   unit=obs.unit,
                   lat=obs.lat, lon=obs.lon,
                   ndata=obs.ndata,
                   time=obs.time,
                   time_bnds=obs.time_bnds,
                   data=eflux)
    return mod
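# Toy sketch (synthetic response, not the ILAMB pulse files) of the operator
# application in emulatedModelResult above: each monthly pulse has a finite
# response H over the following months, so the emulated series is the
# superposition of lagged responses, i.e. a matrix-vector product with a
# banded (Toeplitz-like) matrix. The sizes here are made up for illustration.
import numpy as np
Nmon, Ninf = 6, 3
H = np.array([1.0, 0.5, 0.25])          # assumed response to a unit pulse
Htrac = np.zeros((Nmon + Ninf, Nmon))
for i in range(Nmon):
    Htrac[i:i + Ninf, i] = H            # lag the response by the pulse month
flux = np.ones(Nmon)                    # toy monthly pulse strengths
print(Htrac.dot(flux))                  # emulated response time series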
def confront(self, m):
    # Grab the data
    obs, mod = self.stageData(m)
    # What years does the analysis run over?
    obs.year = np.asarray(
        [t.year for t in cftime.num2date(obs.time, "days since 1850-1-1")],
        dtype=int)
    mod.year = np.asarray(
        [t.year for t in cftime.num2date(mod.time, "days since 1850-1-1")],
        dtype=int)
    # Analysis
    mod_file = os.path.join(self.output_path, "%s_%s.nc" % (self.name, m.name))
    obs_file = os.path.join(self.output_path, "%s_Benchmark.nc" % (self.name,))
    with il.FileContextManager(self.master, mod_file, obs_file) as fcm:
        # Encode some names and colors
        fcm.mod_dset.setncatts({"name": m.name,
                                "color": m.color,
                                "lat": mod.lat[0],
                                "lon": mod.lon[0],
                                "complete": 0})
        if self.master:
            fcm.obs_dset.setncatts({"name": "Benchmark",
                                    "color": np.asarray([0.5, 0.5, 0.5]),
                                    "complete": 0})
        Osbegin = []
        Osend = []
        Oslen = []
        Opeak = []
        Ouptake = []
        Msbegin = []
        Msend = []
        Mslen = []
        Mpeak = []
        Muptake = []
        Ssbegin = []
        Ssend = []
        Scentroid = []
        Speak = []
        Suptake = []
        obs_years = 0
        mod_years = 0
        for y in self.years:
            # First try to get the obs for this year; we might not have
            # enough information, in which case we skip the year.
            try:
                obs_mag, obs_cycle = getDiurnalDataForGivenYear(obs, y)
            except NotEnoughDataInYear:
                continue
            # Output what we must even if the model doesn't have data here
            obs_years += 1
            Osbegin.append(obs_mag.season[0])
            Osend.append(obs_mag.season[1])
            Oslen.append(obs_mag.season[1] - obs_mag.season[0])
            Opeak.append(obs_cycle.peak)
            Ouptake.append(obs_cycle.uptake)
            if self.master:
                obs_mag.toNetCDF4(fcm.obs_dset,
                                  group="MeanState",
                                  attributes={"sbegin": obs_mag.season[0],
                                              "send": obs_mag.season[1]})
                obs_cycle.toNetCDF4(fcm.obs_dset,
                                    group="MeanState",
                                    attributes={"uptake": obs_cycle.uptake,
                                                "peak": obs_cycle.peak})
            # Try to get enough data from the model to operate
            try:
                mod_mag, mod_cycle = getDiurnalDataForGivenYear(mod, y)
            except NotEnoughDataInYear:
                continue
            mod_years += 1
            Msbegin.append(mod_mag.season[0])
            Msend.append(mod_mag.season[1])
            Mslen.append(mod_mag.season[1] - mod_mag.season[0])
            Mpeak.append(mod_cycle.peak)
            Muptake.append(mod_cycle.uptake)
            mod_mag.toNetCDF4(fcm.mod_dset,
                              group="MeanState",
                              attributes={"sbegin": mod_mag.season[0],
                                          "send": mod_mag.season[1]})
            mod_cycle.toNetCDF4(fcm.mod_dset,
                                group="MeanState",
                                attributes={"uptake": mod_cycle.uptake,
                                            "peak": mod_cycle.peak})
            # Get scores for this year
            ssbegin, ssend, scentroid, speak, suptake = DiurnalScalars(
                obs_mag, mod_mag, obs_cycle, mod_cycle)
            Ssbegin.append(ssbegin)
            Ssend.append(ssend)
            Scentroid.append(scentroid)
            Speak.append(speak)
            Suptake.append(suptake)
        # Output mean scores/scalars
        if self.master and obs_years > 0:
            Ouptake = np.ma.masked_invalid(Ouptake)
            Variable(name="Number of Years global", unit="1",
                     data=obs_years).toNetCDF4(fcm.obs_dset, group="MeanState")
            Variable(name="Computed UTC Shift global", unit="h",
                     data=obs.tmax - 12).toNetCDF4(fcm.obs_dset, group="MeanState")
            Variable(name="Season Beginning global", unit="d",
                     data=np.asarray(Osbegin).mean()).toNetCDF4(fcm.obs_dset, group="MeanState")
            Variable(name="Season Ending global", unit="d",
                     data=np.asarray(Osend).mean()).toNetCDF4(fcm.obs_dset, group="MeanState")
            Variable(name="Season Length global", unit="d",
                     data=np.asarray(Oslen).mean()).toNetCDF4(fcm.obs_dset, group="MeanState")
            Variable(name="Diurnal Peak Timing global", unit="h",
                     data=np.asarray(Opeak).mean()).toNetCDF4(fcm.obs_dset, group="MeanState")
            Variable(name="Mean Season Uptake global", unit=obs.unit,
                     data=Ouptake.mean()).toNetCDF4(fcm.obs_dset, group="MeanState")
        if mod_years > 0:
            Muptake = np.ma.masked_invalid(Muptake)
            Suptake = np.ma.masked_invalid(Suptake)
            Variable(name="Number of Years global", unit="1",
                     data=mod_years).toNetCDF4(fcm.mod_dset, group="MeanState")
            Variable(name="Computed UTC Shift global", unit="h",
                     data=mod.tmax - 12).toNetCDF4(fcm.mod_dset, group="MeanState")
            Variable(name="Season Beginning global", unit="d",
                     data=np.asarray(Msbegin).mean()).toNetCDF4(fcm.mod_dset, group="MeanState")
            Variable(name="Season Ending global", unit="d",
                     data=np.asarray(Msend).mean()).toNetCDF4(fcm.mod_dset, group="MeanState")
            Variable(name="Season Length global", unit="d",
                     data=np.asarray(Mslen).mean()).toNetCDF4(fcm.mod_dset, group="MeanState")
            Variable(name="Diurnal Peak Timing global", unit="h",
                     data=np.asarray(Mpeak).mean()).toNetCDF4(fcm.mod_dset, group="MeanState")
            Variable(name="Mean Season Uptake global", unit=mod.unit,
                     data=Muptake.mean()).toNetCDF4(fcm.mod_dset, group="MeanState")
            Variable(name="Season Beginning Score global", unit="1",
                     data=np.asarray(Ssbegin).mean()).toNetCDF4(fcm.mod_dset, group="MeanState")
            Variable(name="Season Ending Score global", unit="1",
                     data=np.asarray(Ssend).mean()).toNetCDF4(fcm.mod_dset, group="MeanState")
            Variable(name="Season Strength Score global", unit="1",
                     data=np.asarray(Scentroid).mean()).toNetCDF4(fcm.mod_dset, group="MeanState")
            Variable(name="Diurnal Peak Timing Score global", unit="1",
                     data=np.asarray(Speak).mean()).toNetCDF4(fcm.mod_dset, group="MeanState")
            Variable(name="Diurnal Uptake Score global", unit="1",
                     data=Suptake.mean()).toNetCDF4(fcm.mod_dset, group="MeanState")
        # Flag as complete
        fcm.mod_dset.complete = 1
        if self.master:
            fcm.obs_dset.complete = 1
def test_bias(variables):
    head = "\n--- Testing bias() "
    print("%s%s\n" % (head, "-" * (120 - len(head))))
    for vdict in variables:
        var = vdict["var"]
        try:
            vdict["bias"] = var.bias(var)
            print(vdict["bias"])
        except il.NotSpatialVariable:
            pass

# Setup different types of variables
gpp = {}
gpp["var"] = Variable(filename=os.environ["ILAMB_ROOT"] +
                      "/DATA/gpp/FLUXNET-MTE/derived/gpp.nc",
                      variable_name="gpp")
le = {}
le["var"] = Variable(filename=os.environ["ILAMB_ROOT"] +
                     "/DATA/le/FLUXNET/derived/le.nc",
                     variable_name="le")
co2 = {}
co2["var"] = Variable(filename=os.environ["ILAMB_ROOT"] +
                      "/DATA/co2/MAUNA.LOA/derived/co2_1959-2013.nc",
                      variable_name="co2")
pi = {}
pi["var"] = Variable(data=np.pi, unit="-", name="pi")
variables = [gpp, le, co2, pi]

head = "\n--- Found the following variables for testing "
print("%s%s\n" % (head, "-" * (120 - len(head))))
def stageData(self, m):
    energy_threshold = float(self.keywords.get("energy_threshold", 20.))
    # Handle obs data
    sh_obs = Variable(filename=os.path.join(os.environ["ILAMB_ROOT"],
                                            "DATA/sh/GBAF/sh_0.5x0.5.nc"),
                      variable_name="sh")
    le_obs = Variable(filename=os.path.join(os.environ["ILAMB_ROOT"],
                                            "DATA/le/GBAF/le_0.5x0.5.nc"),
                      variable_name="le")
    sh_obs, le_obs, obs = _evapfrac(sh_obs, le_obs, self.variable, energy_threshold)
    # Prune out uncovered regions
    if obs.time is None:
        raise il.NotTemporalVariable()
    self.pruneRegions(obs)
    # Handle model data
    sh_mod = m.extractTimeSeries("hfss",
                                 initial_time=obs.time_bnds[0, 0],
                                 final_time=obs.time_bnds[-1, 1],
                                 lats=None if obs.spatial else obs.lat,
                                 lons=None if obs.spatial else obs.lon)
    le_mod = m.extractTimeSeries("hfls",
                                 initial_time=obs.time_bnds[0, 0],
                                 final_time=obs.time_bnds[-1, 1],
                                 lats=None if obs.spatial else obs.lat,
                                 lons=None if obs.spatial else obs.lon)
    sh_mod, le_mod, mod = _evapfrac(sh_mod, le_mod, self.variable, energy_threshold)
    # Make variables comparable
    obs, mod = il.MakeComparable(obs, mod,
                                 mask_ref=True,
                                 clip_ref=True,
                                 logstring="[%s][%s]" % (self.longname, m.name))
    sh_obs, sh_mod = il.MakeComparable(sh_obs, sh_mod,
                                       mask_ref=True,
                                       clip_ref=True,
                                       logstring="[%s][%s]" % (self.longname, m.name))
    le_obs, le_mod = il.MakeComparable(le_obs, le_mod,
                                       mask_ref=True,
                                       clip_ref=True,
                                       logstring="[%s][%s]" % (self.longname, m.name))
    # Compute the mean ef
    sh_obs = sh_obs.integrateInTime(mean=True)
    le_obs = le_obs.integrateInTime(mean=True)
    np.seterr(over='ignore', under='ignore')
    obs_timeint = np.ma.masked_array(le_obs.data / (le_obs.data + sh_obs.data),
                                     mask=(sh_obs.data.mask + le_obs.data.mask))
    np.seterr(over='warn', under='warn')
    obs_timeint = Variable(name=self.variable,
                           unit="1",
                           data=obs_timeint,
                           lat=sh_obs.lat, lat_bnds=sh_obs.lat_bnds,
                           lon=sh_obs.lon, lon_bnds=sh_obs.lon_bnds)
    sh_mod = sh_mod.integrateInTime(mean=True)
    le_mod = le_mod.integrateInTime(mean=True)
    np.seterr(over='ignore', under='ignore')
    mod_timeint = np.ma.masked_array(le_mod.data / (le_mod.data + sh_mod.data),
                                     mask=(sh_mod.data.mask + le_mod.data.mask))
    np.seterr(over='warn', under='warn')
    mod_timeint = Variable(name=self.variable,
                           unit="1",
                           data=mod_timeint,
                           lat=sh_mod.lat, lat_bnds=sh_mod.lat_bnds,
                           lon=sh_mod.lon, lon_bnds=sh_mod.lon_bnds)
    return obs, mod, obs_timeint, mod_timeint
def stageData(self, m):
    energy_threshold = float(self.keywords.get("energy_threshold", 10))
    # Handle obs data
    dn_obs = Variable(filename=self.source.replace("albedo", "rsds"),
                      variable_name="rsds")
    up_obs = Variable(filename=self.source.replace("albedo", "rsus"),
                      variable_name="rsus")
    dn_obs, up_obs, obs = _albedo(dn_obs, up_obs, self.variable, energy_threshold)
    # Prune out uncovered regions
    if obs.time is None:
        raise il.NotTemporalVariable()
    self.pruneRegions(obs)
    # Handle model data
    dn_mod = m.extractTimeSeries("rsds",
                                 initial_time=obs.time_bnds[0, 0],
                                 final_time=obs.time_bnds[-1, 1],
                                 lats=None if obs.spatial else obs.lat,
                                 lons=None if obs.spatial else obs.lon)
    up_mod = m.extractTimeSeries("rsus",
                                 initial_time=obs.time_bnds[0, 0],
                                 final_time=obs.time_bnds[-1, 1],
                                 lats=None if obs.spatial else obs.lat,
                                 lons=None if obs.spatial else obs.lon)
    dn_mod, up_mod, mod = _albedo(dn_mod, up_mod, self.variable, energy_threshold)
    # Make variables comparable
    obs, mod = il.MakeComparable(obs, mod,
                                 mask_ref=True,
                                 clip_ref=True,
                                 logstring="[%s][%s]" % (self.longname, m.name))
    dn_obs, dn_mod = il.MakeComparable(dn_obs, dn_mod,
                                       mask_ref=True,
                                       clip_ref=True,
                                       logstring="[%s][%s]" % (self.longname, m.name))
    up_obs, up_mod = il.MakeComparable(up_obs, up_mod,
                                       mask_ref=True,
                                       clip_ref=True,
                                       logstring="[%s][%s]" % (self.longname, m.name))
    # Compute the mean albedo
    dn_obs = dn_obs.integrateInTime(mean=True)
    up_obs = up_obs.integrateInTime(mean=True)
    np.seterr(over='ignore', under='ignore')
    obs_timeint = np.ma.masked_array(up_obs.data / dn_obs.data,
                                     mask=(dn_obs.data.mask + up_obs.data.mask))
    np.seterr(over='warn', under='warn')
    obs_timeint = Variable(name=self.variable,
                           unit="1",
                           data=obs_timeint,
                           lat=dn_obs.lat, lat_bnds=dn_obs.lat_bnds,
                           lon=dn_obs.lon, lon_bnds=dn_obs.lon_bnds)
    dn_mod = dn_mod.integrateInTime(mean=True)
    up_mod = up_mod.integrateInTime(mean=True)
    np.seterr(over='ignore', under='ignore')
    mod_timeint = np.ma.masked_array(up_mod.data / dn_mod.data,
                                     mask=(dn_mod.data.mask + up_mod.data.mask))
    np.seterr(over='warn', under='warn')
    mod_timeint = Variable(name=self.variable,
                           unit="1",
                           data=mod_timeint,
                           lat=dn_mod.lat, lat_bnds=dn_mod.lat_bnds,
                           lon=dn_mod.lon, lon_bnds=dn_mod.lon_bnds)
    return obs, mod, obs_timeint, mod_timeint