def main(index, diagnostics=False, anomalies="None", grid="ADW"):
    """
    Plot zonal (latitude band) data coverage through time for one index.

    For each month (and the annual value) a Hovmoeller-style plot is made of
    the percentage of land grid boxes in each latitude band that hold data.

    :param str index: which index to run
    :param bool diagnostics: output diagnostic information
    :param str anomalies: run code on anomalies or climatology rather than raw data
    :param str grid: gridding type ADW/CAM
    """
    # get details of index
    index = utils.INDICES[index]

    # allow for option of running through each month
    # NOTE(review): nmonths/timescale are currently unused below — the loop
    # runs over all of month_names and relies on the KeyError handler to
    # skip months absent from annual-only files.
    if index.name in utils.MONTHLY_INDICES:
        nmonths = 13
        timescale = "MON"
    else:
        nmonths = 1
        timescale = "ANN"

    # setting up colours
    cmap = plt.cm.viridis
    bounds = np.arange(0, 110, 10)
    norm = mpl.cm.colors.BoundaryNorm(bounds, cmap.N)

    # plot all month versions at once
    for month, name in enumerate(month_names):
        if diagnostics:
            print(name)

        # set up the figure
        fig = plt.figure(figsize=(8, 6))
        plt.clf()
        ax = fig.add_axes([0.1, 0.1, 0.85, 0.85])

        filename = os.path.join(utils.FINALROOT, utils.make_filenames(index=index.name, grid=grid, anomalies=anomalies, extra="", month_index=""))

        try:
            ncfile = ncdf.Dataset(filename, 'r')

            timevar = ncfile.variables['time']  # array of YYYMMDD
            latvar = ncfile.variables['latitude']  # array of lats
            lonvar = ncfile.variables['longitude']  # array of lons

            annualvar = ncfile.variables[month_names[month]]  # array of arrays

            if anomalies == "anomalies":
                # to make into actuals, add climatology to the anomalies
                clim_filename = os.path.join(utils.FINALROOT, utils.make_filenames(index=index.name, grid=grid, anomalies="climatology", extra="", month_index=""))
                clim_file = ncdf.Dataset(clim_filename, 'r')
                climvar = clim_file.variables[month_names[month]]

        except RuntimeError:
            print("File not found: {}".format(filename))
            # BUGFIX: previously fell through to use undefined/stale variables
            continue
        except IOError:
            print("File not found: {}".format(filename))
            # BUGFIX: previously fell through to use undefined/stale variables
            continue
        except KeyError:
            # this month not present in the file (e.g. annual-only index)
            continue

        # extract the information
        times = timevar[:]
        lats = latvar[:]
        lons = lonvar[:]

        # get land sea mask (grid is the same for all months, so only once)
        if month == 0:
            lsm = utils.get_land_sea_mask(lats, lons, floor=False)
            # number of land boxes per latitude band, as a column vector
            n_land_boxes = np.sum(lsm.astype(int), axis=1).astype(float)
            n_land_boxes = np.ma.expand_dims(n_land_boxes, axis=1)

        # if anomalies from HadEX3, then need to add onto climatology
        if anomalies == "anomalies":
            annual_data = annualvar[:] + climvar[:]
        else:
            annual_data = annualvar[:]

        # go through each year and count up filled boxes per latitude band
        zonal_boxes = np.zeros(annual_data.shape[:2][::-1])
        for y, year_data in enumerate(annual_data):
            zonal_boxes[:, y] = np.ma.count(year_data, axis=1).astype(float)
        # mask out bands/years with no data at all
        zonal_boxes = np.ma.masked_where(zonal_boxes == 0, zonal_boxes)

        # percentage of land boxes in each band which are filled
        normalised_boxes = 100. * zonal_boxes / np.tile(n_land_boxes, [1, annual_data.shape[0]])

        # box edges for pcolormesh (one more edge than centres)
        newtimes, newlats = np.meshgrid(np.append(times, times[-1]+1), utils.box_edge_lats)

        mesh = plt.pcolormesh(newtimes, newlats, normalised_boxes, cmap=cmap, norm=norm)

        cb = plt.colorbar(mesh, orientation='horizontal', pad=0.07, fraction=0.05, \
                          aspect=30, ticks=bounds[1:-1], label="%", drawedges=True)
        cb.set_ticklabels(["{:g}".format(b) for b in bounds[1:-1]])
        cb.outline.set_linewidth(2)
        cb.dividers.set_color('k')
        cb.dividers.set_linewidth(2)

        # latitude axis with N/S labels
        plt.ylim([-90, 90])
        plt.yticks(np.arange(-90, 120, 30))
        ax.yaxis.set_ticklabels(["{}N".format(i) if i > 0 else "{}S".format(abs(i)) if i < 0 else "{}".format(i) for i in np.arange(-90, 120, 30)])
        plt.ylabel("Latitude")
        plt.title("{} - {}".format(index.name, name))

        outname = putils.make_filenames("latitude_coverage", index=index.name, grid=grid, anomalies=anomalies, month=name)
        plt.savefig("{}/{}/{}".format(utils.PLOTLOCS, index.name, outname))
        plt.close()

    return  # main
def main(index, comparison=False, diagnostics=False, anomalies="None", grid="ADW", normalise=False, matched=False, uncertainties=False):
    """
    Plot area-averaged timeseries of one index, optionally comparing HadEX3
    against HadEX, HadEX2, GHCNDEX and ERA5, plus coverage diagnostics.

    :param str index: which index to run
    :param bool comparison: compare against other datasets
    :param bool diagnostics: output diagnostic information
    :param str anomalies: run code on anomalies or climatology rather than raw data
    :param str grid: gridding type ADW/CAM
    :param bool normalise: plot as anomalies from e.g. 1961-90
    :param bool matched: match HadEX3 to HadEX2 coverage and plot timeseries
    :param bool uncertainties: plot ERA5 derived coverage uncertainties
    """
    # get details of index
    index = utils.INDICES[index]

    # allow for option of running through each month
    if index.name in utils.MONTHLY_INDICES:
        nmonths = 13
        timescale = "MON"
    else:
        nmonths = 1
        timescale = "ANN"

    # currently not stored - but ready just in case
    outfilename = "{}/timeseries_{}.dat".format(TSOUTLOCATION, index.name)
    if os.path.exists(outfilename):
        os.remove(outfilename)
    coveragefilename = "{}/timeseries_{}_boxes.dat".format(TSOUTLOCATION, index.name)
    if os.path.exists(coveragefilename):
        os.remove(coveragefilename)

    # plot all month versions at once
    for month, name in enumerate(month_names[:nmonths]):
        if diagnostics:
            print(name)

        # set up the figure
        fig = plt.figure(figsize=(8, 5.5))
        plt.clf()
        ax = fig.add_axes([0.15, 0.2, 0.82, 0.75])

        # number of valid boxes
        timeseries = {}
        valid_boxes = {}
        land_boxes = {}
        # BUGFIX: was "e5cube = 0" — a typo for e5_cube, so the later
        # "if e5_cube != 0" test relied on catching UnboundLocalError.
        e5_cube = 0

        # spin through all comparison datasets
        for ds, dataset in enumerate(DATASETS):
            incube = 0
            print(dataset)

            if not comparison:
                # but if not doing comparisons, skip (just run HadEX3)
                if dataset != "HadEX3":
                    continue

            if dataset == "HadEX":
                # HadEX is annual only, stored as a plain-text grid dump
                if name != "Ann":
                    continue
                else:
                    try:
                        if index.name == "R95pTOT":
                            filename = "{}/HadEX_{}_1951-2003.txt".format(utils.HADEX_LOC, "R95pT")
                        else:
                            filename = "{}/HadEX_{}_1951-2003.txt".format(utils.HADEX_LOC, index.name)

                        all_data, years = [], []
                        data = np.zeros((72, 96))
                        latc = -1
                        with open(filename, "r") as infile:
                            for lc, line in enumerate(infile):
                                if lc == 0:
                                    # skip header
                                    continue
                                # read each line
                                line = line.split()
                                if len(line) < 10:
                                    # short line = year marker separating grids
                                    years += [int(line[0])]
                                    if line[0] != "1951":
                                        all_data += [data]
                                    # reset storage
                                    data = np.zeros((72, 96))
                                    latc = -1
                                else:
                                    latc += 1
                                    data[latc, :] = line
                        # add final year
                        all_data += [data]

                        all_data = np.array(all_data).astype(float)
                        all_data = np.ma.masked_where(all_data == -999.99, all_data)
                        if index.name == "R95pTOT":
                            all_data *= 100

                        latitudes = np.arange(90, -90, -2.5)
                        longitudes = np.arange(-180, 180, 3.75)

                        incube = make_iris_cube_3d(all_data, years, "unknown", latitudes, longitudes, name, index.units)
                        incube = fix_time_coord(incube)
                    except RuntimeError:
                        print("File not found: {}".format(filename))
                        continue
                    except IOError:
                        print("File not found: {}".format(filename))
                        continue
                    # except IndexError:
                    #     print("Month not available in {}".format(filename))
                    #     continue

            elif dataset == "HadEX2":
                filename = "{}/HadEX2_{}_1901-2010_h2_mask_m4.nc".format(utils.HADEX2_LOC, index.name)
                try:
                    cubelist = iris.load(filename)
                    names = np.array([c.var_name for c in cubelist])
                    incube = cubelist[np.where(names == name)[0][0]]
                    incube.coord("lat").standard_name = "latitude"
                    incube.coord("lon").standard_name = "longitude"
                    incube = fix_time_coord(incube)
                except RuntimeError:
                    print("File not found: {}".format(filename))
                    continue
                except IOError:
                    print("File not found: {}".format(filename))
                    continue
                except IndexError:
                    print("Month not available in {}".format(filename))
                    continue

            elif dataset == "HadEX3":
                filename = os.path.join(utils.FINALROOT, utils.make_filenames(index=index.name, grid=grid, anomalies=anomalies, extra="", month_index=""))
                try:
                    cubelist = iris.load(filename)
                    names = np.array([c.var_name for c in cubelist])
                    incube = cubelist[np.where(names == name)[0][0]]
                    incube.coord("grid_latitude").standard_name = "latitude"
                    incube.coord("grid_longitude").standard_name = "longitude"
                    incube = fix_time_coord(incube)
                    # keep an untouched copy for coverage-uncertainty work
                    h3_cube = copy.deepcopy(incube)

                    if anomalies == "anomalies":
                        # to make into actuals, add climatology to the anomalies
                        clim_filename = os.path.join(utils.FINALROOT, utils.make_filenames(index=index.name, grid=grid, anomalies="climatology", extra="", month_index=""))
                        clim_cubelist = iris.load(clim_filename)
                        # BUGFIX: was built from "cubelist" (the data file),
                        # selecting from the wrong list of variable names.
                        names = np.array([c.var_name for c in clim_cubelist])
                        clim_cube = clim_cubelist[np.where(names == name)[0][0]]
                        try:
                            clim_cube.coord("grid_latitude").standard_name = "latitude"
                            clim_cube.coord("grid_longitude").standard_name = "longitude"
                        except iris.exceptions.CoordinateNotFoundError:
                            pass
                        if clim_cube.coord("time").units.origin != "days since 1901-01-01 00:00":
                            clim_cube = fix_time_coord(clim_cube)
                except RuntimeError:
                    print("File not found: {}".format(filename))
                    continue
                except IOError:
                    print("File not found: {}".format(filename))
                    continue
                except IndexError:
                    print("Month not available in {}".format(filename))
                    continue

            elif dataset == "GHCNDEX":
                filename = "{}/{}/GHCND_{}_1951-2019_RegularGrid_global_2.5x2.5deg_LSmask.nc".format(utils.GHCNDEX_LOC, GHCNDEX_VERSION, index.name)
                try:
                    cubelist = iris.load(filename)
                    names = np.array([c.var_name for c in cubelist])
                    incube = cubelist[np.where(names == name)[0][0]]
                    incube = fix_time_coord(incube)
                except RuntimeError:
                    print("File not found: {}".format(filename))
                    continue
                except IOError:
                    print("File not found: {}".format(filename))
                    continue
                except IndexError:
                    print("Month not available in {}".format(filename))
                    continue

            elif dataset == "ERA5":
                filename = "{}/ERA5_{}_1979-2019.nc".format(utils.ERA5_LOC, index.name)
                try:
                    cubelist = iris.load(filename)
                    names = np.array([c.var_name for c in cubelist])
                    incube = cubelist[np.where(names == name)[0][0]]

                    # match latitude order
                    incube.coord('latitude').points = incube.coord('latitude').points[::-1]
                    incube.data = incube.data[:, ::-1, :]

                    # match to H3 grid
                    try:
                        h3_cube.coord("longitude").guess_bounds()
                        h3_cube.coord("latitude").guess_bounds()
                    except ValueError:
                        # already has bounds
                        pass
                    try:
                        incube.coord("longitude").guess_bounds()
                        incube.coord("latitude").guess_bounds()
                    except ValueError:
                        # already has bounds
                        pass
                    incube = incube.regrid(h3_cube, iris.analysis.Linear(extrapolation_mode="mask"))
                    e5_cube = copy.deepcopy(incube)
                except RuntimeError:
                    print("File not found: {}".format(filename))
                    e5_cube = False
                    continue
                except IOError:
                    print("File not found: {}".format(filename))
                    e5_cube = False
                    continue
                except IndexError:
                    print("Month not available in {}".format(filename))
                    e5_cube = False
                    continue
                print("need to match grids")

            # process data ready for plotting
            # if anomalies from HadEX3, then need to add onto climatology
            if anomalies == "anomalies" and dataset == "HadEX3":
                incube.data = incube.data + clim_cube.data

            # fix percent -> days issue for these four
            if index.name in ["TX90p", "TN90p", "TX10p", "TN10p"]:
                incube.data = incube.data * (DAYSPERYEAR / 100.)
                index.units = "days"

            # restrict to times of interest
            time_constraint = iris.Constraint(time=lambda cell: utils.STARTYEAR <= cell <= utils.ENDYEAR)
            incube = incube.extract(time_constraint)

            if matched and (utils.DELTALON == 3.75 and utils.DELTALAT == 2.5):
                # if matching coverage, retain hadex2
                if dataset == "HadEX2":
                    hadex2_mask = incube.data.mask[:]
                if dataset == "HadEX3":
                    # new cube to hold
                    matched_hadex3 = incube.data[:hadex2_mask.shape[0]]

            # find which boxes have x% of years with data - default is 90%
            if dataset == "GHCNDEX":
                completeness_mask = utils.CompletenessCheckGrid(incube.data, utils.ENDYEAR.year, 1951)
            elif dataset == "HadEX":
                completeness_mask = utils.CompletenessCheckGrid(incube.data, 2003, 1951)
            elif dataset == "HadEX2":
                completeness_mask = utils.CompletenessCheckGrid(incube.data, 2010, utils.STARTYEAR.year)
            elif dataset == "HadEX3":
                completeness_mask = utils.CompletenessCheckGrid(incube.data, utils.ENDYEAR.year, utils.STARTYEAR.year)
            elif dataset == "ERA5":
                completeness_mask = utils.CompletenessCheckGrid(incube.data, utils.ENDYEAR.year, 1979)

            # extract number of boxes before applying temporal completeness
            nboxes = np.zeros(incube.data.shape[0])
            for year in range(incube.data.shape[0]):
                nboxes[year] = np.ma.count(incube.data[year])

            # apply completeness mask, and obtain box counts
            nboxes_completeness, completeness_masked_data = MaskData(incube.data, incube.data.fill_value, completeness_mask)
            incube.data = completeness_masked_data

            if normalise:
                # apply normalisation!
                clim_constraint = iris.Constraint(time=lambda cell: dt.datetime(utils.REF_START, 1, 1) <= cell <= dt.datetime(utils.REF_END, 1, 1))
                norm_cube = incube.extract(clim_constraint)
                norm = norm_cube.collapsed(['time'], iris.analysis.MEAN)
                incube = incube - norm

            # weights for the region
            weights = iris.analysis.cartography.cosine_latitude_weights(incube)
            ts = incube.collapsed(['longitude', 'latitude'], iris.analysis.MEAN, weights=weights)

            # only plot where there are non-missing values
            coord = ts.coord("time")
            years = np.array([c.year for c in coord.units.num2date(coord.points)])

            if dataset == "ERA5":
                if PLOTERA:
                    # only plot ERA5 if selected
                    plt.plot(years, ts.data, c=COLOURS[dataset], ls=LS[dataset], lw=2, label=LABELS[dataset], zorder=ZORDER[dataset])
            else:
                plt.plot(years, ts.data, c=COLOURS[dataset], ls=LS[dataset], lw=2, label=LABELS[dataset], zorder=ZORDER[dataset])

            if dataset == "HadEX3":
                h3_ts = ts
                h3_years = years

            # save
            # FIX: np.product alias removed in NumPy 2.0 — use np.prod
            max_boxes = np.prod(incube.data.shape[1:])
            if diagnostics:
                print("{}: total grid boxes = {}, max filled = {}".format(dataset, max_boxes, int(np.max(nboxes))))
            valid_boxes[dataset] = [years, 100. * nboxes / max_boxes]  # scale by total number as lat/lon resolution will be different

            # store this dataset's series aligned onto the HadEX3 years
            store_ts = np.ones(h3_ts.shape) * utils.HADEX_MDI
            match = np.in1d(h3_years, years)
            match_back = np.in1d(years, h3_years)
            store_ts[match] = ts.data[match_back]
            timeseries[dataset] = [h3_years, store_ts]

            # get land sea mask
            if month == 0:
                lsm = utils.get_land_sea_mask(incube.coord("latitude").points, incube.coord("longitude").points, floor=False)
                n_land_boxes = len(np.where(lsm == False)[0])
            land_boxes[dataset] = [years, 100. * nboxes / n_land_boxes]  # scale by number of land boxes

        # once all lines plotted
        # get coverage error for HadEX3
        if uncertainties:
            try:
                # test to see if there was an actual cube from this loop (else using stale cube from previous)
                if e5_cube != 0:
                    coverage_offset, coverage_stdev = putils.compute_coverage_error(h3_cube, e5_cube)
                    coverage_stdev *= 2.  # 90%, 2s.d.
                    plt.fill_between(h3_years, h3_ts.data - coverage_stdev, h3_ts.data + coverage_stdev, color='0.5', label="ERA5 coverage uncertainty")
            except UnboundLocalError:
                # e5_cube not referenced - i.e. no ERA5 data for this index?
                pass

        # then tidy up
        putils.SortAxesLabels(plt, ax, index, utils.STARTYEAR.year, utils.ENDYEAR.year, month)
        plt.xlim([1900, 2020])

        if normalise:
            # only plot zero line if done as anomalies
            plt.axhline(0, color='k', ls='--')

        # plot legend below figure
        leg = plt.legend(loc='lower center', ncol=3, bbox_to_anchor=(0.46, -0.31), frameon=False, title='', prop={'size': utils.FONTSIZE}, labelspacing=0.15, columnspacing=0.5)

        # extra information
        if utils.WATERMARK:
            watermarkstring = "{} {}".format(os.path.join("/".join(os.getcwd().split('/')[4:]), os.path.basename(__file__)), dt.datetime.strftime(dt.datetime.now(), "%d-%b-%Y %H:%M"))
            plt.figtext(0.01, 0.01, watermarkstring, size=6)
        plt.figtext(0.03, 0.95, "(c)", size=utils.FONTSIZE)

        ax = putils.Rstylee(ax)

        # and save
        if uncertainties:
            outname = putils.make_filenames("timeseries_uncertainties", index=index.name, grid=grid, anomalies=anomalies, month=name)
        else:
            outname = putils.make_filenames("timeseries", index=index.name, grid=grid, anomalies=anomalies, month=name)
        plt.savefig("{}/{}/{}".format(utils.PLOTLOCS, index.name, outname), dpi=300)
        plt.close()

        # output data file
        if comparison and name == "Ann":
            with open(os.path.join(utils.INFILELOCS, "{}_timeseries.dat".format(index.name)), "w") as outfile:
                outfile.write("{:4s} {:7s} {:7s} {:7s} {:7s}\n".format("Year", "HadEX3", "HadEX2", "HadEX", "GHCNDEX"))
                years = timeseries["HadEX3"][0]
                for y, year in enumerate(years):
                    items = [year]
                    for key in ["HadEX3", "HadEX2", "HadEX", "GHCNDEX"]:
                        if key in timeseries.keys():
                            items += [timeseries[key][1][y]]
                        else:
                            items += [utils.HADEX_MDI]
                    outfile.write("{:4d} {:7.2f} {:7.2f} {:7.2f} {:7.2f}\n".format(items[0], items[1], items[2], items[3], items[4]))

        #*****************
        # Plot coverage - how many grid boxes have values (scaled by total number)
        fig = plt.figure(figsize=(8, 5.5))
        plt.clf()
        ax = fig.add_axes([0.15, 0.2, 0.82, 0.75])

        PlotCoverage(plt, ax, valid_boxes, COLOURS, utils.STARTYEAR.year, utils.ENDYEAR.year, index.name, month, '', ncol=2)
        plt.figtext(0.03, 0.95, "(d)", size=utils.FONTSIZE)
        plt.xlim([1900, 2020])
        ax = putils.Rstylee(ax)

        outname = putils.make_filenames("timeseries_coverage", index=index.name, grid=grid, anomalies=anomalies, month=name)
        plt.savefig("{}/{}/{}".format(utils.PLOTLOCS, index.name, outname), dpi=300)
        plt.close("all")

        #*****************
        # plot coverage - how many grid boxes have values (scaled by land fraction)
        fig = plt.figure(figsize=(8, 5.5))
        plt.clf()
        ax = fig.add_axes([0.15, 0.2, 0.82, 0.75])

        PlotCoverage(plt, ax, land_boxes, COLOURS, utils.STARTYEAR.year, utils.ENDYEAR.year, index.name, month, '', ncol=2, land=True)
        plt.figtext(0.03, 0.95, "(d)", size=utils.FONTSIZE)
        plt.xlim([1900, 2020])
        ax = putils.Rstylee(ax)

        outname = putils.make_filenames("timeseries_land_coverage", index=index.name, grid=grid, anomalies=anomalies, month=name)
        plt.savefig("{}/{}/{}".format(utils.PLOTLOCS, index.name, outname), dpi=300)
        plt.close("all")

    return  # main
def main(index, diagnostics=False, anomalies="None", grid="ADW"):
    """
    Plot maps of linear trends

    :param str index: which index to run
    :param bool diagnostics: output diagnostic information
    :param str anomalies: run code on anomalies or climatology rather than raw data
    :param str grid: gridding type ADW/CAM
    """
    # get details of index
    index = utils.INDICES[index]

    # allow for option of running through each month
    if index.name in utils.MONTHLY_INDICES:
        nmonths = 13
    else:
        nmonths = 1

    # sort the colour maps
    RdYlBu, RdYlBu_r = putils.adjust_RdYlBu()
    BrBG, BrBG_r = putils.make_BrBG()

    # load all monthly/annual cubes for this index from the final gridded file
    cube_list = iris.load(os.path.join(utils.FINALROOT, utils.make_filenames(index=index.name, grid=grid, anomalies=anomalies, extra="", month_index="")))
    names = np.array([cube.var_name for cube in cube_list])

    #*************
    # plot trend map
    for month in range(nmonths):
        # Select colour scale (bounds) and colour map per index.
        # Four cases: annual vs monthly, and trend (non-climatology) vs
        # climatology.  The first/last bound values (+/-100 or +/-10000)
        # act as catch-all bins for extreme values.
        if month == 0:
            # annual
            if anomalies != "climatology":
                # trend colour scales (units per decade)
                if index.name in ["TX90p", "TN90p", "SU", "TR", "GSL"]:
                    bounds = [-100, -8, -4, -2, -1, 0, 1, 2, 4, 8, 100]
                    cmap = RdYlBu_r
                elif index.name in ["DTR", "ETR"]:
                    bounds = [-100, -1, -0.75, -0.5, -0.25, 0, 0.25, 0.5, 0.75, 1, 100]
                    cmap = RdYlBu_r
                elif index.name in ["WSDI"]:
                    bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
                    cmap = RdYlBu_r
                elif index.name in ["TX10p", "TN10p", "FD", "ID"]:
                    bounds = [-100, -8, -4, -2, -1, 0, 1, 2, 4, 8, 100]
                    cmap = RdYlBu
                elif index.name in ["CSDI"]:
                    bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
                    cmap = RdYlBu
                elif index.name in ["TXn", "TNn"]:
                    bounds = [-100, -2, -1, -0.5, -0.25, 0, 0.25, 0.5, 1, 2, 100]
                    cmap = RdYlBu_r
                elif index.name in ["TXx", "TNx"]:
                    bounds = [-100, -1, -0.5, -0.25, -0.1, 0, 0.1, 0.25, 0.5, 1, 100]
                    cmap = RdYlBu_r
                elif index.name in ["Rx1day"]:
                    bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
                    cmap = BrBG
                elif index.name in ["Rx5day"]:
                    bounds = [-100, -4, -3, -2, -1, 0, 1, 2, 3, 4, 100]
                    cmap = BrBG
                elif index.name in ["PRCPTOT"]:
                    bounds = [-100, -20, -10, -5, -2, 0, 2, 5, 10, 20, 100]
                    cmap = BrBG
                elif index.name in ["Rnnmm", "R95p", "R99p"]:
                    bounds = [-100, -8, -4, -2, -1, 0, 1, 2, 4, 8, 100]
                    cmap = BrBG
                elif index.name in ["R95pTOT"]:
                    bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
                    cmap = BrBG
                elif index.name in ["R99pTOT"]:
                    bounds = [-100, -1, -0.5, -0.25, -0.1, 0, 0.1, 0.25, 0.5, 1, 100]
                    cmap = BrBG
                elif index.name in ["R10mm"]:
                    bounds = [-100, -3, -1.5, -0.75, -0.25, 0, 0.25, 0.75, 1.5, 3, 100]
                    cmap = BrBG
                elif index.name in ["R20mm"]:
                    bounds = [-100, -2, -1, -0.5, -0.25, 0, 0.25, 0.5, 1, 2, 100]
                    cmap = BrBG
                elif index.name in ["CWD"]:
                    bounds = [-100, -1, -0.5, -0.2, -0.1, 0, 0.1, 0.2, 0.5, 1, 100]
                    cmap = BrBG
                elif index.name in ["SDII"]:
                    bounds = [-100, -0.75, -0.5, -0.25, -0.1, 0, 0.1, 0.25, 0.5, 0.75, 100]
                    cmap = BrBG
                elif index.name in ["CDD"]:
                    bounds = [-100, -8, -4, -2, -1, 0, 1, 2, 4, 8, 100]
                    cmap = BrBG_r
                elif index.name in ["CDDcold18"]:
                    bounds = [-10000, -100, -50, -20, -10, 0, 10, 20, 50, 100, 10000]
                    cmap = RdYlBu_r
                elif index.name in ["HDDheat18"]:
                    bounds = [-10000, -800, -400, -200, -100, 0, 100, 200, 400, 800, 10000]
                    cmap = RdYlBu
                elif index.name in ["GDDgrow10"]:
                    bounds = [-10000, -400, -200, -100, -50, 0, 50, 100, 200, 400, 10000]
                    cmap = RdYlBu
                elif index.name in ["WSDI3"]:
                    bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
                    cmap = RdYlBu_r
                elif index.name in ["CSDI3"]:
                    bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
                    cmap = RdYlBu
                elif index.name in ["TNlt2", "TNltm2", "TNltm20", "TMlt10", "TMlt5"]:
                    bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
                    cmap = RdYlBu
                elif index.name in ["TXge30", "TXge35", "TMge5", "TMge10", "TXge50p"]:
                    bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
                    cmap = RdYlBu_r
                elif index.name in ["TNm", "TXm", "TMm", "TXTN"]:
                    bounds = [-100, -1, -0.5, -0.2, -0.1, 0, 0.1, 0.2, 0.5, 1, 100]
                    cmap = RdYlBu_r
                elif index.name in ["TXbTNb"]:
                    bounds = [-100, -1, -0.5, -0.2, -0.1, 0, 0.1, 0.2, 0.5, 1, 100]
                    cmap = RdYlBu
                elif index.name in ["RXday"]:
                    bounds = [-100, -8, -4, -2, -1, 0, 1, 2, 4, 8, 100]
                    cmap = BrBG
                else:
                    # fallback for any index not listed above
                    bounds = [-100, -8, -4, -2, -1, 0, 1, 2, 4, 8, 100]
                    cmap = RdYlBu_r
            else:
                # climatology colour scales (absolute values, annual)
                if index.name in ["TX90p", "TN90p", "WSDI", "SU", "TR", "GSL", "DTR", "ETR"]:
                    bounds = np.arange(0, 60, 5)
                    cmap = plt.cm.YlOrRd
                elif index.name in ["TX10p", "TN10p", "CSDI", "FD", "ID"]:
                    bounds = np.arange(0, 60, 5)
                    cmap = plt.cm.YlOrRd_r
                elif index.name in ["TXx", "TNx"]:
                    bounds = np.arange(-10, 40, 5)
                    cmap = plt.cm.YlOrRd
                elif index.name in ["TXn", "TNn"]:
                    bounds = np.arange(-30, 10, 5)
                    cmap = plt.cm.YlOrRd
                elif index.name in ["Rx1day", "Rx5day"]:
                    bounds = np.arange(0, 100, 10)
                    cmap = plt.cm.YlGnBu
                elif index.name in ["PRCPTOT"]:
                    bounds = np.arange(0, 1000, 100)
                    cmap = plt.cm.YlGnBu
                elif index.name in ["CWD", "R20mm", "R10mm", "Rnnmm", "R95pTOT", "R99pTOT", "R95p", "R99p", "SDII"]:
                    bounds = np.arange(0, 200, 20)
                    cmap = plt.cm.YlGnBu
                elif index.name in ["CDD"]:
                    bounds = np.arange(0, 200, 20)
                    cmap = plt.cm.YlGnBu_r
                else:
                    bounds = np.arange(0, 60, 5)
                    cmap = plt.cm.YlOrRd
        else:
            # monthly
            if anomalies != "climatology":
                # trend colour scales (units per decade, monthly)
                if index.name in ["TX90p", "TN90p", "SU", "TR", "GSL", "DTR", "ETR"]:
                    bounds = [-100, -8, -4, -2, -1, 0, 1, 2, 4, 8, 100]
                    cmap = RdYlBu_r
                elif index.name in ["WSDI"]:
                    bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
                    cmap = RdYlBu_r
                elif index.name in ["TX10p", "TN10p", "FD", "ID"]:
                    bounds = [-100, -8, -4, -2, -1, 0, 1, 2, 4, 8, 100]
                    cmap = RdYlBu
                elif index.name in ["CSDI"]:
                    bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
                    cmap = RdYlBu
                elif index.name in ["TXx", "TNx", "TXn", "TNn"]:
                    bounds = [-100, -2, -1, -0.5, -0.25, 0, 0.25, 0.5, 1, 2, 100]
                    cmap = RdYlBu_r
                elif index.name in ["Rx1day", "Rx5day"]:
                    bounds = [-100, -4, -3, -2, -1, 0, 1, 2, 3, 4, 100]
                    cmap = BrBG
                elif index.name in ["PRCPTOT"]:
                    bounds = [-100, -10, -5, -2, -1, 0, 1, 2, 5, 10, 100]
                    cmap = BrBG
                elif index.name in ["Rnnmm", "R95pTOT", "R99pTOT", "R95p", "R99p"]:
                    bounds = [-100, -8, -4, -2, -1, 0, 1, 2, 4, 8, 100]
                    cmap = BrBG
                elif index.name in ["R20mm", "R10mm"]:
                    bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
                    cmap = BrBG
                elif index.name in ["CWD"]:
                    bounds = [-100, -2, -1, -0.5, -0.25, 0, 0.25, 0.5, 1, 2, 100]
                    cmap = BrBG
                elif index.name in ["SDII"]:
                    bounds = [-100, -1, -0.5, -0.2, -0.1, 0, 0.1, 0.2, 0.5, 1, 100]
                    cmap = BrBG
                elif index.name in ["CDD"]:
                    bounds = [-100, -4, -3, -2, -1, 0, 1, 2, 3, 4, 100]
                    cmap = BrBG_r
                elif index.name in ["CDDcold18"]:
                    bounds = [-100, -20, -15, -10, -5, 0, 5, 10, 15, 20, 100]
                    cmap = RdYlBu_r
                elif index.name in ["HDDheat18"]:
                    bounds = [-10000, -800, -400, -200, -100, 0, 100, 200, 400, 800, 10000]
                    cmap = RdYlBu
                elif index.name in ["GDDgrow10"]:
                    bounds = [-10000, -400, -200, -100, -50, 0, 50, 100, 200, 400, 10000]
                    cmap = RdYlBu
                elif index.name in ["WSDI3"]:
                    bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
                    cmap = RdYlBu_r
                elif index.name in ["CSDI3"]:
                    bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
                    cmap = RdYlBu
                elif index.name in ["TNlt2", "TNltm2", "TNltm20", "TMlt10", "TMlt5"]:
                    bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
                    cmap = RdYlBu
                elif index.name in ["TXge30", "TXge35", "TMge5", "TMge10", "TXge50p"]:
                    bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
                    cmap = RdYlBu_r
                elif index.name in ["TNm", "TXm", "TMm", "TXTN"]:
                    bounds = [-100, -1, -0.5, -0.2, -0.1, 0, 0.1, 0.2, 0.5, 1, 100]
                    cmap = RdYlBu_r
                elif index.name in ["TXbTNb"]:
                    bounds = [-100, -1, -0.5, -0.2, -0.1, 0, 0.1, 0.2, 0.5, 1, 100]
                    cmap = RdYlBu
                elif index.name in ["RXday"]:
                    bounds = [-100, -8, -4, -2, -1, 0, 1, 2, 4, 8, 100]
                    cmap = BrBG
                elif index.name in ["24month_SPI", "12month_SPI", "6month_SPI", "3month_SPI", "24month_SPEI", "12month_SPEI", "6month_SPEI", "3month_SPEI"]:
                    bounds = [-100, -1, -0.5, -0.2, -0.1, 0, 0.1, 0.2, 0.5, 1, 100]
                    cmap = BrBG
                else:
                    # fallback for any index not listed above
                    bounds = [-100, -8, -4, -2, -1, 0, 1, 2, 4, 8, 100]
                    cmap = RdYlBu_r
            else:
                # climatology colour scales (absolute values, monthly)
                if index.name in ["TX90p", "TN90p", "WSDI", "SU", "TR", "GSL", "DTR", "ETR"]:
                    bounds = np.arange(0, 60, 5)
                    cmap = plt.cm.YlOrRd
                elif index.name in ["TX10p", "TN10p", "CSDI", "FD", "ID"]:
                    bounds = np.arange(0, 60, 5)
                    cmap = plt.cm.YlOrRd_r
                elif index.name in ["TXx", "TNx"]:
                    bounds = np.arange(-10, 40, 5)
                    cmap = plt.cm.YlOrRd
                elif index.name in ["TXn", "TNn"]:
                    bounds = np.arange(-30, 10, 5)
                    cmap = plt.cm.YlOrRd
                elif index.name in ["Rx1day", "Rx5day"]:
                    bounds = np.arange(0, 100, 10)
                    cmap = plt.cm.YlGnBu
                elif index.name in ["PRCPTOT"]:
                    bounds = np.arange(0, 1000, 100)
                    cmap = plt.cm.YlGnBu
                elif index.name in ["CWD", "R20mm", "R10mm", "Rnnmm", "R95pTOT", "R99pTOT", "R95p", "R99p", "SDII"]:
                    bounds = np.arange(0, 200, 20)
                    cmap = plt.cm.YlGnBu
                elif index.name in ["CDD"]:
                    bounds = np.arange(0, 200, 20)
                    cmap = plt.cm.YlGnBu_r
                else:
                    bounds = np.arange(0, 60, 5)
                    cmap = plt.cm.YlOrRd

        # pick out the cube for this month by variable name
        selected_cube, = np.where(names == month_names[month])
        cube = cube_list[selected_cube[0]]

        try:
            cube.coord('grid_latitude').guess_bounds()
            cube.coord('grid_longitude').guess_bounds()
        except ValueError:
            # bounds already present
            pass

        # fix percent -> days issue for these four
        # NOTE(review): 3.65 == 365/100, i.e. percent of a (non-leap) year
        # converted to days; sibling code uses DAYSPERYEAR/100. — confirm
        if index.name in ["TX90p", "TN90p", "TX10p", "TN10p"]:
            cube.data = cube.data * 3.65
            index.units = "days"

        # get recent period and trend
        if anomalies != "climatology":
            # restrict to [TREND_START, TREND_END] then fit the linear trend
            postYYYY = periodConstraint(cube, utils.TREND_START)
            cube = cube.extract(postYYYY)
            preYYYY = periodConstraint(cube, utils.TREND_END, lower=False)
            cube = cube.extract(preYYYY)
            trend_cube, sigma, significance = TrendingCalculation(cube, verbose=diagnostics)

        # LaTeX-style degree symbol for temperature units
        if index.units == "degrees_C":
            units = '$^{\circ}$' + "C"
        else:
            units = index.units

        if anomalies != "climatology":
            # trend map (per-decade units)
            outname = putils.make_filenames("trend", index=index.name, grid=grid, anomalies=anomalies, month=month_names[month])
            putils.plot_smooth_map_iris("{}/{}/{}".format(utils.PLOTLOCS, index.name, outname), trend_cube, cmap, bounds, "Trend ({}/10 year)".format(units), title="{} - {}, Linear Trend {}-{}".format(index.name, month_names[month], utils.TREND_START, utils.TREND_END), figtext="(a)", significance=significance)
        else:
            # climatology map — the single (first) time slice of the cube
            outname = putils.make_filenames("climatology", index=index.name, grid=grid, anomalies=anomalies, month=month_names[month])
            putils.plot_smooth_map_iris("{}/{}/{}".format(utils.PLOTLOCS, index.name, outname), cube[0], cmap, bounds, "{}".format(units), title="{} - {}, Climatology {}-{}".format(index.name, month_names[month], utils.CLIM_START.year, utils.CLIM_END.year), figtext="(a)")

    return  # main
def main(grid="ADW", index="TX90p", month_index=0, diagnostics=False, hadex2_adw=False, qc_flags="", anomalies="None"):
    """
    Grid the station index data (CAM or ADW) and write the results to netCDF.

    :param str grid: gridding type ADW/CAM
    :param str index: which index to run
    :param int month_index: which month to apply (0 = Annual, 1-12 for months)
    :param bool diagnostics: output diagnostic information
    :param bool hadex2_adw: use HadEX2-style ADW settings
    :param str qc_flags: which QC flags to process W, B, A, N, C, R
    :param str anomalies: run code on anomalies or climatology rather than raw data
    """
    # ensure correct timescale is selected
    # BUGFIX: previously tested "args.index" (the module-level CLI namespace)
    # instead of the "index" parameter, so direct calls consulted the wrong
    # value (or raised NameError when no CLI args existed).
    if index in utils.MONTHLY_INDICES:
        if month_index == 0:
            timescale = "ANN"
        else:
            timescale = "MON"
    else:
        if month_index == 0:
            timescale = "ANN"
        else:
            print("Monthly requested for annual-only index.\n Exiting")
            return

    # move this up one level eventually
    all_datasets = utils.get_input_datasets()

    # set up the data arrays
    if anomalies == "climatology":
        nyears = 1
    else:
        nyears = len(utils.REFERENCEYEARS)

    if grid == "CAM":
        GridData, GridStations = cam(all_datasets, index, timescale, nyears, qc_flags=qc_flags, month_index=month_index, diagnostics=diagnostics, anomalies=anomalies)
    elif grid == "ADW":
        GridData, GridStations, GridDLSStations = adw(all_datasets, index, timescale, nyears, qc_flags=qc_flags, month_index=month_index, diagnostics=diagnostics, anomalies=anomalies, hadex2_adw=hadex2_adw)

    if utils.DOLSM:
        nmonths = 1
        # apply LSM
        lsm = utils.get_land_sea_mask(utils.box_centre_lats, utils.box_centre_lons, floor=False)  # not taking only purely non-land boxes. Have to have sufficient amount of land!

        # resize to match the (year, month, lat, lon) data arrays
        lsm_sized = np.tile(np.tile(lsm, (1, 1, 1, 1)), (nyears, nmonths, 1, 1))

        GridData.mask = np.logical_or(GridData.mask, lsm_sized)
        GridStations.mask = np.logical_or(GridStations.mask, lsm_sized)
        if grid == "ADW":
            GridDLSStations.mask = np.logical_or(GridDLSStations.mask, lsm_sized)

    # correct fill_value
    GridData.fill_value = utils.HADEX_MDI
    GridStations.fill_value = utils.HADEX_MDI

    # append appropriate name to filename if anomalies or climatology

    # gridded index values
    filename = utils.make_filenames(index=index, grid=grid, anomalies=anomalies, month_index=month_index)
    ncdfp.netcdf_write(os.path.join(utils.OUTROOT, filename), index, GridData.filled(), utils.REFERENCEYEARS, utils.box_centre_lats, utils.box_centre_lons, single_month=month_index)

    # station counts per box
    filename = utils.make_filenames(index=index, grid=grid, anomalies=anomalies, extra="num", month_index=month_index)
    ncdfp.netcdf_write(os.path.join(utils.OUTROOT, filename), index, GridStations.filled(), utils.REFERENCEYEARS, utils.box_centre_lats, utils.box_centre_lons, single_month=month_index, station_count=True)

    # ADW only: station counts within the decorrelation length scale
    if grid == "ADW":
        filename = utils.make_filenames(index=index, grid=grid, anomalies=anomalies, extra="numdls", month_index=month_index)
        ncdfp.netcdf_write(os.path.join(utils.OUTROOT, filename), index, GridDLSStations.filled(), utils.REFERENCEYEARS, utils.box_centre_lats, utils.box_centre_lons, single_month=month_index, station_count=True)

    return  # main
def main(index, diagnostics=False, anomalies="None", grid="ADW"):
    """
    Plot maps of linear trends

    :param str index: which index to run
    :param bool diagnostics: output diagnostic information
    :param str anomalies: run code on anomalies or climatology rather than raw data
    :param str grid: gridding type ADW/CAM
    """

    # get details of index (look up the Index object from its name)
    index = utils.INDICES[index]

    # sort the colour maps
    RdYlBu, RdYlBu_r = putils.adjust_RdYlBu()
    BrBG, BrBG_r = putils.make_BrBG()

    # load all monthly cubes from the single "final" netCDF file;
    # cubes are matched to months below via their var_name
    cube_list = iris.load(os.path.join(utils.FINALROOT, utils.make_filenames(index=index.name, grid=grid, anomalies=anomalies, extra="", month_index="")))
    names = np.array([cube.var_name for cube in cube_list])

    #*************
    # plot trend map
    for season in SEASONS:

        three_month_data = []
        months = SEASON_DICT[season]

        for month in months:

            # choose colourbar bounds and colour map per index;
            # [-100, ..., 100] end caps act as catch-all bins for extremes
            if anomalies != "climatology":
                if index.name in ["TX90p", "TN90p", "SU", "TR"]:
                    bounds = [-100, -4, -3, -2, -1, 0, 1, 2, 3, 4, 100]
                    cmap = RdYlBu_r
                elif index.name in ["TX10p", "TN10p", "FD", "ID"]:
                    bounds = [-100, -4, -3, -2, -1, 0, 1, 2, 3, 4, 100]
                    cmap = RdYlBu
                elif index.name in ["DTR", "ETR"]:
                    bounds = [-100, -1, -0.75, -0.5, -0.25, 0, 0.25, 0.5, 0.75, 1, 100]
                    cmap = RdYlBu_r
                elif index.name in ["TXx", "TNx", "TXn", "TNn"]:
                    bounds = [-100, -2, -1, -0.5, -0.25, 0, 0.25, 0.5, 1, 2, 100]
                    cmap = RdYlBu_r
                elif index.name in ["Rx1day", "Rx5day"]:
                    bounds = [-100, -4, -3, -2, -1, 0, 1, 2, 3, 4, 100]
                    cmap = BrBG
                elif index.name in ["CWD"]:
                    bounds = [-100, -1, -0.5, -0.2, -0.1, 0, 0.1, 0.2, 0.5, 1, 100]
                    cmap = BrBG
                elif index.name in ["CDD", "PRCPTOT"]:
                    bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
                    cmap = BrBG_r
                elif index.name in ["R10mm", "R20mm"]:
                    bounds = [-100, -2, -1, -0.5, -0.25, 0, 0.25, 0.5, 1, 2, 100]
                    cmap = BrBG
                else:
                    bounds = [-100, -4, -3, -2, -1, 0, 1, 2, 3, 4, 100]
                    cmap = RdYlBu_r
            else:
                # climatology: absolute-value scales rather than trend scales
                if index.name in ["TX90p", "TN90p", "SU", "TR"]:
                    bounds = np.arange(0, 60, 5)
                    cmap = plt.cm.YlOrRd
                elif index.name in ["TX10p", "TN10p", "FD", "ID"]:
                    bounds = np.arange(0, 60, 5)
                    cmap = plt.cm.YlOrRd_r
                elif index.name in ["DTR", "ETR"]:
                    bounds = np.arange(0, 30, 5)
                    cmap = plt.cm.YlOrRd
                elif index.name in ["TXx", "TNx"]:
                    bounds = np.arange(-10, 40, 5)
                    cmap = plt.cm.YlOrRd
                elif index.name in ["TXn", "TNn"]:
                    bounds = np.arange(-30, 10, 5)
                    cmap = plt.cm.YlOrRd
                elif index.name in ["Rx1day", "Rx5day"]:
                    bounds = np.arange(0, 100, 10)
                    cmap = plt.cm.YlGnBu
                elif index.name in ["CWD"]:
                    bounds = np.arange(0, 100, 10)
                    cmap = BrBG
                elif index.name in ["CDD", "PRCPTOT", "R10mm", "R20mm"]:
                    bounds = np.arange(0, 100, 10)
                    cmap = BrBG_r
                else:
                    bounds = np.arange(0, 60, 5)
                    cmap = plt.cm.YlOrRd

            # pick the cube whose var_name matches this month
            selected_cube, = np.where(names == month)
            cube = cube_list[selected_cube[0]]

            try:
                cube.coord('grid_latitude').guess_bounds()
                cube.coord('grid_longitude').guess_bounds()
            except ValueError:
                # bounds already present
                pass

            # fix percent -> days issue for these four
            if index.name in ["TX90p", "TN90p", "TX10p", "TN10p"]:
                # NOTE(review): assumes month_names is ordered so that this
                # month's position is a valid 1-12 argument for monthrange —
                # confirm against the module-level month_names definition
                this_month, = np.where(month_names == month)
                ndays = np.array([calendar.monthrange(y, this_month[0])[1] for y in utils.REFERENCEYEARS])
                cube.data = cube.data * ndays[:, None, None] / 100.
                # mutates the shared Index object for subsequent plots
                index.units = "days"

            three_month_data += [cube.data]

        # extracted the three months of the season;
        # reuse the last month's cube as a template for coords/metadata
        season_cube = copy.deepcopy(cube)

        three_month_data = np.ma.array(three_month_data)

        # take appropriate seasonal value (sum/max/min/mean per index type)
        if index.name in ["TX90p", "TN90p", "TX10p", "TN10p"]:
            season_cube.data = np.ma.sum(three_month_data, axis=0)
        elif index.name in ["FD", "ID", "SU", "TR"]:
            season_cube.data = np.ma.sum(three_month_data, axis=0)
        elif index.name in ["TXx", "TNx", "ETR"]:
            season_cube.data = np.ma.max(three_month_data, axis=0)
        elif index.name in ["TXn", "TNn"]:
            season_cube.data = np.ma.min(three_month_data, axis=0)
        elif index.name in ["Rx1day", "Rx5day"]:
            season_cube.data = np.ma.max(three_month_data, axis=0)
        elif index.name in ["CDD", "CWD"]:
            season_cube.data = np.ma.max(three_month_data, axis=0)
        elif index.name in ["R10mm", "R20mm", "PRCPTOT"]:
            season_cube.data = np.ma.sum(three_month_data, axis=0)
        elif index.name in ["R95pTOT", "R99pTOT", "DTR"]:
            season_cube.data = np.ma.mean(three_month_data, axis=0)
        elif index.name in ["TNlt2", "TNltm2", "TNltm20", "TXge35", "TXge30", "TMlt10", "TMge10", "TMlt5", "TMge5"]:
            season_cube.data = np.ma.sum(three_month_data, axis=0)
        elif index.name in ["TMm", "TXm", "TNm", "TXgt50p"]:
            season_cube.data = np.ma.mean(three_month_data, axis=0)

        # mask if fewer than 2 months present
        nmonths_locs = np.ma.count(three_month_data, axis=0)
        season_cube.data = np.ma.masked_where(nmonths_locs < 2, season_cube.data)

        # get recent period and trend
        if anomalies != "climatology":
            postYYYY = periodConstraint(season_cube, utils.TREND_START)
            season_cube = season_cube.extract(postYYYY)
            trend_cube, sigma, significance = TrendingCalculation(season_cube)

        if anomalies != "climatology":
            # panel labels for the multi-panel publication figure
            figtext=""
            if index.name == "TX90p":
                if season == "DJF":
                    figtext = "(a)"
                elif season == "MAM":
                    figtext = "(b)"
                elif season == "JJA":
                    figtext = "(c)"
                elif season == "SON":
                    figtext = "(d)"
            elif index.name == "TN10p":
                if season == "DJF":
                    figtext = "(e)"
                elif season == "MAM":
                    figtext = "(f)"
                elif season == "JJA":
                    figtext = "(g)"
                elif season == "SON":
                    figtext = "(h)"

            outname = putils.make_filenames("trend", index=index.name, grid=grid, anomalies=anomalies, month=season)
            putils.plot_smooth_map_iris("{}/{}/{}".format(utils.PLOTLOCS, index.name, outname), trend_cube, cmap, bounds, "Trend ({}/10 year)".format(index.units), title="{} - {}, {}-2018".format(index.name, season, utils.TREND_START), figtext=figtext, significance=significance)
        else:
            # NOTE(review): climatology branch plots cube[0] (first time slice
            # of the last month's cube), not the seasonal aggregate — confirm
            # this is intentional
            outname = putils.make_filenames("climatology", index=index.name, grid=grid, anomalies=anomalies, month=season)
            putils.plot_smooth_map_iris("{}/{}/{}".format(utils.PLOTLOCS, index.name, outname), cube[0], cmap, bounds, "{}".format(index.units), title="{} - {}, {}-{}".format(index.name, season, utils.CLIM_START.year, utils.CLIM_END.year))

    return # main
def main(index="TX90p", grid="ADW", diagnostics=False, hadex2_adw=False, anomalies="None"): """ Merge the monthly files together, or, if just annual index, add extra metadata :param str grid: gridding type ADW/CAM :param str index: which index to run :param str anomalies: run code on anomalies or climatology rather than raw data :param bool diagnostics: output diagnostic information """ if grid == "ADW": suffixes = ["", "num", "numdls"] else: suffixes = ["", "num"] # both the data and the counts for suffix in suffixes: station_count = False if suffix in ["_num", "_numdls"]: station_count = True # from when testing and comparing to HadEX2's ADW routine if hadex2_adw: suffix = "{}_H2ADW".format(suffix) if index in utils.MONTHLY_INDICES: nmonths = 13 else: nmonths = 1 # spin through the months for month_index in range(nmonths): # annual = 0 infilename = os.path.join( utils.OUTROOT, utils.make_filenames(index=index, grid=grid, anomalies=anomalies, extra=suffix, month_index=month_index)) if os.path.exists(infilename): indata, lons, lats, times = ncdfp.netcdf_read( infilename, month_index) try: # store data data[:, month_index, :, :] = indata except NameError: # if first pass through, set up the array. data = np.zeros((times.shape[0], nmonths, lats.shape[0], lons.shape[0])) data[:, month_index, :, :] = indata[:] # delete indata to ensure it isn't reused indata = 0 print(infilename) else: print("File {} doesn't exist".format(infilename)) # create filename and write. outfilename = os.path.join( utils.FINALROOT, utils.make_filenames(index=index, grid=grid, anomalies=anomalies, extra=suffix, month_index="")) ncdfp.netcdf_write(outfilename, index, data, times, lats, lons, station_count=station_count) # delete data array to ensure try/except clause works on next pass del data return # main
def main(index, first, second, length, diagnostics=False, anomalies="None", grid="ADW"):
    """
    Plot maps of differences between two multi-year periods.

    :param str index: which index to run
    :param int first: start of first period
    :param int second: start of second period
    :param int length: length of periods
    :param bool diagnostics: output diagnostic information
    :param str anomalies: run code on anomalies or climatology rather than raw data
    :param str grid: gridding type ADW/CAM
    """

    # periods must not overlap
    if first + length - 1 > second:
        print("Periods overlap, please re-specify")
        return

    # get details of index
    index = utils.INDICES[index]

    # allow for option of running through each month
    if index.name in utils.MONTHLY_INDICES:
        nmonths = 13
    else:
        nmonths = 1

    # sort the colour maps
    RdYlBu, RdYlBu_r = putils.adjust_RdYlBu()
    BrBG, BrBG_r = putils.make_BrBG()

    print(index.name)

    # choose colourbar bounds and colour map per index; the +/-100 (or
    # +/-10000 for degree-day indices) end caps act as catch-all bins
    if index.name in ["TX90p", "TN90p", "SU", "TR", "GSL"]:
        bounds = [-100, -20, -15, -10, -5, 0, 5, 10, 15, 20, 100]
        cmap = RdYlBu_r
    elif index.name in ["DTR", "ETR"]:
        bounds = [-100, -4, -3, -2, -1, 0, 1, 2, 3, 4, 100]
        cmap = RdYlBu_r
    elif index.name in ["WSDI"]:
        bounds = [-100, -10, -7.5, -5, -2.5, 0, 2.5, 5, 7.5, 10, 100]
        cmap = RdYlBu_r
    elif index.name in ["TX10p", "TN10p", "ID"]:
        bounds = [-100, -20, -15, -10, -5, 0, 5, 10, 15, 20, 100]
        cmap = RdYlBu
    elif index.name in ["FD"]:
        bounds = [-100, -12, -9, -6, -3, 0, 3, 6, 9, 12, 100]
        cmap = RdYlBu
    elif index.name in ["CSDI"]:
        bounds = [-100, -10, -7.5, -5, -2.5, 0, 2.5, 5, 7.5, 10, 100]
        cmap = RdYlBu
    elif index.name in ["TXn", "TNn"]:
        bounds = [-100, -5, -3, -2, -1, 0, 1, 2, 3, 5, 100]
        cmap = RdYlBu_r
    elif index.name in ["TXx", "TNx"]:
        # BUGFIX: fourth entry was +0.5, making the bounds non-monotonic,
        # which BoundaryNorm rejects
        bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
        cmap = RdYlBu_r
    elif index.name in ["Rx1day"]:
        bounds = [-100, -8, -4, -2, -1, 0, 1, 2, 4, 8, 100]
        cmap = BrBG
    elif index.name in ["Rx5day"]:
        bounds = [-100, -10, -7.5, -5, -2.5, 0, 2.5, 5, 7.5, 10, 100]
        cmap = BrBG
    elif index.name in ["PRCPTOT"]:
        bounds = [-100, -40, -20, -10, -5, 0, 5, 10, 20, 40, 100]
        cmap = BrBG
    elif index.name in ["Rnnmm", "R95p", "R99p"]:
        bounds = [-100, -20, -15, -10, -5, 0, 5, 10, 15, 20, 100]
        cmap = BrBG
    elif index.name in ["R95pTOT", "R99pTOT"]:
        bounds = [-100, -10, -5, -2.5, -1, 0, 1, 2.5, 5, 10, 100]
        cmap = BrBG
    elif index.name in ["R10mm"]:
        bounds = [-100, -10, -5, -2.5, -1, 0, 1, 2.5, 5, 10, 100]
        cmap = BrBG
    elif index.name in ["R20mm"]:
        bounds = [-100, -5, -2.5, -1, -0.5, 0, 0.5, 1, 2.5, 5, 100]
        cmap = BrBG
    elif index.name in ["CWD"]:
        bounds = [-100, -2, -1, -0.5, -0.25, 0, 0.25, 0.5, 1, 2, 100]
        cmap = BrBG
    elif index.name in ["SDII"]:
        bounds = [-100, -1, -0.5, -0.2, -0.1, 0, 0.1, 0.2, 0.5, 1, 100]
        cmap = BrBG
    elif index.name in ["CDD"]:
        bounds = [-100, -10, -7.5, -5, -2.5, 0, 2.5, 5, 7.5, 10, 100]
        cmap = BrBG_r
    elif index.name in ["CDDcold18"]:
        bounds = [-10000, -100, -50, -20, -10, 0, 10, 20, 50, 100, 10000]
        cmap = RdYlBu_r
    elif index.name in ["HDDheat18"]:
        bounds = [-10000, -800, -400, -200, -100, 0, 100, 200, 400, 800, 10000]
        cmap = RdYlBu
    elif index.name in ["GDDgrow10"]:
        bounds = [-10000, -400, -200, -100, -50, 0, 50, 100, 200, 400, 10000]
        cmap = RdYlBu
    elif index.name in ["WSDI3"]:
        bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
        cmap = RdYlBu_r
    elif index.name in ["CSDI3"]:
        bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
        cmap = RdYlBu
    elif index.name in ["TNlt2", "TNltm2", "TNltm20", "TMlt10", "TMlt5"]:
        bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
        cmap = RdYlBu
    elif index.name in ["TXge30", "TXge35", "TMge5", "TMge10", "TXge50p"]:
        bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
        cmap = RdYlBu_r
    elif index.name in ["TNm", "TXm", "TMm", "TXTN"]:
        bounds = [-100, -1, -0.5, -0.2, -0.1, 0, 0.1, 0.2, 0.5, 1, 100]
        cmap = RdYlBu_r
    elif index.name in ["TXbTNb"]:
        bounds = [-100, -1, -0.5, -0.2, -0.1, 0, 0.1, 0.2, 0.5, 1, 100]
        cmap = RdYlBu
    elif index.name in ["RXday"]:
        bounds = [-100, -8, -4, -2, -1, 0, 1, 2, 4, 8, 100]
        cmap = BrBG
    else:
        bounds = [-100, -20, -15, -10, -5, 0, 5, 10, 15, 20, 100]
        cmap = RdYlBu_r

    # load all monthly cubes from the single "final" netCDF file
    cube_list = iris.load(os.path.join(utils.FINALROOT, utils.make_filenames(index=index.name, grid=grid, anomalies=anomalies, extra="", month_index="")))
    names = np.array([cube.var_name for cube in cube_list])

    #*************
    # plot difference map
    for month in range(nmonths):

        # pick the cube whose var_name matches this month
        selected_cube, = np.where(names == month_names[month])
        cube = cube_list[selected_cube[0]]

        try:
            cube.coord('grid_latitude').guess_bounds()
            cube.coord('grid_longitude').guess_bounds()
        except ValueError:
            # bounds already present
            pass

        # fix percent -> days issue for these four
        if index.name in ["TX90p", "TN90p", "TX10p", "TN10p"]:
            cube.data = cube.data * 3.65
            # mutates the shared Index object for subsequent plots
            index.units = "days"

        # get two cubes to difference
        first_cube, first_sigma = get_climatology(cube, first, length)
        second_cube, second_sigma = get_climatology(cube, second, length)

        differences = second_cube - first_cube

        # get "significance" by looking at non-overlapping sigmas
        total_sigma = first_sigma + second_sigma
        significance = np.ma.zeros(first_cube.shape)
        significance[differences.data > total_sigma.data] = 1
        significance.mask = differences.data.mask

        significance = putils.make_iris_cube_2d(significance, cube.coord("grid_latitude").points, cube.coord("grid_longitude").points, "difference_significance", "")

        # two-digit year ranges for the filename/label, e.g. "5180"
        first_string = "{}{}".format(str(first)[-2:], str(first + length - 1)[-2:])
        second_string = "{}{}".format(str(second)[-2:], str(second + length - 1)[-2:])

        # nicer unit label for temperatures
        if index.units == "degrees_C":
            units = '$^{\circ}$' + "C"
        else:
            units = index.units

        if anomalies != "climatology":
            outname = putils.make_filenames("diff_{}-{}".format(second_string, first_string), index=index.name, grid=grid, anomalies=anomalies, month=month_names[month])

            putils.plot_smooth_map_iris("{}/{}/{}".format(utils.PLOTLOCS, index.name, outname), differences, cmap, bounds, "Difference {}-{} ({})".format(second_string, first_string, units), title="{} - {}, Difference ({}-{}) - ({}-{})".format(index.name, month_names[month], second, second + length - 1, first, first + length - 1), figtext="(b)") #, significance=significance)

    return # main
def main(index, diagnostics=False, normalise=True, anomalies="None", grid="ADW"):
    """
    Plot Hovmöller-style (time x latitude) diagrams of the zonal-mean index.

    :param str index: which index to run
    :param bool diagnostics: output diagnostic information
    :param bool normalise: plot anomalies relative to the reference period mean
    :param str anomalies: run code on anomalies or climatology rather than raw data
    :param str grid: gridding type ADW/CAM
    """
    # if True, use a sine-latitude (equal-area) y-axis
    cosine = False

    # get details of index (look up the Index object from its name)
    index = utils.INDICES[index]

    # allow for option of running through each month
    if index.name in utils.MONTHLY_INDICES:
        nmonths = 13
    else:
        nmonths = 1

    # sort the colour maps
    RdYlBu, RdYlBu_r = putils.adjust_RdYlBu()
    BrBG, BrBG_r = putils.make_BrBG()

    # assign bounds and colormaps; the +/-100 (or +/-10000 for degree-day
    # indices) end caps act as catch-all bins for extremes
    if index.name in ["TX90p", "TN90p", "SU", "TR", "GSL"]:
        bounds = [-100, -8, -4, -2, -1, 0, 1, 2, 4, 8, 100]
        cmap = RdYlBu_r
    elif index.name in ["DTR", "ETR"]:
        bounds = [-100, -1, -0.75, -0.5, -0.25, 0, 0.25, 0.5, 0.75, 1, 100]
        cmap = RdYlBu_r
    elif index.name in ["WSDI"]:
        bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
        cmap = RdYlBu_r
    elif index.name in ["TX10p", "TN10p", "FD", "ID"]:
        bounds = [-100, -8, -4, -2, -1, 0, 1, 2, 4, 8, 100]
        cmap = RdYlBu
    elif index.name in ["CSDI"]:
        bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
        cmap = RdYlBu
    elif index.name in ["TXn", "TNn"]:
        bounds = [-100, -2, -1, -0.5, -0.25, 0, 0.25, 0.5, 1, 2, 100]
        cmap = RdYlBu_r
    elif index.name in ["TXx", "TNx"]:
        bounds = [-100, -1, -0.5, -0.25, -0.1, 0, 0.1, 0.25, 0.5, 1, 100]
        cmap = RdYlBu_r
    elif index.name in ["Rx1day"]:
        bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
        cmap = BrBG
    elif index.name in ["Rx5day"]:
        bounds = [-100, -4, -3, -2, -1, 0, 1, 2, 3, 4, 100]
        cmap = BrBG
    elif index.name in ["PRCPTOT"]:
        bounds = [-100, -20, -10, -5, -2, 0, 2, 5, 10, 20, 100]
        cmap = BrBG
    elif index.name in ["Rnnmm", "R95p", "R99p"]:
        bounds = [-100, -8, -4, -2, -1, 0, 1, 2, 4, 8, 100]
        cmap = BrBG
    elif index.name in ["R95pTOT"]:
        bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
        cmap = BrBG
    elif index.name in ["R99pTOT"]:
        bounds = [-100, -1, -0.5, -0.25, -0.1, 0, 0.1, 0.25, 0.5, 1, 100]
        cmap = BrBG
    elif index.name in ["R10mm"]:
        bounds = [-100, -3, -1.5, -0.75, -0.25, 0, 0.25, 0.75, 1.5, 3, 100]
        cmap = BrBG
    elif index.name in ["R20mm"]:
        bounds = [-100, -2, -1, -0.5, -0.25, 0, 0.25, 0.5, 1, 2, 100]
        cmap = BrBG
    elif index.name in ["CWD"]:
        bounds = [-100, -1, -0.5, -0.2, -0.1, 0, 0.1, 0.2, 0.5, 1, 100]
        cmap = BrBG
    elif index.name in ["SDII"]:
        bounds = [-100, -0.75, -0.5, -0.25, -0.1, 0, 0.1, 0.25, 0.5, 0.75, 100]
        cmap = BrBG
    elif index.name in ["CDD"]:
        bounds = [-100, -8, -4, -2, -1, 0, 1, 2, 4, 8, 100]
        cmap = BrBG_r
    elif index.name in ["CDDcold18"]:
        bounds = [-10000, -100, -50, -20, -10, 0, 10, 20, 50, 100, 10000]
        cmap = RdYlBu_r
    elif index.name in ["HDDheat18"]:
        bounds = [-10000, -800, -400, -200, -100, 0, 100, 200, 400, 800, 10000]
        cmap = RdYlBu
    elif index.name in ["GDDgrow10"]:
        bounds = [-10000, -400, -200, -100, -50, 0, 50, 100, 200, 400, 10000]
        cmap = RdYlBu
    elif index.name in ["WSDI3"]:
        bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
        cmap = RdYlBu_r
    elif index.name in ["CSDI3"]:
        bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
        cmap = RdYlBu
    elif index.name in ["TNlt2", "TNltm2", "TNltm20", "TMlt10", "TMlt5"]:
        bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
        cmap = RdYlBu
    elif index.name in ["TXge30", "TXge35", "TMge5", "TMge10", "TXge50p"]:
        bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
        cmap = RdYlBu_r
    elif index.name in ["TNm", "TXm", "TMm", "TXTN"]:
        bounds = [-100, -1, -0.5, -0.2, -0.1, 0, 0.1, 0.2, 0.5, 1, 100]
        cmap = RdYlBu_r
    elif index.name in ["TXbTNb"]:
        bounds = [-100, -1, -0.5, -0.2, -0.1, 0, 0.1, 0.2, 0.5, 1, 100]
        cmap = RdYlBu
    elif index.name in ["RXday"]:
        bounds = [-100, -8, -4, -2, -1, 0, 1, 2, 4, 8, 100]
        cmap = BrBG
    else:
        bounds = [-100, -8, -4, -2, -1, 0, 1, 2, 4, 8, 100]
        cmap = RdYlBu_r

    norm = mpl.cm.colors.BoundaryNorm(bounds, cmap.N)

    # load all monthly cubes from the single "final" netCDF file
    cube_list = iris.load(os.path.join(utils.FINALROOT, utils.make_filenames(index=index.name, grid=grid, anomalies=anomalies, extra="", month_index="")))
    names = np.array([cube.var_name for cube in cube_list])

    # plot all month versions at once
    # NOTE(review): loops over all month_names regardless of nmonths; for an
    # annual-only index a missing month name would make selected_cube empty
    # and raise IndexError — confirm names always covers month_names
    for month, mname in enumerate(month_names):

        if diagnostics:
            print(mname)

        # pick the cube whose var_name matches this month
        selected_cube, = np.where(names == mname)
        cube = cube_list[selected_cube[0]]

        try:
            cube.coord('grid_latitude').guess_bounds()
            cube.coord('grid_longitude').guess_bounds()
        except ValueError:
            # bounds already present
            pass

        # fix percent -> days issue for these four
        if index.name in ["TX90p", "TN90p", "TX10p", "TN10p"]:
            cube.data = cube.data * 3.65
            # mutates the shared Index object for subsequent plots
            index.units = "days"

        # zonal mean: collapse along longitude, leaving (time x latitude)
        cube = cube.collapsed('grid_longitude', iris.analysis.MEAN)

        # if show relative to climatology
        if normalise:
            clim_constraint = iris.Constraint(time=lambda cell: utils.REF_START <= cell <= utils.REF_END)
            norm_cube = cube.extract(clim_constraint)
            norm_cube = norm_cube.collapsed(['time'], iris.analysis.MEAN)
            cube = cube - norm_cube

        # plot
        # set up the figure
        fig = plt.figure(figsize=(8, 6))
        plt.clf()
        ax = fig.add_axes([0.1, 0.1, 0.85, 0.85])
        # grey background shows through where data are masked
        ax.patch.set_facecolor("0.8")

        contour = iris.plot.pcolor(cube, cmap=cmap, norm=norm) #, vmax=bounds[-2], vmin=bounds[1])

        cb = plt.colorbar(contour, orientation='horizontal', pad=0.07, fraction=0.05, \
                              aspect=30, ticks=bounds[1:-1], drawedges=True)
        cb.set_label(index.units, size=utils.FONTSIZE)

        # thicken border of colorbar and the dividers
        # http://stackoverflow.com/questions/14477696/customizing-colorbar-border-color-on-matplotlib
        cb.set_ticklabels(["{:g}".format(b) for b in bounds[1:-1]])
        cb.ax.tick_params(labelsize=utils.FONTSIZE, size=0)
        # cb.outline.set_color('k')
        cb.outline.set_linewidth(2)
        cb.dividers.set_color('k')
        cb.dividers.set_linewidth(2)

        # NOTE(review): tick.label was removed in matplotlib 3.8 (use
        # tick.label1) — confirm against the pinned matplotlib version
        for tick in ax.xaxis.get_major_ticks():
            tick.label.set_fontsize(utils.FONTSIZE)
        for tick in ax.yaxis.get_major_ticks():
            tick.label.set_fontsize(utils.FONTSIZE)

        ax.set_xlim([1900, 2020])

        if cosine:
            # equal-area (sine-latitude) axis
            ax.set_ylim(np.sin(np.deg2rad(np.array([-90, 90]))))
            ax.set_yticks(np.sin(np.deg2rad(np.array([-90, -60, -30, 0, 30, 60, 90]))))
            ax.set_yticklabels(["-90"+r'$^{\circ}$'+"S", "-60"+r'$^{\circ}$'+"S", \
                                    "-30"+r'$^{\circ}$'+"S", "0"+r'$^{\circ}$'+"", \
                                    "30"+r'$^{\circ}$'+"N", "60"+r'$^{\circ}$'+"N", "90"+r'$^{\circ}$'+"N"], fontsize=utils.FONTSIZE)
        else:
            ax.set_ylim([-90, 90])
            ax.set_yticks([-60, -30, 0, 30, 60])
            ax.set_yticklabels(["-60"+r'$^{\circ}$'+"S", "-30"+r'$^{\circ}$'+"S", \
                                    "0"+r'$^{\circ}$'+"", "30"+r'$^{\circ}$'+"N", "60"+r'$^{\circ}$'+"N"], fontsize=utils.FONTSIZE)

        plt.title("{} - {}, Hovmöller".format(index.name, month_names[month]), fontsize=utils.FONTSIZE)
        fig.text(0.03, 0.95, "(e)", fontsize=utils.FONTSIZE)

        if utils.WATERMARK:
            watermarkstring = "{} {}".format(os.path.join("/".join(os.getcwd().split('/')[4:]), os.path.basename(__file__)), dt.datetime.strftime(dt.datetime.now(), "%d-%b-%Y %H:%M"))
            plt.figtext(0.01, 0.01, watermarkstring, size=6)

        outname = putils.make_filenames("hovmoeller", index=index.name, grid=grid, anomalies=anomalies, month=mname)

        plt.savefig("{}/{}/{}".format(utils.PLOTLOCS, index.name, outname), dpi=300)
        plt.close()

    return # main