def calc_ratio(fsoil_mary, fsoil_kettle):
    """Return the Kettle/Mary soil-flux ratio over land.

    Both input fields are ocean-masked using the STEM 124x124 grid
    coordinates, and invalid (NaN/inf) cells are masked before dividing.
    """
    topo_file = os.path.join(os.environ['SARIKA_INPUT'], 'TOPO-124x124.nc')
    stem_lon, stem_lat, _topo = sp.parse_STEM_coordinates(topo_file)
    # mask ocean cells in both fields before forming the ratio
    fsoil_mary = maskoceans(stem_lon, stem_lat, fsoil_mary)
    fsoil_kettle = maskoceans(stem_lon, stem_lat, fsoil_kettle)
    return ma.masked_invalid(fsoil_kettle) / ma.masked_invalid(fsoil_mary)
def main(base_folder="/skynet3_rech1/huziy/veg_fractions/",
         fname="pm1983120100_00000000p", canopy_name="Y2C", label="USGS",
         depth_to_bedrock_name="8L"):
    """Read vegetation fractions, soil texture and depth to bedrock from an
    RPN file, mask oceans, plot them, and return the fields.

    :param base_folder: folder containing the RPN geophysics file
    :param fname: name of the RPN file inside base_folder
    :param canopy_name: RPN variable name of the vegetation-fraction field
    :param label: tag used in the output image file names
    :param depth_to_bedrock_name: RPN variable name of the depth-to-bedrock field
    :return: (basemap, lons, lats, data, label) where data maps field names
        to ocean-masked 2D arrays
    """
    data_path = os.path.join(base_folder, fname)
    r = RPN(data_path)

    # vegetation fractions come as one 2D field per level
    veg_fractions = r.get_2D_field_on_all_levels(name=canopy_name)
    print(list(veg_fractions.keys()))
    sand = r.get_first_record_for_name("SAND")
    clay = r.get_first_record_for_name("CLAY")
    dpth_to_bedrock = r.get_first_record_for_name(depth_to_bedrock_name)

    proj_params = r.get_proj_parameters_for_the_last_read_rec()
    lons, lats = r.get_longitudes_and_latitudes_for_the_last_read_rec()
    print(lons.shape)

    # build a rotated-pole basemap matching the model projection
    rll = RotatedLatLon(lon1=proj_params["lon1"], lat1=proj_params["lat1"],
                        lon2=proj_params["lon2"], lat2=proj_params["lat2"])
    lon0, lat0 = rll.get_true_pole_coords_in_rotated_system()
    plon, _ = rll.get_north_pole_coords()
    b = Basemap(projection="rotpole",
                llcrnrlon=lons[0, 0], llcrnrlat=lats[0, 0],
                urcrnrlon=lons[-1, -1], urcrnrlat=lats[-1, -1],
                lon_0=lon0 - 180, o_lon_p=lon0, o_lat_p=lat0)

    # convert longitudes to the [-180, 180] range expected by maskoceans
    lons[lons > 180] -= 360
    for lev in list(veg_fractions.keys()):
        veg_fractions[lev] = maskoceans(lons, lats, veg_fractions[lev],
                                        inlands=False)

    sand = maskoceans(lons, lats, sand)
    clay = maskoceans(lons, lats, clay)
    dpth_to_bedrock = maskoceans(lons, lats, dpth_to_bedrock)

    x, y = b(lons, lats)
    plot_veg_fractions(x, y, b, veg_fractions,
                       out_image=os.path.join(base_folder,
                                              "veg_fractions_{0}.jpeg".format(label)))
    plot_sand_and_clay(x, y, b, sand, clay,
                       out_image=os.path.join(base_folder,
                                              "sand_clay_{0}.jpeg".format(label)))

    # set relation between vegetation fraction fields and names
    veg_fract_dict = {}
    for lev, the_field in veg_fractions.items():
        lev = int(lev)
        if lev not in y2c_level_to_title:
            continue
        veg_fract_dict[y2c_level_to_title[lev]] = the_field

    data = {
        "SAND": sand, "CLAY": clay, "BDRCK_DEPTH": dpth_to_bedrock
    }
    data.update(veg_fract_dict)
    return b, lons, lats, data, label
def main():
    """Plot bulk field capacity for each soil level, one PNG per level."""
    # create the image folder if necessary
    img_folder = "bulk_field_capacity_model"
    if not os.path.isdir(img_folder):
        os.mkdir(img_folder)

    data, lons, lats, bmp = get_data_and_coords()
    # convert longitudes to [-180, 180] for maskoceans
    lons[lons > 180] -= 360
    print(list(data.keys()))

    # reproject coords
    x, y = bmp(lons, lats)

    clevs = np.arange(0, 0.5, 0.02)
    cmap = cm.get_cmap("rainbow", lut=len(clevs))

    # plot for all levels right away
    for lev, field in data.items():
        fig = plt.figure()
        plt.title(r"$\theta_{\rm fc}$, " + "soil lev = {}".format(lev))
        to_plot = maskoceans(lons, lats, field, inlands=True)
        img = bmp.contourf(x, y, to_plot, levels=clevs, cmap=cmap)
        bmp.colorbar(img)
        bmp.drawcoastlines()
        print("lev={}, fc-min={}, fc-max={}".format(lev, field.min(), field.max()))
        fname = "thfc_lev_{}.png".format(lev)
        fig.savefig(os.path.join(img_folder, fname))
        plt.close(fig)
def plot_for_simulation(axis=None, sim_path="", cmap=None, cnorm=None,
                        start_year=None, end_year=None, months=None):
    """
    plot a panel for each simulation
    :param axis: matplotlib axis to draw into
    :param sim_path: path to the simulation hdf file
    :param cmap: colormap for the correlation field
    :param cnorm: color normalization for the correlation field
    :param start_year: first year of the climatology period
    :param end_year: last year of the climatology period
    :param months: months (1-12) included in the climatology; defaults to all
    :return: (image, correlation field)
    """
    if months is None:
        months = list(range(1, 13))

    lons, lats, bm = analysis.get_basemap_from_hdf(sim_path)

    # correlate soil moisture (I1, level 0) with latent heat flux (AV, level 0)
    params = dict(
        path1=sim_path, path2=sim_path,
        start_year=start_year, end_year=end_year,
        varname1="I1", level1=0,
        varname2="AV", level2=0,
        months=months
    )
    corr, i1_clim, av_clim = calculate_correlation_field_for_climatology(**params)

    # convert longitudes to the [-180, 180] range
    lons[lons > 180] -= 360
    corr = maskoceans(lons, lats, corr)
    x, y = bm(lons, lats)
    im = bm.pcolormesh(x, y, corr, norm=cnorm, cmap=cmap, ax=axis)
    bm.drawcoastlines()
    return im, corr
def __plot_timings(prefix, show_cb=False, row=0, col=0, the_storage=None):
    # Plot the month-of-occurrence field for the given prefix in panel
    # (row, col).  Relies on enclosing-scope variables: ds, fig, gs, bmap,
    # xx, yy, lons2d_, lats2d, norm_timings, cmap_timings, clevs_timings, axes.
    _dates = ds["{}_dates.month".format(prefix)][:]

    ax = fig.add_subplot(gs[row, col])

    if the_storage is not None:
        # hide cells where the accompanying storage field is NaN or masked
        _dates = _dates.where(~np.isnan(the_storage))
        _dates = np.ma.masked_where(the_storage.mask, _dates)

    _dates = maskoceans(lons2d_, lats2d, _dates)

    cs = bmap.pcolormesh(xx, yy, _dates, norm=norm_timings, cmap=cmap_timings)
    cb = bmap.colorbar(cs, location="bottom",
                       format=FuncFormatter(__timing_cb_format_ticklabels))

    if show_cb:
        cb.ax.set_xlabel("month")
        # shift tick labels to the middle of each color band
        maj_locator = cb.ax.xaxis.get_major_locator()
        print("old tick locs = {}".format(maj_locator.locs))
        maj_locator.locs = __get_new_tick_locs_middle(maj_locator.locs,
                                                      len(clevs_timings) - 1,
                                                      shift_direction=-1)
        print("new tick locs = {}".format(maj_locator.locs))
        for tick_line in cb.ax.xaxis.get_ticklines():
            tick_line.set_visible(False)

    cb.ax.set_visible(show_cb)
    ax.set_title("{} timing".format(prefix))
    axes.append(ax)
def __plot_storage(prefix, show_cb=False, row=0, col=0, plot_deviations=True):
    # Plot channel water storage (or its deviation from bankfull storage)
    # for the given prefix in panel (row, col).  Relies on enclosing-scope
    # variables: ds, fig, gs, bmap, xx, yy, lons2d_, lats2d, storage_var_name,
    # storage_lower_limit_m3, bankfull_storage, axes.  Returns the masked
    # storage field.
    if plot_deviations:
        # symmetric (negated + positive) levels around zero for deviations
        clevs = [0, 1e3, 1e4, 1e5, 1.0e6, 1e7, 1e8, 1.0e9]
        clevs = [-c for c in reversed(clevs)][:-1] + clevs
        cmap = cm.get_cmap("bwr", len(clevs) - 1)
    else:
        clevs = [0, 1e3, 1e4, 1e5, 1.0e6, 1e7, 1e8, 1.0e9]
        cmap = cm.get_cmap("YlGnBu", len(clevs) - 1)

    norm = BoundaryNorm(boundaries=clevs, ncolors=len(clevs) - 1)

    _storage = ds["{}_{}".format(prefix, storage_var_name)][:]

    ax = fig.add_subplot(gs[row, col])

    # drop cells with negligible storage, then mask oceans and NaNs
    _storage = _storage.where(_storage > storage_lower_limit_m3)
    _storage = maskoceans(lons2d_, lats2d, _storage)
    _storage = np.ma.masked_where(np.isnan(_storage), _storage)

    if plot_deviations:
        cs = bmap.pcolormesh(xx, yy, _storage - bankfull_storage,
                             norm=norm, cmap=cmap)
    else:
        cs = bmap.pcolormesh(xx, yy, _storage, norm=norm, cmap=cmap)

    ext = "both" if plot_deviations else "max"
    cb = bmap.colorbar(cs, location="bottom",
                       format=FuncFormatter(__storage_cb_format_ticklabels),
                       extend=ext)
    cb.ax.set_visible(show_cb)
    cb.ax.set_xlabel(r"${\rm m^3}$")
    ax.set_title(prefix)
    axes.append(ax)
    return _storage
def ResearchRegion_surface():
    """
    Plot the surface mixing ratio over the research region.
    :return:
    """
    fig = plt.figure(figsize=(11, 8), facecolor="white")

    # data = np.loadtxt('seasonAvr_data/SurfaceMixingRatio/1_seasonAvr.txt')
    data = np.loadtxt("allYearAvr_data/SurfaceMixingRatio/allYearAvr.txt")

    # flip the data north-south so row 0 becomes the opposite latitude edge
    arr = np.zeros((180, 360))
    for i in range(180):
        arr[i, :] = data[179 - i, :]

    longitude = np.loadtxt("lonlat_data/longitude.txt")
    latitude = np.loadtxt("lonlat_data/latitude.txt")

    m = Basemap(llcrnrlon=70, llcrnrlat=15, urcrnrlon=138, urcrnrlat=55,
                projection="mill", resolution="h")
    m.drawparallels(np.arange(5.5, 90.5, 1.0), color="w", linewidth=0.5,
                    dashes=[1, 1], labels=[0, 0, 0, 0])
    m.drawmeridians(np.arange(60.5, 181.5, 1.0), color="w", linewidth=0.5,
                    dashes=[1, 1], labels=[0, 0, 0, 0])
    m.drawmapboundary(fill_color="0.3")
    m.readshapefile("shp/CHINA", "CHINA", drawbounds=1, color="black")

    # mask the ocean so only land values are drawn
    topo = maskoceans(longitude, latitude, arr)
    im = m.pcolormesh(longitude, latitude, topo, shading="flat",
                      cmap=plt.cm.jet, latlon=True, vmin=0, vmax=500)
    m.drawlsmask(ocean_color="w", lsmask=0)
    cbar = m.colorbar()
    cbar.ax.set_ylabel("SurfaceMixingRatio", color="black", fontsize="14",
                       rotation=90)
    plt.show()
def latlonggrid(longmin, longmax, latmin, latmax, step, **kwargs):
    """
    Produce a dense grid suitable for drawing climate maps -- land areas only.

    The number of grid points per degree is the same for longitude and
    latitude at the equator. Away from the equator, we thin it out a bit more
    to keep the number of grid points per kilometer roughly constant.

    Parameters --
    longmin -- minimum longitude
    longmax -- maximum longitude
    latmin -- minimum latitude
    latmax -- maximum latitude
    step -- The number of grid points per degree for latitude and for
            longitude at the equator.
    **kwargs -- forwarded to mpl_toolkits.basemap.maskoceans
                (e.g. inlands, resolution)

    Returns a DataFrame with 'lat'/'long' columns containing land points only.
    """
    llgrid = pd.DataFrame(
        [
            # lat and long iterate in units of (1/step) degree; the longitude
            # range shrinks by cos(latitude) to keep point density even
            [lat / step, long / step / cos(pi * lat / step / 180)]
            for lat in range(latmin * step, latmax * step + 1)
            for long in range(
                floor(longmin * step * cos(pi * lat / step / 180)),
                floor(longmax * step * cos(pi * lat / step / 180))
            )
        ],
        columns=["lat", "long"],
    )
    # maskoceans only needs the coordinates; the data argument is a dummy
    masked = maskoceans(llgrid.long, llgrid.lat,
                        llgrid.index.get_level_values(0), **kwargs)
    # keep the rows that were NOT masked, i.e. the land points
    return llgrid[~masked.mask]
def plotKoppen(Dict):
    """
    Plot a map of the Koppen climate zones used in the correlation analysis.

    Uses the module-level Basemap instance ``m``.  Returns the masked zone
    array for eastern Australia.
    """
    # zone codes painted onto a coarse 27x22 grid (rows ~ lat, cols ~ lon);
    # the stray bare `m` expression statement that preceded this line was
    # removed -- it was a no-op at best
    empty = np.zeros((27, 22))
    empty[23:, 14:] = 1.0     # equatorial
    empty[19:23, 14:] = 2.0   # tropical
    empty[10:19, 16:] = 3.0   # subtropical
    empty[9:17, 14:15] = 4.0  # desert
    empty[17:19, 14:15] = 5.0  # grass1
    empty[17:19, 15:16] = 5.0  # grass2
    empty[4:17, 15:16] = 5.0   # grass3
    empty[4:9, 14:15] = 5.0    # grass4
    empty[4:10, 16:] = 6.0     # temperate1
    empty[:4, 14:20] = 6.0     # temperate2

    [lonall, latall] = np.meshgrid((Dict['lon']), (Dict['lat']))
    x, y = m(lonall, latall)

    # mask oceans, then mask the zero (unclassified) cells
    eastern_Aus = maskoceans(lonall, latall, empty)
    eastern_Aus = np.ma.masked_where(eastern_Aus == 0.0, eastern_Aus)

    cs = m.pcolor(x, y, eastern_Aus)
    plt.title("Climate zones in eastern Australia")
    plt.show()
    return eastern_Aus
def compare_vars(vname_model, vname_to_obs, r_config, season_to_months,
                 bmp_info_agg, axes_list):
    # Plot seasonal-mean model-minus-observation biases for one variable and
    # return the contour set produced by plot_seasonal_mean_biases.
    season_to_clim_fields_model = analysis.get_seasonal_climatology_for_runconfig(
        run_config=r_config, varname=vname_model, level=0,
        season_to_months=season_to_months)

    for season, field in season_to_clim_fields_model.items():
        print(field.shape)
        if vname_model == "PR":
            # convert precipitation from m/s to mm/day (in place)
            field *= 1.0e3 * 24 * 3600

    seasonal_clim_fields_obs = vname_to_obs[vname_model]

    # copy so the shared basemap-info longitudes are not modified in place
    lons = bmp_info_agg.lons.copy()
    lons[lons > 180] -= 360

    season_to_err = OrderedDict()
    for season in seasonal_clim_fields_obs:
        season_to_err[season] = season_to_clim_fields_model[season] - seasonal_clim_fields_obs[season]
        season_to_err[season] = maskoceans(lons, bmp_info_agg.lats,
                                           season_to_err[season],
                                           inlands=False)

    cs = plot_seasonal_mean_biases(season_to_error_field=season_to_err,
                                   varname=vname_model,
                                   basemap_info=bmp_info_agg,
                                   axes_list=axes_list)
    return cs
def plot_only_vegetation_fractions(
        data_path="/RESCUE/skynet3_rech1/huziy/geof_lake_infl_exp/geophys_Quebec_0.1deg_260x260_with_dd_v6_with_ITFS",
        canopy_name="VF", label="QC_10km"):
    # Read vegetation fractions from an RPN geophysics file, mask oceans and
    # plot them to an image stored next to the input file.
    r = RPN(data_path)
    veg_fractions = r.get_2D_field_on_all_levels(name=canopy_name)
    print(list(veg_fractions.keys()))

    proj_params = r.get_proj_parameters_for_the_last_read_rec()
    lons, lats = r.get_longitudes_and_latitudes_for_the_last_read_rec()
    print(lons.shape)

    # rotated-pole basemap matching the model projection
    rll = RotatedLatLon(lon1=proj_params["lon1"], lat1=proj_params["lat1"],
                        lon2=proj_params["lon2"], lat2=proj_params["lat2"])
    lon0, lat0 = rll.get_true_pole_coords_in_rotated_system()
    plon, _ = rll.get_north_pole_coords()
    b = Basemap(projection="rotpole",
                llcrnrlon=lons[0, 0], llcrnrlat=lats[0, 0],
                urcrnrlon=lons[-1, -1], urcrnrlat=lats[-1, -1],
                lon_0=lon0 - 180, o_lon_p=lon0, o_lat_p=lat0)

    # convert longitudes to [-180, 180] for maskoceans
    lons[lons > 180] -= 360
    for lev in list(veg_fractions.keys()):
        veg_fractions[lev] = maskoceans(lons, lats, veg_fractions[lev],
                                        inlands=False)

    x, y = b(lons, lats)
    plot_veg_fractions(x, y, b, veg_fractions,
                       out_image=os.path.join(os.path.dirname(data_path),
                                              "veg_fractions_{0}.png".format(label)))
def __init__(self, filename):
    """Load grid geometry and the time step from the given netCDF file and
    prepare empty containers for ingested TC masks.

    :param filename: path to a netCDF file with 'time', 'xlon', 'xlat'
    """
    nc = Dataset(filename)
    self.debug_mode = 1.
    # was the Python-2-only `print self.debug_mode` statement
    print(self.debug_mode)

    # time step inferred from the first two time records
    self.dt = nc.variables["time"][1] - nc.variables["time"][0]
    self.xlon = nc.variables["xlon"][:]
    self.xlat = nc.variables["xlat"][:]
    N_lon = self.xlon.shape[1]
    N_lat = self.xlat.shape[0]

    # map fractional grid indices to geographic coordinates
    self.idx_to_lon = interp2d(numpy.arange(N_lon), numpy.arange(N_lat),
                               self.xlon)
    self.idx_to_lat = interp2d(numpy.arange(N_lon), numpy.arange(N_lat),
                               self.xlat)

    # map extent, trimmed at the south (+18) and north (-8) edges
    self.left = self.xlon.min()
    self.right = self.xlon.max()
    self.bottom = self.xlat.min() + 18
    self.top = self.xlat.max() - 8
    self.m = Basemap(llcrnrlon=self.left, llcrnrlat=self.bottom,
                     urcrnrlon=self.right, urcrnrlat=self.top,
                     projection='cyl', resolution='l')

    # boolean mask, True over ocean cells (the data argument is a dummy)
    self.ocean_mask = maskoceans(self.xlon, self.xlat, self.xlon).mask

    # running statistics / containers filled during ingestion
    self.time_min = 0.
    self.time_max = 0.
    self.no_ingested = 0
    self.masks = numpy.empty((0,) + self.ocean_mask.shape)
    self.tc_table = numpy.empty((0, 5))
    self.time = numpy.empty((0,))
def subplot(self, axes, subfigure):
    """Mask ocean cells in the subfigure's data, then delegate plotting to
    the parent class."""
    from mpl_toolkits.basemap import maskoceans

    data, xgrid, ygrid, title, datarange, cbarlabel, map_kwargs = subfigure
    masked_data = maskoceans(xgrid, ygrid, data, inlands=False)
    # BUG FIX: the rebuilt tuple used the undefined name 'cbarlab'; it must
    # reuse the unpacked 'cbarlabel' (previously raised NameError)
    subfigure = (masked_data, xgrid, ygrid, title, datarange, cbarlabel,
                 map_kwargs)
    super(MaskedArrayMapFigure, self).subplot(axes, subfigure)
def get_mask(arr, lats=None, lons=None, masknan=True, maskocean=False,
             maskland=False):
    '''
    Return a boolean array masking the requested cells of arr.

    arr       -- data array (NaNs are masked when masknan is True)
    lats, lons -- coordinates of arr; required for ocean/land masking
                  (1D inputs are expanded with meshgrid)
    masknan   -- mask NaN cells (default True)
    maskocean -- additionally mask ocean cells
    maskland  -- additionally mask land cells
    '''
    # np.bool was removed from modern numpy; plain bool is the supported dtype
    mask = np.zeros(np.shape(arr), dtype=bool)
    if masknan:
        mask = np.isnan(arr)
    if maskocean or maskland:
        if len(np.shape(lats)) == 1:
            lons, lats = np.meshgrid(lons, lats)
    if maskocean:
        # elementwise OR with the basemap ocean mask
        mask = mask | maskoceans(lons, lats, arr, inlands=False).mask
    if maskland:
        mask = mask | ~(maskoceans(lons, lats, arr, inlands=False).mask)
    return mask
def plot_swe_bfes(runconfig_rea, runconfig_gcm, vname_model="I5",
                  season_to_months=None, bmp_info=None, axes_list=None):
    # Plot seasonal-mean GCM-minus-reanalysis biases (boundary-forcing
    # errors) for the given model variable (default I5).
    seasonal_clim_fields_rea = analysis.get_seasonal_climatology_for_runconfig(
        run_config=runconfig_rea, varname=vname_model, level=0,
        season_to_months=season_to_months)
    seasonal_clim_fields_gcm = analysis.get_seasonal_climatology_for_runconfig(
        run_config=runconfig_gcm, varname=vname_model, level=0,
        season_to_months=season_to_months)

    # copy so shared basemap-info longitudes are not changed in place
    lons = bmp_info.lons.copy()
    lons[lons > 180] -= 360

    assert len(seasonal_clim_fields_rea) > 0
    season_to_err = OrderedDict()
    for season, field in seasonal_clim_fields_rea.items():
        rea = field
        gcm = seasonal_clim_fields_gcm[season]

        # Mask oceans and lakes
        season_to_err[season] = maskoceans(lons, bmp_info.lats, gcm - rea)
        assert hasattr(season_to_err[season], "mask")

    plot_performance_err_with_cru.plot_seasonal_mean_biases(
        season_to_error_field=season_to_err,
        varname=vname_model,
        basemap_info=bmp_info,
        axes_list=axes_list)
def plot_field_2d(lons_2d, lats_2d, field_2d, start_lon=-180, end_lon=0,
                  color_map=None, minmax=(None, None)):
    """Plot a 2D field on a cylindrical basemap between start_lon and end_lon.

    :param lons_2d: 2D longitudes of the field
    :param lats_2d: 2D latitudes of the field
    :param field_2d: the field to plot
    :param start_lon: western map boundary, degrees in [-180, 180]
    :param end_lon: eastern map boundary, degrees in [-180, 180]
    :param color_map: colormap; when None, ten colorbar ticks are used
    :param minmax: (vmin, vmax) color limits
    """
    plt.figure()
    m = Basemap(llcrnrlon=start_lon, llcrnrlat=np.min(lats_2d),
                urcrnrlon=end_lon, urcrnrlat=np.max(lats_2d),
                resolution='l')
    m.drawmeridians(range(start_lon, end_lon, 10))
    m.drawparallels(range(-90, 90, 10))

    x, y = m(lons_2d, lats_2d)
    x -= 360  # converting longitude to -180:180
    # NOTE(review): maskoceans is called with projected coords; this only
    # works because the default 'cyl' projection keeps x, y in lon/lat
    # degrees -- confirm before changing the projection
    field_2d = maskoceans(x, y, field_2d)
    m.pcolormesh(x, y, field_2d, cmap=color_map,
                 vmin=minmax[0], vmax=minmax[1])
    m.drawcoastlines()

    # identity comparison with None is the idiomatic (and safe) check
    numticks = color_map.N + 1 if color_map is not None else 10
    plt.colorbar(ticks=LinearLocator(numticks=numticks), format='%.01f',
                 orientation='vertical', shrink=0.6)
def inds_subset(self, lat0=None, lat1=None, lon0=None, lon1=None,
                maskocean=False, maskland=False):
    '''
    return indices of lat,lon arrays within input box

    lat0/lat1, lon0/lon1 -- optional bounding-box edges (inclusive)
    maskocean -- additionally exclude ocean squares
    maskland  -- additionally exclude land squares
    Returns a boolean array, True for grid squares to keep.
    '''
    inds = ~np.isnan(self.AMF_OMI)  # only want non nans
    mlons, mlats = np.meshgrid(self.longitude, self.latitude)
    with np.errstate(invalid='ignore'):
        # ignore comparisons with NaNs
        if lat0 is not None:
            inds = inds * (mlats >= lat0)
        if lon0 is not None:
            inds = inds * (mlons >= lon0)
        if lat1 is not None:
            inds = inds * (mlats <= lat1)
        if lon1 is not None:
            inds = inds * (mlons <= lon1)

    # true over ocean squares (the data argument to maskoceans is a dummy)
    oceanmask = maskoceans(mlons, mlats, self.AMF_OMI, inlands=False).mask
    landmask = (~oceanmask)

    # mask ocean if flag is set
    if maskocean:
        inds *= (~oceanmask)
        if __DEBUG__:
            print("oceanmask:")
            print((type(oceanmask), oceanmask.shape))
            print((inds * (~oceanmask)).shape)
            print((np.sum(oceanmask), np.sum(~oceanmask)))

    # mask land if flag is set
    if maskland:
        inds *= (~landmask)

    return inds
def draw_map(t_str, ax, data, vmin, vmax, cmap=plt.get_cmap('Blues'),
             norm=None, maskoceans_switch=True):
    """Draw a filled-contour map of ``data`` on the 124x124 STEM domain.

    :param t_str: map title
    :param ax: matplotlib axis to draw into
    :param data: 2D field on the STEM grid
    :param vmin: lower color limit
    :param vmax: upper color limit
    :param cmap: colormap
    :param norm: matplotlib normalization instance, or None for the default
        linear scaling.  (The previous default, ``plt.normalize``, no longer
        exists in matplotlib and raised AttributeError at definition time.)
    :param maskoceans_switch: when True, mask ocean cells before contouring
    :return: (NAMapFigure, contour set)
    """
    map = NAMapFigure(t_str=t_str, cb_axis=None, map_axis=ax,
                      fast_or_pretty='pretty', lat_0=49, lon_0=-97,
                      mapwidth=5.8e6, mapheight=5.2e6)
    lon, lat, topo = sp.parse_STEM_coordinates(
        os.path.join(os.getenv('SARIKA_INPUT'), 'TOPO-124x124.nc'))
    if maskoceans_switch:
        data = maskoceans(lon, lat, data, inlands=False, resolution='f')
    cm = map.map.contourf(lon, lat, data, cmap=cmap, latlon=True,
                          norm=norm, vmin=vmin, vmax=vmax)
    return (map, cm)
def draw_map(self, data, map_axis_idx=None, cb_axis=None, cb_format='%0.2f',
             cbar_t_str=None, label_lat=False, label_lon=False,
             vmin=None, vmax=None, midpoint=None,
             bands_above_mdpt=5, bands_below_mdpt=5,
             cmap=plt.get_cmap('Blues'), panel_lab='a', extend='neither'):
    """
    Draw one ocean-masked pcolor panel of ``data`` on the STEM domain with a
    discrete midpoint-centered colormap and a horizontal colorbar.

    returns: stem_pytools.na_map.NAMapFigure object containing the map
    """
    d = STEM_Domain()
    stem_lon = d.get_lon()
    stem_lat = d.get_lat()

    this_cmap, this_norm = midpt_norm.get_discrete_midpt_cmap_norm(
        vmin=vmin, vmax=vmax, midpoint=midpoint,
        bands_above_mdpt=bands_above_mdpt,
        bands_below_mdpt=bands_below_mdpt,
        extend=extend, this_cmap=cmap)

    this_ax = self.ax[map_axis_idx]
    map_obj = na_map.NAMapFigure(map_axis=this_ax,
                                 label_latlon=(label_lat, label_lon),
                                 lon_label_interval=30,
                                 cb_axis=cb_axis, t_str=None)

    cm = map_obj.map.pcolor(stem_lon, stem_lat,
                            maskoceans(stem_lon, stem_lat, data),
                            cmap=this_cmap, latlon=True, norm=this_norm)

    # Python-3-compatible print (was the Python-2 print statement)
    print('colorbar format: ' + cb_format)
    cbar = this_ax.figure.colorbar(cm, ax=this_ax, format=cb_format,
                                   orientation='horizontal')
    if cbar_t_str is not None:
        cbar.ax.set_title(cbar_t_str)
    # rotate numeric tick labels so they don't overlap
    ticklabs = cbar.ax.get_xticklabels()
    tickvals = np.array([float(x.get_text()) for x in ticklabs])
    cbar.ax.set_xticklabels(tickvals, rotation=-45)

    # place panel label in upper left
    this_ax.text(-0.2, 1.0, panel_lab, transform=this_ax.transAxes)
    # return the map object as documented (was missing before)
    return map_obj
def compare_swe_diff_for_era40_driven():
    # Compare modelled SWE (two ERA40-driven runs) with observations for DJF
    # and save one difference map per run.
    b, lons2d, lats2d = draw_regions.get_basemap_and_coords(
        llcrnrlat=40.0, llcrnrlon=-145, urcrnrlon=-10)
    lons2d[lons2d > 180] -= 360
    x, y = b(lons2d, lats2d)

    # period
    start_year = 1981
    end_year = 1997
    the_months = [12, 1, 2]

    levels = [10] + list(range(20, 120, 20)) + [150, 200, 300, 500, 1000]
    cmap = mpl.cm.get_cmap(name="jet_r", lut=len(levels))
    norm = colors.BoundaryNorm(levels, cmap.N)

    swe_obs_manager = SweDataManager(var_name="SWE")
    swe_obs = swe_obs_manager.get_mean(start_year, end_year, months=the_months)
    print("Calculated obs swe")
    # bring observations onto the model grid
    swe_obs_interp = swe_obs_manager.interpolate_data_to(swe_obs, lons2d, lats2d)

    axes_list = []
    levels_diff = np.arange(-100, 110, 10)

    # plot model res. (ERA40 driven 1)
    paths = ["data/CORDEX/na/era40_1", "data/CORDEX/na/era40_2"]
    prefixes = ["pmNorthAmerica_0.44deg_ERA40-Int_{0}_1958-1961".format("DJF"),
                "pmNorthAmerica_0.44deg_ERA40-Int2_{0}_1958-1961".format("DJF")]

    pf_kinds = draw_regions.get_permafrost_mask(lons2d, lats2d)

    for i, the_path in enumerate(paths):
        base = os.path.basename(the_path)
        fig = plt.figure()
        ax = plt.gca()
        axes_list.append(ax)
        swe_model_era = CRCMDataManager.get_mean_2d_from_climatologies(
            path=the_path, var_name="I5", file_prefixes=prefixes)
        swe_model_era = maskoceans(lons2d, lats2d, swe_model_era)

        # plot model(ERA40 driven) - obs
        axes_list.append(ax)
        img = b.contourf(x, y, swe_model_era - swe_obs_interp,
                         levels=levels_diff)
        draw_colorbar(fig, img, ax=ax, boundaries=levels_diff)
        ax.set_title("Model ({0} 1958-1961) - Obs.".format(base))

        b.drawcoastlines(ax=ax, linewidth=0.2)
        # overlay permafrost-kind contours
        b.contour(x, y, pf_kinds, ax=ax, colors="k")
        fig.savefig("swe_{0}.png".format(base))
def __init__(self, date, oneday=False, latres=0.25, lonres=0.3125,
             keylist=None, filename=None):
    '''
    Function to read a reprocessed omi file, by default reads an 8-day
    average (8p)
    Inputs:
        date = datetime(y,m,d) of file
        oneday = False : read a single day average rather than 8 day average
        latres=0.25
        lonres=0.3125
        keylist=None : if set to a list of strings, just read those data from
            the file, otherwise read all data
        filename=None : if set then read this file ( used for testing )
    Output:
        Structure containing omhchorp dataset
    '''
    struct = fio.read_omhchorp(date, oneday=oneday, latres=latres,
                               lonres=lonres, keylist=keylist,
                               filename=filename)
    # date and dimensions
    self.date = date
    self.latitude = struct['latitude']
    self.longitude = struct['longitude']
    self.gridentries = struct['gridentries']

    # Reference Sector Correction stuff
    self.RSC_latitude = struct['RSC_latitude']
    self.RSC_region = struct['RSC_region']
    # [rsc_lats, 60] - the rsc for this time period
    self.RSC_GC = struct['RSC_GC']
    self.RSC = struct['RSC']

    # The vertical column corrected using the RSC
    self.VCC = struct['VCC']
    self.VCC_PP = struct['VCC_PP']  # Corrected Paul Palmer VC

    # Arrays [ lats, lons ]
    self.AMF_GC = struct['AMF_GC']
    self.AMF_OMI = struct['AMF_OMI']
    self.AMF_GCz = struct['AMF_GCz']
    self.AMF_PP = struct['AMF_PP']  # AMF calculated using Paul palmers code

    # remove small and negative AMFs
    print("Removing %d AMF_PP's less than 0.1" % np.nansum(self.AMF_PP < 0.1))
    self.AMF_PP[self.AMF_PP < 0.1] = np.NaN

    # screen VCC_PP to the [-5e15, 1e17] range
    screen = [-5e15, 1e17]
    screened = (self.VCC_PP < screen[0]) + (self.VCC_PP > screen[1])
    print("Removing %d VCC_PP's outside [-5e15,1e17]" % (np.sum(screened)))
    self.VCC_PP[screened] = np.NaN

    self.SC = struct['SC']
    self.VC_GC = struct['VC_GC']
    self.VC_OMI = struct['VC_OMI']
    self.VC_OMI_RSC = struct['VC_OMI_RSC']
    self.col_uncertainty_OMI = struct['col_uncertainty_OMI']
    self.fires = struct['fires']
    # true where fires occurred over last 8 days
    self.fire_mask_8 = struct['fire_mask_8']
    # true where fires occurred over last 16 days
    self.fire_mask_16 = struct['fire_mask_16']

    # boolean mask, True over ocean grid squares (data argument is a dummy)
    mlons, mlats = np.meshgrid(self.longitude, self.latitude)
    self.oceanmask = maskoceans(mlons, mlats, self.AMF_OMI,
                                inlands=False).mask
def main():
    # Plot the model topography (ME) for the 452x260 Great-Lakes domain,
    # overlay nested-domain rectangles, and save the figure to PNG.
    bathymetry_path = ""
    topo_path = "/RECH2/huziy/coupling/coupled-GL-NEMO1h_30min/geophys_452x260_directions_new_452x260_GL+NENA_0.1deg_SAND_CLAY_LDPT_DPTH.fst"

    plot_utils.apply_plot_params()
    with RPN(topo_path) as r:
        assert isinstance(r, RPN)
        topo = r.get_first_record_for_name("ME")
        lons, lats = r.get_longitudes_and_latitudes_for_the_last_read_rec()
        print(lons.shape)
        prj_params = r.get_proj_parameters_for_the_last_read_rec()

    rll = RotatedLatLon(**prj_params)
    bmap = rll.get_basemap_object_for_lons_lats(lons2d=lons, lats2d=lats,
                                                resolution="i")
    xx, yy = bmap(lons, lats)
    plt.figure()
    ax = plt.gca()

    # longitudes in [-180, 180] for maskoceans
    lons1 = np.where(lons <= 180, lons, lons - 360)
    topo = maskoceans(lons1, lats, topo)

    topo_clevs = [0, 100, 200, 300, 400, 500, 600, 800, 1000, 1200]
    # bn = BoundaryNorm(topo_clevs, len(topo_clevs) - 1)
    cmap = cm.get_cmap("terrain")
    # take the colormap's water-like color for the map background
    ocean_color = cmap(0.18)
    cmap, norm = colors.from_levels_and_colors(
        topo_clevs, cmap(np.linspace(0.3, 1, len(topo_clevs) - 1)))

    # nested-domain outlines
    add_rectangle(ax, xx, yy, margin=20, edge_style="solid")
    add_rectangle(ax, xx, yy, margin=10, edge_style="dashed")

    im = bmap.pcolormesh(xx, yy, topo, cmap=cmap, norm=norm)
    bmap.colorbar(im, ticks=topo_clevs)
    bmap.drawcoastlines(linewidth=0.3)
    bmap.drawmapboundary(fill_color=ocean_color)
    bmap.drawparallels(np.arange(-90, 90, 10), labels=[1, 0, 0, 1],
                       color="0.3")
    bmap.drawmeridians(np.arange(-180, 190, 10), labels=[1, 0, 0, 1],
                       color="0.3")
    plt.savefig("GL_452x260_0.1deg_domain.png", dpi=300, bbox_inches="tight")
def oceanmask(lats, lons, inlands=False):
    '''
    Return oceanmask, true over ocean squares

    lats, lons -- 1D or 2D coordinate arrays (1D inputs are meshgridded)
    inlands -- False means don't mask inland water squares
    '''
    mlats, mlons = lats, lons
    if len(np.shape(lats)) == 1:
        mlons, mlats = np.meshgrid(lons, lats)
    # lonsin, latsin, datain arguments for maskoceans
    # we just want mask, so datain doesn't matter
    # BUG FIX: the inlands parameter was previously ignored (it was
    # hard-coded to False in the maskoceans call)
    ocean = maskoceans(mlons, mlats, mlats, inlands=inlands).mask
    return ocean
def draw_map(self, fast_or_pretty='fast', t_str='124x124 quick plot',
             n_levs=9, vmin=None, vmax=None, cmap=plt.get_cmap('Blues'),
             norm=None, maskoceans_switch=False, label_latlon=True,
             cbar_fmt_str=None):
    """draw a map of a 124x124 field on the N Pole stereographic N American
    STEM domain.

    ARGS:
    fast_or_pretty ({'fast'}|'pretty'): if 'fast', uses the default
        basemap projection, which renders quickly.  If 'pretty' uses the
        'satellite' projection, which looks nicer but renders more slowly.
    t_str (string): title to appear at the top of the map.  The default
        is '124x124 quick plot'.
    n_levs (integer): number of contour levels for the map (default is 9)
    vmin (real): minimum value for color scale; if unspecified uses
        field_124x124.min()
    vmax (real): maximum value for color scale; if unspecified uses
        field_124x124.max()
    cmap: matplotlib colormap (default 'Blues')
    norm: optional matplotlib normalization for the color scale
    maskoceans_switch (bool): if True, mask ocean cells (at full
        resolution) before plotting
    label_latlon (bool): whether to label latitude/longitude lines
    cbar_fmt_str (string): printf-style format for colorbar labels

    RETURNS:
    Object of `class Mapper124x124`
    """
    self.map = NAMapFigure(t_str=t_str, cb_axis=True,
                           fast_or_pretty=fast_or_pretty,
                           label_latlon=label_latlon)
    if maskoceans_switch:
        self.field_124x124 = maskoceans(
            self.lon_stem, self.lat_stem, self.field_124x124,
            resolution='f')
    cm = self.map.map.pcolor(self.lon_stem, self.lat_stem,
                             self.field_124x124,
                             vmin=vmin, vmax=vmax,
                             cmap=cmap, norm=norm, latlon='True')
    plt.colorbar(cm, cax=self.map.ax_cmap, format=cbar_fmt_str)
    return (self)
def myplot(data, lat, lon):
    # Draw the field on a cylindrical map of the region (90W-30W, 60S-15N):
    # first unmasked, then redrawn with the ocean masked out, with a Brazil
    # shapefile overlay and a discrete colorbar.
    mymap = Basemap(projection='cyl', llcrnrlat=-60., urcrnrlat=15.,
                    llcrnrlon=-90., urcrnrlon=-30., resolution='c',
                    suppress_ticks=True)
    mymap.drawmeridians(np.arange(-160., 161., 10.), labels=[0, 0, 0, 1],
                        linewidth=0., fontsize=8)
    mymap.drawparallels(np.arange(-90., 91., 10.), labels=[1, 0, 0, 0],
                        linewidth=0., fontsize=8)
    lons, lats = np.meshgrid(lon, lat)
    x, y = mymap(lons, lats)

    levs = [0., 50., 100., 200., 300., 500., 700., 900., 1000.]
    # discrete palette; first/last entries become the under/over colors
    barcolor = ('#CC3333', '#FF6633', '#FF9933', '#FFFF99', '#FFFFCC',
                '#CCFFCC', '#99FFCC', '#66FF66', '#00CC00', '#009900',
                '#003300')
    cpalunder = barcolor[0]
    cpalover = barcolor[-1]
    barcolor = barcolor[1:-1]
    my_cmap = c.ListedColormap(barcolor)
    my_cmap.set_under(cpalunder)
    my_cmap.set_over(cpalover)
    norm = BoundaryNorm(levs, ncolors=my_cmap.N, clip=True)

    #~ mymap.drawmapboundary(fill_color='grey')
    #~ mymap.drawcoastlines(linewidth=0.25)
    #~ mymap.drawcountries(linewidth=0.25)
    #~ mymap.fillcontinents(color='coral', lake_color='aqua')
    #~ mymap.fillcontinents(color='grey')

    cs = plt.pcolormesh(x, y, data, cmap=my_cmap, norm=norm, vmin=50)
    # NOTE(review): hold() was removed from recent matplotlib versions
    hold('on')
    # redraw with the ocean masked out
    data = maskoceans(lons, lats, data)
    cs = plt.pcolormesh(x, y, data, cmap=my_cmap, norm=norm)

    shapedir = "/home/marcelo/Anaconda/lib/python2.7/site-packages/PyFuncemeClimateTools-0.1.1-py2.7.egg/PyFuncemeClimateTools/shp/brazil"
    mymap.readshapefile(shapedir, 'brazil', drawbounds=True, linewidth=.5,
                        color='k')
    bar = mymap.colorbar(cs, location='right', spacing='uniform', ticks=levs,
                         extendfrac='auto', extend='both', pad="8%")
    bar.ax.tick_params(labelsize=8)
    plt.show()
    plt.close()
def main():
    # Plot drainage density for the selected region (Quebec or Africa),
    # masking cells with negative slope and ocean cells, and save to PDF.
    AFRIC = 1
    QUEBEC = 2
    varname = "drainage_density"
    region = QUEBEC
    if region == QUEBEC:
        data_path = "/home/huziy/skynet3_rech1/Netbeans Projects/Java/DDM/directions_with_drainage_density/directions_qc_dx0.1deg_5.nc"
        out_path = "qc_{0}_0.1deg.pdf".format(varname)
    elif region == AFRIC:
        data_path = "/home/huziy/skynet3_rech1/Netbeans Projects/Java/DDM/directions_africa_dx0.44deg.v3.nc"
        out_path = "af_{0}_0.44deg.pdf".format(varname)
    else:
        raise Exception("Unknown region...")

    #
    ds = Dataset(data_path)
    # trim a 20-cell halo from each edge of the domain
    data = ds.variables[varname][20:-20, 20:-20]
    lons = ds.variables["lon"][20:-20, 20:-20]
    lats = ds.variables["lat"][20:-20, 20:-20]
    slope = ds.variables["slope"][20:-20, 20:-20]

    fig = plt.figure()
    print(data.min(), data.max())
    ax = plt.gca()
    # hide cells with negative slope
    data = np.ma.masked_where(slope < 0, data)
    basemap = Crcm5ModelDataManager.get_rotpole_basemap_using_lons_lats(
        lons2d=lons, lats2d=lats)
    # longitudes in [-180, 180] for maskoceans
    lons[lons > 180] -= 360
    x, y = basemap(lons, lats)
    data = maskoceans(lons, lats, data, inlands=False)
    img = basemap.contourf(x, y, data, cmap=cm.get_cmap("jet", 10))
    ax.set_title("Drainage density")
    # colorbar in its own axis to the right of the map
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", "5%", pad="3%")
    cb = fig.colorbar(img, cax=cax)
    cax.set_title("(km**-1)")
    basemap.drawcoastlines(ax=ax)
    fig.tight_layout()
    fig.savefig(out_path)
def draw_ratio(map, ratio):
    """Draw the land-masked flux ratio on the given NA map, using a colormap
    centered on a ratio of 1.0, and attach a colorbar."""
    coord_file = os.path.join(os.environ['SARIKA_INPUT'], 'TOPO-124x124.nc')
    stem_lon, stem_lat, _topo = sp.parse_STEM_coordinates(coord_file)

    ratio = maskoceans(stem_lon, stem_lat, ratio)

    # diverge the colors around a ratio of exactly 1.0
    centered_norm = midpt_norm.MidpointNormalize(midpoint=1.0)
    mesh = map.map.pcolor(stem_lon, stem_lat, ratio,
                          vmin=-7,  # np.percentile(ratio, 1)
                          vmax=9,   # np.percentile(ratio, 99)
                          cmap=plt.get_cmap('PuOr'),
                          norm=centered_norm,
                          latlon=True)
    colorbar = plt.colorbar(mesh, ax=map.ax_map, extend='both',
                            ticks=np.arange(-7, 9, 2))
    colorbar.solids.set_edgecolor("face")
def get_ocean_mask(lats, lons):
    '''
    returns mask with true's over oceanic squares (and nans)
    '''
    # replace NaN coordinates with a point that is definitely ocean
    bad = np.isnan(lats)
    safe_lat = np.copy(lats)
    safe_lon = np.copy(lons)
    safe_lat[bad] = -50
    safe_lon[bad] = 100
    # the data argument to maskoceans is a dummy; only the mask is used
    ocean = maskoceans(safe_lon, safe_lat, safe_lon, inlands=False).mask
    # NaN coordinate squares count as ocean
    ocean[bad] = True
    return ocean
def get_to_plot(varname, data, lake_fraction=None, mask_oceans=True,
                lons=None, lats=None, difference=False, level_width_m=None):
    # This one is used if something is to be masked or changed before
    # plotting: applies the per-variable unit conversions and the
    # land/lake/ocean masking and returns the field ready to plot.
    if varname in ["STFL", "STFA"]:
        if lake_fraction is None or np.sum(lake_fraction) <= 0.01:
            # data1 = np.ma.masked_where(data < 0, data) if not difference else data
            return maskoceans(lonsin=lons, latsin=lats, datain=data)
        else:
            # hide streamflow where the cell is mostly lake
            data1 = np.ma.masked_where(lake_fraction >= GLOBAL_LAKE_FRACTION,
                                       data)
    elif varname == "PR":
        data1 = data * 24 * 60 * 60 * MILLIMETERS_PER_METER  # convert m/s to mm/day
    elif varname == "I0":
        data1 = data - 273.15  # convert to deg C
    elif varname in ["TRAF", "TDRA"]:
        data1 = data * 24 * 60 * 60  # convert mm/s to mm/day
    elif varname in ["I1", "IMAV", "I5"]:
        if varname == "I1":
            if level_width_m is not None:
                # scale soil moisture by the layer thickness, in mm
                data = level_width_m * data * MILLIMETERS_PER_METER
            else:
                pass
        # soil/snow variables keep inland-water cells unmasked
        return maskoceans(lonsin=lons, latsin=lats, datain=data, inlands=True)
    elif varname in ["HU", ]:
        data1 = data * GRAMS_PER_KILOGRAM  # convert to g/kg
    else:
        data1 = data

    if mask_oceans:
        assert lons is not None and lats is not None
        # atmospheric variables keep inland-water cells unmasked
        inlands = varname not in ["PR", "TT", "HU", "AV", "AH", "AS", "AI",
                                  "AD-AI", "AD", "AR"]
        return maskoceans(lonsin=lons, latsin=lats, datain=data1,
                          inlands=inlands)
    return data1
def subplot(self, axes, subfigure):
    """Render one ocean-masked contourf panel described by ``subfigure``
    into ``axes`` and decorate it (colorbar, title, graticule, coast, scale)."""
    from mpl_toolkits.basemap import maskoceans

    field, xs, ys, panel_title, contour_levels, cbar_label, map_kwargs = subfigure

    basemap, px, py = self.createMap(axes, xs, ys, map_kwargs)

    # draw land values only
    land_only = maskoceans(xs, ys, field, inlands=False)
    contours = basemap.contourf(px, py, land_only, levels=contour_levels,
                                extend='both')

    cbar = basemap.colorbar(contours, location='right', pad='5%',
                            ticks=contour_levels[::2], fig=self, ax=axes,
                            extend='both')
    cbar.set_label(cbar_label)

    axes.set_title(panel_title)
    self.labelAxes(axes)
    self.addGraticule(axes, basemap)
    self.addCoastline(basemap)
    self.addMapScale(basemap)
ax1 = fig.add_subplot(321) #ax1.set_title("M3",fontsize=20) #map = Basemap(projection ='cyl', llcrnrlat=-62, urcrnrlat=90,llcrnrlon=-180, urcrnrlon=180, resolution='c') map = Basemap(projection='cyl', llcrnrlat=-12, urcrnrlat=55, llcrnrlon=60, urcrnrlon=155, resolution='c') x, y = map(lon, lat) map.drawcoastlines() map.drawcountries() map.drawmapboundary() m3newp = maskoceans(x, y, m3newp) gg = m3newp / m3newa spamy[N.isnan(spamy)] = 0. spamy[N.isinf(spamy)] = 0. ryield = ma.masked_where(ryield <= 0.0, ryield) ryield = ma.masked_where(spamy <= 0.0, ryield) spamy = ma.masked_where(ryield <= 0.0, spamy) spamy = ma.masked_where(spamy <= 0.0, spamy) spamy = ma.masked_where(spamy >= 10.0**20, spamy) ryield = ma.masked_where(spamy <= 0.0, ryield) spamy = ma.masked_where(ryield <= 0.0, spamy) cs1 = map.pcolormesh(x, y, ryield, cmap=plt.cm.jet, vmin=0, vmax=8) plt.axis('off') #cbar = map.colorbar(cs1,location='bottom',size="5%",pad="2%")
def main():
    """Validate two CRCM5 simulations (with/without lakes) against
    observations (ANUSPLIN temperature, Ross Brown SWE), plotting seasonal
    bias maps with gauging stations and their upstream areas overlaid.
    """
    # Define the simulations to be validated
    r_config = RunConfig(
        data_path=
        "/RESCUE/skynet3_rech1/huziy/hdf_store/quebec_0.1_crcm5-hcd-r.hdf5",
        start_year=1990, end_year=2010, label="CRCM5-L1")
    r_config_list = [r_config]
    r_config = RunConfig(
        data_path=
        "/RESCUE/skynet3_rech1/huziy/hdf_store/quebec_0.1_crcm5-r.hdf5",
        start_year=1990, end_year=2010, label="CRCM5-NL")
    r_config_list.append(r_config)

    # Basemap/grid info is read from the last (NL) run; assumes both runs
    # share the same grid.
    bmp_info = analysis.get_basemap_info_from_hdf(file_path=r_config.data_path)
    bmp_info.should_draw_grey_map_background = True
    bmp_info.should_draw_basin_boundaries = False
    bmp_info.map_bg_color = "0.75"

    station_ids = ["104001", "093806", "093801", "081002", "081007", "080718"]

    # get river network information used in the model
    flow_directions = analysis.get_array_from_file(
        r_config.data_path, var_name=infovar.HDF_FLOW_DIRECTIONS_NAME)
    accumulation_area_km2 = analysis.get_array_from_file(
        path=r_config.data_path, var_name=infovar.HDF_ACCUMULATION_AREA_NAME)
    cell_manager = CellManager(flow_dirs=flow_directions,
                               lons2d=bmp_info.lons, lats2d=bmp_info.lats,
                               accumulation_area_km2=accumulation_area_km2)

    # Get the list of stations to indicate on the bias map
    stations = cehq_station.read_station_data(start_date=None, end_date=None,
                                              selected_ids=station_ids)
    """:type : list[Station]"""

    xx, yy = bmp_info.get_proj_xy()
    station_to_modelpoint = cell_manager.get_model_points_for_stations(
        station_list=stations)
    upstream_edges = cell_manager.get_upstream_polygons_for_points(
        model_point_list=station_to_modelpoint.values(), xx=xx, yy=yy)

    bmp_info.draw_colorbar_for_each_subplot = True

    # Validate temperature, precip and swe
    obs_path_anusplin = "/home/huziy/skynet3_rech1/anusplin_links"
    obs_path_swe = "data/swe_ross_brown/swe.nc"
    model_var_to_obs_path = OrderedDict([("TT", obs_path_anusplin),
                                         ("I5", obs_path_swe)])
    model_var_to_season = OrderedDict([
        ("TT", OrderedDict([("Spring", range(3, 6))])),
        ("I5", OrderedDict([("Winter", [1, 2, 12])]))
    ])

    vname_to_obs_data = {}

    # parameters that won't change in the loop over variable names
    params_const = dict(rconfig=r_config, bmp_info=bmp_info)

    for vname, obs_path in model_var_to_obs_path.items():
        season_to_obs_data = get_seasonal_clim_obs_data(
            vname=vname, obs_path=obs_path,
            season_to_months=model_var_to_season[vname], **params_const)
        # Comment swe over lakes, since I5 calculated only for land
        if vname in ["I5", ]:
            for season in season_to_obs_data:
                season_to_obs_data[season] = maskoceans(
                    bmp_info.lons, bmp_info.lats, season_to_obs_data[season],
                    inlands=True)
        vname_to_obs_data[vname] = season_to_obs_data

    # Plotting
    plot_all_vars_in_one_fig = True
    fig = None
    gs = None
    if plot_all_vars_in_one_fig:
        plot_utils.apply_plot_params(font_size=12, width_pt=None,
                                     width_cm=25, height_cm=20)
        fig = plt.figure()
        # Extra (narrow) column holds the shared colorbars.
        ncols = len(model_var_to_obs_path) + 1
        gs = GridSpec(len(r_config_list), ncols,
                      width_ratios=(ncols - 1) * [1., ] + [0.05, ])
    else:
        plot_utils.apply_plot_params(font_size=12, width_pt=None,
                                     width_cm=25, height_cm=25)

    station_x_list = []
    station_y_list = []

    mvarname_to_cs = {}
    for row, r_config in enumerate(r_config_list):
        for col, mname in enumerate(model_var_to_obs_path):
            row_axes = [fig.add_subplot(gs[row, col]), ]
            mvarname_to_cs[mname] = compare_vars(
                vname_model=mname, vname_to_obs=vname_to_obs_data,
                r_config=r_config,
                season_to_months=model_var_to_season[mname],
                bmp_info_agg=bmp_info, axes_list=row_axes)

            # -1 in order to exclude colorbars
            for the_ax in row_axes:
                the_ax.set_title(the_ax.get_title() + ", {}".format(
                    infovar.get_long_display_label_for_var(mname)))
                # Need titles only for the first row
                if row > 0:
                    the_ax.set_title("")
                if col == 0:
                    the_ax.set_ylabel(r_config.label)
                else:
                    the_ax.set_ylabel("")

                draw_upstream_area_bounds(the_ax, upstream_edges, color="g")

                # Project the station coordinates once, then reuse them.
                if len(station_x_list) == 0:
                    for the_station in stations:
                        xst, yst = bmp_info.basemap(the_station.longitude,
                                                    the_station.latitude)
                        station_x_list.append(xst)
                        station_y_list.append(yst)

                bmp_info.basemap.scatter(station_x_list, station_y_list,
                                         c="g", ax=the_ax, s=20, zorder=10,
                                         alpha=0.5)

    # Save the figure if necessary
    if plot_all_vars_in_one_fig:
        if not img_folder.is_dir():
            img_folder.mkdir(parents=True)
        fig_path = img_folder.joinpath("{}.png".format(
            "_".join(model_var_to_obs_path)))
        with fig_path.open("wb") as figfile:
            fig.savefig(figfile, format="png", bbox_inches="tight")
        plt.close(fig)
def mask_oceans_124x124(data):
    """Return ``data`` with ocean grid cells masked on the 124x124 STEM grid.

    The longitude/latitude arrays are taken from the STEM domain definition.
    """
    domain = STEM_Domain()
    return maskoceans(domain.get_lon(), domain.get_lat(), data)
# Plot production (yield * harvested area) on a global cylindrical map.
# ``lon2``, ``lat2``, ``iyield``, ``iarea`` and ``yield_new`` are defined
# earlier in the script (not visible in this chunk).
map = Basemap(projection='cyl', llcrnrlat=-65, urcrnrlat=90, llcrnrlon=-180, urcrnrlon=180, resolution='c')
# 'cyl' projection: x/y equal lon/lat, so they can be used with maskoceans.
x, y = map(lon2, lat2)
# Mask non-positive yields/areas and cross-apply the area mask to yield.
iyield = ma.masked_where(iyield <= 0, iyield)
iarea = ma.masked_where(iarea <= 0, iarea)
iyield = ma.masked_where(iarea <= 0, iyield)
map.drawcoastlines()
map.drawcountries()
map.drawmapboundary()
#yield_new = ma.masked_where(yield_new<=0,yield_new)
yield_new = maskoceans(x, y, yield_new)
#yield_new[N.isnan(yield_new)] = -9999
# Fill the ocean mask with zeros, then re-mask non-positive cells so only
# land cells with data remain.
yield_new = ma.filled(yield_new, fill_value=0.)
#yield_new[N.isnan(yield_new)] = -9999
yield_new = ma.masked_where(yield_new <= 0, yield_new)
yield_new = ma.masked_where(iyield <= 0, yield_new)
#cs1 = map.pcolormesh(x,y,yield_new*iarea/100/10,cmap=plt.cm.YlGn,vmin=0.01,vmax=0.15)
# yield * area with unit scaling; square-root color normalization to spread
# the low end of the range.
cs1 = map.pcolormesh(x, y, yield_new * iarea / 100 / 10 * 1000, cmap=plt.cm.YlGn, norm=colors.PowerNorm(gamma=1. / 2.), vmin=0, vmax=50)
cbar = map.colorbar(cs1, location='bottom', size="5%", pad="2%")
urcrnrlat=52,
    # NOTE(review): this chunk starts inside a Basemap(...) call whose
    # opening is outside the visible source.
    lat_ts=35)  # latitude of the true scale
# draw coastlines, state and country boundaries, edge of map.
m.drawcoastlines()
m.drawstates()
m.drawcountries()
parallels = np.arange(0., 90, 10.)
m.drawparallels(parallels, labels=[1, 0, 0, 0], fontsize=10)
meridians = np.arange(180., 360., 10.)
m.drawmeridians(meridians, labels=[0, 0, 0, 1], fontsize=10)
# Build the 2D coordinate grids from the 1D box edges; note the transposed
# meshgrid order (lat from boxlat, lon from boxlon).
maplat, maplon = np.meshgrid(boxlat, boxlon)
x, y = m(maplon, maplat)  # compute map proj coordinates.
clevs = np.linspace(0, 4000, 50)
mask_ocean = True
if mask_ocean:
    # do not plot the contour over water
    newdata = maskoceans(maplon, maplat, data)
    cs = m.contourf(x, y, newdata, clevs, cmap='BrBG')
else:
    cs = m.contourf(x, y, data, clevs, cmap='BrBG')
# Transparent land, white ocean: paints the remaining ocean pixels white.
m.drawlsmask(land_color=(0, 0, 0, 0), ocean_color='w', lakes=True)
# Overlay station locations from the dataframe ``dfs``.
sc = m.scatter(dfs['LON'].values, dfs['LAT'].values, marker='o', c='red', s=1, zorder=10, latlon=True)
cbar = m.colorbar(cs, location='bottom', pad="8%")
cbar.set_label('Elevation [m MSL]')
plt.title('Mean elevation', fontsize=12)
# fig.tight_layout()
def yieldout(year):
    # Compute the area-weighted ISAM rice yield map for a given year by
    # combining the rainfed and irrigated simulations, weighted by the
    # corresponding crop fractions from the historical land-use file.
    # NOTE: Python 2 code (uses a `print` statement below).
    bb = year - 1900  # index into the yearly records (year 1901 -> index 0)
    region1 = NetCDFFile(
        '/scratch2/scratchdirs/tslin2/plot/globalcrop/data/clm/HistoricalGLM_crop_150901.nc',
        'r')
    # Land-use data ends at index 104; later years reuse the last record.
    if bb <= 105:
        maitrop = region1.variables['rice'][bb - 1, :, :]
        maitropi = region1.variables['rice_irrig'][bb - 1, :, :]
    else:
        maitrop = region1.variables['rice'][104, :, :]
        maitropi = region1.variables['rice_irrig'][104, :, :]
    # Replace non-positive fractions with exact zeros.
    maitrop = ma.masked_where(maitrop <= 0, maitrop)
    maitrop = ma.filled(maitrop, fill_value=0.)
    gridarea = region1.variables['area'][:, :]
    maitropi = ma.masked_where(maitropi <= 0, maitropi)
    maitropi = ma.filled(maitropi, fill_value=0.)

    # r = rainfed fraction, i = irrigated fraction, to = total.
    maizetor = maitrop
    maizetoi = maitropi
    maizetrop = maitrop + maitropi
    maizeto = maitrop + maitropi

    isam = NetCDFFile(
        '/scratch2/scratchdirs/tslin2/isam/rice_cheyenne/his_cru/egu_new/ric_fert/output/ric_fert.nc',
        'r')
    clmtropf = isam.variables['totalyield'][bb - 1, :, :]
    clm2 = NetCDFFile(
        '/scratch2/scratchdirs/tslin2/isam/rice_cheyenne/his_cru/egu_new/ric_irr_fert/output/ric_irr_fert.nc',
        'r')
    clmtropfi = clm2.variables['totalyield'][bb - 1, :, :]
    lonisam = clm2.variables['lon'][:]
    # Shift both yield grids from 0..360 to -180..180 longitudes.
    clmtropf, lonisam1 = shiftgrid(180.5, clmtropf, lonisam, start=False)
    clmtropfi, lonisam1 = shiftgrid(180.5, clmtropfi, lonisam, start=False)
    print lonisam1
    # Keep yields only where the corresponding crop fraction is positive.
    clmtropf = ma.masked_where(maitrop <= 0, clmtropf)
    clmtropf = ma.filled(clmtropf, fill_value=0.)
    clmtropfi = ma.masked_where(maitropi <= 0, clmtropfi)
    clmtropfi = ma.filled(clmtropfi, fill_value=0.)

    yield_clmtf = clmtropf
    yield_clmtf = ma.masked_where(yield_clmtf <= 0, yield_clmtf)
    yield_clmtf = ma.filled(yield_clmtf, fill_value=0.)
    yield_clmtfi = clmtropfi
    yield_clmtfi = ma.masked_where(yield_clmtfi <= 0, yield_clmtfi)
    yield_clmtfi = ma.filled(yield_clmtfi, fill_value=0.)

    # Grid-cell areas on the ISAM half-degree grid, shifted the same way.
    area = NetCDFFile(
        '/scratch2/scratchdirs/tslin2/plot/globalcrop/data/gridareahalf_isam.nc',
        'r')
    gridarea = area.variables['cell_area'][:, :]
    gridlon = area.variables['lon'][:]
    gridlat = area.variables['lat'][:]
    gridarea, gridlon = shiftgrid(180.5, gridarea, gridlon, start=False)
    lon2, lat2 = N.meshgrid(gridlon, gridlat)
    map = Basemap(projection='cyl', llcrnrlat=-65, urcrnrlat=90, llcrnrlon=-180, urcrnrlon=180, resolution='c')
    # 'cyl' projection: x/y equal lon/lat, usable with maskoceans.
    x, y = map(lon2, lat2)
    yield_clmtf = maskoceans(x, y, yield_clmtf)
    yield_clmtf = ma.masked_where(maizeto <= 0, yield_clmtf)
    yield_clmtfi = maskoceans(x, y, yield_clmtfi)
    yield_clmtfi = ma.masked_where(maizeto <= 0, yield_clmtfi)
    # Area-weighted average of rainfed and irrigated yields.
    clmy = ((yield_clmtf * maizetor * gridarea) +
            (yield_clmtfi * maizetoi * gridarea)) / ((maizetoi * gridarea) +
                                                     (maizetor * gridarea))
    return clmy
# Combine rainfed (yield_new) and irrigated (yield_new2) yields into an
# area-weighted average and plot it.  ``lon2``, ``lat2``, ``iyield``,
# ``iarea``, ``maizeto``, ``maizetor``, ``maizetoi``, ``gridarea`` and
# ``iizumy`` are defined earlier in the script (not visible here).
map = Basemap(projection='cyl', llcrnrlat=-65, urcrnrlat=90, llcrnrlon=-180, urcrnrlon=180, resolution='c')
# 'cyl' projection: x/y equal lon/lat, usable with maskoceans below.
x, y = map(lon2, lat2)
iyield = ma.masked_where(iyield <= 0, iyield)
iarea = ma.masked_where(iarea <= 0, iarea)
#iyield = ma.masked_where(iarea<=0,iyield)
map.drawcoastlines()
map.drawcountries()
map.drawmapboundary()
#yield_new = ma.masked_where(yield_new<=0,yield_new)
# Mask oceans, fill with zero, then keep only positive land cells with a
# positive total crop fraction.
yield_new = maskoceans(x, y, yield_new)
yield_new = ma.filled(yield_new, fill_value=0.)
yield_new = ma.masked_where(yield_new <= 0, yield_new)
yield_new = ma.masked_where(maizeto <= 0, yield_new)
# Same treatment for the irrigated yield field.
yield_new2 = maskoceans(x, y, yield_new2)
yield_new2 = ma.filled(yield_new2, fill_value=0.)
yield_new2 = ma.masked_where(yield_new2 <= 0, yield_new2)
yield_new2 = ma.masked_where(maizeto <= 0, yield_new2)
# Area-weighted combination of rainfed and irrigated yields.
isamy = ((yield_new * maizetor * gridarea) +
         (yield_new2 * maizetoi * gridarea)) / ((maizetoi * gridarea) +
                                                (maizetor * gridarea))
# Restrict to cells where the reference (Iizumi) dataset has data.
isamy = ma.masked_where(iizumy <= 0, isamy)
cs1 = map.pcolormesh(x, y, isamy, cmap=plt.cm.YlGn, vmin=0, vmax=16)
def main():
    """Compare active-layer thickness (ALT) from two CLASS offline runs —
    one with variable depth to bedrock and one with a fixed 3.6 m depth —
    and plot the difference together with the winter SWE difference.
    """
    soiltemp_var_name = "TBAR"
    path1 = "/skynet1_rech3/huziy/class_offline_simulations_VG/dpth_var/CLASS_output_CLASSoff1_Arctic_0.5_ERA40_dpth_to_bdrck_var_1980-2009/TBAR.rpn"
    label1 = "Variable (real) depth to bedrock"

    path2 = "/skynet1_rech3/huziy/class_offline_simulations_VG/dpth_3.6m/CLASS_output_CLASSoff1_Arctic_0.5_ERA40_dpth_to_bdrck_constant_1980-2009/TBAR.rpn"
    label2 = "Fixed depth to bedrock (3.6 m)"

    path_to_dpth_to_bedrock = "/skynet1_rech3/huziy/CLASS_offline_VG/GEOPHYSICAL_FIELDS/test_analysis.rpn"

    # read depth to bedrock
    r = RPN(path_to_dpth_to_bedrock)
    dpth = r.get_first_record_for_name("DPTH")
    r.close()

    # Soil layer thicknesses (m) used by the CRCM data manager to convert
    # temperature profiles into ALT.
    layer_widths = [
        0.1, 0.2, 0.3, 0.5, 0.9, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5,
        1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5,
        1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5,
        1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5
    ]
    print(len(layer_widths))
    crcm_data_manager = CRCMDataManager(layer_widths=layer_widths)

    # Read in the temperature profiles
    r1 = RPN(path1)
    soiltemp1 = r1.get_4d_field(name=soiltemp_var_name)
    lons2d, lats2d = r1.get_longitudes_and_latitudes_for_the_last_read_rec()
    rll = RotatedLatLon(**r1.get_proj_parameters_for_the_last_read_rec())
    b = rll.get_basemap_object_for_lons_lats(lons2d=lons2d, lats2d=lats2d,
                                             resolution="c")
    r1.close()

    r2 = RPN(path2)
    soiltemp2 = r2.get_4d_field(name=soiltemp_var_name)
    r2.close()

    dates_sorted = list(sorted(soiltemp1.keys()))
    levels_sorted = list(sorted(list(soiltemp1.items())[0][1].keys()))

    # group dates for each year
    alt1_list = []
    alt2_list = []
    for year, dates_group in itertools.groupby(dates_sorted,
                                               lambda par: par.year):
        t1 = []
        t2 = []
        for d in dates_group:
            t1.append([soiltemp1[d][lev] for lev in levels_sorted])
            t2.append([soiltemp2[d][lev] for lev in levels_sorted])

        # Get maximum temperature profiles
        # (annual max per level; transpose to (y, x, level))
        t1max = np.max(t1, axis=0).transpose(1, 2, 0)
        t2max = np.max(t2, axis=0).transpose(1, 2, 0)

        print(t1max.shape, t2max.shape)

        #calculate and save alts
        h1 = crcm_data_manager.get_alt(t1max)
        h2 = crcm_data_manager.get_alt(t2max)
        # Negative ALT means no thaw detected; mark as NaN for masking.
        h1[h1 < 0] = np.nan
        h2[h2 < 0] = np.nan
        alt1_list.append(h1)
        alt2_list.append(h2)

    #take into account permafrost
    # ALT over a sliding window of n years = max over the window.
    alt1_list_pf = []
    alt2_list_pf = []
    n_years_for_pf = 3
    for i in range(len(alt1_list) - n_years_for_pf):
        alt1_list_pf.append(np.max(alt1_list[i:i + n_years_for_pf], axis=0))
        alt2_list_pf.append(np.max(alt2_list[i:i + n_years_for_pf], axis=0))

    # calculate climatological mean
    alt1 = np.mean(alt1_list_pf, axis=0)
    alt2 = np.mean(alt2_list_pf, axis=0)

    print(np.isnan(alt1).any(), np.isnan(alt2).any())

    #mask nans
    alt1 = np.ma.masked_where(np.isnan(alt1), alt1)
    alt2 = np.ma.masked_where(np.isnan(alt2), alt2)

    #calculate change due to fixed depth to bedrock
    delta = alt2 - alt1

    #
    for i in range(len(alt1_list)):
        alt1_list[i] = np.ma.masked_where(np.isnan(alt1_list[i]),
                                          alt1_list[i])
        alt2_list[i] = np.ma.masked_where(np.isnan(alt2_list[i]),
                                          alt2_list[i])

    #tval, pval = ttest_ind(alt1_list, alt2_list)

    #mask oceans
    # maskoceans expects longitudes in [-180, 180].
    lons2d[lons2d > 180] -= 360
    dpth = maskoceans(lons2d, lats2d, dpth)
    delta = np.ma.masked_where(dpth.mask, delta)

    #calculate differences in SWE
    path_swe_1 = path1.replace("TBAR.rpn", "SNO.rpn")
    r1 = RPN(path_swe_1)
    swe1 = r1.get_all_time_records_for_name("SNO")
    r1.close()
    # Winter (DJF) climatology; >= 999 treated as missing.
    swe1_winter_clim = np.mean(
        [field for key, field in swe1.items() if key.month in [1, 2, 12]],
        axis=0)
    swe1_winter_clim = np.ma.masked_where(
        (swe1_winter_clim >= 999) | dpth.mask, swe1_winter_clim)

    path_swe_2 = path2.replace("TBAR.rpn", "SNO.rpn")
    r2 = RPN(path_swe_2)
    swe2 = r2.get_all_time_records_for_name("SNO")
    r2.close()
    swe2_winter_clim = np.mean(
        [field for key, field in swe2.items() if key.month in [1, 2, 12]],
        axis=0)
    swe2_winter_clim = np.ma.masked_where(
        (swe2_winter_clim >= 999) | dpth.mask, swe2_winter_clim)

    #plotting
    print("Start plotting ...")
    plot_differences(b, lons2d, lats2d, dpth, delta,
                     label="ALT(DPTH=3.6m) - ALT(DPTH~)", pvalue=None,
                     swe_diff=swe2_winter_clim - swe1_winter_clim)
v = perecm_map_tot_new[:, :] - perecm_map_tot[:, :] #v = perecm_map_tot_new[:,:] #v = perecm_map_tot[:,:] x2 = np.linspace(x[0][0], x[0][-1], x.shape[1] * 20) y2 = np.linspace(y[0][0], y[-1][0], y.shape[0] * 30) #x2 = np.linspace(x[0][0],x[0][-1],x.shape[1]*2) #y2 = np.linspace(y[0][0],y[-1][0],y.shape[0]*3) x2, y2 = np.meshgrid(x2, y2) X2, Y2 = m(x2, y2) data2 = interp(v, x[0], y[:, 0], x2, y2, order=1) mdata = maskoceans(x2, y2, data2, resolution='h', grid=1.25, inlands=True) #mdata = maskoceans(x, y, v) #lats = lat #lons = lon #lon_len = len(perecm_map_tot[0,:]) #lat_len = len(perecm_map_tot[:,0]) #root = Dataset('shi_ecm_pft_1p9x2p5.nc','w',format='NETCDF4') #root.description = 'ECM percentage by PFT based on Shi et al. (2016)' #root.createDimension('lon',lon_len) #root.createDimension('lat',lat_len) #root.createDimension('z',18) #latitudes = root.createVariable('lat', 'f4', ('lat',)) #longitudes = root.createVariable('lon', 'f4', ('lon',)) #var_shi = root.createVariable('perecm (fraction)', 'f4', ('z','lat','lon',),fill_value=-1e+20) #var_shi = root.createVariable('perecm (fraction)', 'f4', ('lat','lon',),fill_value=-1e+20)
sys.exit() #x2 = np.linspace(x[0][0],x[0][-1],x.shape[1]*20) #y2 = np.linspace(y[0][0],y[-1][0],y.shape[0]*30) x2 = np.linspace(x[0][0], x[0][-1], x.shape[1] * 40) y2 = np.linspace(y[0][0], y[-1][0], y.shape[0] * 60) x2, y2 = np.meshgrid(x2, y2) X2, Y2 = m(x2, y2) #order=0 for nearest-neighbor, order=1 for bilinear, order=3 cubic data2 = interp(v, x[0], y[:, 0], x2, y2, order=1) mdata = maskoceans(x2, y2, data2, resolution='l', grid=1.25, inlands=True) #mdata = maskoceans(x, y, v) #My colorbar upper = plt.cm.jet(np.arange(256)) lower = np.ones((int(256 / 4), 4)) for i in range(3): lower[:, i] = np.linspace(1, upper[0, i], lower.shape[0]) cmap = np.vstack((lower, upper)) cmap = ListedColormap(cmap, name='myColorMap', N=cmap.shape[0])
def load_data(self):
    """Load and prepare the map data for plotting/export.

    Chooses a basemap projection from the requested bounds, builds a
    lat/lon grid, reads the main variable (plus optional quiver and contour
    overlays) from the dataset, optionally subtracts a comparison dataset,
    and masks land (or cells shallower than the selected depth).
    Side effects: sets many attributes on self (basemap, longitude,
    latitude, data, quiver_*, contour_*, bathymetry, timestamp,
    depth_value, ...).
    """
    distance = VincentyDistance()
    # Geodesic extent of the requested bounds, padded by 25% (metres).
    height = distance.measure(
        (self.bounds[0], self.centroid[1]),
        (self.bounds[2], self.centroid[1])) * 1000 * 1.25
    width = distance.measure(
        (self.centroid[0], self.bounds[1]),
        (self.centroid[0], self.bounds[3])) * 1000 * 1.25

    if self.projection == 'EPSG:32661':
        # North-polar stereographic request
        near_pole, covers_pole = self.pole_proximity(self.points[0])
        blat = min(self.bounds[0], self.bounds[2])
        blat = 5 * np.floor(blat / 5)
        if self.centroid[0] > 80 or near_pole or covers_pole:
            self.basemap = basemap.load_map(
                'npstere', self.centroid, height, width,
                min(self.bounds[0], self.bounds[2]))
        else:
            self.basemap = basemap.load_map('lcc', self.centroid, height,
                                            width)
    elif self.projection == 'EPSG:3031':
        # South-polar stereographic request
        near_pole, covers_pole = self.pole_proximity(self.points[0])
        blat = max(self.bounds[0], self.bounds[2])
        blat = 5 * np.ceil(blat / 5)
        if ((self.centroid[0] < -80 or self.bounds[1] < -80
             or self.bounds[3] < -80) or covers_pole):
            # is centerered close to the south pole
            self.basemap = basemap.load_map('spstere', (blat, 180), height,
                                            width)
        else:
            self.basemap = basemap.load_map(
                'lcc', self.centroid, height, width,
                max(self.bounds[0], self.bounds[2]))
    elif abs(self.centroid[1] - self.bounds[1]) > 90:
        # Very wide request: fall back to Mercator with buffered bounds.
        height_bounds = [self.bounds[0], self.bounds[2]]
        width_bounds = [self.bounds[1], self.bounds[3]]
        height_buffer = (abs(height_bounds[1] - height_bounds[0])) * 0.1
        width_buffer = (abs(width_bounds[0] - width_bounds[1])) * 0.1

        if abs(width_bounds[1] - width_bounds[0]) > 360:
            raise ClientError(
                gettext(
                    "You have requested an area that exceads the width of the world. \
Thinking big is good but plots need to be less the 360 deg wide."))

        # NOTE(review): both branches of each if/else below do the same
        # thing, so the sign tests on height_bounds are redundant.
        if height_bounds[1] < 0:
            height_bounds[1] = height_bounds[1] + height_buffer
        else:
            height_bounds[1] = height_bounds[1] + height_buffer
        if height_bounds[0] < 0:
            height_bounds[0] = height_bounds[0] - height_buffer
        else:
            height_bounds[0] = height_bounds[0] - height_buffer

        new_width_bounds = []
        new_width_bounds.append(width_bounds[0] - width_buffer)
        new_width_bounds.append(width_bounds[1] + width_buffer)

        # Shrink the buffer if padding pushed the span past 360 degrees.
        if abs(new_width_bounds[1] - new_width_bounds[0]) > 360:
            width_buffer = np.floor(
                (360 - abs(width_bounds[1] - width_bounds[0])) / 2)
            new_width_bounds[0] = width_bounds[0] - width_buffer
            new_width_bounds[1] = width_bounds[1] + width_buffer

        if new_width_bounds[0] < -360:
            new_width_bounds[0] = -360
        if new_width_bounds[1] > 720:
            new_width_bounds[1] = 720

        self.basemap = basemap.load_map(
            'merc', self.centroid, (height_bounds[0], height_bounds[1]),
            (new_width_bounds[0], new_width_bounds[1]))
    else:
        self.basemap = basemap.load_map('lcc', self.centroid, height, width)

    # ~500-pixel grid, aspect-corrected.
    if self.basemap.aspect < 1:
        gridx = 500
        gridy = int(500 * self.basemap.aspect)
    else:
        gridy = 500
        gridx = int(500 / self.basemap.aspect)

    self.longitude, self.latitude = self.basemap.makegrid(gridx, gridy)

    with open_dataset(get_dataset_url(self.dataset_name)) as dataset:
        # Negative time indexes from the end, then clamp to valid range.
        if self.time < 0:
            self.time += len(dataset.timestamps)
        self.time = np.clip(self.time, 0, len(dataset.timestamps) - 1)

        self.variable_unit = self.get_variable_units(
            dataset, self.variables)[0]
        self.variable_name = self.get_variable_names(dataset,
                                                     self.variables)[0]
        scale_factor = self.get_variable_scale_factors(
            dataset, self.variables)[0]

        if self.cmap is None:
            if len(self.variables) == 1:
                self.cmap = colormap.find_colormap(self.variable_name)
            else:
                # Two variables -> vector magnitude -> speed colormap.
                self.cmap = colormap.colormaps.get('speed')

        if len(self.variables) == 2:
            self.variable_name = self.vector_name(self.variable_name)

        if self.depth == 'bottom':
            depth_value = 'Bottom'
        else:
            self.depth = np.clip(int(self.depth), 0,
                                 len(dataset.depths) - 1)
            depth_value = dataset.depths[self.depth]

        data = []
        allvars = []
        for v in self.variables:
            var = dataset.variables[v]
            allvars.append(v)
            if self.filetype in ['csv', 'odv', 'txt']:
                # Tabular export also needs the actual depth of each point.
                d, depth_value = dataset.get_area(np.array(
                    [self.latitude, self.longitude]), self.depth, self.time,
                                                  v, self.interp,
                                                  self.radius,
                                                  self.neighbours,
                                                  return_depth=True)
            else:
                d = dataset.get_area(
                    np.array([self.latitude, self.longitude]), self.depth,
                    self.time, v, self.interp, self.radius, self.neighbours)

            d = np.multiply(d, scale_factor)
            self.variable_unit, d = self.kelvin_to_celsius(
                self.variable_unit, d)
            data.append(d)
            if self.filetype not in ['csv', 'odv', 'txt']:
                if len(var.dimensions) == 3:
                    self.depth_label = ""
                elif self.depth == 'bottom':
                    self.depth_label = " at Bottom"
                else:
                    self.depth_label = " at " + \
                        str(int(np.round(depth_value))) + " m"

        # Two components -> magnitude.
        if len(data) == 2:
            data[0] = np.sqrt(data[0]**2 + data[1]**2)

        self.data = data[0]

        quiver_data = []
        # Store the quiver data on the same grid as the main variable. This
        # will only be used for CSV export.
        quiver_data_fullgrid = []

        if self.quiver is not None and \
            self.quiver['variable'] != '' and \
                self.quiver['variable'] != 'none':
            for v in self.quiver['variable'].split(','):
                allvars.append(v)
                var = dataset.variables[v]
                quiver_unit = get_variable_unit(self.dataset_name, var)
                quiver_name = get_variable_name(self.dataset_name, var)
                # Coarse 50x50 grid for the arrows themselves.
                quiver_lon, quiver_lat = self.basemap.makegrid(50, 50)
                d = dataset.get_area(
                    np.array([quiver_lat, quiver_lon]),
                    self.depth,
                    self.time,
                    v,
                    self.interp,
                    self.radius,
                    self.neighbours,
                )
                quiver_data.append(d)
                # Get the quiver data on the same grid as the main
                # variable.
                d = dataset.get_area(
                    np.array([self.latitude, self.longitude]),
                    self.depth,
                    self.time,
                    v,
                    self.interp,
                    self.radius,
                    self.neighbours,
                )
                quiver_data_fullgrid.append(d)

            self.quiver_name = self.vector_name(quiver_name)
            self.quiver_longitude = quiver_lon
            self.quiver_latitude = quiver_lat
            self.quiver_unit = quiver_unit
        self.quiver_data = quiver_data
        self.quiver_data_fullgrid = quiver_data_fullgrid

        # All surface (3D) variables -> force surface depth index.
        # NOTE(review): under Python 3, all(map(...)) over a lambda works,
        # but see the np.where(map(...)) usage near the end of this method,
        # which is Python-2-only; this module appears to target Python 2.
        if all(
                map(lambda v: len(dataset.variables[v].dimensions) == 3,
                    allvars)):
            self.depth = 0

        contour_data = []
        if self.contour is not None and \
            self.contour['variable'] != '' and \
                self.contour['variable'] != 'none':
            d = dataset.get_area(
                np.array([self.latitude, self.longitude]),
                self.depth,
                self.time,
                self.contour['variable'],
                self.interp,
                self.radius,
                self.neighbours,
            )
            contour_unit = get_variable_unit(
                self.dataset_name,
                dataset.variables[self.contour['variable']])
            contour_name = get_variable_name(
                self.dataset_name,
                dataset.variables[self.contour['variable']])
            contour_factor = get_variable_scale_factor(
                self.dataset_name,
                dataset.variables[self.contour['variable']])
            contour_unit, d = self.kelvin_to_celsius(contour_unit, d)
            d = np.multiply(d, contour_factor)
            contour_data.append(d)
            self.contour_unit = contour_unit
            self.contour_name = contour_name

        self.contour_data = contour_data

        self.timestamp = dataset.timestamps[self.time]

    if self.compare:
        # Subtract the comparison dataset's field from the main one.
        self.variable_name += " Difference"
        with open_dataset(get_dataset_url(
                self.compare['dataset'])) as dataset:
            data = []
            for v in self.compare['variables']:
                var = dataset.variables[v]
                d = dataset.get_area(
                    np.array([self.latitude, self.longitude]),
                    self.compare['depth'],
                    self.compare['time'],
                    v,
                    self.interp,
                    self.radius,
                    self.neighbours,
                )
                data.append(d)

            if len(data) == 2:
                data = np.sqrt(data[0]**2 + data[1]**2)
            else:
                data = data[0]

            u, data = self.kelvin_to_celsius(
                dataset.variables[self.compare['variables'][0]].unit, data)
            self.data -= data

    # Load bathymetry data
    self.bathymetry = overlays.bathymetry(self.basemap, self.latitude,
                                          self.longitude, blur=2)

    if self.depth != 'bottom' and self.depth != 0:
        # Mask out cells where the sea floor is shallower than the
        # selected depth level.
        if len(quiver_data) > 0:
            quiver_bathymetry = overlays.bathymetry(self.basemap,
                                                    quiver_lat, quiver_lon)

        self.data[np.where(self.bathymetry < depth_value)] = np.ma.masked
        for d in self.quiver_data:
            d[np.where(quiver_bathymetry < depth_value)] = np.ma.masked
        for d in self.contour_data:
            d[np.where(self.bathymetry < depth_value)] = np.ma.masked
    else:
        # Surface/bottom: keep only ocean points (invert maskoceans' mask).
        mask = maskoceans(self.longitude, self.latitude, self.data).mask
        self.data[~mask] = np.ma.masked
        for d in self.quiver_data:
            mask = maskoceans(self.quiver_longitude, self.quiver_latitude,
                              d).mask
            d[~mask] = np.ma.masked
        for d in contour_data:
            mask = maskoceans(self.longitude, self.latitude, d).mask
            d[~mask] = np.ma.masked

    if self.area and self.filetype in ['csv', 'odv', 'txt', 'geotiff']:
        # Restrict the exported data to the requested polygon(s).
        area_polys = []
        for a in self.area:
            rings = [LinearRing(p) for p in a['polygons']]
            innerrings = [LinearRing(p) for p in a['innerrings']]

            polygons = []
            for r in rings:
                inners = []
                for ir in innerrings:
                    if r.contains(ir):
                        inners.append(ir)
                polygons.append(Poly(r, inners))

            area_polys.append(MultiPolygon(polygons))

        points = [
            Point(p)
            for p in zip(self.latitude.ravel(), self.longitude.ravel())
        ]

        indicies = []
        for a in area_polys:
            # NOTE(review): np.where on a map object only works in
            # Python 2 (where map returns a list); under Python 3 this
            # would need list(map(...)) — confirm the runtime target.
            indicies.append(
                np.where(map(lambda p, poly=a: poly.contains(p),
                             points))[0])

        indicies = np.unique(np.array(indicies).ravel())
        newmask = np.ones(self.data.shape, dtype=bool)
        newmask[np.unravel_index(indicies, newmask.shape)] = False
        self.data.mask |= newmask

    self.depth_value = depth_value
R = 6371000. #Radius of the earth in m cellArea = 2 * np.pi * (R**2) * np.abs( np.sin(cLats[1:] * (np.pi / 180.)) - np.sin(cLats[:-1] * (np.pi / 180.))) * dlon #in m cellArea = np.repeat(cellArea[:, np.newaxis], lons.shape, axis=1) haThresh = (cellArea * thresh) * 1. / 10000 #converting the threshhold to hectares cMsk = mMsk + sMsk + wMsk ha = cMsk.copy() cMsk_r = cMsk.copy() cMsk[cMsk >= haThresh] = np.nan cMsk[cMsk < haThresh] = 1 cMsk_r[cMsk_r < haThresh] = np.nan cMsk_r[cMsk_r >= haThresh] = 1 lons, lats = np.meshgrid(lons, lats) cMsk = maskoceans(lons, lats, cMsk) ha = maskoceans(lons, lats, ha) cMsk_r = maskoceans(lons, lats, cMsk_r) thisCMAP = 'BrBG' clrBr = np.arange(-0.5, 0.5 + .01, .01) norm = Normalize(vmin=-0.5, vmax=0.5, clip=False) mapper = cm.ScalarMappable(norm=norm, cmap=thisCMAP) fig = plt.figure() ax1 = plt.subplot(111) ##~*~##~*~##~*~##~*~##~*~##~*~##~*~##~*~##~*~##~*~##~*~##~*~##~*~##~*~## # Plot the observations first ##~*~##~*~##~*~##~*~##~*~##~*~##~*~##~*~##~*~##~*~##~*~##~*~##~*~##~*~## plt.sca(ax1) m = Basemap(projection='cyl',llcrnrlat=-45,urcrnrlat=60,\
R = 6371000. #Radius of the earth in m cellArea = 2 * np.pi * (R**2) * np.abs( np.sin(cLats[1:] * (np.pi / 180.)) - np.sin(cLats[:-1] * (np.pi / 180.))) * dlon #in m cellArea = np.repeat(cellArea[:, np.newaxis], lons.shape, axis=1) haThresh = (cellArea * thresh ) * 1. / 10000 #converting the threshhold to hectares cMsk_r = cMsk.copy() cMsk[cMsk >= haThresh] = np.nan cMsk[cMsk < haThresh] = 1 cMsk_r[cMsk_r < haThresh] = np.nan cMsk_r[cMsk_r >= haThresh] = 1 lons, lats = np.meshgrid(lons, lats) cMsk_all = cMsk.copy() cMsk = maskoceans(lons, lats, cMsk) cMsk_r = maskoceans(lons, lats, cMsk_r) fig = plt.figure() m = Basemap(projection='kav7', lon_0=0) #m.pcolor(sstLons.astype(int),sstLats.astype(int),eof_sst,cmap='RdBu_r',latlon=True,vmin=-0.75,vmax=0.75) SVDcol = pd.DataFrame(data={'state': eof_states2, 'SVD': eof_prod}) #Plot everything at the country level first from FAO data before going into subnational data world = m.readshapefile( '/Volumes/Data_Archive/Data/adminBoundaries/ne_50m_admin_0_countries/ne_50m_admin_0_countries', name='cnts', drawbounds=True) m.pcolor(lons, lats, cMsk_r, cmap='Dark2_r', zorder=1.2, latlon=True)
def compare_for_season(start_year=1958, end_year=1974, the_months=None,
                       period_str="djf"):
    """
    Compare CRU, ERA40-driven and GCM-driven s
    """
    # Plots seasonal-mean 2m temperature from CRU observations and an
    # ERA40-driven run (optionally also a GCM-driven E2 run), along with
    # their differences, and saves the figures to PNG files.
    #b, lons2d, lats2d = draw_regions.get_basemap_and_coords(llcrnrlat=40.0, llcrnrlon=-145, urcrnrlon=-10)
    b, lons2d, lats2d = draw_regions.get_basemap_and_coords()
    # maskoceans expects longitudes in [-180, 180].
    lons2d[lons2d > 180] -= 360
    x, y = b(lons2d, lats2d)

    cru = CRUDataManager()
    cru_data = cru.get_mean(start_year, end_year, months=the_months)
    cru_data_interp = cru.interpolate_data_to(cru_data, lons2d, lats2d)

    temp_levels = np.arange(-40, 40, 5)
    diff_levels = np.arange(-10, 12, 2)

    gs = gridspec.GridSpec(3, 2)
    #plot_utils.apply_plot_params(width_pt=None, height_cm=20, width_cm=20, font_size=12)
    fig = plt.figure()
    coast_line_width = 0.25
    axes_list = []

    #plot CRU data
    ax = fig.add_subplot(gs[0, :])
    axes_list.append(ax)
    cru_data_interp = maskoceans(lons2d, lats2d, cru_data_interp)
    img = b.contourf(x, y, cru_data_interp, ax=ax, levels=temp_levels)
    ax.set_title("CRU")
    plot_utils.draw_colorbar(fig, img, ax=ax)

    #era40 driven
    # Find the first file for the requested season in the ERA40 folder.
    file_path = None
    era40_folder = "data/CORDEX/na/era40_1"
    file_prefix = "dm"
    for file_name in os.listdir(era40_folder):
        if period_str.upper() in file_name and file_name.startswith(
                file_prefix):
            file_path = os.path.join(era40_folder, file_name)
            break

    #get the temperature
    rpn_obj = RPN(file_path)
    t2m_era40 = rpn_obj.get_first_record_for_name_and_level(
        varname="TT", level=1, level_kind=level_kinds.HYBRID)
    t2m_era40 = maskoceans(lons2d, lats2d, t2m_era40)
    ax = fig.add_subplot(gs[1, 0])
    axes_list.append(ax)
    img = b.contourf(x, y, t2m_era40, ax=ax, levels=temp_levels)
    ax.set_title("ERA40 driven 1 (1958-1961)")
    plot_utils.draw_colorbar(fig, img, ax=ax)
    rpn_obj.close()

    #era40 - cru
    ax = fig.add_subplot(gs[1, 1])
    axes_list.append(ax)
    img = b.contourf(x, y, t2m_era40 - cru_data_interp, ax=ax,
                     levels=diff_levels)
    ax.set_title("ERA40 driven 1 - CRU")
    plot_utils.draw_colorbar(fig, img, ax=ax)

    plot_e2_data = False
    if plot_e2_data:
        ##get and plot E2 data
        file_path = None
        e2_folder = "data/CORDEX/na/e2"
        prefix = "dm"
        #get file path
        for file_name in os.listdir(e2_folder):
            if file_name.endswith(period_str) and file_name.startswith(
                    prefix):
                file_path = os.path.join(e2_folder, file_name)
                break
        pass

        #get the temperature
        rpn_obj = RPN(file_path)
        t2m = rpn_obj.get_first_record_for_name_and_level(
            varname="TT", level=1, level_kind=level_kinds.HYBRID)
        t2m = maskoceans(lons2d, lats2d, t2m)
        ax = fig.add_subplot(gs[2, 0])
        axes_list.append(ax)
        img = b.contourf(x, y, t2m, ax=ax, levels=temp_levels)
        ax.set_title("E2, GCM driven")
        plot_utils.draw_colorbar(fig, img, ax=ax)

        #e2 - cru
        ax = fig.add_subplot(gs[2, 1])
        axes_list.append(ax)
        img = b.contourf(x, y, t2m - cru_data_interp, ax=ax,
                         levels=diff_levels)
        ax.set_title("E2, GCM driven - CRU")
        plot_utils.draw_colorbar(fig, img, ax=ax)

    ####Draw common elements
    pf_kinds = draw_regions.get_permafrost_mask(lons2d, lats2d)
    for the_ax in axes_list:
        b.drawcoastlines(ax=the_ax, linewidth=coast_line_width)
        b.contour(x, y, pf_kinds, ax=the_ax, colors="k")

    gs.tight_layout(fig, h_pad=5, w_pad=5, pad=2)
    fig.suptitle(period_str.upper(), y=0.03, x=0.5)
    fig.savefig("temperature_validation_{0}.png".format(period_str))

    # Standalone figure with only the ERA40 - CRU difference map.
    fig = plt.figure()
    ax = plt.gca()
    img = b.contourf(x, y, t2m_era40 - cru_data_interp, ax=ax,
                     levels=diff_levels)
    ax.set_title("ERA40 driven 1 - CRU")
    plot_utils.draw_colorbar(fig, img, ax=ax)
    b.drawcoastlines(ax=ax, linewidth=coast_line_width)
    b.contour(x, y, pf_kinds, ax=ax, colors="k")
    fig.savefig("temperature_diff_{0}.png".format(period_str))
    pass
def yieldout(year):
    """Build a blended rainfed+irrigated soy yield field for the given year.

    Combines ISAM fertilized and non-fertilized yield runs, weighting each
    grid cell by the fraction of crop area that received fertilizer
    (fertilizer applied / CLM fertilizer demand), then area-weights rainfed
    vs irrigated yields by the CLM crop-area fractions.

    :param year: calendar year; converted to a 1900-based record index ``bb``
    :return: ``clmynew`` -- masked 360x720 array of blended yield
    """
    bb = year - 1900
    region1 = NetCDFFile(
        '/scratch2/scratchdirs/tslin2/plot/globalcrop/data/clm/HistoricalGLM_crop_150901.nc',
        'r')
    # NOTE(review): crop-area fractions are read at fixed index 99 (not bb) --
    # presumably a fixed reference year for the area masks; confirm intended.
    maitrop = region1.variables['soy_trop'][99, :, :]
    maitemp = region1.variables['soy_temp'][99, :, :]
    maitropi = region1.variables['soy_trop_irrig'][99, :, :]
    maitempi = region1.variables['soy_temp_irrig'][99, :, :]
    gridarea = region1.variables['area'][:, :]
    # clamp non-positive fractions to exactly 0
    maitrop = ma.masked_where(maitrop <= 0, maitrop)
    maitrop = ma.filled(maitrop, fill_value=0.)
    maitemp = ma.masked_where(maitemp <= 0, maitemp)
    maitemp = ma.filled(maitemp, fill_value=0.)
    maitropi = ma.masked_where(maitropi <= 0, maitropi)
    maitropi = ma.filled(maitropi, fill_value=0.)
    maitempi = ma.masked_where(maitempi <= 0, maitempi)
    maitempi = ma.filled(maitempi, fill_value=0.)

    # area aggregates: rainfed, irrigated, tropical, temperate, total
    maizetor = maitrop + maitemp
    maizetoi = maitropi + maitempi
    maizetrop = maitrop + maitropi
    maizetemp = maitemp + maitempi
    maizeto = maitrop + maitemp + maitropi + maitempi

    ff = NetCDFFile(
        '/scratch2/scratchdirs/tslin2/plot/globalcrop/data/clm/HistoricalFertilizer.nc',
        'r')
    fert_maitrop = ff.variables['soy_trop_fert'][bb - 1, :, :]
    fert_maitemp = ff.variables['soy_temp_fert'][bb - 1, :, :]

    clm = NetCDFFile(
        '/scratch2/scratchdirs/tslin2/plot/globalcrop/data/clm/clm45historical/soytrop_historical_co2_rf_fert_0.5x0.5.nc',
        'r')
    # clmtropf = clm.variables['yield'][bb,:,:]
    clmtropfer = clm.variables['fertilizer'][bb - 1, :, :]
    clm1 = NetCDFFile(
        '/scratch2/scratchdirs/tslin2/plot/globalcrop/data/clm/clm45historical/soytemp_historical_co2_rf_fert_0.5x0.5.nc',
        'r')
    # clmtempf = clm1.variables['yield'][bb,:,:]
    clmtempfer = clm1.variables['fertilizer'][bb - 1, :, :]

    # ISAM yields: rainfed/irrigated x fertilized/unfertilized, tropical and temperate
    isam = NetCDFFile(
        '/scratch2/scratchdirs/tslin2/isam/cheyenne/yieldout/isamhistorical/heat/soytrop_historical_co2_rf_fert_0.5x0.5.nc',
        'r')
    clmtropf = isam.variables['totalyield'][bb, :, :]
    isam1 = NetCDFFile(
        '/scratch2/scratchdirs/tslin2/isam/cheyenne/yieldout/isamhistorical/heat/soytemp_historical_co2_rf_fert_0.5x0.5.nc',
        'r')
    clmtempf = isam1.variables['totalyield'][bb, :, :]
    clm2 = NetCDFFile(
        '/scratch2/scratchdirs/tslin2/isam/cheyenne/yieldout/isamhistorical/heat/soytrop_historical_co2_irrig_fert_0.5x0.5.nc',
        'r')
    clmtropfi = clm2.variables['totalyield'][bb, :, :]
    clm3 = NetCDFFile(
        '/scratch2/scratchdirs/tslin2/isam/cheyenne/yieldout/isamhistorical/heat/soytemp_historical_co2_irrig_fert_0.5x0.5.nc',
        'r')
    clmtempfi = clm3.variables['totalyield'][bb, :, :]
    clma = NetCDFFile(
        '/scratch2/scratchdirs/tslin2/isam/cheyenne/yieldout/isamhistorical/heat/soytrop_historical_co2_rf_nofert_0.5x0.5.nc',
        'r')
    clmtropfno = clma.variables['totalyield'][bb, :, :]
    clm1a = NetCDFFile(
        '/scratch2/scratchdirs/tslin2/isam/cheyenne/yieldout/isamhistorical/heat/soytemp_historical_co2_rf_nofert_0.5x0.5.nc',
        'r')
    clmtempfno = clm1a.variables['totalyield'][bb, :, :]
    clm2a = NetCDFFile(
        '/scratch2/scratchdirs/tslin2/isam/cheyenne/yieldout/isamhistorical/heat/soytrop_historical_co2_irrig_nofert_0.5x0.5.nc',
        'r')
    clmtropfnoi = clm2a.variables['totalyield'][bb, :, :]
    clm3a = NetCDFFile(
        '/scratch2/scratchdirs/tslin2/isam/cheyenne/yieldout/isamhistorical/heat/soytemp_historical_co2_irrig_nofert_0.5x0.5.nc',
        'r')
    clmtempfnoi = clm3a.variables['totalyield'][bb, :, :]
    lonisam = clm3a.variables['lon'][:]

    # rotate all ISAM fields so longitudes run -180..180 (dateline at 180.5)
    clmtempfnoi, lonisam1 = shiftgrid(180.5, clmtempfnoi, lonisam, start=False)
    clmtropfnoi, lonisam1 = shiftgrid(180.5, clmtropfnoi, lonisam, start=False)
    clmtempfno, lonisam1 = shiftgrid(180.5, clmtempfno, lonisam, start=False)
    clmtropfno, lonisam1 = shiftgrid(180.5, clmtropfno, lonisam, start=False)
    clmtempf, lonisam1 = shiftgrid(180.5, clmtempf, lonisam, start=False)
    clmtropf, lonisam1 = shiftgrid(180.5, clmtropf, lonisam, start=False)
    clmtempfi, lonisam1 = shiftgrid(180.5, clmtempfi, lonisam, start=False)
    clmtropfi, lonisam1 = shiftgrid(180.5, clmtropfi, lonisam, start=False)
    # print lonisam1

    # CLM fertilizer fields are stored north-up; flip to match the others
    clmtropfer = N.flipud(clmtropfer)
    clmtempfer = N.flipud(clmtempfer)

    # zero out yields wherever the corresponding crop area is zero
    clmtropf = ma.masked_where(maitrop <= 0, clmtropf)
    clmtempf = ma.masked_where(maitemp <= 0, clmtempf)
    clmtropf = ma.filled(clmtropf, fill_value=0.)
    clmtempf = ma.filled(clmtempf, fill_value=0.)
    clmtropfi = ma.masked_where(maitropi <= 0, clmtropfi)
    clmtempfi = ma.masked_where(maitempi <= 0, clmtempfi)
    clmtropfi = ma.filled(clmtropfi, fill_value=0.)
    clmtempfi = ma.filled(clmtempfi, fill_value=0.)
    clmtropfno = ma.masked_where(maitrop <= 0, clmtropfno)
    clmtempfno = ma.masked_where(maitemp <= 0, clmtempfno)
    clmtropfno = ma.filled(clmtropfno, fill_value=0.)
    clmtempfno = ma.filled(clmtempfno, fill_value=0.)
    clmtropfnoi = ma.masked_where(maitropi <= 0, clmtropfnoi)
    clmtempfnoi = ma.masked_where(maitempi <= 0, clmtempfnoi)
    clmtropfnoi = ma.filled(clmtropfnoi, fill_value=0.)
    clmtempfnoi = ma.filled(clmtempfnoi, fill_value=0.)

    # per-cell fertilized fraction = applied fertilizer / CLM fertilizer, capped at 1
    fertfractiontrop = N.zeros((360, 720))
    nofertfractiontrop = N.zeros((360, 720))
    fertfractiontemp = N.zeros((360, 720))
    nofertfractiontemp = N.zeros((360, 720))
    for x in range(0, 360):
        for y in range(0, 720):
            if clmtropfer[x, y] > 0.0:
                fertfractiontrop[x, y] = min(
                    1.0, fert_maitrop[x, y] / clmtropfer[x, y])
                nofertfractiontrop[x, y] = 1.0 - fertfractiontrop[x, y]
            else:
                fertfractiontrop[x, y] = 0.0
                nofertfractiontrop[x, y] = 1.0
    for x in range(0, 360):
        for y in range(0, 720):
            if clmtempfer[x, y] > 0.0:
                fertfractiontemp[x, y] = min(
                    1.0, fert_maitemp[x, y] / clmtempfer[x, y])
                nofertfractiontemp[x, y] = 1.0 - fertfractiontemp[x, y]
            else:
                fertfractiontemp[x, y] = 0.0
                nofertfractiontemp[x, y] = 1.0

    # blend fertilized / unfertilized yields by the fractions computed above
    clmtropfnew = N.zeros((360, 720))
    clmtempfnew = N.zeros((360, 720))
    clmtropfinew = N.zeros((360, 720))
    clmtempfinew = N.zeros((360, 720))
    for x in range(0, 360):
        for y in range(0, 720):
            clmtropfnew[x, y] = (nofertfractiontrop[x, y] * clmtropfno[x, y]) + (
                fertfractiontrop[x, y] * clmtropf[x, y])
            clmtempfnew[x, y] = (nofertfractiontemp[x, y] * clmtempfno[x, y]) + (
                fertfractiontemp[x, y] * clmtempf[x, y])
            clmtropfinew[x, y] = (nofertfractiontrop[x, y] * clmtropfnoi[x, y]
                                  ) + (fertfractiontrop[x, y] * clmtropfi[x, y])
            clmtempfinew[x, y] = (nofertfractiontemp[x, y] * clmtempfnoi[x, y]
                                  ) + (fertfractiontemp[x, y] * clmtempfi[x, y])

    # tropical + temperate totals, non-positive cells zeroed
    yield_clmtf = clmtropf + clmtempf
    yield_clmtf = ma.masked_where(yield_clmtf <= 0, yield_clmtf)
    # yield_clmtf = ma.masked_where(maizetor<=0,yield_clmtf )
    yield_clmtf = ma.filled(yield_clmtf, fill_value=0.)
    yield_clmtfi = clmtropfi + clmtempfi
    yield_clmtfi = ma.masked_where(yield_clmtfi <= 0, yield_clmtfi)
    # yield_clmtfi = ma.masked_where(maizetoi<=0,yield_clmtfi)
    yield_clmtfi = ma.filled(yield_clmtfi, fill_value=0.)
    yield_clmtfnew = clmtropfnew + clmtempfnew
    yield_clmtfnew = ma.masked_where(yield_clmtfnew <= 0, yield_clmtfnew)
    # yield_clmtf = ma.masked_where(maizetor<=0,yield_clmtf )
    yield_clmtfnew = ma.filled(yield_clmtfnew, fill_value=0.)
    yield_clmtfinew = clmtropfinew + clmtempfinew
    yield_clmtfinew = ma.masked_where(yield_clmtfinew <= 0, yield_clmtfinew)
    # yield_clmtfi = ma.masked_where(maizetoi<=0,yield_clmtfi)
    yield_clmtfinew = ma.filled(yield_clmtfinew, fill_value=0.)

    # half-degree cell areas, shifted onto the same -180..180 grid
    area = NetCDFFile(
        '/scratch2/scratchdirs/tslin2/plot/globalcrop/data/gridareahalf.nc',
        'r')
    gridarea = area.variables['cell_area'][:, :]
    gridlon = area.variables['lon'][:]
    gridlat = area.variables['lat'][:]
    gridarea, gridlon = shiftgrid(180.5, gridarea, gridlon, start=False)
    lon2, lat2 = N.meshgrid(gridlon, gridlat)
    map = Basemap(projection='cyl',
                  llcrnrlat=-65,
                  urcrnrlat=90,
                  llcrnrlon=-180,
                  urcrnrlon=180,
                  resolution='c')
    x, y = map(lon2, lat2)

    # mask oceans and non-crop cells, then area-weight rainfed vs irrigated
    yield_clmtf = maskoceans(x, y, yield_clmtf)
    yield_clmtf = ma.masked_where(maizeto <= 0, yield_clmtf)
    yield_clmtfi = maskoceans(x, y, yield_clmtfi)
    yield_clmtfi = ma.masked_where(maizeto <= 0, yield_clmtfi)
    clmy = ((yield_clmtf * maizetor * gridarea) +
            (yield_clmtfi * maizetoi * gridarea)) / ((maizetoi * gridarea) +
                                                     (maizetor * gridarea))
    yield_clmtfnew = maskoceans(x, y, yield_clmtfnew)
    yield_clmtfnew = ma.masked_where(maizeto <= 0, yield_clmtfnew)
    yield_clmtfinew = maskoceans(x, y, yield_clmtfinew)
    yield_clmtfinew = ma.masked_where(maizeto <= 0, yield_clmtfinew)
    clmynew = ((yield_clmtfnew * maizetor * gridarea) +
               (yield_clmtfinew * maizetoi * gridarea)) / (
                   (maizetoi * gridarea) + (maizetor * gridarea))
    return clmynew
norm = colors.BoundaryNorm(bounds, cmap.N) fig = plt.figure(figsize=(20, 10)) ax1 = fig.add_subplot(221) ax1.set_title("Maize SPAM", fontsize=20) map = Basemap(projection='cyl', llcrnrlat=-62, urcrnrlat=90, llcrnrlon=-180, urcrnrlon=180, resolution='c') map.drawcoastlines() map.drawcountries() map.drawmapboundary() x, y = map(lon, lat) m3newp = maskoceans(x, y, m3newp) m3newa = maskoceans(x, y, m3newa) cs1 = map.pcolormesh(x, y, m3newp * 10000 / gridarea, cmap=cmap, norm=norm) #cs1 = map.pcolormesh(x,y,m3newp*10000/gridarea,cmap=plt.cm.Greens,norm=colors.PowerNorm(gamma=1./2.),vmin=0,vmax=9) #cs1 = map.pcolormesh(x,y,m3newp/m3newa,cmap=plt.cm.gist_earth,vmin=0,vmax=10) plt.axis('off') cbar = map.colorbar(cs1, location='bottom', size="5%", pad="2%", ticks=bounds, extend='max') #cbar.ax.set_xticklabels(['0', '0.01', '0.05','0.1','0.5','1','2','3','4','5','6']) # horizontal colorbar cbar.ax.tick_params(labelsize=16)
p = np.clip(p, vmin, vmax) Z[i] = p count += 1 if count % 1000 == 0: print count, np.prod(Z.shape) print 'plotting' maps = [ ('nyc', (20, 20), basemap.Basemap(projection='ortho',lat_0=30,lon_0=-30,resolution='l')), ('asia', (20, 20), basemap.Basemap(projection='ortho',lat_0=23,lon_0=105,resolution='l')), ('world', (20, 10), basemap.Basemap(projection='cyl', llcrnrlat=-60,urcrnrlat=80,\ llcrnrlon=-180,urcrnrlon=180,resolution='c')) ] # remove oceans Z = basemap.maskoceans(X, Y, Z, resolution='h', grid=1.25) for k, figsize, m in maps: print 'drawing', k plt.figure(figsize=figsize) # draw coastlines, country boundaries, fill continents. m.drawcoastlines(linewidth=0.25) m.drawcountries(linewidth=0.25) # draw lon/lat grid lines every 30 degrees. m.drawmeridians(np.arange(0, 360, 30)) m.drawparallels(np.arange(-90, 90, 30)) # contour data over the map. cf = m.contourf(X,
#tmax = tmax - 39.5262 #tmax = tmax - 42.5948 fig = plt.figure(figsize=(4, 2)) m = Basemap(projection='merc', llcrnrlon=-78.5079, llcrnrlat=38.00905, urcrnrlon=-75.6454, urcrnrlat=39.91155, resolution='h') ny = tmax.shape[0] nx = tmax.shape[1] lons, lats = m.makegrid(nx, ny) # get lat/lons of ny by nx evenly space grid. x, y = m(lons, lats) mdata = maskoceans(lons, lats, tmax) m.drawcoastlines() m.drawcounties(linewidth=0.4) parallels = np.arange(0., 90, 0.5) m.drawparallels(parallels, labels=[1, 0, 0, 0], dashes=[2, 900], fontsize=10, linewidth=0.4) meridians = np.arange(180., 360., 1) m.drawmeridians(meridians, labels=[0, 0, 0, 1], dashes=[2, 900], fontsize=10,
# In[100]: # 90 + lands DCD = np.full(sunzenith.shape, 999, dtype='float16') #先重置,上步骤用过了 index = np.where((masked_sunzenith > 90) & (masked_sunzenith <= 180)) DCD[index] = data_39[index] - data_112[index] mask[np.where((DCD <= 0) & (mask == True))] = False DCD = np.ma.array(DCD, mask=mask) # In[101]: # 暂时没有雾的分级,所以直接用mask绘图 fog_lands = np.where(mask == False, 100, 0) fog_lands = np.ma.array(fog_lands, mask=mask) # ma oceans fog_lands = maskoceans(lons, lats, fog_lands, inlands=True) lons_1 = np.ma.array(lons, mask=fog_lands.mask) lats_1 = np.ma.array(lats, mask=fog_lands.mask) # In[102]: # oceans mask = np.full(sunzenith.shape, True) DCD = np.full(sunzenith.shape, 999, dtype='float') # 边角处掩码 masked_sunzenith = np.ma.array(sunzenith, mask=np.where(sunzenith == 65535, True, False)) # 0-10 index = np.where(masked_sunzenith <= 10) DCD[index] = data_39[index] - data_112[index]
def plot_tmin_tmax_correlations(start_year=1980, end_year=2010, months=None):
    """Plot climatological correlation maps of interflow (INTF) against
    TT_max and TT_min and save them as a single JPEG panel figure.

    :param start_year: first year of the climatology window
    :param end_year: last year of the climatology window
    :param months: month numbers to include (defaults to all 12)
    """
    default_path = "/skynet3_rech1/huziy/hdf_store/quebec_0.1_crcm5-hcd-rl-intfl_ITFS.hdf5"
    # default_path = "/skynet3_rech1/huziy/hdf_store/quebec_0.1_crcm5-hcd-rl-intfl_ITFS_avoid_truncation1979-1989.hdf5"

    if months is None:
        months = list(range(1, 13))

    img_folder = os.path.join("interflow_corr_images",
                              os.path.basename(default_path))
    if not os.path.isdir(img_folder):
        os.makedirs(img_folder)
    img_filename = "Tmin_Tmax_interflow_correlations_months={}_{}-{}.jpg".format(
        "-".join(str(m) for m in months), start_year, end_year)

    lons, lats, basemap = analysis.get_basemap_from_hdf(file_path=default_path)
    # normalize longitudes to [-180, 180] for maskoceans
    lons[lons > 180] -= 360
    x, y = basemap(lons, lats)

    # Correlate interflow rate and soil moisture
    params = dict(path1=default_path,
                  varname1="INTF",
                  level1=0,
                  path2=default_path,
                  level2=0,
                  varname2="TT_max",
                  months=months)
    params.update(dict(
        start_year=start_year,
        end_year=end_year,
    ))

    title_list = []
    data_list = []

    # INTF vs TT_max
    corr1, intf_clim, i1_clim = calculate_correlation_field_for_climatology(
        **params)
    to_plot1 = maskoceans(lons, lats, corr1)
    title_list.append("Corr({}, {})".format(params["varname1"],
                                            params["varname2"]))
    data_list.append(to_plot1)

    # correlate interflow and precip
    # params.update(dict(varname2="PR", level2=0))
    # corr2 = calculate_correlation_field_for_climatology(**params)
    # to_plot2 = np.ma.masked_where(to_plot1.mask, corr2)
    # title_list.append("Corr({}, {})".format(params["varname1"], params["varname2"]))
    # data_list.append(to_plot2)

    # correlate precip and soil moisture
    # params.update(dict(varname1="I1", level1=0))
    # corr3 = calculate_correlation_field_for_climatology(**params)
    # to_plot3 = np.ma.masked_where(to_plot2.mask, corr3)
    # title_list.append("Corr({}, {})".format(params["varname1"], params["varname2"]))
    # data_list.append(to_plot3)

    # correlate evaporation and soil moisture (INTF vs TT_min)
    params.update(dict(varname2="TT_min", level2=0, varname1="INTF", level1=0))
    corr4, i1_clim, av_clim = calculate_correlation_field_for_climatology(
        **params)
    to_plot3 = np.ma.masked_where(to_plot1.mask, corr4)
    title_list.append("Corr({}, {})".format(params["varname1"],
                                            params["varname2"]))
    data_list.append(to_plot3)

    # Do plotting: one panel per correlation field plus a shared colorbar column
    clevels = np.arange(-1, 1.2, 0.2)
    npanels = len(data_list)
    gs = GridSpec(1, npanels + 1, width_ratios=[
        1.0,
    ] * npanels + [
        0.05,
    ])

    fig = plt.figure()
    assert isinstance(fig, Figure)
    fig.set_figheight(1.5 * fig.get_figheight())

    img = None
    for col in range(npanels):
        ax = fig.add_subplot(gs[0, col])
        basemap.drawmapboundary(fill_color="0.75", ax=ax)
        img = basemap.contourf(x,
                               y,
                               data_list[col],
                               levels=clevels,
                               cmap=cm.get_cmap("RdBu_r", len(clevels) - 1))
        plt.title(title_list[col])
        basemap.drawcoastlines(linewidth=cpp.COASTLINE_WIDTH, ax=ax)

    plt.colorbar(img, cax=fig.add_subplot(gs[0, npanels]))
    fig.savefig(os.path.join(img_folder, img_filename), dpi=cpp.FIG_SAVE_DPI)
label = r'$\delta^{18} O$' + u' (' + u'\u2030' + ')' f = Dataset(path) var = f.variables[var_name][:, :] #[0,0,:,:] lat = f.variables['latitude'][:] lon = f.variables['longitude'][:] f.close() fig = plt.figure() m = Basemap(projection='spstere', boundinglat=-60, lon_0=-180, resolution='h') var_cyclic, lon_cyclic = addcyclic(var, lon) var_cyclic, lon_cyclic = shiftgrid(180., var_cyclic, lon_cyclic, start=False) lon2d, lat2d = np.meshgrid(lon_cyclic, lat) x, y = m(lon2d, lat2d) mdata = maskoceans(lon2d, lat2d, var_cyclic, resolution='h', grid=1.25, inlands=True) cs = m.contourf(x, y, mdata, cmap=plt.cm.rainbow, alpha=0.5) levels = np.arange(var.min(), var.max(), 5) cs2 = m.contour(x, y, mdata, levels=levels, linewidth=1.) plt.clabel(cs2, fmt='%4.0f', fontsize=8) m.drawcoastlines(color='grey') m.drawmapboundary() cbar = m.colorbar(cs, location='bottom') cbar.set_label(label) cbar.ax.tick_params(labelsize=8) plt.show()
def main(start_year=1980, end_year=2010, months=None):
    """Plot climatological correlation maps of interflow/soil-moisture/precip/
    evaporation pairs, save them as a PDF panel figure, then plot mean annual
    cycles over the region where Corr(INTF, AV) < -0.1.

    :param start_year: first year of the climatology window
    :param end_year: last year of the climatology window
    :param months: month numbers to include (defaults to all 12)
    """
    default_path = "/skynet3_rech1/huziy/hdf_store/quebec_0.1_crcm5-hcd-rl-intfl_ITFS.hdf5"
    # default_path = "/skynet3_rech1/huziy/hdf_store/quebec_0.1_crcm5-hcd-rl-intfl_ITFS_avoid_truncation1979-1989.hdf5"

    if months is None:
        months = list(range(1, 13))

    img_folder = os.path.join("interflow_corr_images",
                              os.path.basename(default_path))
    if not os.path.isdir(img_folder):
        os.makedirs(img_folder)
    img_filename = "interflow_correlations_months={}_{}-{}.pdf".format(
        "-".join(str(m) for m in months), start_year, end_year)

    lons, lats, basemap = analysis.get_basemap_from_hdf(file_path=default_path)
    # normalize longitudes to [-180, 180] for maskoceans
    lons[lons > 180] -= 360
    x, y = basemap(lons, lats)

    # Correlate interflow rate and soil moisture
    params = dict(path1=default_path,
                  varname1="INTF",
                  level1=0,
                  path2=default_path,
                  level2=0,
                  varname2="I1",
                  months=months)
    params.update(dict(
        start_year=start_year,
        end_year=end_year,
    ))

    title_list = []
    data_list = []

    corr1, intf_clim, i1_clim = calculate_correlation_field_for_climatology(
        **params)
    to_plot1 = maskoceans(lons, lats, corr1)
    title_list.append("Corr({}, {})".format(
        infovar.get_display_label_for_var(params["varname1"]),
        infovar.get_display_label_for_var(params["varname2"])))
    data_list.append(to_plot1)

    # correlate interflow and precip
    params.update(dict(varname2="PR", level2=0))
    corr2, i1_clim, pr_clim = calculate_correlation_field_for_climatology(
        **params)
    # reuse the ocean mask from the first field
    to_plot2 = np.ma.masked_where(to_plot1.mask, corr2)
    title_list.append("Corr({}, {})".format(
        infovar.get_display_label_for_var(params["varname1"]),
        infovar.get_display_label_for_var(params["varname2"])))
    data_list.append(to_plot2)

    # correlate precip and soil moisture
    # params.update(dict(varname1="I1", level1=0))
    # corr3 = calculate_correlation_field_for_climatology(**params)
    # to_plot3 = np.ma.masked_where(to_plot2.mask, corr3)
    # title_list.append("Corr({}, {})".format(params["varname1"], params["varname2"]))
    # data_list.append(to_plot3)

    # correlate evaporation and soil moisture
    params.update(dict(varname2="AV", level2=0, varname1="I1", level1=0))
    corr4, i1_clim, av_clim = calculate_correlation_field_for_climatology(
        **params)
    to_plot3 = np.ma.masked_where(to_plot1.mask, corr4)
    title_list.append("Corr({}, {})".format(
        infovar.get_display_label_for_var(params["varname1"]),
        infovar.get_display_label_for_var(params["varname2"])))
    data_list.append(to_plot3)

    # correlate interflow and evaporation
    params.update(dict(varname2="AV", level2=0, varname1="INTF", level1=0))
    corr4, intf_clim, av_clim = calculate_correlation_field_for_climatology(
        **params)
    to_plot4 = np.ma.masked_where(to_plot1.mask, corr4)
    title_list.append("Corr({}, {})".format(
        infovar.get_display_label_for_var(params["varname1"]),
        infovar.get_display_label_for_var(params["varname2"])))
    data_list.append(to_plot4)

    # TODO: Correlate infiltration and surface runoff

    # Do plotting: one panel per correlation field plus a shared colorbar column
    clevels = np.arange(-1, 1.2, 0.2)
    npanels = len(data_list)
    gs = GridSpec(1, npanels + 1, width_ratios=[
        1.0,
    ] * npanels + [
        0.05,
    ])

    fig = plt.figure()
    assert isinstance(fig, Figure)
    fig.set_figheight(1.5 * fig.get_figheight())

    img = None
    for col in range(npanels):
        ax = fig.add_subplot(gs[0, col])
        basemap.drawmapboundary(fill_color="0.75", ax=ax)
        img = basemap.contourf(x,
                               y,
                               data_list[col],
                               levels=clevels,
                               cmap=cm.get_cmap("RdBu_r", len(clevels) - 1))
        plt.title(title_list[col])
        basemap.drawcoastlines(linewidth=cpp.COASTLINE_WIDTH, ax=ax)

    plt.colorbar(img, cax=fig.add_subplot(gs[0, npanels]))
    fig.savefig(os.path.join(img_folder, img_filename), dpi=cpp.FIG_SAVE_DPI)

    # plot timeseries averaged where the INTF-AV correlation is clearly negative
    the_mask = corr4 < -0.1
    varname_to_ts = {
        "INTF": get_mean_over(the_mask, intf_clim),
        "LH": get_mean_over(the_mask, av_clim),
        "SM": get_mean_over(the_mask, i1_clim)
    }

    from matplotlib import gridspec
    fig = plt.figure()
    fig.set_figheight(3 * fig.get_figheight())
    gs = gridspec.GridSpec(len(varname_to_ts), 1)

    # daily dates of a non-leap year, restricted to the selected months
    d0 = datetime(2001, 1, 1)
    dt = timedelta(days=1)
    dates = [d0 + dt * i for i in range(365) if (d0 + dt * i).month in months]
    sfmt = ScalarFormatter()
    dfmt = DateFormatter("%d%b")
    for i, (label, data) in enumerate(varname_to_ts.items()):
        ax = fig.add_subplot(gs[i, 0])
        ax.plot(dates, data, label=label, lw=2)
        ax.grid()
        ax.legend()
        ax.yaxis.set_major_formatter(sfmt)
        # x tick labels only on the bottom panel
        if i < len(varname_to_ts) - 1:
            ax.xaxis.set_ticklabels([])
        else:
            ax.xaxis.set_major_formatter(dfmt)
    fig.savefig(os.path.join(
        img_folder,
        "aa_ts_{}_{}.png".format(os.path.basename(default_path),
                                 "-".join(str(m) for m in months))),
                dpi=cpp.FIG_SAVE_DPI)
fig, ax = plt.subplots(figsize=(12,12)) ax.set_title("ISAM CESM-NCEP T (K)",fontsize=18) map = Basemap(projection ='cyl', llcrnrlat=-65, urcrnrlat=90,llcrnrlon=-180, urcrnrlon=180, resolution='c') x,y = map(lon2,lat2) iyield = ma.masked_where(iyield<=0,iyield) iarea = ma.masked_where(iarea<=0,iarea) #iyield = ma.masked_where(iarea<=0,iyield) iyield = ma.masked_where(maizeto<=0,iyield) iizumy=iyield*maizeto*100 yield_new=maskoceans(x,y,yield_new) yield_new=ma.filled(yield_new, fill_value=0.) yield_new = ma.masked_where(yield_new<=0,yield_new) yield_new = ma.masked_where(maizeto<=0,yield_new) yield_new2=maskoceans(x,y,yield_new2) yield_new2=ma.filled(yield_new2, fill_value=0.) yield_new2 = ma.masked_where(yield_new2<=0,yield_new2) yield_new2 = ma.masked_where(maizeto<=0,yield_new2) isamy=((yield_new*maizetor*gridarea)+(yield_new2*maizetoi*gridarea))*100/gridarea isamy = ma.masked_where(iizumy<=0,isamy) yield_newf=maskoceans(x,y,yield_newf) yield_newf=ma.filled(yield_newf, fill_value=0.) yield_newf = ma.masked_where(yield_newf<=0,yield_newf)
def main():
    """Validate ERA-Interim 0.75-degree summer precipitation against CRU TS3.1.

    Produces a 3-panel figure (CRU, ERA-Interim, difference) and saves it as
    erai0.75_vs_cru_precip.png in img_folder (module-level name -- TODO confirm
    it is defined at module scope; it is not defined in this function).
    """
    erainterim_075_folder = "/HOME/data/Validation/ERA-Interim_0.75/Offline_driving_data/3h_Forecast"

    vname = "PR"
    start_year = 1980
    end_year = 2010

    season_key = "summer"
    season_labels = {season_key: "Summer"}
    season_to_months = OrderedDict([
        (season_key, [6, 7, 8])
    ])

    # Validate temperature and precip
    model_vars = ["TT", "PR"]
    obs_vars = ["tmp", "pre"]
    obs_paths = [
        "/HOME/data/Validation/CRU_TS_3.1/Original_files_gzipped/cru_ts_3_10.1901.2009.tmp.dat.nc",
        "/HOME/data/Validation/CRU_TS_3.1/Original_files_gzipped/cru_ts_3_10.1901.2009.pre.dat.nc"
    ]

    model_var_to_obs_var = dict(zip(model_vars, obs_vars))
    model_var_to_obs_path = dict(zip(model_vars, obs_paths))

    obs_path = model_var_to_obs_path[vname]
    cru = CRUDataManager(var_name=model_var_to_obs_var[vname], path=obs_path)

    # Calculate seasonal means for CRU
    seasonal_clim_fields_cru = cru.get_seasonal_means(
        season_name_to_months=season_to_months,
        start_year=start_year,
        end_year=end_year)

    # Calculate seasonal mean for erai
    flist = get_files_for_season(erainterim_075_folder,
                                 start_year=start_year,
                                 end_year=end_year,
                                 months=season_to_months[season_key])
    rpf = MultiRPN(flist)
    date_to_field_erai075 = rpf.get_all_time_records_for_name_and_level(
        varname=vname, level=-1)

    # Convert to mm/day (fields presumably in m/s -- TODO confirm units)
    era075 = np.mean([field for field in date_to_field_erai075.values()],
                     axis=0) * 24 * 3600 * 1000
    lons_era, lats_era = rpf.get_longitudes_and_latitudes_of_the_last_read_rec()

    seasonal_clim_fields_cru_interp = OrderedDict()

    # Calculate biases: interpolate CRU onto the ERA grid (nearest neighbour)
    for season, cru_field in seasonal_clim_fields_cru.items():
        seasonal_clim_fields_cru_interp[season] = cru.interpolate_data_to(
            cru_field, lons2d=lons_era, lats2d=lats_era, nneighbours=1)

    # Do the plotting ------------------------------------------------------------------------------
    plot_utils.apply_plot_params()
    fig = plt.figure()
    b = Basemap()
    gs = gridspec.GridSpec(nrows=3, ncols=1)

    # panel 1: CRU climatology on its native grid
    ax = fig.add_subplot(gs[0, 0])
    xx, yy = b(cru.lons2d, cru.lats2d)
    cs = b.contourf(xx, yy, seasonal_clim_fields_cru[season_key], 20)
    b.drawcoastlines(ax=ax)
    ax.set_title("CRU")
    plt.colorbar(cs, ax=ax)

    # panel 2: ERA-Interim, on the same color levels as panel 1
    ax = fig.add_subplot(gs[1, 0])
    lons_era[lons_era > 180] -= 360
    lons_era, era075 = b.shiftdata(lons_era, datain=era075, lon_0=0)
    xx, yy = b(lons_era, lats_era)
    # mask oceans in the era plot as well
    era075 = maskoceans(lons_era, lats_era, era075)
    cs = b.contourf(xx, yy, era075,
                    levels=cs.levels, norm=cs.norm, cmap=cs.cmap, ax=ax)
    b.drawcoastlines(ax=ax)
    ax.set_title("ERA-Interim 0.75")
    plt.colorbar(cs, ax=ax)

    # panel 3: differences, symmetric levels from the 90th percentile of |diff|
    ax = fig.add_subplot(gs[2, 0])
    diff = era075 - seasonal_clim_fields_cru_interp[season_key]
    delta = np.percentile(np.abs(diff)[~diff.mask], 90)
    clevs = np.linspace(-delta, delta, 20)
    cs = b.contourf(xx, yy, diff, levels=clevs, cmap="RdBu_r", extend="both")
    b.drawcoastlines(ax=ax)
    ax.set_title("ERA-Interim 0.75 - CRU")
    plt.colorbar(cs, ax=ax)

    plt.show()
    fig.savefig(os.path.join(img_folder, "erai0.75_vs_cru_precip.png"),
                bbox_inches="tight")
def yieldout(year):
    """Build an area-weighted soy ET field (Python 2 syntax: bare print).

    Combines ISAM rainfed and irrigated g_ET runs weighted by the MIRCA2000
    rainfed/irrigated soy areas.

    :param year: calendar year; converted to a 1900-based record index ``bb``
    :return: (clmy, areall, clmall) -- blended per-area field, total crop area,
             and area-weighted total, all on the 0.5-degree global grid
    """
    bb = year - 1900
    bb1 = year - 850  # NOTE(review): unused below -- possibly a leftover index base
    isam1 = NetCDFFile(
        '/scratch2/scratchdirs/tslin2/plot/globalcrop/data/mirca_isam.nc', 'r')
    maitrop1 = isam1.variables['asoy_rf'][:, :]  #mirca2000
    maitropi1 = isam1.variables['asoy_irr'][:, :]  #mirca2000
    # clamp non-positive areas to exactly 0
    maitrop1 = ma.masked_where(maitrop1 <= 0, maitrop1)
    maitrop1 = ma.filled(maitrop1, fill_value=0.)
    maitropi1 = ma.masked_where(maitropi1 <= 0, maitropi1)
    maitropi1 = ma.filled(maitropi1, fill_value=0.)
    lonisam = isam1.variables['lon'][:]
    # rotate onto a -180..180 longitude grid (dateline at 180.5)
    maitrop, lonisam1 = shiftgrid(180.5, maitrop1, lonisam, start=False)
    maitropi, lonisam1 = shiftgrid(180.5, maitropi1, lonisam, start=False)

    maizetor = maitrop
    maizetoi = maitropi
    maizetrop = maitrop + maitropi
    maizeto = maitrop + maitropi

    # ISAM ET: rainfed-fertilized and irrigated-fertilized runs
    isam = NetCDFFile(
        '/scratch2/scratchdirs/tslin2/isam/maisoy_cheyenne/his_cru/new/soy_fert/output/soy_fert.nc',
        'r')
    clmtropf = isam.variables['g_ET'][bb - 1, 1, :, :]
    clm2 = NetCDFFile(
        '/scratch2/scratchdirs/tslin2/isam/maisoy_cheyenne/his_cru/new/soy_irr_fert/output/soy_irr_fert.nc',
        'r')
    clmtropfi = clm2.variables['g_ET'][bb - 1, 1, :, :]
    lonisam = clm2.variables['lon'][:]
    clmtropf, lonisam1 = shiftgrid(180.5, clmtropf, lonisam, start=False)
    clmtropfi, lonisam1 = shiftgrid(180.5, clmtropfi, lonisam, start=False)
    print lonisam1

    # zero out ET wherever the corresponding crop area is zero
    clmtropf = ma.masked_where(maitrop <= 0, clmtropf)
    clmtropf = ma.filled(clmtropf, fill_value=0.)
    clmtropfi = ma.masked_where(maitropi <= 0, clmtropfi)
    clmtropfi = ma.filled(clmtropfi, fill_value=0.)

    yield_clmtf = clmtropf
    yield_clmtf = ma.masked_where(yield_clmtf <= 0, yield_clmtf)
    yield_clmtf = ma.filled(yield_clmtf, fill_value=0.)
    yield_clmtfi = clmtropfi
    yield_clmtfi = ma.masked_where(yield_clmtfi <= 0, yield_clmtfi)
    yield_clmtfi = ma.filled(yield_clmtfi, fill_value=0.)

    # half-degree cell areas on the same shifted grid
    area = NetCDFFile(
        '/scratch2/scratchdirs/tslin2/plot/globalcrop/data/gridareahalf_isam.nc',
        'r')
    gridarea = area.variables['cell_area'][:, :]
    gridlon = area.variables['lon'][:]
    gridlat = area.variables['lat'][:]
    gridarea, gridlon = shiftgrid(180.5, gridarea, gridlon, start=False)
    lon2, lat2 = N.meshgrid(gridlon, gridlat)
    map = Basemap(projection='cyl',
                  llcrnrlat=-65,
                  urcrnrlat=90,
                  llcrnrlon=-180,
                  urcrnrlon=180,
                  resolution='c')
    x, y = map(lon2, lat2)

    # mask oceans and non-crop cells, then weight rainfed vs irrigated by area
    yield_clmtf = maskoceans(x, y, yield_clmtf)
    yield_clmtf = ma.masked_where(maizeto <= 0, yield_clmtf)
    yield_clmtfi = maskoceans(x, y, yield_clmtfi)
    yield_clmtfi = ma.masked_where(maizeto <= 0, yield_clmtfi)
    clmy = ((yield_clmtf * maizetor) + (yield_clmtfi * maizetoi)) / (
        (maizetoi) + (maizetor))
    areall = (maizetoi) + (maizetor)
    clmall = ((yield_clmtf * maizetor) + (yield_clmtfi * maizetoi))
    return clmy, areall, clmall
def main():
    """Compare seasonal climatologies of surface flux variables between the
    CRCM5_HL (Hostetler) and CRCM5_NEMO coupled simulations.

    For each variable of interest: plot per-season fields for both runs plus
    their (NEMO - HL) difference, mask differences failing a Welch t-test at
    p_crit, and save one PNG per variable into img_folder.
    """
    start_year = 1980
    end_year = 2009

    HL_LABEL = "CRCM5_HL"
    NEMO_LABEL = "CRCM5_NEMO"

    # critical p-value for the ttest aka significance level
    # NOTE(review): p_crit = 1 means no difference is ever masked as
    # non-significant (p > 1 is impossible) -- confirm this is intentional.
    p_crit = 1

    vars_of_interest = [
        # T_AIR_2M,
        # TOTAL_PREC,
        # SWE,
        default_varname_mappings.LATENT_HF,
        default_varname_mappings.SENSIBLE_HF,
        default_varname_mappings.LWRAD_DOWN,
        default_varname_mappings.SWRAD_DOWN
        # LAKE_ICE_FRACTION
    ]

    coastline_width = 0.3

    # per-variable season definitions; anything not listed falls back to
    # the module-level season_to_months below
    vname_to_seasonmonths_map = {
        SWE: OrderedDict([("November", [11]), ("December", [12]),
                          ("January", [1, ])]),
        LAKE_ICE_FRACTION: OrderedDict([
            ("December", [12]), ("January", [1, ]), ("February", [2, ]),
            ("March", [3, ]), ("April", [4, ])]),
        T_AIR_2M: season_to_months,
        TOTAL_PREC: season_to_months,
    }

    # set season to months mappings
    for vname in vars_of_interest:
        if vname not in vname_to_seasonmonths_map:
            vname_to_seasonmonths_map[vname] = season_to_months

    sim_configs = {
        HL_LABEL: RunConfig(
            data_path="/RECH2/huziy/coupling/GL_440x260_0.1deg_GL_with_Hostetler/Samples_selected",
            start_year=start_year, end_year=end_year, label=HL_LABEL),
        NEMO_LABEL: RunConfig(
            data_path="/RECH2/huziy/coupling/coupled-GL-NEMO1h_30min/selected_fields",
            start_year=start_year, end_year=end_year, label=NEMO_LABEL),
    }

    sim_labels = [HL_LABEL, NEMO_LABEL]

    vname_to_level = {
        T_AIR_2M: VerticalLevel(1, level_kinds.HYBRID),
        U_WE: VerticalLevel(1, level_kinds.HYBRID),
        V_SN: VerticalLevel(1, level_kinds.HYBRID),
        default_varname_mappings.LATENT_HF: VerticalLevel(5, level_kinds.ARBITRARY),
        default_varname_mappings.SENSIBLE_HF: VerticalLevel(5, level_kinds.ARBITRARY),
    }

    # Try to get the land_fraction for masking if necessary
    # NOTE(review): re-raising inside except makes this try/except a no-op;
    # the trailing pass is unreachable.
    land_fraction = None
    try:
        first_ts_file = Path(sim_configs[HL_LABEL].data_path).parent / "pm1979010100_00000000p"
        land_fraction = get_land_fraction(first_timestep_file=first_ts_file)
    except Exception as err:
        raise err
        pass

    # Calculations

    # prepare params for interpolation
    lons_t, lats_t, bsmap = get_target_lons_lats_basemap(sim_configs[HL_LABEL])

    # get a subdomain of the simulation domain
    nx, ny = lons_t.shape
    iss = IndexSubspace(i_start=20, j_start=10, i_end=nx // 1.5, j_end=ny / 1.8)

    # just to change basemap limits
    lons_t, lats_t, bsmap = get_target_lons_lats_basemap(
        sim_configs[HL_LABEL], sub_space=iss)

    xt, yt, zt = lat_lon.lon_lat_to_cartesian(lons_t.flatten(), lats_t.flatten())

    vname_map = {}
    vname_map.update(default_varname_mappings.vname_map_CRCM5)

    # Read and calculate simulated seasonal means
    mod_label_to_vname_to_season_to_std = {}
    mod_label_to_vname_to_season_to_nobs = {}

    sim_data = defaultdict(dict)
    for label, r_config in sim_configs.items():
        store_config = {
            "base_folder": r_config.data_path,
            "data_source_type": data_source_types.SAMPLES_FOLDER_FROM_CRCM_OUTPUT_VNAME_IN_FNAME,
            "varname_mapping": vname_map,
            "level_mapping": vname_to_level,
            "offset_mapping": default_varname_mappings.vname_to_offset_CRCM5,
            "multiplier_mapping": default_varname_mappings.vname_to_multiplier_CRCM5,
        }

        dm = DataManager(store_config=store_config)

        mod_label_to_vname_to_season_to_std[label] = {}
        mod_label_to_vname_to_season_to_nobs[label] = {}

        interp_indices = None
        for vname in vars_of_interest:
            # --
            end_year_for_current_var = end_year
            if vname == SWE:
                # SWE record presumably ends in 1996 -- TODO confirm
                end_year_for_current_var = min(1996, end_year)

            # --
            seas_to_year_to_mean = dm.get_seasonal_means(
                varname_internal=vname,
                start_year=start_year,
                end_year=end_year_for_current_var,
                season_to_months=vname_to_seasonmonths_map[vname])

            # get the climatology (mean over yearly seasonal means)
            seas_to_clim = {
                seas: np.array(list(y_to_means.values())).mean(axis=0)
                for seas, y_to_means in seas_to_year_to_mean.items()
            }

            sim_data[label][vname] = seas_to_clim

            # nearest-neighbour interpolation indices, computed once per run
            if interp_indices is None:
                _, interp_indices = dm.get_kdtree().query(list(zip(xt, yt, zt)))

            season_to_std = {}
            mod_label_to_vname_to_season_to_std[label][vname] = season_to_std

            season_to_nobs = {}
            mod_label_to_vname_to_season_to_nobs[label][vname] = season_to_nobs

            for season in seas_to_clim:
                interpolated_field = seas_to_clim[season].flatten()[interp_indices].reshape(lons_t.shape)
                seas_to_clim[season] = interpolated_field

                # calculate standard deviations of the interpolated fields
                season_to_std[season] = np.asarray(
                    [field.flatten()[interp_indices].reshape(lons_t.shape)
                     for field in seas_to_year_to_mean[season].values()]).std(axis=0)

                # calculate numobs for the ttest
                season_to_nobs[season] = np.ones_like(lons_t) * len(seas_to_year_to_mean[season])

    # Plotting: interpolate to the same grid and plot obs and biases
    xx, yy = bsmap(lons_t, lats_t)
    # normalize longitudes to [-180, 180] for maskoceans (after projecting)
    lons_t[lons_t > 180] -= 360

    for vname in vars_of_interest:
        # ocean mask (lakes kept for SWE); separate mask with lakes removed
        field_mask = maskoceans(lons_t, lats_t, np.zeros_like(lons_t),
                                inlands=vname in [SWE]).mask
        field_mask_lakes = maskoceans(lons_t, lats_t, np.zeros_like(lons_t),
                                      inlands=True).mask

        plot_utils.apply_plot_params(
            width_cm=11 * len(vname_to_seasonmonths_map[vname]),
            height_cm=20, font_size=8)

        fig = plt.figure()

        nrows = len(sim_configs) + 1  # one row per run + one difference row
        ncols = len(vname_to_seasonmonths_map[vname])
        gs = GridSpec(nrows=nrows, ncols=ncols)

        # plot the fields
        for current_row, sim_label in enumerate(sim_labels):
            for col, season in enumerate(vname_to_seasonmonths_map[vname]):
                field = sim_data[sim_label][vname][season]

                ax = fig.add_subplot(gs[current_row, col])
                if current_row == 0:
                    ax.set_title(season)

                clevs = get_clevs(vname)
                if clevs is not None:
                    bnorm = BoundaryNorm(clevs, len(clevs) - 1)
                    cmap = cm.get_cmap("viridis", len(clevs) - 1)
                else:
                    cmap = "viridis"
                    bnorm = None

                the_mask = field_mask_lakes if vname in [T_AIR_2M, TOTAL_PREC, SWE] else field_mask
                to_plot = np.ma.masked_where(the_mask, field) * internal_name_to_multiplier[vname]

                # temporary plot the actual values
                cs = bsmap.contourf(xx, yy, to_plot, ax=ax,
                                    levels=get_clevs(vname),
                                    cmap=cmap, norm=bnorm, extend="both")
                bsmap.drawcoastlines(linewidth=coastline_width)
                bsmap.colorbar(cs, ax=ax)

                if col == 0:
                    ax.set_ylabel("{}".format(sim_label))

        # plot differences between the fields
        for col, season in enumerate(vname_to_seasonmonths_map[vname]):
            field = sim_data[NEMO_LABEL][vname][season] - sim_data[HL_LABEL][vname][season]

            ax = fig.add_subplot(gs[-1, col])

            clevs = get_clevs(vname + "biasdiff")
            if clevs is not None:
                bnorm = BoundaryNorm(clevs, len(clevs) - 1)
                cmap = cm.get_cmap("bwr", len(clevs) - 1)
            else:
                cmap = "bwr"
                bnorm = None

            to_plot = field * internal_name_to_multiplier[vname]
            # to_plot = np.ma.masked_where(field_mask, field) * internal_name_to_multiplier[vname]

            # ttest
            a = sim_data[NEMO_LABEL][vname][season]  # Calculate the simulation data back from biases
            std_a = mod_label_to_vname_to_season_to_std[NEMO_LABEL][vname][season]
            nobs_a = mod_label_to_vname_to_season_to_nobs[NEMO_LABEL][vname][season]

            b = sim_data[HL_LABEL][vname][season]  # Calculate the simulation data back from biases
            std_b = mod_label_to_vname_to_season_to_std[HL_LABEL][vname][season]
            nobs_b = mod_label_to_vname_to_season_to_nobs[HL_LABEL][vname][season]

            t, p = ttest_ind_from_stats(mean1=a, std1=std_a, nobs1=nobs_a,
                                        mean2=b, std2=std_b, nobs2=nobs_b,
                                        equal_var=False)

            # Mask non-significant differences as given by the ttest
            to_plot = np.ma.masked_where(p > p_crit, to_plot)

            # mask the points with not sufficient land fraction
            if land_fraction is not None and vname in [SWE, ]:
                to_plot = np.ma.masked_where(land_fraction < 0.05, to_plot)
                # print("land fractions for large differences ", land_fraction[to_plot > 30])

            cs = bsmap.contourf(xx, yy, to_plot, ax=ax, extend="both",
                                levels=get_clevs(vname + "biasdiff"),
                                cmap=cmap, norm=bnorm)
            bsmap.drawcoastlines(linewidth=coastline_width)
            bsmap.colorbar(cs, ax=ax)

            if col == 0:
                ax.set_ylabel("{}\n-\n{}".format(NEMO_LABEL, HL_LABEL))

        fig.tight_layout()

        # save a figure per variable
        img_file = "seasonal_differences_noobs_{}_{}_{}-{}.png".format(
            vname,
            "-".join([s for s in vname_to_seasonmonths_map[vname]]),
            start_year, end_year)
        img_file = img_folder.joinpath(img_file)
        fig.savefig(str(img_file), dpi=300)

        plt.close(fig)
def main(intf_file="", no_intf_file="", start_year=1980, end_year=2010, dt_hours=3):
    """
    Compare the longest rain events between a simulation with interflow and
    one without, on a year by year basis.

    For each year the maximum rain-event duration is computed per grid cell,
    the durations are averaged over the years, and the interflow effect
    (intf - nointf) is plotted along with the surface-runoff differences.
    All images are saved under the "long-rain-events-30y" folder.

    :param intf_file: path to the file of the simulation with interflow
    :param no_intf_file: path to the file of the simulation without interflow
    :param start_year: first year of the analysis period
    :param end_year: last year of the analysis period
    :param dt_hours: output time step in hours, used to convert duration
        differences from time steps to hours
    """
    matplotlib.rc("font", size=20)

    img_folder = "long-rain-events-30y"
    if not os.path.isdir(img_folder):
        os.mkdir(img_folder)

    # Calculate the durations of the longest rain events in both simulations
    no_intf_all_max_durations, no_intf_acc_runoff, intf_all_max_durations, intf_acc_runoff = \
        get_longest_rain_durations_for_files(
            intf_file=intf_file, no_intf_file=no_intf_file, start_year=start_year, end_year=end_year)

    # Debug: visualize
    cmap = cm.get_cmap("rainbow", 20)
    lons, lats, basemap = analysis.get_basemap_from_hdf(file_path=no_intf_file)
    x, y = basemap(lons, lats)

    # Mean (over years) of the yearly maximum durations, no-interflow run
    plt.figure()
    mean_max_durations_nointf = np.mean(no_intf_all_max_durations, axis=0).astype(int)
    im = basemap.pcolormesh(x, y, mean_max_durations_nointf, vmin=0, vmax=50, cmap=cmap)
    basemap.drawcoastlines()
    plt.title("no - intf")
    plt.colorbar(im)
    print(mean_max_durations_nointf.min(), mean_max_durations_nointf.max(), mean_max_durations_nointf.mean())
    plt.savefig(os.path.join(img_folder, "no-intf-durations.png"))

    # Same map for the interflow run
    plt.figure()
    mean_max_durations_intf = np.mean(intf_all_max_durations, axis=0).astype(int)
    im = basemap.pcolormesh(x, y, mean_max_durations_intf, vmin=0, vmax=50, cmap=cmap)
    basemap.drawcoastlines()
    plt.title("intf")
    plt.colorbar(im)
    print(mean_max_durations_intf.min(), mean_max_durations_intf.max(), mean_max_durations_intf.mean())
    plt.savefig(os.path.join(img_folder, "intf-durations.png"))

    # Plot the interflow effect on the longest rain events
    # maskoceans(..., inlands=True): mask is True over ocean and inland lakes
    mask = maskoceans(lons, lats, mean_max_durations_intf, inlands=True).mask

    plt.figure()
    # Symmetric colour levels around zero (in hours)
    clevs = [0.5, 1, 5, 30, 100, 150]
    clevs = [-c for c in reversed(clevs)] + clevs
    bn = BoundaryNorm(clevs, len(clevs) - 1)
    cmap_diff = cm.get_cmap("bwr", len(clevs) - 1)

    # Duration difference converted from time steps to hours;
    # ocean and lake points are masked out
    diff = np.ma.masked_where(mask, (mean_max_durations_intf - mean_max_durations_nointf) * dt_hours)
    im = basemap.pcolormesh(x, y, diff, cmap=cmap_diff, vmin=clevs[0], vmax=clevs[-1], norm=bn)
    basemap.drawcoastlines()
    plt.title("intf - nointf" + r", $\sum\Delta_{i, j}$ = " + "{}\n".format(diff.sum()))
    cb = plt.colorbar(im)
    cb.ax.set_title("hours")
    plt.savefig(os.path.join(img_folder, "diff_intf-nointf_durations.png"))

    # Plot differences in surface runoff
    plot_surface_runoff_differences(x, y, basemap, mask, no_intf_acc_runoff, intf_acc_runoff, dt_hours=dt_hours,
                                    img_path=os.path.join(img_folder, "runoff_during_long_rain_events.png"))

    # Plot numbers of events of different durations
    # (~mask selects land points, mask selects ocean/lake points)
    plot_nevents_duration_curves(mean_max_durations_nointf[~mask], mean_max_durations_intf[~mask],
                                 img_path=os.path.join(img_folder, "nevents_vs_duration_over_land.png"))
    plot_nevents_duration_curves(mean_max_durations_nointf[mask], mean_max_durations_intf[mask],
                                 img_path=os.path.join(img_folder, "nevents_vs_duration_over_ocean-and-lakes.png"))
def load_data(self):
    """
    Load and prepare everything needed to render this map plot.

    Builds the basemap for the requested projection, makes a lat/lon grid,
    reads the plotted variable (plus optional quiver and contour overlays)
    from the dataset, optionally subtracts the monthly climatology for
    anomaly plots, masks land / too-shallow points using bathymetry, and
    restricts tabular output to the user-drawn area polygons.

    Sets (among others): self.basemap, self.longitude/self.latitude,
    self.data, self.quiver_data, self.contour_data, self.bathymetry,
    self.timestamp and self.depth_value.
    """
    # Choose the projection: polar stereographic for the polar EPSG codes,
    # otherwise a Lambert conformal map sized to the requested bounds.
    if self.projection == 'EPSG:32661':
        # North polar stereographic; boundary latitude rounded down to a
        # multiple of 5 degrees.
        blat = min(self.bounds[0], self.bounds[2])
        blat = 5 * np.floor(blat / 5)
        self.basemap = basemap.load_map('npstere', (blat, 0), None, None)
    elif self.projection == 'EPSG:3031':
        # South polar stereographic; boundary latitude rounded up.
        blat = max(self.bounds[0], self.bounds[2])
        blat = 5 * np.ceil(blat / 5)
        self.basemap = basemap.load_map('spstere', (blat, 180), None, None)
    else:
        # Lambert conformal: height/width measured along the centroid
        # meridian/parallel (km -> m), padded by 25%.
        distance = VincentyDistance()
        height = distance.measure(
            (self.bounds[0], self.centroid[1]),
            (self.bounds[2], self.centroid[1])) * 1000 * 1.25
        width = distance.measure(
            (self.centroid[0], self.bounds[1]),
            (self.centroid[0], self.bounds[3])) * 1000 * 1.25
        self.basemap = basemap.load_map(
            'lcc', self.centroid, height, width)

    # Build a grid of at most 500 cells along the longer axis, matching
    # the map aspect ratio.
    if self.basemap.aspect < 1:
        gridx = 500
        gridy = int(500 * self.basemap.aspect)
    else:
        gridy = 500
        gridx = int(500 / self.basemap.aspect)

    self.longitude, self.latitude = self.basemap.makegrid(gridx, gridy)

    with open_dataset(get_dataset_url(self.dataset_name)) as dataset:
        # Negative time indices count back from the end, then clamp.
        if self.time < 0:
            self.time += len(dataset.timestamps)
        self.time = np.clip(self.time, 0, len(dataset.timestamps) - 1)

        self.variable_unit = self.get_variable_units(
            dataset, self.variables)[0]
        self.variable_name = self.get_variable_names(
            dataset, self.variables)[0]
        scale_factor = self.get_variable_scale_factors(
            dataset, self.variables)[0]

        # Default colormap: per-variable for scalars, 'speed' when two
        # variables (vector components) are plotted as a magnitude.
        if self.cmap is None:
            if len(self.variables) == 1:
                self.cmap = colormap.find_colormap(self.variable_name)
            else:
                self.cmap = colormap.colormaps.get('speed')

        if len(self.variables) == 2:
            self.variable_name = self.vector_name(self.variable_name)

        if self.depth == 'bottom':
            depth_value = 'Bottom'
        else:
            self.depth = np.clip(
                int(self.depth), 0, len(dataset.depths) - 1)
            depth_value = dataset.depths[self.depth]

        data = []
        allvars = []
        for v in self.variables:
            var = dataset.variables[v]
            allvars.append(v)
            if self.filetype in ['csv', 'odv', 'txt']:
                # Tabular exports also need the true depth of each point.
                d, depth_value = dataset.get_area(np.array(
                    [self.latitude, self.longitude]), self.depth,
                    self.time, v, return_depth=True)
            else:
                d = dataset.get_area(
                    np.array([self.latitude, self.longitude]),
                    self.depth, self.time, v)

            d = np.multiply(d, scale_factor)
            self.variable_unit, d = self.kelvin_to_celsius(
                self.variable_unit, d)
            data.append(d)

            if self.filetype not in ['csv', 'odv', 'txt']:
                if len(var.dimensions) == 3:
                    # Surface-only variable: no depth annotation.
                    self.depth_label = ""
                elif self.depth == 'bottom':
                    self.depth_label = " at Bottom"
                else:
                    self.depth_label = " at " + \
                        str(int(np.round(depth_value))) + " m"

        # Two variables are treated as vector components -> magnitude.
        if len(data) == 2:
            data[0] = np.sqrt(data[0]**2 + data[1]**2)

        self.data = data[0]

        # Optional quiver (arrow) overlay on a coarse 50x50 grid.
        quiver_data = []
        if self.quiver is not None and \
                self.quiver['variable'] != '' and \
                self.quiver['variable'] != 'none':
            for v in self.quiver['variable'].split(','):
                allvars.append(v)
                var = dataset.variables[v]
                quiver_unit = get_variable_unit(self.dataset_name, var)
                quiver_name = get_variable_name(self.dataset_name, var)
                quiver_lon, quiver_lat = self.basemap.makegrid(50, 50)
                d = dataset.get_area(np.array([quiver_lat, quiver_lon]),
                                     self.depth, self.time, v)
                quiver_data.append(d)

            self.quiver_name = self.vector_name(quiver_name)
            self.quiver_longitude = quiver_lon
            self.quiver_latitude = quiver_lat
            self.quiver_unit = quiver_unit
        self.quiver_data = quiver_data

        # If every requested variable is surface-only (3 dimensions),
        # force the depth index to the surface.
        if all(
            map(lambda v: len(dataset.variables[v].dimensions) == 3,
                allvars)):
            self.depth = 0

        # Optional contour overlay.
        contour_data = []
        if self.contour is not None and \
                self.contour['variable'] != '' and \
                self.contour['variable'] != 'none':
            d = dataset.get_area(np.array([self.latitude, self.longitude]),
                                 self.depth, self.time,
                                 self.contour['variable'])
            contour_unit = get_variable_unit(
                self.dataset_name,
                dataset.variables[self.contour['variable']])
            contour_name = get_variable_name(
                self.dataset_name,
                dataset.variables[self.contour['variable']])
            contour_factor = get_variable_scale_factor(
                self.dataset_name,
                dataset.variables[self.contour['variable']])
            contour_unit, d = self.kelvin_to_celsius(contour_unit, d)
            d = np.multiply(d, contour_factor)
            contour_data.append(d)
            self.contour_unit = contour_unit
            self.contour_name = contour_name

        self.contour_data = contour_data

        self.timestamp = dataset.timestamps[self.time]

    # Anomaly plots: subtract the climatology for the same month.
    if self.variables != self.variables_anom:
        self.variable_name += " Anomaly"
        with open_dataset(get_dataset_climatology(
                self.dataset_name)) as dataset:
            data = []
            for v in self.variables:
                var = dataset.variables[v]
                d = dataset.get_area(
                    np.array([self.latitude, self.longitude]),
                    self.depth, self.timestamp.month - 1, v)
                data.append(d)
            if len(data) == 2:
                data = np.sqrt(data[0]**2 + data[1]**2)
            else:
                data = data[0]

            u, data = self.kelvin_to_celsius(
                dataset.variables[self.variables[0]].unit, data)

            self.data -= data

    # Load bathymetry data
    self.bathymetry = overlays.bathymetry(
        self.basemap, self.latitude, self.longitude, blur=2)

    if self.depth != 'bottom' and self.depth != 0:
        # Below the surface: hide points where the sea floor is shallower
        # than the plotted depth.
        if len(quiver_data) > 0:
            quiver_bathymetry = overlays.bathymetry(
                self.basemap, quiver_lat, quiver_lon)

        self.data[np.where(self.bathymetry < depth_value)] = np.ma.masked
        for d in self.quiver_data:
            d[np.where(quiver_bathymetry < depth_value)] = np.ma.masked
        for d in self.contour_data:
            d[np.where(self.bathymetry < depth_value)] = np.ma.masked
    else:
        # Surface or bottom: mask out land points (maskoceans' mask is
        # True over ocean, so ~mask selects land).
        mask = maskoceans(self.longitude, self.latitude, self.data).mask
        self.data[~mask] = np.ma.masked
        for d in self.quiver_data:
            mask = maskoceans(self.quiver_longitude,
                              self.quiver_latitude, d).mask
            d[~mask] = np.ma.masked
        for d in contour_data:
            mask = maskoceans(self.longitude, self.latitude, d).mask
            d[~mask] = np.ma.masked

    if self.area and self.filetype in ['csv', 'odv', 'txt', 'geotiff']:
        # Restrict tabular/geotiff output to the user-drawn polygons.
        area_polys = []
        for a in self.area:
            rings = [LinearRing(p) for p in a['polygons']]
            innerrings = [LinearRing(p) for p in a['innerrings']]

            polygons = []
            for r in rings:
                inners = []
                for ir in innerrings:
                    if r.contains(ir):
                        inners.append(ir)
                polygons.append(Poly(r, inners))

            area_polys.append(MultiPolygon(polygons))

        points = [
            Point(p)
            for p in zip(self.latitude.ravel(), self.longitude.ravel())
        ]

        indicies = []
        for a in area_polys:
            # Fix: on Python 3, map() returns a lazy iterator which
            # np.where treats as a 0-d object and the containment test
            # silently breaks -- materialize to a list first (this is a
            # no-op on Python 2, where map already returned a list).
            indicies.append(
                np.where(
                    list(map(lambda p, poly=a: poly.contains(p),
                             points)))[0])

        indicies = np.unique(np.array(indicies).ravel())
        newmask = np.ones(self.data.shape, dtype=bool)
        newmask[np.unravel_index(indicies, newmask.shape)] = False
        self.data.mask |= newmask

    self.depth_value = depth_value
def main():
    """
    Validate NEI CRCM5 simulations against gridded observations.

    Plots seasonal climatologies of TT and PR (presumably 2m air
    temperature and precipitation -- confirm against the managers) for
    three model configurations and for the CRU, NAOBS (ANUSPLIN-based)
    and Daymet observational datasets, saving the images under
    ``nei_validation/``.

    Fix vs. previous version: in the CRU branch ``manager.close()`` was
    called *before* ``manager.lons2d``/``lats2d`` were read by the plot
    call (use-after-close); it is now closed after plotting, consistent
    with the NAOBS and Daymet branches.
    """
    img_folder = Path("nei_validation")
    img_folder.mkdir(parents=True, exist_ok=True)

    var_names = ["TT", "PR"]

    # Season name -> (start month, number of months)
    seasons = OrderedDict([
        ("DJF", MonthPeriod(12, 3)),
        ("MAM", MonthPeriod(3, 3)),
        ("JJA", MonthPeriod(6, 3)),
        ("SON", MonthPeriod(9, 3)),
    ])

    sim_paths = OrderedDict()

    start_year = 1980
    end_year = 2008

    sim_paths["WC_0.44deg_default"] = Path \
        ("/HOME/huziy/skynet3_rech1/CRCM5_outputs/NEI/diags/NEI_WC0.44deg_default/Diagnostics")
    sim_paths["WC_0.44deg_ctem+frsoil+dyngla"] = Path \
        ("/HOME/huziy/skynet3_rech1/CRCM5_outputs/NEI/diags/debug_NEI_WC0.44deg_Crr1/Diagnostics")
    sim_paths["WC_0.11deg_ctem+frsoil+dyngla"] = Path(
        "/snow3/huziy/NEI/WC/NEI_WC0.11deg_Crr1/Diagnostics")

    cru_vname_to_path = {
        "pre": "/HOME/data/Validation/CRU_TS_3.1/Original_files_gzipped/cru_ts_3_10.1901.2009.pre.dat.nc",
        "tmp": "/HOME/data/Validation/CRU_TS_3.1/Original_files_gzipped/cru_ts_3_10.1901.2009.tmp.dat.nc"
    }

    # Toggles for the individual panels
    plot_cru_data = True
    plot_model_data = True
    plot_naobs_data = True
    plot_daymet_data = True

    plot_utils.apply_plot_params(font_size=14)

    basemap_for_obs = None

    # plot simulation data
    for sim_label, sim_path in sim_paths.items():
        manager = DiagCrcmManager(data_dir=sim_path)

        # get the basemap to be reused for plotting observation data;
        # note: this is done even when model plotting is switched off
        if basemap_for_obs is None:
            basemap_for_obs = manager.get_basemap(
                resolution="i", area_thresh=area_thresh_km2)

        if not plot_model_data:
            break

        for vname in var_names:
            seas_to_clim = manager.get_seasonal_means_with_ttest_stats(
                season_to_monthperiod=seasons,
                start_year=start_year, end_year=end_year,
                vname=vname,
                vertical_level=var_name_to_level[vname],
                data_file_prefix=var_name_to_file_prefix[vname])

            _plot_seasonal_data(
                seas_data=seas_to_clim,
                data_label="{}_{}-{}".format(sim_label, start_year, end_year),
                img_dir=img_folder,
                map=manager.get_basemap(
                    resolution="i", area_thresh=area_thresh_km2),
                lons=manager.lons, lats=manager.lats, vname=vname)

    assert basemap_for_obs is not None

    # plot obs data
    # -- CRU
    for vname in var_names:
        if not plot_cru_data:
            break

        cru_vname = var_name_to_cru_name[vname]
        manager = CRUDataManager(
            path=cru_vname_to_path[cru_vname], var_name=cru_vname)

        seas_to_clim = manager.get_seasonal_means_with_ttest_stats(
            season_to_monthperiod=seasons,
            start_year=start_year, end_year=end_year)

        _plot_seasonal_data(seas_data=seas_to_clim,
                            data_label="{}_{}-{}".format(
                                "CRU", start_year, end_year),
                            img_dir=img_folder,
                            map=basemap_for_obs,
                            lons=manager.lons2d, lats=manager.lats2d,
                            vname=vname,
                            var_name_to_mul={
                                "TT": 1, "PR": 1
                            })

        # close only after the manager's lons2d/lats2d were used above
        manager.close()

    # -- NAOBS
    naobs_vname_to_path = {
        "TT": "/HOME/huziy/skynet3_rech1/obs_data/anuspl_uw_0.11_wc_domain/anusplin+_interpolated_tt_pr.nc",
        "PR": "/HOME/huziy/skynet3_rech1/obs_data/anuspl_uw_0.11_wc_domain/anusplin+_interpolated_tt_pr.nc"
    }

    for vname in var_names:
        if not plot_naobs_data:
            break

        manager = CRUDataManager(
            path=naobs_vname_to_path[vname], var_name=vname)

        seas_to_clim = manager.get_seasonal_means_with_ttest_stats(
            season_to_monthperiod=seasons,
            start_year=start_year, end_year=end_year)

        # mask no data points (north of 60N, west of 150W, and oceans)
        for s, data in seas_to_clim.items():
            for i in [0, 1]:
                data[i] = np.ma.masked_where(
                    manager.lats2d > 60, data[i])
                data[i] = np.ma.masked_where(
                    manager.lons2d < -150, data[i])
                data[i] = maskoceans(
                    manager.lons2d, manager.lats2d, datain=data[i])

        _plot_seasonal_data(seas_data=seas_to_clim,
                            data_label="{}_{}-{}".format(
                                "NAOBS", start_year, end_year),
                            img_dir=img_folder,
                            map=basemap_for_obs,
                            lons=manager.lons2d, lats=manager.lats2d,
                            vname=vname)
        manager.close()

    # -- daymet monthly
    daymet_vname_to_path = {
        "prcp": "/HOME/data/Validation/Daymet/Monthly_means/NetCDF/daymet_v3_prcp_monttl_*_na.nc4",
        "tavg": "/HOME/huziy/skynet3_rech1/obs_data/daymet_tavg_monthly/daymet_v3_tavg_monavg_*_na_nc4classic.nc4"
    }

    vname_to_daymet_vname = {"PR": "prcp", "TT": "tavg"}

    for vname in var_names:
        if not plot_daymet_data:
            break

        daymet_vname = vname_to_daymet_vname[vname]
        manager = HighResDataManager(
            path=daymet_vname_to_path[daymet_vname], vname=daymet_vname)

        seas_to_clim = manager.get_seasonal_means_with_ttest_stats_dask(
            season_to_monthperiod=seasons,
            start_year=start_year, end_year=end_year,
            convert_monthly_accumulators_to_daily=(vname == "PR"))

        _plot_seasonal_data(seas_data=seas_to_clim,
                            data_label="{}_{}-{}".format(
                                "DAYMET", start_year, end_year),
                            img_dir=img_folder,
                            map=basemap_for_obs,
                            lons=manager.lons, lats=manager.lats,
                            vname=vname,
                            var_name_to_mul={
                                "PR": 1, "TT": 1
                            })
        manager.close()
nc_file = 'F:/Output files/wrfout_d03_2100-09-01_00_00_00.nc' fh = Dataset(nc_file, mode='r') lons = fh.variables['XLONG'][:] lats = fh.variables['XLAT'][:] fh.close() dura1 = np.loadtxt('C:/Users/Yating/Desktop/output/2098-2100Duration_out.txt') dura = np.transpose(dura1) fig = plt.figure(figsize=(4,2)) m = Basemap(projection='merc',llcrnrlon=-78.5079,llcrnrlat=38.00905,urcrnrlon=-75.6454,urcrnrlat=39.91155,resolution='h') ny = dura.shape[0]; nx = dura.shape[1] lons, lats = m.makegrid(nx, ny) # get lat/lons of ny by nx evenly space grid. x, y = m(lons, lats) mdata = maskoceans(lons, lats, dura) m.drawcoastlines() m.drawcounties(linewidth=0.4) parallels = np.arange(0.,90,0.5) m.drawparallels(parallels,labels=[1,0,0,0],dashes=[2,900],fontsize=10,linewidth=0.4) meridians = np.arange(180.,360.,1) m.drawmeridians(meridians,labels=[0,0,0,1],dashes=[2,900],fontsize=10,linewidth=0.4) cMAP = ListedColormap(['#00bfff','#00ffff','#009933','#33cc33','#c6ff1a','#ffff00', '#ffcc00','#ffcc00','#ffcc00','#ff9933','#ff9933','#ff9933', '#ff8000','#ff8000','#ff8000','#ff6600','#ff6600','#ff6600', '#ff4000','#ff4000','#ff4000']) cMAP.set_under('#0080ff')