def main():
    """Plot profile diagnostics interpolated relative to boundary-layer height.

    NOTE(review): relies on module-level names (``forecast``, ``names``,
    ``dz``, ``legend``, ``convert``, ``filters``, ``diagnostics``,
    ``second_analysis``) that are not visible in this chunk — confirm they
    are defined at import time.
    """
    # Load the variables from the forecast at T+18h
    cubes = forecast.set_lead_time(hours=18)
    # `names` is presumably a module-level list of variable names — verify
    x = convert.calc(names, cubes)
    surface = convert.calc('boundary_layer_height', cubes)

    # Mask points within 100 gridpoints of land: a 100-point maximum filter
    # on altitude marks anything near terrain higher than 20 m
    z = convert.calc('altitude', cubes)
    zm = filters.maximum_filter(z[0].data, 100)
    mask = zm > 20

    # Interpolate relative to boundary layer height
    # (`dz` is presumably a module-level spacing array — verify)
    output = diagnostics.profile(x, surface, dz, mask=mask)

    # Plot the variables, one styled line per diagnostic
    for cube in output:
        c = second_analysis.all_diagnostics[cube.name()]
        iplt.plot(cube, cube.coord('distance_from_boundary_layer_height'),
                  color=c.color, linestyle=c.linestyle, label=c.symbol)

    plt.axvline(color='k')
    plt.axhline(color='k')
    legend(key=second_analysis.get_idx, loc='best', ncol=2)
    plt.show()

    return
def main():
    """Plot stochastic-run RMSE time series, one curve per physics scheme."""
    # Select which file and variable to compare
    path = datadir + 'output/'
    filename = 'precision_errors_temperature_500hpa_stochastic.nc'
    cs = iris.Constraint(pressure=500, precision=23)
    cubes = iris.load(path + filename, cs)

    # One curve per cube, styled by the physics scheme encoded in its name
    for cube in cubes:
        print(cube.name())
        cube.coord('forecast_period').convert_units('days')
        variable, scheme = decode_name(cube.name())
        physics_schemes[scheme].plot(cube, label=scheme)

    # Derive the plot annotations from the first cube
    reference = cubes[0]
    variable, scheme = decode_name(reference.name())
    units = reference.units
    pressure = int(reference.coord('pressure').points[0])
    precision = reference.coord('precision').points[0]

    plt.xlabel('Time (days)')
    plt.ylabel('RMSE ({})'.format(units))
    plt.title('{} at {}hPa at {} sbits'.format(variable, pressure, precision))
    legend(key=lambda x: physics_schemes[x[0]].idx)
    plt.show()

    return
def add_trimmings(ax, n, m): """ # Set Titles if n == 0: ax.get_xaxis().set_ticklabels([]) if m == 0: ax.set_title('Forecast') ax.set_xlim(0, 1.25) elif m == 1: ax.set_title('PV budget') plt.axvline(color='k') ax.set_xlim(-0.6, 0.6) ax.get_yaxis().set_ticklabels([]) if n == 1: ax.set_xlabel('PV (PVU)') elif m == 2: ax.set_title('Physics PV tracers') plt.axvline(color='k') ax.set_xlim(-1, 1) ax.get_yaxis().set_ticklabels([]) """ # plt.gca().invert_yaxis() plt.axvline(color='k') ax.set_ylim(950, 500) if n == 1: legend(ax, key=second_analysis.get_idx, loc='best', ncol=2, bbox_to_anchor=(0.9, -0.2), fontsize=25) return
def main():
    """Plot RMSE against precision at a fixed lead time, per physics scheme."""
    # Select which file and lead time to compare
    path = datadir + 'output/'
    filename = 'precision_errors_temperature_500hpa.nc'
    lead_time = 24
    constraint = iris.Constraint(forecast_period=lead_time)
    cubes = iris.load(path + filename, constraint)

    # One curve per cube, styled by the physics scheme encoded in its name
    for cube in cubes:
        print(cube.name())
        variable, scheme = decode_name(cube.name())
        physics_schemes[scheme].plot(cube, label=scheme)

    # Derive the plot annotations from the first cube
    first = cubes[0]
    variable, scheme = decode_name(first.name())
    units = first.units
    pressure = int(first.coord('pressure').points[0])

    plt.xlabel('Precision (sbits)')
    plt.ylabel('RMSE ({})'.format(units))
    plt.title('{} at {}hPa at T+{}h'.format(variable, pressure, lead_time))
    legend(key=lambda x: physics_schemes[x[0]].idx)
    plt.show()

    return
def profile(coord, mappings, domains, title, xlabel, ylabel, xlims, ylims):
    """Draw a grid of profile panels: one row per domain, one column per mapping.

    Args:
        coord: vertical coordinate name used to fetch and plot the data.
        mappings: keys into ``second_analysis.mappings`` (columns).
        domains: domain names passed to ``second_analysis.get_data`` (rows).
        title: sequence of column titles (indexed by column).
        xlabel, ylabel: axis labels (placed on selected panels only).
        xlims: per-column x-limit pairs; ylims: shared y-limit pair.
    """
    ncols = len(mappings)
    nrows = len(domains)

    # Initialise the plot
    fig = plt.figure(figsize=(18, 25))

    # Loop over mappings
    for m, domain in enumerate(domains):
        cubes = second_analysis.get_data(coord, domain)
        for n, mapping in enumerate(mappings):
            mapping = second_analysis.mappings[mapping]
            ax = plt.subplot2grid((nrows, ncols), (m, n))

            profile_multi(cubes, ax, mapping, coord)

            ax.set_xlim(*xlims[n])
            ax.set_ylim(*ylims)

            # Column titles only on the top row
            if m == 0:
                ax.set_title(title[n])
            else:
                ax.set_title('')

            # Legend below the bottom row; hide x tick labels elsewhere
            if m == nrows - 1:
                legend(ax, key=second_analysis.get_idx, loc='upper left',
                       ncol=2, bbox_to_anchor=(0.05, -0.25))
            else:
                ax.get_xaxis().set_ticklabels([])

            # y label only on the middle-left panel; hide inner y ticks
            if n == 0:
                if m == 1:
                    ax.set_ylabel(ylabel)
                else:
                    ax.set_ylabel('')
            else:
                ax.set_ylabel('')
                ax.get_yaxis().set_ticklabels([])

            # x label only on the bottom-middle panel
            if m == nrows - 1 and n == 1:
                ax.set_xlabel(xlabel)
            else:
                ax.set_xlabel('')

            ax.axvline(color='k')
            ax.axhline(color='k')

            # Pressure increases downward
            if coord == 'air_pressure':
                ax.set_ylim(ax.get_ylim()[::-1])

            # Panel letter, numbered row-major
            multilabel(ax, n + m * ncols)

    fig.subplots_adjust(bottom=0.4)

    return
def tropopause_profile():
    """Plot tropopause-relative profiles in a 2 (ridges/troughs) x ncols grid.

    NOTE(review): relies on module-level names (``mappings``, ``coord``,
    ``ncols``, ``title``, ``xlims``, ``ylim``, ``xlabel``, ``ylabel``) not
    visible in this chunk — confirm they exist at import time.
    """
    # Initialise the plot
    fig = plt.figure(figsize=(18, 15))

    # Loop over mappings (columns) and domains (rows)
    for n, mapping in enumerate(mappings):
        mapping = second_analysis.mappings[mapping]
        for m, domain in enumerate(['ridges', 'troughs']):
            cubes = second_analysis.get_data(coord, domain)
            ax = plt.subplot2grid((2, ncols), (m, n))

            # First column shows forecast errors; the rest, multi-line profiles
            if n == 0:
                profile_error(cubes, ax, mapping, coord)
            else:
                profile_multi(cubes, ax, mapping, coord)

            plt.title(title[n])
            ax.set_xlim(*xlims[n])
            ax.set_ylim(*ylim)

            legend(ax, key=second_analysis.get_idx, loc='best', ncol=2,
                   bbox_to_anchor=(0.9, -0.2))

            # y label only on the first column; x label only on the second
            if n == 0:
                ax.set_ylabel(ylabel)
            else:
                ax.set_ylabel('')
                ax.get_yaxis().set_ticklabels([])
            if n == 1:
                ax.set_xlabel(xlabel)
            else:
                ax.set_xlabel('')

            plt.title('')
            plt.axvline(color='k')
            plt.axhline(color='k')

    # if coord == 'air_pressure':
    #     ax.set_ylim(ax.get_ylim()[::-1])

    # Panel letters (a), (b), ...
    for n, ax in enumerate(fig.axes):
        multilabel(ax, n)

    fig.subplots_adjust(bottom=0.4)
    #plt.savefig(plotdir + 'ch7_low/height_profile.pdf')
    plt.show()

    return
def main():
    """Plot boundary-layer potential-temperature profiles for IOP5 and IOP8.

    NOTE(review): ``style`` and ``label`` are not defined locally —
    presumably module-level sequences of line styles / legend labels for the
    four boundary-layer categories; confirm.
    """
    forecasts = [case_studies.iop5b.copy(), case_studies.iop8.copy()]
    fig = plt.figure(figsize=(18, 8))
    for n, forecast in enumerate(forecasts):
        ax = plt.subplot2grid((1, 2), (0, n))
        cubes = forecast.set_lead_time(hours=24)
        theta = convert.calc('air_potential_temperature', cubes)
        bl_type = convert.calc('boundary_layer_type', cubes)

        for m in range(4):
            # Keep only points whose boundary-layer type is 2m or 2m+1
            # (mask is True where the point should be excluded)
            mask = np.logical_not(
                np.logical_or(bl_type.data == 2 * m,
                              bl_type.data == 2 * m + 1))
            mask = mask * np.ones_like(theta.data)
            mean = theta_profile(theta, mask)
            # Show change relative to the lowest level
            mean = mean - mean[0]
            iplt.plot(mean,
                      mean.coord('atmosphere_hybrid_height_coordinate'),
                      style[m], label=label[m])

            #mask = np.logical_not(mask)
            #mean = theta_profile(theta, mask)
            # iplt.plot(mean,
            #           mean.coord('atmosphere_hybrid_height_coordinate'),
            #           '-kx')

        ax.set_xlim(-0.25, 4.5)
        ax.set_xlabel(r'$\theta$')
        ax.set_ylim(0, 1000)
        if n == 0:
            ax.set_ylabel('Height (m)')
            ax.set_title('IOP5')
        else:
            ax.get_yaxis().set_ticklabels([])
            ax.set_title('IOP8')
        plt.axvline(color='k')
        multilabel(ax, n)

    # Shared legend anchored below/left of the second panel
    legend(ax, loc='upper left', ncol=2, bbox_to_anchor=(-0.6, -0.2))
    fig.subplots_adjust(bottom=0.4)
    fig.savefig(plotdir + 'theta_profiles.pdf')
    plt.show()

    return
def make_plot(variable, units, sigma, reduced_precision, schemes):
    """Joint plot of double-precision tendency vs reduced-precision error.

    Draws, for each scheme in ``schemes``, the distribution of absolute
    errors of the reduced-precision tendency against the double-precision
    tendency, on log-log axes with a marginal histogram of the errors.

    Args:
        variable: tendency variable name passed to ``load_tendency``.
        units: unit string used in the axis labels.
        sigma: model sigma level to load.
        reduced_precision: significand bits of the reduced-precision run.
        schemes: iterable of parametrization-scheme names.
    """
    # Double-precision (52 sbit) reference tendency
    fp = load_tendency(variable=variable, sigma=sigma, precision=52)
    tendency = np.abs(fp.data).flatten()
    print(tendency.min(), tendency.max())

    jg = sb.JointGrid(None, None)

    # Logarithmic bins spanning 1e-12 to ~3e-3
    bins = 10. ** np.arange(-12, -2, 0.5)
    # np.diff gives the per-bin widths directly (replaces the manual loop)
    bin_widths = np.diff(bins)

    # One-to-one line: error equal to the tendency itself
    jg.ax_joint.plot(bins, bins, '--k')

    hists = []
    for scheme in schemes:
        rp = load_tendency(
            variable=variable,
            rp_scheme=scheme.lower().replace(' ', '_').replace('-', '_'),
            sigma=sigma,
            precision=reduced_precision)
        error = np.abs(rp.data - fp.data).flatten()
        # Fix: report the current scheme, not the whole `schemes` list
        print(scheme, error.min(), error.max())
        plp = speedy.physics_schemes[scheme]
        hist = draw_distribution(scheme, tendency, error, jg, plp, bins,
                                 bin_widths)
        hists.append(hist)

    # Marginal histogram of errors, outlined in each scheme's colour
    for hist, scheme in zip(hists, schemes):
        plp = speedy.physics_schemes[scheme]
        jg.ax_marg_y.barh(bins[:-1], hist, height=bin_widths, align='edge',
                          edgecolor=plp.color, fill=False)

    jg.ax_joint.set_xscale('log')
    jg.ax_joint.set_yscale('log')
    jg.ax_joint.set_xlim(1e-9, 5e-4)
    jg.ax_joint.set_ylim(1e-12, 5e-4)
    jg.ax_joint.set_xlabel('Double-Precision Tendency [{}]'.format(units),
                           fontsize='x-large')
    jg.ax_joint.set_ylabel('Error in Tendency [{}]'.format(units),
                           fontsize='x-large')
    legend(ax=jg.ax_joint, key=lambda x: speedy.physics_schemes[x[0]].idx,
           title='Parametrization Schemes')

    return
def make_plot_pair(filename, time_cs, precision_cs, axes, factor):
    """Draw the two-panel error figure: RMSE vs lead time, then vs precision."""
    cubes = iris.load(filename)
    for cb in cubes:
        cb.coord('forecast_period').convert_units('days')

    # (constraint applied to the panel, x-axis label) for each panel
    panel_specs = [(precision_cs, 'Forecast Lead Time [days]'),
                   (time_cs, 'Precision [sbits]')]

    for idx, (cs, xlab) in enumerate(panel_specs):
        plt.axes(axes[idx])
        multilabel(axes[idx], idx, factor=factor)
        make_plot(cubes, cs)
        if idx == 0:
            # Legend and y label only on the first panel
            legend(key=lambda x: physics_schemes[x[0]].idx, ncol=2,
                   title='Parametrization Schemes')
            plt.ylabel('RMS Error in Geopotential Height [m]')
        plt.xlabel(xlab)

    return
def main():
    """Plot mean relative rounding error per physics scheme against precision."""
    # Load cubes: temperature-tendency diagnostics at T+2/3 and sigma 0.95
    path = datadir + 'deterministic2/'
    cs = iris.Constraint(
        cube_func=lambda x: 'Temperature Tendency' in x.name(),
        forecast_period=2 / 3,
        pressure=0.95)
    rp_cubes = iris.load(path + 'rp_*_tendencies.nc', cs)
    fp_cubes = iris.load(path + 'fp_tendencies.nc', cs)

    # Show the reference machine epsilon, 2^-(sbits+1), for each precision
    sbits = np.arange(5, 24)
    error = 2.0**-(sbits + 1)
    #plt.plot(sbits, error, '--k')

    # Loop over physics schemes
    for rp in rp_cubes:
        # Scheme and units are encoded in the cube name
        scheme, units = parse.parse('Temperature Tendency due to {} [{}]',
                                    rp.name())
        if scheme == 'Large-Scale Condensation':
            scheme = 'Condensation'
        plp = physics_schemes[scheme]
        fp = fp_cubes.extract(iris.Constraint(name=rp.name()))[0]

        # Calculate error of nonzero gridpoints (mask where either run is 0)
        rp.data = np.ma.masked_where(
            np.logical_or(rp.data == 0, fp.data == 0),
            np.abs(rp.data - fp.data))
        mean_error = mean_diff(rp, 0)
        plp.plot(mean_error, label=scheme)
        # Ratio of the measured mean error to machine epsilon
        print('{}: {}'.format(scheme, np.mean(mean_error.data / error)))

    plt.xlabel('Precision [sbits]')
    plt.ylabel('Mean relative error')
    legend(key=lambda x: physics_schemes[x[0]].idx)
    plt.show()

    return
def main():
    """Plot mean specific-humidity tendency error per scheme vs precision.

    Fix: the original mask condition was ``[..., fp.data] == 0`` — a Python
    list compared with 0, which is simply ``False``, so no gridpoints were
    ever masked.  The intended condition (cf. the analogous temperature
    routine) masks points where the reference tendency is zero.
    """
    sigma = speedy.sigma_levels[1]
    sbits = np.arange(5, 24)

    # Reference (52 sbit) tendency does not depend on the scheme — load once
    fp = load_tendency('Specific Humidity', sigma=sigma, precision=52)

    # Loop over physics schemes
    for scheme in speedy.physics_schemes:
        plp = speedy.physics_schemes[scheme]
        rp = load_tendency('Specific Humidity',
                           rp_scheme=speedy.to_filename(scheme),
                           sigma=sigma, precision=sbits)

        # Calculate error of nonzero gridpoints: mask points where the
        # reference tendency is zero so they don't pollute the mean
        rp.data = np.ma.masked_where(fp.data == 0, rp.data - fp.data)
        mean_error = mean_diff(rp, 0)
        plp.plot(mean_error, label=scheme)

    plt.xlabel('Precision [sbits]')
    plt.ylabel('Mean relative error')
    legend(key=lambda x: speedy.physics_schemes[x[0]].idx)
    plt.show()

    return
def humidity_gradients():
    """Plot analysis profiles of humidity/vorticity for ridges vs troughs.

    Fix: the original computed ``row = n / ncols`` which is *float* division
    in Python 3; ``plt.subplot2grid`` requires integer grid positions.  Use
    ``divmod`` to get integer (row, col).

    NOTE(review): ``variables``, ``coord``, ``nrows``, ``ncols`` and
    ``plotdir`` are module-level names not visible in this chunk — confirm.
    """
    # Initialise the plot
    fig = plt.figure(figsize=(18, 15))

    # One panel per variable, filled row-major
    for n, variable in enumerate(variables):
        row, col = divmod(n, ncols)  # integer grid position (was float `/`)
        print(row, col)
        ax = plt.subplot2grid((nrows, ncols), (row, col))

        # Solid line for ridges, dashed for troughs
        for subdomain, linestyle in [('ridges', '-'), ('troughs', '--')]:
            cubes = second_analysis.get_data(coord, subdomain)
            cube = convert.calc(variable, cubes)
            cube.coord(coord).convert_units('km')
            mean, std_err = second_analysis.extract_statistics(
                cube, 'forecast_index')

            # Offset vorticity by 1e-4; scale mass fractions to g/kg
            if variable == 'vertical_vorticity':
                mean.data = mean.data + 1e-4
            else:
                mean.data = mean.data * 1e3
                std_err.data = std_err.data * 1e3

            iplt.plot(mean[0], mean.coord(coord),
                      # xerr=std_err[0],
                      linestyle=linestyle, label=subdomain.capitalize(),
                      color='k', marker='x', ms=5)

        ax.set_ylabel('')
        ax.set_ylim(-2, 2)
        if col > 0:
            ax.get_yaxis().set_ticklabels([])

        # Per-variable titles and x labels
        if variable == 'specific_humidity':
            ax.set_title('Specific Humidity')
            ax.set_xlabel(r'Mass Fraction (g kg$^{-1}$)')
        elif variable == 'vertical_vorticity':
            ax.set_title('Vertical Vorticity')
            ax.set_xlabel(r'Vorticity (s$^{-1}$)')
        elif variable == 'mass_fraction_of_cloud_liquid_water_in_air':
            ax.set_title('Cloud Liquid')
            ax.set_xlabel(r'Mass Fraction (g kg$^{-1}$)')
        elif variable == 'mass_fraction_of_cloud_ice_in_air':
            ax.set_title('Cloud Ice')
            ax.set_xlabel(r'Mass Fraction (g kg$^{-1}$)')

        plt.axhline(color='k')
        multilabel(ax, n)

    legend(ax=fig.axes[0], loc='best')
    fig.text(0.075, 0.5, 'Vertical distance from tropopause (km)',
             va='center', rotation='vertical')
    plt.savefig(plotdir + 'analysis_profiles.pdf')
    plt.show()
def main(): path = datadir + 'stochastic/ensembles/' # Get overlap from exchanged ensembles ovl_range = iris.load(path + 'ovl_perturbed_??.nc') for n, cube in enumerate(ovl_range): cube.add_aux_coord(iris.coords.AuxCoord(n, long_name='ensemble')) ovl_range = ovl_range.merge_cube() ovl_range.coord('forecast_period').convert_units('days') ovl_min = ovl_range.collapsed('ensemble', MIN) ovl_max = ovl_range.collapsed('ensemble', MAX) ovl_mean = ovl_range.collapsed('ensemble', MEAN) # Two panels fig, axes = plt.subplots(nrows=1, ncols=2, sharey='row', figsize=[16, 5]) # Panel 1 - plt.axes(axes[0]) multilabel(axes[0], 0, 0.01) plt.fill_between(ovl_range.coord('forecast_period').points, ovl_min.data, ovl_max.data, color='grey') files = [ ('overlap_52_23.nc', '23 sbit', '-', 'k'), ('overlap_52_10.nc', '10 sbit', '--', 'k'), ('overlap_52_8.nc', '8 sbit', ':', 'k'), ('overlap_52_adj8.nc', '8 sbit, Fixed', '--', 'y'), ('overlap_52_half_precision_exponent.nc', '10 sbit, Exponent', '-', 'y') ] for filename, label, linestyle, color in files: overlap = iris.load_cube(path + filename) overlap.coord('forecast_period').convert_units('days') iplt.plot(overlap, label=label, color=color, linestyle=linestyle) legend() # Panel 2 plt.axes(axes[1]) multilabel(axes[1], 1, 0.01) plt.fill_between(ovl_range.coord('forecast_period').points, ovl_min.data, ovl_max.data, color='grey') files = [ ('overlap_52_cnv8.nc', 'Convection'), ('overlap_52_cond8.nc', 'Condensation'), ('overlap_52_swrad8.nc', 'Short-Wave Radiation'), ('overlap_52_lwrad8.nc', 'Long-Wave Radiation'), ('overlap_52_sflx8.nc', 'Surface Fluxes'), ('overlap_52_vdif8.nc', 'Vertical Diffusion'), ] for filename, label in files: overlap = iris.load_cube(path + filename) overlap.coord('forecast_period').convert_units('days') plp = physics_schemes[label] plp.plot(overlap, label=label) legend() plt.ylabel('Overlapping Coefficient') fig.text(0.5, 0.01, 'Forecast Lead Time [days]', ha='center') plt.show() return
def main():
    """Plot PV-gradient composites for ridges and troughs (2x3 grid)."""
    # Initialise the plot
    fig = plt.figure(figsize=(18, 12))

    # Add subfigures: 2 rows (ridges/troughs) x 3 columns
    for n in range(2):
        for m in range(3):
            plt.subplot2grid((2, 3), (n, m))

    # Plot composites
    pv_gradients('distance_from_dynamical_tropopause', 1, fig)

    # Add faint lines for q_adv
    #pv_gradients('distance_from_advection_only_tropopause', 0.25, fig)
    for n, subdomain in enumerate(['ridges', 'troughs']):
        coord = 'distance_from_advection_only_tropopause'
        alpha = 0.3
        cubes = second_analysis.get_data(coord, subdomain)
        m = 1
        # Restrict the mapping to the two budget-residual tracers
        mapping = second_analysis.mappings['pv_main']
        mapping = {
            k: mapping[k]
            for k in ('dynamics_tracer_inconsistency',
                      'sum_of_physics_pv_tracers')
        }
        ax = fig.axes[n * 3 + m]
        pv_gradients_multi(cubes, coord, ax, mapping, alpha)

    fig.subplots_adjust(bottom=0.2)

    # Set labels and limits on plots
    for n, subdomain in enumerate(['ridges', 'troughs']):
        for m in range(3):
            ax = fig.axes[n * 3 + m]

            # X-axis - Same for both rows
            ax.set_xticks([0, 12, 24, 36, 48, 60])
            if n == 0:
                ax.get_xaxis().set_ticklabels([])

            # Set Titles
            if m == 0:
                ax.set_title('Forecast')
            elif m == 1:
                ax.set_title('PV budget')
            elif m == 2:
                ax.set_title('Physics PV tracers')
            else:
                # NOTE(review): unreachable for m in range(3) — the legend is
                # never drawn; confirm whether it belongs outside the chain.
                legend(ax, key=second_analysis.get_idx, loc='best', ncol=2,
                       bbox_to_anchor=(1.0, -0.2), fontsize=25)

            if m == 1:
                ax.set_xlabel('Forecast lead time (hours)')

            # Y-Axis
            if m == 0:
                # First column custom per row
                if n == 0:
                    ax.set_ylim(3.0, 3.6)
                else:
                    ax.set_ylim(2.4, 3.0)
            elif m == 1:
                # Columns 2
                ax.set_ylim(-0.1, 0.5)
            else:
                ax.set_ylim(-0.05, 0.25)

            multilabel(ax, n * 3 + m)

    fig.text(0.075, 0.55, 'PV (PVU)',
             va='center', rotation='vertical', fontsize=20)
    fig.text(0.05, 0.75, 'Ridges',
             va='center', rotation='vertical', fontsize=20)
    fig.text(0.05, 0.35, 'Troughs',
             va='center', rotation='vertical', fontsize=20)

    plt.savefig(plotdir + 'pv_gradients_new.pdf')
    plt.show()

    return
def main():
    """Plot tropopause-relative profile composites for ridges/troughs (2x3)."""
    # Initialise the plot
    fig = plt.figure(figsize=(18, 12))

    # Add subfigures: 2 rows (ridges/troughs) x 3 columns
    for n in range(2):
        for m in range(3):
            plt.subplot2grid((2, 3), (n, m))

    # Plot composites
    tropopause_profile('distance_from_dynamical_tropopause', 1, fig)

    # Add faint lines for q_adv
    #tropopause_profile('distance_from_advection_only_tropopause', 0.25, fig)
    for n, subdomain in enumerate(['ridges', 'troughs']):
        coord = 'distance_from_advection_only_tropopause'
        alpha = 0.3
        cubes = second_analysis.get_data(coord, subdomain)
        m = 1
        # Restrict the mapping to the two budget-residual tracers
        mapping = second_analysis.mappings['pv_main']
        mapping = {
            k: mapping[k]
            for k in ('dynamics_tracer_inconsistency',
                      'sum_of_physics_pv_tracers')
        }
        ax = fig.axes[n * 3 + m]
        profile_multi(cubes, coord, ax, mapping, alpha)

    fig.subplots_adjust(bottom=0.2)

    # Set labels and limits on plots
    for n, subdomain in enumerate(['ridges', 'troughs']):
        for m in range(3):
            ax = fig.axes[n * 3 + m]

            # X-axis - Same for all plots
            if m == 0:
                ax.set_xlim(-0.5, 0.2)
                ax.set_xticks([-0.4, -0.2, 0, 0.2])
            else:
                ax.set_xlim(-0.2, 0.3)
                ax.set_xticks([-0.2, -0.1, 0, 0.1, 0.2, 0.3])
            if n == 0:
                ax.get_xaxis().set_ticklabels([])

            # Set Titles
            if m == 0:
                ax.set_title('Forecast minus analysis')
            elif m == 1:
                ax.set_title('PV budget')
            elif m == 2:
                ax.set_title('Physics PV tracers')
            else:
                # NOTE(review): unreachable for m in range(3) — the legend is
                # never drawn; confirm whether it belongs outside the chain.
                legend(ax, key=second_analysis.get_idx, loc='best', ncol=2,
                       bbox_to_anchor=(1.0, -0.2), fontsize=25)

            if m == 1:
                ax.set_xlabel('PV (PVU)')

            # Y-axis - Same for all plots
            ax.set_ylim(-2, 2)
            ax.set_yticks([-2, -1.5, -1, -0.5, 0, 0.5, 1, 1.5, 2])
            if m != 0:
                ax.get_yaxis().set_ticklabels([])

            multilabel(ax, n * 3 + m)

    fig.text(0.075, 0.55, 'Vertical distance from tropopause (km)',
             va='center', rotation='vertical', fontsize=20)
    fig.text(0.05, 0.75, 'Ridges',
             va='center', rotation='vertical', fontsize=20)
    fig.text(0.05, 0.35, 'Troughs',
             va='center', rotation='vertical', fontsize=20)

    plt.savefig(plotdir + 'tropopause_profile_new.pdf')
    # plt.show()

    return
def main2(variable, sigma, table):
    """Plot tendency errors vs precision, individually and in total.

    Side effect: appends a LaTeX table fragment (error range in units of
    machine epsilon) to ``table[scheme]`` for each scheme.

    NOTE(review): relies on module-level names (``schemes``, ``filename``,
    ``display_errors``, ``global_mean``, ``collapse_sigma``, ``maths``)
    not visible in this chunk — confirm.
    """
    # Create the two-panel grid with shared, logarithmic y axes
    fig, axes = plt.subplots(nrows=1, ncols=2, sharey='row', figsize=(16, 5),
                             subplot_kw={'yscale': 'log'})

    # Show the reference machine epsilon, 2^-(sbits+1), per precision
    sbits = np.arange(5, 24)
    machine_error = 2.0**-(sbits + 1)

    # Panel 1: errors with respect to individual parametrization tendency
    plt.axes(axes[0])
    for scheme in schemes:
        plp = speedy.physics_schemes[scheme]
        try:
            fp = load_tendency(variable=variable, scheme=scheme,
                               rp_scheme='all_parametrizations',
                               sigma=sigma, precision=52)
            rp = load_tendency(variable=variable, scheme=scheme,
                               rp_scheme=filename(scheme),
                               sigma=sigma, precision=sbits)
            # Ignore where tendencies are zero
            rp.data = np.ma.masked_where((rp.data - fp.data) == 0, rp.data)
            display_errors(rp, fp, plp)
        except iris.exceptions.ConstraintMismatchError:
            # Best-effort: skip schemes whose files are missing
            print('{} cannot be loaded \n'.format(scheme))

    # Panel 2: errors with respect to total parametrization tendency
    plt.axes(axes[1])
    fp = load_tendency(variable=variable, rp_scheme='all_parametrizations',
                       sigma=sigma, precision=52)
    tendency = global_mean(maths.abs(fp))
    tendency = collapse_sigma(tendency)
    # Reference lines: mean total tendency and epsilon-scaled tendency
    axes[1].axhline(tendency.data, linestyle='--', color='k', alpha=0.5)
    axes[1].plot(sbits, machine_error * tendency.data, ':k', alpha=0.5)
    for scheme in schemes:
        plp = speedy.physics_schemes[scheme]
        rp = load_tendency(variable=variable, rp_scheme=filename(scheme),
                           sigma=sigma, precision=sbits)
        error = display_errors(rp, fp, plp, label=scheme)
        # Express the error range in multiples of machine epsilon
        error = (error / tendency) / machine_error
        table[scheme] += ' & ${:.0f}-{:.0f}\\varepsilon$'.format(
            error.data.min(), error.data.max())

    # Add dressing to the plot
    multilabel(axes[0], 0, factor=0.01)
    axes[0].set_title('Individual Temperature Tendency')
    axes[0].set_ylabel('Average Tendency Error [{}]'.format(tendency.units))
    axes[0].set_xticks(sbits[::5])
    multilabel(axes[1], 1, factor=0.01)
    axes[1].set_title('Total Temperature Tendency')
    axes[1].set_xticks(sbits[::5])
    fig.text(0.45, 0.01, 'Precision [sbits]')
    legend(ax=axes[1], key=lambda x: speedy.physics_schemes[x[0]].idx,
           ncol=2)
    plt.subplots_adjust(left=0.08, right=0.98, wspace=0.05)
    return