def main(inargs):
    """Run the program: scatter-plot zonal salinity climatology (x) against
    zonal salinity trend (y), pooled across basins and input file pairs,
    colored by experiment.
    """
    # One pooled (x, y) masked array pair per experiment.
    data_dict = {}
    for experiment in list(experiment_colors.keys()):
        data_dict[(experiment, 'x_data')] = numpy.ma.array([])
        data_dict[(experiment, 'y_data')] = numpy.ma.array([])
    metadata_dict = {}
    for data_file, basin_file in inargs.file_pair:
        # inargs.time may be absent; fall back to an unconstrained load.
        try:
            time_constraint = gio.get_time_constraint(inargs.time)
        except AttributeError:
            time_constraint = iris.Constraint()
        with iris.FUTURE.context(cell_datetime_objects=True):
            cube = iris.load_cube(data_file, 'sea_surface_salinity' & time_constraint)
        basin_cube = read_basin(basin_file)
        # Broadcast the 2D basin mask over the cube's leading dimensions.
        ndim = cube.ndim
        basin_array = uconv.broadcast_array(basin_cube.data, [ndim - 2, ndim - 1], cube.shape)
        metadata_dict[data_file] = cube.attributes['history']
        metadata_dict[basin_file] = basin_cube.attributes['history']
        model, experiment = get_experiment_details(cube)
        for basin in list(basins.keys()):
            zonal_climatology, zonal_trends = calc_zonal_stats(cube.copy(), basin_array, basin)
            data_dict[(experiment, 'x_data')] = numpy.ma.append(data_dict[(experiment, 'x_data')], zonal_climatology)
            data_dict[(experiment, 'y_data')] = numpy.ma.append(data_dict[(experiment, 'y_data')], zonal_trends)

    fig = plt.figure(figsize=(12, 8))
    for experiment, color in experiment_colors.items():
        x_data = data_dict[(experiment, 'x_data')]
        y_data = data_dict[(experiment, 'y_data')]
        if numpy.any(x_data):
            # Thin the scatter points to keep the plot readable.
            plt.scatter(x_data[::inargs.thin], y_data[::inargs.thin], facecolors='none', edgecolors=color, label=experiment)
            # Overlay a fitted trend line for the aerosol experiments only.
            if experiment in ['AA', 'noAA']:
                x_trend, y_trend = calc_trend(x_data, y_data, experiment)
                plt.plot(x_trend, y_trend, color=color)
    plt.legend(loc=4)
    plt.xlabel('Climatological mean salinity')
    plt.ylabel('Salinity trend (per 50 years)')
    # NOTE(review): uses `model` from the last loop iteration — assumes all
    # file pairs come from the same model; confirm with callers.
    plt.title(model)

    # Write output
    plt.savefig(inargs.outfile, bbox_inches='tight')
    gio.write_metadata(inargs.outfile, file_info=metadata_dict)
def main(inargs):
    """Run the program: plot an NH/SH interhemispheric comparison timeseries
    for each experiment file group.
    """
    time_constraint = gio.get_time_constraint(inargs.time)
    # Latitude-band constraints for each hemisphere (lower-inclusive).
    nh_lower, nh_upper = inargs.nh_lat_bounds
    nh_constraint = iris.Constraint(latitude=lambda cell: nh_lower <= cell < nh_upper)
    sh_lower, sh_upper = inargs.sh_lat_bounds
    sh_constraint = iris.Constraint(latitude=lambda cell: sh_lower <= cell < sh_upper)
    data_dict = {}  # NOTE(review): populated nowhere in this function
    plot_details_list = []  # NOTE(review): populated nowhere in this function
    for infiles in inargs.experiment_files:
        cube, model, experiment, orig_units = get_data(infiles, inargs.variable, nh_constraint, sh_constraint, time_constraint, inargs.area_file)
        iplt.plot(cube, label=experiment, color=experiment_colors[experiment])
    plt.legend()
    plt.xlabel('year')
    plt.ylabel('NH / SH')
    # `model` comes from the last file group — assumes a single model per plot.
    title = '%s interhemispheric %s comparison' % (model, inargs.variable.replace('_', ' '))
    plt.title(title)
    #plt.subplots_adjust(top=0.90)
    plt.savefig(inargs.outfile, bbox_inches='tight')
    # NOTE(review): `history` is not defined here — presumably a module-level
    # list filled by a load callback; confirm at module scope.
    gio.write_metadata(inargs.outfile, file_info={inargs.experiment_files[0][0]: history[0]})
def main(inargs):
    """Run the program: plot per-model zonal-mean temperature trend curves
    (faint lines) plus the ensemble mean per experiment (bold lines).
    """
    var = 'zonal vertical mean globe argo sea water potential temperature'
    # Common 1-degree latitude grid used to regrid all models before merging.
    common_lats = [('latitude', numpy.arange(-90, 91, 1))]
    index = 0  # realization index, unique across all input files
    current_experiments = []
    for filename, experiment in inargs.infile:
        assert experiment in experiment_colors.keys()
        cube = iris.load_cube(filename, var)
        cube, scale_label = scale_data(cube, inargs.xcoord)
        color = experiment_colors[experiment]
        # Only label the first curve of each experiment so the legend
        # has one entry per experiment.
        if not experiment in current_experiments:
            iplt.plot(cube, color=color, alpha=0.3, label='individual models, %s' % (experiment))
            current_experiments.append(experiment)
        else:
            iplt.plot(cube, color=color, alpha=0.3)
        # Interpolate to the common grid and tag with a realization coord
        # so the cubes can be merged for the ensemble mean.
        new_cube = cube.interpolate(common_lats, iris.analysis.Linear())
        new_cube.add_aux_coord(iris.coords.DimCoord(index, 'realization'))
        fix_lats(new_cube.coord('latitude'))
        # NOTE(review): `experiment_cubes` is not defined here — presumably a
        # module-level mapping of experiment -> CubeList; confirm.
        experiment_cubes[experiment].append(new_cube)
        index = index + 1

    for experiment in current_experiments:
        cube_list = experiment_cubes[experiment]
        equalise_attributes(cube_list)
        merged_cube = cube_list.merge_cube()
        ensemble_mean = merged_cube.collapsed('realization', iris.analysis.MEAN)
        color = experiment_colors[experiment]
        iplt.plot(ensemble_mean, color=color, linewidth=3.0, label='ensemble mean, %s' % (experiment))

    plt.xlim(-70, 70)
    plt.legend(loc=inargs.legloc)
    # y-axis label depends on whether trends were computed against time
    # or against global-mean surface temperature (tas).
    if inargs.xcoord == 'time':
        plt.ylabel('1950-2000 linear trend ($%s \enspace K \enspace yr^{-1}$)' % (scale_label))
    elif inargs.xcoord == 'tas':
        plt.ylabel('1950-2000 linear regression coefficient ($%s \enspace K \enspace K^{-1}$)' % (scale_label))
    plt.xlabel('latitude')
    plt.title('Zonal mean, vertical mean sea water potential temperature (0-2000m)')
    plt.savefig(inargs.outfile, bbox_inches='tight')
    # Provenance keyed by the last input file processed.
    gio.write_metadata(inargs.outfile, file_info={filename: cube.attributes['history']})
def main(inargs): """Run the program""" # Read data dset_in = xray.open_dataset(inargs.fourier_file) df = dset_in.to_dataframe() # Change the amplitue columns so the value is a ranking amp_df = df.loc[:, df.columns.map(lambda x: 'amp' in x)] rank_df = amp_df.apply(rankdata, axis=1) rank_df = rank_df.combine_first(df) # Select the ones where wave 5 and 6 are in the top 3 amplitudes # (worst ranking must be 8 + 9 = 17) included = (rank_df['wave5_amp'].values + rank_df['wave6_amp'].values) >= 17 final = rank_df.loc[included] # Reject days that change sign too much if inargs.max_sign_change: final = final.loc[final['sign_count'] <= inargs.max_sign_change] final = event_info(final, inargs.freq) if inargs.full_stats: assert not inargs.phase_filter and not inargs.season_filter and not inargs.duration_filter, \ "Cannot filter by phase, season or duration for full stats, because then they would not be full!" final.to_csv(inargs.output_file) else: # Optional filtering by duration if inargs.duration_filter: final = final.loc[final['event_duration'] > inargs.duration_filter] # Optional filtering by season if inargs.season_filter: season = inargs.season_filter months_subset = pandas.to_datetime(final.index.values).month bools_subset = (months_subset == season_months[season][0]) + ( months_subset == season_months[season][1]) + ( months_subset == season_months[season][2]) final = final.loc[bools_subset] # Optional filtering by wave phase if inargs.phase_filter: phase_min, phase_max = set_phase_bounds(inargs.phase_filter, inargs.freq) target_phase = 'wave%i_phase' % (inargs.freq) min_bools = (final[target_phase] > phase_min).values max_bools = (final[target_phase] < phase_max).values if phase_min < phase_max: final = final.loc[numpy.logical_and(min_bools, max_bools)] else: final = final.loc[numpy.logical_or(min_bools, max_bools)] # Write date file gio.write_dates(inargs.output_file, final.index.values) metadata_dict = {inargs.fourier_file: dset_in.attrs['history']} 
gio.write_metadata(inargs.output_file, file_info=metadata_dict)
def main(inargs):
    """Run the program: scatter-plot paired x/y variable trends per model and
    experiment, with optional per-experiment lines of best fit.
    """
    assert len(inargs.xfiles) == len(inargs.yfiles)
    time_constraint = gio.get_time_constraint(inargs.time)
    fig, ax = plt.subplots()
    plt.axhline(y=inargs.hline, color='0.5', linestyle='--')
    plt.axvline(x=0, color='0.5', linestyle='--')
    color_dict = get_colors(inargs.xfiles)
    legend_models = []
    xtrends = {'historicalGHG': [], 'historicalMisc': [], 'historical': []}
    ytrends = {'historicalGHG': [], 'historicalMisc': [], 'historical': []}
    for xfile, yfile in zip(inargs.xfiles, inargs.yfiles):
        with iris.FUTURE.context(cell_datetime_objects=True):
            ytrend, ycube, ymodel, yexperiment, yrip = load_data(yfile, inargs.yvar, time_constraint)
            # NOTE(review): xvar is wrapped in a list here but yvar is not —
            # presumably load_data accepts both forms; confirm.
            xtrend, xcube, xmodel, xexperiment, xrip = load_data(xfile, [inargs.xvar], time_constraint)
        # The x and y files must describe the same model/experiment/run.
        assert (xmodel, xexperiment, xrip) == (ymodel, yexperiment, yrip)
        # Label each model only once so the legend has one entry per model.
        if xmodel not in legend_models:
            label = xmodel
            legend_models.append(xmodel)
        else:
            label = None
        plt.plot(xtrend, ytrend, markers[xexperiment], label=label, color=color_dict[xmodel])
        xtrends[xexperiment].append(xtrend)
        ytrends[xexperiment].append(ytrend)

    if inargs.best_fit:
        for experiment in ['historicalGHG', 'historicalMisc', 'historical']:
            if xtrends[experiment]:
                plot_line_of_best_fit(xtrends[experiment], ytrends[experiment])

    title = 'linear trend, %s-%s' % (inargs.time[0][0:4], inargs.time[1][0:4])
    plt.title(title)
    xlabel, ylabel = set_axis_labels(inargs, xcube.units, ycube.units)
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
    # Sort the legend entries alphabetically and move the legend outside
    # the shrunken axes.
    handles, labels = ax.get_legend_handles_labels()
    labels, handles = zip(*sorted(zip(labels, handles), key=lambda t: t[0]))
    box = ax.get_position()
    ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
    ax.legend(handles, labels, loc='center left', bbox_to_anchor=(1, 0.5))
    plt.savefig(inargs.outfile, bbox_inches='tight')
    # Provenance from the last file pair processed.
    metadata_dict = {xfile: xcube.attributes['history'], yfile: ycube.attributes['history']}
    gio.write_metadata(inargs.outfile, file_info=metadata_dict)
def main(inargs):
    """Run the program: two-panel plot of heat transport convergence (htc),
    surface heat flux (hfds) and OHC tendency — means on top, trends below.
    """
    metadata_dict = {}
    fig = plt.figure(figsize=[10, 14])
    gs = gridspec.GridSpec(2, 1)
    # Each getter returns its data plus the provenance dict updated in place.
    htc_trend, htc_mean, metadata_dict = get_htc_data(inargs.htc_file, metadata_dict, rolling_window=inargs.rolling_window)
    hfds_trend, hfds_mean, metadata_dict = get_hfds_data(inargs.hfds_file, metadata_dict, rolling_window=inargs.rolling_window)
    ohc_tendency_trend, ohc_trend, metadata_dict = get_ohc_data(inargs.ohc_file, metadata_dict)
    infer_list = select_inferred_plots(inargs.infer, htc_trend, hfds_trend, ohc_tendency_trend)
    # Panel 0: climatological means; panel 1: trends.
    plot_data(htc_mean, hfds_mean, ohc_trend, inargs, gs, 0, 'mean', infer_list)
    plot_data(htc_trend, hfds_trend, ohc_tendency_trend, inargs, gs, 1, 'trends', infer_list)
    title = get_title(htc_trend, hfds_trend, ohc_tendency_trend)
    plt.suptitle(title)
    plt.savefig(inargs.outfile, bbox_inches='tight')
    gio.write_metadata(inargs.outfile, file_info=metadata_dict)
def main(inargs):
    """Run the program.

    Select the times at which a metric crosses a threshold and write the
    matching dates (plus provenance metadata) to the output file.
    """
    # Load the metric and subset it according to the command-line kwargs.
    dataset = xray.open_dataset(inargs.infile)
    gio.check_xrayDataset(dataset, inargs.metric)
    selection_kwargs = gio.get_subset_kwargs(inargs)
    metric_darray = dataset[inargs.metric].sel(**selection_kwargs)

    # Resolve the threshold value, then keep times on the requested side of it.
    threshold_value = uconv.get_threshold(metric_darray.values, inargs.metric_threshold)
    assert inargs.threshold_direction in ['greater', 'less']
    if inargs.threshold_direction == 'greater':
        keep = metric_darray >= threshold_value
    elif inargs.threshold_direction == 'less':
        keep = metric_darray <= threshold_value
    selected = metric_darray.loc[keep]

    # Write the date list and its provenance.
    gio.write_dates(inargs.outfile, selected['time'].values)
    provenance = {inargs.infile: dataset.attrs['history']}
    gio.write_metadata(inargs.outfile, file_info=provenance)
def main(inargs):
    """Run the program.

    Produce a composite-date plot and/or a running-mean timescale plot
    (side by side when both are requested) and save the figure plus its
    provenance metadata.
    """
    # Initialise plot (figsize=None lets matplotlib pick its default size)
    fig = plt.figure(figsize=inargs.figure_size)
    if not inargs.figure_size:
        # Report the auto-chosen dimensions so the user can reproduce them.
        # Fix: the originals were Python-2-only print statements; the
        # single-argument print() form below parses identically in 2 and 3.
        print('figure width: %s' % (str(fig.get_figwidth())))
        print('figure height: %s' % (str(fig.get_figheight())))

    # At least one of the two plot types must be requested.
    assert inargs.date_curve or inargs.runmean

    if inargs.date_curve and inargs.runmean:
        ax1 = plt.subplot(1, 2, 1)
        ax2 = plt.subplot(1, 2, 2)
        metadata_dict = composite_plot(ax1, inargs, label='(a)')
        temp = timescale_plot(ax2, inargs, label='(b)')
    elif inargs.date_curve:
        ax = plt.subplot(1, 1, 1)
        metadata_dict = composite_plot(ax, inargs)
    elif inargs.runmean:
        ax = plt.subplot(1, 1, 1)
        metadata_dict = timescale_plot(ax, inargs)

    plt.savefig(inargs.outfile, bbox_inches='tight')
    plt.clf()

    # Metadata
    gio.write_metadata(inargs.outfile, file_info=metadata_dict)
def main(inargs): """Run the program.""" # Read data try: time_constraint = gio.get_time_constraint(inargs.time) except AttributeError: time_constraint = iris.Constraint() diff_trends = {} metadata_dict = {} for infile in inargs.infiles: with iris.FUTURE.context(cell_datetime_objects=True): cube_sthext = iris.load_cube( infile, 'ocean heat content southern extratropics60' & time_constraint) cube_notsthext = iris.load_cube( infile, 'ocean heat content northern extratropics60' & time_constraint) model, experiment, run = gio.get_cmip5_file_details(cube_sthext) run_ri = run[:-2] run_p = run[-2:] update_lists(model, experiment, run_ri, run_p) cube_sthext = cube_sthext.rolling_window('time', iris.analysis.MEAN, 12) cube_notsthext = cube_notsthext.rolling_window('time', iris.analysis.MEAN, 12) diff_trends[(model, experiment, run_ri, run_p)] = calc_diff_trends(cube_sthext, cube_notsthext) metadata_dict[infile] = cube_sthext.attributes['history'] # Plot fig = plt.figure() #figsize=[15, 7]) tex_units, exponent = uconv.units_info(str(cube_sthext.units)) for model in models: for experiment in experiments: for run_p in run_ps: data_compilation = numpy.array([]) for run_ri in run_ris: try: data = diff_trends[(model, experiment, run_ri, run_p)] data_compilation = numpy.concatenate( (data_compilation, data)) except KeyError: pass if data_compilation.any(): plot_trend_distribution(data_compilation, exponent, model, experiment, run_p) if inargs.reference_trend: plt.axvline(x=inargs.reference_trend, linestyle='--', color='0.5') plt.title('10-year trends in hemispheric OHC difference') plt.savefig(inargs.outfile, bbox_inches='tight') gio.write_metadata(inargs.outfile, file_info=metadata_dict)
def main(inargs):
    """Run the program: 3x2 grid of wind-stress plots — tropics (left) and
    critical latitudes (right) for the Atlantic, Pacific and globe.
    """
    time_constraints = {}
    time_constraints['historical'] = gio.get_time_constraint(inargs.hist_time)
    time_constraints['rcp'] = gio.get_time_constraint(inargs.rcp_time)
    if inargs.basin_file:
        basin_cube = iris.load_cube(inargs.basin_file)
    else:
        basin_cube = None
    width = 20
    height = 21
    fig = plt.figure(figsize=(width, height))
    ax_dict = {}
    ax_dict[('tropics', 'atlantic')] = fig.add_subplot(3, 2, 1)
    ax_dict[('critical', 'atlantic')] = fig.add_subplot(3, 2, 2)
    ax_dict[('tropics', 'pacific')] = fig.add_subplot(3, 2, 3)
    ax_dict[('critical', 'pacific')] = fig.add_subplot(3, 2, 4)
    ax_dict[('tropics', 'globe')] = fig.add_subplot(3, 2, 5)
    ax_dict[('critical', 'globe')] = fig.add_subplot(3, 2, 6)
    previous_experiments = []
    for filenum, infile in enumerate(inargs.infiles):
        for basin in ['atlantic', 'pacific', 'globe']:
            sh_mean, nh_mean, scrit_mean, ncrit_mean, experiment = load_data(infile, basin_cube, basin)
            plot_data(ax_dict[('tropics', basin)], sh_mean, nh_mean, experiment, previous_experiments, basin, crit=False, plot_type=inargs.plot_type)
            plot_data(ax_dict[('critical', basin)], scrit_mean, ncrit_mean, experiment, previous_experiments, basin, crit=True, plot_type=inargs.plot_type)
        # Record each file's experiment once, after its three basin plots.
        previous_experiments.append(experiment)
    # Title uses the model of the last cube loaded — assumes one model per plot.
    title = 'Annual Mean Surface Downward X Stress, %s' % (sh_mean.attributes['model_id'])
    plt.suptitle(title, size='large')
    # plt.subplots_adjust(top=0.90)
    plt.savefig(inargs.outfile, bbox_inches='tight')
    gio.write_metadata(inargs.outfile, file_info={inargs.infiles[-1]: sh_mean.attributes['history']})
def write_met_file(inargs, spatial_cubes, outfile):
    """Write the output metadata file.

    Records provenance for the first GHG file, the last AA file, and the
    two spatial files (histories taken from the spatial cubes themselves).
    """
    file_info = {
        inargs.ghg_files[0]: history[0],
        inargs.aa_files[-1]: history[-1],
        inargs.ghg_spatial_file: spatial_cubes['historicalGHG'].attributes['history'],
        inargs.aa_spatial_file: spatial_cubes['historicalAA'].attributes['history'],
    }
    gio.write_metadata(outfile, file_info=file_info)
def main(inargs):
    """Run the program: regional energy-budget bar chart combining
    atmosphere, surface and ocean terms.
    """
    # Optional time bounds; AttributeError fallback keeps the load unconstrained.
    if inargs.time:
        try:
            time_constraint = gio.get_time_constraint(inargs.time)
        except AttributeError:
            time_constraint = iris.Constraint()
    else:
        time_constraint = iris.Constraint()
    fig, axes = setup_plot(inargs.nregions)
    bar_width = 0.7
    hfds_values = {}
    # First pass: atmosphere and surface bars; keep the surface flux values
    # per region because the ocean terms can be inferred from them.
    for region in region_names[inargs.nregions]:
        plot_atmos(axes, inargs.infile, region, bar_width, inargs.aggregation, time_constraint, branch=inargs.branch_time)
        hfds_values[region] = plot_surface(axes, inargs.infile, region, bar_width, inargs.aggregation, time_constraint, branch=inargs.branch_time)
    ohc_values, transport_values, ohc_inferred_values, transport_inferred_values = get_ocean_values(inargs.infile, inargs.aggregation, time_constraint, hfds_values, inargs.nregions, branch=inargs.branch_time, infer_ohc=inargs.infer_ohc, infer_hfbasin=inargs.infer_hfbasin)
    # Second pass: ocean bars.
    for region in region_names[inargs.nregions]:
        plot_ocean(axes, region, bar_width, inargs.aggregation, hfds_values, ohc_values, transport_values, ohc_inferred_values, transport_inferred_values)
    set_title(inargs.infile)
    fig.tight_layout(rect=[0, 0, 1, 0.93])  # (left, bottom, right, top)
    plt.savefig(inargs.outfile, bbox_inches='tight')
    gio.write_metadata(inargs.outfile, file_info={inargs.infile: iris.load(inargs.infile)[0].attributes['history']})
def main(inargs):
    """Run the program: 2x2 grid of interhemispheric energy-flux plots
    (comparison or hemispheres view, per command-line choice).
    """
    time_constraints = {}
    # NOTE(review): unlike sibling scripts this calls get_time_constraint
    # unqualified — presumably imported directly at module level; confirm.
    time_constraints['historical'] = get_time_constraint(inargs.hist_time)
    time_constraints['rcp'] = get_time_constraint(inargs.rcp_time)
    variables = ['Surface Downwelling Net Radiation',
                 'Surface Upward Latent Heat Flux',
                 'Downward Heat Flux at Sea Water Surface',
                 'Downward Heat Flux at Sea Water Surface']
    # Optionally swap the last two panels to the inferred flux variable.
    if inargs.infer_hfds:
        variables[-2] = 'Inferred Downward Heat Flux at Sea Water Surface'
        variables[-1] = 'Inferred Downward Heat Flux at Sea Water Surface'
    diff_dict = {}
    s_dict = {}
    n_dict = {}
    plot_details_list = []
    for infile in inargs.energy_infiles:
        for plotnum, var in enumerate(variables):
            # Each panel pairs a variable with a region and realm.
            region = inargs.regions[plotnum]
            realm = inargs.realms[plotnum]
            diff_cube, n_cube, s_cube, history, model, experiment, run, orig_units = get_diff(infile, var, region, realm, time_constraints, inargs.operator)
            diff_dict[(experiment, var, region, realm)] = diff_cube
            n_dict[(experiment, var, region, realm)] = n_cube
            s_dict[(experiment, var, region, realm)] = s_cube
            plot_ref = (var, region, realm)
            if not plot_ref in plot_details_list:
                plot_details_list.append(plot_ref)
    width = 16
    height = 10
    fig = plt.figure(figsize=(width, height))
    ax1 = fig.add_subplot(2, 2, 1)
    ax2 = fig.add_subplot(2, 2, 2)
    ax3 = fig.add_subplot(2, 2, 3)
    ax4 = fig.add_subplot(2, 2, 4)
    axes_list = [ax1, ax2, ax3, ax4]
    plotnum = 0
    for ax, plot_details in zip(axes_list, plot_details_list):
        var, region, realm = plot_details
        plot_type = inargs.plot_type[plotnum]
        if plot_type == 'comparison':
            plot_comparison(diff_dict, ax, var, region, realm, inargs.runmean, inargs.operator, orig_units)
        else:
            plot_hemispheres(n_dict, s_dict, ax, var, region, realm, inargs.runmean, orig_units)
        plotnum = plotnum + 1
    # `model`/`history` come from the last get_diff call — assumes one model.
    title = '%s interhemispheric difference' %(model)
    plt.suptitle(title, size='large')
    plt.subplots_adjust(top=0.90)
    plt.savefig(inargs.outfile, bbox_inches='tight')
    gio.write_metadata(inargs.outfile, file_info={inargs.energy_infiles[-1]: history})
def main(inargs):
    """Run the program.

    Intersect two date lists, write per-season overlap statistics to a
    companion .dat file, and write the common dates (plus provenance).
    """
    # Read input
    metadata_dict = {}
    primary_date_list, metadata_dict[inargs.primary_infile] = gio.read_dates(inargs.primary_infile)
    secondary_date_list, metadata_dict[inargs.secondary_infile] = gio.read_dates(inargs.secondary_infile)

    # Find the common dates
    common_date_list = list(set(primary_date_list).intersection(secondary_date_list))
    common_date_list.sort()

    # Write some stats to a .dat file
    season_dict = {'DJF': ['12', '01', '02'],
                   'MAM': ['03', '04', '05'],
                   'JJA': ['06', '07', '08'],
                   'SON': ['09', '10', '11']}
    # Fix: rsplit on the last dot only, so paths/names containing extra
    # dots (e.g. 'run.v2/dates.txt') no longer break the unpacking.
    fname, extension = inargs.outfile.rsplit('.', 1)
    # Fix: context manager guarantees the file is closed even on error.
    with open(fname + '.dat', 'w') as output_data_file:
        stat_writer(output_data_file, len(primary_date_list), len(common_date_list), len(secondary_date_list), 'annual')
        # Fix: dict.items() replaces the Python-2-only iteritems().
        for season, month_list in season_dict.items():
            # Dates are 'YYYY-MM-DD' strings; filter on the month field.
            season_filtered_primary_date_list = [date for date in primary_date_list if date.split('-')[1] in month_list]
            season_filtered_common_date_list = [date for date in common_date_list if date.split('-')[1] in month_list]
            season_filtered_secondary_date_list = [date for date in secondary_date_list if date.split('-')[1] in month_list]
            stat_writer(output_data_file, len(season_filtered_primary_date_list), len(season_filtered_common_date_list), len(season_filtered_secondary_date_list), season)

    # Write output date file
    gio.write_dates(inargs.outfile, common_date_list)
    gio.write_metadata(ofile=inargs.outfile, file_info=metadata_dict)
def add_metadata(temperature_cube, temperature_atts, ohc_cube, metadata_dict, inargs):
    """Add metadata to the output cube.

    Sets the ocean-heat-content variable attributes (registering the units
    against the standard name), copies the temperature file attributes, and
    records provenance and the depth bounds.
    """
    # Variable attributes: OHC is in Joules when a cell area/volume file was
    # supplied, otherwise per unit area (J m-2).
    standard_name = 'ocean_heat_content'
    has_cell_measure = inargs.area_file or inargs.volume_file
    units = 'J' if has_cell_measure else 'J m-2'
    iris.std_names.STD_NAMES[standard_name] = {'canonical_units': units}
    ohc_cube.standard_name = standard_name
    ohc_cube.long_name = 'ocean heat content'
    ohc_cube.var_name = 'ohc'
    ohc_cube.units = units

    # File attributes: inherit from the temperature cube, then record
    # provenance and the depth range that was integrated over.
    ohc_cube.attributes = temperature_atts
    ohc_cube.attributes['history'] = gio.write_metadata(file_info=metadata_dict)
    ohc_cube.attributes['depth_bounds'] = get_depth_text(temperature_cube, inargs.min_depth, inargs.max_depth)

    return ohc_cube
def read_data(infiles, variable, calc_annual=False, chunk=False):
    """Load the input data.

    Concatenates the files into a single cube, optionally converts to
    annual means, and classifies the horizontal grid.

    Args:
        infiles: list of input file paths
        variable: variable to load (passed through gio.check_iris_var)
        calc_annual: convert the timeseries to annual means
        chunk: passed to timeseries.convert_to_annual

    Returns:
        (cube, dim coord names, aux coord names, 'curvilinear' or 'latlon')
    """
    cube = iris.load(infiles, gio.check_iris_var(variable), callback=save_history)
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)
    if calc_annual:
        cube = timeseries.convert_to_annual(cube, chunk=chunk)
    coord_names = [coord.name() for coord in cube.dim_coords]
    aux_coord_names = [coord.name() for coord in cube.aux_coords]
    assert 'time' in coord_names
    assert len(coord_names) == 3
    # Curvilinear grids carry lat/lon as 2D auxiliary coordinates.
    grid_type = 'curvilinear' if aux_coord_names == ['latitude', 'longitude'] else 'latlon'
    # NOTE(review): `history` is presumably a module-level list populated by
    # the save_history load callback; confirm at module scope.
    infile_history = {}
    infile_history[infiles[0]] = history[0]
    cube.attributes['history'] = gio.write_metadata(file_info=infile_history)
    return cube, coord_names, aux_coord_names, grid_type
def main(inargs):
    """Run the program: join each historical variable with its RCP
    continuation into one 'historical-rcpXX' cube and save them all.
    """
    # Historical experiment spans 1850-2005.
    hist_time_constraint = gio.get_time_constraint(['1850-01-01', '2005-12-31'])
    outcubes = iris.cube.CubeList([])
    for var in inargs.variables:
        metadata_dict = {}
        hist_cube = iris.load_cube(inargs.hist_file, gio.check_iris_var(var) & hist_time_constraint)
        hist_cube = clean_attributes(hist_cube)
        # Preserve attributes that concatenation/equalisation would drop.
        branch_time = hist_cube.attributes['branch_time']
        history = hist_cube.attributes['history']
        rcp_cube = iris.load_cube(inargs.rcp_file, gio.check_iris_var(var))
        rcp_cube = clean_attributes(rcp_cube)
        rcp_experiment = rcp_cube.attributes['experiment_id']
        # For cumulatively summed data, offset the RCP segment so it
        # continues from the final historical value.
        if inargs.cumsum:
            rcp_cube.data = rcp_cube.data + hist_cube.data[-1]
        cube_list = iris.cube.CubeList([hist_cube, rcp_cube])
        equalise_attributes(cube_list)
        iris.util.unify_time_units(cube_list)
        cube = cube_list.concatenate_cube()
        cube.attributes['branch_time'] = branch_time
        cube.attributes['experiment_id'] = 'historical-' + rcp_experiment
        outcubes.append(cube.copy())
    for cube in outcubes:
        cube.attributes['history'] = gio.write_metadata(file_info={inargs.hist_file: history})
    equalise_attributes(outcubes)
    iris.save(outcubes, inargs.outfile)
def main(inargs):
    """Run the program: extract the 500 hPa level from each input file,
    average to daily means, concatenate and save as netCDF3.
    """
    # 50000 Pa = 500 hPa.
    level_constraint = iris.Constraint(air_pressure=50000)
    cube_list = iris.cube.CubeList([])
    for infile in inargs.infiles:
        with iris.FUTURE.context(cell_datetime_objects=True):
            print(infile)
            cube = iris.load_cube(infile, level_constraint)
            history = cube.attributes['history']
            # Drop the MD5 checksum attribute so time coords match
            # across files for concatenation.
            del cube.coord('time').attributes['MD5']
            # Collapse sub-daily data to daily means.
            iris.coord_categorisation.add_day_of_year(cube, 'time')
            iris.coord_categorisation.add_year(cube, 'time')
            cube = cube.aggregated_by(['day_of_year', 'year'], iris.analysis.MEAN)
            cube.remove_coord('day_of_year')
            cube.remove_coord('year')
            cube_list.append(cube)
    equalise_attributes(cube_list)
    iris.util.unify_time_units(cube_list)
    cube = cube_list.concatenate_cube()
    cube.coord('latitude').var_name = 'latitude'
    cube.coord('longitude').var_name = 'longitude'
    # `history` is from the last file processed.
    cube.attributes['history'] = gio.write_metadata(file_info={inargs.infiles[-1]: history})
    iris.save(cube, inargs.outfile, netcdf_format='NETCDF3_CLASSIC')
def main(inargs):
    """Run the program: compute a zonal aggregate (e.g. mean/sum) of the
    input variable, with optional annual averaging, area weighting and
    land/ocean masking.
    """
    cube = iris.load(inargs.infiles, gio.check_iris_var(inargs.var), callback=save_history)
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)
    if inargs.annual:
        cube = timeseries.convert_to_annual(cube, full_months=True)
    # Zonal statistics need a rectilinear grid.
    cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(cube)
    if inargs.area:
        cube = multiply_by_area(cube)
    if inargs.sftlf_file and inargs.realm:
        sftlf_cube = iris.load_cube(inargs.sftlf_file, 'land_area_fraction')
        cube = uconv.apply_land_ocean_mask(cube, sftlf_cube, inargs.realm)
    zonal_aggregate = cube.collapsed('longitude', aggregation_functions[inargs.aggregation])
    zonal_aggregate.remove_coord('longitude')
    # NOTE(review): `history` is presumably a module-level list populated by
    # the save_history load callback; confirm at module scope.
    zonal_aggregate.attributes['history'] = gio.write_metadata(file_info={inargs.infiles[0]: history[0]})
    iris.save(zonal_aggregate, inargs.outfile)
def main(inargs):
    """Run the program: plot dedrifted (and optionally original) OHC, OHU
    and netTOA timeseries for one hemisphere, and record the results.
    """
    time_constraint = gio.get_time_constraint(inargs.time)
    metadata_dict = {}
    results_dict = {}
    fig, ax = plt.subplots()
    metadata_dict, results_dict, ohc_cube = plot_files(inargs.ohc_file, inargs.hfds_file, inargs.rndt_file, inargs.hemisphere, metadata_dict, results_dict, time_constraint, dedrifted=True)
    # Overlay the non-dedrifted curves when all three original files are given.
    if inargs.orig_ohc_file and inargs.orig_hfds_file and inargs.orig_rndt_file:
        metadata_dict, results_dict, ohc_cube = plot_files(inargs.orig_ohc_file, inargs.orig_hfds_file, inargs.orig_rndt_file, inargs.hemisphere, metadata_dict, results_dict, time_constraint, dedrifted=False)
    plt.ylabel(ohc_cube.units)
    plt.ticklabel_format(style='sci', axis='y', scilimits=(0, 0), useMathText=True, useOffset=False)
    # Force mathtext rendering of the offset label (private attr workaround).
    ax.yaxis.major.formatter._useMathText = True
    #plt.ylim(-5e+24, 9e+24)
    ymin, ymax = plt.ylim()
    print('ymin:', ymin)
    print('ymax:', ymax)
    title, legloc = get_title(ohc_cube, inargs.hemisphere)
    plt.title(title)
    plt.legend(loc=legloc)
    write_result(inargs.outfile, results_dict)
    plt.savefig(inargs.outfile, bbox_inches='tight')
    gio.write_metadata(inargs.outfile, file_info=metadata_dict)
def main(inargs):
    """Run the program.

    Expand each (start, end) row of the input CSV into the full list of
    daily YYYY-MM-DD dates and write them out with provenance metadata.
    """
    # Header row is the second line of the file (header=1).
    dates_df = pandas.read_csv(inargs.infile, header=1)

    all_dates = []
    span_columns = dates_df[['Start YYYY-MM-DD', 'End YYYY-MM-DD']]
    for row_index, row in span_columns.iterrows():
        start, end = row
        # Every day from start to end, inclusive.
        daily = rrule.rrule(rrule.DAILY, dtstart=parser.parse(start), until=parser.parse(end))
        all_dates.extend(day.strftime('%Y-%m-%d') for day in daily)

    gio.write_dates(inargs.outfile, all_dates)
    gio.write_metadata(inargs.outfile)
def main(inargs):
    """Run the program: box plot of interhemispheric differences in
    accumulated heat (netTOA, OHU, OHC) across models/experiments.
    """
    time_constraint = gio.get_time_constraint(inargs.time)
    #metadata_dict = {}
    fig, ax = plt.subplots()
    plt.axvline(x=0, color='0.5', linestyle='--')
    data_list = []
    # Each file pair is (northern hemisphere file, southern hemisphere file).
    for nh_file, sh_file in inargs.rndt_files:
        diff, model, experiment, mip = calc_interhemispheric_diff(nh_file, sh_file, 'netTOA', time_constraint)
        data_list.append(generate_data_dict(diff, model, experiment, mip, 'netTOA'))
    for nh_file, sh_file in inargs.hfds_files:
        diff, model, experiment, mip = calc_interhemispheric_diff(nh_file, sh_file, 'OHU', time_constraint)
        data_list.append(generate_data_dict(diff, model, experiment, mip, 'OHU'))
    for nh_file, sh_file in inargs.ohc_files:
        diff, model, experiment, mip = calc_interhemispheric_diff(nh_file, sh_file, 'OHC', time_constraint)
        data_list.append(generate_data_dict(diff, model, experiment, mip, 'OHC'))
    data_df = pandas.DataFrame(data_list)
    # One strong color plus two pale shades per variable group.
    seaborn.boxplot(data=data_df[columns], orient="h", palette=['red', '#FFDDDD', '#FFDDDD', 'yellow', '#fdffdd', '#fdffdd', 'blue', '#ddddff', '#ddddff'])
    plt.ticklabel_format(style='sci', axis='x', scilimits=(0, 0), useMathText=True)
    # Force mathtext rendering of the offset label (private attr workaround).
    ax.xaxis.major.formatter._useMathText = True
    ax.set_xlabel('Northern Hemisphere minus Southern Hemisphere (Joules)')
    plt.title('Interhemispheric difference in accumulated heat, 1861-2005')
    plt.savefig(inargs.outfile, bbox_inches='tight')
    gio.write_metadata(inargs.outfile)
def main(inargs):
    """Run the program.

    Plot trends (or seasonal-cycle trends) in 3D and 2D ocean heat content
    side by side and save the figure with provenance metadata.
    """
    # Read data
    # inargs.time may be absent; fall back to an unconstrained load.
    try:
        time_constraint = gio.get_time_constraint(inargs.time)
    except AttributeError:
        time_constraint = iris.Constraint()
    with iris.FUTURE.context(cell_datetime_objects=True):
        ohc_3D_cube = iris.load_cube(inargs.infile, 'ocean heat content 3D' & time_constraint)
        ohc_2D_cube = iris.load_cube(inargs.infile, 'ocean heat content 2D' & time_constraint)
    lons = ohc_3D_cube.coord('longitude').points
    lats = ohc_3D_cube.coord('latitude').points
    infile_history = ohc_3D_cube.attributes['history']

    # Calculate seasonal cycle
    running_mean = True
    if inargs.seasonal_cycle:
        ohc_3D_cube = timeseries.calc_seasonal_cycle(ohc_3D_cube)
        ohc_2D_cube = timeseries.calc_seasonal_cycle(ohc_2D_cube)
        # Seasonal-cycle data is already smoothed; skip the running mean.
        running_mean = False

    # Calculate trend
    ohc_3D_trend = timeseries.calc_trend(ohc_3D_cube, running_mean=running_mean, per_yr=False, remove_scaling=True)
    ohc_2D_trend = timeseries.calc_trend(ohc_2D_cube, running_mean=running_mean, per_yr=False, remove_scaling=True)

    # Plot
    fig = plt.figure(figsize=[15, 3])
    gs = gridspec.GridSpec(1, 2, width_ratios=[4, 1])
    cbar_tick_max, cbar_tick_step = inargs.ticks
    yticks = set_yticks(inargs.max_lat)
    plot_3D_trend(ohc_3D_trend, lons, lats, gs, cbar_tick_max, cbar_tick_step, yticks)
    plot_2D_trend(ohc_2D_trend, lats, gs, yticks)

    # Write output
    plt.savefig(inargs.outfile, bbox_inches='tight')
    # Fix: key the provenance by the INPUT file the history belongs to
    # (the original keyed it by inargs.outfile).
    gio.write_metadata(inargs.outfile, file_info={inargs.infile: infile_history})
def main(inargs):
    """Run the program.

    Load the sea water salinity cube, normalise its units, stamp the
    provenance history, and save it.
    """
    salinity_cube = iris.load_cube(inargs.infile, 'sea_water_salinity')
    salinity_cube = gio.salinity_unit_check(salinity_cube)
    # Record where the data came from before writing it back out.
    provenance = {inargs.infile: salinity_cube.attributes['history']}
    salinity_cube.attributes['history'] = gio.write_metadata(file_info=provenance)
    iris.save(salinity_cube, inargs.outfile)
def main(inargs):
    """Run the program: grouped bar chart of per-model temperature trends
    for the GHG, AA, GHG+AA and historical experiments.
    """
    time_constraint = gio.get_time_constraint(inargs.time)
    trend_dict = {}
    models = []
    for infile in inargs.infiles:
        with iris.FUTURE.context(cell_datetime_objects=True):
            cube = iris.load_cube(infile, 'air_temperature' & time_constraint)
        experiment, model, metric_name = get_file_info(infile)
        trend_dict[(model, experiment)] = timeseries.calc_trend(cube, per_yr=True)
        models.append(model)
    models = sort_list(models)
    hist_data, ghg_data, aa_data = order_data(trend_dict, models)
    # Anthropogenic estimate = GHG-only plus aerosol-only trends.
    ant_data = numpy.array(ghg_data) + numpy.array(aa_data)
    ind = numpy.arange(len(hist_data))  # the x locations for the groups
    width = 0.2  # the width of the bars
    fig, ax = plt.subplots(figsize=(20, 8))
    rects1 = ax.bar(ind, ghg_data, width, color='red')
    rects2 = ax.bar(ind + width, aa_data, width, color='blue')
    rects3 = ax.bar(ind + 2 * width, ant_data, width, color='purple')
    rects4 = ax.bar(ind + 3 * width, hist_data, width, color='green')
    ax.set_ylabel('$K yr^{-1}$')
    start_year = inargs.time[0].split('-')[0]
    end_year = inargs.time[1].split('-')[0]
    # `metric_name` comes from the last file — assumes one metric per plot.
    ax.set_title('Trend in %s, %s-%s' % (metric_name, start_year, end_year))
    # Centre the tick under each 4-bar group.
    ax.set_xticks(ind + 1.5 * width)
    ax.set_xticklabels(models)
    ax.legend((rects1[0], rects2[0], rects3[0], rects4[0]), ('historicalGHG', 'historicalAA', 'GHG + AA', 'historical'), loc=1)
    plt.savefig(inargs.outfile, bbox_inches='tight')
    # Provenance from the last file loaded.
    gio.write_metadata(inargs.outfile, file_info={infile: cube.attributes['history']})
def main(inargs):
    """Run the program.

    Load the input cube, normalise its salinity units, stamp the
    provenance history, and save it in netCDF3 classic format.
    """
    salinity_cube = iris.load_cube(inargs.infile)
    salinity_cube = gio.salinity_unit_check(salinity_cube)
    # Record where the data came from before writing it back out.
    provenance = {inargs.infile: salinity_cube.attributes['history']}
    salinity_cube.attributes['history'] = gio.write_metadata(file_info=provenance)
    iris.save(salinity_cube, inargs.outfile, netcdf_format='NETCDF3_CLASSIC')
def main(inargs): """Run the program.""" # Read data cube = iris.load(inargs.infiles, 'surface_downward_eastward_stress', callback=save_history) equalise_attributes(cube) iris.util.unify_time_units(cube) cube = cube.concatenate_cube() cube = gio.check_time_units(cube) # Prepare data cube = timeseries.convert_to_annual(cube) sftlf_cube = iris.load_cube(inargs.sftlf_file, 'land_area_fraction') mask = create_land_mask(sftlf_cube, cube.shape) cube.data = numpy.ma.asarray(cube.data) cube.data.mask = mask cube = cube.collapsed('longitude', iris.analysis.MEAN) # Calculate metrics xdata = cube.coord('latitude').points xnew = numpy.linspace(xdata[0], xdata[-1], num=1000, endpoint=True) hemispheres = ['sh', 'nh'] directions = ['easterly', 'westerly'] metric_dict = {} for hemisphere, direction in itertools.product(hemispheres, directions): metric_dict[(hemisphere, direction, 'location')] = [] metric_dict[(hemisphere, direction, 'magnitude')] = [] for ycube in cube.slices(['latitude']): func = interp1d(xdata, ycube.data, kind='cubic') ynew = func(xnew) for hemisphere, direction in itertools.product(hemispheres, directions): loc, mag = wind_stress_metrics(xnew, ynew, hemisphere, direction) metric_dict[(hemisphere, direction, 'location')].append(loc) metric_dict[(hemisphere, direction, 'magnitude')].append(mag) # Write the output file atts = cube.attributes infile_history = {inargs.infiles[0]: history[0]} atts['history'] = gio.write_metadata(file_info=infile_history) units_dict = { 'magnitude': cube.units, 'location': cube.coord('latitude').units } cube_list = create_outcubes(metric_dict, cube.attributes, units_dict, cube.coord('time')) iris.save(cube_list, inargs.outfile)
def main(inargs):
    """Run the program.

    Rewrite the variable and coordinate metadata of a Scripps gridded
    Argo file into CMIP-style names/units, then save to a new file.
    """

    cubes = iris.load(inargs.infile)
    cube = cubes[0] if inargs.climatology else cubes[1]

    # Edit variable attributes (stash the original long name first)
    cube.attributes = {'comment': cube.long_name}
    if inargs.variable == 'temperature':
        cube.var_name = 'to'
        cube.standard_name = 'sea_water_temperature'
        cube.long_name = 'Sea Water Temperature'
        cube.units = 'K'
    elif inargs.variable == 'salinity':
        cube.var_name = 'so'
        cube.standard_name = 'sea_water_salinity'
        cube.long_name = 'Sea Water Salinity'
        cube.units = 'g/kg'

    # Edit latitude attributes
    lat_coord = cube.coord('latitude')
    lat_coord.var_name = 'lat'
    lat_coord.long_name = 'latitude'

    # Edit longitude attributes
    lon_coord = cube.coord('longitude')
    lon_coord.var_name = 'lon'
    lon_coord.long_name = 'longitude'

    # Edit time attributes
    if not inargs.climatology:
        time_coord = cube.coord('TIME')
        time_coord.convert_units(
            cf_units.Unit('days since 2004-01-01 00:00:00',
                          calendar='gregorian'))
        time_coord.var_name = 'time'
        time_coord.long_name = 'time'

    # Edit depth attributes
    depth_coord = cube.coord('PRESSURE')
    depth_coord.var_name = 'lev'
    depth_coord.long_name = 'ocean depth coordinate'
    depth_coord.standard_name = 'depth'

    # Write output file
    timestamp = datetime.datetime.now().strftime("%a %b %d %H:%M:%S %Y")
    old_history = timestamp + ': Scripps Institution of Oceanography gridded argo %s data downloaded from http://www.argo.ucsd.edu/Gridded_fields.html' % (
        inargs.variable)
    cube.attributes['history'] = gio.write_metadata(
        file_info={inargs.infile: old_history})
    iris.save(cube, inargs.outfile)
def main(inargs):
    """Run the program.

    Plot annual-mean meridional overturning mass streamfunction metrics
    for the Pacific and Atlantic basins: hemispheric comparisons on
    three stacked panels, one figure per model.

    NOTE(review): the time constraints built from inargs.hist_time and
    inargs.rcp_time were never passed to load_data in the original
    code; they are kept (so invalid --hist_time/--rcp_time values still
    raise early) but remain unused -- confirm whether load_data should
    receive them.
    """

    time_constraints = {}
    time_constraints['historical'] = gio.get_time_constraint(inargs.hist_time)
    time_constraints['rcp'] = gio.get_time_constraint(inargs.rcp_time)

    width = 10
    height = 20
    fig = plt.figure(figsize=(width, height))
    ax1 = fig.add_subplot(3, 1, 1)
    ax2 = fig.add_subplot(3, 1, 2)
    ax3 = fig.add_subplot(3, 1, 3)

    valid_files = []
    for infiles in inargs.experiment_files:
        spacific_cube, npacific_cube, experiment = load_data(
            infiles, 'pacific')
        satlantic_cube, natlantic_cube, experiment = load_data(
            infiles, 'atlantic')
        if experiment:
            spacific_metric, npacific_metric = calc_metrics(
                spacific_cube, npacific_cube)
            satlantic_metric, natlantic_metric = calc_metrics(
                satlantic_cube, natlantic_cube)
            plot_hemispheres(ax1, npacific_metric, spacific_metric,
                             experiment, 'pacific')
            plot_comparison(ax2, npacific_metric, spacific_metric,
                            experiment, 'pacific')
            plot_hemispheres(ax3, natlantic_metric, satlantic_metric,
                             experiment, 'atlantic')
            model = spacific_cube.attributes['model_id']
            valid_files.append(infiles)

    # NOTE(review): if no experiment file yields data, `model` is unbound
    # and valid_files[0] raises IndexError -- presumably at least one
    # valid file is always expected; verify against the callers.
    title = 'Annual Mean Meridional Overturning Mass Streamfunction, %s' % (
        model)
    plt.suptitle(title, size='large')

    plt.savefig(inargs.outfile, bbox_inches='tight')
    # `history` appears to be a module-level record filled in while the
    # files are loaded -- TODO confirm.
    gio.write_metadata(inargs.outfile,
                       file_info={valid_files[0][0]: history[0]})
def main(inargs):
    """Run program.

    Produce a joint scatter plot (with marginal distributions) of two
    timeseries variables, overlaying one colour per date subset.
    """

    metadata_dict = {}
    x_dataframe, metadata_dict[inargs.xfile] = read_data(inargs.xfile,
                                                         inargs.xvar)
    y_dataframe, metadata_dict[inargs.yfile] = read_data(inargs.yfile,
                                                         inargs.yvar)

    dataframe_list = [x_dataframe, y_dataframe]
    headers = [inargs.xvar, inargs.yvar]
    dataframe = pandas.concat(dataframe_list, join='inner', axis=1)
    dataframe.columns = headers
    dataframe = dataframe.dropna()
    dataframe = dataframe.iloc[::7, :]  # thin to every 7th row

    colors = ['black', 'red', 'blue']
    g = seaborn.JointGrid(x=inargs.xvar, y=inargs.yvar, data=dataframe)
    for index, subset_file in enumerate(inargs.subset):
        # print() with a single argument is valid in both Python 2 and 3
        print(subset_file)
        if subset_file == 'all':
            subset_file = None  # 'all' means no date restriction
        dt_list, metadata = calc_composite.get_datetimes(dataframe,
                                                         subset_file)
        if subset_file:
            metadata_dict[subset_file] = metadata
        dataframe_selection = dataframe[dataframe.index.isin(dt_list)]
        g.x = dataframe_selection[inargs.xvar].values
        g.y = dataframe_selection[inargs.yvar].values
        g = g.plot_joint(plt.scatter, color=colors[index])
        if inargs.ylabel:
            plt.ylabel(inargs.ylabel.replace('_', ' '))
        if inargs.xlabel:
            plt.xlabel(inargs.xlabel.replace('_', ' '))
        g = g.plot_marginals(seaborn.distplot, kde=True, color=colors[index])

    # Use matplotlib's public rcParams instead of reaching into the
    # savefig function's globals: func_globals is Python-2-only and
    # depends on matplotlib internals, while plt.rcParams is the
    # documented way to read the default figure.dpi.
    dpi = inargs.dpi if inargs.dpi else plt.rcParams['figure.dpi']
    plt.savefig(inargs.ofile, bbox_inches='tight', dpi=dpi)
    gio.write_metadata(inargs.ofile, file_info=metadata_dict)