def main(inargs):
    """Run the program."""
    cube, history = gio.combine_files(inargs.infiles, inargs.var, checks=True)

    if inargs.annual:
        cube = timeseries.convert_to_annual(cube, chunk=False)

    if inargs.aggregation == 'sum':
        cube = cube.collapsed('depth', iris.analysis.SUM)
    else:
        # Depth-weighted mean: the weight calculation differs depending on
        # whether the depth axis is in metres (1D) or pressure (dbar, which
        # varies with latitude and so needs the 2D calculation).
        axis_names = [coord.name() for coord in cube.dim_coords]
        depth_coord = cube.coord('depth')
        assert depth_coord.units in ['m', 'dbar'], "Unrecognised depth axis units"
        if depth_coord.units == 'm':
            vert_extents = spatial_weights.calc_vertical_weights_1D(
                depth_coord, axis_names, cube.shape)
        elif depth_coord.units == 'dbar':
            vert_extents = spatial_weights.calc_vertical_weights_2D(
                depth_coord, cube.coord('latitude'), axis_names, cube.shape)
        cube = cube.collapsed('depth', iris.analysis.MEAN, weights=vert_extents)
    # Both branches collapse the depth axis, so drop the leftover scalar coord.
    cube.remove_coord('depth')

    metadata_dict = {inargs.infiles[0]: history}
    cube.attributes['history'] = cmdprov.new_log(infile_history=metadata_dict,
                                                 git_repo=repo_dir)
    iris.save(cube, inargs.outfile)
def calc_metrics(sh_cube, nh_cube):
    """Calculate the metrics."""
    # NOTE(review): dim-coord names are taken from sh_cube and reused for
    # nh_cube — presumably both hemispheres share the same axis layout.
    axis_names = [coord.name() for coord in sh_cube.dim_coords]

    # Vertical layer thicknesses for each hemisphere's own depth axis.
    nh_weights = spatial_weights.calc_vertical_weights_1D(
        nh_cube.coord('depth'), axis_names, nh_cube.shape)
    sh_weights = spatial_weights.calc_vertical_weights_1D(
        sh_cube.coord('depth'), axis_names, sh_cube.shape)

    # Keep positive values in the NH cube and negative values in the SH
    # cube; everything else is zeroed before summing.
    nh_cube.data = numpy.where(nh_cube.data > 0, nh_cube.data, 0)
    sh_cube.data = numpy.where(sh_cube.data < 0, sh_cube.data, 0)

    nh_metric = nh_cube.collapsed(['depth', 'latitude'], iris.analysis.SUM,
                                  weights=nh_weights)
    sh_metric = sh_cube.collapsed(['depth', 'latitude'], iris.analysis.SUM,
                                  weights=sh_weights)

    return sh_metric, nh_metric
def create_volume_cube(cube):
    """Create a volume cube."""
    axis_names = [coord.name() for coord in cube.dim_coords]
    assert 'latitude' in axis_names
    assert 'longitude' in axis_names
    assert 'depth' in axis_names

    # Cell volume = meridional extent * zonal extent * vertical extent.
    lat_extents = spatial_weights.calc_meridional_weights(
        cube.coord('latitude'), axis_names, cube.shape)
    lon_extents = spatial_weights.calc_zonal_weights(cube, axis_names)

    depth_coord = cube.coord('depth')
    assert depth_coord.units in ['m', 'dbar'], "Unrecognised depth axis units"
    if depth_coord.units == 'm':
        vert_extents = spatial_weights.calc_vertical_weights_1D(
            depth_coord, axis_names, cube.shape)
    elif depth_coord.units == 'dbar':
        vert_extents = spatial_weights.calc_vertical_weights_2D(
            depth_coord, cube.coord('latitude'), axis_names, cube.shape)

    volume_data = (lat_extents * lon_extents * vert_extents).astype(numpy.float32)

    out_dims = [(cube.coord(name), index)
                for index, name in enumerate(axis_names)]
    volume_cube = iris.cube.Cube(
        volume_data,
        standard_name='ocean_volume',
        long_name='Ocean Grid-Cell Volume',
        var_name='volcello',
        units='m3',
        dim_coords_and_dims=out_dims,
    )
    # Carry the input cube's land/missing mask over to the volume field.
    volume_cube.data = numpy.ma.masked_where(numpy.ma.getmask(cube.data),
                                             volume_cube.data)

    # The volume field is time-invariant, so keep only the first time step.
    if 'time' in axis_names:
        first_time = volume_cube.coord('time').points[0]
        volume_cube = volume_cube.extract(iris.Constraint(time=first_time))

    return volume_cube
def main(inargs):
    """Run the program."""
    volume_cube = iris.load_cube(inargs.volcello_file, 'ocean_volume')
    area_cube = iris.load_cube(inargs.areacello_file, 'cell_area')

    # Reconstruct cell volumes as horizontal area * vertical layer thickness.
    axis_names = [coord.name() for coord in volume_cube.dim_coords]
    vert_extents = spatial_weights.calc_vertical_weights_1D(
        volume_cube.coord('depth'), axis_names, volume_cube.shape)
    volume_data = (area_cube.data * vert_extents).astype(numpy.float32)

    # Multiplying by zero first keeps the original cube's mask intact while
    # replacing the data values.
    volume_cube.data = volume_cube.data * 0.0 + volume_data

    # Write output file
    outfile_metadata = {
        inargs.volcello_file: volume_cube.attributes['history'],
        inargs.areacello_file: area_cube.attributes['history'],
    }
    volume_cube.attributes['history'] = gio.write_metadata(
        file_info=outfile_metadata)
    iris.save(volume_cube, inargs.outfile)
def vertical_mean(cube):
    """Calculate the vertical mean"""
    axis_names = [coord.name() for coord in cube.dim_coords]
    depth_axis = cube.coord('depth')
    assert depth_axis.units in ['m', 'dbar'], "Unrecognised depth axis units"

    # Metre-based axes get simple 1D layer thicknesses; pressure-based
    # (dbar) axes vary with latitude and need the 2D calculation, which
    # only supports a regular (time, depth, lat, lon) grid.
    if depth_axis.units == 'm':
        weights = spatial_weights.calc_vertical_weights_1D(
            depth_axis, axis_names, cube.shape)
    elif depth_axis.units == 'dbar':
        assert axis_names == [
            'time', 'depth', 'latitude', 'longitude'
        ], "2D weights will not work for curvilinear grid"
        weights = spatial_weights.calc_vertical_weights_2D(
            depth_axis, cube.coord('latitude'), axis_names, cube.shape)

    cube = cube.collapsed('depth', iris.analysis.MEAN, weights=weights)
    cube.remove_coord('depth')

    return cube
def calc_vertical_mean(cube, layer, coord_names, atts, original_standard_name, original_var_name):
    """Calculate the vertical mean over a given depth range.

    Args:
      cube (iris.cube.Cube): Input cube with a depth coordinate
      layer (str): Key into the vertical_layers lookup giving
        the (min_depth, max_depth) bounds of the layer
      coord_names (list): Dimension coordinate names of the cube
      atts (dict): Attributes for the output cube
      original_standard_name (str): standard_name of the input variable
      original_var_name (str): var_name of the input variable

    Returns:
      iris.cube.Cube: Depth-collapsed cube with updated metadata.

    """
    min_depth, max_depth = vertical_layers[layer]
    level_subset = gio.iris_vertical_constraint(min_depth, max_depth)
    cube_segment = cube.extract(level_subset)

    depth_axis = cube_segment.coord('depth')
    # Guard against unhandled units (consistent with the other depth-weight
    # call sites in this module): without it, neither branch would assign
    # vertical_weights and the collapse below would fail with a NameError.
    assert depth_axis.units in ['m', 'dbar'], "Unrecognised depth axis units"
    if depth_axis.units == 'm':
        vertical_weights = spatial_weights.calc_vertical_weights_1D(
            depth_axis, coord_names, cube_segment.shape)
    elif depth_axis.units == 'dbar':
        # Pressure-based depth varies with latitude, hence the 2D weights.
        vertical_weights = spatial_weights.calc_vertical_weights_2D(
            depth_axis, cube_segment.coord('latitude'),
            coord_names, cube_segment.shape)

    vertical_mean_cube = cube_segment.collapsed(
        ['depth'], iris.analysis.MEAN,
        weights=vertical_weights.astype(numpy.float32))
    vertical_mean_cube.remove_coord('depth')
    vertical_mean_cube.data = vertical_mean_cube.data.astype(numpy.float32)

    units = str(cube.units)
    standard_name = f'vertical_mean_{layer}_{original_standard_name}'
    var_name = f'{original_var_name}_vm_{layer}'
    vertical_mean_cube = add_metadata(atts, vertical_mean_cube,
                                      standard_name, var_name, units)

    return vertical_mean_cube