def set_product_attributes(cube, product):
    """Set attributes on an output cube of type matching a key string in the
    improver.metadata.constants.attributes.DATASET_ATTRIBUTES dictionary.

    Args:
        cube (iris.cube.Cube):
            Cube containing product data
        product (str):
            String describing product type, which is a key in the
            DATASET_ATTRIBUTES dictionary.

    Returns:
        iris.cube.Cube:
            Cube with updated attributes

    Raises:
        ValueError: If "product" is not a key in DATASET_ATTRIBUTES.
    """
    # A missing "title" attribute is treated as an empty title, so the
    # grid-string check below simply never matches.
    original_title = cube.attributes.get("title", "")

    try:
        dataset_attributes = DATASET_ATTRIBUTES[product]
    except KeyError:
        options = list(DATASET_ATTRIBUTES.keys())
        raise ValueError("product '{}' not available (options: {})".format(
            product, options))

    updated_cube = amend_metadata(cube, attributes=dataset_attributes)

    # Preserve the standard-grid marker from the input title on the
    # (possibly replaced) output title.
    if STANDARD_GRID_TITLE_STRING in original_title:
        updated_cube.attributes["title"] += " on {}".format(
            STANDARD_GRID_TITLE_STRING)

    return updated_cube
def test_convert_units(self):
    """Test amend_metadata converts the cube units correctly."""
    target_units = "Celsius"
    input_cube = set_up_variable_cube(
        np.ones((3, 3), dtype=np.float32), units='K')
    updated = amend_metadata(input_cube, units=target_units)
    self.assertEqual(updated.units, "Celsius")
def test_attributes_deleted(self):
    """Test amend_metadata removes an attribute flagged for deletion."""
    attr_changes = {'attribute_to_update': 'delete'}
    updated = amend_metadata(
        self.cube, name='new_cube_name', data_type=np.dtype,
        attributes=attr_changes)
    self.assertNotIn('attribute_to_update', updated.attributes)
def test_basic(self):
    """Test amend_metadata returns a renamed Cube and leaves the input
    cube unmodified."""
    updated = amend_metadata(
        self.cube, name='new_cube_name', data_type=np.dtype)
    self.assertIsInstance(updated, Cube)
    self.assertEqual(updated.name(), 'new_cube_name')
    # The original cube must not have been renamed in place.
    self.assertNotEqual(self.cube.name(), 'new_cube_name')
def test_attributes_updated_and_added(self):
    """Test amend_metadata both overwrites an existing attribute and adds
    a new one."""
    attr_changes = {
        'attribute_to_update': 'second_value',
        'new_attribute': 'new_value',
    }
    updated = amend_metadata(
        self.cube, name='new_cube_name', data_type=np.dtype,
        attributes=attr_changes)
    self.assertEqual(
        updated.attributes['attribute_to_update'], 'second_value')
    self.assertEqual(updated.attributes['new_attribute'], 'new_value')
def test_cell_method_updated_and_added(self):
    """Test amend_metadata adds the requested cell method."""
    cell_methods = {
        "1": {"action": "add", "method": "point", "coords": "time"}}
    # The expected CellMethod takes the same spec minus the "action" key.
    expected_spec = deepcopy(cell_methods)["1"]
    expected_spec.pop("action")
    expected_cell_method = iris.coords.CellMethod(**expected_spec)
    updated = amend_metadata(
        self.cube, name='new_cube_name', data_type=np.dtype,
        cell_methods=cell_methods)
    self.assertIn(expected_cell_method, updated.cell_methods)
def test_cell_method_deleted(self):
    """Test amend_metadata removes a cell method flagged for deletion."""
    cell_methods = {
        "1": {"action": "delete", "method": "point", "coords": "time"}}
    # Attach the matching cell method to the cube so there is something
    # for amend_metadata to delete.
    existing_spec = deepcopy(cell_methods)["1"]
    existing_spec.pop("action")
    self.cube.cell_methods = (iris.coords.CellMethod(**existing_spec), )
    updated = amend_metadata(
        self.cube, name='new_cube_name', data_type=np.dtype,
        cell_methods=cell_methods)
    self.assertEqual(updated.cell_methods, ())
def test_coords_deleted_and_adds(self):
    """Test amend_metadata deletes one coordinate and adds another."""
    coord_changes = {
        self.threshold_coord: 'delete',
        'new_coord': {'points': [2.0]},
    }
    updated = amend_metadata(
        self.cube, name='new_cube_name', data_type=np.dtype,
        coordinates=coord_changes)
    remaining_names = [crd.name() for crd in updated.coords()]
    self.assertNotIn(self.threshold_coord, remaining_names)
    self.assertArrayEqual(
        updated.coord('new_coord').points, np.array([2.0]))
def test_coords_updated(self):
    """Test amend_metadata updates the points of existing coordinates."""
    coord_changes = {
        self.threshold_coord: {'points': [2.0]},
        'time': {'points': [1447896600, 1447900200]},
    }
    updated = amend_metadata(
        self.cube, name='new_cube_name', data_type=np.dtype,
        coordinates=coord_changes)
    self.assertArrayEqual(
        updated.coord(self.threshold_coord).points, np.array([2.0]))
    self.assertArrayEqual(
        updated.coord('time').points,
        np.array([1447896600, 1447900200]))
def test_warnings_on_works(self, warning_list=None):
    """Test amend_metadata emits the expected warnings when warnings_on
    is set."""
    attr_changes = {'new_attribute': 'new_value'}
    coord_changes = {self.threshold_coord: {'points': [2.0]}}
    expected_attr_msg = "Adding or updating attribute"
    expected_coord_msg = "Updated coordinate"
    updated = amend_metadata(
        self.cube, name='new_cube_name', data_type=np.dtype,
        coordinates=coord_changes, attributes=attr_changes,
        warnings_on=True)
    self.assertTrue(
        any(warned.category == UserWarning for warned in warning_list))
    self.assertTrue(
        any(expected_attr_msg in str(warned) for warned in warning_list))
    self.assertTrue(
        any(expected_coord_msg in str(warned) for warned in warning_list))
    # The metadata change itself must still have been applied.
    self.assertEqual(updated.attributes['new_attribute'], 'new_value')
def process(output_data, target_grid=None, source_landsea=None,
            metadata_dict=None, regrid_mode='bilinear',
            extrapolation_mode='nanmask', landmask_vicinity=25000,
            fix_float64=False):
    """Standardises a cube by one or more of regridding, updating meta-data
    etc

    Standardise a source cube. Available options are regridding (bi-linear
    or nearest-neighbour, optionally with land-mask awareness), updating
    meta-data and converting float64 data to float32. A check for float64
    data compliance can be made by only specifying a source cube with no
    other arguments.

    Args:
        output_data (iris.cube.Cube):
            Output cube. If the only argument, then it is checked for
            float64 data.
        target_grid (iris.cube.Cube):
            If specified, then regridding of the source against the target
            grid is enabled. If also using landmask-aware regridding then
            this must be land_binary_mask data. Default is None.
        source_landsea (iris.cube.Cube):
            A cube describing the land_binary_mask on the source-grid if
            coastline-aware regridding is required. Default is None.
        metadata_dict (dict):
            Dictionary containing required changes that will be applied to
            the metadata. Default is None.
        regrid_mode (str):
            Selects which regridding techniques to use. Default uses
            iris.analysis.Linear(); "nearest" uses Nearest() (Use for less
            continuous fields, e.g precipitation.); "nearest-with-mask"
            ensures that target data are sourced from points with the same
            mask value (Use for coast-line-dependent variables like
            temperature).
        extrapolation_mode (str):
            Mode to use for extrapolating data into regions beyond the
            limits of the source_data domain. Refer to online documentation
            for iris.analysis. Modes are -
            extrapolate - The extrapolation points will take their values
            from the nearest source point.
            nan - The extrapolation points will be set to NaN.
            error - A ValueError exception will be raised notifying an
            attempt to extrapolate.
            mask - The extrapolation points will always be masked, even if
            the source data is not a MaskedArray.
            nanmask - If the source data is a MaskedArray the extrapolation
            points will be masked. Otherwise they will be set to NaN.
            Default is 'nanmask'.
        landmask_vicinity (float):
            Radius of vicinity to search for a coastline, in metres.
            Default is 25000 m.
        fix_float64 (bool):
            If True, checks and fixes cube for float64 data. Without this
            option an exception will be raised if float64 data is found but
            no fix applied. Default is False.

    Returns:
        iris.cube.Cube:
            Processed cube.

    Raises:
        ValueError: If source landsea is supplied but regrid mode not
            nearest-with-mask.
        ValueError: If source landsea is supplied but not target grid.
        ValueError: If regrid_mode is "nearest-with-mask" but no landmask
            cube has been provided.
    Warns:
        warning: If the 'source_landsea' did not have a cube named
            land_binary_mask.
        warning: If the 'target_grid' did not have a cube named
            land_binary_mask.
    """
    # Validate argument combinations before doing any work: a source
    # landmask only makes sense for mask-aware regridding onto a target.
    if (source_landsea and "nearest-with-mask" not in regrid_mode):
        msg = ("Land-mask file supplied without appropriate regrid_mode. "
               "Use --regrid_mode=nearest-with-mask.")
        raise ValueError(msg)
    if source_landsea and not target_grid:
        msg = ("Cannot specify input_landmask_filepath without "
               "target_grid_filepath")
        raise ValueError(msg)

    # Process
    # Re-grid with options:
    check_cube_not_float64(output_data, fix=fix_float64)

    # if a target grid file has been specified, then regrid optionally
    # applying float64 data check, metadata change, Iris nearest and
    # extrapolation mode as required.
    if target_grid:
        # Linear is the default; "nearest" and "nearest-with-mask" both
        # switch to nearest-neighbour regridding.
        regridder = iris.analysis.Linear(
            extrapolation_mode=extrapolation_mode)
        if regrid_mode in ["nearest", "nearest-with-mask"]:
            regridder = iris.analysis.Nearest(
                extrapolation_mode=extrapolation_mode)
        output_data = output_data.regrid(target_grid, regridder)

        if regrid_mode in ["nearest-with-mask"]:
            if not source_landsea:
                msg = ("An argument has been specified that requires an "
                       "input landmask cube but none has been provided")
                raise ValueError(msg)
            # Non-landmask inputs are suspicious but not fatal: warn and
            # continue rather than raising.
            if "land_binary_mask" not in source_landsea.name():
                msg = ("Expected land_binary_mask in input_landmask cube "
                       "but found {}".format(repr(source_landsea)))
                warnings.warn(msg)
            if "land_binary_mask" not in target_grid.name():
                msg = ("Expected land_binary_mask in target_grid cube "
                       "but found {}".format(repr(target_grid)))
                warnings.warn(msg)
            # Coastline-aware adjustment of the regridded data.
            output_data = RegridLandSea(
                vicinity_radius=landmask_vicinity).process(
                    output_data, source_landsea, target_grid)

        # Carry grid-identity attributes (mosg__* and institution) over
        # from the target grid onto the regridded output.
        target_grid_attributes = (
            {k: v for (k, v) in target_grid.attributes.items()
             if 'mosg__' in k or 'institution' in k})
        output_data = amend_metadata(
            output_data, attributes=target_grid_attributes)

    # Change metadata only option:
    # if output file path and json metadata file specified,
    # change the metadata
    if metadata_dict:
        output_data = amend_metadata(output_data, **metadata_dict)

    # Re-check after regrid/metadata changes may have altered the dtype.
    check_cube_not_float64(output_data, fix=fix_float64)

    return output_data
def process(self, cube_list, new_diagnostic_name, revised_coords=None,
            revised_attributes=None, expanded_coord=None):
    """Create a combined cube.

    Args:
        cube_list (iris.cube.CubeList):
            Cube List contain the cubes to combine.
        new_diagnostic_name (str):
            New name for the combined diagnostic.
        revised_coords (dict or None):
            Revised coordinates for combined cube.
        revised_attributes (dict or None):
            Revised attributes for combined cube.
        expanded_coord (dict or None):
            Coordinates to be expanded as a key, with the value
            indicating whether the upper or mid point of the coordinate
            should be used as the point value, e.g. {'time': 'upper'}.

    Returns:
        iris.cube.Cube:
            Cube containing the combined data.

    Raises:
        TypeError: If cube_list is not an iris.cube.CubeList.
        ValueError: If the cubelist contains only one cube.
    """
    if not isinstance(cube_list, iris.cube.CubeList):
        raise TypeError(
            'Expecting data to be an instance of iris.cube.CubeList '
            'but is {}.'.format(type(cube_list)))

    if len(cube_list) < 2:
        raise ValueError('Expecting 2 or more cubes in cube_list')

    # The first cube seeds the result; its dtype is recorded so the
    # combined output can be cast back at the end.
    data_type = cube_list[0].dtype
    result = cube_list[0].copy()

    # Fold the remaining cubes into the result one at a time, resolving
    # any metadata differences before each combination.
    for next_cube in cube_list[1:]:
        first, second = resolve_metadata_diff(
            result.copy(), next_cube.copy(),
            warnings_on=self.warnings_on)
        result = self.combine(first, second)

    # A running sum was accumulated above; divide to obtain the mean.
    if self.operation == 'mean':
        result.data = result.data / len(cube_list)

    # If cube has coord bounds that we want to expand
    if expanded_coord:
        result = expand_bounds(result, cube_list, expanded_coord)

    result = amend_metadata(
        result, new_diagnostic_name, data_type, revised_coords,
        revised_attributes, warnings_on=self.warnings_on)

    return result