def test_ignore_non_matching_metadata(self):
    """Test a case in which the grid metadata does not match but this is
    forcibly ignored by the user by setting the grid_metadata_identifier
    to None."""
    self.reference_cube.attributes["mosg__grid_domain"] = "eire"
    cubes = [self.reference_cube, self.cube1, self.cube2]
    check_grid_match(None, cubes)
def test_non_matching_grids(self):
    """Test a case in which a cube with a non-matching grid is included in
    the comparison, raising a ValueError."""
    cubes = [self.reference_cube, self.cube1, self.unmatched_cube]
    msg = ("Cubes do not share or originate from the same grid, so cannot "
           "be used together.")
    with self.assertRaisesRegex(ValueError, msg):
        check_grid_match(cubes)
def test_non_matching_metadata(self):
    """Test a case in which the grid metadata does not match. This will
    raise a ValueError."""
    self.reference_cube.attributes["mosg__grid_domain"] = "eire"
    cubes = [self.reference_cube, self.cube1, self.cube2]
    msg = "Cubes do not share the metadata identified"
    with self.assertRaisesRegex(ValueError, msg):
        check_grid_match('mosg', cubes)
def process(self, spot_data_cube, neighbour_cube, gridded_lapse_rate_cube):
    """
    Extract lapse rates from the appropriate grid points and apply them
    to the spot extracted temperatures.

    The calculation is::

        lapse_rate_adjusted_temperatures = temperatures + lapse_rate *
        vertical_displacement

    Args:
        spot_data_cube (iris.cube.Cube):
            A spot data cube of temperatures for the spot data sites,
            extracted from the gridded temperature field. These
            temperatures will have been extracted using the same
            neighbour_cube and neighbour_selection_method that are being
            used here.
        neighbour_cube (iris.cube.Cube):
            The neighbour_cube that contains the grid coordinates at
            which lapse rates should be extracted and the vertical
            displacement between those grid points on the model orography
            and the spot data sites' actual altitudes. This cube is only
            updated when a new site is added.
        gridded_lapse_rate_cube (iris.cube.Cube):
            A cube of temperature lapse rates on the same grid as that
            from which the spot data temperatures were extracted.

    Returns:
        iris.cube.Cube:
            A copy of the input spot_data_cube with the data modified by
            the lapse rates to give a better representation of the site's
            temperatures.
    """
    # Check the cubes are compatible.
    check_grid_match(
        [neighbour_cube, spot_data_cube, gridded_lapse_rate_cube])

    # Extract the lapse rates that correspond to the spot sites.
    extraction_plugin = SpotExtraction(
        neighbour_selection_method=self.neighbour_selection_method)
    spot_lapse_rate = extraction_plugin.process(neighbour_cube,
                                                gridded_lapse_rate_cube)

    # Extract vertical displacements between the model orography and sites.
    method_constraint = iris.Constraint(
        neighbour_selection_method_name=self.neighbour_selection_method)
    data_constraint = iris.Constraint(
        grid_attributes_key='vertical_displacement')
    vertical_displacement = neighbour_cube.extract(method_constraint &
                                                   data_constraint)

    # Apply lapse rate adjustment to the temperature at each site.
    new_temperatures = (
        spot_data_cube.data +
        (spot_lapse_rate.data * vertical_displacement.data)).astype(
            np.float32)
    new_spot_cube = spot_data_cube.copy(data=new_temperatures)
    return new_spot_cube
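# Illustrative sketch only, not part of the plugin: the adjustment applied in
# process above is plain element-wise arithmetic following the documented
# formula, temperatures + lapse_rate * vertical_displacement. All values below
# are invented for demonstration and do not come from the surrounding tests.
import numpy as np

example_temperatures = np.array([280.0, 281.5, 279.0], dtype=np.float32)  # K
example_lapse_rates = np.array([-0.0065, -0.0060, -0.0070],
                               dtype=np.float32)  # K m-1
example_displacements = np.array([10.0, -25.0, 50.0],
                                 dtype=np.float32)  # m, site minus orography

# A site 50 m above the model orography with a -0.0070 K m-1 lapse rate is
# cooled by 0.35 K; a site below the model orography is warmed.
example_adjusted = (example_temperatures +
                    example_lapse_rates * example_displacements).astype(
                        np.float32)
# example_adjusted is approximately [279.935, 281.65, 278.65]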
def test_mismatched_model_grid_hash_cubes(self):
    """Test that a check works when all the cubes passed to the function
    have model_grid_hashes and these do not match."""
    self.cube1.attributes["model_grid_hash"] = "123"
    cubes = [self.neighbour_cube, self.cube1]
    msg = ("Cubes do not share or originate from the same grid, so cannot "
           "be used together.")
    with self.assertRaisesRegex(ValueError, msg):
        check_grid_match(cubes)
def test_no_identifier_failure(self):
    """Test case in which an empty string is provided as the identifier,
    which matches all keys, assuming no numeric keys. In this case we
    expect a failure as we add an extra attribute."""
    self.cube1.attributes['extra_attribute'] = 'extra'
    cubes = [self.reference_cube, self.cube1, self.cube2]
    msg = "Cubes do not share the metadata identified"
    with self.assertRaisesRegex(ValueError, msg):
        check_grid_match('', cubes)
def process(self, spot_data_cube: Cube, neighbour_cube: Cube,
            gridded_lapse_rate_cube: Cube) -> Cube:
    """
    Extract lapse rates from the appropriate grid points and apply them
    to the spot extracted temperatures.

    The calculation is::

        lapse_rate_adjusted_temperatures = temperatures + lapse_rate *
        vertical_displacement

    Args:
        spot_data_cube:
            A spot data cube of temperatures for the spot data sites,
            extracted from the gridded temperature field. These
            temperatures will have been extracted using the same
            neighbour_cube and neighbour_selection_method that are being
            used here.
        neighbour_cube:
            The neighbour_cube that contains the grid coordinates at
            which lapse rates should be extracted and the vertical
            displacement between those grid points on the model orography
            and the spot data sites' actual altitudes. This cube is only
            updated when a new site is added.
        gridded_lapse_rate_cube:
            A cube of temperature lapse rates on the same grid as that
            from which the spot data temperatures were extracted.

    Returns:
        A copy of the input spot_data_cube with the data modified by the
        lapse rates to give a better representation of the site's
        temperatures.

    Raises:
        ValueError: If the input spot data cube is a probability cube,
            which cannot be lapse rate adjusted.
        ValueError: If the diagnostic being processed is not air
            temperature or feels like temperature.
        ValueError: If the lapse rate cube provided does not have the
            name "air_temperature_lapse_rate".
        ValueError: If the height of the temperature data does not match
            that of the data used to calculate the lapse rates.
        CoordinateNotFoundError: If the lapse rate cube does not contain
            a single valued height coordinate.
    """
    if is_probability(spot_data_cube):
        msg = (
            "Input cube has a probability coordinate which cannot be "
            "lapse rate adjusted. Input data should be in percentile or "
            "deterministic space only.")
        raise ValueError(msg)

    # Check that we are dealing with temperature data.
    if spot_data_cube.name() not in [
            "air_temperature", "feels_like_temperature"]:
        msg = (
            "The diagnostic being processed is not air temperature "
            "or feels like temperature and therefore cannot be adjusted.")
        raise ValueError(msg)

    if not gridded_lapse_rate_cube.name() == "air_temperature_lapse_rate":
        msg = ("A cube has been provided as a lapse rate cube but does "
               "not have the expected name air_temperature_lapse_rate: "
               "{}".format(gridded_lapse_rate_cube.name()))
        raise ValueError(msg)

    try:
        lapse_rate_height_coord = gridded_lapse_rate_cube.coord("height")
    except CoordinateNotFoundError:
        msg = ("Lapse rate cube does not contain a single valued height "
               "coordinate. This is required to ensure it is applied to "
               "equivalent temperature data.")
        raise CoordinateNotFoundError(msg)

    # Check the height of the temperature data matches that used to
    # calculate the lapse rates. If so, adjust temperatures using the
    # lapse rate values.
    if not spot_data_cube.coord("height") == lapse_rate_height_coord:
        raise ValueError(
            "A lapse rate cube was provided, but the height of the "
            "temperature data does not match that of the data used "
            "to calculate the lapse rates. As such the temperatures "
            "were not adjusted with the lapse rates.")

    # Check the cubes are compatible.
    check_grid_match(
        [neighbour_cube, spot_data_cube, gridded_lapse_rate_cube])

    # Extract the lapse rates that correspond to the spot sites.
    spot_lapse_rate = SpotExtraction(
        neighbour_selection_method=self.neighbour_selection_method)(
            neighbour_cube, gridded_lapse_rate_cube)

    # Extract vertical displacements between the model orography and sites.
    method_constraint = iris.Constraint(
        neighbour_selection_method_name=self.neighbour_selection_method)
    data_constraint = iris.Constraint(
        grid_attributes_key="vertical_displacement")
    vertical_displacement = neighbour_cube.extract(method_constraint &
                                                   data_constraint)

    # Apply lapse rate adjustment to the temperature at each site.
    new_spot_lapse_rate = iris.util.broadcast_to_shape(
        spot_lapse_rate.data, spot_data_cube.shape, [-1])
    new_temperatures = (
        spot_data_cube.data +
        (new_spot_lapse_rate * vertical_displacement.data)).astype(
            np.float32)
    new_spot_cube = spot_data_cube.copy(data=new_temperatures)
    return new_spot_cube
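# Illustrative sketch only: how 1-D per-site lapse rates are broadcast across
# the leading dimension(s) of the spot data (e.g. percentiles) before the
# adjustment above. Plain NumPy broadcasting is used here with invented
# values; the plugin itself uses iris.util.broadcast_to_shape with the spot
# site dimension mapped to the trailing dimension of the spot data cube.
import numpy as np

spot_data = np.full((3, 4), 280.0, dtype=np.float32)  # (percentile, site)
per_site_lapse_rate = np.array([-0.0065, -0.0060, -0.0070, -0.0050],
                               dtype=np.float32)  # one value per site
per_site_displacement = np.array([10.0, -25.0, 50.0, 0.0],
                                 dtype=np.float32)  # metres, one per site

# Replicating the per-site values over the percentile axis gives the same
# (3, 4) shape as the spot data, so the adjustment remains element-wise.
broadcast_lapse_rate = np.broadcast_to(per_site_lapse_rate, spot_data.shape)
adjusted = (spot_data +
            broadcast_lapse_rate * per_site_displacement).astype(np.float32)
assert adjusted.shape == spot_data.shape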
def test_using_model_grid_hash(self):
    """Test a case in which one of the cubes is a spotdata cube without a
    spatial grid. This cube includes a model_grid_hash to indicate on
    which grid the neighbours were found."""
    cubes = [self.reference_cube, self.neighbour_cube, self.cube2]
    check_grid_match(cubes)
def test_matching_grids(self):
    """Test a case in which the grids match. There is no assert statement
    as this test is successful if no exception is raised."""
    cubes = [self.reference_cube, self.cube1, self.cube2]
    check_grid_match(cubes)
def test_multiple_model_grid_hash_cubes(self):
    """Test that a check works when all the cubes passed to the function
    have model_grid_hashes."""
    self.cube1.attributes["model_grid_hash"] = self.diagnostic_cube_hash
    cubes = [self.neighbour_cube, self.cube1]
    check_grid_match(cubes)
def test_using_model_grid_hash_reordered_cubes(self):
    """Test as above but using the neighbour_cube as the first in the
    list so that it acts as the reference for all the other cubes."""
    cubes = [self.neighbour_cube, self.reference_cube, self.cube2]
    check_grid_match(cubes)
def test_no_identifier_success(self):
    """Test case in which an empty string is provided as the identifier,
    which matches all keys, assuming no numeric keys."""
    cubes = [self.reference_cube, self.cube1, self.cube2]
    check_grid_match('', cubes)