def test_wrong_input(self):
    """A TypeError with a helpful message is raised for a non-CellMethod."""
    self.cube.cell_methods = ()
    expected_msg = ('Input Cell_method is not an instance of '
                    'iris.coord.CellMethod')
    with self.assertRaisesRegex(TypeError, expected_msg):
        add_renamed_cell_method(
            self.cube, 'not_a_cell_method', 'weighted_mean')
def test_no_cell_method_in_input_cube(self):
    """The renamed cell method is still added when the input cube starts
    with an empty cell_methods tuple."""
    self.cube.cell_methods = ()
    add_renamed_cell_method(self.cube, self.cell_method, 'weighted_mean')
    expected = (
        iris.coords.CellMethod(method='weighted_mean', coords='time'),)
    self.assertEqual(self.cube.cell_methods, expected)
def test_multiple_cell_methods_in_input_cube(self):
    """Cell methods other than the renamed one are preserved on the cube."""
    extra_cell_method = iris.coords.CellMethod(
        method='max', coords='realization')
    self.cube.cell_methods = (self.cell_method, extra_cell_method)
    add_renamed_cell_method(self.cube, self.cell_method, 'weighted_mean')
    renamed = iris.coords.CellMethod(method='weighted_mean', coords='time')
    self.assertEqual(
        self.cube.cell_methods, (extra_cell_method, renamed,))
def test_only_difference_is_name(self):
    """The renamed cell method differs from the original only by its
    method name; coord_names, intervals and comments carry over."""
    add_renamed_cell_method(self.cube, self.cell_method, 'weighted_mean')
    expected = iris.coords.CellMethod(method='weighted_mean', coords='time')
    self.assertEqual(self.cube.cell_methods, (expected,))
    renamed = self.cube.cell_methods[0]
    # Everything except the method name must be copied verbatim.
    for attr in ('coord_names', 'intervals', 'comments'):
        self.assertEqual(getattr(self.cell_method, attr),
                         getattr(renamed, attr))
def process(self, cube, weights=None):
    """Calculate weighted blend across the chosen coord, for either
       probabilistic or percentile data. If there is a percentile
       coordinate on the cube, it will blend using the
       PercentileBlendingAggregator but the percentile coordinate must
       have at least two points.

    Args:
        cube (iris.cube.Cube):
            Cube to blend across the coord.
        weights (Optional list or np.array of weights):
            or None (equivalent to equal weights).

    Returns:
        result (iris.cube.Cube):
            containing the weighted blend across the chosen coord.

    Raises:
        TypeError : If the first argument not a cube.
        ValueError : If there is a percentile coord and it is not a
                       dimension coord in the cube.
        ValueError : If there is a percentile dimension with only one
                        point, we need at least two points in order to do
                        the blending.
        ValueError : If there are more than one percentile coords
                       in the cube.
        ValueError : If there is a percentile dimension on the cube and the
                     mode for blending is 'weighted_maximum'
        ValueError : If the weights shape do not match the dimension
                       of the coord we are blending over.
    Warns:
        Warning : If trying to blend across a scalar coordinate with only
                    one value. Returns the original cube in this case.

    """
    if not isinstance(cube, iris.cube.Cube):
        # NOTE: '{0:s}' raised "unsupported format string passed to
        # type.__format__" here; use plain '{}' so the intended
        # TypeError message is actually produced.
        msg = ('The first argument must be an instance of '
               'iris.cube.Cube but is'
               ' {}.'.format(type(cube)))
        raise TypeError(msg)

    # Check to see if the data is percentile data
    try:
        perc_coord = find_percentile_coordinate(cube)
        perc_dim = cube.coord_dims(perc_coord.name())
        if not perc_dim:
            msg = ('The percentile coord must be a dimension '
                   'of the cube.')
            raise ValueError(msg)
        # Check the percentile coordinate has more than one point,
        # otherwise raise an error as we won't be able to blend.
        if len(perc_coord.points) < 2:
            msg = ('Percentile coordinate does not have enough points'
                   ' in order to blend. Must have at least'
                   ' 2 percentiles.')
            raise ValueError(msg)
    except CoordinateNotFoundError:
        perc_coord = None
        perc_dim = None

    # If we have a percentile dimension and the mode is 'max' raise an
    # exception.
    if perc_coord and self.mode == 'weighted_maximum':
        msg = ('The "weighted_maximum" mode cannot be used with'
               ' percentile data.')
        raise ValueError(msg)

    # check weights array matches coordinate shape if not None
    if weights is not None:
        if np.array(weights).shape != cube.coord(self.coord).points.shape:
            # NOTE: tuples do not accept the ':s' format spec either;
            # '{}' formats the shapes without raising.
            msg = ('The weights array must match the shape '
                   'of the coordinate in the input cube; '
                   'weight shape is '
                   '{}'.format(np.array(weights).shape) +
                   ', cube shape is '
                   '{}'.format(cube.coord(self.coord).points.shape))
            raise ValueError(msg)

    # If coord to blend over is a scalar_coord warn
    # and return original cube.
    coord_dim = cube.coord_dims(self.coord)
    if not coord_dim:
        msg = ('Trying to blend across a scalar coordinate with only one'
               ' value. Returning original cube')
        warnings.warn(msg)
        result = cube
    else:
        # Blend each threshold slice independently, then merge back.
        try:
            cube.coord('threshold')
        except iris.exceptions.CoordinateNotFoundError:
            slices_over_threshold = [cube]
        else:
            if self.coord != 'threshold':
                slices_over_threshold = cube.slices_over('threshold')
            else:
                slices_over_threshold = [cube]

        cubelist = iris.cube.CubeList([])
        for cube_thres in slices_over_threshold:
            # Blend the cube across the coordinate
            # Use percentile Aggregator if required
            if perc_coord and self.mode == "weighted_mean":
                percentiles = np.array(perc_coord.points, dtype=float)
                perc_dim, = cube_thres.coord_dims(perc_coord.name())
                # Set equal weights if none are provided
                if weights is None:
                    num = len(cube_thres.coord(self.coord).points)
                    weights = np.ones(num) / float(num)
                # Set up aggregator
                percentile_blend = Aggregator(
                    'weighted_mean',
                    PercentileBlendingAggregator.aggregate)
                cube_new = cube_thres.collapsed(self.coord,
                                               percentile_blend,
                                               arr_percent=percentiles,
                                               arr_weights=weights,
                                               perc_dim=perc_dim)

            # Else do a simple weighted average
            elif self.mode == "weighted_mean":
                # Equal weights are used as default.
                weights_array = None
                # Else broadcast the weights to be used by the aggregator.
                coord_dim_thres = cube_thres.coord_dims(self.coord)
                if weights is not None:
                    weights_array = iris.util.broadcast_to_shape(
                        np.array(weights), cube_thres.shape,
                        coord_dim_thres)
                orig_cell_methods = cube_thres.cell_methods
                # Calculate the weighted average.
                cube_new = cube_thres.collapsed(self.coord,
                                               iris.analysis.MEAN,
                                               weights=weights_array)
                # Update the name of the cell_method created by Iris to
                # 'weighted_mean' to be consistent.
                new_cell_methods = cube_new.cell_methods
                extra_cm = (set(new_cell_methods) -
                            set(orig_cell_methods)).pop()
                add_renamed_cell_method(cube_new, extra_cm,
                                        'weighted_mean')

            # Else use the maximum probability aggregator.
            elif self.mode == "weighted_maximum":
                # Set equal weights if none are provided
                if weights is None:
                    num = len(cube_thres.coord(self.coord).points)
                    weights = np.ones(num) / float(num)
                # Set up aggregator
                max_probability = Aggregator(
                    'weighted_maximum',
                    MaxProbabilityAggregator.aggregate)
                cube_new = cube_thres.collapsed(self.coord,
                                               max_probability,
                                               arr_weights=weights)
            cubelist.append(cube_new)
        result = cubelist.merge_cube()
        # Preserve masking: merge_cube may drop the mask wrapper.
        if isinstance(cubelist[0].data, np.ma.core.MaskedArray):
            result.data = np.ma.array(result.data)

    # If set adjust values of collapsed coordinates.
    if self.coord_adjust is not None:
        for crd in result.coords():
            if cube.coord_dims(crd.name()) == coord_dim:
                pnts = cube.coord(crd.name()).points
                crd.points = np.array(self.coord_adjust(pnts),
                                      dtype=crd.points.dtype)
    return result
def test_basic(self):
    """A single cell method on the input cube is renamed as expected."""
    add_renamed_cell_method(self.cube, self.cell_method, 'weighted_mean')
    expected = (
        iris.coords.CellMethod(method='weighted_mean', coords='time'),)
    self.assertEqual(self.cube.cell_methods, expected)