def test_valid_distribution_with_shape_parameters(self):
    """A truncnorm distribution with explicit shape parameters is
    accepted, resolving to scipy's truncnorm and retaining the bounds."""
    bounds = [0, np.inf]
    result = Plugin(distribution="truncnorm", shape_parameters=bounds)
    self.assertEqual(result.distribution, scipy_cont_distns.truncnorm)
    self.assertEqual(result.shape_parameters, bounds)
def test_valid_cube(self):
    """A valid template cube passes the check without raising, and the
    cube itself is left unmodified by the call."""
    candidate = self.cube.copy()
    Plugin()._check_template_cube(candidate)
    self.assertEqual(candidate, self.cube)
def test_valid_distribution(self):
    """The "norm" distribution name resolves to scipy's stats.norm and
    leaves the shape parameters empty."""
    result = Plugin(distribution="norm")
    self.assertEqual(result.distribution, stats.norm)
    self.assertEqual(result.shape_parameters, [])
def test_basic(self):
    """Processing produces a Cube carrying a realization DimCoord with
    units of "1"."""
    output = Plugin().process(self.cube)
    self.assertIsInstance(output, Cube)
    realization_coord = output.coord("realization")
    self.assertIsInstance(realization_coord, DimCoord)
    self.assertEqual(realization_coord.units, "1")
def test_number_of_realizations(self):
    """When ensemble_realization_numbers is not supplied, the realization
    coordinate points default to a simple 0..n-1 sequence."""
    output = Plugin().process(self.cube)
    expected_points = np.array([0, 1, 2])
    self.assertArrayAlmostEqual(
        output.coord("realization").points, expected_points)
def test_2d_cube_recycling_raw_ensemble_realizations(self):
    """
    Check the plugin output for a 2d input cube when there are fewer raw
    ensemble realizations than percentiles, so the raw realizations must
    be recycled to make up the numbers.

    Here two raw realizations (values 1 and 2) are recycled to cover
    three percentiles, giving effective raw data of [[1], [2], [1]].
    The second raw realization holds the highest value, so it receives
    the highest post-processed percentile (3). The recycling leaves a
    tie between the first and third positions (both value 1), and the
    re-ordering breaks ties randomly, so the remaining percentile values
    1 and 2 may land in either of those two slots — hence two acceptable
    outcomes are checked.
    """
    ensemble_data = np.array([[1], [2]])
    percentile_data = np.array([[1], [2], [3]])
    # The tie between the recycled first/third realizations permits
    # either of these orderings.
    possible_outcomes = [
        np.array([[1], [3], [2]]),
        np.array([[2], [3], [1]]),
    ]
    ensemble_cube = self.raw_cube[:2, :, 0, 0]
    ensemble_cube.data = ensemble_data
    percentile_cube = self.post_processed_percentiles[:, :, 0, 0]
    percentile_cube.data = percentile_data
    result = Plugin().process(percentile_cube, ensemble_cube)
    matches = [
        np.array_equal(outcome, result.data) for outcome in possible_outcomes]
    self.assertIn(True, matches)
def test_basic(self):
    """The repr reports the class name, distribution and shape parameters."""
    result = str(Plugin())
    self.assertEqual(
        result,
        "<ConvertLocationAndScaleParametersToPercentiles: "
        "distribution: norm; shape_parameters: []>",
    )
def test_vicinity_metadata(self):
    """An in-vicinity probability diagnostic keeps its vicinity suffix
    when the name is regenerated during processing."""
    self.cube.rename(
        "probability_of_air_temperature_in_vicinity_above_threshold")
    output = Plugin().process(self.cube)
    self.assertEqual(output.name(), "air_temperature_in_vicinity")
def test_invalid_distribution(self):
    """An unrecognised distribution name raises an AttributeError."""
    with self.assertRaisesRegex(AttributeError, "The distribution requested"):
        Plugin(distribution="elephant")
def test_basic(self):
    """_interpolate_percentiles returns an iris Cube."""
    plugin = Plugin()
    output = plugin._interpolate_percentiles(
        self.cube, self.percentiles, self.bounds_pairing, self.perc_coord
    )
    self.assertIsInstance(output, Cube)
def test_basic(self):
    """The repr reports the class name, distribution and shape parameters."""
    result = str(Plugin())
    self.assertEqual(
        result,
        "<GeneratePercentilesFromMeanAndVariance: "
        "distribution: norm; shape_parameters: []>",
    )
def test_basic(self):
    """_probabilities_to_percentiles yields a Cube named after the
    underlying diagnostic."""
    output = Plugin()._probabilities_to_percentiles(
        self.cube, self.percentiles, self.bounds_pairing
    )
    self.assertIsInstance(output, Cube)
    self.assertEqual(output.name(), "air_temperature")
def test_compatible_units(self):
    """Compatible cubes pass the unit check without raising; the call
    completing is itself the assertion, as any other input raises."""
    plugin = Plugin()
    plugin._check_unit_compatibility(
        self.means, self.variances, self.template_cube)
def test_basic(self):
    """The repr is simply the class name in angle brackets."""
    self.assertEqual(
        str(Plugin()), "<GenerateProbabilitiesFromMeanAndVariance>")
def test_error_shape_parameters_required(self):
    """Requesting truncnorm without shape parameters raises a ValueError."""
    with self.assertRaisesRegex(ValueError, "shape parameters must be specified"):
        Plugin(distribution="truncnorm")
def test_simple_data_truncnorm_distribution(self):
    """
    Percentiles generated from a truncated normal distribution match the
    expected values when location and scale parameter cubes are supplied.
    The ensemble mean and ensemble standard deviation, each offset by
    one, stand in as proxies for the location and scale parameters.
    """
    self.temperature_cube.data = np.array(
        [
            [[1, 1, 1], [1, 1, 1], [1, 1, 1]],
            [[2, 2, 2], [2, 2, 2], [2, 2, 2]],
            [[3, 3, 3], [3, 3, 3], [3, 3, 3]],
        ]
    )
    expected = np.array(
        [
            [
                [1.0121, 1.0121, 1.0121],
                [1.0121, 1.0121, 1.0121],
                [1.0121, 1.0121, 1.0121],
            ],
            [
                [3.1677, 3.1677, 3.1677],
                [3.1677, 3.1677, 3.1677],
                [3.1677, 3.1677, 3.1677],
            ],
            [
                [5.6412, 5.6412, 5.6412],
                [5.6412, 5.6412, 5.6412],
                [5.6412, 5.6412, 5.6412],
            ],
        ]
    )
    # Offset ensemble mean: proxy for the truncnorm location parameter.
    location_parameter = self.temperature_cube.collapsed(
        "realization", iris.analysis.MEAN
    )
    location_parameter.data = location_parameter.data + 1
    # Offset ensemble standard deviation: proxy for the truncnorm scale
    # parameter.
    scale_parameter = self.temperature_cube.collapsed(
        "realization", iris.analysis.STD_DEV,
    )
    scale_parameter.data = scale_parameter.data + 1
    plugin = Plugin(
        distribution="truncnorm",
        shape_parameters=np.array([0, np.inf], dtype=np.float32),
    )
    result = plugin._location_and_scale_parameters_to_percentiles(
        location_parameter,
        scale_parameter,
        self.temperature_cube,
        self.percentiles,
    )
    self.assertIsInstance(result, Cube)
    np.testing.assert_allclose(result.data, expected, rtol=1.0e-4)