def _set_up_height_cube(height_points, ascending=True):
    """Create cube of temperatures decreasing with height"""
    # Three height levels at 280 K, 278 K and 276 K respectively.
    temperatures = np.empty((3, 3, 3), dtype=np.float32)
    for level, value in enumerate((280, 278, 276)):
        temperatures[level] = value
    cube = set_up_variable_cube(temperatures[0].astype(np.float32))
    # Build the height coordinate in ascending order first.
    sorted_points = np.sort(height_points)
    cube = add_coordinate(cube, sorted_points, "height", coord_units="m")
    cube.coord("height").attributes["positive"] = "up"
    cube.data = temperatures.astype(np.float32)
    if not ascending:
        # Flip to descending height order and mark the direction accordingly.
        cube = sort_coord_in_cube(cube, "height", descending=True)
        cube.coord("height").attributes["positive"] = "down"
    return cube
def setUp(self):
    """Set up test cubes"""
    base = set_up_variable_cube(
        np.ones((12, 12), dtype=np.float32),
        time=datetime(2017, 2, 17, 6, 0),
        frt=datetime(2017, 2, 17, 6, 0),
    )
    base.remove_coord("forecast_period")
    # Hourly validity times spanning 12 hours, expressed as epoch seconds.
    self.time_points = np.arange(1487311200, 1487354400, 3600).astype(np.int64)
    self.cube = add_coordinate(
        base,
        self.time_points,
        "time",
        dtype=np.int64,
        coord_units="seconds since 1970-01-01 00:00:00",
    )
def setUp(self):
    """Set up for testing process method"""
    base = set_up_variable_cube(
        np.zeros((2, 2), dtype=np.float32),
        name="lwe_thickness_of_precipitation_amount",
        units="m",
        time=dt(2017, 1, 10, 5, 0),
        frt=dt(2017, 1, 10, 3, 0),
    )
    # Expand to two validity times an hour apart.
    validity_times = [dt(2017, 1, 10, 5, 0), dt(2017, 1, 10, 6, 0)]
    self.cube = add_coordinate(base, validity_times, "time", is_datetime=True)
    self.coord_name = "time"
def setUp(self):
    """Set up cubes for testing.

    cube1-cube3 are single-threshold precipitation probability cubes
    sharing a forecast reference time but differing in data values and
    validity times; cube4 carries two thresholds and is given a 3-point
    realization coordinate.
    """
    # 50% probability everywhere, valid over 23:00-00:00.
    data = np.full((1, 2, 2), 0.5, dtype=np.float32)
    self.cube1 = set_up_probability_cube(
        data,
        np.array([0.001], dtype=np.float32),
        variable_name="lwe_thickness_of_precipitation_amount",
        time=datetime(2015, 11, 19, 0),
        time_bounds=(datetime(2015, 11, 18, 23), datetime(2015, 11, 19, 0)),
        frt=datetime(2015, 11, 18, 22),
    )
    # 60% probability, one hour later than cube1.
    data = np.full((1, 2, 2), 0.6, dtype=np.float32)
    self.cube2 = set_up_probability_cube(
        data,
        np.array([0.001], dtype=np.float32),
        variable_name="lwe_thickness_of_precipitation_amount",
        time=datetime(2015, 11, 19, 1),
        time_bounds=(datetime(2015, 11, 19, 0), datetime(2015, 11, 19, 1)),
        frt=datetime(2015, 11, 18, 22),
    )
    # 10% probability, same validity window as cube2.
    data = np.full((1, 2, 2), 0.1, dtype=np.float32)
    self.cube3 = set_up_probability_cube(
        data,
        np.array([0.001], dtype=np.float32),
        variable_name="lwe_thickness_of_precipitation_amount",
        time=datetime(2015, 11, 19, 1),
        time_bounds=(datetime(2015, 11, 19, 0), datetime(2015, 11, 19, 1)),
        frt=datetime(2015, 11, 18, 22),
    )
    # Two-threshold cube used as the basis for a realization cube.
    data = np.full((2, 2, 2), 0.1, dtype=np.float32)
    self.cube4 = set_up_probability_cube(
        data,
        np.array([1.0, 2.0], dtype=np.float32),
        variable_name="lwe_thickness_of_precipitation_amount",
        time=datetime(2015, 11, 19, 1),
        time_bounds=(datetime(2015, 11, 19, 0), datetime(2015, 11, 19, 1)),
        frt=datetime(2015, 11, 18, 22),
    )
    # NOTE(review): cube4 has two thresholds, so no length-1 dimension is
    # visible here and the squeeze looks like a no-op — confirm the
    # intended shape before the realization coordinate is added.
    self.cube4 = add_coordinate(iris.util.squeeze(self.cube4), np.arange(3), "realization", coord_units="1")
def test_datetime(self):
    """Test a leading time coordinate can be added successfully"""
    new_times = [
        datetime(2017, 10, 10, 3, 0),
        datetime(2017, 10, 10, 4, 0),
    ]
    result = add_coordinate(self.input_cube, new_times, "time", is_datetime=True)
    # The new time coordinate should be the leading dimension.
    self.assertEqual(result.coord_dims("time"), (0, ))
    self.assertEqual(len(result.coord("time").points), 2)
    # Forecast period must be recalculated for the new validity times.
    expected_fp_points = 3600 * np.array([6, 7], dtype=np.int64)
    self.assertArrayAlmostEqual(
        result.coord("forecast_period").points, expected_fp_points
    )
def test_time_points(self):
    """Test a time coordinate can be added using integer points rather
    than datetimes, and that forecast period is correctly re-calculated"""
    base_time = self.input_cube.coord("time").points[0]
    base_fp = self.input_cube.coord("forecast_period").points[0]
    # New times one and two hours after the original validity time.
    new_times = np.array([base_time + 3600, base_time + 7200])
    expected_fp = np.array([base_fp + 3600, base_fp + 7200])
    result = add_coordinate(
        self.input_cube,
        new_times,
        "time",
        coord_units=TIME_COORDS["time"].units,
        dtype=TIME_COORDS["time"].dtype,
    )
    self.assertArrayEqual(result.coord("time").points, new_times)
    self.assertArrayEqual(result.coord("forecast_period").points, expected_fp)
def setUp(self):
    """Set up the test inputs.

    Builds a three-time cube whose time-related coordinates are
    deliberately converted to non-standard units and datatypes, plus the
    expected standard units/dtypes for each coordinate.
    """
    time_start = datetime.datetime(2017, 11, 1, 3)
    time_mid = datetime.datetime(2017, 11, 1, 6)
    time_end = datetime.datetime(2017, 11, 1, 9)
    self.npoints = 10
    domain_corner, grid_spacing = _grid_params("latlon", self.npoints)
    data_time_0 = np.ones((self.npoints, self.npoints), dtype=np.float32)
    cube_time_0 = set_up_variable_cube(
        data_time_0,
        time=time_start,
        frt=time_start,
        domain_corner=domain_corner,
        grid_spacing=grid_spacing,
    )
    cube_times = add_coordinate(
        cube_time_0.copy(),
        [time_start, time_mid, time_end],
        "time",
        is_datetime=True,
    )
    # Convert units and datatypes, so that they are non-standard.
    cube_times.coord("time").convert_units(
        "hours since 1970-01-01 00:00:00")
    cube_times.coord("time").points = cube_times.coord(
        "time").points.astype(np.int32)
    cube_times.coord("forecast_reference_time").convert_units(
        "hours since 1970-01-01 00:00:00")
    cube_times.coord("forecast_reference_time").points = cube_times.coord(
        "forecast_reference_time").points.astype(np.int32)
    cube_times.coord("forecast_period").convert_units("hours")
    # BUGFIX: astype returns a new array, so the result must be assigned
    # back — previously it was discarded and the conversion had no effect.
    cube_times.coord("forecast_period").points = cube_times.coord(
        "forecast_period").points.astype(np.float32)
    self.cube = cube_times
    self.coord_dtypes = {
        "time": np.int64,
        "forecast_reference_time": np.int64,
        "forecast_period": np.int32,
    }
    self.coord_units = {
        "time": "seconds since 1970-01-01 00:00:00",
        "forecast_reference_time": "seconds since 1970-01-01 00:00:00",
        "forecast_period": "seconds",
    }
def setUp(self):
    """Set up a cube for the tests."""
    base = set_up_variable_cube(
        np.ones((1, 7, 7), dtype=np.float32),
        time=datetime(2015, 11, 23, 7, 0),
        frt=datetime(2015, 11, 23, 3, 0),
    )
    base.remove_coord("forecast_period")
    # Two validity times an hour apart, as epoch seconds.
    epoch_seconds = [1448262000, 1448265600]
    self.cube = add_coordinate(
        base,
        epoch_seconds,
        "time",
        dtype=np.int64,
        coord_units="seconds since 1970-01-01 00:00:00",
        order=[1, 0, 2, 3],
    )
def test_check_data_multiple_timesteps(self):
    """
    Test that the plugin returns an Iris.cube.Cube with the expected
    data values for the percentiles.
    """
    # Expected percentile values, shaped (percentile, time, y, x).
    expected = np.array(
        [
            [[[8.0, 8.0], [-8.0, 8.66666667]], [[8.0, -16.0], [8.0, -16.0]]
             ],
            [[[12.0, 12.0], [12.0, 12.0]], [[10.5, 10.0], [10.5, 10.0]]],
            [[[31.0, 31.0], [31.0, 31.0]],
             [[11.5, 11.33333333], [11.5, 12.0]]],
        ],
        dtype=np.float32,
    )
    cube = set_up_probability_cube(
        np.zeros((3, 2, 2), dtype=np.float32),
        ECC_TEMPERATURE_THRESHOLDS,
        threshold_units="degC",
        time=datetime(2015, 11, 23, 7),
        frt=datetime(2015, 11, 23, 6),
    )
    # Add a two-point time dimension; "order" keeps threshold leading.
    cube = add_coordinate(
        cube,
        [datetime(2015, 11, 23, 7), datetime(2015, 11, 23, 8)],
        "time",
        is_datetime=True,
        order=[1, 0, 2, 3],
    )
    # Probability data, shaped (threshold, time, y, x).
    cube.data = np.array(
        [
            [[[0.8, 0.8], [0.7, 0.9]], [[0.8, 0.6], [0.8, 0.6]]],
            [[[0.6, 0.6], [0.6, 0.6]], [[0.5, 0.4], [0.5, 0.4]]],
            [[[0.4, 0.4], [0.4, 0.4]], [[0.1, 0.1], [0.1, 0.2]]],
        ],
        dtype=np.float32,
    )
    percentiles = [20, 60, 80]
    result = Plugin()._probabilities_to_percentiles(
        cube, percentiles, self.bounds_pairing)
    self.assertArrayAlmostEqual(result.data, expected, decimal=5)
def test_with_multiple_realizations_and_times(self):
    """Test for multiple realizations and times, so that multiple
    iterations will be required within the process method."""
    # Expected vicinity-processed output, shaped
    # (realization, time, y, x): each seeded point is spread to its
    # neighbourhood in its own realization/time slice only.
    expected = np.array(
        [
            [
                [
                    [0.0, 0.0, 0.0, 0.0],
                    [1.0, 1.0, 1.0, 0.0],
                    [1.0, 1.0, 1.0, 0.0],
                    [1.0, 1.0, 1.0, 0.0],
                ],
                [
                    [0.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0],
                ],
            ],
            [
                [
                    [0.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0],
                ],
                [
                    [0.0, 0.0, 1.0, 1.0],
                    [0.0, 0.0, 1.0, 1.0],
                    [0.0, 0.0, 1.0, 1.0],
                    [0.0, 0.0, 0.0, 0.0],
                ],
            ],
        ]
    )
    # Add a time dimension; "order" keeps realization leading.
    cube = add_coordinate(
        self.cube,
        self.timesteps,
        "time",
        order=[1, 0, 2, 3],
        is_datetime=True,
    )
    # Seed one occurrence in (realization 0, time 0) and one in
    # (realization 1, time 1).
    cube.data[0, 0, 2, 1] = 1.0
    cube.data[1, 1, 1, 3] = 1.0
    orig_shape = cube.data.copy().shape
    result = OccurrenceWithinVicinity(self.distance)(cube)
    self.assertIsInstance(result, Cube)
    # The plugin must preserve the input shape.
    self.assertEqual(result.data.shape, orig_shape)
    self.assertArrayAlmostEqual(result.data, expected)
def setUp(self):
    """Set up cube """
    values = np.array(
        [0, 1, 5, 11, 20, 5, 9, 10, 4, 2, 0, 1, 29, 30, 1, 5, 6, 6],
        dtype=np.int32,
    ).reshape((2, 3, 3))
    base_cube = set_up_variable_cube(values, "weather_code", "1",)
    # Two validity times an hour apart.
    times = [
        datetime.datetime(2017, 11, 19, 0, 30),
        datetime.datetime(2017, 11, 19, 1, 30),
    ]
    self.cube = add_coordinate(
        base_cube, times, "time", is_datetime=True, order=[1, 0, 2, 3],
    )
    self.wxcode = np.array(list(WX_DICT.keys()))
    self.wxmeaning = " ".join(WX_DICT.values())
    # Temporary output location for the netCDF file written by the tests.
    self.data_directory = mkdtemp()
    self.nc_file = self.data_directory + "/wxcode.nc"
    pathlib.Path(self.nc_file).touch(exist_ok=True)
def test_remove_unnecessary_scalar_coordinates(self):
    """Test model_id and model_configuration coordinates are both removed
    after model blending"""
    # BUGFIX: the original used "282 * np.zeros(...)", which is just
    # zeros; np.full expresses the evident intent of 282 K everywhere.
    # (The assertions below check only coordinate removal, not data.)
    cube_model = set_up_variable_cube(np.full((2, 2), 282, dtype=np.float32))
    cube_model = add_coordinate(cube_model, [0, 1], "model_id")
    cube_model.add_aux_coord(
        AuxCoord(["uk_ens", "uk_det"], long_name="model_configuration"),
        data_dims=0,
    )
    # Equal weights across the two models.
    weights_model = Cube(
        np.array([0.5, 0.5]),
        long_name="weights",
        dim_coords_and_dims=[(cube_model.coord("model_id"), 0)],
    )
    plugin = WeightedBlendAcrossWholeDimension("model_id")
    result = plugin(cube_model, weights_model)
    # Both model-identifying coordinates must be gone after the blend.
    for coord_name in ["model_id", "model_configuration"]:
        self.assertNotIn(
            coord_name, [coord.name() for coord in result.coords()]
        )
def percentile_cube(frt_points, time, frt):
    """Create a percentile cube for testing."""
    base = set_up_percentile_cube(
        np.zeros((6, 2, 2), dtype=np.float32),
        np.arange(0, 101, 20).astype(np.float32),
        name="air_temperature",
        units="C",
        time=time,
        frt=frt,
    )
    # Expand along forecast_reference_time; keep percentile leading.
    result = add_coordinate(
        base,
        frt_points,
        "forecast_reference_time",
        is_datetime=True,
        order=(1, 0, 2, 3),
    )
    result.data = np.reshape(PERCENTILE_DATA, (6, 3, 2, 2)).astype(np.float32)
    return result
def test_multi_point_single_real(self):
    """Test behaviour for points over a single realization."""
    data = np.ones((5, 5), dtype=np.float32)
    cube = set_up_variable_cube(
        data, spatial_grid="equalarea",
    )
    # Single realization, added as a length-1 leading coordinate.
    reals_points = np.array([0], dtype=np.int32)
    cube = add_coordinate(cube,
                          coord_points=reals_points,
                          coord_name="realization")
    # One zero at the centre of the otherwise all-ones grid.
    cube.data[2, 2] = 0
    # Expected output shaped (percentile, y, x): only the 10th
    # percentile is pulled down around the zero point.
    expected = np.array([
        [
            [1.0, 1.0, 1.0, 1.0, 1.0],
            [1.0, 1.0, 0.4, 1.0, 1.0],
            [1.0, 0.4, 0.4, 0.4, 1.0],
            [1.0, 1.0, 0.4, 1.0, 1.0],
            [1.0, 1.0, 1.0, 1.0, 1.0],
        ],
        [
            [1.0, 1.0, 1.0, 1.0, 1.0],
            [1.0, 1.0, 1.0, 1.0, 1.0],
            [1.0, 1.0, 1.0, 1.0, 1.0],
            [1.0, 1.0, 1.0, 1.0, 1.0],
            [1.0, 1.0, 1.0, 1.0, 1.0],
        ],
        [
            [1.0, 1.0, 1.0, 1.0, 1.0],
            [1.0, 1.0, 1.0, 1.0, 1.0],
            [1.0, 1.0, 1.0, 1.0, 1.0],
            [1.0, 1.0, 1.0, 1.0, 1.0],
            [1.0, 1.0, 1.0, 1.0, 1.0],
        ],
    ])
    percentiles = np.array([10, 50, 90])
    radius = 2000.0
    result = GeneratePercentilesFromANeighbourhood(
        radius, percentiles=percentiles).process(cube)
    self.assertArrayAlmostEqual(result.data, expected)
def error_percentile_cube(error_thresholds):
    """Create sample error-percentile cube"""
    # Broadcast the interior thresholds across a 10x10 spatial grid,
    # one slice per percentile.
    perc_data = np.broadcast_to(
        error_thresholds[1:-1, np.newaxis, np.newaxis], (4, 10, 10)
    )
    base = set_up_percentile_cube(
        perc_data.astype(np.float32),
        percentiles=np.array([20.0, 40.0, 60.0, 80.0], dtype=np.float32),
        name="forecast_error_of_lwe_thickness_of_precipitation_amount",
        units="m",
        attributes=ATTRIBUTES,
    )
    # Add a five-member realization coordinate, percentile still leading.
    return add_coordinate(
        base,
        coord_points=np.arange(5),
        coord_name="realization",
        coord_units="1",
        order=[0, 1, 2, 3],
    )
def test_multiple_thresholds(self):
    """Test multiple thresholds applied to a multi-realization cube return a single
    cube arrays corresponding to each realization and threshold."""
    multi_realization_cube = add_coordinate(
        self.cube, [0, 1, 2], "realization", dtype=np.int32
    )
    # Only the point at (2, 2) exceeds the two lowest thresholds.
    all_zeroes = np.zeros_like(multi_realization_cube.data)
    one_exceed_point = all_zeroes.copy()
    one_exceed_point[:, 2, 2] = 1.0
    # Transposed so that realization is the leading coordinate.
    expected_result_array = np.transpose(
        np.array([one_exceed_point, one_exceed_point, all_zeroes]),
        [1, 0, 2, 3],
    )
    plugin = Threshold([0.2, 0.4, 0.6])
    result = plugin(multi_realization_cube)
    self.assertIsInstance(result, Cube)
    self.assertArrayAlmostEqual(result.data, expected_result_array)
def test_works_two_thresh(self):
    """Test that the plugin works with a cube that contains multiple
    thresholds."""
    width = 2.0
    # Expand a copy of the input to three precipitation thresholds.
    thresh_cubes = add_coordinate(
        self.cube.copy(),
        [0.25, 0.5, 0.75],
        "precipitation_amount",
        coord_units="mm",
    )
    thresh_cubes.coord("precipitation_amount").var_name = "threshold"
    plugin = TriangularWeightedBlendAcrossAdjacentPoints(
        "forecast_period", self.forecast_period, "hours", width
    )
    result = plugin(thresh_cubes)
    # Test that the result cube retains threshold co-ordinates
    # from original cube.
    self.assertEqual(
        thresh_cubes.coord("precipitation_amount"),
        result.coord("precipitation_amount"),
    )
def setUp(self):
    """Set up test input cubes"""
    # Skip if pysteps not available
    pytest.importorskip("pysteps")
    shape = (30, 30)
    # Two precipitation-rate inputs 15 minutes apart.
    earlier_cube = set_up_test_cube(
        np.zeros(shape, dtype=np.float32),
        name="lwe_precipitation_rate",
        units="m s-1",
        time=datetime(2018, 2, 20, 4, 15),
    )
    later_cube = set_up_test_cube(
        np.zeros(shape, dtype=np.float32),
        name="lwe_precipitation_rate",
        units="m s-1",
        time=datetime(2018, 2, 20, 4, 30),
    )
    self.cubes = iris.cube.CubeList([earlier_cube, later_cube])
    # Steering flow: unit eastward wind plus a renamed copy northward.
    wind_u = set_up_test_cube(
        np.ones(shape, dtype=np.float32),
        name="grid_eastward_wind",
        units="m s-1",
        time=datetime(2018, 2, 20, 4, 0),
    )
    wind_v = wind_u.copy()
    wind_v.rename("grid_northward_wind")
    self.steering_flow = iris.cube.CubeList([wind_u, wind_v])
    # Orographic enhancement valid for three consecutive hours.
    orogenh = set_up_test_cube(
        np.zeros(shape, dtype=np.float32),
        name="orographic_enhancement",
        units="m s-1",
        time=datetime(2018, 2, 20, 3, 0),
    )
    time_points = [datetime(2018, 2, 20, 3 + i) for i in range(3)]
    self.orogenh = add_coordinate(orogenh, time_points, "time", is_datetime=True)
def setUp(self):
    """Set up temperature, pressure, and relative humidity cubes that
    contain multiple height levels; in this case the values of these
    diagnostics are identical on each level."""
    super().setUp()
    self.height_points = np.array([5.0, 10.0, 20.0])
    height_attribute = {"positive": "up"}
    wbt_data = np.array([[-88.15, -13.266943, 60.81063]], dtype=np.float32)
    wbt = set_up_variable_cube(
        wbt_data, name="wet_bulb_temperature", units="Celsius"
    )
    # Replicate the single-level cube onto the three height levels.
    self.wet_bulb_temperature = add_coordinate(
        wbt,
        self.height_points,
        "height",
        coord_units="m",
        attributes=height_attribute,
    )
def test_multi_realization(self):
    """Test that the expected changes occur and meta-data are unchanged
    when handling a multi-realization cube."""
    # CONSISTENCY FIX: pass the dimensionless units as the string "1",
    # matching every other add_coordinate call in this file, rather than
    # the integer 1.
    cube = add_coordinate(self.cube, [0, 1], "realization", coord_units="1")
    expected = cube.data.copy()
    # Output sea-point populated with data from input sea-point:
    expected[:, 0, 0] = 0.5
    # Output sea-point populated with data from input sea-point:
    expected[:, 1, 1] = 0.5
    # Output land-point populated with data from input land-point:
    expected[:, 0, 1] = 1.0
    # Output land-point populated with data from input sea-point due to
    # vicinity-constraint:
    expected[:, 4, 4] = 1.0
    result = self.plugin.process(cube, self.input_land, self.output_land)
    self.assertIsInstance(result, Cube)
    self.assertArrayEqual(result.data, expected)
    # Metadata must pass through the plugin unchanged.
    self.assertDictEqual(result.attributes, self.cube.attributes)
    self.assertEqual(result.name(), self.cube.name())
def test_dimension_preservation(self):
    """Test the result preserves input dimension order when the coordinate
    to integrate is not the first dimension (eg there's a leading
    realization coordinate)
    """
    cube = set_up_variable_cube(280 * np.ones((3, 3, 3), dtype=np.float32))
    cube = add_coordinate(
        cube, np.array([5.0, 10.0, 20.0]), "height", coord_units="m"
    )
    # Move height off the leading dimension.
    cube.transpose([1, 0, 2, 3])
    expected_coord_order = [
        coord.name() for coord in cube.coords(dim_coords=True)
    ]
    result = self.plugin.process(cube)
    # Height must stay in its (non-leading) position...
    self.assertEqual(result.coord_dims("height"), (1, ))
    # ...and the full dimension order must be untouched.
    result_coord_order = [
        coord.name() for coord in result.coords(dim_coords=True)
    ]
    self.assertListEqual(result_coord_order, expected_coord_order)
def setUp(self):
    """Set up a single-time cube with one zero point, and a copy expanded
    onto three hourly validity times."""
    data = np.ones((16, 16), dtype=np.float32)
    data[7, 7] = 0
    self.cube = set_up_variable_cube(
        data, spatial_grid="equalarea",
    )
    time_points = [
        datetime(2017, 11, 10, 2),
        datetime(2017, 11, 10, 3),
        datetime(2017, 11, 10, 4),
    ]
    # BUGFIX: is_datetime was passed the string "true", which only worked
    # by truthiness; pass the boolean the parameter expects.
    self.multi_time_cube = add_coordinate(
        self.cube,
        coord_points=time_points,
        coord_name="time",
        is_datetime=True,
    )
def error_threshold_cube(error_thresholds):
    """Create sample error-threshold cube"""
    # Monotonically decreasing exceedance probabilities, broadcast over a
    # 10x10 grid — one slice per threshold.
    prob = np.array([1.0, 0.8, 0.6, 0.4, 0.2, 0.0])
    prob_data = np.broadcast_to(prob[:, np.newaxis, np.newaxis], (6, 10, 10))
    base = set_up_probability_cube(
        prob_data.astype(np.float32),
        thresholds=error_thresholds,
        variable_name="forecast_error_of_lwe_thickness_of_precipitation_amount",
        threshold_units="m",
        attributes=ATTRIBUTES,
        spp__relative_to_threshold="above",
    )
    # Add five realizations as the leading dimension.
    return add_coordinate(
        base,
        coord_points=np.arange(5),
        coord_name="realization",
        coord_units="1",
        order=[1, 0, 2, 3],
    )
def setUp(self):
    """Set up cubes used in unit tests"""
    base = set_up_variable_cube(
        np.zeros((2, 2), dtype=np.float32),
        name="lwe_thickness_of_precipitation_amount",
        units="m",
        time=dt(2017, 1, 10, 4, 0),
        frt=dt(2017, 1, 10, 3, 0),
    )
    # Expand to two validity times, then fill each time slice with a
    # distinct constant (1.0 then 2.0).
    self.cube = add_coordinate(
        base,
        [dt(2017, 1, 10, 3, 0), dt(2017, 1, 10, 4, 0)],
        "time",
        is_datetime=True,
    )
    self.cube.data = np.array(
        [[[1.0, 1.0], [1.0, 1.0]], [[2.0, 2.0], [2.0, 2.0]]],
        dtype=np.float32,
    )
    self.coord_name = "forecast_period"
    self.units = Unit("seconds")
def create_wind_percentile_cube(data=None, perc_values=None, name="wind_speed_of_gust"):
    """Create a cube with percentile coordinate and two time slices"""
    if perc_values is None:
        perc_values = [50.0]
    if data is None:
        # Default data: 1.0 at the first time, 2.0 at the second.
        data = np.zeros((len(perc_values), 2, 2, 2), dtype=np.float32)
        data[:, 0, :, :] = 1.0
        data[:, 1, :, :] = 2.0
    data_times = [
        datetime(2015, 11, 19, 0, 30),
        datetime(2015, 11, 19, 1, 30),
    ]
    # Build from the first time slice, then expand along time.
    base = set_up_percentile_cube(
        data[:, 0, :, :],
        perc_values,
        name=name,
        units="m s-1",
        time=data_times[0],
        frt=datetime(2015, 11, 18, 21),
    )
    result = add_coordinate(base, data_times, "time", is_datetime=True)
    result.data = np.squeeze(data)
    return result
def test_cubelist_input(self):
    """Test when supplying a cubelist as input containing cubes
    representing UK deterministic and UK ensemble model configuration
    and unifying the forecast_reference_time, so that both model
    configurations have a common forecast_reference_time."""
    cube_uk_ens = set_up_variable_cube(
        np.full((3, 4, 4), 273.15, dtype=np.float32),
        time=self.cycletime,
        frt=datetime(2017, 1, 10, 4),
    )
    # set up forecast periods of 5, 7 and 9 hours
    # (validity times as epoch seconds)
    time_points = [1484031600, 1484038800, 1484046000]
    cube_uk_ens = add_coordinate(
        cube_uk_ens,
        time_points,
        "time",
        dtype=np.int64,
        coord_units="seconds since 1970-01-01 00:00:00",
    )
    # Expected deterministic cube: frt moved to the common cycletime and
    # forecast periods recomputed accordingly.
    expected_uk_det = self.cube_uk_det.copy()
    frt_units = expected_uk_det.coord("forecast_reference_time").units
    frt_points = [
        np.round(frt_units.date2num(self.cycletime)).astype(np.int64)
    ]
    expected_uk_det.coord("forecast_reference_time").points = frt_points
    expected_uk_det.coord("forecast_period").points = np.array([3, 5, 7
                                                                ]) * 3600
    # Expected ensemble cube: same common frt, shorter forecast periods.
    expected_uk_ens = cube_uk_ens.copy()
    expected_uk_ens.coord("forecast_reference_time").points = frt_points
    expected_uk_ens.coord("forecast_period").points = np.array([1, 3, 5
                                                                ]) * 3600
    expected = iris.cube.CubeList([expected_uk_det, expected_uk_ens])
    cubes = iris.cube.CubeList([self.cube_uk_det, cube_uk_ens])
    result = unify_cycletime(cubes, self.cycletime)
    self.assertIsInstance(result, iris.cube.CubeList)
    self.assertEqual(result, expected)
def test_height_and_realization_dict(self):
    """Test blending members with a configuration dictionary."""
    cube = set_up_variable_cube(274.0 * np.ones(
        (2, 2, 2), dtype=np.float32))
    cube = add_coordinate(cube, [10.0, 20.0], "height", coord_units="m")
    # Split into one cube per realization, as the plugin expects.
    cubes = iris.cube.CubeList([])
    for cube_slice in cube.slices_over("realization"):
        cubes.append(cube_slice)
    # Weights interpolated at heights 10 m and 20 m from the config
    # points (15 m, 25 m), per realization.
    expected_weights = np.array([[1.0, 0.5], [0.0, 0.5]])
    # Keys are realization numbers; each maps height points to weights.
    config_dict = {
        0: {
            "height": [15, 25],
            "weights": [1, 0],
            "units": "m"
        },
        1: {
            "height": [15, 25],
            "weights": [0, 1],
            "units": "m"
        },
    }
    plugin = ChooseWeightsLinear("height",
                                 config_dict,
                                 config_coord_name="realization")
    result = plugin.process(cubes)
    self.assertIsInstance(result, iris.cube.Cube)
    self.assertArrayAlmostEqual(result.data, expected_weights)
    self.assertAlmostEqual(result.name(), "weights")
    # The weights cube must retain all of the input coordinates.
    expected_coords = {
        "time",
        "forecast_reference_time",
        "forecast_period",
        "height",
        "realization",
    }
    result_coords = {coord.name() for coord in result.coords()}
    self.assertSetEqual(result_coords, expected_coords)
def _make_orogenh_cube(analysis_time, interval, max_lead_time):
    """Construct an orographic enhancement cube with data valid for every
    lead time

    Args:
        analysis_time (datetime.datetime): analysis (and frt) time
        interval (int): spacing between validity times, in minutes
        max_lead_time (int): maximum lead time to cover

    Returns:
        iris.cube.Cube: constant 0.05 mm/h enhancement with a time
        coordinate spanning the requested lead times
    """
    orogenh_data = 0.05 * np.ones((8, 8), dtype=np.float32)
    orogenh_cube = set_up_variable_cube(
        orogenh_data,
        name="orographic_enhancement",
        units="mm/h",
        spatial_grid="equalarea",
        time=analysis_time,
        frt=analysis_time,
    )
    # Build validity times at "interval"-minute spacing starting from the
    # analysis time.
    # NOTE(review): the increment happens before the loop-condition point
    # is appended, so the final point lies one interval BEYOND
    # max_lead_time — confirm this overshoot is intentional (e.g. to
    # bracket the last lead time for interpolation).
    time_points = [analysis_time]
    lead_time = 0
    while lead_time <= max_lead_time:
        lead_time += interval
        new_point = time_points[-1] + datetime.timedelta(seconds=60 * interval)
        time_points.append(new_point)
    orogenh_cube = add_coordinate(orogenh_cube, time_points, "time", is_datetime=True)
    return orogenh_cube
def set_up_wxcube(time_points=None, lat_lon=False):
    """
    Set up a wxcube for a particular time and location, which can cover
    the terminator and test the "update_daynight" functionality

    Args:
        time_points (list of datetime.datetime):
            List of time points as datetime instances
        lat_lon (bool):
            If True, returns a cube on a lat-lon grid.
            If False, returns equal area.

    Returns:
        iris.cube.Cube:
            cube of weather codes set to 1
            data shape (time_points, 16, 16)
    """
    # Default: equal-area grid over a domain crossing the terminator.
    cube_kwargs = {
        "name": "weather_code",
        "units": 1,
        "time": datetime.datetime(2018, 9, 12, 5, 43),
        "frt": datetime.datetime(2018, 9, 12, 3),
        "attributes": weather_code_attributes(),
        "spatial_grid": "equalarea",
        "domain_corner": (0, -30000),
    }
    if lat_lon:
        # Switch to a 1-degree lat-lon grid instead.
        cube_kwargs["spatial_grid"] = "latlon"
        cube_kwargs["domain_corner"] = (49, -8)
        cube_kwargs["grid_spacing"] = 1
    cube = set_up_variable_cube(
        np.ones((16, 16), dtype=np.float32), **cube_kwargs
    )
    if time_points is not None:
        cube = add_coordinate(cube, time_points, "time", is_datetime=True)
    return cube
def setUp(self):
    """Set up plugin with suitable parameters (used for dict only)"""
    self.plugin = ChooseWeightsLinear("forecast_period", CONFIG_DICT_UKV)
    # create a cube with irrelevant threshold coordinate (dimensions:
    # model_id: 2; threshold: 2; latitude: 2; longitude: 2)
    base = set_up_probability_cube(
        np.ones((2, 2, 2), dtype=np.float32),
        thresholds=np.array([278.0, 279.0], dtype=np.float32),
    )
    self.cube = add_coordinate(base, [1000, 2000], "model_id", dtype=np.int32)
    self.cube.add_aux_coord(
        AuxCoord(["uk_det", "uk_ens"], long_name="model_configuration"),
        data_dims=0,
    )
    # create a reference cube as above WITHOUT threshold
    self.reference_cube = iris.util.squeeze(self.cube[:, 0, :, :].copy())
    # split into a cubelist by model
    self.reference_cubelist = iris.cube.CubeList(
        [self.reference_cube[0], self.reference_cube[1]]
    )