def _rationalise_blend_time_coords(self, cubelist, cycletime=None):
    """
    Updates time coordinates on unmerged input cubes before blending
    depending on the coordinate over which the blend will be performed.
    Modifies cubes in place.

    If self.blend_coord is forecast_reference_time, ensures the cube does
    not have a forecast_period coordinate (this is recreated after
    blending). If self.weighting_coord is forecast_period, equalises
    forecast_reference_time on each cube before blending.

    Args:
        cubelist (iris.cube.CubeList):
            List of cubes containing data to be blended
        cycletime (str or None):
            The cycletime in a YYYYMMDDTHHMMZ format e.g. 20171122T0100Z

    Raises:
        ValueError: if forecast_reference_time (to be unified) is a
            dimension coordinate
    """
    if "forecast_reference_time" in self.blend_coord:
        # forecast_period is recreated after blending, so strip it here
        # from any cube that carries it.
        for cube in cubelist:
            if cube.coords("forecast_period"):
                cube.remove_coord("forecast_period")

    # When blending models with weights scaled by forecast period, all
    # cubes must share a common (latest or user-supplied) cycle time.
    blending_models = "model" in self.blend_coord
    weights_by_fp = (self.weighting_coord is not None
                     and "forecast_period" in self.weighting_coord)
    if blending_models and weights_by_fp:
        if cycletime is None:
            cycletime = find_latest_cycletime(cubelist)
        else:
            cycletime = cycletime_to_datetime(cycletime)
        unify_forecast_reference_time(cubelist, cycletime)
def process(self, cubelist):
    """
    Take an input cubelist containing forecasts from different cycles and
    merges them into a single cube.

    The steps taken are:
        1. If no cycletime is given then find the latest cycle time from
           the input cubes.
        2. Update the forecast periods in each input cube to be relative
           to the new cycletime.
        3. Checks if there are duplicate realization numbers. If a
           duplicate is found, renumbers all of the realizations to remove
           any duplicates.
        4. Merge cubes into one cube, removing any metadata that doesn't
           match.
    """
    # Resolve the reference cycle time: either the configured one, or the
    # latest cycle time present in the inputs.
    cycletime = (find_latest_cycletime(cubelist)
                 if self.cycletime is None
                 else cycletime_to_datetime(self.cycletime))
    cubelist = unify_forecast_reference_time(cubelist, cycletime)

    # Gather every realization number across all input cubes.
    all_realizations = np.concatenate(
        [cube.coord("realization").points for cube in cubelist])

    # Fewer unique values than total values means duplicates exist, so
    # renumber realizations sequentially across the whole cubelist.
    if len(np.unique(all_realizations)) < len(all_realizations):
        next_realization = 0
        for cube in cubelist:
            count = len(cube.coord("realization").points)
            cube.coord("realization").points = np.arange(
                next_realization, next_realization + count)
            next_realization += count

    # Slice over realization to deal with cases where direct concatenation
    # would result in a non-monotonic coordinate.
    return concatenate_cubes(
        cubelist, master_coord="realization",
        coords_to_slice_over=["realization"])
def test_cube_input(self):
    """Test when supplying a cube representing a UK deterministic model
    configuration only. This effectively updates the
    forecast_reference_time on the cube to the specified cycletime."""
    expected = self.cube_uk_det.copy()
    frt_coord = expected.coord("forecast_reference_time")
    frt_coord.points = [
        np.round(frt_coord.units.date2num(self.cycletime)).astype(np.int64)]
    expected.coord("forecast_period").points = 3600 * np.array([3, 5, 7])

    result = unify_forecast_reference_time(self.cube_uk_det, self.cycletime)

    self.assertIsInstance(result, iris.cube.CubeList)
    self.assertEqual(result[0], expected)
def test_cube_input_no_forecast_period_coordinate(self):
    """Test when supplying a cube representing a UK deterministic model
    configuration only. This forces a forecast_period coordinate to be
    created from a forecast_reference_time coordinate and a time
    coordinate."""
    expected = self.cube_uk_det.copy()
    frt_coord = expected.coord("forecast_reference_time")
    frt_coord.points = [
        np.round(frt_coord.units.date2num(self.cycletime)).astype(np.int64)]
    expected.coord("forecast_period").points = 3600 * np.array([3, 5, 7])

    # Input deliberately lacks forecast_period, so it must be derived.
    stripped_cube = self.cube_uk_det.copy()
    stripped_cube.remove_coord("forecast_period")

    result = unify_forecast_reference_time(stripped_cube, self.cycletime)

    self.assertIsInstance(result, iris.cube.CubeList)
    self.assertEqual(result[0], expected)
def rationalise_blend_time_coords(cubelist, blend_coord, cycletime=None,
                                  weighting_coord=None):
    """
    Updates time coordinates on unmerged input cubes before blending
    depending on the coordinate over which the blend will be performed.
    Modifies cubes in place.

    If blend_coord is forecast_reference_time, ensures the cube does not
    have a forecast_period dimension. If weighting_coord is
    forecast_period, equalises forecast_reference_time on each cube before
    blending.

    Args:
        cubelist (iris.cube.CubeList):
            List of cubes containing data to be blended
        blend_coord (str):
            Name of coordinate over which the blend will be performed

    Kwargs:
        cycletime (str or None):
            The cycletime in a YYYYMMDDTHHMMZ format e.g. 20171122T0100Z
        weighting_coord (str or None):
            The coordinate across which weights will be scaled in a
            multi-model blend.

    Raises:
        ValueError: if forecast_reference_time (to be unified) is a
            dimension coordinate (raised from within
            unify_forecast_reference_time)
    """
    if "forecast_reference_time" in blend_coord:
        # forecast_period is incompatible with a forecast_reference_time
        # blend; strip it from any cube that carries it.
        for cube in cubelist:
            coord_names = [x.name() for x in cube.coords()]
            if "forecast_period" in coord_names:
                cube.remove_coord("forecast_period")

    # if blending models using weights by forecast period, set forecast
    # reference times to current cycle time
    if ("model" in blend_coord and weighting_coord is not None
            and "forecast_period" in weighting_coord):
        if cycletime is None:
            cycletime = find_latest_cycletime(cubelist)
        else:
            cycletime = cycletime_to_datetime(cycletime)
        # Fix: the original rebound the result to the local name
        # "cubelist", which is never read again and is invisible to the
        # caller — the assignment was dead code, and inconsistent with the
        # method version of this function which calls the helper without
        # assignment.
        # NOTE(review): the tests suggest unify_forecast_reference_time
        # returns a new CubeList of updated cubes; if it does NOT also
        # modify the input cubes in place, the unified result is discarded
        # here and callers never see it — confirm the helper's contract.
        unify_forecast_reference_time(cubelist, cycletime)
def test_cubelist_input(self):
    """Test when supplying a cubelist as input containing cubes
    representing UK deterministic and UK ensemble model configuration
    and unifying the forecast_reference_time, so that both model
    configurations have a common forecast_reference_time."""
    # Build the ensemble cube with forecast periods of 5, 7 and 9 hours.
    cube_uk_ens = set_up_variable_cube(
        np.full((3, 4, 4), 273.15, dtype=np.float32),
        time=self.cycletime,
        frt=datetime.datetime(2017, 1, 10, 4, 0))
    cube_uk_ens.remove_coord("forecast_period")
    time_points = [1484031600, 1484038800, 1484046000]
    cube_uk_ens = add_coordinate(
        cube_uk_ens, time_points, "time", dtype=np.int64,
        coord_units="seconds since 1970-01-01 00:00:00")
    cube_uk_ens.add_aux_coord(
        forecast_period_coord(cube_uk_ens), data_dims=0)

    det_expected = self.cube_uk_det.copy()
    frt_units = det_expected.coord('forecast_reference_time').units
    frt_points = [
        np.round(frt_units.date2num(self.cycletime)).astype(np.int64)]
    det_expected.coord("forecast_reference_time").points = frt_points
    det_expected.coord("forecast_period").points = (
        3600 * np.array([3, 5, 7]))

    ens_expected = cube_uk_ens.copy()
    ens_expected.coord("forecast_reference_time").points = frt_points
    ens_expected.coord("forecast_period").points = (
        3600 * np.array([1, 3, 5]))

    expected = iris.cube.CubeList([det_expected, ens_expected])
    inputs = iris.cube.CubeList([self.cube_uk_det, cube_uk_ens])

    result = unify_forecast_reference_time(inputs, self.cycletime)

    self.assertIsInstance(result, iris.cube.CubeList)
    self.assertEqual(result, expected)