Ejemplo n.º 1
0
 def test_error_remove_bounds(self):
     """Check expand_bounds refuses a mixed bounded/unbounded input.

     Stripping bounds from one cube's time coordinate should make the
     call fail rather than silently remove bounds from the output."""
     self.cubelist[1].coord("time").bounds = None
     expected_message = "cannot expand bounds for a mixture of bounded / unbounded"
     with self.assertRaisesRegex(ValueError, expected_message):
         expand_bounds(self.cubelist[0], self.cubelist, ["time"])
Ejemplo n.º 2
0
    def _set_metadata(cube_subset):
        """Apply accumulation metadata to a cube derived from the subset.

        The time and forecast_period bounds are widened to span the whole
        accumulation period, with each coordinate point placed at the
        upper bound of that period.

        Args:
            cube_subset(iris.cube.CubeList):
                Cubelist containing the subset of cubes used to calculate
                the accumulations. The bounds from these cubes will be used
                to set the metadata on the output accumulation cube.

        Returns:
            accumulation_cube (iris.cube.Cube):
                Accumulation cube with the desired metadata.

        """
        # Point goes at the upper bound for both expanded coordinates.
        coords_to_expand = {'time': 'upper', 'forecast_period': 'upper'}
        template = cube_subset[0].copy()
        accumulation_cube = expand_bounds(
            template,
            iris.cube.CubeList(cube_subset),
            expanded_coords=coords_to_expand)
        accumulation_cube.rename('lwe_thickness_of_precipitation_amount')
        accumulation_cube.units = 'm'
        return accumulation_cube
Ejemplo n.º 3
0
    def _set_metadata(cube_subset):
        """Apply accumulation metadata to a cube derived from the subset.

        The time and forecast_period bounds are widened to span the whole
        accumulation period, with each coordinate point placed at the
        upper bound of that period; a "sum" cell method over time is
        attached to record the accumulation.

        Args:
            cube_subset(iris.cube.CubeList):
                Cubelist containing the subset of cubes used to calculate
                the accumulations. The bounds from these cubes will be used
                to set the metadata on the output accumulation cube.

        Returns:
            iris.cube.Cube:
                Accumulation cube with the desired metadata.

        """
        template = cube_subset[0].copy()
        subset_as_cubelist = iris.cube.CubeList(cube_subset)
        accumulation_cube = expand_bounds(
            template, subset_as_cubelist, ["time", "forecast_period"],
        )
        accumulation_cube.rename("lwe_thickness_of_precipitation_amount")
        accumulation_cube.units = "m"
        # Record the accumulation as a sum over the time coordinate.
        cell_method = iris.coords.CellMethod(
            "sum", coords=accumulation_cube.coord("time"))
        accumulation_cube.add_cell_method(cell_method)
        return accumulation_cube
Ejemplo n.º 4
0
    def test_basic_no_time_bounds(self):
        """Check sensible bounds are created when inputs have no time bounds.
        """
        for cube in self.cubelist:
            cube.coord("time").bounds = None

        def _int_time(*timetuple):
            # Encode a datetime as a rounded int64 in TIME_UNIT.
            return np.around(
                date2num(dt(*timetuple), TIME_UNIT, CALENDAR)
            ).astype(np.int64)

        time_point = _int_time(2015, 11, 19, 2)
        time_bounds = [_int_time(2015, 11, 19, 1), _int_time(2015, 11, 19, 3)]
        expected_result = iris.coords.DimCoord(
            time_point, bounds=time_bounds, standard_name="time", units=TIME_UNIT
        )

        result = expand_bounds(
            self.cubelist[0], self.cubelist, ["time"], use_midpoint=True
        )
        self.assertEqual(result.coord("time"), expected_result)
Ejemplo n.º 5
0
    def process(
        self, cube_list: Union[List[Cube], CubeList], new_diagnostic_name: str,
    ) -> Cube:
        """
        Merge a list of cubes into one, combining their data with the
        configured operation.  Metadata is templated on the first cube in
        the list.  Where the combination expands a coordinate (e.g. time
        for accumulations or maxima in period), the expanded coordinate's
        point is set to its new upper bound.

        Args:
            cube_list:
                List of cubes to combine.
            new_diagnostic_name:
                New name for the combined diagnostic.

        Returns:
            Cube containing the combined data.

        Raises:
            ValueError: If the cube_list contains only one cube.
        """
        # A single cube cannot be combined with anything.
        if len(cube_list) < 2:
            raise ValueError("Expecting 2 or more cubes in cube_list")

        self._check_dimensions_match(cube_list)
        combined = self._combine_cube_data(cube_list)
        coords_needing_expansion = self._get_expanded_coord_names(cube_list)
        if coords_needing_expansion:
            combined = expand_bounds(combined, cube_list, coords_needing_expansion)
        combined.rename(new_diagnostic_name)
        return combined
Ejemplo n.º 6
0
    def test_multiple_coordinate_expanded(self):
        """Check sensible bounds when expanding two coordinates at once
        (time and forecast_period)."""
        time_point = np.around(
            date2num(dt(2015, 11, 19, 3), TIME_UNIT, CALENDAR)
        ).astype(np.int64)
        expected_result_time = iris.coords.DimCoord(
            [time_point],
            bounds=self.expected_bounds_seconds,
            standard_name='time',
            units=TIME_UNIT)
        expected_result_fp = iris.coords.DimCoord(
            [10800],
            bounds=[0, 10800],
            standard_name='forecast_period',
            units='seconds')

        coords_to_expand = {'time': 'upper', 'forecast_period': 'upper'}
        result = expand_bounds(
            self.cubelist[0], self.cubelist, coords_to_expand)
        self.assertEqual(result.coord('time'), expected_result_time)
        self.assertEqual(result.coord('forecast_period'), expected_result_fp)

        # Precision of the time coordinate must be preserved.
        self.assertEqual(result.coord('time').dtype, np.int64)
Ejemplo n.º 7
0
 def test_basic_time(self):
     """Check expand_bounds produces sensible time bounds."""
     expected_point = np.around(
         date2num(dt(2015, 11, 19, 3), TIME_UNIT, CALENDAR)
     ).astype(np.int64)
     expected_result = iris.coords.DimCoord(
         [expected_point],
         bounds=self.expected_bounds_seconds,
         standard_name="time",
         units=TIME_UNIT,
     )
     result = expand_bounds(self.cubelist[0], self.cubelist, ["time"])
     self.assertEqual(result.coord("time"), expected_result)
Ejemplo n.º 8
0
 def test_time_mid_data_precision(self):
     """Check expand_bounds keeps int32 precision rather than escalating
     to a wider dtype."""
     point = np.array([5400], dtype=np.int32)
     bounds = np.array([0, 10800], dtype=np.int32)
     expected_result = iris.coords.DimCoord(
         point,
         bounds=bounds,
         standard_name='forecast_period',
         units='seconds')
     result = expand_bounds(self.cubelist[0], self.cubelist,
                            {'forecast_period': 'mid'})
     self.assertEqual(result.coord('forecast_period'), expected_result)
     self.assertEqual(result.coord('forecast_period').dtype, np.int32)
Ejemplo n.º 9
0
 def test_time_mid_data_precision(self):
     """Check expand_bounds keeps int32 precision rather than escalating
     to a wider dtype when the midpoint is requested."""
     point = np.array([5400], dtype=np.int32)
     bounds = np.array([0, 10800], dtype=np.int32)
     expected_result = iris.coords.DimCoord(
         point,
         bounds=bounds,
         standard_name="forecast_period",
         units="seconds",
     )
     result = expand_bounds(
         self.cubelist[0], self.cubelist, ["forecast_period"],
         use_midpoint=True)
     self.assertEqual(result.coord("forecast_period"), expected_result)
     self.assertEqual(result.coord("forecast_period").dtype, np.int32)
Ejemplo n.º 10
0
 def test_basic_time_upper(self):
     """Check expand_bounds produces sensible bounds when 'upper' is
     requested for the time point."""
     expected_point = np.around(
         date2num(dt(2015, 11, 19, 3), TIME_UNIT, CALENDAR)
     ).astype(np.int64)
     expected_result = iris.coords.DimCoord(
         [expected_point],
         bounds=self.expected_bounds_seconds,
         standard_name='time',
         units=TIME_UNIT)
     result = expand_bounds(
         self.cubelist[0], self.cubelist, {'time': 'upper'})
     self.assertEqual(result.coord('time'), expected_result)
Ejemplo n.º 11
0
 def test_float_time_mid(self):
     """Check expand_bounds produces sensible bounds when 'mid' is
     requested for times expressed in hours (float values)."""
     time_unit = 'hours since 1970-01-01 00:00:00'
     # Convert every input cube to an hourly, floating-point time axis.
     for cube in self.cubelist:
         cube.coord("time").convert_units(time_unit)
     midpoint = date2num(dt(2015, 11, 19, 1, 30), time_unit, CALENDAR)
     expected_result = iris.coords.DimCoord(
         [midpoint],
         bounds=self.expected_bounds_hours,
         standard_name='time',
         units=time_unit)
     result = expand_bounds(
         self.cubelist[0], self.cubelist, {'time': 'mid'})
     self.assertEqual(result.coord('time'), expected_result)
     self.assertEqual(result.coord('time').dtype, np.float32)
Ejemplo n.º 12
0
 def test_basic_time_mid(self):
     """Check expand_bounds produces sensible bounds when the midpoint is
     requested for times in seconds."""
     midpoint = np.around(
         date2num(dt(2015, 11, 19, 1, 30), TIME_UNIT, CALENDAR)
     ).astype(np.int64)
     expected_result = iris.coords.DimCoord(
         [midpoint],
         bounds=self.expected_bounds_seconds,
         standard_name='time',
         units=TIME_UNIT)
     result = expand_bounds(
         self.cubelist[0], self.cubelist, ['time'], use_midpoint=True)
     self.assertEqual(result.coord('time'), expected_result)
     # Integer time precision must survive the midpoint calculation.
     self.assertEqual(result.coord('time').dtype, np.int64)
Ejemplo n.º 13
0
    def process(
        self,
        cube_list,
        new_diagnostic_name,
        use_midpoint=False,
    ):
        """
        Merge a list of cubes into one, combining their data with the
        configured operation.  Metadata is templated on the first cube in
        the list.

        Args:
            cube_list (list of iris.cube.Cube):
                List of cubes to combine.
            new_diagnostic_name (str):
                New name for the combined diagnostic.
            use_midpoint (bool):
                Determines the nature of the points and bounds for expanded
                coordinates.  If False, the upper bound of the coordinate is
                used as the point values.  If True, the midpoint is used.

        Returns:
            iris.cube.Cube:
                Cube containing the combined data.

        Raises:
            ValueError: If the cube_list contains only one cube.
        """
        # Combining requires at least two inputs.
        if len(cube_list) < 2:
            raise ValueError("Expecting 2 or more cubes in cube_list")

        self._check_dimensions_match(cube_list)
        combined = self._combine_cube_data(cube_list)
        coords_needing_expansion = self._get_expanded_coord_names(cube_list)
        if coords_needing_expansion:
            combined = expand_bounds(
                combined,
                cube_list,
                coords_needing_expansion,
                use_midpoint=use_midpoint,
            )
        combined.rename(new_diagnostic_name)
        return combined
Ejemplo n.º 14
0
    def process(self, cube_list, new_diagnostic_name, coords_to_expand=None):
        """
        Create a combined cube.

        Args:
            cube_list (iris.cube.CubeList or list):
                List of cubes to combine.
            new_diagnostic_name (str):
                New name for the combined diagnostic.
            coords_to_expand (dict or None):
                Coordinates to be expanded as a key, with the value
                indicating whether the upper or mid point of the coordinate
                should be used as the point value, e.g.
                {'time': 'upper'}.
        Returns:
            iris.cube.Cube:
                Cube containing the combined data.
        Raises:
            ValueError: If the cubelist contains only one cube.
        """
        # Combining requires at least two inputs.
        if len(cube_list) < 2:
            raise ValueError('Expecting 2 or more cubes in cube_list')

        self._check_dimensions_match(cube_list)

        # Fold the data of the remaining cubes into a copy of the first
        # using the configured operator (add, subtract, min, max, multiply).
        result = cube_list[0].copy()
        for other in cube_list[1:]:
            result.data = self.operator(result.data, other.data)

        # For 'mean' the operator is np.add, so divide by the cube count.
        if self.operation == 'mean':
            result.data = result.data / len(cube_list)

        # Optionally widen coordinate bounds, then apply the new name.
        if coords_to_expand is not None:
            result = expand_bounds(result, cube_list, coords_to_expand)
        result.rename(new_diagnostic_name)

        return result
Ejemplo n.º 15
0
    def process(
        self,
        cube_list,
        new_diagnostic_name,
        broadcast_to_coords=None,
        use_midpoint=False,
    ):
        """
        Merge a list of cubes into one, combining their data with the
        configured operation.  Metadata is templated on the first cube in
        the list.

        NOTE the behaviour for the "multiply" operation is different from
        other types of cube combination.  The only valid use case for
        "multiply" is to apply a factor that conditions an input probability
        field - that is, to apply Bayes Theorem.  The input probability is
        therefore used as the source of ALL input metadata, and should always
        be the first cube in the input list.  The factor(s) by which this is
        multiplied are not compared for any mis-match in scalar coordinates,
        neither do they to contribute to expanded bounds.

        TODO the "multiply" case should be factored out into a separate plugin
        given its substantial differences from other combine use cases.

        Args:
            cube_list (iris.cube.CubeList or list):
                List of cubes to combine.
            new_diagnostic_name (str):
                New name for the combined diagnostic.
            broadcast_to_coords (list):
                Specifies a list of coord names that exist only on the first cube that
                the other cube(s) need(s) broadcasting to prior to the combine.
            use_midpoint (bool):
                Determines the nature of the points and bounds for expanded
                coordinates.  If False, the upper bound of the coordinate is
                used as the point values.  If True, the midpoint is used.

        Returns:
            iris.cube.Cube:
                Cube containing the combined data.

        Raises:
            ValueError: If the cube_list contains only one cube.
        """
        # Combining requires at least two inputs.
        if len(cube_list) < 2:
            raise ValueError("Expecting 2 or more cubes in cube_list")

        self.broadcast_coords = broadcast_to_coords
        if self.broadcast_coords:
            cube_list = self._setup_coords_for_broadcast(cube_list)
        self._check_dimensions_match(cube_list)

        # Fold the data of the remaining cubes into a copy of the first
        # using the configured operator (add, subtract, min, max, multiply).
        result = cube_list[0].copy()
        for other in cube_list[1:]:
            result.data = self.operator(result.data, other.data)

        # For 'mean' the operator is np.add, so divide by the cube count.
        if self.operation == "mean":
            result.data = result.data / len(cube_list)

        # "multiply" keeps the first cube's coordinate metadata untouched;
        # every other operation widens coordinate bounds where required.
        if self.operation != "multiply":
            coords_needing_expansion = self._get_expanded_coord_names(cube_list)
            if coords_needing_expansion:
                result = expand_bounds(
                    result,
                    cube_list,
                    coords_needing_expansion,
                    use_midpoint=use_midpoint,
                )

        result.rename(new_diagnostic_name)

        return result
Ejemplo n.º 16
0
 def test_fails_with_multi_point_coord(self):
     """Check an error is raised when asked to expand a coordinate that
     has more than one point."""
     expected_message = "the expand bounds function should only be used on a"
     with self.assertRaisesRegex(ValueError, expected_message):
         expand_bounds(self.cubelist[0], self.cubelist, ["latitude"])
Ejemplo n.º 17
0
    def process(self,
                cube_list,
                new_diagnostic_name,
                revised_coords=None,
                revised_attributes=None,
                expanded_coord=None):
        """
        Create a combined cube.

        Args:
            cube_list (iris.cube.CubeList):
                Cube List contain the cubes to combine.
            new_diagnostic_name (str):
                New name for the combined diagnostic.
        Keyword Args:
            revised_coords (dict or None):
                Revised coordinates for combined cube.
            revised_attributes (dict or None):
                Revised attributes for combined cube.
            expanded_coord (dict or None):
                Coordinates to be expanded as a key, with the value
                indicating whether the upper or mid point of the coordinate
                should be used as the point value, e.g.
                {'time': 'upper'}.
        Returns:
            result (iris.cube.Cube):
                Cube containing the combined data.
        Raises:
            TypeError: If cube_list is not an iris.cube.CubeList.
            ValueError: If the cubelist contains only one cube.
        """
        if not isinstance(cube_list, iris.cube.CubeList):
            raise TypeError(
                'Expecting data to be an instance of iris.cube.CubeList '
                'but is {}.'.format(type(cube_list)))
        if len(cube_list) < 2:
            raise ValueError('Expecting 2 or more cubes in cube_list')

        # The first cube provides the template and the output dtype.
        data_type = cube_list[0].dtype
        result = cube_list[0].copy()

        # Fold each subsequent cube into the running result, resolving any
        # metadata differences before each combination step.
        for cube in cube_list[1:]:
            first, second = resolve_metadata_diff(
                result.copy(),
                cube.copy(),
                warnings_on=self.warnings_on)
            result = self.combine(first, second)

        if self.operation == 'mean':
            result.data = result.data / len(cube_list)

        # Optionally widen coordinate bounds on the combined cube.
        if expanded_coord:
            result = expand_bounds(result, cube_list, expanded_coord)

        result = amend_metadata(result,
                                new_diagnostic_name,
                                data_type,
                                revised_coords,
                                revised_attributes,
                                warnings_on=self.warnings_on)

        return result
Ejemplo n.º 18
0
    def process(self, cubes):
        """
        Calculate period precipitation accumulations from precipitation
        rate fields.  All calculations are performed in SI units, so
        precipitation rates are converted to "m/s" and times into seconds
        before calculations are performed.  The output units of accumulation
        are set by the plugin keyword argument accumulation_units.

        Args:
            cubes (iris.cube.CubeList):
                A cubelist containing input precipitation rate cubes.
        Returns:
            accumulation_cubes (iris.cube.CubeList):
                A cubelist containing precipitation accumulation cubes where
                the accumulation periods are determined by plugin argument
                accumulation_period.
        """
        # Standardise inputs to expected units.
        cubes = enforce_coordinate_units_and_dtypes(
            cubes, ['time', 'forecast_reference_time', 'forecast_period'],
            inplace=False)
        enforce_diagnostic_units_and_dtypes(cubes)

        # Sort cubes into time order; a single regular spacing is required,
        # so unpacking the unique time differences must yield one value.
        cubes, times = self.sort_cubes_by_time(cubes)
        try:
            (time_interval,) = np.unique(np.diff(times, axis=0))
        except ValueError:
            msg = ("Accumulation is designed to work with "
                   "rates cubes at regular time intervals. Cubes "
                   "provided are unevenly spaced in time; time intervals are "
                   "{}.".format(np.diff(times, axis=0)))
            raise ValueError(msg)

        accumulation_cubes = iris.cube.CubeList()
        for cube_subset in self._get_period_sets(time_interval, cubes):
            # When the accumulation period equals the rates cube time
            # interval, the trailing single-cube subset is not used.
            if len(cube_subset) == 1:
                continue

            # Trapezoidal integration: average the rates that bookend each
            # interval and multiply by the interval length.
            total = 0.
            for start_cube, end_cube in zip(cube_subset[:-1], cube_subset[1:]):
                total += ((start_cube.data + end_cube.data) *
                          time_interval * 0.5)

            # Expand time and forecast_period to span the accumulation
            # period, placing points at the upper bounds.
            accumulation_cube = expand_bounds(
                cube_subset[0],
                iris.cube.CubeList(cube_subset),
                expanded_coords={
                    'time': 'upper',
                    'forecast_period': 'upper'
                })
            accumulation_cube.rename('lwe_thickness_of_precipitation_amount')
            accumulation_cube.units = 'm'

            # Insert the calculated data and convert to the requested units.
            accumulation_cube.data = total
            accumulation_cube.convert_units(self.accumulation_units)
            accumulation_cubes.append(accumulation_cube)

        return accumulation_cubes