Example #1
    def run(self,
            cube: Cube,
            radius: float,
            mask_cube: Optional[Cube] = None) -> Cube:
        """
        Call the methods required to calculate and apply a circular
        neighbourhood.

        Args:
            cube:
                Cube containing the array to apply CircularNeighbourhood
                processing to.
            radius:
                Radius in metres for use in specifying the number of
                grid cells used to create a circular neighbourhood.
            mask_cube:
                Cube containing the array to be used as a mask.

        Returns:
            Cube containing the smoothed field after the kernel has been
            applied.
        """
        if mask_cube is not None:
            msg = ("The use of a mask cube with a circular kernel is not "
                   "yet implemented.")
            raise NotImplementedError(msg)

        # Check that the cube has an equal area grid.
        check_if_grid_is_equal_area(cube)
        grid_cells = distance_to_number_of_grid_cells(cube, radius)
        cube = self.apply_circular_kernel(cube, grid_cells)
        return cube
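For orientation, the sketch below constructs a minimal cube that should satisfy check_if_grid_is_equal_area: both spatial coordinates are projection coordinates in metres with the same, even spacing. The cube setup is an illustrative assumption, not taken from the library's own test utilities.

# Illustrative only: a small equal-area grid on which
# check_if_grid_is_equal_area should return None rather than raise.
import numpy as np
from iris.coords import DimCoord
from iris.cube import Cube

x_coord = DimCoord(np.arange(0.0, 10000.0, 2000.0, dtype=np.float32),
                   standard_name="projection_x_coordinate", units="m")
y_coord = DimCoord(np.arange(0.0, 10000.0, 2000.0, dtype=np.float32),
                   standard_name="projection_y_coordinate", units="m")
cube = Cube(np.zeros((5, 5), dtype=np.float32),
            long_name="rainfall_rate", units="mm h-1",
            dim_coords_and_dims=[(y_coord, 0), (x_coord, 1)])

check_if_grid_is_equal_area(cube)  # expected to pass silently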
Example #2
 def test_wrong_coordinate(self):
     """Test an exception is raised if the x and y coordinates are not
     projection_x_coordinate or projection_y_coordinate."""
     cube = set_up_cube_lat_long()
     msg = "Invalid grid"
     with self.assertRaisesRegex(ValueError, msg):
         check_if_grid_is_equal_area(cube)
Example #3
    def _update_spatial_weights(self, cube, weights, fuzzy_length):
        """
        Update weights using spatial information

        Args:
            cube (iris.cube.Cube):
                Cube of input data to be blended
            weights (iris.cube.Cube):
                Initial 1D cube of weights scaled by self.weighting_coord
            fuzzy_length (float):
                Distance (in metres) over which to smooth weights at domain
                boundaries

        Returns:
            iris.cube.Cube:
                Updated 3D cube of spatially-varying weights
        """
        check_if_grid_is_equal_area(cube)
        grid_cells = distance_to_number_of_grid_cells(
            cube, fuzzy_length, return_int=False
        )
        plugin = SpatiallyVaryingWeightsFromMask(
            self.blend_coord, fuzzy_length=grid_cells
        )
        weights = plugin(cube, weights)
        return weights
Example #4
    def run(self, cube, radius):
        """

        Call the methods required to calculate and apply a circular
        neighbourhood.

        Args:
            cube : iris.cube.Cube
                Cube containing the array to apply CircularNeighbourhood
                processing to.
            radius : float
                Radius in metres for use in specifying the number of
                grid cells used to create a circular neighbourhood.

        Returns:
            cube : iris.cube.Cube
                Cube containing the smoothed field after the kernel has been
                applied.

        """
        # Check that the cube has an equal area grid.
        check_if_grid_is_equal_area(cube)
        ranges = convert_distance_into_number_of_grid_cells(
            cube, radius, MAX_RADIUS_IN_GRID_CELLS)
        cube = self.apply_circular_kernel(cube, ranges)
        return cube
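On an equal-area grid, the distance-to-grid-cells helpers used above amount to dividing the radius by the grid spacing (plus validation and, here, a cap of MAX_RADIUS_IN_GRID_CELLS). A hedged sketch of that arithmetic with assumed numbers:

# Assumed arithmetic behind the conversion, for illustration only:
# an 8 km radius on a 2 km grid spans 4 grid cells.
radius_metres = 8000.0
grid_spacing_metres = 2000.0  # assumed grid length
grid_cells = int(radius_metres // grid_spacing_metres)
print(grid_cells)  # 4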
Example #5
    def _check_input_cubes(cube1: Cube, cube2: Cube) -> None:
        """Check that input cubes have appropriate and matching dimensions"""
        # check the nature of the input cubes: raise an error if the data
        # types differ, and a warning if they are not both precipitation
        if cube1.name() != cube2.name():
            msg = "Input cubes contain different data types {} and {}"
            raise ValueError(msg.format(cube1.name(), cube2.name()))

        data_name = cube1.name().lower()
        if "rain" not in data_name and "precipitation" not in data_name:
            msg = ("Input data are of non-precipitation type {}.  Plugin "
                   "parameters have not been tested and may not be appropriate"
                   " for this variable.")
            warnings.warn(msg.format(cube1.name()))

        # check cubes have exactly two spatial dimension coordinates and a
        # scalar time coordinate
        check_input_coords(cube1, require_time=True)
        check_input_coords(cube2, require_time=True)

        # check cube dimensions match
        if cube1.coord(axis="x") != cube2.coord(axis="x") or cube1.coord(
                axis="y") != cube2.coord(axis="y"):
            raise InvalidCubeError("Input cubes on unmatched grids")

        # check grids are equal area
        check_if_grid_is_equal_area(cube1)
        check_if_grid_is_equal_area(cube2)
Example #6
 def test_lat_lon_failure_with_override(self):
     """Test that a lat/lon cube still fails when 'require_equal_xy_spacing'
     is set to False"""
     msg = "Unable to convert from"
     with self.assertRaisesRegex(ValueError, msg):
         check_if_grid_is_equal_area(self.lat_lon_cube,
                                     require_equal_xy_spacing=False)
Example #7
    def run(self, cube, radius, mask_cube=None):
        """

        Call the methods required to calculate and apply a circular
        neighbourhood.

        Args:
            cube (iris.cube.Cube):
                Cube containing the array to apply CircularNeighbourhood
                processing to.
            radius (float):
                Radius in metres for use in specifying the number of
                grid cells used to create a circular neighbourhood.
            mask_cube (iris.cube.Cube or None):
                Cube containing the array to be used as a mask.

        Returns:
            iris.cube.Cube:
                Cube containing the smoothed field after the kernel has been
                applied.

        """
        if mask_cube is not None:
            msg = ("The use of a mask cube with a circular kernel is not "
                   "yet implemented.")
            raise NotImplementedError(msg)

        # Check that the cube has an equal area grid.
        check_if_grid_is_equal_area(cube)
        grid_cells_x = convert_distance_into_number_of_grid_cells(
            cube, radius, max_distance_in_grid_cells=MAX_RADIUS_IN_GRID_CELLS)
        ranges = (grid_cells_x, grid_cells_x)
        cube = self.apply_circular_kernel(cube, ranges)
        return cube
Example #8
    def run(self, cube, radius):
        """
        Method to apply a circular kernel to the data within the input cube in
        order to derive percentiles over the kernel.

        Args:
            cube : iris.cube.Cube
                Cube containing array to apply processing to.
            radius : float
                Radius in metres for use in specifying the number of
                grid cells used to create a circular neighbourhood.

        Returns:
            result : iris.cube.Cube
                Cube containing the percentile fields.
                Has percentile as an added dimension.

        """
        # Check that the cube has an equal area grid.
        check_if_grid_is_equal_area(cube)
        # Take data array and identify X and Y axes indices
        ranges_tuple = convert_distance_into_number_of_grid_cells(
            cube, radius, MAX_RADIUS_IN_GRID_CELLS)
        ranges_xy = np.array(ranges_tuple)
        kernel = circular_kernel(ranges_xy, ranges_tuple, weighted_mode=False)
        # Loop over each 2D slice to reduce memory demand and derive
        # percentiles on the kernel. Will return an extra dimension.
        pctcubelist = iris.cube.CubeList()
        for slice_2d in cube.slices(['projection_y_coordinate',
                                     'projection_x_coordinate']):
            pctcubelist.append(
                self.pad_and_unpad_cube(slice_2d, kernel))
        result = pctcubelist.merge_cube()
        exception_coordinates = (
            find_dimension_coordinate_mismatch(
                cube, result, two_way_mismatch=False))
        result = (
            check_cube_coordinates(
                cube, result, exception_coordinates=exception_coordinates))

        # Arrange cube, so that the coordinate order is:
        # realization, percentile, other coordinates.
        required_order = []
        if result.coords("realization"):
            if result.coords("realization", dimensions=[]):
                result = iris.util.new_axis(result, "realization")
            required_order.append(result.coord_dims("realization")[0])
        if result.coords("percentiles_over_neighbourhood"):
            required_order.append(
                result.coord_dims("percentiles_over_neighbourhood")[0])
        other_coords = []
        for coord in result.dim_coords:
            if coord.name() not in ["realization",
                                    "percentiles_over_neighbourhood"]:
                other_coords.append(result.coord_dims(coord.name())[0])
        required_order.extend(other_coords)
        result.transpose(required_order)

        return result
Example #9
    def process(
        self, initial_cube, ucube, vcube, orographic_enhancement, attributes_dict=None
    ):
        """
        Extrapolate the initial precipitation field using the velocities
        provided to the required forecast lead times

        Args:
            initial_cube (iris.cube.Cube):
                Cube of precipitation at initial time
            ucube (iris.cube.Cube):
                x-advection velocities
            vcube (iris.cube.Cube):
                y-advection velocities
            orographic_enhancement (iris.cube.Cube):
                Cube containing orographic enhancement fields at all required
                lead times
            attributes_dict (dict or None):
                Dictionary containing information for amending the attributes
                of the output cube.

        Returns:
            forecast_cubes (list):
                List of extrapolated iris.cube.Cube instances at the required
                lead times (including T+0 / analysis time)
        """
        # ensure input cube is suitable for advection
        if "rate" not in initial_cube.name():
            msg = "{} is not a precipitation rate cube"
            raise ValueError(msg.format(initial_cube.name()))
        check_if_grid_is_equal_area(initial_cube)

        self.analysis_cube = initial_cube.copy()
        self.required_units = initial_cube.units
        self.orogenh = orographic_enhancement

        # get unmasked precipitation rate array with orographic enhancement
        # subtracted to input into advection
        precip_rate = self._get_advectable_precip_rate()

        # calculate displacement in grid squares per time step
        displacement = self._generate_displacement_array(ucube, vcube)

        # PySteps prints a message on import to stdout - trap this
        # This should be removed for PySteps v1.1.0 which has a configuration setting
        # for this
        # Import here to minimise dependencies
        with redirect_stdout():
            from pysteps.extrapolation.semilagrangian import extrapolate
        # call pysteps extrapolation method
        all_forecasts = extrapolate(
            precip_rate, displacement, self.num_timesteps, allow_nonfinite_values=True
        )

        # repackage data as IMPROVER masked cubes
        forecast_cubes = self._generate_forecast_cubes(all_forecasts, attributes_dict)

        return forecast_cubes
Example #10
    def process(self, cube: Cube, mask_cube: Optional[Cube] = None) -> Cube:
        """
        Call the methods required to apply a neighbourhood processing to a cube.

        Applies neighbourhood processing to each 2D x-y-slice of the input cube.

        If the input cube is masked the neighbourhood sum is calculated from
        the total of the unmasked data in the neighbourhood around each grid
        point. The neighbourhood mean is then calculated by dividing the
        neighbourhood sum at each grid point by the total number of valid grid
        points that contributed to that sum. If a mask_cube is provided then
        this is used to mask each x-y-slice prior to the neighbourhood sum
        or mean being calculated.


        Args:
            cube:
                Cube containing the array to which the neighbourhood processing
                will be applied.
            mask_cube:
                Cube containing the array to be used as a mask. Zero values in
                this array are taken as points to be masked.

        Returns:
            Cube containing the smoothed field after the
            neighbourhood method has been applied.
        """
        super().process(cube)
        check_if_grid_is_equal_area(cube)

        # If the data is masked, the mask will be processed as well as the
        # original_data * mask array.
        check_radius_against_distance(cube, self.radius)

        grid_cells = distance_to_number_of_grid_cells(cube, self.radius)
        if self.neighbourhood_method == "circular":
            self.kernel = circular_kernel(grid_cells, self.weighted_mode)
        elif self.neighbourhood_method == "square":
            self.nb_size = 2 * grid_cells + 1

        try:
            mask_cube_data = mask_cube.data
        except AttributeError:
            mask_cube_data = None

        result_slices = CubeList()
        for cube_slice in cube.slices(
            [cube.coord(axis="y"), cube.coord(axis="x")]):
            cube_slice.data = self._calculate_neighbourhood(
                cube_slice.data, mask_cube_data)
            result_slices.append(cube_slice)
        neighbourhood_averaged_cube = result_slices.merge_cube()

        return neighbourhood_averaged_cube
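The docstring above defines the masked neighbourhood mean as a neighbourhood sum divided by the count of valid contributing points. A self-contained NumPy/SciPy sketch of that idea follows; it is not the plugin's actual _calculate_neighbourhood implementation.

# Sketch of a masked neighbourhood mean as described in the docstring:
# sum the valid data in each window, then divide by the number of valid
# points that contributed to that sum.
import numpy as np
from scipy.ndimage import convolve

def masked_neighbourhood_mean(data, valid_mask, size=3):
    kernel = np.ones((size, size))
    valid = valid_mask.astype(float)  # 1 = valid, 0 = masked
    nbhood_sum = convolve(data * valid, kernel, mode="constant", cval=0.0)
    nbhood_count = convolve(valid, kernel, mode="constant", cval=0.0)
    with np.errstate(invalid="ignore", divide="ignore"):
        mean = nbhood_sum / nbhood_count
    # Points with no valid neighbours are left undefined (NaN).
    return np.where(nbhood_count > 0, mean, np.nan)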
Example #11
    def process(self, cube: Cube) -> Cube:
        """
        Method to apply a circular kernel to the data within the input cube in
        order to derive percentiles over the kernel.

        Args:
            cube:
                Cube containing array to apply processing to.

        Returns:
            Cube containing the percentile fields.
            Has percentile as an added dimension.
        """
        super().process(cube)
        if np.ma.is_masked(cube.data):
            msg = ("The use of masked input cubes is not yet implemented in"
                   " the GeneratePercentilesFromANeighbourhood plugin.")
            raise NotImplementedError(msg)

        # Check that the cube has an equal area grid.
        check_if_grid_is_equal_area(cube)
        # Take data array and identify X and Y axes indices
        grid_cell = distance_to_number_of_grid_cells(cube, self.radius)
        check_radius_against_distance(cube, self.radius)
        kernel = circular_kernel(grid_cell, weighted_mode=False)
        # Loop over each 2D slice to reduce memory demand and derive
        # percentiles on the kernel. Will return an extra dimension.
        pctcubelist = iris.cube.CubeList()
        for slice_2d in cube.slices(
            ["projection_y_coordinate", "projection_x_coordinate"]):
            pctcubelist.append(self.pad_and_unpad_cube(slice_2d, kernel))

        result = pctcubelist.merge_cube()
        exception_coordinates = find_dimension_coordinate_mismatch(
            cube, result, two_way_mismatch=False)
        result = check_cube_coordinates(
            cube, result, exception_coordinates=exception_coordinates)

        # Arrange cube, so that the coordinate order is:
        # realization, percentile, other coordinates.
        required_order = []
        if result.coords("realization", dim_coords=True):
            required_order.append(result.coord_dims("realization")[0])
        if result.coords("percentile", dim_coords=True):
            required_order.append(result.coord_dims("percentile")[0])
        other_coords = []
        for coord in result.dim_coords:
            if coord.name() not in ["realization", "percentile"]:
                other_coords.append(result.coord_dims(coord.name())[0])
        required_order.extend(other_coords)
        result.transpose(required_order)

        return result
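For reference, a flat (unweighted) circular kernel of the kind circular_kernel(grid_cell, weighted_mode=False) plausibly returns can be built in a few lines. This is an assumed equivalent for illustration, not the library function itself.

# Assumed form of a flat circular kernel of radius r grid cells:
# 1.0 inside the circle, 0.0 outside.
import numpy as np

def flat_circular_kernel(r):
    y, x = np.ogrid[-r:r + 1, -r:r + 1]
    return (x * x + y * y <= r * r).astype(np.float32)

print(flat_circular_kernel(2))
# [[0. 0. 1. 0. 0.]
#  [0. 1. 1. 1. 0.]
#  [1. 1. 1. 1. 1.]
#  [0. 1. 1. 1. 0.]
#  [0. 0. 1. 0. 0.]]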
Example #12
 def test_non_equal_xy_spacing_override(self):
     """Test that the requirement for equal x and y spacing can be
     overridden"""
     self.cube.coord(axis="x").points = 2 * self.cube.coord(axis="x").points
     self.assertIsNone(
         check_if_grid_is_equal_area(self.cube,
                                     require_equal_xy_spacing=False))
Example #13
 def test_allow_negative_stride(self):
     """Test no errors raised if cube has negative stride in x and y axes"""
     coord_points_x = np.arange(-20000, -52000.0, -2000)
     coord_points_y = np.arange(30000.0, -2000, -2000)
     self.cube.coord("projection_x_coordinate").points = coord_points_x
     self.cube.coord("projection_y_coordinate").points = coord_points_y
     self.assertIsNone(check_if_grid_is_equal_area(self.cube))
Example #14
 def test_lat_lon_failure(self):
     """Test that a lat/lon cube fails"""
     msg = "Unable to convert from"
     with self.assertRaisesRegex(ValueError, msg):
         check_if_grid_is_equal_area(self.lat_lon_cube)
Example #15
    def process(self, cube1, cube2, boxsize=30):
        """
        Extracts data from input cubes, performs dimensionless advection
        displacement calculation, and creates new cubes with advection
        velocities in metres per second.  Each input cube should have precisely
        two non-scalar dimension coordinates (spatial x/y), and are expected to
        be in a projection such that grid spacing is the same (or very close)
        at all points within the spatial domain.  Each input cube must also
        have a scalar "time" coordinate.

        Args:
            cube1 (iris.cube.Cube):
                2D cube from (earlier) time 1
            cube2 (iris.cube.Cube):
                2D cube from (later) time 2

        Kwargs:
            boxsize (int):
                The side length of the square box over which to solve the
                optical flow constraint.  This should be greater than the
                data smoothing radius.

        Returns:
            (tuple) : tuple containing:
                **ucube** (iris.cube.Cube):
                    2D cube of advection velocities in the x-direction
                **vcube** (iris.cube.Cube):
                    2D cube of advection velocities in the y-direction
        """
        # clear existing parameters
        self.data_smoothing_radius = None
        self.boxsize = None

        # check the nature of the input cubes: raise an error if the data
        # types differ, and a warning if they are not both precipitation
        if cube1.name() != cube2.name():
            msg = 'Input cubes contain different data types {} and {}'
            raise ValueError(msg.format(cube1.name(), cube2.name()))

        data_name = cube1.name().lower()
        if "rain" not in data_name and "precipitation" not in data_name:
            msg = ('Input data are of non-precipitation type {}.  Plugin '
                   'parameters have not been tested and may not be appropriate'
                   ' for this variable.')
            warnings.warn(msg.format(cube1.name()))

        # check cubes have exactly two spatial dimension coordinates and a
        # scalar time coordinate
        check_input_coords(cube1, require_time=True)
        check_input_coords(cube2, require_time=True)

        # check cube dimensions match
        if (cube1.coord(axis="x") != cube2.coord(axis="x") or
                cube1.coord(axis="y") != cube2.coord(axis="y")):
            raise InvalidCubeError("Input cubes on unmatched grids")

        # check grids are equal area
        check_if_grid_is_equal_area(cube1)
        check_if_grid_is_equal_area(cube2)

        # convert units to mm/hr as these avoid the need to manipulate tiny
        # decimals
        try:
            cube1 = cube1.copy()
            cube2 = cube2.copy()
            cube1.convert_units('mm/hr')
            cube2.convert_units('mm/hr')
        except ValueError as err:
            msg = ('Input data are in units that cannot be converted to mm/hr '
                   'which are the required units for use with optical flow.')
            raise ValueError(msg) from err

        # check time difference is positive
        time1 = (cube1.coord("time").units).num2date(
            cube1.coord("time").points[0])
        time2 = (cube2.coord("time").units).num2date(
            cube2.coord("time").points[0])
        cube_time_diff = time2 - time1
        if cube_time_diff.total_seconds() <= 0:
            msg = "Expected positive time difference cube2 - cube1: got {} s"
            raise InvalidCubeError(msg.format(cube_time_diff.total_seconds()))

        # if time difference is greater 15 minutes, increase data smoothing
        # radius so that larger advection displacements can be resolved
        if cube_time_diff.total_seconds() > 900:
            data_smoothing_radius_km = self.data_smoothing_radius_km * (
                cube_time_diff.total_seconds()/900.)
        else:
            data_smoothing_radius_km = self.data_smoothing_radius_km

        # calculate smoothing radius in grid square units
        new_coord = cube1.coord(axis='x').copy()
        new_coord.convert_units('km')
        grid_length_km = np.float32(np.diff((new_coord).points)[0])
        data_smoothing_radius = \
            int(data_smoothing_radius_km / grid_length_km)

        # Fail verbosely if data smoothing radius is too small and will
        # trigger silent failures downstream
        if data_smoothing_radius < 3:
            msg = ("Input data smoothing radius {} too small (minimum 3 "
                   "grid squares)")
            raise ValueError(msg.format(data_smoothing_radius))

        # Fail if self.boxsize is less than data smoothing radius
        self.boxsize = boxsize
        if self.boxsize < data_smoothing_radius:
            msg = ("Box size {} too small (should not be less than data "
                   "smoothing radius {})")
            raise ValueError(
                msg.format(self.boxsize, data_smoothing_radius))

        # extract 2-dimensional data arrays
        data1 = next(cube1.slices([cube1.coord(axis='y'),
                                   cube1.coord(axis='x')])).data
        data2 = next(cube2.slices([cube2.coord(axis='y'),
                                   cube2.coord(axis='x')])).data

        # fill any mask with 0 values so fill_values are not spread into the
        # domain when smoothing the fields.
        if np.ma.is_masked(data1):
            data1 = data1.filled(0)
        if np.ma.is_masked(data2):
            data2 = data2.filled(0)

        # if input arrays have no non-zero values, set velocities to zero here
        # and raise a warning
        if (np.allclose(data1, np.zeros(data1.shape)) or
                np.allclose(data2, np.zeros(data2.shape))):
            msg = ("No non-zero data in input fields: setting optical flow "
                   "velocities to zero")
            warnings.warn(msg)
            ucomp = np.zeros(data1.shape, dtype=np.float32)
            vcomp = np.zeros(data2.shape, dtype=np.float32)
        else:
            # calculate dimensionless displacement between the two input fields
            ucomp, vcomp = self.process_dimensionless(data1, data2, 1, 0,
                                                      data_smoothing_radius)
            # convert displacements to velocities in metres per second
            for vel in [ucomp, vcomp]:
                vel *= np.float32(1000.*grid_length_km)
                vel /= cube_time_diff.total_seconds()

        # create velocity output cubes based on metadata from later input cube
        x_coord = cube2.coord(axis="x")
        y_coord = cube2.coord(axis="y")
        t_coord = cube2.coord("time")

        ucube = iris.cube.Cube(
            ucomp, long_name="precipitation_advection_x_velocity",
            units="m s-1", dim_coords_and_dims=[(y_coord, 0), (x_coord, 1)])
        ucube.add_aux_coord(t_coord)
        ucube = amend_metadata(ucube, **self.metadata_dict)

        vcube = iris.cube.Cube(
            vcomp, long_name="precipitation_advection_y_velocity",
            units="m s-1", dim_coords_and_dims=[(y_coord, 0), (x_coord, 1)])
        vcube.add_aux_coord(t_coord)
        vcube = amend_metadata(vcube, **self.metadata_dict)
        return ucube, vcube
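The closing unit conversion in the example (dimensionless displacement in grid squares per time step into metres per second) is worth spelling out. The numbers below are assumed values for illustration:

# Worked example of the velocity conversion in the loop above, assuming
# a 2 km grid spacing and a 15 minute (900 s) separation between cubes.
grid_length_km = 2.0
time_diff_seconds = 900.0
displacement_grid_squares = 3.0  # example optical-flow output
velocity_m_per_s = (displacement_grid_squares * 1000.0 * grid_length_km
                    / time_diff_seconds)
print(round(velocity_m_per_s, 2))  # 6.67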
Example #16
 def test_non_equal_area_grid(self):
     """Test that an exception is raised if the grid is not equal area."""
     cube = set_up_cube()
     msg = "The size of the intervals along the x and y axis"
     with self.assertRaisesRegex(ValueError, msg):
         check_if_grid_is_equal_area(cube)
Example #17
 def test_non_equal_intervals_along_axis(self):
     """Test that an exception is raised if intervals along the x or y
     axis are not equal."""
     cube = set_up_cube()
     msg = "Intervals between points along the "
     with self.assertRaisesRegex(ValueError, msg):
         check_if_grid_is_equal_area(cube)
Example #18
def main(argv=None):
    """Load in arguments and ensure they are set correctly.
       Then load in the data to blend and calculate default weights
       using the method chosen before carrying out the blending."""
    parser = ArgParser(
        description='Calculate the default weights to apply in weighted '
        'blending plugins using the ChooseDefaultWeightsLinear or '
        'ChooseDefaultWeightsNonLinear plugins. Then apply these '
        'weights to the dataset using the BasicWeightedAverage plugin.'
        ' Required for ChooseDefaultWeightsLinear: y0val and ynval.'
        ' Required for ChooseDefaultWeightsNonLinear: cval.'
        ' Required for ChooseWeightsLinear with dict: wts_dict.')

    parser.add_argument('--wts_calc_method',
                        metavar='WEIGHTS_CALCULATION_METHOD',
                        choices=['linear', 'nonlinear', 'dict'],
                        default='linear',
                        help='Method to use to calculate '
                        'weights used in blending. "linear" (default): '
                        'calculate linearly varying blending weights. '
                        '"nonlinear": calculate blending weights that decrease'
                        ' exponentially with increasing blending coordinate. '
                        '"dict": calculate weights using a dictionary passed '
                        'in as a command line argument.')

    parser.add_argument('coordinate',
                        type=str,
                        metavar='COORDINATE_TO_AVERAGE_OVER',
                        help='The coordinate over which the blending '
                        'will be applied.')
    parser.add_argument('--coordinate_unit',
                        metavar='UNIT_STRING',
                        default='hours since 1970-01-01 00:00:00',
                        help='Units for blending coordinate. Default= '
                        'hours since 1970-01-01 00:00:00')
    parser.add_argument('--calendar',
                        metavar='CALENDAR',
                        help='Calendar for time coordinate. Default=gregorian')
    parser.add_argument('--cycletime',
                        metavar='CYCLETIME',
                        type=str,
                        help='The forecast reference time to be used after '
                        'blending has been applied, in the format '
                        'YYYYMMDDTHHMMZ. If not provided, the blended file '
                        'will take the latest available forecast reference '
                        'time from the input cubes supplied.')
    parser.add_argument('--model_id_attr',
                        metavar='MODEL_ID_ATTR',
                        type=str,
                        default="mosg__model_configuration",
                        help='The name of the netCDF file attribute to be '
                        'used to identify the source model for '
                        'multi-model blends. Default assumes Met Office '
                        'model metadata. Must be present on all input '
                        'files if blending over models.')
    parser.add_argument('--spatial_weights_from_mask',
                        action='store_true',
                        default=False,
                        help='If set this option will result in the generation'
                        ' of spatially varying weights based on the'
                        ' masks of the data we are blending. The'
                        ' one dimensional weights are first calculated '
                        ' using the chosen weights calculation method,'
                        ' but the weights will then be adjusted spatially'
                        ' based on where there is masked data in the data'
                        ' we are blending. The spatial weights are'
                        ' calculated using the'
                        ' SpatiallyVaryingWeightsFromMask plugin.')
    parser.add_argument('weighting_mode',
                        metavar='WEIGHTED_BLEND_MODE',
                        choices=['weighted_mean', 'weighted_maximum'],
                        help='The method used in the weighted blend. '
                        '"weighted_mean": calculate a normal weighted'
                        ' mean across the coordinate. '
                        '"weighted_maximum": multiplies the values in the'
                        ' coordinate by the weights, and then takes the'
                        ' maximum.')

    parser.add_argument('input_filepaths',
                        metavar='INPUT_FILES',
                        nargs="+",
                        help='Paths to input files to be blended.')
    parser.add_argument('output_filepath',
                        metavar='OUTPUT_FILE',
                        help='The output path for the processed NetCDF.')

    spatial = parser.add_argument_group(
        'Spatial weights from mask options',
        'Options for calculating the spatial weights using the '
        'SpatiallyVaryingWeightsFromMask plugin.')
    spatial.add_argument('--fuzzy_length',
                         metavar='FUZZY_LENGTH',
                         type=float,
                         default=20000,
                         help='When calculating spatially varying weights we'
                         ' can smooth the weights so that areas close to'
                         ' areas that are masked have lower weights than'
                         ' those further away. This fuzzy length controls'
                         ' the scale over which the weights are smoothed.'
                         ' The fuzzy length is in terms of m, the'
                         ' default is 20km. This distance is then'
                         ' converted into a number of grid squares,'
                         ' which does not have to be an integer. Assumes'
                         ' the grid spacing is the same in the x and y'
                         ' directions, and raises an error if this is not'
                         ' true. See SpatiallyVaryingWeightsFromMask for'
                         ' more detail.')

    linear = parser.add_argument_group(
        'linear weights options', 'Options for the linear weights '
        'calculation in '
        'ChooseDefaultWeightsLinear')
    linear.add_argument('--y0val',
                        metavar='LINEAR_STARTING_POINT',
                        type=float,
                        help='The relative value of the weighting start point '
                        '(lowest value of blend coord) for choosing default '
                        'linear weights. This must be a positive float or 0.')
    linear.add_argument('--ynval',
                        metavar='LINEAR_END_POINT',
                        type=float,
                        help='The relative value of the weighting '
                        'end point (highest value of blend coord) for choosing'
                        ' default linear weights. This must be a positive '
                        'float or 0.  Note that if blending over forecast '
                        'reference time, ynval >= y0val would normally be '
                        'expected (to give greater weight to the more recent '
                        'forecast).')

    nonlinear = parser.add_argument_group(
        'nonlinear weights options', 'Options for the non-linear '
        'weights calculation in '
        'ChooseDefaultWeightsNonLinear')
    nonlinear.add_argument('--cval',
                           metavar='NON_LINEAR_FACTOR',
                           type=float,
                           help='Factor used to determine how skewed the '
                           'non linear weights will be. '
                           'A value of 1 implies equal weighting. If not '
                           'set, a default value of cval=0.85 is set.')

    wts_dict = parser.add_argument_group(
        'dict weights options', 'Options for linear weights to be '
        'calculated based on parameters '
        'read from a json file dict')
    wts_dict.add_argument('--wts_dict',
                          metavar='WEIGHTS_DICTIONARY',
                          help='Path to json file containing dictionary from '
                          'which to calculate blending weights. Dictionary '
                          'format is as specified in the improver.blending.'
                          'weights.ChooseWeightsLinear plugin.')
    wts_dict.add_argument('--weighting_coord',
                          metavar='WEIGHTING_COORD',
                          default='forecast_period',
                          help='Name of '
                          'coordinate over which linear weights should be '
                          'scaled. This coordinate must be available in the '
                          'weights dictionary.')

    args = parser.parse_args(args=argv)

    # if the linear weights method is called with non-linear args or vice
    # versa, exit with error
    if (args.wts_calc_method == "linear") and args.cval:
        parser.wrong_args_error('cval', 'linear')
    if ((args.wts_calc_method == "nonlinear")
            and np.any([args.y0val, args.ynval])):
        parser.wrong_args_error('y0val, ynval', 'non-linear')
    if (args.wts_calc_method == "dict") and not args.wts_dict:
        parser.error('Dictionary is required if --wts_calc_method="dict"')

    # set blending coordinate units
    if "time" in args.coordinate:
        coord_unit = Unit(args.coordinate_unit, args.calendar)
    elif args.coordinate_unit != 'hours since 1970-01-01 00:00:00':
        coord_unit = args.coordinate_unit
    else:
        coord_unit = 'no_unit'

    # For blending across models, only blending across "model_id" is directly
    # supported. This is because the blending coordinate must be sortable, in
    # order to ensure that the data cube and the weights cube have coordinates
    # in the same order for blending. Whilst the model_configuration is
    # sortable itself, as it is associated with model_id, which is the
    # dimension coordinate, sorting the model_configuration coordinate can
    # result in the model_id coordinate becoming non-monotonic. As dimension
    # coordinates must be monotonic, this leads to the model_id coordinate
    # being demoted to an auxiliary coordinate. Therefore, for simplicity
    # model_id is used as the blending coordinate, instead of
    # model_configuration.
    # TODO: Support model_configuration as a blending coordinate directly.
    if args.coordinate == "model_configuration":
        blend_coord = "model_id"
        dict_coord = "model_configuration"
    else:
        blend_coord = args.coordinate
        dict_coord = args.coordinate

    # load cubes to be blended
    cubelist = load_cubelist(args.input_filepaths)

    # determine whether or not to equalise forecast periods for model
    # blending weights calculation
    weighting_coord = (args.weighting_coord
                       if args.weighting_coord else "forecast_period")

    # prepare cubes for weighted blending
    merger = MergeCubesForWeightedBlending(blend_coord,
                                           weighting_coord=weighting_coord,
                                           model_id_attr=args.model_id_attr)
    cube = merger.process(cubelist, cycletime=args.cycletime)

    # if the coord for blending does not exist or has only one value,
    # update metadata only
    coord_names = [coord.name() for coord in cube.coords()]
    if (blend_coord not in coord_names) or (len(
            cube.coord(blend_coord).points) == 1):
        result = cube.copy()
        conform_metadata(result, cube, blend_coord, cycletime=args.cycletime)
        # raise a warning if this happened because the blend coordinate
        # doesn't exist
        if blend_coord not in coord_names:
            warnings.warn('Blend coordinate {} is not present on input '
                          'data'.format(blend_coord))

    # otherwise, calculate weights and blend across specified dimension
    else:
        weights = calculate_blending_weights(
            cube,
            blend_coord,
            args.wts_calc_method,
            wts_dict=args.wts_dict,
            weighting_coord=args.weighting_coord,
            coord_unit=coord_unit,
            y0val=args.y0val,
            ynval=args.ynval,
            cval=args.cval,
            dict_coord=dict_coord)

        if args.spatial_weights_from_mask:
            check_if_grid_is_equal_area(cube)
            grid_cells_x, _ = convert_distance_into_number_of_grid_cells(
                cube, args.fuzzy_length, int_grid_cells=False)
            SpatialWeightsPlugin = SpatiallyVaryingWeightsFromMask(
                grid_cells_x)
            weights = SpatialWeightsPlugin.process(cube, weights, blend_coord)

        # blend across specified dimension
        BlendingPlugin = WeightedBlendAcrossWholeDimension(
            blend_coord, args.weighting_mode, cycletime=args.cycletime)
        result = BlendingPlugin.process(cube, weights=weights)

    save_netcdf(result, args.output_filepath)
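A hedged sketch of invoking this entry point programmatically; the file paths are placeholders, and the argument set follows the parser defined above (y0val and ynval accompany the default linear weighting method).

# Hypothetical call with placeholder paths: blend two forecast cycles
# over forecast_reference_time using a weighted mean and linear weights.
main(argv=[
    "forecast_reference_time",  # COORDINATE_TO_AVERAGE_OVER
    "weighted_mean",            # WEIGHTED_BLEND_MODE
    "forecast_cycle1.nc",       # INPUT_FILES (placeholder paths)
    "forecast_cycle2.nc",
    "blended_output.nc",        # OUTPUT_FILE
    "--y0val", "1.0", "--ynval", "4.0",
])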
Example #19
 def test_equal_area(self):
     """Test an that no exception is raised if the x and y coordinates
     are on an equal area grid."""
     cube = set_up_cube()
     self.assertIsNone(check_if_grid_is_equal_area(cube))
Example #20
 def test_non_equal_xy_spacing(self):
     """Test that the cubes have an equal areas grid"""
     self.cube.coord(axis="x").points = 2 * self.cube.coord(axis="x").points
     msg = "Grid does not have equal spacing in x and y"
     with self.assertRaisesRegex(ValueError, msg):
         check_if_grid_is_equal_area(self.cube)
Example #21
    def run(self,
            cube: Cube,
            radius: float,
            mask_cube: Optional[Cube] = None) -> Cube:
        """
        Method to apply a circular kernel to the data within the input cube in
        order to derive percentiles over the kernel.

        Args:
            cube:
                Cube containing array to apply processing to.
            radius:
                Radius in metres for use in specifying the number of
                grid cells used to create a circular neighbourhood.
            mask_cube:
                Cube containing the array to be used as a mask.

        Returns:
            Cube containing the percentile fields.
            Has percentile as an added dimension.
        """
        if mask_cube is not None:
            msg = ("The use of a mask cube with a circular kernel is not "
                   "yet implemented.")
            raise NotImplementedError(msg)

        # Check that the cube has an equal area grid.
        check_if_grid_is_equal_area(cube)
        # Take data array and identify X and Y axes indices
        grid_cell = distance_to_number_of_grid_cells(cube, radius)
        check_radius_against_distance(cube, radius)
        ranges_xy = np.array((grid_cell, grid_cell))
        kernel = circular_kernel(ranges_xy, grid_cell, weighted_mode=False)
        # Loop over each 2D slice to reduce memory demand and derive
        # percentiles on the kernel. Will return an extra dimension.
        pctcubelist = iris.cube.CubeList()
        for slice_2d in cube.slices(
            ["projection_y_coordinate", "projection_x_coordinate"]):
            pctcubelist.append(self.pad_and_unpad_cube(slice_2d, kernel))

        result = pctcubelist.merge_cube()
        exception_coordinates = find_dimension_coordinate_mismatch(
            cube, result, two_way_mismatch=False)
        result = check_cube_coordinates(
            cube, result, exception_coordinates=exception_coordinates)

        # Arrange cube, so that the coordinate order is:
        # realization, percentile, other coordinates.
        required_order = []
        if result.coords("realization"):
            if result.coords("realization", dimensions=[]):
                result = iris.util.new_axis(result, "realization")
            required_order.append(result.coord_dims("realization")[0])
        if result.coords("percentile"):
            required_order.append(result.coord_dims("percentile")[0])
        other_coords = []
        for coord in result.dim_coords:
            if coord.name() not in ["realization", "percentile"]:
                other_coords.append(result.coord_dims(coord.name())[0])
        required_order.extend(other_coords)
        result.transpose(required_order)

        return result
Example #22
 def test_equal_area(self):
     """Test an that no exception is raised if the x and y coordinates
     are on an equal area grid"""
     self.assertIsNone(check_if_grid_is_equal_area(self.cube))