Example #1
0
 def test_metadata(self):
     """Check that attributes supplied via the metadata dictionary are
     copied onto both output velocity cubes."""
     expected_attributes = {
         "mosg__grid_version": "1.0.0",
         "mosg__model_configuration": "nc_det",
         "source": "Met Office Nowcast",
         "institution": "Met Office",
         "title": "Nowcast on UK 2 km Standard Grid",
     }
     metadata_dict = {"attributes": expected_attributes}
     plugin = OpticalFlow(iterations=20, metadata_dict=metadata_dict)
     plugin.data_smoothing_radius_km = 6.
     output_cubes = plugin.process(self.cube1, self.cube2, boxsize=3)
     for cube in output_cubes:
         self.assertEqual(cube.attributes, expected_attributes)
Example #2
0
 def test_error_small_kernel(self):
     """Test failure if data smoothing radius is too small"""
     # A 3 km radius on this grid reduces to a 1-pixel smoothing kernel,
     # which the plugin rejects.
     plugin = OpticalFlow(data_smoothing_radius_km=3, boxsize_km=6)
     msg = "Input data smoothing radius 1 too small "
     # assertRaisesRegexp is a deprecated alias removed in Python 3.12;
     # use assertRaisesRegex.
     with self.assertRaisesRegex(ValueError, msg):
         _ = plugin.process(self.cube1, self.cube2)
Example #3
0
class Test_process(IrisTest):
    """Test the process method of the OpticalFlow plugin using a small
    synthetic rainfall feature that moves diagonally between two frames."""
    def setUp(self):
        """Set up plugin and input rainfall-like cubes"""
        self.plugin = OpticalFlow(data_smoothing_radius_km=6,
                                  boxsize_km=6,
                                  iterations=10)

        # 16-point grid with 2 km spacing
        coord_points = 2 * np.arange(16)
        x_coord = DimCoord(coord_points, 'projection_x_coordinate', units='km')
        y_coord = DimCoord(coord_points, 'projection_y_coordinate', units='km')

        rainfall_block = np.array([[1., 1., 1., 1., 1., 1., 1.],
                                   [1., 2., 2., 2., 2., 1., 1.],
                                   [1., 2., 3., 3., 2., 1., 1.],
                                   [1., 2., 3., 3., 2., 1., 1.],
                                   [1., 2., 2., 2., 2., 1., 1.],
                                   [1., 1., 1., 1., 1., 1., 1.],
                                   [1., 1., 1., 1., 1., 1., 1.]])

        # The rainfall block is shifted by one grid square (+y, -x) between
        # cube1 and cube2 to give a known diagonal motion.
        data1 = np.zeros((16, 16))
        data1[1:8, 2:9] = rainfall_block
        self.cube1 = iris.cube.Cube(data1,
                                    standard_name='rainfall_rate',
                                    units='mm h-1',
                                    dim_coords_and_dims=[(y_coord, 0),
                                                         (x_coord, 1)])
        # time1: [datetime.datetime(2018, 2, 20, 4, 0)]
        time1 = DimCoord(1519099200,
                         standard_name="time",
                         units='seconds since 1970-01-01 00:00:00')
        self.cube1.add_aux_coord(time1)

        data2 = np.zeros((16, 16))
        data2[2:9, 1:8] = rainfall_block
        self.cube2 = iris.cube.Cube(data2,
                                    standard_name='rainfall_rate',
                                    units='mm h-1',
                                    dim_coords_and_dims=[(y_coord, 0),
                                                         (x_coord, 1)])
        # time2: [datetime.datetime(2018, 2, 20, 4, 15)]
        time2 = DimCoord(1519100100,
                         standard_name="time",
                         units='seconds since 1970-01-01 00:00:00')
        self.cube2.add_aux_coord(time2)

    def test_basic(self):
        """Test correct output types and metadata"""
        ucube, vcube = self.plugin.process(self.cube1, self.cube2)
        for cube in [ucube, vcube]:
            self.assertIsInstance(cube, iris.cube.Cube)
            self.assertEqual(
                cube.coord("time")[0],
                self.cube2.coord("time")[0])
            self.assertEqual(cube.units, "m s-1")
            self.assertIn("advection_velocity_", cube.name())

    def test_values(self):
        """Test velocity values are as expected (in m/s)"""
        # Regression values pinned against a previous run of the plugin.
        ucube, vcube = self.plugin.process(self.cube1, self.cube2)
        self.assertAlmostEqual(np.mean(ucube.data), -2.12078369915)
        self.assertAlmostEqual(np.mean(vcube.data), 2.12078369915)

    # NOTE: assertRaisesRegexp is a deprecated alias removed in Python 3.12;
    # the tests below use assertRaisesRegex instead.
    def test_error_small_kernel(self):
        """Test failure if data smoothing radius is too small"""
        plugin = OpticalFlow(data_smoothing_radius_km=3, boxsize_km=6)
        msg = "Input data smoothing radius 1 too small "
        with self.assertRaisesRegex(ValueError, msg):
            _ = plugin.process(self.cube1, self.cube2)

    def test_error_unmatched_coords(self):
        """Test failure if cubes are provided on unmatched grids"""
        cube2 = self.cube2.copy()
        for ax in ["x", "y"]:
            cube2.coord(axis=ax).points = 4 * np.arange(16)
        msg = "Input cubes on unmatched grids"
        with self.assertRaisesRegex(InvalidCubeError, msg):
            _ = self.plugin.process(self.cube1, cube2)

    def test_error_no_time_difference(self):
        """Test failure if two cubes are provided with the same time"""
        msg = "Expected positive time difference "
        with self.assertRaisesRegex(InvalidCubeError, msg):
            _ = self.plugin.process(self.cube1, self.cube1)

    def test_error_negative_time_difference(self):
        """Test failure if cubes are provided in the wrong order"""
        msg = "Expected positive time difference "
        with self.assertRaisesRegex(InvalidCubeError, msg):
            _ = self.plugin.process(self.cube2, self.cube1)

    def test_error_irregular_grid(self):
        """Test failure if cubes have different x/y grid lengths"""
        cube1 = self.cube1.copy()
        cube2 = self.cube2.copy()
        for cube in [cube1, cube2]:
            cube.coord(axis="y").points = 4 * np.arange(16)
        msg = "Input cube has different grid spacing in x and y"
        with self.assertRaisesRegex(InvalidCubeError, msg):
            _ = self.plugin.process(cube1, cube2)
Example #4
0
class Test_process(IrisTest):
    """Test the process method of the OpticalFlow plugin on synthetic
    rainfall cubes built on an equal-area grid."""
    def setUp(self):
        """Set up plugin and input rainfall-like cubes"""
        self.plugin = OpticalFlow(iterations=20)
        # Radius is set directly on the instance rather than via the
        # constructor; float32 to match the cube data precision.
        self.plugin.data_smoothing_radius_km = np.float32(6.0)

        coord_points = 2000 * np.arange(16, dtype=np.float32)  # in metres
        rainfall_block = np.array(
            [
                [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
                [1.0, 2.0, 2.0, 2.0, 2.0, 1.0, 1.0],
                [1.0, 2.0, 3.0, 3.0, 2.0, 1.0, 1.0],
                [1.0, 2.0, 3.0, 3.0, 2.0, 1.0, 1.0],
                [1.0, 2.0, 2.0, 2.0, 2.0, 1.0, 1.0],
                [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
                [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
            ],
            dtype=np.float32,
        )

        # The rainfall block is placed one grid square apart (+y, -x) in the
        # two cubes, giving a known diagonal displacement over 15 minutes.
        data1 = np.zeros((16, 16), dtype=np.float32)
        data1[1:8, 2:9] = rainfall_block
        self.cube1 = set_up_variable_cube(
            data1,
            name="rainfall_rate",
            units="mm h-1",
            spatial_grid="equalarea",
            time=datetime(2018, 2, 20, 4, 0),
            frt=datetime(2018, 2, 20, 4, 0),
        )
        self.cube1.coord(axis="x").points = coord_points
        self.cube1.coord(axis="y").points = coord_points

        data2 = np.zeros((16, 16), dtype=np.float32)
        data2[2:9, 1:8] = rainfall_block
        self.cube2 = set_up_variable_cube(
            data2,
            name="rainfall_rate",
            units="mm h-1",
            spatial_grid="equalarea",
            time=datetime(2018, 2, 20, 4, 15),
            frt=datetime(2018, 2, 20, 4, 15),
        )
        self.cube2.coord(axis="x").points = coord_points
        self.cube2.coord(axis="y").points = coord_points

    def test_basic(self):
        """Test correct output types and metadata"""
        ucube, vcube = self.plugin.process(self.cube1, self.cube2, boxsize=3)
        for cube in [ucube, vcube]:
            self.assertIsInstance(cube, iris.cube.Cube)
            # Output velocities are stamped with the later (cube2) time.
            self.assertEqual(
                cube.coord("time")[0],
                self.cube2.coord("time")[0])
            self.assertEqual(cube.units, "m s-1")
            self.assertIn("precipitation_advection", cube.name())
            self.assertIn("velocity", cube.name())

    def test_values(self):
        """Test velocity values are as expected (in m/s)"""
        # Regression values pinned against a previous run of the plugin.
        ucube, vcube = self.plugin.process(self.cube1, self.cube2, boxsize=3)
        self.assertAlmostEqual(np.mean(ucube.data), -2.1719084)
        self.assertAlmostEqual(np.mean(vcube.data), 2.1719084)

    def test_values_perturbation(self):
        """Test velocity values are as expected when input cubes are presented
        as an older extrapolation forecast and recent observation"""
        # make cube 1 into a forecast with a 15 minute forecast period
        self.cube1.coord("time").points = self.cube2.coord("time").points
        self.cube1.coord("forecast_reference_time").points = (
            self.cube1.coord("forecast_reference_time").points - 15 * 60)
        self.cube1.coord("forecast_period").points = [15 * 60]
        # Expected values match test_values: same displacement, same interval.
        ucube, vcube = self.plugin.process(self.cube1, self.cube2, boxsize=3)
        self.assertAlmostEqual(np.mean(ucube.data), -2.1719084)
        self.assertAlmostEqual(np.mean(vcube.data), 2.1719084)

    def test_values_with_precip_rate_in_m_per_s(self):
        """Test velocity values are as expected (in m/s) when the input
        precipitation rates are in units of m/s rather than the expected
        mm/hr."""
        self.cube1.convert_units("m s-1")
        self.cube2.convert_units("m s-1")
        ucube, vcube = self.plugin.process(self.cube1, self.cube2, boxsize=3)
        self.assertAlmostEqual(np.mean(ucube.data), -2.1719084)
        self.assertAlmostEqual(np.mean(vcube.data), 2.1719084)

    def test_values_with_masked_data(self):
        """Test velocity values are as expected when masked cubes are used as
        input to the tests. This test is to capture behaviour whereby mask
        fill values were being used as valid data. This resulted in far from
        correct velocities being calculated by the optical flow code. Notably
        the velocity fields did not reflect the position of precipitation in
        the input precipitation fields, and the returned velocities were too
        low.

        In this test masked cubes are used and comparable unmasked cubes in
        which there the fill values are included in the field. We expect
        the results to be different, with higher velocities returned for the
        masked cubes.
        """
        mask = np.zeros((16, 16))
        mask[:2, :] = 1
        mask[:, :2] = 1

        # Ensure the masked data points contain a high fill value.
        data1 = self.cube1.data
        data2 = self.cube2.data
        data1[:2, :] = 1.0e36
        data1[:, :2] = 1.0e36
        data2[:2, :] = 1.0e36
        data2[:, :2] = 1.0e36

        masked1 = np.ma.MaskedArray(self.cube1.data, mask=mask)
        masked2 = np.ma.MaskedArray(self.cube2.data, mask=mask)

        masked_cube1 = self.cube1.copy(data=masked1)
        masked_cube2 = self.cube2.copy(data=masked2)
        unmasked_cube1 = self.cube1.copy(data=data1)
        unmasked_cube2 = self.cube2.copy(data=data2)

        ucube_masked, vcube_masked = self.plugin.process(masked_cube1,
                                                         masked_cube2,
                                                         boxsize=3)
        ucube_unmasked, vcube_unmasked = self.plugin.process(unmasked_cube1,
                                                             unmasked_cube2,
                                                             boxsize=3)

        # Masked runs must differ from (and exceed) the unmasked runs, which
        # are contaminated by the 1e36 fill values.
        self.assertAlmostEqual(np.mean(ucube_masked.data), -1.4995803)
        self.assertAlmostEqual(np.mean(vcube_masked.data), 1.4995805)
        self.assertAlmostEqual(np.mean(ucube_unmasked.data), -0.2869996)
        self.assertAlmostEqual(np.mean(vcube_unmasked.data), 0.28699964)

    def test_error_for_unconvertable_units(self):
        """Test that an exception is raised if the input precipitation cubes
        have units that cannot be converted to mm/hr."""
        self.cube1.units = "m"
        self.cube2.units = "m"

        msg = "Input data are in units that cannot be converted to mm/hr"
        with self.assertRaisesRegex(ValueError, msg):
            self.plugin.process(self.cube1, self.cube2, boxsize=3)

    def test_input_cubes_unchanged(self):
        """Test the input precipitation rate cubes are unchanged by use in the
        optical flow plugin. One of the cubes is converted to rates in ms-1
        before use to ensure the cube remains in these units despite the
        default working units within optical flow being mm/hr."""
        self.cube1.convert_units("m s-1")
        cube1_ref = self.cube1.copy()
        cube2_ref = self.cube2.copy()
        _, _ = self.plugin.process(self.cube1, self.cube2, boxsize=3)
        self.assertEqual(self.cube1, cube1_ref)
        self.assertEqual(self.cube2, cube2_ref)

    def test_decrease_time_interval(self):
        """Test that decreasing the time interval between radar frames below
        15 minutes does not alter the smoothing radius. To test this the time
        interval is halved, which should give an answer identical to the values
        test above multiplied by a factor of two."""
        time_unit = self.cube2.coord("time").units
        new_time = time_unit.num2date(self.cube2.coord("time").points[0])
        new_time -= timedelta(seconds=450)
        # Replace the time coordinate wholesale so cube2 is 7.5 min after
        # cube1 rather than 15 min.
        self.cube2.remove_coord("time")
        time_coord = DimCoord(time_unit.date2num(new_time),
                              standard_name="time",
                              units=time_unit)
        self.cube2.add_aux_coord(time_coord)

        ucube, vcube = self.plugin.process(self.cube1, self.cube2, boxsize=3)
        self.assertAlmostEqual(np.mean(ucube.data), -2.1719084 * 2.0)
        self.assertAlmostEqual(np.mean(vcube.data), 2.1719084 * 2.0)

    def test_increase_time_interval(self):
        """Test that increasing the time interval between radar frames above
        15 minutes leads to an increase in the data smoothing radius. In this
        test this will result in a smoothing radius larger than the box size,
        which is not allowed and will raise an exception. The updated radius
        value in this case is 12 km (6 grid squares), exceeding the 3 square
        box size."""
        time_unit = self.cube2.coord("time").units
        new_time = time_unit.num2date(self.cube2.coord("time").points[0])
        new_time += timedelta(seconds=900)
        self.cube2.remove_coord("time")
        time_coord = DimCoord(time_unit.date2num(new_time),
                              standard_name="time",
                              units=time_unit)
        self.cube2.add_aux_coord(time_coord)
        msg = "Box size ([0-9]+) too small"
        with self.assertRaisesRegex(ValueError, msg):
            self.plugin.process(self.cube1, self.cube2, boxsize=3)

    def test_error_small_kernel(self):
        """Test failure if data smoothing radius is too small"""
        self.plugin.data_smoothing_radius_km = 3.0
        msg = "Input data smoothing radius 1 too small "
        with self.assertRaisesRegex(ValueError, msg):
            _ = self.plugin.process(self.cube1, self.cube2)

    def test_error_small_box(self):
        """Test failure if box size is smaller than data smoothing radius"""
        msg = "Box size 2 too small"
        with self.assertRaisesRegex(ValueError, msg):
            self.plugin.process(self.cube1, self.cube2, boxsize=2)

    def test_error_unmatched_coords(self):
        """Test failure if cubes are provided on unmatched grids"""
        cube2 = self.cube2.copy()
        for ax in ["x", "y"]:
            cube2.coord(axis=ax).points = 4 * np.arange(16)
        msg = "Input cubes on unmatched grids"
        with self.assertRaisesRegex(InvalidCubeError, msg):
            _ = self.plugin.process(self.cube1, cube2)

    def test_error_no_time_difference(self):
        """Test failure if two cubes are provided with the same time"""
        msg = "Expected positive time difference "
        with self.assertRaisesRegex(InvalidCubeError, msg):
            _ = self.plugin.process(self.cube1, self.cube1)

    def test_error_negative_time_difference(self):
        """Test failure if cubes are provided in the wrong order"""
        msg = "Expected positive time difference "
        with self.assertRaisesRegex(InvalidCubeError, msg):
            _ = self.plugin.process(self.cube2, self.cube1)

    @ManageWarnings(record=True)
    def test_warning_zero_inputs(self, warning_list=None):
        """Test code raises a warning and sets advection velocities to zero
        if there is no rain in the input cubes."""
        null_data = np.zeros(self.cube1.shape)
        cube1 = self.cube1.copy(data=null_data)
        cube2 = self.cube2.copy(data=null_data)
        ucube, vcube = self.plugin.process(cube1, cube2)
        warning_msg = "No non-zero data in input fields"
        self.assertTrue(
            any(item.category == UserWarning for item in warning_list))
        self.assertTrue(any(warning_msg in str(item) for item in warning_list))
        self.assertArrayAlmostEqual(ucube.data, null_data)
        self.assertArrayAlmostEqual(vcube.data, null_data)

    def test_error_nonmatching_inputs(self):
        """Test failure if cubes are of different data types"""
        self.cube1.rename("snowfall_rate")
        msg = "Input cubes contain different data types"
        with self.assertRaisesRegex(ValueError, msg):
            self.plugin.process(self.cube1, self.cube2)

    @ManageWarnings(record=True)
    def test_warning_nonprecip_inputs(self, warning_list=None):
        """Test code raises a warning if input cubes have
        non-rain variable names"""
        self.cube1.rename("snowfall_rate")
        self.cube2.rename("snowfall_rate")
        _, _ = self.plugin.process(self.cube1, self.cube2, boxsize=3)
        warning_msg = "Input data are of non-precipitation type"
        self.assertTrue(
            any(item.category == UserWarning for item in warning_list))
        self.assertTrue(any(warning_msg in str(item) for item in warning_list))
0
def process(original_cube_list,
            orographic_enhancement_cube=None,
            metadata_dict=None,
            ofc_box_size=30,
            smart_smoothing_iterations=100,
            extrapolate=False,
            max_lead_time=360,
            lead_time_interval=15):
    """Calculates optical flow and can (optionally) extrapolate data.

    Calculates optical flow components from input fields and (optionally)
    extrapolate to required lead times.

    Args:
        original_cube_list (iris.cube.CubeList):
            Cubelist from which to calculate optical flow velocities.
            The cubes require a 'time' coordinate on which they are sorted,
            so the order of cubes does not matter.
        orographic_enhancement_cube (iris.cube.Cube):
            Cube containing the orographic enhancement fields.
            Default is None.
        metadata_dict (dict):
            Dictionary containing required changes to the metadata.
            Information describing the intended contents of the dictionary is
            available in improver.utilities.cube_metadata.amend_metadata.
            Every output cube will have the metadata_dict applied.
            Default is None.
        ofc_box_size (int):
            Size of square 'box' (in grid spaces) within which to solve
            the optical flow equations.
            Default is 30.
        smart_smoothing_iterations (int):
            Number of iterations to perform in enforcing smoothness constraint
            for optical flow velocities.
            Default is 100.
        extrapolate (bool):
            If True, advects current data forward to specified lead times.
            Default is False.
        max_lead_time (int):
            Maximum lead time required (mins). Ignored unless extrapolate is
            True.
            Default is 360.
        lead_time_interval (int):
            Interval between required lead times (mins). Ignored unless
            extrapolate is True.
            Default is 15.

    Returns:
        (tuple): tuple containing:
            **forecast_cubes** (list<Cube>):
                List of extrapolated cubes if extrapolate is True, else an
                empty list.
            **u_and_v_mean** (list<Cube>):
                List of the umean and vmean cubes.

    Raises:
        ValueError:
            If no orographic enhancement cube is provided but an input cube
            is named 'precipitation_rate'.

    """
    if orographic_enhancement_cube:
        cube_list = ApplyOrographicEnhancement("subtract").process(
            original_cube_list, orographic_enhancement_cube)
    else:
        cube_list = original_cube_list
        # Precipitation rate inputs are invalid without orographic
        # enhancement to subtract.
        if any("precipitation_rate" in cube.name() for cube in cube_list):
            cube_names = [cube.name() for cube in cube_list]
            msg = ("For precipitation fields, orographic enhancement "
                   "filepaths must be supplied. The names of the cubes "
                   "supplied were: {}".format(cube_names))
            raise ValueError(msg)

    # order input files by validity time
    cube_list.sort(key=lambda x: x.coord("time").points[0])
    time_coord = cube_list[-1].coord("time")

    def _time_mean(component_cubes):
        """Merge velocity component cubes, collapse over time to a mean, and
        re-stamp the result with the validity time of the latest input."""
        mean = component_cubes.merge_cube().collapsed(
            "time", iris.analysis.MEAN)
        mean.coord("time").points = time_coord.points
        mean.coord("time").units = time_coord.units
        return mean

    # calculate optical flow velocities from T-1 to T and T-2 to T-1
    ofc_plugin = OpticalFlow(iterations=smart_smoothing_iterations,
                             metadata_dict=metadata_dict)
    u_cubes = iris.cube.CubeList([])
    v_cubes = iris.cube.CubeList([])
    for older_cube, newer_cube in zip(cube_list[:-1], cube_list[1:]):
        ucube, vcube = ofc_plugin.process(older_cube,
                                          newer_cube,
                                          boxsize=ofc_box_size)
        u_cubes.append(ucube)
        v_cubes.append(vcube)

    # average optical flow velocity components
    u_mean = _time_mean(u_cubes)
    v_mean = _time_mean(v_cubes)

    u_and_v_mean = [u_mean, v_mean]
    forecast_cubes = []
    if extrapolate:
        # generate list of lead times in minutes
        lead_times = np.arange(0, max_lead_time + 1, lead_time_interval)
        forecast_plugin = CreateExtrapolationForecast(
            original_cube_list[-1],
            u_mean,
            v_mean,
            orographic_enhancement_cube=orographic_enhancement_cube,
            metadata_dict=metadata_dict)
        # extrapolate input data to required lead times
        for lead_time in lead_times:
            forecast_cubes.append(
                forecast_plugin.extrapolate(leadtime_minutes=lead_time))

    return forecast_cubes, u_and_v_mean
Example #6
0
class Test_process(IrisTest):
    """Test the process method of the OpticalFlow plugin on a synthetic
    rainfall feature that moves diagonally by one grid square."""
    def setUp(self):
        """Set up plugin and input rainfall-like cubes"""
        self.plugin = OpticalFlow(iterations=20)
        self.plugin.data_smoothing_radius_km = np.float32(6.)

        coord_points = 2 * np.arange(16, dtype=np.float32)
        x_coord = DimCoord(coord_points, 'projection_x_coordinate', units='km')
        y_coord = DimCoord(coord_points, 'projection_y_coordinate', units='km')

        rainfall_block = np.array(
            [[1., 1., 1., 1., 1., 1., 1.], [1., 2., 2., 2., 2., 1., 1.],
             [1., 2., 3., 3., 2., 1., 1.], [1., 2., 3., 3., 2., 1., 1.],
             [1., 2., 2., 2., 2., 1., 1.], [1., 1., 1., 1., 1., 1., 1.],
             [1., 1., 1., 1., 1., 1., 1.]],
            dtype=np.float32)

        data1 = np.zeros((16, 16))
        data1[1:8, 2:9] = rainfall_block
        self.cube1 = iris.cube.Cube(data1,
                                    standard_name='rainfall_rate',
                                    units='mm h-1',
                                    dim_coords_and_dims=[(y_coord, 0),
                                                         (x_coord, 1)])
        # time1: [datetime.datetime(2018, 2, 20, 4, 0)]
        time1 = DimCoord(1519099200,
                         standard_name="time",
                         units='seconds since 1970-01-01 00:00:00')
        self.cube1.add_aux_coord(time1)

        data2 = np.zeros((16, 16))
        data2[2:9, 1:8] = rainfall_block
        self.cube2 = iris.cube.Cube(data2,
                                    standard_name='rainfall_rate',
                                    units='mm h-1',
                                    dim_coords_and_dims=[(y_coord, 0),
                                                         (x_coord, 1)])
        # time2: [datetime.datetime(2018, 2, 20, 4, 15)]
        time2 = DimCoord(1519100100,
                         standard_name="time",
                         units='seconds since 1970-01-01 00:00:00')
        self.cube2.add_aux_coord(time2)

    def test_basic(self):
        """Test correct output types and metadata"""
        ucube, vcube = self.plugin.process(self.cube1, self.cube2, boxsize=3)
        for cube in [ucube, vcube]:
            self.assertIsInstance(cube, iris.cube.Cube)
            self.assertEqual(
                cube.coord("time")[0],
                self.cube2.coord("time")[0])
            self.assertEqual(cube.units, "m s-1")
            self.assertIn("precipitation_advection", cube.name())
            self.assertIn("velocity", cube.name())

    def test_metadata(self):
        """Test correct output types and metadata"""
        metadata_dict = {
            "attributes": {
                "mosg__grid_version": "1.0.0",
                "mosg__model_configuration": "nc_det",
                "source": "Met Office Nowcast",
                "institution": "Met Office",
                "title": "Nowcast on UK 2 km Standard Grid"
            }
        }
        plugin = OpticalFlow(iterations=20, metadata_dict=metadata_dict)
        plugin.data_smoothing_radius_km = 6.
        ucube, vcube = plugin.process(self.cube1, self.cube2, boxsize=3)
        for cube in [ucube, vcube]:
            self.assertEqual(cube.attributes, metadata_dict["attributes"])

    def test_values(self):
        """Test velocity values are as expected (in m/s)"""
        # Regression values pinned against a previous run of the plugin.
        ucube, vcube = self.plugin.process(self.cube1, self.cube2, boxsize=3)
        self.assertAlmostEqual(np.mean(ucube.data), -2.1719086)
        self.assertAlmostEqual(np.mean(vcube.data), 2.1719084)

    def test_decrease_time_interval(self):
        """Test that decreasing the time interval between radar frames below
        15 minutes does not alter the smoothing radius. To test this the time
        interval is halved, which should give an answer identical to the values
        test above multiplied by a factor of two."""
        time_unit = self.cube2.coord("time").units
        new_time = time_unit.num2date(self.cube2.coord("time").points[0])
        new_time -= datetime.timedelta(seconds=450)
        self.cube2.remove_coord("time")
        time_coord = DimCoord(time_unit.date2num(new_time),
                              standard_name="time",
                              units=time_unit)
        self.cube2.add_aux_coord(time_coord)

        ucube, vcube = self.plugin.process(self.cube1, self.cube2, boxsize=3)
        self.assertAlmostEqual(np.mean(ucube.data), -2.1719086 * 2.)
        self.assertAlmostEqual(np.mean(vcube.data), 2.1719084 * 2.)

    def test_increase_time_interval(self):
        """Test that increasing the time interval between radar frames above
        15 minutes leads to an increase in the data smoothing radius. In this
        test this will result in a smoothing radius larger than the box size,
        which is not allowed and will raise an exception. The updated radius
        value in this case is 12 km (6 grid squares), exceeding the 3 square
        box size."""
        time_unit = self.cube2.coord("time").units
        new_time = time_unit.num2date(self.cube2.coord("time").points[0])
        new_time += datetime.timedelta(seconds=900)
        self.cube2.remove_coord("time")
        time_coord = DimCoord(time_unit.date2num(new_time),
                              standard_name="time",
                              units=time_unit)
        self.cube2.add_aux_coord(time_coord)
        msg = "Box size ([0-9]+) too small"
        with self.assertRaisesRegex(ValueError, msg):
            _, _ = self.plugin.process(self.cube1, self.cube2, boxsize=3)

    # NOTE: the error tests below previously mixed assertRaisesRegexp (a
    # deprecated alias removed in Python 3.12) with assertRaisesRegex; they
    # now consistently use assertRaisesRegex.
    def test_error_small_kernel(self):
        """Test failure if data smoothing radius is too small"""
        self.plugin.data_smoothing_radius_km = 3.
        msg = "Input data smoothing radius 1 too small "
        with self.assertRaisesRegex(ValueError, msg):
            _ = self.plugin.process(self.cube1, self.cube2)

    def test_error_small_box(self):
        """Test failure if box size is smaller than data smoothing radius"""
        msg = "Box size 2 too small"
        with self.assertRaisesRegex(ValueError, msg):
            _, _ = self.plugin.process(self.cube1, self.cube2, boxsize=2)

    def test_error_unmatched_coords(self):
        """Test failure if cubes are provided on unmatched grids"""
        cube2 = self.cube2.copy()
        for ax in ["x", "y"]:
            cube2.coord(axis=ax).points = 4 * np.arange(16)
        msg = "Input cubes on unmatched grids"
        with self.assertRaisesRegex(InvalidCubeError, msg):
            _ = self.plugin.process(self.cube1, cube2)

    def test_error_no_time_difference(self):
        """Test failure if two cubes are provided with the same time"""
        msg = "Expected positive time difference "
        with self.assertRaisesRegex(InvalidCubeError, msg):
            _ = self.plugin.process(self.cube1, self.cube1)

    def test_error_negative_time_difference(self):
        """Test failure if cubes are provided in the wrong order"""
        msg = "Expected positive time difference "
        with self.assertRaisesRegex(InvalidCubeError, msg):
            _ = self.plugin.process(self.cube2, self.cube1)

    @ManageWarnings(record=True)
    def test_warning_zero_inputs(self, warning_list=None):
        """Test code raises a warning and sets advection velocities to zero
        if there is no rain in the input cubes."""
        null_data = np.zeros(self.cube1.shape)
        cube1 = self.cube1.copy(data=null_data)
        cube2 = self.cube2.copy(data=null_data)
        ucube, vcube = self.plugin.process(cube1, cube2)
        warning_msg = "No non-zero data in input fields"
        self.assertTrue(
            any(item.category == UserWarning for item in warning_list))
        self.assertTrue(any(warning_msg in str(item) for item in warning_list))
        self.assertArrayAlmostEqual(ucube.data, null_data)
        self.assertArrayAlmostEqual(vcube.data, null_data)

    def test_error_nonmatching_inputs(self):
        """Test failure if cubes are of different data types"""
        self.cube1.rename("snowfall_rate")
        msg = "Input cubes contain different data types"
        with self.assertRaisesRegex(ValueError, msg):
            _, _ = self.plugin.process(self.cube1, self.cube2)

    @ManageWarnings(record=True)
    def test_warning_nonprecip_inputs(self, warning_list=None):
        """Test code raises a warning if input cubes have
        non-rain variable names"""
        self.cube1.rename("snowfall_rate")
        self.cube2.rename("snowfall_rate")
        _, _ = self.plugin.process(self.cube1, self.cube2, boxsize=3)
        warning_msg = "Input data are of non-precipitation type"
        self.assertTrue(
            any(item.category == UserWarning for item in warning_list))
        self.assertTrue(any(warning_msg in str(item) for item in warning_list))
Example #7
0
class Test_process(IrisTest):
    """Test the process method"""

    def setUp(self):
        """Set up plugin and input rainfall-like cubes"""
        self.plugin = OpticalFlow(iterations=20)
        self.plugin.data_smoothing_radius_km = 6.

        # 16 x 16 grid with 2 km spacing
        coord_points = 2 * np.arange(16)
        x_coord = DimCoord(coord_points, 'projection_x_coordinate', units='km')
        y_coord = DimCoord(coord_points, 'projection_y_coordinate', units='km')

        # Symmetric rainfall "blob"; cube2 contains the same blob shifted
        # one grid square down and one to the left relative to cube1
        rainfall_block = np.array([[1., 1., 1., 1., 1., 1., 1.],
                                   [1., 2., 2., 2., 2., 1., 1.],
                                   [1., 2., 3., 3., 2., 1., 1.],
                                   [1., 2., 3., 3., 2., 1., 1.],
                                   [1., 2., 2., 2., 2., 1., 1.],
                                   [1., 1., 1., 1., 1., 1., 1.],
                                   [1., 1., 1., 1., 1., 1., 1.]])

        data1 = np.zeros((16, 16))
        data1[1:8, 2:9] = rainfall_block
        self.cube1 = iris.cube.Cube(data1,
                                    standard_name='rainfall_rate',
                                    units='mm h-1',
                                    dim_coords_and_dims=[(y_coord, 0),
                                                         (x_coord, 1)])
        # time1: [datetime.datetime(2018, 2, 20, 4, 0)]
        time1 = DimCoord(1519099200,
                         standard_name="time",
                         units='seconds since 1970-01-01 00:00:00')
        self.cube1.add_aux_coord(time1)

        data2 = np.zeros((16, 16))
        data2[2:9, 1:8] = rainfall_block
        self.cube2 = iris.cube.Cube(data2,
                                    standard_name='rainfall_rate',
                                    units='mm h-1',
                                    dim_coords_and_dims=[(y_coord, 0),
                                                         (x_coord, 1)])
        # time2: [datetime.datetime(2018, 2, 20, 4, 15)]
        time2 = DimCoord(1519100100,
                         standard_name="time",
                         units='seconds since 1970-01-01 00:00:00')
        self.cube2.add_aux_coord(time2)

    def test_basic(self):
        """Test correct output types and metadata"""
        ucube, vcube = self.plugin.process(self.cube1, self.cube2, boxsize=3)
        for cube in [ucube, vcube]:
            self.assertIsInstance(cube, iris.cube.Cube)
            # Velocities are valid at the time of the later (second) cube
            self.assertEqual(
                cube.coord("time")[0],
                self.cube2.coord("time")[0])
            self.assertEqual(cube.units, "m s-1")
            self.assertIn("precipitation_advection", cube.name())
            self.assertIn("velocity", cube.name())

    def test_values(self):
        """Test velocity values are as expected (in m/s)"""
        ucube, vcube = self.plugin.process(self.cube1, self.cube2, boxsize=3)
        self.assertAlmostEqual(np.mean(ucube.data), -2.171908358)
        self.assertAlmostEqual(np.mean(vcube.data), 2.171908358)

    def test_update_smoothing_radius(self):
        """Test data smoothing radius is updated if cube time difference is not
        15 minutes.  We don't care about the error this trips, we just want to
        make sure the radius is updated correctly."""
        # Push cube2 a further 15 minutes into the future (30 min difference)
        time_unit = self.cube2.coord("time").units
        new_time = time_unit.num2date(self.cube2.coord("time").points[0])
        new_time += datetime.timedelta(seconds=900)
        self.cube2.remove_coord("time")
        time_coord = DimCoord(time_unit.date2num(new_time),
                              standard_name="time",
                              units=time_unit)
        self.cube2.add_aux_coord(time_coord)
        with self.assertRaises(ValueError):
            _, _ = self.plugin.process(self.cube1, self.cube2, boxsize=3)
        # Radius scales with the time step: double the step, double the radius
        self.assertAlmostEqual(self.plugin.data_smoothing_radius_km, 12.)

    def test_error_small_kernel(self):
        """Test failure if data smoothing radius is too small"""
        self.plugin.data_smoothing_radius_km = 3.
        msg = "Input data smoothing radius 1 too small "
        # assertRaisesRegexp is a deprecated alias of assertRaisesRegex
        with self.assertRaisesRegex(ValueError, msg):
            _ = self.plugin.process(self.cube1, self.cube2)

    def test_error_small_box(self):
        """Test failure if box size is smaller than data smoothing radius"""
        msg = "Box size 2 too small"
        with self.assertRaisesRegex(ValueError, msg):
            _, _ = self.plugin.process(self.cube1, self.cube2, boxsize=2)

    def test_error_unmatched_coords(self):
        """Test failure if cubes are provided on unmatched grids"""
        cube2 = self.cube2.copy()
        for ax in ["x", "y"]:
            cube2.coord(axis=ax).points = 4 * np.arange(16)
        msg = "Input cubes on unmatched grids"
        with self.assertRaisesRegex(InvalidCubeError, msg):
            _ = self.plugin.process(self.cube1, cube2)

    def test_error_no_time_difference(self):
        """Test failure if two cubes are provided with the same time"""
        msg = "Expected positive time difference "
        with self.assertRaisesRegex(InvalidCubeError, msg):
            _ = self.plugin.process(self.cube1, self.cube1)

    def test_error_negative_time_difference(self):
        """Test failure if cubes are provided in the wrong order"""
        msg = "Expected positive time difference "
        with self.assertRaisesRegex(InvalidCubeError, msg):
            _ = self.plugin.process(self.cube2, self.cube1)

    @ManageWarnings(record=True)
    def test_warning_zero_inputs(self, warning_list=None):
        """Test code raises a warning and sets advection velocities to zero
        if there is no rain in the input cubes."""
        null_data = np.zeros(self.cube1.shape)
        cube1 = self.cube1.copy(data=null_data)
        cube2 = self.cube2.copy(data=null_data)
        ucube, vcube = self.plugin.process(cube1, cube2)
        self.assertTrue(len(warning_list) == 1)
        self.assertTrue(warning_list[0].category == UserWarning)
        self.assertIn("No non-zero data in input fields", str(warning_list[0]))
        self.assertArrayAlmostEqual(ucube.data, null_data)
        self.assertArrayAlmostEqual(vcube.data, null_data)

    def test_error_nonmatching_inputs(self):
        """Test failure if cubes are of different data types"""
        self.cube1.rename("snowfall_rate")
        msg = "Input cubes contain different data types"
        with self.assertRaisesRegex(ValueError, msg):
            _, _ = self.plugin.process(self.cube1, self.cube2)

    @ManageWarnings(record=True)
    def test_warning_nonprecip_inputs(self, warning_list=None):
        """Test code raises a warning if input cubes have
        non-rain variable names"""
        self.cube1.rename("snowfall_rate")
        self.cube2.rename("snowfall_rate")
        _, _ = self.plugin.process(self.cube1, self.cube2, boxsize=3)
        self.assertTrue(len(warning_list) == 1)
        self.assertTrue(warning_list[0].category == UserWarning)
        self.assertIn("Input data are of non-precipitation type",
                      str(warning_list[0]))
# Exemplo n.º 8 (scraped example marker; stray vote-count line removed)
def main(argv=None):
    """Calculate optical flow advection velocities and (optionally)
    extrapolate data.

    Args:
        argv (list of str or None):
            Command line arguments.  Uses sys.argv if None.

    Raises:
        ValueError: If precipitation cubes are supplied without orographic
            enhancement filepaths, or if the number of nowcast filepaths
            does not match the number of requested lead times.
    """

    parser = ArgParser(
        description="Calculate optical flow components from input fields "
        "and (optionally) extrapolate to required lead times.")

    parser.add_argument("input_filepaths", metavar="INPUT_FILEPATHS",
                        nargs=3, type=str, help="Paths to the input radar "
                        "files. There should be 3 input files at T, T-1 and "
                        "T-2 from which to calculate optical flow velocities. "
                        "The files require a 'time' coordinate on which they "
                        "are sorted, so the order of inputs does not matter.")
    parser.add_argument("--output_dir", metavar="OUTPUT_DIR", type=str,
                        default='', help="Directory to write all output files,"
                        " or only advection velocity components if "
                        "NOWCAST_FILEPATHS is specified.")
    parser.add_argument("--nowcast_filepaths", nargs="+", type=str,
                        default=None, help="Optional list of full paths to "
                        "output nowcast files. Overrides OUTPUT_DIR. Ignored "
                        "unless '--extrapolate' is set.")
    parser.add_argument("--orographic_enhancement_filepaths", nargs="+",
                        type=str, default=None, help="List or wildcarded "
                        "file specification to the input orographic "
                        "enhancement files. Orographic enhancement files are "
                        "compulsory for precipitation fields.")
    parser.add_argument("--json_file", metavar="JSON_FILE", default=None,
                        help="Filename for the json file containing "
                        "required changes to the metadata. Information "
                        "describing the intended contents of the json file "
                        "is available in "
                        "improver.utilities.cube_metadata.amend_metadata."
                        "Every output cube will have the metadata_dict "
                        "applied. Defaults to None.", type=str)

    # OpticalFlow plugin configurable parameters
    parser.add_argument("--ofc_box_size", type=int, default=30, help="Size of "
                        "square 'box' (in grid squares) within which to solve "
                        "the optical flow equations.")
    parser.add_argument("--smart_smoothing_iterations", type=int, default=100,
                        help="Number of iterations to perform in enforcing "
                        "smoothness constraint for optical flow velocities.")

    # AdvectField options
    parser.add_argument("--extrapolate", action="store_true", default=False,
                        help="Optional flag to advect current data forward to "
                        "specified lead times.")
    parser.add_argument("--max_lead_time", type=int, default=360,
                        help="Maximum lead time required (mins).  Ignored "
                        "unless '--extrapolate' is set.")
    parser.add_argument("--lead_time_interval", type=int, default=15,
                        help="Interval between required lead times (mins). "
                        "Ignored unless '--extrapolate' is set.")

    args = parser.parse_args(args=argv)

    # read input data
    original_cube_list = load_cubelist(args.input_filepaths)

    # Initialise to None so the extrapolation branch below can always pass
    # the variable through, even when no orographic enhancement is supplied
    # (previously this raised NameError for non-precipitation extrapolation)
    oe_cube = None
    if args.orographic_enhancement_filepaths:
        # Subtract orographic enhancement
        oe_cube = load_cube(args.orographic_enhancement_filepaths)
        cube_list = ApplyOrographicEnhancement("subtract").process(
            original_cube_list, oe_cube)
    else:
        cube_list = original_cube_list
        if any("precipitation_rate" in cube.name() for cube in cube_list):
            cube_names = [cube.name() for cube in cube_list]
            msg = ("For precipitation fields, orographic enhancement "
                   "filepaths must be supplied. The names of the cubes "
                   "supplied were: {}".format(cube_names))
            raise ValueError(msg)

    # order input files by validity time
    cube_list.sort(key=lambda x: x.coord("time").points[0])
    time_coord = cube_list[-1].coord("time")

    metadata_dict = None
    if args.json_file:
        # Load JSON file for metadata amendments.
        with open(args.json_file, 'r') as input_file:
            metadata_dict = json.load(input_file)

    # calculate optical flow velocities from T-1 to T and T-2 to T-1
    ofc_plugin = OpticalFlow(iterations=args.smart_smoothing_iterations,
                             metadata_dict=metadata_dict)
    ucubes = iris.cube.CubeList([])
    vcubes = iris.cube.CubeList([])
    for older_cube, newer_cube in zip(cube_list[:-1], cube_list[1:]):
        ucube, vcube = ofc_plugin.process(older_cube, newer_cube,
                                          boxsize=args.ofc_box_size)
        ucubes.append(ucube)
        vcubes.append(vcube)

    # average optical flow velocity components, stamping the mean with the
    # validity time of the most recent input
    ucube = ucubes.merge_cube()
    umean = ucube.collapsed("time", iris.analysis.MEAN)
    umean.coord("time").points = time_coord.points
    umean.coord("time").units = time_coord.units

    vcube = vcubes.merge_cube()
    vmean = vcube.collapsed("time", iris.analysis.MEAN)
    vmean.coord("time").points = time_coord.points
    vmean.coord("time").units = time_coord.units

    # save mean optical flow components as netcdf files
    for wind_cube in [umean, vmean]:
        file_name = generate_file_name(wind_cube)
        save_netcdf(wind_cube, os.path.join(args.output_dir, file_name))

    # advect latest input data to the required lead times
    if args.extrapolate:

        # generate list of lead times in minutes
        lead_times = np.arange(0, args.max_lead_time+1,
                               args.lead_time_interval)

        if args.nowcast_filepaths:
            if len(args.nowcast_filepaths) != len(lead_times):
                raise ValueError("Require exactly one output file name for "
                                 "each forecast lead time")

        forecast_plugin = CreateExtrapolationForecast(
            original_cube_list[-1], umean, vmean,
            orographic_enhancement_cube=oe_cube, metadata_dict=metadata_dict)
        # extrapolate input data to required lead times
        for i, lead_time in enumerate(lead_times):
            forecast_cube = forecast_plugin.extrapolate(
                leadtime_minutes=lead_time)

            # save to a suitably-named output file
            if args.nowcast_filepaths:
                file_name = args.nowcast_filepaths[i]
            else:
                file_name = os.path.join(
                    args.output_dir, generate_file_name(forecast_cube))
            save_netcdf(forecast_cube, file_name)