Example #1
# Imports needed by this snippet; `self.fields`, `self.save_fieldcubes`
# and `self.do_fast_loads` are provided by the enclosing test mixin,
# which is not shown here.
import iris
from iris.coords import AuxCoord
from iris.cube import CubeList

 def test_FAIL_phenomena_nostash(self):
     # If we remove the 'STASH' attributes, certain phenomena can still be
     # successfully encoded+decoded by standard load using LBFC values.
     # Structured loading gets this wrong, because it does not use LBFC in
     # characterising phenomena.
     flds = self.fields(c_t="1122", phn="0101")
     for fld in flds:
         del fld.attributes["STASH"]
     file = self.save_fieldcubes(flds)
     results = iris.load(file)
     if not self.do_fast_loads:
         # This is what we'd LIKE to get (what iris.load gives).
         expected = CubeList(flds).merge()
     else:
         # At present, we get a single cube incorrectly combined over all
         # 4 timepoints, with the same phenomenon for all (wrong!).
         # It's a bit tricky to arrange the existing data like that.
         # Do it by hacking the time values so that a merge is possible,
         # and then fixing up the time coordinate afterwards.
         old_t1, old_t2 = (
             fld.coord("time").points[0] for fld in (flds[0], flds[2])
         )
         for i_fld, fld in enumerate(flds):
             # Hack the phenomena to all look like the first one.
             fld.rename("air_temperature")
             fld.units = "K"
             # Hack the time points so the four cubes can merge into one.
             fld.coord("time").points = [old_t1 + i_fld]
         one_cube = CubeList(flds).merge_cube()
         # Replace time dim with an anonymous dim.
         co_t_fake = one_cube.coord("time")
         one_cube.remove_coord(co_t_fake)
         # Reconstruct + add back the expected auxiliary time coord.
         co_t_new = AuxCoord(
             [old_t1, old_t1, old_t2, old_t2],
             standard_name="time",
             units=co_t_fake.units,
         )
         one_cube.add_aux_coord(co_t_new, 0)
         expected = [one_cube]
     self.assertEqual(results, expected)
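
The trickiest step above is demoting the merged time dimension coordinate to
an auxiliary coordinate carrying repeated values. A minimal standalone sketch
of that manoeuvre, assuming only that iris and numpy are installed (the cube
and values here are illustrative, not taken from the test):

import numpy as np
from iris.coords import AuxCoord, DimCoord
from iris.cube import Cube

cube = Cube(np.zeros(4), long_name="example")
cube.add_dim_coord(
    DimCoord([0.0, 1.0, 2.0, 3.0], standard_name="time",
             units="hours since 1970-01-01 00:00:00"),
    0,
)

# Removing the dimension coordinate leaves dimension 0 anonymous ...
old_time = cube.coord("time")
cube.remove_coord("time")

# ... and an AuxCoord can then carry non-monotonic (here, repeated) values.
cube.add_aux_coord(
    AuxCoord([0.0, 0.0, 1.0, 1.0], standard_name="time",
             units=old_time.units),
    0,
)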
Example #2
# Imports needed by this snippet; the improver module paths below are
# best-guess assumptions and may differ between improver versions.
from datetime import datetime

import numpy as np
from iris.coords import AuxCoord
from iris.cube import CubeList
from iris.tests import IrisTest
from iris.util import squeeze

from improver.blending.spatial_weights import SpatiallyVaryingWeightsFromMask
from improver.metadata.probabilistic import find_threshold_coordinate
from improver.synthetic_data.set_up_test_cubes import set_up_probability_cube


class Test__create_template_slice(IrisTest):
    """Test create_template_slice method"""
    def setUp(self):
        """
        Set up a basic input cube. Input cube has 2 thresholds on and 3
        forecast_reference_times
        """
        thresholds = [10, 20]
        data = np.ones((2, 2, 3), dtype=np.float32)
        cycle1 = set_up_probability_cube(
            data,
            thresholds,
            spatial_grid="equalarea",
            time=datetime(2017, 11, 10, 4, 0),
            frt=datetime(2017, 11, 10, 0, 0),
        )
        cycle2 = set_up_probability_cube(
            data,
            thresholds,
            spatial_grid="equalarea",
            time=datetime(2017, 11, 10, 4, 0),
            frt=datetime(2017, 11, 10, 1, 0),
        )
        cycle3 = set_up_probability_cube(
            data,
            thresholds,
            spatial_grid="equalarea",
            time=datetime(2017, 11, 10, 4, 0),
            frt=datetime(2017, 11, 10, 2, 0),
        )
        self.cube_to_collapse = CubeList([cycle1, cycle2, cycle3]).merge_cube()
        self.cube_to_collapse = squeeze(self.cube_to_collapse)
        self.cube_to_collapse.rename("weights")
        # This input array has 3 forecast reference times and 2 thresholds.
        # The two thresholds have the same weights.
        self.cube_to_collapse.data = np.array(
            [
                [[[1, 0, 1], [1, 1, 1]], [[1, 0, 1], [1, 1, 1]]],
                [[[0, 0, 1], [0, 1, 1]], [[0, 0, 1], [0, 1, 1]]],
                [[[1, 1, 1], [1, 1, 1]], [[1, 1, 1], [1, 1, 1]]],
            ],
            dtype=np.float32,
        )
        self.cube_to_collapse.data = np.ma.masked_equal(
            self.cube_to_collapse.data, 0)
        self.plugin = SpatiallyVaryingWeightsFromMask(
            "forecast_reference_time")

    def test_multi_dim_blend_coord_fail(self):
        """Test error is raised when we have a multi-dimensional blend_coord"""
        # Add a surface altitude coordinate which covers x and y dimensions.
        altitudes = np.array([[10, 20, 30], [20, 30, 10]])
        altitudes_coord = AuxCoord(altitudes,
                                   standard_name="surface_altitude",
                                   units="m")
        self.cube_to_collapse.add_aux_coord(altitudes_coord, data_dims=(2, 3))
        message = "Blend coordinate must only be across one dimension."
        plugin = SpatiallyVaryingWeightsFromMask("surface_altitude")
        with self.assertRaisesRegex(ValueError, message):
            plugin._create_template_slice(self.cube_to_collapse)

    def test_varying_mask_fail(self):
        """Test error is raised when mask varies along collapsing dim"""
        # Check fails when blending along threshold coordinate, as mask
        # varies along this coordinate.
        threshold_coord = find_threshold_coordinate(self.cube_to_collapse)
        message = "The mask on the input cube can only vary along the blend_coord"
        plugin = SpatiallyVaryingWeightsFromMask(threshold_coord.name())
        with self.assertRaisesRegex(ValueError, message):
            plugin._create_template_slice(self.cube_to_collapse)

    def test_scalar_blend_coord_fail(self):
        """Test error is raised when blend_coord is scalar"""
        message = "Blend coordinate must only be across one dimension."
        with self.assertRaisesRegex(ValueError, message):
            self.plugin._create_template_slice(self.cube_to_collapse[0])

    def test_basic(self):
        """Test a correct template slice is returned for simple case"""
        expected = self.cube_to_collapse.copy()[:, 0, :, :]
        result = self.plugin._create_template_slice(self.cube_to_collapse)
        self.assertEqual(expected.metadata, result.metadata)
        self.assertArrayAlmostEqual(expected.data, result.data)

    def test_basic_no_change(self):
        """Test a correct template slice is returned for a case where
           no slicing is needed"""
        input_cube = self.cube_to_collapse.copy()[:, 0, :, :]
        expected = input_cube.copy()
        result = self.plugin._create_template_slice(input_cube)
        self.assertEqual(expected.metadata, result.metadata)
        self.assertArrayAlmostEqual(expected.data, result.data)

    def test_aux_blending_coord(self):
        """Test a correct template slice is returned when blending_coord is
           an AuxCoord"""
        expected = self.cube_to_collapse.copy()[:, 0, :, :]
        plugin = SpatiallyVaryingWeightsFromMask("forecast_period")
        result = plugin._create_template_slice(self.cube_to_collapse)
        self.assertEqual(expected.metadata, result.metadata)
        self.assertArrayAlmostEqual(expected.data, result.data)
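
The mask handling these tests exercise can be stated compactly: the mask on
the weights may vary along the blend coordinate, but must be identical along
every other leading dimension, otherwise no single template slice exists. A
minimal numpy-only sketch of that rule, using the same data layout as setUp
(the shape labels are assumptions for illustration):

import numpy as np

# Shape (frt=3, threshold=2, y=2, x=3), zeros masked out as in setUp.
weights = np.ma.masked_equal(
    np.array(
        [
            [[[1, 0, 1], [1, 1, 1]], [[1, 0, 1], [1, 1, 1]]],
            [[[0, 0, 1], [0, 1, 1]], [[0, 0, 1], [0, 1, 1]]],
            [[[1, 1, 1], [1, 1, 1]], [[1, 1, 1], [1, 1, 1]]],
        ],
        dtype=np.float32,
    ),
    0,
)

# Blending over axis 0 (forecast_reference_time): every threshold slice
# must share one mask, so taking weights[:, 0, :, :] as the template slice
# is well defined.
mask_ok = all(
    np.array_equal(weights.mask[:, 0], weights.mask[:, i])
    for i in range(weights.shape[1])
)
assert mask_ok  # holds for this data, mirroring test_basic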