 # Assumed context: this is a method of an Iris fast-load test class; it
 # relies on module-level `import iris`, `from iris.cube import CubeList`
 # and `from iris.coords import AuxCoord`.
 def test_FAIL_phenomena_nostash(self):
     # If we remove the 'STASH' attributes, certain phenomena can still be
     # successfully encoded+decoded by standard load using LBFC values.
     # Structured loading gets this wrong, because it does not use LBFC in
     # characterising phenomena.
     flds = self.fields(c_t="1122", phn="0101")
     for fld in flds:
         del fld.attributes["STASH"]
     file = self.save_fieldcubes(flds)
     results = iris.load(file)
     if not self.do_fast_loads:
         # This is what we'd LIKE to get (what iris.load gives).
         expected = CubeList(flds).merge()
     else:
         # At present we get a single cube, incorrectly combined over all
         # 4 timepoints with the same phenomenon throughout (wrong!).
         # It's a bit tricky to arrange the existing data like that:
         # do it by hacking the time values to allow a merge, then fixing
         # up the time coordinate afterwards.
         old_t1, old_t2 = (
             fld.coord("time").points[0] for fld in (flds[0], flds[2])
         )
         for i_fld, fld in enumerate(flds):
             # Hack the phenomena to all look like the first one.
             fld.rename("air_temperature")
             fld.units = "K"
             # Hack the time points so the 4 cubes can merge into one.
             fld.coord("time").points = [old_t1 + i_fld]
         one_cube = CubeList(flds).merge_cube()
         # Replace time dim with an anonymous dim.
         co_t_fake = one_cube.coord("time")
         one_cube.remove_coord(co_t_fake)
         # Reconstruct + add back the expected auxiliary time coord.
         co_t_new = AuxCoord(
             [old_t1, old_t1, old_t2, old_t2],
             standard_name="time",
             units=co_t_fake.units,
         )
         one_cube.add_aux_coord(co_t_new, 0)
         expected = [one_cube]
     self.assertEqual(results, expected)
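
For reference, here is a minimal standalone sketch of the merge-then-rebuild trick used in the else branch above, applied to synthetic cubes. The data values, time units, and variable names below are illustrative assumptions, not taken from the test:

 import numpy as np
 from iris.coords import AuxCoord, DimCoord
 from iris.cube import Cube, CubeList

 cubes = CubeList()
 for i in range(4):
     cube = Cube(np.zeros(3), standard_name="air_temperature", units="K")
     # Distinct scalar time points allow the four cubes to merge into one.
     cube.add_aux_coord(
         DimCoord([float(i)], standard_name="time",
                  units="hours since 1970-01-01")
     )
     cubes.append(cube)

 one_cube = cubes.merge_cube()  # shape (4, 3); time becomes a dim coord

 # Drop the merged (monotonic) time dim coord, leaving that dim anonymous,
 # then re-attach the repeated time values as an auxiliary coordinate.
 co_t = one_cube.coord("time")
 one_cube.remove_coord(co_t)
 one_cube.add_aux_coord(
     AuxCoord([0.0, 0.0, 1.0, 1.0], standard_name="time", units=co_t.units),
     0,
 )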