Пример #1
0
    def test_t1_list_t2_scalar(self):
        """lbtim ia=0, ib=1, ic=1: a vector of validity times (t1)
        against a single scalar forecast reference time (t2)."""
        grid_code = _lbcode(1)
        time_code = _lbtim(ia=0, ib=1, ic=1)
        fp_hours = np.array([0, 3, 6, 9, 12])
        # Validity times - one per forecast period, mapped to dim 0.
        validity_times = [nc_datetime(1970, 1, 9, hour=(3 + fp))
                          for fp in fp_hours]
        # Forecast reference time - scalar.
        reference_time = nc_datetime(1970, 1, 9, hour=3)

        coords_and_dims = _convert_time_coords(
            lbcode=grid_code, lbtim=time_code,
            epoch_hours_unit=_EPOCH_HOURS_UNIT,
            t1=validity_times, t2=reference_time, lbft=None,
            t1_dims=(0,))

        # Build the coordinates we expect back.
        fp_coord = DimCoord(fp_hours,
                            standard_name='forecast_period',
                            units='hours')
        time_coord = DimCoord((24 * 8) + 3 + fp_hours,
                              standard_name='time',
                              units=_EPOCH_HOURS_UNIT)
        fref_time_coord = DimCoord((24 * 8) + 3,
                                   standard_name='forecast_reference_time',
                                   units=_EPOCH_HOURS_UNIT)
        expected = [(fp_coord, (0,)),
                    (time_coord, (0,)),
                    (fref_time_coord, None)]
        self.assertCoordsAndDimsListsMatch(coords_and_dims, expected)
    def test_t1_list(self):
        """lbtim ia=0, ib=0, ic=1: a plain series of times (t1 only)."""
        offsets = np.array([0, 3, 6, 9, 12])
        # Validity times - one per offset, mapped to dim 0.
        times = [nc_datetime(1970, 1, 9, hour=3 + off) for off in offsets]
        # Forecast reference time is not used for ib=0, but must be given.
        unused_reference = nc_datetime(1970, 1, 9, hour=3)

        coords_and_dims = _convert_time_coords(
            lbcode=_lbcode(1), lbtim=_lbtim(ia=0, ib=0, ic=1),
            epoch_hours_unit=_EPOCH_HOURS_UNIT,
            t1=times, t2=unused_reference, lbft=None,
            t1_dims=(0,))

        # Only a 'time' coordinate should come back.
        time_coord = DimCoord((24 * 8) + 3 + offsets,
                              standard_name='time',
                              units=_EPOCH_HOURS_UNIT)
        self.assertCoordsAndDimsListsMatch(coords_and_dims,
                                           [(time_coord, (0,))])
Пример #3
0
 def _check_yearly(self, lbcode, expect_match=True):
     """Check conversion of an lbtim ib=3 (mean over years) header."""
     time_header = _lbtim(ib=3, ic=1)
     start = nc_datetime(1970, 1, 9, hour=9, minute=0, second=0)
     end = nc_datetime(1972, 1, 11, hour=9, minute=0, second=0)
     sample_period = 3.0  # hours
     coords_and_dims = _convert_time_coords(
         lbcode=lbcode, lbtim=time_header,
         epoch_hours_unit=_EPOCH_HOURS_UNIT,
         t1=start, t2=end, lbft=sample_period)
     if not expect_match:
         expect_result = []
     else:
         t1_hours = 24 * 8.375
         t2_hours = 24 * (10.375 + 2 * 365)
         period_hours = 24.0 * (2 * 365 + 2)
         frt_coord = DimCoord([t2_hours - sample_period],
                              standard_name='forecast_reference_time',
                              units=_EPOCH_HOURS_UNIT)
         fp_coord = DimCoord(standard_name='forecast_period',
                             units='hours',
                             points=[sample_period],
                             bounds=[sample_period - period_hours,
                                     sample_period])
         time_coord = DimCoord(standard_name='time',
                               units=_EPOCH_HOURS_UNIT,
                               points=[t2_hours],
                               bounds=[t1_hours, t2_hours])
         expect_result = [(frt_coord, None), (fp_coord, None),
                          (time_coord, None)]
     self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result)
Пример #4
0
 def _check_period(self, lbcode, expect_match=True):
     """Check conversion of an lbtim ib=2 (mean over period) header."""
     time_header = _lbtim(ib=2, ic=1)
     start = nc_datetime(1970, 1, 9, hour=3, minute=0, second=0)
     end = nc_datetime(1970, 1, 10, hour=3, minute=0, second=0)
     sample_period = 2.0  # hours
     coords_and_dims = _convert_time_coords(
         lbcode=lbcode, lbtim=time_header,
         epoch_hours_unit=_EPOCH_HOURS_UNIT,
         t1=start, t2=end, lbft=sample_period)
     if not expect_match:
         expect_result = []
     else:
         frt_coord = DimCoord(24 * 9.125 - 2.0,
                              standard_name='forecast_reference_time',
                              units=_EPOCH_HOURS_UNIT)
         fp_coord = DimCoord(standard_name='forecast_period',
                             units='hours',
                             points=[-10.0], bounds=[-22.0, 2.0])
         time_coord = DimCoord(standard_name='time',
                               units=_EPOCH_HOURS_UNIT,
                               points=[24 * 8.625],
                               bounds=[24 * 8.125, 24 * 9.125])
         expect_result = [(frt_coord, None), (fp_coord, None),
                          (time_coord, None)]
     self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result)
 def _check_forecast(self, lbcode, expect_match=True):
     """Check conversion of an lbtim ib=1 (forecast) time header."""
     time_header = _lbtim(ib=1, ic=1)
     validity = nc_datetime(1970, 1, 10, hour=6, minute=0, second=0)
     reference = nc_datetime(1970, 1, 9, hour=3, minute=0, second=0)
     coords_and_dims = _convert_time_coords(
         lbcode=lbcode, lbtim=time_header,
         epoch_hours_unit=_EPOCH_HOURS_UNIT,
         t1=validity, t2=reference, lbft=None)
     expect_result = []
     if expect_match:
         expect_result = [
             (DimCoord(24 * 1.125,
                       standard_name='forecast_period',
                       units='hours'), None),
             (DimCoord(24 * 9.25,
                       standard_name='time',
                       units=_EPOCH_HOURS_UNIT), None),
             (DimCoord(24 * 8.125,
                       standard_name='forecast_reference_time',
                       units=_EPOCH_HOURS_UNIT), None)]
     self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result)
Пример #6
0
 def _check_yearly(self, lbcode, expect_match=True):
     """Exercise an lbtim ib=3 header spanning two whole years."""
     hdr = _lbtim(ib=3, ic=1)
     period_start = nc_datetime(1970, 1, 9, hour=9, minute=0, second=0)
     period_end = nc_datetime(1972, 1, 11, hour=9, minute=0, second=0)
     fp_hours = 3.0  # sample period, in hours
     result = _convert_time_coords(
         lbcode=lbcode, lbtim=hdr, epoch_hours_unit=_EPOCH_HOURS_UNIT,
         t1=period_start, t2=period_end, lbft=fp_hours)
     expect_result = []
     if expect_match:
         t1_hours = 24 * 8.375
         t2_hours = 24 * (10.375 + 2 * 365)
         period_hours = 24.0 * (2 * 365 + 2)
         expect_result = [
             (DimCoord([t2_hours - fp_hours],
                       standard_name='forecast_reference_time',
                       units=_EPOCH_HOURS_UNIT), None),
             (DimCoord(standard_name='forecast_period', units='hours',
                       points=[fp_hours],
                       bounds=[fp_hours - period_hours, fp_hours]),
              None),
             (DimCoord(standard_name='time', units=_EPOCH_HOURS_UNIT,
                       points=[t2_hours],
                       bounds=[t1_hours, t2_hours]), None)]
     self.assertCoordsAndDimsListsMatch(result, expect_result)
Пример #7
0
 def _check_forecast(self, lbcode, expect_match=True):
     """Exercise an lbtim ib=1 (simple forecast) header."""
     hdr = _lbtim(ib=1, ic=1)
     valid_at = nc_datetime(1970, 1, 10, hour=6, minute=0, second=0)
     issued_at = nc_datetime(1970, 1, 9, hour=3, minute=0, second=0)
     result = _convert_time_coords(
         lbcode=lbcode, lbtim=hdr, epoch_hours_unit=_EPOCH_HOURS_UNIT,
         t1=valid_at, t2=issued_at, lbft=None)
     if expect_match:
         fp = DimCoord(24 * 1.125,
                       standard_name='forecast_period', units='hours')
         time = DimCoord(24 * 9.25,
                         standard_name='time', units=_EPOCH_HOURS_UNIT)
         frt = DimCoord(24 * 8.125,
                        standard_name='forecast_reference_time',
                        units=_EPOCH_HOURS_UNIT)
         expect_result = [(fp, None), (time, None), (frt, None)]
     else:
         expect_result = []
     self.assertCoordsAndDimsListsMatch(result, expect_result)
Пример #8
0
 def test_unrecognised(self):
     """An unrecognised ib value (4) should produce no coordinates."""
     hdr = _lbtim(ib=4, ic=1)
     zero_time_1 = nc_datetime(0, 0, 0)
     zero_time_2 = nc_datetime(0, 0, 0)
     result = _convert_time_coords(
         lbcode=_lbcode(0), lbtim=hdr,
         epoch_hours_unit=_EPOCH_HOURS_UNIT,
         t1=zero_time_1, t2=zero_time_2, lbft=None)
     self.assertEqual(result, [])
Пример #9
0
 def test_not_exact_hours(self):
     """A validity time off the hour yields fractional point values."""
     valid_at = nc_datetime(2015, 1, 20, hour=7, minute=10, second=0)
     issued_at = nc_datetime(2015, 1, 20, hour=0, minute=0, second=0)
     result = _convert_time_coords(
         lbcode=_lbcode(1), lbtim=_lbtim(ib=1, ic=1),
         epoch_hours_unit=_EPOCH_HOURS_UNIT,
         t1=valid_at, t2=issued_at, lbft=None)
     (fp, _), (t, _), (frt, _) = result
     self.assertEqual(fp.points[0], 7.1666666641831398)
     self.assertEqual(t.points[0], 394927.16666666418)
Пример #10
0
 def test_(self):
     """lbtim ib=2 over year-zero dates should yield no coordinates."""
     start = nc_datetime(0, 1, 1)
     end = nc_datetime(0, 1, 31, 23, 59, 00)
     result = _convert_time_coords(
         lbcode=_lbcode(1), lbtim=_lbtim(ib=2, ic=1),
         epoch_hours_unit=_EPOCH_HOURS_UNIT,
         t1=start, t2=end, lbft=0)
     self.assertEqual(result, [])
Пример #11
0
 def test_exact_hours(self):
     """A validity time exactly on the hour yields whole-number points."""
     valid_at = nc_datetime(2015, 1, 20, hour=7, minute=0, second=0)
     issued_at = nc_datetime(2015, 1, 20, hour=0, minute=0, second=0)
     result = _convert_time_coords(
         lbcode=_lbcode(1), lbtim=_lbtim(ib=1, ic=1),
         epoch_hours_unit=_EPOCH_HOURS_UNIT,
         t1=valid_at, t2=issued_at, lbft=None)
     (fp, _), (t, _), (frt, _) = result
     # Both should be exact whole numbers.
     self.assertEqual(fp.points[0], 7)
     self.assertEqual(t.points[0], 394927)
Пример #12
0
 def test_(self):
     """lbtim ib=2 with year-zero dates gives an empty result list."""
     first = nc_datetime(0, 1, 1)
     last = nc_datetime(0, 1, 31, 23, 59, 00)
     converted = _convert_time_coords(
         lbcode=_lbcode(1),
         lbtim=_lbtim(ib=2, ic=1),
         epoch_hours_unit=_EPOCH_HOURS_UNIT,
         t1=first,
         t2=last,
         lbft=0)
     self.assertEqual(converted, [])
Пример #13
0
 def test(self):
     """A cross-section lbcode (31323) with ic=2 yields only a
     forecast_reference_time coordinate."""
     coords_and_dims = _convert_time_coords(
         lbcode=_lbcode(value=31323), lbtim=_lbtim(ib=2, ic=2),
         epoch_hours_unit=_EPOCH_HOURS_UNIT,
         t1=nc_datetime(1970, 1, 3, hour=0, minute=0, second=0),
         t2=nc_datetime(1970, 1, 4, hour=0, minute=0, second=0),
         lbft=24 * 4)
     t2_hours = 24 * 3
     frt_coord = DimCoord([t2_hours - 24 * 4],
                          standard_name='forecast_reference_time',
                          units=_EPOCH_HOURS_UNIT)
     self.assertCoordsAndDimsListsMatch(coords_and_dims,
                                        [(frt_coord, None)])
Пример #14
0
 def test_unrecognised(self):
     """ib=4 is not a recognised time encoding: expect an empty list."""
     dummy_t1 = nc_datetime(0, 0, 0)
     dummy_t2 = nc_datetime(0, 0, 0)
     converted = _convert_time_coords(
         lbcode=_lbcode(0),
         lbtim=_lbtim(ib=4, ic=1),
         epoch_hours_unit=_EPOCH_HOURS_UNIT,
         t1=dummy_t1,
         t2=dummy_t2,
         lbft=None)
     self.assertEqual(converted, [])
Пример #15
0
 def test_not_exact_hours(self):
     """Minutes past the hour survive as fractional hour values."""
     converted = _convert_time_coords(
         lbcode=_lbcode(1),
         lbtim=_lbtim(ib=1, ic=1),
         epoch_hours_unit=_EPOCH_HOURS_UNIT,
         t1=nc_datetime(2015, 1, 20, hour=7, minute=10, second=0),
         t2=nc_datetime(2015, 1, 20, hour=0, minute=0, second=0),
         lbft=None)
     (fp, _), (t, _), (frt, _) = converted
     self.assertEqual(fp.points[0], 7.1666666641831398)
     self.assertEqual(t.points[0], 394927.16666666418)
Пример #16
0
 def test_exact_hours(self):
     """On-the-hour times convert to exact whole-number hour points."""
     converted = _convert_time_coords(
         lbcode=_lbcode(1),
         lbtim=_lbtim(ib=1, ic=1),
         epoch_hours_unit=_EPOCH_HOURS_UNIT,
         t1=nc_datetime(2015, 1, 20, hour=7, minute=0, second=0),
         t2=nc_datetime(2015, 1, 20, hour=0, minute=0, second=0),
         lbft=None)
     (fp, _), (t, _), (frt, _) = converted
     # Both values should be exact whole numbers.
     self.assertEqual(fp.points[0], 7)
     self.assertEqual(t.points[0], 394927)
Пример #17
0
 def _check_timepoint(self, lbcode, expect_match=True):
     """Check conversion of an lbtim ib=0 (single time point) header."""
     hdr = _lbtim(ib=0, ic=1)
     point = nc_datetime(1970, 1, 1, hour=6, minute=0, second=0)
     ignored = nc_datetime(0, 0, 0)  # t2 plays no part in the result.
     result = _convert_time_coords(
         lbcode=lbcode, lbtim=hdr, epoch_hours_unit=_EPOCH_HOURS_UNIT,
         t1=point, t2=ignored, lbft=None)
     if not expect_match:
         expect_result = []
     else:
         time_coord = DimCoord(24 * 0.25, standard_name='time',
                               units=_EPOCH_HOURS_UNIT)
         expect_result = [(time_coord, None)]
     self.assertCoordsAndDimsListsMatch(result, expect_result)
Пример #18
0
    def test_t1_and_t2_orthogonal_lists(self):
        # lbtim ia = 0, ib = 1, ic = 1
        # with a vector of validity times (t1, mapped to dim 0) crossed
        # with an independent vector of forecast reference times (t2,
        # mapped to dim 1).
        lbcode = _lbcode(1)
        lbtim = _lbtim(ia=0, ib=1, ic=1)
        years = np.array([1970, 1971, 1972])
        hours = np.array([3, 6, 9, 12])
        # Validity time - vector of different values
        t1 = [nc_datetime(year, 1, 9, hour=12) for year in years]
        t1_dims = (0, )
        # Forecast reference time - vector of different values
        t2 = [nc_datetime(1970, 1, 9, hour=hour) for hour in hours]
        t2_dims = (1, )
        lbft = None  # Not used.

        coords_and_dims = _convert_time_coords(
            lbcode=lbcode,
            lbtim=lbtim,
            epoch_hours_unit=_EPOCH_HOURS_UNIT,
            t1=t1,
            t2=t2,
            lbft=lbft,
            t1_dims=t1_dims,
            t2_dims=t2_dims)

        # Expected coords.
        # Forecast period = validity minus reference, so it varies over
        # both dims (NOTE: these expected values assume 365-day years).
        points = [[(year - 1970) * 365 * 24 + 12 - hour for hour in hours]
                  for year in years]
        fp_coord = AuxCoord(points,
                            standard_name='forecast_period',
                            units='hours')
        # Validity time varies only along dim 0.
        points = (years - 1970) * 24 * 365 + (24 * 8) + 12
        time_coord = DimCoord(points,
                              standard_name='time',
                              units=_EPOCH_HOURS_UNIT)
        # Forecast reference time varies only along dim 1.
        points = (24 * 8) + hours
        fref_time_coord = DimCoord(points,
                                   standard_name='forecast_reference_time',
                                   units=_EPOCH_HOURS_UNIT)
        expected = [
            (fp_coord, (0, 1)),  # Spans dims 0 and 1.
            (time_coord, (0, )),
            (fref_time_coord, (1, ))
        ]
        self.assertCoordsAndDimsListsMatch(coords_and_dims, expected)
Пример #19
0
 def test(self):
     """Cross-section lbcode with ic=2: only the forecast reference
     time coordinate should be returned."""
     converted = _convert_time_coords(
         lbcode=_lbcode(value=31323),
         lbtim=_lbtim(ib=2, ic=2),
         epoch_hours_unit=_EPOCH_HOURS_UNIT,
         t1=nc_datetime(1970, 1, 3, hour=0, minute=0, second=0),
         t2=nc_datetime(1970, 1, 4, hour=0, minute=0, second=0),
         lbft=24 * 4)
     end_hours = 24 * 3
     expected = [(DimCoord([end_hours - 24 * 4],
                           standard_name='forecast_reference_time',
                           units=_EPOCH_HOURS_UNIT), None)]
     self.assertCoordsAndDimsListsMatch(converted, expected)
Пример #20
0
    def test_t1_multi_dim_list_t2_scalar(self):
        # Another case of lbtim ia = 0, ib = 1, ic = 1 but
        # with a 2-d array of validity times (t1) against a single
        # scalar forecast reference time (t2).
        lbcode = _lbcode(1)
        lbtim = _lbtim(ia=0, ib=1, ic=1)
        forecast_period_in_hours = np.array([0, 3, 6, 9, 12])
        years = np.array([1970, 1971, 1972])
        # Validity time - 2d array of different values
        t1 = [[
            nc_datetime(year, 1, 9, hour=(3 + fp))
            for fp in forecast_period_in_hours
        ] for year in years]
        t1_dims = (0, 1)
        # Forecast reference time - scalar (shared by every field)
        t2 = nc_datetime(1970, 1, 9, hour=3)
        lbft = None  # Not used.

        coords_and_dims = _convert_time_coords(
            lbcode=lbcode,
            lbtim=lbtim,
            epoch_hours_unit=_EPOCH_HOURS_UNIT,
            t1=t1,
            t2=t2,
            lbft=lbft,
            t1_dims=t1_dims)

        # Expected coords.
        # NOTE: expected values assume 365-day years (no leap days).
        fp_coord = AuxCoord([
            forecast_period_in_hours + (year - 1970) * 365 * 24
            for year in years
        ],
                            standard_name='forecast_period',
                            units='hours')
        time_coord = AuxCoord([(24 * 8) + 3 + forecast_period_in_hours +
                               (year - 1970) * 365 * 24 for year in years],
                              standard_name='time',
                              units=_EPOCH_HOURS_UNIT)
        fref_time_coord = DimCoord((24 * 8) + 3,
                                   standard_name='forecast_reference_time',
                                   units=_EPOCH_HOURS_UNIT)
        expected = [(fp_coord, (0, 1)), (time_coord, (0, 1)),
                    (fref_time_coord, None)]
        self.assertCoordsAndDimsListsMatch(coords_and_dims, expected)
Пример #21
0
    def test_t1_and_t2_nparrays(self):
        # lbtim ia = 0, ib = 1, ic = 1
        # with a vector of validity times (t1) and a same-length vector
        # of identical forecast reference times (t2), both supplied as
        # numpy object arrays rather than lists.
        lbcode = _lbcode(1)
        lbtim = _lbtim(ia=0, ib=1, ic=1)
        forecast_period_in_hours = np.array([0, 3, 6, 9, 12])
        # Validity time - vector of different values
        t1 = np.array([
            nc_datetime(1970, 1, 9, hour=(3 + fp))
            for fp in forecast_period_in_hours
        ])
        t1_dims = (0, )
        # Forecast reference time - vector of same values
        t2 = np.array([
            nc_datetime(1970, 1, 9, hour=3) for _ in forecast_period_in_hours
        ])
        t2_dims = (0, )
        lbft = None  # Not used.

        coords_and_dims = _convert_time_coords(
            lbcode=lbcode,
            lbtim=lbtim,
            epoch_hours_unit=_EPOCH_HOURS_UNIT,
            t1=t1,
            t2=t2,
            lbft=lbft,
            t1_dims=t1_dims,
            t2_dims=t2_dims)

        # Expected coords.
        # The constant t2 collapses to a scalar forecast_reference_time.
        fp_coord = DimCoord(forecast_period_in_hours,
                            standard_name='forecast_period',
                            units='hours')
        time_coord = DimCoord((24 * 8) + 3 + forecast_period_in_hours,
                              standard_name='time',
                              units=_EPOCH_HOURS_UNIT)
        fref_time_coord = DimCoord((24 * 8) + 3,
                                   standard_name='forecast_reference_time',
                                   units=_EPOCH_HOURS_UNIT)
        expected = [(fp_coord, (0, )), (time_coord, (0, )),
                    (fref_time_coord, None)]
        self.assertCoordsAndDimsListsMatch(coords_and_dims, expected)
Пример #22
0
 def _check_timepoint(self, lbcode, expect_match=True):
     """Exercise an lbtim ib=0 header holding one instantaneous time."""
     hdr = _lbtim(ib=0, ic=1)
     sample_time = nc_datetime(1970, 1, 1, hour=6, minute=0, second=0)
     unused = nc_datetime(0, 0, 0)  # not used in result
     converted = _convert_time_coords(
         lbcode=lbcode,
         lbtim=hdr,
         epoch_hours_unit=_EPOCH_HOURS_UNIT,
         t1=sample_time,
         t2=unused,
         lbft=None)
     expect_result = []
     if expect_match:
         expect_result = [(DimCoord(24 * 0.25,
                                    standard_name='time',
                                    units=_EPOCH_HOURS_UNIT), None)]
     self.assertCoordsAndDimsListsMatch(converted, expect_result)
Пример #23
0
    def test_t1_list_t2_scalar(self):
        # lbtim ib = 2 (mean over period): a vector of start times (t1)
        # against a single shared end time (t2).
        lbtim = _lbtim(ib=2, ic=1)
        lbcode = _lbcode(1)
        hours = np.array([0, 3, 6, 9])
        # Start times - vector
        t1 = [nc_datetime(1970, 1, 9, hour=9 + hour) for hour in hours]
        t1_dims = (0, )
        # End time - scalar
        t2 = nc_datetime(1970, 1, 11, hour=9)
        lbft = 3.0  # Sample period

        coords_and_dims = _convert_time_coords(
            lbcode=lbcode,
            lbtim=lbtim,
            epoch_hours_unit=_EPOCH_HOURS_UNIT,
            t1=t1,
            t2=t2,
            lbft=lbft,
            t1_dims=t1_dims)

        # Expected coords.
        # Forecast period: midpoints of [lbft - (t2-t1), lbft].
        points = lbft - (48 - hours) / 2.0
        bounds = np.array([lbft - (48 - hours),
                           np.ones_like(hours) * lbft]).transpose()
        fp_coord = AuxCoord(points,
                            standard_name='forecast_period',
                            units='hours',
                            bounds=bounds)
        # Time: midpoints of each [t1, t2] interval, in epoch hours.
        points = 9 * 24 + 9 + (hours / 2.0)
        bounds = np.array(
            [8 * 24 + 9 + hours,
             np.ones_like(hours) * 10 * 24 + 9]).transpose()
        time_coord = AuxCoord(points,
                              standard_name='time',
                              units=_EPOCH_HOURS_UNIT,
                              bounds=bounds)
        # Reference time: the scalar end time minus the sample period.
        points = 10 * 24 + 9 - lbft
        fref_time_coord = DimCoord(points,
                                   standard_name='forecast_reference_time',
                                   units=_EPOCH_HOURS_UNIT)
        expected = [(fp_coord, (0, )), (time_coord, (0, )),
                    (fref_time_coord, None)]
        self.assertCoordsAndDimsListsMatch(coords_and_dims, expected)
Пример #24
0
    def test_t1_scalar_t2_list(self):
        # lbtim ib = 3 (mean over years): a scalar start time (t1)
        # against a vector of end times (t2).
        lbtim = _lbtim(ib=3, ic=1)
        lbcode = _lbcode(1)
        years = np.array([1972, 1973, 1974])
        # Start times - scalar
        t1 = nc_datetime(1970, 1, 9, hour=9)
        # End time - vector
        t2 = [nc_datetime(year, 1, 11, hour=9) for year in years]
        t2_dims = (0, )
        lbft = 3.0  # Sample period

        coords_and_dims = _convert_time_coords(
            lbcode=lbcode,
            lbtim=lbtim,
            epoch_hours_unit=_EPOCH_HOURS_UNIT,
            t1=t1,
            t2=t2,
            lbft=lbft,
            t2_dims=t2_dims)

        # Expected coords (values assume 365-day years).
        points = np.ones_like(years) * lbft
        bounds = np.array(
            [lbft - ((years - 1970) * 365 * 24 + 2 * 24), points]).transpose()
        fp_coord = AuxCoord(points,
                            standard_name='forecast_period',
                            units='hours',
                            bounds=bounds)
        points = (years - 1970) * 365 * 24 + 10 * 24 + 9
        bounds = np.array([np.ones_like(points) * (8 * 24 + 9),
                           points]).transpose()
        # The time coordinate is an AuxCoord as the lower bound for each
        # cell is the same so it does not meet the monotonicity requirement.
        time_coord = AuxCoord(points,
                              standard_name='time',
                              units=_EPOCH_HOURS_UNIT,
                              bounds=bounds)
        fref_time_coord = DimCoord(points - lbft,
                                   standard_name='forecast_reference_time',
                                   units=_EPOCH_HOURS_UNIT)
        expected = [(fp_coord, (0, )), (time_coord, (0, )),
                    (fref_time_coord, (0, ))]
        # Bug fix: 'expected' was built but never compared against the
        # actual result, so this test could never fail.
        self.assertCoordsAndDimsListsMatch(coords_and_dims, expected)
Пример #25
0
    def test_t1_scalar_t2_list(self):
        # lbtim ib = 3 (mean over years): a scalar start time (t1)
        # against a vector of end times (t2).
        lbtim = _lbtim(ib=3, ic=1)
        lbcode = _lbcode(1)
        years = np.array([1972, 1973, 1974])
        # Start times - scalar
        t1 = nc_datetime(1970, 1, 9, hour=9)
        # End time - vector
        t2 = [nc_datetime(year, 1, 11, hour=9) for
              year in years]
        t2_dims = (0,)
        lbft = 3.0  # Sample period

        coords_and_dims = _convert_time_coords(
            lbcode=lbcode, lbtim=lbtim, epoch_hours_unit=_EPOCH_HOURS_UNIT,
            t1=t1, t2=t2, lbft=lbft,
            t2_dims=t2_dims)

        # Expected coords (values assume 365-day years).
        points = np.ones_like(years) * lbft
        bounds = np.array([lbft - ((years - 1970) * 365 * 24 + 2 * 24),
                           points]).transpose()
        fp_coord = AuxCoord(points,
                            standard_name='forecast_period',
                            units='hours',
                            bounds=bounds)
        points = (years - 1970) * 365 * 24 + 10 * 24 + 9
        bounds = np.array([np.ones_like(points) * (8 * 24 + 9),
                           points]).transpose()
        # The time coordinate is an AuxCoord as the lower bound for each
        # cell is the same so it does not meet the monotonicity requirement.
        time_coord = AuxCoord(points,
                              standard_name='time',
                              units=_EPOCH_HOURS_UNIT,
                              bounds=bounds)
        fref_time_coord = DimCoord(points - lbft,
                                   standard_name='forecast_reference_time',
                                   units=_EPOCH_HOURS_UNIT)
        expected = [(fp_coord, (0,)),
                    (time_coord, (0,)),
                    (fref_time_coord, (0,))]
        # Bug fix: 'expected' was built but never compared against the
        # actual result, so this test could never fail.
        self.assertCoordsAndDimsListsMatch(coords_and_dims, expected)
Пример #26
0
    def test_t1_list_t2_scalar(self):
        # lbtim ib = 2 (mean over period): a vector of start times (t1)
        # against a single shared end time (t2).
        lbtim = _lbtim(ib=2, ic=1)
        lbcode = _lbcode(1)
        hours = np.array([0, 3, 6, 9])
        # Start times - vector
        t1 = [nc_datetime(1970, 1, 9, hour=9 + hour) for
              hour in hours]
        t1_dims = (0,)
        # End time - scalar
        t2 = nc_datetime(1970, 1, 11, hour=9)
        lbft = 3.0  # Sample period

        coords_and_dims = _convert_time_coords(
            lbcode=lbcode, lbtim=lbtim, epoch_hours_unit=_EPOCH_HOURS_UNIT,
            t1=t1, t2=t2, lbft=lbft,
            t1_dims=t1_dims)

        # Expected coords.
        # Forecast period: midpoints of [lbft - (t2-t1), lbft].
        points = lbft - (48 - hours) / 2.0
        bounds = np.array([lbft - (48 - hours),
                           np.ones_like(hours) * lbft]).transpose()
        fp_coord = AuxCoord(points,
                            standard_name='forecast_period',
                            units='hours',
                            bounds=bounds)
        # Time: midpoints of each [t1, t2] interval, in epoch hours.
        points = 9 * 24 + 9 + (hours / 2.0)
        bounds = np.array([8 * 24 + 9 + hours,
                           np.ones_like(hours) * 10 * 24 + 9]).transpose()
        time_coord = AuxCoord(points,
                              standard_name='time',
                              units=_EPOCH_HOURS_UNIT,
                              bounds=bounds)
        # Reference time: the scalar end time minus the sample period.
        points = 10 * 24 + 9 - lbft
        fref_time_coord = DimCoord(points,
                                   standard_name='forecast_reference_time',
                                   units=_EPOCH_HOURS_UNIT)
        expected = [(fp_coord, (0,)),
                    (time_coord, (0,)),
                    (fref_time_coord, None)]
        self.assertCoordsAndDimsListsMatch(coords_and_dims, expected)
Пример #27
0
    def test_t1_and_t2_orthogonal_lists(self):
        # lbtim ia = 0, ib = 1, ic = 1
        # with a vector of validity times (t1, mapped to dim 0) crossed
        # with an independent vector of forecast reference times (t2,
        # mapped to dim 1).
        lbcode = _lbcode(1)
        lbtim = _lbtim(ia=0, ib=1, ic=1)
        years = np.array([1970, 1971, 1972])
        hours = np.array([3, 6, 9, 12])
        # Validity time - vector of different values
        t1 = [nc_datetime(year, 1, 9, hour=12) for year in years]
        t1_dims = (0,)
        # Forecast reference time - vector of different values
        t2 = [nc_datetime(1970, 1, 9, hour=hour) for hour in hours]
        t2_dims = (1,)
        lbft = None  # Not used.

        coords_and_dims = _convert_time_coords(
            lbcode=lbcode, lbtim=lbtim, epoch_hours_unit=_EPOCH_HOURS_UNIT,
            t1=t1, t2=t2, lbft=lbft,
            t1_dims=t1_dims, t2_dims=t2_dims)

        # Expected coords.
        # Forecast period = validity minus reference, so it varies over
        # both dims (NOTE: these expected values assume 365-day years).
        points = [[(year - 1970) * 365 * 24 + 12 - hour for hour
                   in hours] for year in years]
        fp_coord = AuxCoord(points,
                            standard_name='forecast_period',
                            units='hours')
        # Validity time varies only along dim 0.
        points = (years - 1970) * 24 * 365 + (24 * 8) + 12
        time_coord = DimCoord(points,
                              standard_name='time',
                              units=_EPOCH_HOURS_UNIT)
        # Forecast reference time varies only along dim 1.
        points = (24 * 8) + hours
        fref_time_coord = DimCoord(points,
                                   standard_name='forecast_reference_time',
                                   units=_EPOCH_HOURS_UNIT)
        expected = [(fp_coord, (0, 1)),     # Spans dims 0 and 1.
                    (time_coord, (0,)),
                    (fref_time_coord, (1,))]
        self.assertCoordsAndDimsListsMatch(coords_and_dims, expected)
Пример #28
0
 def _check_period(self, lbcode, expect_match=True):
     """Exercise an lbtim ib=2 (mean over a 24-hour period) header."""
     hdr = _lbtim(ib=2, ic=1)
     period_start = nc_datetime(1970, 1, 9, hour=3, minute=0, second=0)
     period_end = nc_datetime(1970, 1, 10, hour=3, minute=0, second=0)
     fp_hours = 2.0  # sample period, in hours
     result = _convert_time_coords(
         lbcode=lbcode, lbtim=hdr, epoch_hours_unit=_EPOCH_HOURS_UNIT,
         t1=period_start, t2=period_end, lbft=fp_hours)
     expect_result = []
     if expect_match:
         expect_result = [
             (DimCoord(24 * 9.125 - 2.0,
                       standard_name='forecast_reference_time',
                       units=_EPOCH_HOURS_UNIT), None),
             (DimCoord(standard_name='forecast_period', units='hours',
                       points=[-10.0], bounds=[-22.0, 2.0]), None),
             (DimCoord(standard_name='time', units=_EPOCH_HOURS_UNIT,
                       points=[24 * 8.625],
                       bounds=[24 * 8.125, 24 * 9.125]), None)]
     self.assertCoordsAndDimsListsMatch(result, expect_result)
Пример #29
0
    def test_t1_multi_dim_list_t2_scalar(self):
        # Another case of lbtim ia = 0, ib = 1, ic = 1 but
        # with a 2-d array of validity times (t1) against a single
        # scalar forecast reference time (t2).
        lbcode = _lbcode(1)
        lbtim = _lbtim(ia=0, ib=1, ic=1)
        forecast_period_in_hours = np.array([0, 3, 6, 9, 12])
        years = np.array([1970, 1971, 1972])
        # Validity time - 2d array of different values
        t1 = [[nc_datetime(year, 1, 9, hour=(3 + fp)) for fp in
               forecast_period_in_hours] for year in years]
        t1_dims = (0, 1)
        # Forecast reference time - scalar (shared by every field)
        t2 = nc_datetime(1970, 1, 9, hour=3)
        lbft = None  # Not used.

        coords_and_dims = _convert_time_coords(
            lbcode=lbcode, lbtim=lbtim, epoch_hours_unit=_EPOCH_HOURS_UNIT,
            t1=t1, t2=t2, lbft=lbft,
            t1_dims=t1_dims)

        # Expected coords.
        # NOTE: expected values assume 365-day years (no leap days).
        fp_coord = AuxCoord([forecast_period_in_hours +
                             (year - 1970) * 365 * 24 for year in years],
                            standard_name='forecast_period',
                            units='hours')
        time_coord = AuxCoord([(24 * 8) + 3 + forecast_period_in_hours +
                               (year - 1970) * 365 * 24 for year in years],
                              standard_name='time',
                              units=_EPOCH_HOURS_UNIT)
        fref_time_coord = DimCoord((24 * 8) + 3,
                                   standard_name='forecast_reference_time',
                                   units=_EPOCH_HOURS_UNIT)
        expected = [(fp_coord, (0, 1)),
                    (time_coord, (0, 1)),
                    (fref_time_coord, None)]
        self.assertCoordsAndDimsListsMatch(coords_and_dims, expected)
Пример #30
0
    def test_t1_list(self):
        """lbtim ia=0, ib=0, ic=1: only a series of times (t1) matters."""
        offsets = np.array([0, 3, 6, 9, 12])
        # Validity times - one per offset, mapped to dim 0.
        times = [nc_datetime(1970, 1, 9, hour=3 + off) for off in offsets]
        # A reference time must be supplied, but is ignored for ib=0.
        ignored_reference = nc_datetime(1970, 1, 9, hour=3)

        result = _convert_time_coords(
            lbcode=_lbcode(1), lbtim=_lbtim(ia=0, ib=0, ic=1),
            epoch_hours_unit=_EPOCH_HOURS_UNIT,
            t1=times, t2=ignored_reference, lbft=None,
            t1_dims=(0,))

        # A single 'time' coordinate is all that should come back.
        time_coord = DimCoord((24 * 8) + 3 + offsets,
                              standard_name='time',
                              units=_EPOCH_HOURS_UNIT)
        self.assertCoordsAndDimsListsMatch(result, [(time_coord, (0,))])
def _convert_collation(collation):
    """
    Converts a FieldCollation into the corresponding items of Cube
    metadata.

    Args:

    * collation:
        A FieldCollation object.

    Returns:
        A :class:`iris.fileformats.rules.ConversionMetadata` object.

    """
    # For all the scalar conversions all fields in the collation will
    # give the same result, so the choice is arbitrary.
    field = collation.fields[0]

    # All the "other" rules.
    (references, standard_name, long_name, units, attributes, cell_methods,
     dim_coords_and_dims, aux_coords_and_dims) = _all_other_rules(field)

    # Adjust any dimension bindings to account for the extra leading
    # dimensions added by the collation.
    if collation.vector_dims_shape:
        n_collation_dims = len(collation.vector_dims_shape)
        dim_coords_and_dims = _adjust_dims(dim_coords_and_dims,
                                           n_collation_dims)
        aux_coords_and_dims = _adjust_dims(aux_coords_and_dims,
                                           n_collation_dims)

    # "Normal" (non-cross-sectional) time values
    vector_headers = collation.element_arrays_and_dims
    # If the collation doesn't define a vector of values for a
    # particular header then it must be constant over all fields in the
    # collation. In which case it's safe to get the value from any field.
    t1, t1_dims = vector_headers.get('t1', (field.t1, ()))
    t2, t2_dims = vector_headers.get('t2', (field.t2, ()))
    lbft, lbft_dims = vector_headers.get('lbft', (field.lbft, ()))
    coords_and_dims = _convert_time_coords(field.lbcode, field.lbtim,
                                           field.time_unit('hours'), t1, t2,
                                           lbft, t1_dims, t2_dims, lbft_dims)
    # Dimensions to which a dimension coordinate has already been
    # assigned; shared between both _bind_coords calls below.
    dim_coord_dims = set()
    _bind_coords(coords_and_dims, dim_coord_dims, dim_coords_and_dims,
                 aux_coords_and_dims)

    # "Normal" (non-cross-sectional) vertical levels
    blev, blev_dims = vector_headers.get('blev', (field.blev, ()))
    lblev, lblev_dims = vector_headers.get('lblev', (field.lblev, ()))
    bhlev, bhlev_dims = vector_headers.get('bhlev', (field.bhlev, ()))
    bhrlev, bhrlev_dims = vector_headers.get('bhrlev', (field.bhrlev, ()))
    brsvd1, brsvd1_dims = vector_headers.get('brsvd1', (field.brsvd[0], ()))
    brsvd2, brsvd2_dims = vector_headers.get('brsvd2', (field.brsvd[1], ()))
    brlev, brlev_dims = vector_headers.get('brlev', (field.brlev, ()))
    # Find all the non-trivial dimension values
    # (an empty binding tuple is falsy, so filter(None, ...) drops it).
    dims = set(
        filter(None, [
            blev_dims, lblev_dims, bhlev_dims, bhrlev_dims, brsvd1_dims,
            brsvd2_dims, brlev_dims
        ]))
    # All vector vertical headers must agree on a single binding.
    if len(dims) > 1:
        raise TranslationError('Unsupported multiple values for vertical '
                               'dimension.')
    if dims:
        v_dims = dims.pop()
        if len(v_dims) > 1:
            raise TranslationError('Unsupported multi-dimension vertical '
                                   'headers.')
    else:
        v_dims = ()
    coords_and_dims, factories = _convert_vertical_coords(
        field.lbcode, field.lbvc, blev, lblev, field.stash, bhlev, bhrlev,
        brsvd1, brsvd2, brlev, v_dims)
    _bind_coords(coords_and_dims, dim_coord_dims, dim_coords_and_dims,
                 aux_coords_and_dims)

    # Realization (aka ensemble) (--> scalar coordinates)
    aux_coords_and_dims.extend(
        _convert_scalar_realization_coords(lbrsvd4=field.lbrsvd[3]))

    # Pseudo-level coordinate (--> scalar coordinates)
    aux_coords_and_dims.extend(
        _convert_scalar_pseudo_level_coords(lbuser5=field.lbuser[4]))

    return ConversionMetadata(factories, references, standard_name, long_name,
                              units, attributes, cell_methods,
                              dim_coords_and_dims, aux_coords_and_dims)
Пример #32
0
def _convert_collation(collation):
    """
    Converts a FieldCollation into the corresponding items of Cube
    metadata.

    Args:

    * collation:
        A FieldCollation object.

    Returns:
        A :class:`iris.fileformats.rules.ConversionMetadata` object.

    .. note:
        This is the 'loader.converter', in the control structure passed to the
        generic rules code, :meth:`iris.fileformats.rules.load_cubes`.

    """
    from iris.fileformats.rules import ConversionMetadata
    from iris.fileformats.pp_rules import (_convert_time_coords,
                                           _convert_vertical_coords,
                                           _convert_scalar_realization_coords,
                                           _convert_scalar_pseudo_level_coords,
                                           _all_other_rules)

    # For all the scalar conversions, all fields in the collation will
    # give the same result, so the choice is arbitrary.
    field = collation.fields[0]

    # Call "all other" rules.
    (references, standard_name, long_name, units, attributes, cell_methods,
     dim_coords_and_dims, aux_coords_and_dims) = _all_other_rules(field)

    # Adjust any dimension bindings to account for the extra leading
    # dimensions added by the collation.
    if collation.vector_dims_shape:

        def _adjust_dims(coords_and_dims, n_dims):
            """Shift every non-None dimension binding up by n_dims."""
            def adjust(dims):
                if dims is not None:
                    # NOTE(review): '+=' assumes the binding supports
                    # addition with an int (e.g. an integer or numpy
                    # array); a tuple binding would raise -- confirm
                    # what _all_other_rules produces here.
                    dims += n_dims
                return dims

            return [(coord, adjust(dims)) for coord, dims in coords_and_dims]

        n_collation_dims = len(collation.vector_dims_shape)
        dim_coords_and_dims = _adjust_dims(dim_coords_and_dims,
                                           n_collation_dims)
        aux_coords_and_dims = _adjust_dims(aux_coords_and_dims,
                                           n_collation_dims)

    # Dimensions to which we've already assigned dimension coordinates.
    dim_coord_dims = set()

    # Helper call to choose which coords are dimensions and which auxiliary.
    def _bind_coords(coords_and_dims, dim_coord_dims, dim_coords_and_dims,
                     aux_coords_and_dims):
        """Distribute coords into dim- and aux-coord lists, in place."""
        def key_func(item):
            # Sort by the _HINTS priority of the coord name; names not
            # in _HINTS get len(_HINTS), presumably sorting them after
            # all hinted names -- verify hint values are < len(_HINTS).
            return _HINTS.get(item[0].name(), len(_HINTS))

        # Target the first DimCoord for a dimension at dim_coords,
        # and target everything else at aux_coords.
        for coord, dims in sorted(coords_and_dims, key=key_func):
            if (isinstance(coord, DimCoord) and dims is not None
                    and len(dims) == 1 and dims[0] not in dim_coord_dims):
                dim_coords_and_dims.append((coord, dims))
                dim_coord_dims.add(dims[0])
            else:
                aux_coords_and_dims.append((coord, dims))

    # Call "time" rules.
    #
    # For "normal" (non-cross-sectional) time values.
    vector_headers = collation.element_arrays_and_dims
    # If the collation doesn't define a vector of values for a
    # particular header then it must be constant over all fields in the
    # collation. In which case it's safe to get the value from any field.
    t1, t1_dims = vector_headers.get('t1', (field.t1, ()))
    t2, t2_dims = vector_headers.get('t2', (field.t2, ()))
    lbft, lbft_dims = vector_headers.get('lbft', (field.lbft, ()))
    coords_and_dims = _convert_time_coords(field.lbcode, field.lbtim,
                                           field.time_unit('hours'), t1, t2,
                                           lbft, t1_dims, t2_dims, lbft_dims)
    # Bind resulting coordinates to dimensions, where suitable.
    _bind_coords(coords_and_dims, dim_coord_dims, dim_coords_and_dims,
                 aux_coords_and_dims)

    # Call "vertical" rules.
    #
    # "Normal" (non-cross-sectional) vertical levels
    blev, blev_dims = vector_headers.get('blev', (field.blev, ()))
    lblev, lblev_dims = vector_headers.get('lblev', (field.lblev, ()))
    bhlev, bhlev_dims = vector_headers.get('bhlev', (field.bhlev, ()))
    bhrlev, bhrlev_dims = vector_headers.get('bhrlev', (field.bhrlev, ()))
    brsvd1, brsvd1_dims = vector_headers.get('brsvd1', (field.brsvd[0], ()))
    brsvd2, brsvd2_dims = vector_headers.get('brsvd2', (field.brsvd[1], ()))
    brlev, brlev_dims = vector_headers.get('brlev', (field.brlev, ()))
    # Find all the non-trivial dimension values
    # (empty binding tuples are falsy and dropped by filter(None, ...)).
    dims = set(
        filter(None, [
            blev_dims, lblev_dims, bhlev_dims, bhrlev_dims, brsvd1_dims,
            brsvd2_dims, brlev_dims
        ]))
    # All vector vertical headers must agree on a single binding.
    if len(dims) > 1:
        raise TranslationError('Unsupported multiple values for vertical '
                               'dimension.')
    if dims:
        v_dims = dims.pop()
        if len(v_dims) > 1:
            raise TranslationError('Unsupported multi-dimension vertical '
                                   'headers.')
    else:
        v_dims = ()
    coords_and_dims, factories = _convert_vertical_coords(
        field.lbcode, field.lbvc, blev, lblev, field.stash, bhlev, bhrlev,
        brsvd1, brsvd2, brlev, v_dims)
    # Bind resulting coordinates to dimensions, where suitable.
    _bind_coords(coords_and_dims, dim_coord_dims, dim_coords_and_dims,
                 aux_coords_and_dims)

    # Realization (aka ensemble) (--> scalar coordinates)
    aux_coords_and_dims.extend(
        _convert_scalar_realization_coords(lbrsvd4=field.lbrsvd[3]))

    # Pseudo-level coordinate (--> scalar coordinates)
    aux_coords_and_dims.extend(
        _convert_scalar_pseudo_level_coords(lbuser5=field.lbuser[4]))

    return ConversionMetadata(factories, references, standard_name, long_name,
                              units, attributes, cell_methods,
                              dim_coords_and_dims, aux_coords_and_dims)
Пример #33
0
def _convert_collation(collation):
    """
    Convert a FieldCollation into the corresponding items of Cube
    metadata.

    Args:

    * collation:
        A FieldCollation object.

    Returns:
        A :class:`iris.fileformats.rules.ConversionMetadata` object.

    """
    # Every scalar-only conversion gives an identical answer for each
    # field in the collation, so the first field is as good as any.
    field = collation.fields[0]

    # Apply all the "other" (non-time, non-vertical) rules.
    (references, standard_name, long_name, units, attributes, cell_methods,
     dim_coords_and_dims, aux_coords_and_dims) = _all_other_rules(field)

    # Shift existing dimension bindings along to make room for the
    # leading dimensions that the collation introduces.
    if collation.vector_dims_shape:
        extra_leading_dims = len(collation.vector_dims_shape)
        dim_coords_and_dims = _adjust_dims(dim_coords_and_dims,
                                           extra_leading_dims)
        aux_coords_and_dims = _adjust_dims(aux_coords_and_dims,
                                           extra_leading_dims)

    vector_headers = collation.element_arrays_and_dims

    def _header(name, scalar_default):
        # A header absent from the vector set is constant over the whole
        # collation, so the value taken from any single field is valid.
        return vector_headers.get(name, (scalar_default, ()))

    # "Normal" (non-cross-sectional) time values.
    t1, t1_dims = _header('t1', field.t1)
    t2, t2_dims = _header('t2', field.t2)
    lbft, lbft_dims = _header('lbft', field.lbft)
    coords_and_dims = _convert_time_coords(field.lbcode, field.lbtim,
                                           field.time_unit('hours'),
                                           t1, t2, lbft,
                                           t1_dims, t2_dims, lbft_dims)
    # Dimensions already claimed by a dimension coordinate.
    dim_coord_dims = set()
    _bind_coords(coords_and_dims, dim_coord_dims, dim_coords_and_dims,
                 aux_coords_and_dims)

    # "Normal" (non-cross-sectional) vertical levels.
    blev, blev_dims = _header('blev', field.blev)
    lblev, lblev_dims = _header('lblev', field.lblev)
    bhlev, bhlev_dims = _header('bhlev', field.bhlev)
    bhrlev, bhrlev_dims = _header('bhrlev', field.bhrlev)
    brsvd1, brsvd1_dims = _header('brsvd1', field.brsvd[0])
    brsvd2, brsvd2_dims = _header('brsvd2', field.brsvd[1])
    brlev, brlev_dims = _header('brlev', field.brlev)
    # Collect the distinct non-empty dimension bindings: all vector
    # vertical headers must vary over one and the same dimension.
    vertical_bindings = {binding
                         for binding in (blev_dims, lblev_dims, bhlev_dims,
                                         bhrlev_dims, brsvd1_dims,
                                         brsvd2_dims, brlev_dims)
                         if binding}
    if len(vertical_bindings) > 1:
        raise TranslationError('Unsupported multiple values for vertical '
                               'dimension.')
    v_dims = vertical_bindings.pop() if vertical_bindings else ()
    if len(v_dims) > 1:
        raise TranslationError('Unsupported multi-dimension vertical '
                               'headers.')
    coords_and_dims, factories = _convert_vertical_coords(field.lbcode,
                                                          field.lbvc,
                                                          blev, lblev,
                                                          field.stash,
                                                          bhlev, bhrlev,
                                                          brsvd1, brsvd2,
                                                          brlev, v_dims)
    _bind_coords(coords_and_dims, dim_coord_dims, dim_coords_and_dims,
                 aux_coords_and_dims)

    # Realization (aka ensemble) --> scalar coordinates.
    aux_coords_and_dims.extend(_convert_scalar_realization_coords(
        lbrsvd4=field.lbrsvd[3]))

    # Pseudo-level coordinate --> scalar coordinates.
    aux_coords_and_dims.extend(_convert_scalar_pseudo_level_coords(
        lbuser5=field.lbuser[4]))

    return ConversionMetadata(factories, references, standard_name, long_name,
                              units, attributes, cell_methods,
                              dim_coords_and_dims, aux_coords_and_dims)
Пример #34
0
def _convert_collation(collation):
    """
    Converts a FieldCollation into the corresponding items of Cube
    metadata.

    Args:

    * collation:
        A FieldCollation object.

    Returns:
        A :class:`iris.fileformats.rules.ConversionMetadata` object.

    .. note:
        This is the 'loader.converter', in the control structure passed to the
        generic rules code, :meth:`iris.fileformats.rules.load_cubes`.

    """
    from iris.fileformats.rules import ConversionMetadata
    from iris.fileformats.pp_rules import (_convert_time_coords,
                                           _convert_vertical_coords,
                                           _convert_scalar_realization_coords,
                                           _convert_scalar_pseudo_level_coords,
                                           _all_other_rules)

    # For all the scalar conversions, all fields in the collation will
    # give the same result, so the choice is arbitrary.
    field = collation.fields[0]

    # Call "all other" rules.
    (references, standard_name, long_name, units, attributes, cell_methods,
     dim_coords_and_dims, aux_coords_and_dims) = _all_other_rules(field)

    # Adjust any dimension bindings to account for the extra leading
    # dimensions added by the collation.
    if collation.vector_dims_shape:
        def _adjust_dims(coords_and_dims, n_dims):
            """Shift every non-None dimension binding up by n_dims."""
            def adjust(dims):
                if dims is not None:
                    # NOTE(review): '+=' assumes the binding supports
                    # addition with an int (integer or numpy array); a
                    # tuple binding would raise -- confirm upstream.
                    dims += n_dims
                return dims
            return [(coord, adjust(dims)) for coord, dims in coords_and_dims]

        n_collation_dims = len(collation.vector_dims_shape)
        dim_coords_and_dims = _adjust_dims(dim_coords_and_dims,
                                           n_collation_dims)
        aux_coords_and_dims = _adjust_dims(aux_coords_and_dims,
                                           n_collation_dims)

    # Dimensions to which we've already assigned dimension coordinates.
    dim_coord_dims = set()

    # Helper call to choose which coords are dimensions and which auxiliary.
    def _bind_coords(coords_and_dims, dim_coord_dims, dim_coords_and_dims,
                     aux_coords_and_dims):
        """Distribute coords into dim- and aux-coord lists, in place."""
        def key_func(item):
            # Sort by the _HINTS priority of the coord name; names not
            # in _HINTS get len(_HINTS), presumably sorting them after
            # all hinted names -- verify hint values are < len(_HINTS).
            return _HINTS.get(item[0].name(), len(_HINTS))
        # Target the first DimCoord for a dimension at dim_coords,
        # and target everything else at aux_coords.
        for coord, dims in sorted(coords_and_dims, key=key_func):
            if (isinstance(coord, DimCoord) and dims is not None and
                    len(dims) == 1 and dims[0] not in dim_coord_dims):
                dim_coords_and_dims.append((coord, dims))
                dim_coord_dims.add(dims[0])
            else:
                aux_coords_and_dims.append((coord, dims))

    # Call "time" rules.
    #
    # For "normal" (non-cross-sectional) time values.
    vector_headers = collation.element_arrays_and_dims
    # If the collation doesn't define a vector of values for a
    # particular header then it must be constant over all fields in the
    # collation. In which case it's safe to get the value from any field.
    t1, t1_dims = vector_headers.get('t1', (field.t1, ()))
    t2, t2_dims = vector_headers.get('t2', (field.t2, ()))
    lbft, lbft_dims = vector_headers.get('lbft', (field.lbft, ()))
    coords_and_dims = _convert_time_coords(field.lbcode, field.lbtim,
                                           field.time_unit('hours'),
                                           t1, t2, lbft,
                                           t1_dims, t2_dims, lbft_dims)
    # Bind resulting coordinates to dimensions, where suitable.
    _bind_coords(coords_and_dims, dim_coord_dims, dim_coords_and_dims,
                 aux_coords_and_dims)

    # Call "vertical" rules.
    #
    # "Normal" (non-cross-sectional) vertical levels
    blev, blev_dims = vector_headers.get('blev', (field.blev, ()))
    lblev, lblev_dims = vector_headers.get('lblev', (field.lblev, ()))
    bhlev, bhlev_dims = vector_headers.get('bhlev', (field.bhlev, ()))
    bhrlev, bhrlev_dims = vector_headers.get('bhrlev', (field.bhrlev, ()))
    brsvd1, brsvd1_dims = vector_headers.get('brsvd1', (field.brsvd[0], ()))
    brsvd2, brsvd2_dims = vector_headers.get('brsvd2', (field.brsvd[1], ()))
    brlev, brlev_dims = vector_headers.get('brlev', (field.brlev, ()))
    # Find all the non-trivial dimension values
    # (empty binding tuples are falsy and dropped by filter(None, ...)).
    dims = set(filter(None, [blev_dims, lblev_dims, bhlev_dims, bhrlev_dims,
                             brsvd1_dims, brsvd2_dims, brlev_dims]))
    # All vector vertical headers must agree on a single binding.
    if len(dims) > 1:
        raise TranslationError('Unsupported multiple values for vertical '
                               'dimension.')
    if dims:
        v_dims = dims.pop()
        if len(v_dims) > 1:
            raise TranslationError('Unsupported multi-dimension vertical '
                                   'headers.')
    else:
        v_dims = ()
    coords_and_dims, factories = _convert_vertical_coords(field.lbcode,
                                                          field.lbvc,
                                                          blev, lblev,
                                                          field.stash,
                                                          bhlev, bhrlev,
                                                          brsvd1, brsvd2,
                                                          brlev, v_dims)
    # Bind resulting coordinates to dimensions, where suitable.
    _bind_coords(coords_and_dims, dim_coord_dims, dim_coords_and_dims,
                 aux_coords_and_dims)

    # Realization (aka ensemble) (--> scalar coordinates)
    aux_coords_and_dims.extend(_convert_scalar_realization_coords(
        lbrsvd4=field.lbrsvd[3]))

    # Pseudo-level coordinate (--> scalar coordinates)
    aux_coords_and_dims.extend(_convert_scalar_pseudo_level_coords(
        lbuser5=field.lbuser[4]))

    return ConversionMetadata(factories, references, standard_name, long_name,
                              units, attributes, cell_methods,
                              dim_coords_and_dims, aux_coords_and_dims)