Esempio n. 1
0
 def test_all_scalar(self):
     """A collation with no vector dims yields only scalar time coords."""
     field = self._field()
     field.lbtim = 11
     field.t1 = cftime.datetime(1970, 1, 1, 18)
     field.t2 = cftime.datetime(1970, 1, 1, 12)
     collation = mock.Mock(fields=[field], vector_dims_shape=(),
                           element_arrays_and_dims={})
     metadata = convert_collation(collation)
     self._check_phenomenon(metadata)
     self.assertEqual(metadata.dim_coords_and_dims,
                      [(LONGITUDE, 1), (LATITUDE, 0)])
     hours = "hours since epoch"
     expected_aux = [
         (iris.coords.DimCoord(18, "time", units=hours), None),
         (iris.coords.DimCoord(12, "forecast_reference_time", units=hours),
          None),
         (iris.coords.DimCoord(6, "forecast_period", units="hours"), None),
     ]
     self.assertEqual(metadata.aux_coords_and_dims, expected_aux)
Esempio n. 2
0
    def test_365_calendar(self):
        """A PP field on the 365-day calendar converts to a single 'time'
        coordinate whose units carry the '365_day' calendar."""
        # Minimal mock field: lbtim encodes time-processing flags, t1/t2
        # are the validity / data times.
        f = mock.MagicMock(lbtim=SplittableInt(4, {
            'ia': 2,
            'ib': 1,
            'ic': 0
        }),
                           lbyr=2013,
                           lbmon=1,
                           lbdat=1,
                           lbhr=12,
                           lbmin=0,
                           lbsec=0,
                           t1=cftime.datetime(2013, 1, 1, 12, 0, 0),
                           t2=cftime.datetime(2013, 1, 2, 12, 0, 0),
                           spec=PPField3)
        # Bind the real time_unit implementation onto the mock so convert()
        # exercises genuine calendar handling rather than a MagicMock stub.
        f.time_unit = six.create_bound_method(PPField3.time_unit, f)
        f.calendar = cf_units.CALENDAR_365_DAY
        (factories, references, standard_name, long_name, units, attributes,
         cell_methods, dim_coords_and_dims, aux_coords_and_dims) = convert(f)

        def is_t_coord(coord_and_dims):
            # Keep only the 'time' coordinate from (coord, dims) pairs.
            coord, dims = coord_and_dims
            return coord.standard_name == 'time'

        coords_and_dims = list(filter(is_t_coord, aux_coords_and_dims))
        # Exactly one time coordinate, on the T axis, with 365_day calendar.
        self.assertEqual(len(coords_and_dims), 1)
        coord, dims = coords_and_dims[0]
        self.assertEqual(guess_coord_axis(coord), 'T')
        self.assertEqual(coord.units.calendar, '365_day')
Esempio n. 3
0
 def test_vector_t2(self):
     """A vector of t2 values becomes a forecast_reference_time dim coord."""
     field = self._field()
     field.lbtim = 11
     field.t1 = cftime.datetime(1970, 1, 1, 18)
     t2_values = [cftime.datetime(1970, 1, 1, hour)
                  for hour in (12, 15, 18)]
     collation = mock.Mock(fields=[field],
                           vector_dims_shape=(3,),
                           element_arrays_and_dims={'t2': (t2_values, [0])})
     metadata = convert_collation(collation)
     self._check_phenomenon(metadata)
     hours = 'hours since epoch'
     frt = iris.coords.DimCoord([12, 15, 18], 'forecast_reference_time',
                                units=hours)
     self.assertEqual(metadata.dim_coords_and_dims,
                      [(LONGITUDE, 2), (LATITUDE, 1), (frt, (0,))])
     time_coord = iris.coords.DimCoord(18, 'time', units=hours)
     period = iris.coords.DimCoord([6, 3, 0.], 'forecast_period',
                                   units='hours')
     self.assertEqual(metadata.aux_coords_and_dims,
                      [(time_coord, None), (period, (0,))])
Esempio n. 4
0
 def test_vector_lbft(self):
     """A vector of lbft values varies the forecast_reference_time.

     Bug fix: the dim-coordinate expectation was built and then
     immediately overwritten by the aux-coordinate expectation without
     ever being asserted, so the dim coords went unchecked.
     """
     field = self._field()
     field.lbtim = 21
     field.t1 = cftime.datetime(1970, 1, 1, 12)
     field.t2 = cftime.datetime(1970, 1, 1, 18)
     lbft = ([18, 15, 12], [0])
     collation = mock.Mock(fields=[field],
                           vector_dims_shape=(3, ),
                           element_arrays_and_dims={'lbft': lbft})
     metadata = convert_collation(collation)
     self._check_phenomenon(metadata)
     coords_and_dims = [(LONGITUDE, 2), (LATITUDE, 1),
                        (iris.coords.DimCoord([0, 3, 6],
                                              'forecast_reference_time',
                                              units='hours since epoch'),
                         (0, ))]
     # Previously missing: actually check the dim coordinates.
     self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims)
     coords_and_dims = [(iris.coords.DimCoord(15,
                                              'time',
                                              units='hours since epoch',
                                              bounds=[[12, 18]]), None),
                        (iris.coords.DimCoord([15, 12, 9],
                                              'forecast_period',
                                              units='hours',
                                              bounds=[[12, 18], [9, 15],
                                                      [6, 12]]), (0, ))]
     self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims)
Esempio n. 5
0
 def test_vector_t1(self):
     """A vector of t1 values becomes the 'time' dim coordinate."""
     field = self._field()
     field.lbtim = 11
     field.t2 = cftime.datetime(1970, 1, 1, 12)
     t1_values = [cftime.datetime(1970, 1, day, hour)
                  for day, hour in ((1, 18), (2, 0), (2, 6))]
     collation = mock.Mock(fields=[field], vector_dims_shape=(3,),
                           element_arrays_and_dims={'t1': (t1_values, [0])})
     metadata = convert_collation(collation)
     self._check_phenomenon(metadata)
     hours = 'hours since epoch'
     time_coord = iris.coords.DimCoord([18, 24, 30], 'time', units=hours)
     self.assertEqual(metadata.dim_coords_and_dims,
                      [(LONGITUDE, 2), (LATITUDE, 1), (time_coord, (0,))])
     frt = iris.coords.DimCoord(12, 'forecast_reference_time', units=hours)
     period = iris.coords.DimCoord([6, 12, 18], 'forecast_period',
                                   units='hours')
     self.assertEqual(metadata.aux_coords_and_dims,
                      [(frt, None), (period, (0,))])
Esempio n. 6
0
 def test_vector_t1_and_t2(self):
     """Vectors of t1 (dim 1) and t2 (dim 0) give two time dim coords."""
     field = self._field()
     field.lbtim = 11
     t1_values = [cftime.datetime(1970, 1, 2, hour)
                  for hour in (6, 9, 12)]
     t2_values = [cftime.datetime(1970, 1, 1, 12),
                  cftime.datetime(1970, 1, 2, 0)]
     collation = mock.Mock(
         fields=[field], vector_dims_shape=(2, 3),
         element_arrays_and_dims={'t1': (t1_values, [1]),
                                  't2': (t2_values, [0])})
     metadata = convert_collation(collation)
     self._check_phenomenon(metadata)
     hours = 'hours since epoch'
     time_coord = iris.coords.DimCoord([30, 33, 36], 'time', units=hours)
     frt = iris.coords.DimCoord([12, 24], 'forecast_reference_time',
                                units=hours)
     self.assertEqual(metadata.dim_coords_and_dims,
                      [(LONGITUDE, 3), (LATITUDE, 2),
                       (time_coord, (1,)), (frt, (0,))])
     period = iris.coords.AuxCoord([[18, 21, 24], [6, 9, 12]],
                                   'forecast_period', units='hours')
     self.assertEqual(metadata.aux_coords_and_dims, [(period, (0, 1))])
Esempio n. 7
0
 def test_vector_lbft(self):
     """A vector of lbft values varies the forecast_reference_time.

     Bug fix: the dim-coordinate expectation was constructed but never
     asserted -- it was immediately overwritten by the aux-coordinate
     expectation -- so the dim coords went unchecked.
     """
     field = self._field()
     field.lbtim = 21
     field.t1 = cftime.datetime(1970, 1, 1, 12)
     field.t2 = cftime.datetime(1970, 1, 1, 18)
     lbft = ([18, 15, 12], [0])
     collation = mock.Mock(fields=[field], vector_dims_shape=(3,),
                           element_arrays_and_dims={'lbft': lbft})
     metadata = convert_collation(collation)
     self._check_phenomenon(metadata)
     coords_and_dims = [(LONGITUDE, 2),
                        (LATITUDE, 1),
                        (iris.coords.DimCoord([0, 3, 6],
                                              'forecast_reference_time',
                                              units='hours since epoch'),
                         (0,))]
     # Previously missing: actually check the dim coordinates.
     self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims)
     coords_and_dims = [
         (iris.coords.DimCoord(15, 'time', units='hours since epoch',
                               bounds=[[12, 18]]), None),
         (iris.coords.DimCoord([15, 12, 9], 'forecast_period',
                               units='hours',
                               bounds=[[12, 18], [9, 15], [6, 12]]),
          (0,))
     ]
     self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims)
Esempio n. 8
0
 def test_vector_t1_and_t2(self):
     """t1 varying on dim 1 and t2 on dim 0 both become dim coords."""
     field = self._field()
     field.lbtim = 11
     t1 = ([cftime.datetime(1970, 1, 2, 6),
            cftime.datetime(1970, 1, 2, 9),
            cftime.datetime(1970, 1, 2, 12)], [1])
     t2 = ([cftime.datetime(1970, 1, 1, 12),
            cftime.datetime(1970, 1, 2, 0)], [0])
     collation = mock.Mock(fields=[field],
                           vector_dims_shape=(2, 3),
                           element_arrays_and_dims={'t1': t1, 't2': t2})
     metadata = convert_collation(collation)
     self._check_phenomenon(metadata)
     epoch_hours = 'hours since epoch'
     expected_dims = [
         (LONGITUDE, 3),
         (LATITUDE, 2),
         (iris.coords.DimCoord([30, 33, 36], 'time', units=epoch_hours),
          (1,)),
         (iris.coords.DimCoord([12, 24], 'forecast_reference_time',
                               units=epoch_hours), (0,)),
     ]
     self.assertEqual(metadata.dim_coords_and_dims, expected_dims)
     expected_aux = [
         (iris.coords.AuxCoord([[18, 21, 24], [6, 9, 12]],
                               'forecast_period', units='hours'), (0, 1)),
     ]
     self.assertEqual(metadata.aux_coords_and_dims, expected_aux)
Esempio n. 9
0
    def axisinfo(unit, axis):
        """
        Return the :class:`~matplotlib.units.AxisInfo` for *unit*.

        *unit* is a (calendar, date_unit, date_type) tuple.
        The *axis* argument is required by the matplotlib interface but
        is not used.
        """
        calendar, date_unit, date_type = unit

        majloc = NetCDFTimeDateLocator(4, calendar=calendar,
                                       date_unit=date_unit)
        majfmt = NetCDFTimeDateFormatter(majloc, calendar=calendar,
                                         time_units=date_unit)
        # Default axis limits span 2000-01-01 .. 2010-01-01, built with
        # whichever datetime flavour the converter is configured for.
        if date_type is CalendarDateTime:
            limits = tuple(
                CalendarDateTime(cftime.datetime(year, 1, 1),
                                 calendar=calendar)
                for year in (2000, 2010))
        else:
            limits = (date_type(2000, 1, 1), date_type(2010, 1, 1))
        return munits.AxisInfo(majloc=majloc, majfmt=majfmt, label='',
                               default_limits=limits)
Esempio n. 10
0
def _get_time_range(year, months, calendar='proleptic_gregorian'):
    """Create start and end dates for a given year and list of months.

    Both dates are round-tripped through a cftime time unit so they are
    expressed as datetimes of the requested *calendar*.  If the list of
    months wraps into the following year (for example
    ``[11, 12, 1, 2, 3, 4]``) the end date's year is incremented.

    :param int year: year of the start date.
    :param list months: months spanned, e.g. ``[12, 1, 2]``.
    :param str calendar: cftime calendar name.
    :returns: (start, end) datetime pair in the given calendar.
    """
    cdftime = cftime.utime('hours since 1950-01-01 00:00:00',
                           calendar=calendar)
    # Round-trip via the numeric unit so the result is a datetime of the
    # target calendar rather than a plain cftime.datetime.
    start_date = cftime.datetime(year, months[0], 1)
    t1 = cdftime.num2date(cdftime.date2num(start_date))

    end_year = year
    end_month = months[-1] + 1
    # The period ends in the following year when the month sequence wraps
    # (e.g. Nov..Apr), runs past December, or covers a full year.
    if end_month < months[0] or end_month == 13 or len(months) >= 12:
        end_year = year + 1
    if end_month == 13:
        end_month = 1
    end_date = cftime.datetime(end_year, end_month, 1)
    t2 = cdftime.num2date(cdftime.date2num(end_date))
    return t1, t2
Esempio n. 11
0
 def test_single(self):
     """discard_microsecond zeroes the microsecond of a lone datetime."""
     for calendar in self.calendars:
         original = cftime.datetime(**self.kwargs, calendar=calendar)
         expected = cftime.datetime(**self.kwargs, calendar=calendar,
                                    microsecond=0)
         self.assertEqual(expected, discard_microsecond(original))
Esempio n. 12
0
def test_TimeConfig_interval_average_endpoint():
    """An interval-average TimeConfig builds an IntervalAveragedTimes
    container with matching frequency, initial time and endpoint flag."""
    initial = datetime(2020, 1, 1)
    config = TimeConfig(frequency=3600, kind="interval-average",
                        includes_lower=True)
    expected = IntervalAveragedTimes(timedelta(seconds=3600), initial,
                                     includes_lower=True)
    assert config.time_container(initial) == expected
Esempio n. 13
0
 def test_360_day_calendar_nd_CalendarDateTime(self):
     """default_units handles an nd-array of CalendarDateTime objects."""
     calendar = '360_day'
     val = np.array([
         [CalendarDateTime(cftime.datetime(2014, 8, day), calendar)]
         for day in (12, 13)])
     result = NetCDFTimeConverter().default_units(val, None)
     self.assertEqual(result,
                      (calendar, 'days since 2000-01-01', CalendarDateTime))
Esempio n. 14
0
 def test_t1(self):
     """Fields differing only in year collate t1 along dim 0."""
     fields = [_make_field(lbyr=year) for year in (2013, 2014)]
     result = FieldCollation(fields).element_arrays_and_dims
     self.assertEqual(list(result.keys()), ['t1'])
     values, dims = result['t1']
     self.assertArrayEqual(values,
                           [datetime(2013, 1, 1), datetime(2014, 1, 1)])
     self.assertEqual(dims, (0,))
Esempio n. 15
0
 def test_365_day_calendar(self):
     """_fixup_dates converts 365_day minute offsets to cftime datetimes."""
     unit = Unit('minutes since 2000-02-25 00:00:00', calendar='365_day')
     coord = AuxCoord([30, 60, 150], 'time', units=unit)
     result = _fixup_dates(coord, coord.points)
     expected = [cftime.datetime(2000, 2, 25, hour, minute)
                 for hour, minute in ((0, 30), (1, 0), (2, 30))]
     self.assertArrayEqual([cdt.datetime for cdt in result], expected)
Esempio n. 16
0
    def tick_values(self, vmin, vmax):
        """Compute tick positions between *vmin* and *vmax*.

        Chooses a resolution (YEARLY .. SECONDLY) from the axis span,
        builds candidate tick datetimes at that resolution, and returns
        them converted back to numbers in ``self.date_unit``.
        """
        # Guard against degenerate (equal or reversed) axis limits.
        vmin, vmax = mtransforms.nonsingular(vmin,
                                             vmax,
                                             expander=1e-7,
                                             tiny=1e-13)

        self.ndays = float(abs(vmax - vmin))

        utime = cftime.utime(self.date_unit, self.calendar)
        lower = utime.num2date(vmin)
        upper = utime.num2date(vmax)

        resolution, n = self.compute_resolution(vmin, vmax, lower, upper)

        if resolution == 'YEARLY':
            # TODO START AT THE BEGINNING OF A DECADE/CENTURY/MILLENIUM as
            # appropriate.
            years = self._max_n_locator.tick_values(lower.year, upper.year)
            ticks = [cftime.datetime(int(year), 1, 1) for year in years]
        elif resolution == 'MONTHLY':
            # TODO START AT THE BEGINNING OF A DECADE/CENTURY/MILLENIUM as
            # appropriate.
            months_offset = self._max_n_locator.tick_values(0, n)
            ticks = []
            for offset in months_offset:
                # Offsets may run past December; fold into year + month.
                year = lower.year + np.floor((lower.month + offset) / 12)
                month = ((lower.month + offset) % 12) + 1
                ticks.append(cftime.datetime(int(year), int(month), 1))
        elif resolution == 'DAILY':
            # TODO: It would be great if this favoured multiples of 7.
            days = self._max_n_locator_days.tick_values(vmin, vmax)
            ticks = [utime.num2date(dt) for dt in days]
        elif resolution == 'HOURLY':
            # Locate ticks in a fixed hour-based unit, then convert back.
            hour_unit = 'hours since 2000-01-01'
            hour_utime = cftime.utime(hour_unit, self.calendar)
            in_hours = hour_utime.date2num([lower, upper])
            hours = self._max_n_locator.tick_values(in_hours[0], in_hours[1])
            ticks = [hour_utime.num2date(dt) for dt in hours]
        elif resolution == 'MINUTELY':
            minute_unit = 'minutes since 2000-01-01'
            minute_utime = cftime.utime(minute_unit, self.calendar)
            in_minutes = minute_utime.date2num([lower, upper])
            minutes = self._max_n_locator.tick_values(in_minutes[0],
                                                      in_minutes[1])
            ticks = [minute_utime.num2date(dt) for dt in minutes]
        elif resolution == 'SECONDLY':
            second_unit = 'seconds since 2000-01-01'
            second_utime = cftime.utime(second_unit, self.calendar)
            in_seconds = second_utime.date2num([lower, upper])
            seconds = self._max_n_locator.tick_values(in_seconds[0],
                                                      in_seconds[1])
            ticks = [second_utime.num2date(dt) for dt in seconds]
        else:
            msg = 'Resolution {} not implemented yet.'.format(resolution)
            raise ValueError(msg)

        # Express the tick datetimes as numbers in the axis' own unit.
        return utime.date2num(ticks)
Esempio n. 17
0
def test_generate_range_on_edges():
    """generate_range with a year-start offset yields every Jan 1,
    including both endpoints."""
    start, end = datetime(2000, 1, 1), datetime(2100, 1, 1)
    result = list(generate_range(start, end, to_offset("AS")))
    expected = [datetime(year, 1, 1)
                for year in range(start.year, end.year + 1)]
    assert result == expected
Esempio n. 18
0
    def tick_values(self, vmin, vmax):
        """Compute tick positions between *vmin* and *vmax*.

        Chooses a resolution (YEARLY .. SECONDLY) from the axis span,
        builds candidate tick datetimes at that resolution, and returns
        them converted back to numbers in ``self.date_unit``.
        """
        # Guard against degenerate (equal or reversed) axis limits.
        vmin, vmax = mtransforms.nonsingular(vmin, vmax, expander=1e-7,
                                             tiny=1e-13)

        self.ndays = float(abs(vmax - vmin))

        utime = cftime.utime(self.date_unit, self.calendar)
        lower = utime.num2date(vmin)
        upper = utime.num2date(vmax)

        resolution, n = self.compute_resolution(vmin, vmax, lower, upper)

        if resolution == 'YEARLY':
            # TODO START AT THE BEGINNING OF A DECADE/CENTURY/MILLENIUM as
            # appropriate.
            years = self._max_n_locator.tick_values(lower.year, upper.year)
            ticks = [cftime.datetime(int(year), 1, 1) for year in years]
        elif resolution == 'MONTHLY':
            # TODO START AT THE BEGINNING OF A DECADE/CENTURY/MILLENIUM as
            # appropriate.
            months_offset = self._max_n_locator.tick_values(0, n)
            ticks = []
            for offset in months_offset:
                # Offsets may run past December; fold into year + month.
                year = lower.year + np.floor((lower.month + offset) / 12)
                month = ((lower.month + offset) % 12) + 1
                ticks.append(cftime.datetime(int(year), int(month), 1))
        elif resolution == 'DAILY':
            # TODO: It would be great if this favoured multiples of 7.
            days = self._max_n_locator_days.tick_values(vmin, vmax)
            ticks = [utime.num2date(dt) for dt in days]
        elif resolution == 'HOURLY':
            # Locate ticks in a fixed hour-based unit, then convert back.
            hour_unit = 'hours since 2000-01-01'
            hour_utime = cftime.utime(hour_unit, self.calendar)
            in_hours = hour_utime.date2num([lower, upper])
            hours = self._max_n_locator.tick_values(in_hours[0], in_hours[1])
            ticks = [hour_utime.num2date(dt) for dt in hours]
        elif resolution == 'MINUTELY':
            minute_unit = 'minutes since 2000-01-01'
            minute_utime = cftime.utime(minute_unit, self.calendar)
            in_minutes = minute_utime.date2num([lower, upper])
            minutes = self._max_n_locator.tick_values(in_minutes[0],
                                                      in_minutes[1])
            ticks = [minute_utime.num2date(dt) for dt in minutes]
        elif resolution == 'SECONDLY':
            second_unit = 'seconds since 2000-01-01'
            second_utime = cftime.utime(second_unit, self.calendar)
            in_seconds = second_utime.date2num([lower, upper])
            seconds = self._max_n_locator.tick_values(in_seconds[0],
                                                      in_seconds[1])
            ticks = [second_utime.num2date(dt) for dt in seconds]
        else:
            msg = 'Resolution {} not implemented yet.'.format(resolution)
            raise ValueError(msg)

        # Express the tick datetimes as numbers in the axis' own unit.
        return utime.date2num(ticks)
Esempio n. 19
0
 def test_axis_default_limits(self):
     """axisinfo supplies CalendarDateTime default limits 2000..2010."""
     cal = '360_day'
     unit = (cal, 'days since 2000-02-25 00:00:00', CalendarDateTime)
     result = NetCDFTimeConverter().axisinfo(unit, None)
     expected = [CalendarDateTime(cftime.datetime(year, 1, 1), cal)
                 for year in (2000, 2010)]
     np.testing.assert_array_equal(result.default_limits, expected)
Esempio n. 20
0
 def test_360_day_calendar(self):
     """_fixup_dates handles 360_day dates invalid in the real calendar."""
     unit = Unit('days since 2000-02-25 00:00:00', calendar='360_day')
     coord = AuxCoord([3, 4, 5], 'time', units=unit)
     result = _fixup_dates(coord, coord.points)
     expected = [cftime.datetime(2000, 2, day) for day in (28, 29, 30)]
     self.assertArrayEqual([cdt.datetime for cdt in result], expected)
Esempio n. 21
0
 def test_365_day_calendar(self):
     """_fixup_dates converts 365_day minute offsets to datetimes."""
     unit = Unit("minutes since 2000-02-25 00:00:00", calendar="365_day")
     coord = AuxCoord([30, 60, 150], "time", units=unit)
     result = _fixup_dates(coord, coord.points)
     expected = [cftime.datetime(2000, 2, 25, h, m)
                 for h, m in ((0, 30), (1, 0), (2, 30))]
     self.assertArrayEqual([cdt.datetime for cdt in result], expected)
Esempio n. 22
0
 def test_360_day_calendar(self):
     """_fixup_dates preserves calendar-aware cftime datetimes."""
     calendar = "360_day"
     unit = Unit("days since 2000-02-25 00:00:00", calendar=calendar)
     coord = AuxCoord([3, 4, 5], "time", units=unit)
     result = _fixup_dates(coord, coord.points)
     expected = [cftime.datetime(2000, 2, day, calendar=calendar)
                 for day in (28, 29, 30)]
     self.assertArrayEqual(result, expected)
Esempio n. 23
0
 def test_nonequal_calendars(self):
     """Mixed calendars within one value array raise ValueError.

     Cleanup: removed the unused local ``unit`` that the original
     assigned but never referenced.
     """
     calendar_1 = '360_day'
     calendar_2 = '365_day'
     val = [
         CalendarDateTime(cftime.datetime(2014, 8, 12), calendar_1),
         CalendarDateTime(cftime.datetime(2014, 8, 13), calendar_2)
     ]
     with assertRaisesRegex(self, ValueError, 'not all equal'):
         NetCDFTimeConverter().default_units(val, None)
Esempio n. 24
0
 def test_data_frame_cftime_360(self):
     """as_cube round-trips a DataFrame indexed by cftime datetimes."""
     index = [cftime.datetime(2001, 1, 1, 1, 1, 1),
              cftime.datetime(2002, 2, 2, 2, 2, 2)]
     data_frame = pandas.DataFrame([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]],
                                   index=index,
                                   columns=[10, 11, 12, 13, 14])
     cube = iris.pandas.as_cube(data_frame,
                                calendars={0: cf_units.CALENDAR_360_DAY})
     self.assertCML(cube, tests.get_result_path(
         ('pandas', 'as_cube', 'data_frame_netcdftime_360.cml')))
Esempio n. 25
0
    def test_simple_gregorian(self):
        """check_dates maps numeric offsets in each unit onto the expected
        gregorian datetimes."""
        self.setup_units("gregorian")
        nums = [20.0, 40.0, 75.0, 150.0, 8.0, 16.0, 300.0, 600.0]
        # Two values per unit: seconds, minutes, hours, days.
        units = ([self.useconds] * 2 + [self.uminutes] * 2 +
                 [self.uhours] * 2 + [self.udays] * 2)
        expected = [
            cftime.datetime(1970, 1, 1, 0, 0, 20, calendar="gregorian"),
            cftime.datetime(1970, 1, 1, 0, 0, 40, calendar="gregorian"),
            cftime.datetime(1970, 1, 1, 1, 15, calendar="gregorian"),
            cftime.datetime(1970, 1, 1, 2, 30, calendar="gregorian"),
            cftime.datetime(1970, 1, 1, 8, calendar="gregorian"),
            cftime.datetime(1970, 1, 1, 16, calendar="gregorian"),
            cftime.datetime(1970, 10, 28, calendar="gregorian"),
            cftime.datetime(1971, 8, 24, calendar="gregorian"),
        ]
        self.check_dates(nums, units, expected)
Esempio n. 26
0
 def test_series_cftime_360(self):
     """as_cube round-trips a Series indexed by cftime datetimes."""
     index = [cftime.datetime(2000 + i, i, i, i, i, i)
              for i in range(1, 6)]
     series = pandas.Series([0, 1, 2, 3, 4], index=index)
     cube = iris.pandas.as_cube(series,
                                calendars={0: cf_units.CALENDAR_360_DAY})
     # NOTE: the fixture filename below ('netcdfimte') is spelled that
     # way on disk; do not "correct" it here.
     self.assertCML(cube, tests.get_result_path(
         ('pandas', 'as_cube', 'series_netcdfimte_360.cml')))
Esempio n. 27
0
    def test_multi(self):
        """discard_microsecond applies elementwise over an nd-array."""
        shape = (2, 5)
        count = np.prod(shape)

        for calendar in self.calendars:
            dates = np.array([
                cftime.datetime(**self.kwargs, calendar=calendar)
                for _ in range(count)
            ]).reshape(shape)
            expected = np.array([
                cftime.datetime(**self.kwargs, microsecond=0,
                                calendar=calendar)
            ] * count).reshape(shape)
            np.testing.assert_array_equal(expected,
                                          discard_microsecond(dates))
Esempio n. 28
0
def time(cube, field):
    """Add a time coord to the cube based on validity time and time-window."""
    # Ancillary fields (e.g. land/sea mask) carry no validity time;
    # vt_year <= 0 marks that case, so nothing is added.
    if field.vt_year <= 0:
        # Some ancillary files, eg land sea mask do not
        # have a validity time.
        return
    else:
        valid_date = cftime.datetime(
            field.vt_year,
            field.vt_month,
            field.vt_day,
            field.vt_hour,
            field.vt_minute,
            field.vt_second,
        )
    # Round to the nearest whole unit of TIME_UNIT and store as int64.
    point = np.around(TIME_UNIT.date2num(valid_date)).astype(np.int64)

    period_seconds = None
    # NOTE(review): 32767 looks like a sentinel meaning "period is given
    # in seconds instead of minutes" -- confirm against the format spec.
    if field.period_minutes == 32767:
        period_seconds = field.period_seconds
    elif (not is_missing(field, field.period_minutes)
          and field.period_minutes != 0):
        period_seconds = field.period_minutes * 60
    if period_seconds:
        # The time window ends at the validity time and spans the period.
        # NOTE(review): subtracting seconds from `point` assumes TIME_UNIT
        # has one-second resolution -- confirm.
        bounds = np.array([point - period_seconds, point], dtype=np.int64)
    else:
        bounds = None

    time_coord = DimCoord(points=point,
                          bounds=bounds,
                          standard_name="time",
                          units=TIME_UNIT)

    # A scalar coordinate, hence added as an aux (not dim) coord.
    cube.add_aux_coord(time_coord)
Esempio n. 29
0
def add_month(date, months_to_add):
    """Return *date* shifted forward by *months_to_add* months.

    Day, hour, minute and second are preserved, so shifting onto a
    shorter month (e.g. Jan 31 + 1 month) raises ValueError from the
    datetime constructor, exactly as before.

    :param cftime.datetime date: Accepts datetime or phony datetime
        from ``netCDF4.num2date``.
    :param int months_to_add: The number of months to add to the date
        (may be negative).
    :returns: The final date
    :rtype: *cftime.datetime*
    """
    # divmod on the zero-based month yields the year carry and the new
    # zero-based month in one step; its floor semantics match the
    # original np.mod arithmetic for negative offsets too.
    years_to_add, month_index = divmod(date.month - 1 + months_to_add, 12)

    return datetime(
        year=date.year + years_to_add,
        month=month_index + 1,
        day=date.day,
        hour=date.hour,
        minute=date.minute,
        second=date.second)
Esempio n. 30
0
def _fixup_dates(coord, values):
    """Convert numeric time coordinate values to datetime-like objects.

    For a 1-D *values* array on a calendar-bearing coordinate, returns an
    object array of ``datetime.datetime`` (gregorian calendar) or
    ``nc_time_axis.CalendarDateTime`` (any other calendar).  Values are
    returned unchanged otherwise.
    """
    if coord.units.calendar is not None and values.ndim == 1:
        # Convert coordinate values into tuples of
        # (year, month, day, hour, min, sec)
        dates = [coord.units.num2date(val).timetuple()[0:6] for val in values]
        if coord.units.calendar == 'gregorian':
            r = [datetime.datetime(*date) for date in dates]
        else:
            # Non-gregorian calendars require nc_time_axis for plotting.
            try:
                import nc_time_axis
            except ImportError:
                msg = ('Cannot plot against time in a non-gregorian '
                       'calendar, because "nc_time_axis" is not available :  '
                       'Install the package from '
                       'https://github.com/SciTools/nc-time-axis to enable '
                       'this usage.')
                raise IrisError(msg)

            r = [
                nc_time_axis.CalendarDateTime(cftime.datetime(*date),
                                              coord.units.calendar)
                for date in dates
            ]
        # Use an object array so the datetime-like objects survive intact.
        values = np.empty(len(r), dtype=object)
        values[:] = r
    return values
Esempio n. 31
0
def test_decode_non_standard_calendar_single_element_fallback(
        calendar, enable_cftimeindex):
    """decode_cf_datetime yields an object-dtype result for a date that
    only exists on non-standard calendars (2001-02-29)."""
    # cftime is only required when the CFTimeIndex path is enabled.
    if enable_cftimeindex:
        pytest.importorskip('cftime')

    cftime = _import_cftime()

    units = 'days since 0001-01-01'
    # Older wrappers expose the datetime type under cftime.netcdftime.
    try:
        dt = cftime.netcdftime.datetime(2001, 2, 29)
    except AttributeError:
        # Must be using standalone netcdftime library
        dt = cftime.datetime(2001, 2, 29)

    num_time = cftime.date2num(dt, units, calendar)
    if enable_cftimeindex:
        actual = coding.times.decode_cf_datetime(
            num_time, units, calendar=calendar,
            enable_cftimeindex=enable_cftimeindex)
    else:
        # Without cftimeindex support a SerializationWarning is expected.
        with pytest.warns(SerializationWarning,
                          match='Unable to decode time axis'):
            actual = coding.times.decode_cf_datetime(
                num_time, units, calendar=calendar,
                enable_cftimeindex=enable_cftimeindex)

    expected = np.asarray(cftime.num2date(num_time, units, calendar))
    assert actual.dtype == np.dtype('O')
    assert expected == actual
Esempio n. 32
0
def _fixup_dates(coord, values):
    """Convert numeric time coordinate values to datetime-like objects.

    For a 1-D *values* array on a calendar-bearing coordinate, returns an
    object array of ``datetime.datetime`` (gregorian calendar) or
    ``nc_time_axis.CalendarDateTime`` (any other calendar).  Values are
    returned unchanged otherwise.
    """
    if coord.units.calendar is not None and values.ndim == 1:
        # Convert coordinate values into tuples of
        # (year, month, day, hour, min, sec)
        dates = [coord.units.num2date(val).timetuple()[0:6]
                 for val in values]
        if coord.units.calendar == 'gregorian':
            r = [datetime.datetime(*date) for date in dates]
        else:
            # Non-gregorian calendars require nc_time_axis for plotting.
            try:
                import nc_time_axis
            except ImportError:
                msg = ('Cannot plot against time in a non-gregorian '
                       'calendar, because "nc_time_axis" is not available :  '
                       'Install the package from '
                       'https://github.com/SciTools/nc-time-axis to enable '
                       'this usage.')
                raise IrisError(msg)

            r = [nc_time_axis.CalendarDateTime(
                 cftime.datetime(*date), coord.units.calendar)
                 for date in dates]
        # Use an object array so the datetime-like objects survive intact.
        values = np.empty(len(r), dtype=object)
        values[:] = r
    return values
Esempio n. 33
0
def test_decode_non_standard_calendar_single_element_fallback(
        calendar, enable_cftimeindex):
    """decode_cf_datetime yields an object-dtype result for a date that
    only exists on non-standard calendars (2001-02-29)."""
    # cftime is only required when the CFTimeIndex path is enabled.
    if enable_cftimeindex:
        pytest.importorskip('cftime')

    cftime = _import_cftime()

    units = 'days since 0001-01-01'
    # Older wrappers expose the datetime type under cftime.netcdftime.
    try:
        dt = cftime.netcdftime.datetime(2001, 2, 29)
    except AttributeError:
        # Must be using standalone netcdftime library
        dt = cftime.datetime(2001, 2, 29)

    num_time = cftime.date2num(dt, units, calendar)
    if enable_cftimeindex:
        actual = coding.times.decode_cf_datetime(
            num_time,
            units,
            calendar=calendar,
            enable_cftimeindex=enable_cftimeindex)
    else:
        # Without cftimeindex support a SerializationWarning is expected.
        with pytest.warns(SerializationWarning,
                          match='Unable to decode time axis'):
            actual = coding.times.decode_cf_datetime(
                num_time,
                units,
                calendar=calendar,
                enable_cftimeindex=enable_cftimeindex)

    expected = np.asarray(cftime.num2date(num_time, units, calendar))
    assert actual.dtype == np.dtype('O')
    assert expected == actual
Esempio n. 34
0
def test_IntervalAveragedTimes_indicator(time, expected, includes_lower: bool):
    """The 3-hourly interval-averaged indicator matches the expected value."""
    interval_times = IntervalAveragedTimes(
        frequency=timedelta(hours=3),
        initial_time=datetime(2000, 1, 1),
        includes_lower=includes_lower,
    )
    assert interval_times.indicator(time) == expected
Esempio n. 35
0
def test_decode_non_standard_calendar_single_element(calendar):
    """Decode one date that is only valid in a non-standard calendar."""
    cftime = _import_cftime()
    units = 'days since 0001-01-01'

    # 2001-02-29 does not exist in the proleptic Gregorian calendar.
    try:
        dt = cftime.netcdftime.datetime(2001, 2, 29)
    except AttributeError:
        # Must be using the standalone cftime library
        dt = cftime.datetime(2001, 2, 29)

    num_time = cftime.date2num(dt, units, calendar)
    actual = coding.times.decode_cf_datetime(num_time, units,
                                             calendar=calendar)

    use_real_cftime = cftime.__name__ == 'cftime'
    if use_real_cftime:
        expected = np.asarray(
            cftime.num2date(num_time, units, calendar,
                            only_use_cftime_datetimes=True))
    else:
        expected = np.asarray(cftime.num2date(num_time, units, calendar))
    assert actual.dtype == np.dtype('O')
    assert expected == actual
Esempio n. 36
0
 def test_t1_t2_access(self):
     """t1 is accessible and equals the expected 360_day cftime date."""
     field = self.r[0]
     expected = cftime.datetime(1994, 12, 1, 0, 0, calendar="360_day")
     self.assertEqual(field.t1.timetuple(), expected.timetuple())
Esempio n. 37
0
 def test_PartialDateTime_unbounded_cell(self):
     """Cell comparison works against an unbounded PartialDateTime."""
     pdt = PartialDateTime(month=6)
     cell = Cell(cftime.datetime(2010, 3, 1))
     # The March cell orders before the month-6 partial date from
     # either side of the comparison.
     self.assertLess(cell, pdt)
     self.assertLessEqual(cell, pdt)
     self.assertGreater(pdt, cell)
     self.assertGreaterEqual(pdt, cell)
Esempio n. 38
0
 def test_cftime_other(self):
     """Comparing a cell with a cftime.datetime must raise rather than
     fall back to id comparison, which would give unreliable results."""
     other = cftime.datetime(2010, 3, 21)
     cell = Cell(mock.Mock(timetuple=mock.Mock()))
     self.assert_raises_on_comparison(cell, other, TypeError,
                                      'determine the order of cftime')
Esempio n. 39
0
def _modified_julian_day(cftime_in):
    """Return an ordinal-style day number for a cftime datetime.

    The day-of-year is rescaled from the length of the year in the
    input's own calendar onto a standard (365/366-day) year, then added
    to the ordinal of 1 January of that year.

    NOTE(review): relies on ``_days_in_year`` / ``_day_of_year`` defined
    elsewhere, and on ``toordinal`` of a default-calendar cftime date —
    confirm whether the result is a true Modified Julian Day (epoch
    1858-11-17) or a plain proleptic ordinal, as the name suggests MJD.
    """
    days_in_year_standard = _days_in_year(cftime_in.year)  # 365 or 366
    days_in_year_chosen = _days_in_year(
        cftime_in.year, cftime_in.calendar)  # how to handle 1582?
    doy = _day_of_year(cftime_in)
    # Ordinal of 1 Jan in the input year, in cftime's default calendar.
    year_offset = cftime.datetime(cftime_in.year, 1, 1).toordinal()
    # Scale day-of-year so calendars of different lengths are comparable.
    year_fraction = doy * days_in_year_standard / days_in_year_chosen
    return int(np.round(year_offset + year_fraction))
Esempio n. 40
0
 def test_360_day_calendar_CalendarDateTime(self):
     """Plotting CalendarDateTime values preserves them as the line ydata."""
     raw_dates = [cftime.datetime(1986, month, 30) for month in range(1, 6)]
     cal_datetimes = [nc_time_axis.CalendarDateTime(date, '360_day')
                      for date in raw_dates]
     (line,) = plt.plot(cal_datetimes)
     np.testing.assert_array_equal(line.get_ydata(), cal_datetimes)
Esempio n. 41
0
    def test_t2_no_time_mean(self):
        """Without a time mean, t2 is set to the all-zero dummy datetime."""
        cube = _get_single_time_cube(set_time_mean=False)
        # The save rules use year=month=day=0 to mean "no second time".
        expected = cftime.datetime(0, 0, 0)

        with mock.patch('iris.fileformats.pp.PPField3',
                        autospec=True) as pp_field:
            verify(cube, pp_field)
        self.assertEqual(expected, pp_field.t2)
Esempio n. 42
0
 def test_all_scalar(self):
     """A collation of one fully-scalar field yields scalar time coords."""
     field = self._field()
     field.lbtim = 11
     field.t1 = cftime.datetime(1970, 1, 1, 18)
     field.t2 = cftime.datetime(1970, 1, 1, 12)
     collation = mock.Mock(fields=[field],
                           vector_dims_shape=(),
                           element_arrays_and_dims={})
     metadata = convert_collation(collation)
     self._check_phenomenon(metadata)
     self.assertEqual(metadata.dim_coords_and_dims,
                      [(LONGITUDE, 1), (LATITUDE, 0)])
     # t1 -> time, t2 -> forecast_reference_time, t1 - t2 -> 6h period.
     expected_aux = [
         (iris.coords.DimCoord(18, 'time', units='hours since epoch'),
          None),
         (iris.coords.DimCoord(12, 'forecast_reference_time',
                               units='hours since epoch'),
          None),
         (iris.coords.DimCoord(6, 'forecast_period', units='hours'),
          None),
     ]
     self.assertEqual(metadata.aux_coords_and_dims, expected_aux)
Esempio n. 43
0
def reference_time(cube, field):
    """Add a 'reference time' to the cube, if present in the field."""
    if field.dt_year == field.int_mdi:
        # Missing-data indicator: the field carries no reference time.
        return
    data_date = cftime.datetime(field.dt_year, field.dt_month, field.dt_day,
                                field.dt_hour, field.dt_minute)
    cube.add_aux_coord(
        DimCoord(TIME_UNIT.date2num(data_date),
                 standard_name='forecast_reference_time',
                 units=TIME_UNIT))
Esempio n. 44
0
 def test_cftime_cell(self):
     """A cell holding a cftime.datetime must raise on comparison with
     incompatible operands instead of falling back to id comparison,
     which would give unreliable results."""
     cell = Cell(cftime.datetime(2010, 3, 21))
     cases = [
         (mock.Mock(timetuple=mock.Mock()),
          'determine the order of cftime'),
         (23, 'determine the order of cftime'),
         ('hello', 'Unexpected type.*str'),
     ]
     for other, message in cases:
         self.assert_raises_on_comparison(cell, other, TypeError, message)
Esempio n. 45
0
    def test_365_calendar(self):
        """convert() must honour a 365_day calendar on the source field."""
        lbtim = SplittableInt(4, {'ia': 2, 'ib': 1, 'ic': 0})
        f = mock.MagicMock(lbtim=lbtim,
                           lbyr=2013, lbmon=1, lbdat=1,
                           lbhr=12, lbmin=0, lbsec=0,
                           t1=cftime.datetime(2013, 1, 1, 12, 0, 0),
                           t2=cftime.datetime(2013, 1, 2, 12, 0, 0),
                           spec=PPField3)
        f.time_unit = six.create_bound_method(PPField3.time_unit, f)
        f.calendar = cf_units.CALENDAR_365_DAY
        (factories, references, standard_name, long_name, units,
         attributes, cell_methods, dim_coords_and_dims,
         aux_coords_and_dims) = convert(f)

        # Exactly one 'time' aux coord should result, on the T axis,
        # carrying the 365_day calendar through to its units.
        time_coords_and_dims = [
            (coord, dims) for coord, dims in aux_coords_and_dims
            if coord.standard_name == 'time']
        self.assertEqual(len(time_coords_and_dims), 1)
        coord, dims = time_coords_and_dims[0]
        self.assertEqual(guess_coord_axis(coord), 'T')
        self.assertEqual(coord.units.calendar, '365_day')
Esempio n. 46
0
    def axisinfo(unit, axis):
        """
        Returns the :class:`~matplotlib.units.AxisInfo` for *unit*.

        *unit* is a tzinfo instance or None.
        The *axis* argument is required but not used.
        """
        calendar, date_unit, date_type = unit

        locator = NetCDFTimeDateLocator(4, calendar=calendar,
                                        date_unit=date_unit)
        formatter = NetCDFTimeDateFormatter(locator, calendar=calendar,
                                            time_units=date_unit)
        # Default axis limits span 2000-01-01 .. 2010-01-01 in whichever
        # date type the converter was registered with.
        if date_type is CalendarDateTime:
            limits = tuple(
                CalendarDateTime(cftime.datetime(year, 1, 1),
                                 calendar=calendar)
                for year in (2000, 2010))
        else:
            limits = (date_type(2000, 1, 1), date_type(2010, 1, 1))
        return munits.AxisInfo(majloc=locator, majfmt=formatter, label='',
                               default_limits=limits)
Esempio n. 47
0
 def test_360_day_calendar(self):
     """Plotting a 360_day time coord yields CalendarDateTime ydata."""
     n = 360
     calendar = '360_day'
     time_unit = Unit('days since 1970-01-01 00:00', calendar=calendar)
     time_coord = AuxCoord(np.arange(n), 'time', units=time_unit)
     # Re-wrap each decoded date as a plain cftime.datetime inside a
     # CalendarDateTime carrying the calendar.
     expected_ydata = np.array([
         CalendarDateTime(
             cftime.datetime(date.year, date.month, date.day,
                             date.hour, date.minute, date.second),
             calendar)
         for date in (time_unit.num2date(point)
                      for point in time_coord.points)])
     line1, = iplt.plot(time_coord)
     self.assertArrayEqual(expected_ydata, line1.get_ydata())
    def _calculate_structure(self):
        """Analyse the collated fields and cache their optimal array structure.

        Populates ``self._vector_dims_shape``,
        ``self._primary_dimension_elements`` and
        ``self._element_arrays_and_dims``, then marks the calculation
        done via ``self._structure_calculated``.
        """
        # Make value arrays for the vectorisable field elements.
        element_definitions = self._field_vector_element_arrays()

        # Identify the vertical elements and payload.
        blev_array = dict(element_definitions).get('blev')
        vertical_elements = ('lblev', 'bhlev', 'bhrlev',
                             'brsvd1', 'brsvd2', 'brlev')

        # Make an ordering copy (shallow: the value arrays are shared).
        ordering_definitions = element_definitions[:]
        # Replace time value tuples with integers and bind the vertical
        # elements to the (expected) primary vertical element "blev".
        for index, (name, array) in enumerate(ordering_definitions):
            if name in ('t1', 't2'):
                # Comparable ints are cheaper to sort than datetime tuples.
                array = np.array(
                    [self._time_comparable_int(*tuple(val)) for val in array])
                ordering_definitions[index] = (name, array)
            if name in vertical_elements and blev_array is not None:
                ordering_definitions[index] = (name, blev_array)

        # Perform the main analysis: get vector dimensions, elements, arrays.
        dims_shape, primary_elements, vector_element_arrays_and_dims = \
            optimal_array_structure(ordering_definitions,
                                    element_definitions)

        # Replace time tuples in the result with real datetime-like values.
        # N.B. so we *don't* do this on the whole (expanded) input arrays.
        for name in ('t1', 't2'):
            if name in vector_element_arrays_and_dims:
                arr, dims = vector_element_arrays_and_dims[name]
                arr_shape = arr.shape[:-1]
                extra_length = arr.shape[-1]
                # Flatten out the array apart from the last dimension,
                # convert to cftime objects, then reshape back.
                arr = np.array([cftime.datetime(*args)
                                for args in arr.reshape(-1, extra_length)]
                               ).reshape(arr_shape)
                vector_element_arrays_and_dims[name] = (arr, dims)

        # Write the private cache values, exposed as public properties.
        self._vector_dims_shape = dims_shape
        self._primary_dimension_elements = primary_elements
        self._element_arrays_and_dims = vector_element_arrays_and_dims
        # Do all this only once.
        self._structure_calculated = True
Esempio n. 49
0
def time(cube, field):
    """Add a time coord to the cube."""
    valid_date = cftime.datetime(field.vt_year, field.vt_month, field.vt_day,
                                 field.vt_hour, field.vt_minute,
                                 field.vt_second)
    point = TIME_UNIT.date2num(valid_date)

    # A Period of Interest (minutes), when set and not the missing-data
    # indicator, becomes the lower bound of the time cell.
    has_period = (field.period_minutes != field.int_mdi and
                  field.period_minutes != 0)
    if has_period:
        bounds = (point - (field.period_minutes / 60.0), point)
    else:
        bounds = None

    cube.add_aux_coord(DimCoord(points=point,
                                bounds=bounds,
                                standard_name='time',
                                units=TIME_UNIT))
Esempio n. 50
0
 def test_t1_and_t2(self):
     """Varying lbyr/lbyrd across fields vectorises both t1 and t2 on dim 0."""
     years = (2013, 2014, 2015)
     ref_years = (2000, 2001, 2002)
     collation = FieldCollation([_make_field(lbyr=year, lbyrd=ref_year)
                                 for year, ref_year in zip(years, ref_years)])
     result = collation.element_arrays_and_dims
     self.assertEqual(set(result.keys()), set(['t1', 't2']))
     values, dims = result['t1']
     self.assertArrayEqual(values, [datetime(year, 1, 1) for year in years])
     self.assertEqual(dims, (0,))
     values, dims = result['t2']
     self.assertArrayEqual(values,
                           [datetime(year, 1, 1) for year in ref_years])
     self.assertEqual(dims, (0,))
Esempio n. 51
0
def test_decode_non_standard_calendar_single_element(
        calendar):
    """decode_cf_datetime round-trips a single non-standard-calendar date."""
    cftime = _import_cftime()
    units = 'days since 0001-01-01'

    # 2001-02-29 is only a valid date in non-standard calendars.
    try:
        dt = cftime.netcdftime.datetime(2001, 2, 29)
    except AttributeError:
        # Must be using the standalone cftime library
        dt = cftime.datetime(2001, 2, 29)

    encoded = cftime.date2num(dt, units, calendar)
    actual = coding.times.decode_cf_datetime(
        encoded, units, calendar=calendar)

    kwargs = {}
    if cftime.__name__ == 'cftime':
        kwargs['only_use_cftime_datetimes'] = True
    expected = np.asarray(cftime.num2date(encoded, units, calendar, **kwargs))
    assert actual.dtype == np.dtype('O')
    assert expected == actual
Esempio n. 52
0
 def test_t1_and_t2_and_lbft(self):
     """t1 varies on dim 0, t2 on dims (0, 1) and lbft on dim 1."""
     fields = [_make_field(lbyr=1, lbyrd=15, lbft=6),
               _make_field(lbyr=1, lbyrd=16, lbft=9),
               _make_field(lbyr=11, lbyrd=25, lbft=6),
               _make_field(lbyr=11, lbyrd=26, lbft=9)]
     result = FieldCollation(fields).element_arrays_and_dims
     self.assertEqual(set(result.keys()), set(['t1', 't2', 'lbft']))
     values, dims = result['t1']
     self.assertArrayEqual(values, [datetime(1, 1, 1), datetime(11, 1, 1)])
     self.assertEqual(dims, (0,))
     values, dims = result['t2']
     self.assertArrayEqual(values,
                           [[datetime(15, 1, 1), datetime(16, 1, 1)],
                            [datetime(25, 1, 1), datetime(26, 1, 1)]])
     self.assertEqual(dims, (0, 1))
     values, dims = result['lbft']
     self.assertArrayEqual(values, [6, 9])
     self.assertEqual(dims, (1,))
Esempio n. 53
0
 def test_cftime_not_equal(self):
     """A PartialDateTime with mismatched fields is not equal to the date."""
     pdt = PartialDateTime(month=3, microsecond=2)
     # month differs (3 vs 4), so equality must fail.
     other = cftime.datetime(year=2013, month=4, day=20, second=2)
     self.assertFalse(pdt == other)
Esempio n. 54
0
 def test_cftime_not_greater(self):
     """A PartialDateTime is not greater than this non-matching date."""
     pdt = PartialDateTime(month=3, microsecond=2)
     other = cftime.datetime(year=2013, month=3, day=20, second=3)
     self.assertFalse(pdt > other)
Esempio n. 55
0
def _general_time_rules(cube, pp):
    """
    Rules for setting time metadata of the PP field.

    Each rule below matches a combination of the cube's scalar time
    coordinates ('time', 'forecast_period', 'forecast_reference_time',
    'clim_season') and its time cell methods, and sets the
    corresponding PP header elements (lbtim.ia, lbtim.ib, t1, t2,
    lbft).

    Args:
        cube: the cube being saved as a series of PP fields.
        pp: the current PP field having save rules applied.

    Returns:
        The PP field with updated metadata.

    """
    # Scalar coordinates driving the rules below; None when absent.
    time_coord = scalar_coord(cube, 'time')
    fp_coord = scalar_coord(cube, 'forecast_period')
    frt_coord = scalar_coord(cube, 'forecast_reference_time')
    clim_season_coord = scalar_coord(cube, 'clim_season')

    # Time cell methods (mean/min/max over 'time'); None when absent.
    cm_time_mean = scalar_cell_method(cube, 'mean', 'time')
    cm_time_min = scalar_cell_method(cube, 'minimum', 'time')
    cm_time_max = scalar_cell_method(cube, 'maximum', 'time')

    # No forecast.
    if time_coord is not None and fp_coord is None and frt_coord is None:
        pp.lbtim.ia = 0
        pp.lbtim.ib = 0
        pp.t1 = time_coord.units.num2date(time_coord.points[0])
        # All-zero dummy date: there is no meaningful second time.
        pp.t2 = cftime.datetime(0, 0, 0)

    # Forecast.
    if (time_coord is not None and
            not time_coord.has_bounds() and
            fp_coord is not None):
        pp.lbtim.ia = 0
        pp.lbtim.ib = 1
        pp.t1 = time_coord.units.num2date(time_coord.points[0])
        # t2 is the forecast reference time: validity time minus period.
        pp.t2 = time_coord.units.num2date(time_coord.points[0] -
                                          fp_coord.points[0])
        pp.lbft = fp_coord.points[0]

    # Time mean (non-climatological).
    # XXX This only works when we have a single timestep.
    if (time_coord is not None and
            time_coord.has_bounds() and
            clim_season_coord is None and
            fp_coord is not None and
            fp_coord.has_bounds()):
        # XXX How do we know *which* time to use if there are more than
        # one? *Can* there be more than one?
        pp.lbtim.ib = 2
        pp.t1 = time_coord.units.num2date(time_coord.bounds[0, 0])
        pp.t2 = time_coord.units.num2date(time_coord.bounds[0, 1])
        pp.lbft = fp_coord.units.convert(fp_coord.bounds[0, 1], 'hours')

    if (time_coord is not None and
            time_coord.has_bounds() and
            clim_season_coord is None and
            fp_coord is None and
            frt_coord is not None):
        # Handle missing forecast period using time and forecast ref time.
        pp.lbtim.ib = 2
        pp.t1 = time_coord.units.num2date(time_coord.bounds[0, 0])
        pp.t2 = time_coord.units.num2date(time_coord.bounds[0, 1])
        stop = time_coord.units.convert(time_coord.bounds[0, 1],
                                        'hours since epoch')
        start = frt_coord.units.convert(frt_coord.points[0],
                                        'hours since epoch')
        pp.lbft = stop - start

    if (time_coord is not None and
            time_coord.has_bounds() and
            clim_season_coord is None and
            cm_time_mean is not None):
        pp.lbtim.ib = 2
        pp.t1 = time_coord.units.num2date(time_coord.bounds[0, 0])
        pp.t2 = time_coord.units.num2date(time_coord.bounds[0, 1])

    # Hourly mean interval, e.g. '24 hour', recorded into lbtim.ia.
    if (time_coord is not None and
            time_coord.has_bounds() and
            clim_season_coord is None and
            cm_time_mean is not None and
            cm_time_mean.intervals != () and
            cm_time_mean.intervals[0].endswith('hour')):
        pp.lbtim.ia = int(cm_time_mean.intervals[0][:-5])

    # No usable hourly interval: default lbtim.ia to 0.
    if (time_coord is not None and
            time_coord.has_bounds() and
            clim_season_coord is None and
            (fp_coord is not None or frt_coord is not None) and
            (cm_time_mean is None or cm_time_mean.intervals == () or
             not cm_time_mean.intervals[0].endswith('hour'))):
        pp.lbtim.ia = 0

    # If the cell methods contain a minimum then overwrite lbtim.ia with this
    # interval.
    if (time_coord is not None and
            time_coord.has_bounds() and
            clim_season_coord is None and
            (fp_coord is not None or frt_coord is not None) and
            cm_time_min is not None and
            cm_time_min.intervals != () and
            cm_time_min.intervals[0].endswith('hour')):
        # Set lbtim.ia with the integer part of the cell method's interval
        # e.g. if interval is '24 hour' then lbtim.ia becomes 24.
        pp.lbtim.ia = int(cm_time_min.intervals[0][:-5])

    # If the cell methods contain a maximum then overwrite lbtim.ia with this
    # interval.
    if (time_coord is not None and
            time_coord.has_bounds() and
            clim_season_coord is None and
            (fp_coord is not None or frt_coord is not None) and
            cm_time_max is not None and
            cm_time_max.intervals != () and
            cm_time_max.intervals[0].endswith('hour')):
        # Set lbtim.ia with the integer part of the cell method's interval
        # e.g. if interval is '1 hour' then lbtim.ia becomes 1.
        pp.lbtim.ia = int(cm_time_max.intervals[0][:-5])

    # Bound years decide whether a climatological mean spans years.
    if time_coord is not None and time_coord.has_bounds():
        lower_bound_yr =\
            time_coord.units.num2date(time_coord.bounds[0, 0]).year
        upper_bound_yr =\
            time_coord.units.num2date(time_coord.bounds[0, 1]).year
    else:
        lower_bound_yr = None
        upper_bound_yr = None

    # Climatological time means.
    if (time_coord is not None and
            time_coord.has_bounds() and
            lower_bound_yr == upper_bound_yr and
            fp_coord is not None and
            fp_coord.has_bounds() and
            clim_season_coord is not None and
            'clim_season' in cube.cell_methods[-1].coord_names):
        # Climatological time mean - single year.
        pp.lbtim.ia = 0
        pp.lbtim.ib = 2
        pp.t1 = time_coord.units.num2date(time_coord.bounds[0, 0])
        pp.t2 = time_coord.units.num2date(time_coord.bounds[0, 1])
        pp.lbft = fp_coord.units.convert(fp_coord.bounds[0, 1], 'hours')

    elif (time_coord is not None and
            time_coord.has_bounds() and
            lower_bound_yr != upper_bound_yr and
            fp_coord is not None and
            fp_coord.has_bounds() and
            clim_season_coord is not None and
            'clim_season' in cube.cell_methods[-1].coord_names and
            clim_season_coord.points[0] == 'djf'):
        # Climatological time mean - spanning years - djf.
        pp.lbtim.ia = 0
        pp.lbtim.ib = 3
        pp.t1 = time_coord.units.num2date(time_coord.bounds[0, 0])
        pp.t2 = time_coord.units.num2date(time_coord.bounds[0, 1])
        # Snap t1/t2 to the canonical DJF season boundaries
        # (1 December of the starting year to 1 March of the next),
        # warning if this changes the original bounds.
        if pp.t1.month == 12:
            pp.t1 = cftime.datetime(pp.t1.year)
        else:
            pp.t1 = cftime.datetime(pp.t1.year-1, 12, 1, 0, 0, 0)
        pp.t2 = cftime.datetime(pp.t2.year, 3, 1, 0, 0, 0)
        _conditional_warning(
            time_coord.bounds[0, 0] != time_coord.units.date2num(pp.t1),
            "modified t1 for climatological seasonal mean")
        _conditional_warning(
            time_coord.bounds[0, 1] != time_coord.units.date2num(pp.t2),
            "modified t2 for climatological seasonal mean")
        pp.lbft = fp_coord.units.convert(fp_coord.bounds[0, 1], 'hours')

    elif (time_coord is not None and
            time_coord.has_bounds() and
            lower_bound_yr != upper_bound_yr and
            fp_coord is not None and
            fp_coord.has_bounds() and
            clim_season_coord is not None and
            'clim_season' in cube.cell_methods[-1].coord_names and
            clim_season_coord.points[0] == 'mam'):
        # Climatological time mean - spanning years - mam.
        pp.lbtim.ia = 0
        pp.lbtim.ib = 3
        # TODO: wut?
        pp.t1 = time_coord.units.num2date(time_coord.bounds[0, 0])
        pp.t2 = time_coord.units.num2date(time_coord.bounds[0, 1])
        # Snap to canonical MAM boundaries (1 March .. 1 June).
        pp.t1 = cftime.datetime(pp.t1.year, 3, 1, 0, 0, 0)
        pp.t2 = cftime.datetime(pp.t2.year, 6, 1, 0, 0, 0)
        _conditional_warning(
            time_coord.bounds[0, 0] != time_coord.units.date2num(pp.t1),
            "modified t1 for climatological seasonal mean")
        _conditional_warning(
            time_coord.bounds[0, 1] != time_coord.units.date2num(pp.t2),
            "modified t2 for climatological seasonal mean")
        pp.lbft = fp_coord.units.convert(fp_coord.bounds[0, 1], 'hours')

    elif (time_coord is not None and
            time_coord.has_bounds() and
            lower_bound_yr != upper_bound_yr and
            fp_coord is not None and
            fp_coord.has_bounds() and
            clim_season_coord is not None and
            'clim_season' in cube.cell_methods[-1].coord_names and
            clim_season_coord.points[0] == 'jja'):
        # Climatological time mean - spanning years - jja.
        pp.lbtim.ia = 0
        pp.lbtim.ib = 3
        # TODO: wut?
        pp.t1 = time_coord.units.num2date(time_coord.bounds[0, 0])
        pp.t2 = time_coord.units.num2date(time_coord.bounds[0, 1])
        # Snap to canonical JJA boundaries (1 June .. 1 September).
        pp.t1 = cftime.datetime(pp.t1.year, 6, 1, 0, 0, 0)
        pp.t2 = cftime.datetime(pp.t2.year, 9, 1, 0, 0, 0)
        _conditional_warning(
            time_coord.bounds[0, 0] != time_coord.units.date2num(pp.t1),
            "modified t1 for climatological seasonal mean")
        _conditional_warning(
            time_coord.bounds[0, 1] != time_coord.units.date2num(pp.t2),
            "modified t2 for climatological seasonal mean")
        pp.lbft = fp_coord.units.convert(fp_coord.bounds[0, 1], 'hours')

    elif (time_coord is not None and
            time_coord.has_bounds() and
            lower_bound_yr != upper_bound_yr and
            fp_coord is not None and
            fp_coord.has_bounds() and
            clim_season_coord is not None and
            'clim_season' in cube.cell_methods[-1].coord_names and
            clim_season_coord.points[0] == 'son'):
        # Climatological time mean - spanning years - son.
        pp.lbtim.ia = 0
        pp.lbtim.ib = 3
        # TODO: wut?
        pp.t1 = time_coord.units.num2date(time_coord.bounds[0, 0])
        pp.t2 = time_coord.units.num2date(time_coord.bounds[0, 1])
        # Snap to canonical SON boundaries (1 September .. 1 December).
        pp.t1 = cftime.datetime(pp.t1.year, 9, 1, 0, 0, 0)
        pp.t2 = cftime.datetime(pp.t2.year, 12, 1, 0, 0, 0)
        _conditional_warning(
            time_coord.bounds[0, 0] != time_coord.units.date2num(pp.t1),
            "modified t1 for climatological seasonal mean")
        _conditional_warning(
            time_coord.bounds[0, 1] != time_coord.units.date2num(pp.t2),
            "modified t2 for climatological seasonal mean")
        pp.lbft = fp_coord.units.convert(fp_coord.bounds[0, 1], 'hours')

    return pp
Esempio n. 56
0
 def test_t1_t2_access(self):
     self.assertEqual(self.r[0].t1.timetuple(),
                      cftime.datetime(1994, 12, 1, 0, 0).timetuple())