def times(calendar):
    """Return four consecutive hourly cftime dates starting at
    2000-01-01 00:00 in the given *calendar*."""
    cftime = _import_cftime()
    hours = np.arange(4)
    return cftime.num2date(
        hours,
        units='hours since 2000-01-01',
        calendar=calendar,
        only_use_cftime_datetimes=True,
    )
def test_decode_nonstandard_calendar_multidim_time_inside_timestamp_range(calendar):
    """A 2-D array of non-standard-calendar times inside the
    pandas.Timestamp range decodes to an object-dtype array of cftime
    dates that match cftime's own num2date within one second."""
    cftime = _import_cftime()
    units = "days since 0001-01-01"
    dates_a = pd.date_range("2001-04-01", end="2001-04-05", freq="D")
    dates_b = pd.date_range("2001-05-01", end="2001-05-05", freq="D")
    nums_a = cftime.date2num(dates_a.to_pydatetime(), units, calendar=calendar)
    nums_b = cftime.date2num(dates_b.to_pydatetime(), units, calendar=calendar)
    mdim_time = np.empty((len(nums_a), 2))
    mdim_time[:, 0] = nums_a
    mdim_time[:, 1] = nums_b
    if cftime.__name__ != "cftime":
        # netcdftime fallback lacks the only_use_cftime_datetimes keyword
        expected1 = cftime.num2date(nums_a, units, calendar)
        expected2 = cftime.num2date(nums_b, units, calendar)
    else:
        expected1 = cftime.num2date(
            nums_a, units, calendar, only_use_cftime_datetimes=True
        )
        expected2 = cftime.num2date(
            nums_b, units, calendar, only_use_cftime_datetimes=True
        )
    actual = coding.times.decode_cf_datetime(mdim_time, units, calendar=calendar)
    assert actual.dtype == np.dtype("O")
    abs_diff1 = abs(actual[:, 0] - expected1)
    abs_diff2 = abs(actual[:, 1] - expected2)
    # once we no longer support versions of netCDF4 older than 1.1.5,
    # we could do this check with near microsecond accuracy:
    # https://github.com/Unidata/netcdf4-python/issues/355
    assert (abs_diff1 <= np.timedelta64(1, "s")).all()
    assert (abs_diff2 <= np.timedelta64(1, "s")).all()
def test_decode_standard_calendar_multidim_time_inside_timestamp_range(
        calendar):
    """A 2-D array of standard-calendar times inside the Timestamp range
    decodes straight to datetime64[ns]."""
    cftime = _import_cftime()
    units = 'days since 0001-01-01'
    dates_a = pd.date_range('2001-04-01', end='2001-04-05', freq='D')
    dates_b = pd.date_range('2001-05-01', end='2001-05-05', freq='D')
    nums_a = cftime.date2num(dates_a.to_pydatetime(), units,
                             calendar=calendar)
    nums_b = cftime.date2num(dates_b.to_pydatetime(), units,
                             calendar=calendar)
    mdim_time = np.empty((len(nums_a), 2))
    mdim_time[:, 0] = nums_a
    mdim_time[:, 1] = nums_b
    actual = coding.times.decode_cf_datetime(mdim_time, units,
                                             calendar=calendar)
    assert actual.dtype == np.dtype('M8[ns]')
    # once we no longer support versions of netCDF4 older than 1.1.5,
    # we could do this check with near microsecond accuracy:
    # https://github.com/Unidata/netcdf4-python/issues/355
    for col, expected in enumerate((dates_a.values, dates_b.values)):
        abs_diff = abs(actual[:, col] - expected)
        assert (abs_diff <= np.timedelta64(1, 's')).all()
def test_decode_non_standard_calendar_single_element(calendar):
    """A scalar date valid only in a non-standard calendar (Feb 29, 2001)
    decodes to a 0-d object-dtype array holding the cftime date."""
    cftime = _import_cftime()
    units = 'days since 0001-01-01'
    try:
        dt = cftime.netcdftime.datetime(2001, 2, 29)
    except AttributeError:
        # standalone cftime library exposes datetime directly
        dt = cftime.datetime(2001, 2, 29)
    num_time = cftime.date2num(dt, units, calendar)
    actual = coding.times.decode_cf_datetime(num_time, units,
                                             calendar=calendar)
    if cftime.__name__ != 'cftime':
        expected = np.asarray(cftime.num2date(num_time, units, calendar))
    else:
        expected = np.asarray(
            cftime.num2date(num_time, units, calendar,
                            only_use_cftime_datetimes=True))
    assert actual.dtype == np.dtype('O')
    assert expected == actual
def test_decode_dates_outside_timestamp_range(calendar):
    """Dates in year 1, outside pandas' Timestamp range, decode to
    calendar-specific cftime date objects."""
    from datetime import datetime
    cftime = _import_cftime()
    units = 'days since 0001-01-01'
    dates = [datetime(1, 4, 1, hour) for hour in range(1, 5)]
    num_dates = cftime.date2num(dates, units, calendar=calendar)
    if cftime.__name__ == 'cftime':
        expected = cftime.num2date(num_dates, units, calendar=calendar,
                                   only_use_cftime_datetimes=True)
    else:
        # netcdftime fallback lacks the only_use_cftime_datetimes keyword
        expected = cftime.num2date(num_dates, units, calendar=calendar)
    expected_date_type = type(expected[0])
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', 'Unable to decode time axis')
        actual = coding.times.decode_cf_datetime(num_dates, units,
                                                 calendar=calendar)
    assert all(isinstance(value, expected_date_type) for value in actual)
    # once we no longer support versions of netCDF4 older than 1.1.5,
    # we could do this check with near microsecond accuracy:
    # https://github.com/Unidata/netcdf4-python/issues/355
    assert (abs(actual - expected) <= np.timedelta64(1, 's')).all()
def test_decode_360_day_calendar():
    """360-day-calendar times decode to cftime objects without emitting
    any warning, regardless of whether the epoch year is a leap year."""
    cftime = _import_cftime()
    calendar = '360_day'
    for year in (2010, 2011, 2012, 2013, 2014):  # leap year must not matter
        units = 'days since {0}-01-01'.format(year)
        num_times = np.arange(100)
        if cftime.__name__ == 'cftime':
            expected = cftime.num2date(num_times, units, calendar,
                                       only_use_cftime_datetimes=True)
        else:
            expected = cftime.num2date(num_times, units, calendar)
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            actual = coding.times.decode_cf_datetime(num_times, units,
                                                     calendar=calendar)
            assert len(w) == 0
        assert actual.dtype == np.dtype('O')
        assert_array_equal(actual, expected)
def test_decode_non_standard_calendar_single_element_fallback(
        calendar, enable_cftimeindex):
    """Decoding a date that only exists in a non-standard calendar
    succeeds silently with cftime indexing, and otherwise warns before
    falling back to an object-dtype result."""
    if enable_cftimeindex:
        pytest.importorskip('cftime')
    cftime = _import_cftime()
    units = 'days since 0001-01-01'
    try:
        dt = cftime.netcdftime.datetime(2001, 2, 29)
    except AttributeError:
        # standalone cftime library exposes datetime directly
        dt = cftime.datetime(2001, 2, 29)
    num_time = cftime.date2num(dt, units, calendar)
    if not enable_cftimeindex:
        with pytest.warns(SerializationWarning,
                          match='Unable to decode time axis'):
            actual = coding.times.decode_cf_datetime(
                num_time, units, calendar=calendar,
                enable_cftimeindex=enable_cftimeindex)
    else:
        actual = coding.times.decode_cf_datetime(
            num_time, units, calendar=calendar,
            enable_cftimeindex=enable_cftimeindex)
    expected = np.asarray(cftime.num2date(num_time, units, calendar))
    assert actual.dtype == np.dtype('O')
    assert expected == actual
def test_decode_non_standard_calendar_single_element_fallback(
        calendar, enable_cftimeindex):
    """Decode a single date that is only valid in a non-standard calendar.

    With ``enable_cftimeindex`` the decode succeeds silently; without it
    a SerializationWarning is expected before the object-dtype fallback.
    """
    if enable_cftimeindex:
        pytest.importorskip('cftime')
    cftime = _import_cftime()
    units = 'days since 0001-01-01'
    try:
        dt = cftime.netcdftime.datetime(2001, 2, 29)
    except AttributeError:
        # Must be using standalone netcdftime library
        dt = cftime.datetime(2001, 2, 29)
    num_time = cftime.date2num(dt, units, calendar)
    if enable_cftimeindex:
        actual = coding.times.decode_cf_datetime(
            num_time, units, calendar=calendar,
            enable_cftimeindex=enable_cftimeindex)
    else:
        # fallback path: the decode should warn exactly as matched here
        with pytest.warns(SerializationWarning,
                          match='Unable to decode time axis'):
            actual = coding.times.decode_cf_datetime(
                num_time, units, calendar=calendar,
                enable_cftimeindex=enable_cftimeindex)
    expected = np.asarray(cftime.num2date(num_time, units, calendar))
    assert actual.dtype == np.dtype('O')
    assert expected == actual
def test_decode_non_standard_calendar_inside_timestamp_range(
        calendar):
    """Hourly non-standard-calendar times inside the Timestamp range
    still decode to cftime objects (object dtype)."""
    cftime = _import_cftime()
    units = 'days since 0001-01-01'
    dates = pd.date_range('2001-04-01-00', end='2001-04-30-23', freq='H')
    non_standard_time = cftime.date2num(
        dates.to_pydatetime(), units, calendar=calendar)
    if cftime.__name__ != 'cftime':
        # netcdftime fallback lacks the only_use_cftime_datetimes keyword
        expected = cftime.num2date(non_standard_time, units,
                                   calendar=calendar)
    else:
        expected = cftime.num2date(
            non_standard_time, units, calendar=calendar,
            only_use_cftime_datetimes=True)
    actual = coding.times.decode_cf_datetime(
        non_standard_time, units, calendar=calendar)
    assert actual.dtype == np.dtype('O')
    # once we no longer support versions of netCDF4 older than 1.1.5,
    # we could do this check with near microsecond accuracy:
    # https://github.com/Unidata/netcdf4-python/issues/355
    assert (abs(actual - expected) <= np.timedelta64(1, 's')).all()
def test_decode_non_standard_calendar_inside_timestamp_range(
        calendar, enable_cftimeindex):
    """Decode hourly non-standard-calendar times within the Timestamp range.

    With ``enable_cftimeindex`` the result keeps cftime objects (object
    dtype); otherwise values are coerced to datetime64[ns].
    """
    if enable_cftimeindex:
        pytest.importorskip('cftime')
    cftime = _import_cftime()
    units = 'days since 0001-01-01'
    times = pd.date_range('2001-04-01-00', end='2001-04-30-23', freq='H')
    noleap_time = cftime.date2num(times.to_pydatetime(), units,
                                  calendar=calendar)
    if enable_cftimeindex:
        expected = cftime.num2date(noleap_time, units, calendar=calendar)
        expected_dtype = np.dtype('O')
    else:
        expected = times.values
        expected_dtype = np.dtype('M8[ns]')
    with warnings.catch_warnings():
        # the fallback path may warn about the time axis; not under test here
        warnings.filterwarnings('ignore', 'Unable to decode time axis')
        actual = coding.times.decode_cf_datetime(
            noleap_time, units, calendar=calendar,
            enable_cftimeindex=enable_cftimeindex)
    assert actual.dtype == expected_dtype
    abs_diff = abs(actual - expected)
    # once we no longer support versions of netCDF4 older than 1.1.5,
    # we could do this check with near microsecond accuracy:
    # https://github.com/Unidata/netcdf4-python/issues/355
    assert (abs_diff <= np.timedelta64(1, 's')).all()
def test_decode_non_standard_calendar_inside_timestamp_range(calendar):
    """Hourly non-standard-calendar times inside the Timestamp range
    decode to an object-dtype array matching cftime's num2date."""
    cftime = _import_cftime()
    units = 'days since 0001-01-01'
    times = pd.date_range('2001-04-01-00', end='2001-04-30-23', freq='H')
    non_standard_time = cftime.date2num(times.to_pydatetime(), units,
                                        calendar=calendar)
    if cftime.__name__ == 'cftime':
        # modern cftime: request cftime datetime objects explicitly
        expected = cftime.num2date(non_standard_time, units,
                                   calendar=calendar,
                                   only_use_cftime_datetimes=True)
    else:
        expected = cftime.num2date(non_standard_time, units,
                                   calendar=calendar)
    expected_dtype = np.dtype('O')
    actual = coding.times.decode_cf_datetime(non_standard_time, units,
                                             calendar=calendar)
    assert actual.dtype == expected_dtype
    abs_diff = abs(actual - expected)
    # once we no longer support versions of netCDF4 older than 1.1.5,
    # we could do this check with near microsecond accuracy:
    # https://github.com/Unidata/netcdf4-python/issues/355
    assert (abs_diff <= np.timedelta64(1, 's')).all()
def test_decode_standard_calendar_multidim_time_inside_timestamp_range(
        calendar, enable_cftimeindex):
    """2-D standard-calendar times inside the Timestamp range decode to
    datetime64[ns] whether or not cftime indexing is enabled."""
    if enable_cftimeindex:
        pytest.importorskip('cftime')
    cftime = _import_cftime()
    units = 'days since 0001-01-01'
    dates_a = pd.date_range('2001-04-01', end='2001-04-05', freq='D')
    dates_b = pd.date_range('2001-05-01', end='2001-05-05', freq='D')
    nums_a = cftime.date2num(dates_a.to_pydatetime(), units,
                             calendar=calendar)
    nums_b = cftime.date2num(dates_b.to_pydatetime(), units,
                             calendar=calendar)
    mdim_time = np.empty((len(nums_a), 2))
    mdim_time[:, 0] = nums_a
    mdim_time[:, 1] = nums_b
    actual = coding.times.decode_cf_datetime(
        mdim_time, units, calendar=calendar,
        enable_cftimeindex=enable_cftimeindex)
    assert actual.dtype == np.dtype('M8[ns]')
    # once we no longer support versions of netCDF4 older than 1.1.5,
    # we could do this check with near microsecond accuracy:
    # https://github.com/Unidata/netcdf4-python/issues/355
    for col, expected in enumerate((dates_a.values, dates_b.values)):
        abs_diff = abs(actual[:, col] - expected)
        assert (abs_diff <= np.timedelta64(1, 's')).all()
def test_decode_dates_outside_timestamp_range(
        calendar, enable_cftimeindex):
    """Dates in year 1, outside the Timestamp range, decode to cftime
    (or netcdftime) objects of a calendar-specific date type."""
    from datetime import datetime
    if enable_cftimeindex:
        pytest.importorskip('cftime')
    cftime = _import_cftime()
    units = 'days since 0001-01-01'
    times = [datetime(1, 4, 1, h) for h in range(1, 5)]
    noleap_time = cftime.date2num(times, units, calendar=calendar)
    if enable_cftimeindex:
        expected = cftime.num2date(noleap_time, units, calendar=calendar,
                                   only_use_cftime_datetimes=True)
    else:
        expected = cftime.num2date(noleap_time, units, calendar=calendar)
    expected_date_type = type(expected[0])
    with warnings.catch_warnings():
        # decoding outside the Timestamp range warns; irrelevant here
        warnings.filterwarnings('ignore', 'Unable to decode time axis')
        actual = coding.times.decode_cf_datetime(
            noleap_time, units, calendar=calendar,
            enable_cftimeindex=enable_cftimeindex)
    assert all(isinstance(value, expected_date_type) for value in actual)
    abs_diff = abs(actual - expected)
    # once we no longer support versions of netCDF4 older than 1.1.5,
    # we could do this check with near microsecond accuracy:
    # https://github.com/Unidata/netcdf4-python/issues/355
    assert (abs_diff <= np.timedelta64(1, 's')).all()
def test_decode_non_standard_calendar_fallback(calendar, enable_cftimeindex):
    """When cftime indexing is disabled, non-standard calendars fall back
    to object dtype with exactly one 'Unable to decode' warning."""
    if enable_cftimeindex:
        pytest.importorskip('cftime')
    cftime = _import_cftime()
    # ensure leap year doesn't matter
    for year in (2010, 2011, 2012, 2013, 2014):
        units = 'days since {0}-01-01'.format(year)
        num_times = np.arange(100)
        expected = cftime.num2date(num_times, units, calendar)
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            actual = coding.times.decode_cf_datetime(
                num_times, units, calendar=calendar,
                enable_cftimeindex=enable_cftimeindex)
            if not enable_cftimeindex:
                assert len(w) == 1
                assert 'Unable to decode time axis' in str(w[0].message)
            else:
                assert len(w) == 0
        assert actual.dtype == np.dtype('O')
        assert_array_equal(actual, expected)
def test_decode_non_standard_calendar_multidim_time(self):
    """2-D noleap-calendar times inside the Timestamp range decode (with
    the decode warning suppressed) to datetime64[ns] columns."""
    cftime = _import_cftime()
    calendar = 'noleap'
    units = 'days since 0001-01-01'
    dates_a = pd.date_range('2001-04-01', end='2001-04-05', freq='D')
    dates_b = pd.date_range('2001-05-01', end='2001-05-05', freq='D')
    nums_a = cftime.date2num(dates_a.to_pydatetime(), units,
                             calendar=calendar)
    nums_b = cftime.date2num(dates_b.to_pydatetime(), units,
                             calendar=calendar)
    mdim_time = np.empty((len(nums_a), 2))
    mdim_time[:, 0] = nums_a
    mdim_time[:, 1] = nums_b
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', 'Unable to decode time axis')
        actual = coding.times.decode_cf_datetime(mdim_time, units,
                                                 calendar=calendar)
    assert actual.dtype == np.dtype('M8[ns]')
    assert_array_equal(actual[:, 0], dates_a.values)
    assert_array_equal(actual[:, 1], dates_b.values)
def test_decode_non_standard_calendar(self):
    """Hourly times in every supported non-standard calendar decode to
    datetime64[ns] within the Timestamp-representable range."""
    cftime = _import_cftime()
    calendars = ('noleap', '365_day', '360_day', 'julian', 'all_leap',
                 '366_day')
    for calendar in calendars:
        units = 'days since 0001-01-01'
        dates = pd.date_range('2001-04-01-00', end='2001-04-30-23',
                              freq='H')
        noleap_time = cftime.date2num(dates.to_pydatetime(), units,
                                      calendar=calendar)
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore', 'Unable to decode time axis')
            actual = coding.times.decode_cf_datetime(noleap_time, units,
                                                     calendar=calendar)
        assert actual.dtype == np.dtype('M8[ns]')
        # once we no longer support versions of netCDF4 older than 1.1.5,
        # we could do this check with near microsecond accuracy:
        # https://github.com/Unidata/netcdf4-python/issues/355
        assert (abs(actual - dates.values) <= np.timedelta64(1, 's')).all()
def test_cf_datetime(num_dates, units, calendar):
    """Round-trip *num_dates* through decode_cf_datetime and
    encode_cf_datetime, comparing against cftime's num2date reference."""
    cftime = _import_cftime()
    expected = _ensure_naive_tz(cftime.num2date(num_dates, units, calendar))
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', 'Unable to decode time axis')
        actual = coding.times.decode_cf_datetime(num_dates, units, calendar)
    if (isinstance(actual, np.ndarray) and
            np.issubdtype(actual.dtype, np.datetime64)):
        # self.assertEqual(actual.dtype.kind, 'M')
        # For some reason, numpy 1.8 does not compare ns precision
        # datetime64 arrays as equal to arrays of datetime objects,
        # but it works for us precision. Thus, convert to us
        # precision for the actual array equal comparison...
        actual_cmp = actual.astype('M8[us]')
    else:
        actual_cmp = actual
    assert_array_equal(expected, actual_cmp)
    encoded, _, _ = coding.times.encode_cf_datetime(actual, units, calendar)
    if '1-1-1' not in units:
        # pandas parses this date very strangely, so the original
        # units/encoding cannot be preserved in this case:
        # (Pdb) pd.to_datetime('1-1-1 00:00:0.0')
        # Timestamp('2001-01-01 00:00:00')
        assert_array_equal(num_dates, np.around(encoded, 1))
    if (hasattr(num_dates, 'ndim') and num_dates.ndim == 1 and
            '1000' not in units):
        # verify that wrapping with a pandas.Index works
        # note that it *does not* currently work to even put
        # non-datetime64 compatible dates into a pandas.Index
        encoded, _, _ = coding.times.encode_cf_datetime(
            pd.Index(actual), units, calendar)
        assert_array_equal(num_dates, np.around(encoded, 1))
def test_decode_non_standard_calendar_fallback(
        calendar, enable_cftimeindex):
    """Non-standard calendars must decode to object dtype; without cftime
    indexing the decode additionally warns exactly once."""
    if enable_cftimeindex:
        pytest.importorskip('cftime')
    cftime = _import_cftime()
    # ensure leap year doesn't matter
    for year in [2010, 2011, 2012, 2013, 2014]:
        units = 'days since {0}-01-01'.format(year)
        num_times = np.arange(100)
        expected = cftime.num2date(num_times, units, calendar)
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            actual = coding.times.decode_cf_datetime(
                num_times, units, calendar=calendar,
                enable_cftimeindex=enable_cftimeindex)
            if enable_cftimeindex:
                assert len(w) == 0
            else:
                assert len(w) == 1
                assert 'Unable to decode time axis' in str(w[0].message)
        assert actual.dtype == np.dtype('O')
        assert_array_equal(actual, expected)
def test_decode_non_standard_calendar_inside_timestamp_range(
        calendar, enable_cftimeindex):
    """Hourly non-standard-calendar times within the Timestamp range:
    object dtype with cftime indexing, datetime64[ns] without."""
    if enable_cftimeindex:
        pytest.importorskip('cftime')
    cftime = _import_cftime()
    units = 'days since 0001-01-01'
    times = pd.date_range('2001-04-01-00', end='2001-04-30-23', freq='H')
    noleap_time = cftime.date2num(times.to_pydatetime(), units,
                                  calendar=calendar)
    if enable_cftimeindex:
        expected = cftime.num2date(noleap_time, units, calendar=calendar)
        expected_dtype = np.dtype('O')
    else:
        expected = times.values
        expected_dtype = np.dtype('M8[ns]')
    with warnings.catch_warnings():
        # the datetime64 fallback may warn; not under test here
        warnings.filterwarnings('ignore', 'Unable to decode time axis')
        actual = coding.times.decode_cf_datetime(
            noleap_time, units, calendar=calendar,
            enable_cftimeindex=enable_cftimeindex)
    assert actual.dtype == expected_dtype
    abs_diff = abs(actual - expected)
    # once we no longer support versions of netCDF4 older than 1.1.5,
    # we could do this check with near microsecond accuracy:
    # https://github.com/Unidata/netcdf4-python/issues/355
    assert (abs_diff <= np.timedelta64(1, 's')).all()
def test_decode_multidim_time_outside_timestamp_range(calendar,
                                                      enable_cftimeindex):
    """2-D year-1 times (outside the Timestamp range) decode to object
    dtype; expected values come from cftime's num2date."""
    from datetime import datetime
    if enable_cftimeindex:
        pytest.importorskip('cftime')
    cftime = _import_cftime()
    units = 'days since 0001-01-01'
    times1 = [datetime(1, 4, day) for day in range(1, 6)]
    times2 = [datetime(1, 5, day) for day in range(1, 6)]
    noleap_time1 = cftime.date2num(times1, units, calendar=calendar)
    noleap_time2 = cftime.date2num(times2, units, calendar=calendar)
    mdim_time = np.empty((len(noleap_time1), 2), )
    mdim_time[:, 0] = noleap_time1
    mdim_time[:, 1] = noleap_time2
    if enable_cftimeindex:
        expected1 = cftime.num2date(noleap_time1, units, calendar,
                                    only_use_cftime_datetimes=True)
        expected2 = cftime.num2date(noleap_time2, units, calendar,
                                    only_use_cftime_datetimes=True)
    else:
        expected1 = cftime.num2date(noleap_time1, units, calendar)
        expected2 = cftime.num2date(noleap_time2, units, calendar)
    with warnings.catch_warnings():
        # decoding outside the Timestamp range warns; irrelevant here
        warnings.filterwarnings('ignore', 'Unable to decode time axis')
        actual = coding.times.decode_cf_datetime(
            mdim_time, units, calendar=calendar,
            enable_cftimeindex=enable_cftimeindex)
    assert actual.dtype == np.dtype('O')
    abs_diff1 = abs(actual[:, 0] - expected1)
    abs_diff2 = abs(actual[:, 1] - expected2)
    # once we no longer support versions of netCDF4 older than 1.1.5,
    # we could do this check with near microsecond accuracy:
    # https://github.com/Unidata/netcdf4-python/issues/355
    assert (abs_diff1 <= np.timedelta64(1, 's')).all()
    assert (abs_diff2 <= np.timedelta64(1, 's')).all()
def test_decode_standard_calendar_inside_timestamp_range(calendar):
    """Hourly standard-calendar times inside the Timestamp range decode
    straight to datetime64[ns]."""
    cftime = _import_cftime()
    units = "days since 0001-01-01"
    dates = pd.date_range("2001-04-01-00", end="2001-04-30-23", freq="H")
    num_dates = cftime.date2num(dates.to_pydatetime(), units,
                                calendar=calendar)
    actual = coding.times.decode_cf_datetime(num_dates, units,
                                             calendar=calendar)
    assert actual.dtype == np.dtype("M8[ns]")
    # once we no longer support versions of netCDF4 older than 1.1.5,
    # we could do this check with near microsecond accuracy:
    # https://github.com/Unidata/netcdf4-python/issues/355
    assert (abs(actual - dates.values) <= np.timedelta64(1, "s")).all()
def test_decode_single_element_outside_timestamp_range(
        calendar, enable_cftimeindex):
    """Scalar and nested single-element times outside the Timestamp range
    decode to the same date type cftime's num2date produces."""
    if enable_cftimeindex:
        pytest.importorskip('cftime')
    cftime = _import_cftime()
    units = 'days since 0001-01-01'
    for days in (1, 1470376):
        for num_time in (days, [days], [[days]]):
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore',
                                        'Unable to decode time axis')
                actual = coding.times.decode_cf_datetime(
                    num_time, units, calendar=calendar,
                    enable_cftimeindex=enable_cftimeindex)
            expected = cftime.num2date(days, units, calendar)
            assert isinstance(actual.item(), type(expected))
def test_decode_single_element_outside_timestamp_range(
        calendar, enable_cftimeindex):
    """Scalar, 1-element-list, and nested-list times far outside the
    Timestamp range all decode to cftime's date type."""
    if enable_cftimeindex:
        pytest.importorskip('cftime')
    cftime = _import_cftime()
    units = 'days since 0001-01-01'
    for days in [1, 1470376]:
        for num_time in [days, [days], [[days]]]:
            with warnings.catch_warnings():
                # decoding outside the Timestamp range warns; not under test
                warnings.filterwarnings('ignore',
                                        'Unable to decode time axis')
                actual = coding.times.decode_cf_datetime(
                    num_time, units, calendar=calendar,
                    enable_cftimeindex=enable_cftimeindex)
            expected = cftime.num2date(days, units, calendar)
            assert isinstance(actual.item(), type(expected))
def test_decode_non_standard_calendar_single_element_fallback(self):
    """Dates that exist only in 360/366-day calendars decode with a
    fallback warning to an object-dtype scalar array."""
    cftime = _import_cftime()
    units = 'days since 0001-01-01'
    try:
        dt = cftime.netcdftime.datetime(2001, 2, 29)
    except AttributeError:
        # standalone library exposes datetime directly
        dt = cftime.datetime(2001, 2, 29)
    for calendar in ('360_day', 'all_leap', '366_day'):
        num_time = cftime.date2num(dt, units, calendar)
        with pytest.warns(Warning, match='Unable to decode time axis'):
            actual = coding.times.decode_cf_datetime(num_time, units,
                                                     calendar=calendar)
        expected = np.asarray(cftime.num2date(num_time, units, calendar))
        assert actual.dtype == np.dtype('O')
        assert expected == actual
def test_decode_non_standard_calendar_single_element_fallback(self):
    """Feb 29, 2001 exists only in 360/366-day calendars; decoding it
    must warn and fall back to an object-dtype result."""
    cftime = _import_cftime()
    units = 'days since 0001-01-01'
    try:
        dt = cftime.netcdftime.datetime(2001, 2, 29)
    except AttributeError:
        # Must be using standalone netcdftime library
        dt = cftime.datetime(2001, 2, 29)
    for calendar in ['360_day', 'all_leap', '366_day']:
        num_time = cftime.date2num(dt, units, calendar)
        with pytest.warns(Warning, match='Unable to decode time axis'):
            actual = coding.times.decode_cf_datetime(num_time, units,
                                                     calendar=calendar)
        expected = np.asarray(cftime.num2date(num_time, units, calendar))
        assert actual.dtype == np.dtype('O')
        assert expected == actual
def test_decode_single_element_outside_timestamp_range(
        calendar):
    """Single-element times far outside the Timestamp range decode to
    the same cftime date type num2date yields."""
    cftime = _import_cftime()
    units = 'days since 0001-01-01'
    for days in (1, 1470376):
        if cftime.__name__ == 'cftime':
            expected = cftime.num2date(days, units, calendar,
                                       only_use_cftime_datetimes=True)
        else:
            # netcdftime fallback lacks the only_use_cftime_datetimes keyword
            expected = cftime.num2date(days, units, calendar)
        for num_time in (days, [days], [[days]]):
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore',
                                        'Unable to decode time axis')
                actual = coding.times.decode_cf_datetime(
                    num_time, units, calendar=calendar)
            assert isinstance(actual.item(), type(expected))
def test_decode_single_element_outside_timestamp_range(
        calendar):
    """Scalar and nested single-element times outside the Timestamp
    range decode to cftime's calendar-specific date type."""
    cftime = _import_cftime()
    units = 'days since 0001-01-01'
    for days in [1, 1470376]:
        for num_time in [days, [days], [[days]]]:
            with warnings.catch_warnings():
                # decoding outside the Timestamp range warns; not under test
                warnings.filterwarnings('ignore',
                                        'Unable to decode time axis')
                actual = coding.times.decode_cf_datetime(
                    num_time, units, calendar=calendar)
            if cftime.__name__ == 'cftime':
                # modern cftime: request cftime datetime objects explicitly
                expected = cftime.num2date(days, units, calendar,
                                           only_use_cftime_datetimes=True)
            else:
                expected = cftime.num2date(days, units, calendar)
            assert isinstance(actual.item(), type(expected))
def test_decode_multidim_time_outside_timestamp_range(calendar):
    """2-D year-1 times (outside the Timestamp range) decode to an
    object-dtype array matching cftime's num2date per column."""
    from datetime import datetime
    cftime = _import_cftime()
    units = "days since 0001-01-01"
    times1 = [datetime(1, 4, day) for day in range(1, 6)]
    times2 = [datetime(1, 5, day) for day in range(1, 6)]
    time1 = cftime.date2num(times1, units, calendar=calendar)
    time2 = cftime.date2num(times2, units, calendar=calendar)
    mdim_time = np.empty((len(time1), 2))
    mdim_time[:, 0] = time1
    mdim_time[:, 1] = time2
    if cftime.__name__ == "cftime":
        # modern cftime: request cftime datetime objects explicitly
        expected1 = cftime.num2date(time1, units, calendar,
                                    only_use_cftime_datetimes=True)
        expected2 = cftime.num2date(time2, units, calendar,
                                    only_use_cftime_datetimes=True)
    else:
        expected1 = cftime.num2date(time1, units, calendar)
        expected2 = cftime.num2date(time2, units, calendar)
    with warnings.catch_warnings():
        # decoding outside the Timestamp range warns; irrelevant here
        warnings.filterwarnings("ignore", "Unable to decode time axis")
        actual = coding.times.decode_cf_datetime(mdim_time, units,
                                                 calendar=calendar)
    assert actual.dtype == np.dtype("O")
    abs_diff1 = abs(actual[:, 0] - expected1)
    abs_diff2 = abs(actual[:, 1] - expected2)
    # once we no longer support versions of netCDF4 older than 1.1.5,
    # we could do this check with near microsecond accuracy:
    # https://github.com/Unidata/netcdf4-python/issues/355
    assert (abs_diff1 <= np.timedelta64(1, "s")).all()
    assert (abs_diff2 <= np.timedelta64(1, "s")).all()
def test_decode_multidim_time_outside_timestamp_range(
        calendar, enable_cftimeindex):
    """2-D year-1 times decode to object dtype; with cftime indexing the
    reference dates are cftime datetime objects."""
    from datetime import datetime
    if enable_cftimeindex:
        pytest.importorskip('cftime')
    cftime = _import_cftime()
    units = 'days since 0001-01-01'
    times1 = [datetime(1, 4, day) for day in range(1, 6)]
    times2 = [datetime(1, 5, day) for day in range(1, 6)]
    noleap_time1 = cftime.date2num(times1, units, calendar=calendar)
    noleap_time2 = cftime.date2num(times2, units, calendar=calendar)
    mdim_time = np.empty((len(noleap_time1), 2), )
    mdim_time[:, 0] = noleap_time1
    mdim_time[:, 1] = noleap_time2
    if enable_cftimeindex:
        expected1 = cftime.num2date(noleap_time1, units, calendar,
                                    only_use_cftime_datetimes=True)
        expected2 = cftime.num2date(noleap_time2, units, calendar,
                                    only_use_cftime_datetimes=True)
    else:
        expected1 = cftime.num2date(noleap_time1, units, calendar)
        expected2 = cftime.num2date(noleap_time2, units, calendar)
    with warnings.catch_warnings():
        # decoding outside the Timestamp range warns; irrelevant here
        warnings.filterwarnings('ignore', 'Unable to decode time axis')
        actual = coding.times.decode_cf_datetime(
            mdim_time, units, calendar=calendar,
            enable_cftimeindex=enable_cftimeindex)
    assert actual.dtype == np.dtype('O')
    abs_diff1 = abs(actual[:, 0] - expected1)
    abs_diff2 = abs(actual[:, 1] - expected2)
    # once we no longer support versions of netCDF4 older than 1.1.5,
    # we could do this check with near microsecond accuracy:
    # https://github.com/Unidata/netcdf4-python/issues/355
    assert (abs_diff1 <= np.timedelta64(1, 's')).all()
    assert (abs_diff2 <= np.timedelta64(1, 's')).all()
def test_decode_non_standard_calendar_fallback(self):
    """Decoding times in calendars without a datetime64 representation
    must warn once and fall back to an object-dtype array matching
    cftime's num2date.

    Bug fix: the inner loop previously reassigned ``calendar = '360_day'``
    immediately, so ``'366_day'`` and ``'all_leap'`` were never exercised;
    the reassignment is removed so every listed calendar is tested.
    """
    cftime = _import_cftime()
    # ensure leap year doesn't matter
    for year in [2010, 2011, 2012, 2013, 2014]:
        for calendar in ['360_day', '366_day', 'all_leap']:
            units = 'days since {0}-01-01'.format(year)
            num_times = np.arange(100)
            expected = cftime.num2date(num_times, units, calendar)
            with warnings.catch_warnings(record=True) as w:
                warnings.simplefilter('always')
                actual = coding.times.decode_cf_datetime(num_times, units,
                                                         calendar=calendar)
                assert len(w) == 1
                assert 'Unable to decode time axis' in \
                    str(w[0].message)
            assert actual.dtype == np.dtype('O')
            assert_array_equal(actual, expected)
def test_decode_non_standard_calendar(self):
    """Hourly times in each supported non-standard calendar decode to
    datetime64[ns] within the Timestamp-representable range."""
    cftime = _import_cftime()
    for calendar in ['noleap', '365_day', '360_day', 'julian', 'all_leap',
                     '366_day']:
        units = 'days since 0001-01-01'
        times = pd.date_range('2001-04-01-00', end='2001-04-30-23',
                              freq='H')
        noleap_time = cftime.date2num(times.to_pydatetime(), units,
                                      calendar=calendar)
        expected = times.values
        with warnings.catch_warnings():
            # the decode may warn about the time axis; not under test here
            warnings.filterwarnings('ignore', 'Unable to decode time axis')
            actual = coding.times.decode_cf_datetime(noleap_time, units,
                                                     calendar=calendar)
        assert actual.dtype == np.dtype('M8[ns]')
        abs_diff = abs(actual - expected)
        # once we no longer support versions of netCDF4 older than 1.1.5,
        # we could do this check with near microsecond accuracy:
        # https://github.com/Unidata/netcdf4-python/issues/355
        assert (abs_diff <= np.timedelta64(1, 's')).all()
def test_decode_non_standard_calendar_fallback(self):
    """Non-standard calendars must fall back to an object-dtype decode
    with exactly one 'Unable to decode time axis' warning.

    Bug fix: the inner loop previously clobbered its own loop variable
    with ``calendar = '360_day'``, leaving ``'366_day'`` and
    ``'all_leap'`` untested; the clobbering assignment is removed.
    """
    cftime = _import_cftime()
    # ensure leap year doesn't matter
    for year in [2010, 2011, 2012, 2013, 2014]:
        for calendar in ['360_day', '366_day', 'all_leap']:
            units = 'days since {0}-01-01'.format(year)
            num_times = np.arange(100)
            expected = cftime.num2date(num_times, units, calendar)
            with warnings.catch_warnings(record=True) as w:
                warnings.simplefilter('always')
                actual = coding.times.decode_cf_datetime(num_times, units,
                                                         calendar=calendar)
                assert len(w) == 1
                assert 'Unable to decode time axis' in \
                    str(w[0].message)
            assert actual.dtype == np.dtype('O')
            assert_array_equal(actual, expected)
def test_cf_datetime(num_dates, units, calendar):
    """Round-trip *num_dates* through decode_cf_datetime and
    encode_cf_datetime, comparing against cftime's num2date reference
    with one-second tolerance."""
    cftime = _import_cftime()
    if cftime.__name__ == "cftime":
        # modern cftime: request cftime datetime objects explicitly
        expected = cftime.num2date(num_dates, units, calendar,
                                   only_use_cftime_datetimes=True)
    else:
        expected = cftime.num2date(num_dates, units, calendar)
    min_y = np.ravel(np.atleast_1d(expected))[np.nanargmin(num_dates)].year
    max_y = np.ravel(np.atleast_1d(expected))[np.nanargmax(num_dates)].year
    if min_y >= 1678 and max_y < 2262:
        # all dates fit in the Timestamp range, so compare as datetime64
        expected = cftime_to_nptime(expected)
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", "Unable to decode time axis")
        actual = coding.times.decode_cf_datetime(num_dates, units, calendar)
    abs_diff = np.asarray(abs(actual - expected)).ravel()
    abs_diff = pd.to_timedelta(abs_diff.tolist()).to_numpy()
    # once we no longer support versions of netCDF4 older than 1.1.5,
    # we could do this check with near microsecond accuracy:
    # https://github.com/Unidata/netcdf4-python/issues/355
    assert (abs_diff <= np.timedelta64(1, "s")).all()
    encoded, _, _ = coding.times.encode_cf_datetime(actual, units, calendar)
    if "1-1-1" not in units:
        # pandas parses this date very strangely, so the original
        # units/encoding cannot be preserved in this case:
        # (Pdb) pd.to_datetime('1-1-1 00:00:0.0')
        # Timestamp('2001-01-01 00:00:00')
        assert_array_equal(num_dates, np.around(encoded, 1))
    if hasattr(num_dates, "ndim") and num_dates.ndim == 1 and \
            "1000" not in units:
        # verify that wrapping with a pandas.Index works
        # note that it *does not* currently work to even put
        # non-datetime64 compatible dates into a pandas.Index
        encoded, _, _ = coding.times.encode_cf_datetime(
            pd.Index(actual), units, calendar)
        assert_array_equal(num_dates, np.around(encoded, 1))
def test_decode_non_standard_calendar_single_element(
        calendar):
    """A scalar date valid only in a non-standard calendar decodes to a
    0-d object-dtype array holding the matching cftime date."""
    cftime = _import_cftime()
    units = 'days since 0001-01-01'
    try:
        dt = cftime.netcdftime.datetime(2001, 2, 29)
    except AttributeError:
        # Must be using the standalone cftime library
        dt = cftime.datetime(2001, 2, 29)
    num_time = cftime.date2num(dt, units, calendar)
    actual = coding.times.decode_cf_datetime(
        num_time, units, calendar=calendar)
    if cftime.__name__ == 'cftime':
        # modern cftime: request cftime datetime objects explicitly
        expected = np.asarray(cftime.num2date(
            num_time, units, calendar, only_use_cftime_datetimes=True))
    else:
        expected = np.asarray(cftime.num2date(num_time, units, calendar))
    assert actual.dtype == np.dtype('O')
    assert expected == actual
def test_decode_360_day_calendar():
    """360-day-calendar times decode to cftime objects without any
    warning, for leap and non-leap epoch years alike."""
    cftime = _import_cftime()
    calendar = '360_day'
    # ensure leap year doesn't matter
    for year in [2010, 2011, 2012, 2013, 2014]:
        units = 'days since {0}-01-01'.format(year)
        num_times = np.arange(100)
        if cftime.__name__ == 'cftime':
            # modern cftime: request cftime datetime objects explicitly
            expected = cftime.num2date(num_times, units, calendar,
                                       only_use_cftime_datetimes=True)
        else:
            expected = cftime.num2date(num_times, units, calendar)
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            actual = coding.times.decode_cf_datetime(
                num_times, units, calendar=calendar)
            assert len(w) == 0
        assert actual.dtype == np.dtype('O')
        assert_array_equal(actual, expected)
def test_cf_datetime(num_dates, units, calendar):
    """Round-trip *num_dates* through decode_cf_datetime and
    encode_cf_datetime against cftime's num2date, with one-second
    tolerance on the decoded values."""
    cftime = _import_cftime()
    if cftime.__name__ == 'cftime':
        # modern cftime: request cftime datetime objects explicitly
        expected = cftime.num2date(num_dates, units, calendar,
                                   only_use_cftime_datetimes=True)
    else:
        expected = cftime.num2date(num_dates, units, calendar)
    min_y = np.ravel(np.atleast_1d(expected))[np.nanargmin(num_dates)].year
    max_y = np.ravel(np.atleast_1d(expected))[np.nanargmax(num_dates)].year
    if min_y >= 1678 and max_y < 2262:
        # all dates fit in the Timestamp range, so compare as datetime64
        expected = cftime_to_nptime(expected)
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', 'Unable to decode time axis')
        actual = coding.times.decode_cf_datetime(num_dates, units, calendar)
    abs_diff = np.atleast_1d(abs(actual - expected)).astype(np.timedelta64)
    # once we no longer support versions of netCDF4 older than 1.1.5,
    # we could do this check with near microsecond accuracy:
    # https://github.com/Unidata/netcdf4-python/issues/355
    assert (abs_diff <= np.timedelta64(1, 's')).all()
    encoded, _, _ = coding.times.encode_cf_datetime(actual, units, calendar)
    if '1-1-1' not in units:
        # pandas parses this date very strangely, so the original
        # units/encoding cannot be preserved in this case:
        # (Pdb) pd.to_datetime('1-1-1 00:00:0.0')
        # Timestamp('2001-01-01 00:00:00')
        assert_array_equal(num_dates, np.around(encoded, 1))
    if (hasattr(num_dates, 'ndim') and num_dates.ndim == 1 and
            '1000' not in units):
        # verify that wrapping with a pandas.Index works
        # note that it *does not* currently work to even put
        # non-datetime64 compatible dates into a pandas.Index
        encoded, _, _ = coding.times.encode_cf_datetime(
            pd.Index(actual), units, calendar)
        assert_array_equal(num_dates, np.around(encoded, 1))
def test_decode_nonstandard_calendar_multidim_time_inside_timestamp_range(
        calendar):
    """Decoding a 2-D time array on a non-standard calendar yields an
    object-dtype array whose columns match cftime's decoding to within
    one second."""
    cftime = _import_cftime()
    units = 'days since 0001-01-01'
    times1 = pd.date_range('2001-04-01', end='2001-04-05', freq='D')
    times2 = pd.date_range('2001-05-01', end='2001-05-05', freq='D')
    time1 = cftime.date2num(times1.to_pydatetime(), units, calendar=calendar)
    time2 = cftime.date2num(times2.to_pydatetime(), units, calendar=calendar)
    # stack the two series as the columns of a 2-D time coordinate
    mdim_time = np.empty((len(time1), 2))
    mdim_time[:, 0] = time1
    mdim_time[:, 1] = time2

    if cftime.__name__ == 'cftime':
        expected1 = cftime.num2date(time1, units, calendar,
                                    only_use_cftime_datetimes=True)
        expected2 = cftime.num2date(time2, units, calendar,
                                    only_use_cftime_datetimes=True)
    else:
        # older netcdftime API has no only_use_cftime_datetimes flag
        expected1 = cftime.num2date(time1, units, calendar)
        expected2 = cftime.num2date(time2, units, calendar)

    actual = coding.times.decode_cf_datetime(
        mdim_time, units, calendar=calendar)
    assert actual.dtype == np.dtype('O')

    abs_diff1 = abs(actual[:, 0] - expected1)
    abs_diff2 = abs(actual[:, 1] - expected2)
    # once we no longer support versions of netCDF4 older than 1.1.5,
    # we could do this check with near microsecond accuracy:
    # https://github.com/Unidata/netcdf4-python/issues/355
    assert (abs_diff1 <= np.timedelta64(1, 's')).all()
    assert (abs_diff2 <= np.timedelta64(1, 's')).all()
def test_decode_non_standard_calendar_multidim_time(self):
    """A 2-D noleap time variable whose dates are inside the
    datetime64[ns] range decodes to datetime64[ns] values that equal the
    pandas date_range values exactly."""
    cftime = _import_cftime()
    calendar = 'noleap'
    units = 'days since 0001-01-01'
    times1 = pd.date_range('2001-04-01', end='2001-04-05', freq='D')
    times2 = pd.date_range('2001-05-01', end='2001-05-05', freq='D')
    noleap_time1 = cftime.date2num(times1.to_pydatetime(), units,
                                   calendar=calendar)
    noleap_time2 = cftime.date2num(times2.to_pydatetime(), units,
                                   calendar=calendar)
    # stack the two series as the columns of a 2-D time coordinate
    mdim_time = np.empty((len(noleap_time1), 2))
    mdim_time[:, 0] = noleap_time1
    mdim_time[:, 1] = noleap_time2

    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', 'Unable to decode time axis')
        actual = coding.times.decode_cf_datetime(mdim_time, units,
                                                 calendar=calendar)

    assert actual.dtype == np.dtype('M8[ns]')
    assert_array_equal(actual[:, 0], times1.values)
    assert_array_equal(actual[:, 1], times2.values)
def test_cf_datetime(num_dates, units, calendar):
    """decode_cf_datetime must agree with cftime's decoding, and
    encode_cf_datetime must round-trip back to the original numbers."""
    cftime = _import_cftime()
    expected = _ensure_naive_tz(
        cftime.num2date(num_dates, units, calendar))
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', 'Unable to decode time axis')
        actual = coding.times.decode_cf_datetime(num_dates, units, calendar)
    if (isinstance(actual, np.ndarray) and
            np.issubdtype(actual.dtype, np.datetime64)):
        # For some reason, numpy 1.8 does not compare ns precision
        # datetime64 arrays as equal to arrays of datetime objects,
        # but it works for us precision. Thus, convert to us
        # precision for the actual array equal comparison...
        actual_cmp = actual.astype('M8[us]')
    else:
        actual_cmp = actual
    assert_array_equal(expected, actual_cmp)
    encoded, _, _ = coding.times.encode_cf_datetime(actual, units, calendar)
    if '1-1-1' not in units:
        # pandas parses this date very strangely, so the original
        # units/encoding cannot be preserved in this case:
        # (Pdb) pd.to_datetime('1-1-1 00:00:0.0')
        # Timestamp('2001-01-01 00:00:00')
        assert_array_equal(num_dates, np.around(encoded, 1))
        if (hasattr(num_dates, 'ndim') and num_dates.ndim == 1 and
                '1000' not in units):
            # verify that wrapping with a pandas.Index works
            # note that it *does not* currently work to even put
            # non-datetime64 compatible dates into a pandas.Index
            encoded, _, _ = coding.times.encode_cf_datetime(
                pd.Index(actual), units, calendar)
            assert_array_equal(num_dates, np.around(encoded, 1))
def test_cf_datetime(self):
    """Round-trip assorted numeric dates and (deliberately messy) unit
    strings through decode_cf_datetime / encode_cf_datetime for each
    standard-family calendar, comparing against cftime's own decoding.

    Fix: removed a leftover debug ``print(num_dates, units, calendar)``
    that spammed test output, and a dead commented-out assertion.
    """
    cftime = _import_cftime()
    for num_dates, units in [
            (np.arange(10), 'days since 2000-01-01'),
            (np.arange(10).astype('float64'), 'days since 2000-01-01'),
            (np.arange(10).astype('float32'), 'days since 2000-01-01'),
            (np.arange(10).reshape(2, 5), 'days since 2000-01-01'),
            (12300 + np.arange(5), 'hours since 1680-01-01 00:00:00'),
            # here we add a couple minor formatting errors to test
            # the robustness of the parsing algorithm.
            (12300 + np.arange(5), 'hour since 1680-01-01 00:00:00'),
            (12300 + np.arange(5), u'Hour since 1680-01-01 00:00:00'),
            (12300 + np.arange(5), ' Hour since 1680-01-01 00:00:00 '),
            (10, 'days since 2000-01-01'),
            ([10], 'daYs since 2000-01-01'),
            ([[10]], 'days since 2000-01-01'),
            ([10, 10], 'days since 2000-01-01'),
            (np.array(10), 'days since 2000-01-01'),
            (0, 'days since 1000-01-01'),
            ([0], 'days since 1000-01-01'),
            ([[0]], 'days since 1000-01-01'),
            (np.arange(2), 'days since 1000-01-01'),
            (np.arange(0, 100000, 20000), 'days since 1900-01-01'),
            (17093352.0, 'hours since 1-1-1 00:00:0.0'),
            ([0.5, 1.5], 'hours since 1900-01-01T00:00:00'),
            (0, 'milliseconds since 2000-01-01T00:00:00'),
            (0, 'microseconds since 2000-01-01T00:00:00'),
            (np.int32(788961600), 'seconds since 1981-01-01'),  # GH2002
    ]:
        for calendar in ['standard', 'gregorian', 'proleptic_gregorian']:
            expected = _ensure_naive_tz(
                cftime.num2date(num_dates, units, calendar))
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore',
                                        'Unable to decode time axis')
                actual = coding.times.decode_cf_datetime(num_dates, units,
                                                         calendar)
            if (isinstance(actual, np.ndarray) and
                    np.issubdtype(actual.dtype, np.datetime64)):
                # For some reason, numpy 1.8 does not compare ns precision
                # datetime64 arrays as equal to arrays of datetime objects,
                # but it works for us precision. Thus, convert to us
                # precision for the actual array equal comparison...
                actual_cmp = actual.astype('M8[us]')
            else:
                actual_cmp = actual
            assert_array_equal(expected, actual_cmp)
            encoded, _, _ = coding.times.encode_cf_datetime(actual, units,
                                                            calendar)
            if '1-1-1' not in units:
                # pandas parses this date very strangely, so the original
                # units/encoding cannot be preserved in this case:
                # (Pdb) pd.to_datetime('1-1-1 00:00:0.0')
                # Timestamp('2001-01-01 00:00:00')
                assert_array_equal(num_dates, np.around(encoded, 1))
                if (hasattr(num_dates, 'ndim') and num_dates.ndim == 1 and
                        '1000' not in units):
                    # verify that wrapping with a pandas.Index works
                    # note that it *does not* currently work to even put
                    # non-datetime64 compatible dates into a pandas.Index
                    encoded, _, _ = coding.times.encode_cf_datetime(
                        pd.Index(actual), units, calendar)
                    assert_array_equal(num_dates, np.around(encoded, 1))
def test_cf_datetime(self):
    """Round-trip assorted numeric dates and (deliberately messy) unit
    strings through decode_cf_datetime / encode_cf_datetime for each
    standard-family calendar, comparing against cftime's own decoding.

    Fix: removed a leftover debug ``print(num_dates, units, calendar)``
    that spammed test output, and a dead commented-out assertion.
    """
    cftime = _import_cftime()
    for num_dates, units in [
            (np.arange(10), 'days since 2000-01-01'),
            (np.arange(10).astype('float64'), 'days since 2000-01-01'),
            (np.arange(10).astype('float32'), 'days since 2000-01-01'),
            (np.arange(10).reshape(2, 5), 'days since 2000-01-01'),
            (12300 + np.arange(5), 'hours since 1680-01-01 00:00:00'),
            # here we add a couple minor formatting errors to test
            # the robustness of the parsing algorithm.
            (12300 + np.arange(5), 'hour since 1680-01-01 00:00:00'),
            (12300 + np.arange(5), u'Hour since 1680-01-01 00:00:00'),
            (12300 + np.arange(5), ' Hour since 1680-01-01 00:00:00 '),
            (10, 'days since 2000-01-01'),
            ([10], 'daYs since 2000-01-01'),
            ([[10]], 'days since 2000-01-01'),
            ([10, 10], 'days since 2000-01-01'),
            (np.array(10), 'days since 2000-01-01'),
            (0, 'days since 1000-01-01'),
            ([0], 'days since 1000-01-01'),
            ([[0]], 'days since 1000-01-01'),
            (np.arange(2), 'days since 1000-01-01'),
            (np.arange(0, 100000, 20000), 'days since 1900-01-01'),
            (17093352.0, 'hours since 1-1-1 00:00:0.0'),
            ([0.5, 1.5], 'hours since 1900-01-01T00:00:00'),
            (0, 'milliseconds since 2000-01-01T00:00:00'),
            (0, 'microseconds since 2000-01-01T00:00:00'),
            (np.int32(788961600), 'seconds since 1981-01-01'),  # GH2002
    ]:
        for calendar in ['standard', 'gregorian', 'proleptic_gregorian']:
            expected = _ensure_naive_tz(
                cftime.num2date(num_dates, units, calendar))
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore',
                                        'Unable to decode time axis')
                actual = coding.times.decode_cf_datetime(
                    num_dates, units, calendar)
            if (isinstance(actual, np.ndarray) and
                    np.issubdtype(actual.dtype, np.datetime64)):
                # For some reason, numpy 1.8 does not compare ns precision
                # datetime64 arrays as equal to arrays of datetime objects,
                # but it works for us precision. Thus, convert to us
                # precision for the actual array equal comparison...
                actual_cmp = actual.astype('M8[us]')
            else:
                actual_cmp = actual
            assert_array_equal(expected, actual_cmp)
            encoded, _, _ = coding.times.encode_cf_datetime(
                actual, units, calendar)
            if '1-1-1' not in units:
                # pandas parses this date very strangely, so the original
                # units/encoding cannot be preserved in this case:
                # (Pdb) pd.to_datetime('1-1-1 00:00:0.0')
                # Timestamp('2001-01-01 00:00:00')
                assert_array_equal(num_dates, np.around(encoded, 1))
                if (hasattr(num_dates, 'ndim') and num_dates.ndim == 1 and
                        '1000' not in units):
                    # verify that wrapping with a pandas.Index works
                    # note that it *does not* currently work to even put
                    # non-datetime64 compatible dates into a pandas.Index
                    encoded, _, _ = coding.times.encode_cf_datetime(
                        pd.Index(actual), units, calendar)
                    assert_array_equal(num_dates, np.around(encoded, 1))
def times(calendar):
    """Return four consecutive hourly cftime dates starting at
    2000-01-01T00:00 on the given calendar."""
    lib = _import_cftime()
    hours = np.arange(4)
    return lib.num2date(hours, units='hours since 2000-01-01',
                        calendar=calendar,
                        only_use_cftime_datetimes=True)