def test_cf_datetime(num_dates, units, calendar):
    """Round-trip ``num_dates`` through decode/encode and compare to cftime.

    Decodes the raw numbers with ``coding.times.decode_cf_datetime``,
    checks the result against ``cftime.num2date`` to within one second,
    then re-encodes and verifies the original numbers are recovered.
    """
    cftime = _import_cftime()
    # The standalone 'cftime' package grew a flag to always return
    # cftime datetime objects; the netCDF4-bundled fallback did not.
    if cftime.__name__ == 'cftime':
        expected = cftime.num2date(
            num_dates, units, calendar, only_use_cftime_datetimes=True)
    else:
        expected = cftime.num2date(num_dates, units, calendar)

    flat_expected = np.ravel(np.atleast_1d(expected))
    year_min = flat_expected[np.nanargmin(num_dates)].year
    year_max = flat_expected[np.nanargmax(num_dates)].year
    if year_min >= 1678 and year_max < 2262:
        # Entire range fits in datetime64[ns], so compare in numpy terms.
        expected = cftime_to_nptime(expected)

    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', 'Unable to decode time axis')
        actual = coding.times.decode_cf_datetime(num_dates, units, calendar)

    abs_diff = np.atleast_1d(abs(actual - expected)).astype(np.timedelta64)
    # once we no longer support versions of netCDF4 older than 1.1.5,
    # we could do this check with near microsecond accuracy:
    # https://github.com/Unidata/netcdf4-python/issues/355
    assert (abs_diff <= np.timedelta64(1, 's')).all()

    encoded, _, _ = coding.times.encode_cf_datetime(actual, units, calendar)
    if '1-1-1' not in units:
        # pandas parses this date very strangely, so the original
        # units/encoding cannot be preserved in this case:
        # (Pdb) pd.to_datetime('1-1-1 00:00:0.0')
        # Timestamp('2001-01-01 00:00:00')
        assert_array_equal(num_dates, np.around(encoded, 1))
    if (hasattr(num_dates, 'ndim') and num_dates.ndim == 1
            and '1000' not in units):
        # verify that wrapping with a pandas.Index works
        # note that it *does not* currently work to even put
        # non-datetime64 compatible dates into a pandas.Index
        encoded, _, _ = coding.times.encode_cf_datetime(
            pd.Index(actual), units, calendar)
        assert_array_equal(num_dates, np.around(encoded, 1))
def test_cf_datetime(num_dates, units, calendar):
    """Check decode/encode of CF datetimes against the cftime reference.

    The decoded values must agree with ``cftime.num2date`` to within one
    second, and re-encoding must reproduce ``num_dates``.
    """
    cftime = _import_cftime()
    # Ask the standalone cftime package for cftime objects explicitly;
    # the legacy netCDF4 shim lacks the keyword.
    expected = (
        cftime.num2date(num_dates, units, calendar,
                        only_use_cftime_datetimes=True)
        if cftime.__name__ == 'cftime'
        else cftime.num2date(num_dates, units, calendar))

    as_array = np.ravel(np.atleast_1d(expected))
    earliest = as_array[np.nanargmin(num_dates)].year
    latest = as_array[np.nanargmax(num_dates)].year
    if earliest >= 1678 and latest < 2262:
        # Safe to convert: everything lies inside the datetime64[ns] range.
        expected = cftime_to_nptime(expected)

    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', 'Unable to decode time axis')
        actual = coding.times.decode_cf_datetime(num_dates, units, calendar)

    abs_diff = np.atleast_1d(abs(actual - expected)).astype(np.timedelta64)
    # once we no longer support versions of netCDF4 older than 1.1.5,
    # we could do this check with near microsecond accuracy:
    # https://github.com/Unidata/netcdf4-python/issues/355
    assert (abs_diff <= np.timedelta64(1, 's')).all()

    encoded, _, _ = coding.times.encode_cf_datetime(actual, units, calendar)
    if '1-1-1' not in units:
        # pandas parses this date very strangely, so the original
        # units/encoding cannot be preserved in this case:
        # (Pdb) pd.to_datetime('1-1-1 00:00:0.0')
        # Timestamp('2001-01-01 00:00:00')
        assert_array_equal(num_dates, np.around(encoded, 1))
    wrap_in_index = (hasattr(num_dates, 'ndim') and num_dates.ndim == 1
                     and '1000' not in units)
    if wrap_in_index:
        # verify that wrapping with a pandas.Index works
        # note that it *does not* currently work to even put
        # non-datetime64 compatible dates into a pandas.Index
        encoded, _, _ = coding.times.encode_cf_datetime(
            pd.Index(actual), units, calendar)
        assert_array_equal(num_dates, np.around(encoded, 1))
def check_nptime(da):
    """Ensure the 'time' axis of *da* uses numpy datetime64, converting if not.

    Best-effort: if conversion fails (e.g. dates outside the
    datetime64[ns]-representable range), the error is reported and *da*
    is returned unchanged.

    Parameters
    ----------
    da : DataArray
        Xarray data structure with a 'time' coordinate.

    Returns
    -------
    DataArray
        The same object, with 'time' converted to datetime64 when possible.
        Note the conversion mutates *da* in place as well.
    """
    if not np.issubdtype(da['time'].dtype, np.datetime64):
        try:
            # Lazy import: xarray internals are only needed on this path.
            from xarray.coding.times import cftime_to_nptime
            da['time'] = cftime_to_nptime(da['time'])
        except ValueError as e:
            # Out-of-range dates cannot be represented as datetime64[ns];
            # report and fall through, keeping the original time axis.
            # (Removed a redundant `pass` that followed this print.)
            print(e)
    return da