Exemple #1
0
def test_decode_ambiguous_time_warns(calendar) -> None:
    # GH 4422, 4506
    from cftime import num2date

    # Non-standard calendars are not decoded via pandas, so no
    # ambiguity warning should be emitted for them.
    is_standard_calendar = calendar in coding.times._STANDARD_CALENDARS

    dates = [1, 2, 3]
    units = "days since 1-1-1"
    expected = num2date(
        dates, units, calendar=calendar, only_use_cftime_datetimes=True
    )

    if not is_standard_calendar:
        with assert_no_warnings():
            result = decode_cf_datetime(dates, units, calendar=calendar)
    else:
        with pytest.warns(SerializationWarning) as record:
            result = decode_cf_datetime(dates, units, calendar=calendar)
        relevant_warnings = [
            w
            for w in record.list
            if str(w.message).startswith("Ambiguous reference date string: 1-1-1")
        ]
        assert len(relevant_warnings) == 1

    np.testing.assert_array_equal(result, expected)
def format_date(x, pos=None):
    """Render a numeric CF time value as a 'DD.Mon HHh' tick label."""
    decoded = decode_cf_datetime(
        [x],
        "days since 1970-01-01 00:00:00",
        calendar="standard",
        use_cftime=None,
    )[0]
    return pd.Timestamp(decoded).strftime("%d.%b %Hh")
def test_time_units_with_timezone_roundtrip(calendar):
    # Regression test for GH 2649
    expected_units = "days since 2000-01-01T00:00:00-05:00"
    expected_num_dates = np.array([1, 2, 3])
    dates = decode_cf_datetime(expected_num_dates, expected_units, calendar)

    # Decoding folds the -05:00 offset into UTC, so every decoded
    # timestamp should land on hour 5.
    assert_equal(DataArray(dates).dt.hour, DataArray([5, 5, 5]))

    # Re-encode and confirm the numeric values round-trip.
    result_num_dates, result_units, result_calendar = encode_cf_datetime(
        dates, expected_units, calendar
    )

    if calendar not in _STANDARD_CALENDARS:
        # cftime datetime arithmetic is not quite exact.
        np.testing.assert_allclose(result_num_dates, expected_num_dates)
    else:
        np.testing.assert_array_equal(result_num_dates, expected_num_dates)

    assert result_units == expected_units
    assert result_calendar == calendar
Exemple #4
0
def test_use_cftime_false_standard_calendar_in_range(calendar) -> None:
    units = "days since 2000-01-01"
    numerical_dates = [0, 1]
    expected = pd.date_range("2000", periods=2)

    # In-range dates on a standard calendar decode silently to datetime64.
    with assert_no_warnings():
        decoded = decode_cf_datetime(numerical_dates, units, calendar, use_cftime=False)
        np.testing.assert_array_equal(decoded, expected)
Exemple #5
0
def _decode_datetime_cf(data_array):
    """
    Decide the datetime based on CF conventions

    Scans every coordinate of ``data_array`` and, where the coordinate's
    ``units`` attribute indicates CF time data, replaces the coordinate
    values with decoded timedelta64/cftime values.  The ``units`` (and
    ``calendar``) attributes are moved from ``attrs`` into ``encoding``
    via ``times.pop_to`` so the decoded coordinate is not decoded twice.
    Returns a new object; the input is rebuilt via ``assign_coords``.
    """
    for coord in data_array.coords:
        # stage 1: timedelta
        # A plain duration unit (e.g. "seconds") with no "since" clause.
        if (
            "units" in data_array[coord].attrs
            and data_array[coord].attrs["units"] in times.TIME_UNITS
        ):
            # pop_to moves the attribute into encoding so it survives a
            # later round-trip but is no longer treated as undecoded data.
            units = times.pop_to(
                data_array[coord].attrs, data_array[coord].encoding, "units"
            )
            new_values = times.decode_cf_timedelta(
                data_array[coord].values, units=units
            )
            data_array = data_array.assign_coords(
                {
                    coord: IndexVariable(
                        dims=data_array[coord].dims,
                        data=new_values.astype(np.dtype("timedelta64[ns]")),
                        attrs=data_array[coord].attrs,
                        encoding=data_array[coord].encoding,
                    )
                }
            )

        # stage 2: datetime
        # A reference-date unit such as "days since 1970-01-01".
        # NOTE: if stage 1 matched this coord, "units" was already popped
        # from attrs, so the two stages are mutually exclusive per coord.
        if (
            "units" in data_array[coord].attrs
            and "since" in data_array[coord].attrs["units"]
        ):
            units = times.pop_to(
                data_array[coord].attrs, data_array[coord].encoding, "units"
            )
            calendar = times.pop_to(
                data_array[coord].attrs, data_array[coord].encoding, "calendar"
            )
            # Determine the target dtype first (datetime64 vs object for
            # cftime), then decode with use_cftime=True and cast to it.
            dtype = times._decode_cf_datetime_dtype(
                data_array[coord].values, units, calendar, True
            )
            new_values = times.decode_cf_datetime(
                data_array[coord].values,
                units=units,
                calendar=calendar,
                use_cftime=True,
            )
            data_array = data_array.assign_coords(
                {
                    coord: IndexVariable(
                        dims=data_array[coord].dims,
                        data=new_values.astype(dtype),
                        attrs=data_array[coord].attrs,
                        encoding=data_array[coord].encoding,
                    )
                }
            )
    return data_array
Exemple #6
0
def test_encode_cf_datetime_overflow(shape):
    # Test for fix to GH 2272
    units = 'days since 1800-01-01'
    calendar = 'standard'
    original = pd.date_range('2100', periods=24).values.reshape(shape)

    encoded, _, _ = encode_cf_datetime(original, units, calendar)
    decoded = decode_cf_datetime(encoded, units, calendar)
    np.testing.assert_array_equal(original, decoded)
Exemple #7
0
def test_encode_cf_datetime_overflow(shape):
    # Test for fix to GH 2272
    original = pd.date_range("2100", periods=24).values.reshape(shape)
    units, calendar = "days since 1800-01-01", "standard"

    encoded = encode_cf_datetime(original, units, calendar)[0]
    roundtrip = decode_cf_datetime(encoded, units, calendar)
    np.testing.assert_array_equal(original, roundtrip)
Exemple #8
0
def test_use_cftime_default_standard_calendar_in_range(calendar):
    """In-range standard-calendar dates decode to datetime64 without warning."""
    numerical_dates = [0, 1]
    units = "days since 2000-01-01"
    expected = pd.date_range("2000", periods=2)

    # pytest.warns(None) is deprecated since pytest 6.2 and raises a
    # TypeError in pytest 7; use the assert_no_warnings() helper that the
    # sibling tests in this file already use.
    with assert_no_warnings():
        result = decode_cf_datetime(numerical_dates, units, calendar)
        np.testing.assert_array_equal(result, expected)
def test_use_cftime_default_standard_calendar_out_of_range(calendar, units_year):
    from cftime import num2date

    numerical_dates = [0, 1]
    units = f"days since {units_year}-01-01"
    expected = num2date(numerical_dates, units, calendar,
                        only_use_cftime_datetimes=True)

    # Out-of-range dates fall back to cftime and warn about it.
    with pytest.warns(SerializationWarning):
        decoded = decode_cf_datetime(numerical_dates, units, calendar)
        np.testing.assert_array_equal(decoded, expected)
Exemple #10
0
def test_use_cftime_true(calendar, units_year) -> None:
    from cftime import num2date

    numerical_dates = [0, 1]
    units = f"days since {units_year}-01-01"
    expected = num2date(numerical_dates, units, calendar,
                        only_use_cftime_datetimes=True)

    # Forcing use_cftime=True must decode silently to cftime objects.
    with assert_no_warnings():
        decoded = decode_cf_datetime(numerical_dates, units, calendar, use_cftime=True)
        np.testing.assert_array_equal(decoded, expected)
Exemple #11
0
def test_use_cftime_true(calendar, units_year):
    """Forcing use_cftime=True decodes to cftime objects with no warning."""
    from cftime import num2date

    numerical_dates = [0, 1]
    units = "days since {}-01-01".format(units_year)
    expected = num2date(
        numerical_dates, units, calendar, only_use_cftime_datetimes=True
    )

    # pytest.warns(None) is deprecated since pytest 6.2 and raises a
    # TypeError in pytest 7; use the assert_no_warnings() helper that the
    # sibling tests in this file already use.
    with assert_no_warnings():
        result = decode_cf_datetime(numerical_dates, units, calendar, use_cftime=True)
        np.testing.assert_array_equal(result, expected)
def test_use_cftime_default_non_standard_calendar(calendar, units_year):
    """Non-standard calendars decode to cftime by default with no warning."""
    from cftime import num2date

    numerical_dates = [0, 1]
    units = f"days since {units_year}-01-01"
    expected = num2date(
        numerical_dates, units, calendar, only_use_cftime_datetimes=True
    )

    # pytest.warns(None) is deprecated since pytest 6.2 and raises a
    # TypeError in pytest 7; use the assert_no_warnings() helper that the
    # sibling tests in this file already use.
    with assert_no_warnings():
        result = decode_cf_datetime(numerical_dates, units, calendar)
        np.testing.assert_array_equal(result, expected)
Exemple #13
0
    def _process_coords(self, df, reference_file_name):
        """Detect time and depth columns in *df* and derive dive phase.

        Time columns (name containing 'time' or '_secs') are decoded to
        datetime64 using the units attribute read from the matching
        variable in ``reference_file_name`` (a netCDF file); the raw
        numeric values are preserved in a ``<name>_raw`` column.  When a
        depth column is found, ``df['dives']`` is rewritten so upcasts
        get the dive number + 0.5.  Returns the modified DataFrame.
        """
        from numpy import ndarray, unique, argmax
        from netCDF4 import Dataset
        # if ('dives' in self.__data__):
        #     df = df.drop(columns='dives')

        # TRY TO GET DEPTH AND TIME COORDS AUTOMATICALLY
        for col in df.columns:
            # DECODING TIMES IF PRESENT
            if ('time' in col.lower()) | ('_secs' in col.lower()):
                time = col
                # remember which column holds time for later use
                self.__data__.time_name = time
                nco = Dataset(reference_file_name)
                units = nco.variables[time].getncattr('units')
                # keep the undecoded values alongside the decoded column
                df[time + '_raw'] = df.loc[:, time].copy()
                if 'seconds since 1970' in units:
                    # epoch seconds cast directly to datetime64
                    df[time] = df.loc[:, time].astype('datetime64[s]')
                else:
                    from xarray.coding.times import decode_cf_datetime
                    df[time] = decode_cf_datetime(df.loc[:, time], units)
                nco.close()

            # CREATE UPCAST COLUMN
            # previously I changed the dive number to be 0.5 if upcast,
            # but this ran into indexing problems when importing columns
            # after the inital import (where depth wasn't present).
            if ('depth' in col.lower()):
                depth = df[col].values
                dives = df.dives.values
                self.__data__.depth_name = col
                # INDEX UP AND DOWN DIVES
                # NOTE(review): ndarray(...) is uninitialized; the * False
                # zeroes it, yielding an all-False boolean array.
                updive = ndarray(dives.size, dtype=bool) * False
                for d in unique(dives):
                    i = d == dives
                    # position of maximum depth within this dive
                    j = argmax(depth[i])
                    # bool slice of the dive
                    k = i[i]
                    # make False until the maximum depth
                    k[:j] = False
                    # assign the bool slice to the updive
                    updive[i] = k

                # upcast rows become dive + 0.5 (True / 2 == 0.5)
                df['dives'] = dives + (updive / 2)

        return df
Exemple #14
0
def time_average_per_dive(dives,
                          time,
                          time_units='seconds since 1970-01-01 00:00:00'):
    """
    Gets the average time stamp per dive. This is used to create psuedo discrete
    time steps per dive for plotting data (using time as x-axis variable).

    Parameters
    ----------
    dives : np.array, dtype=float, shape=[n, ]
        discrete dive numbers (down = d.0; up = d.5) that matches time length
    time : np.array, dtype=datetime64, shape=[n, ]
        time stamp for each observed measurement
    time_units : str
        time_units if `time` is not in datetime64[ns] units

    Returns
    -------
    time_average_per_dive : np.array, dtype=datetime64, shape=[n, ]
        each dive will have the average time stamp of that dive. Can be used for
        plotting where time_average_per_dive is set as the x-axis.
    """
    from pandas import Series
    from numpy import datetime64, array
    from xarray.coding.times import decode_cf_datetime

    time = array(time)
    dives = array(dives)

    # datetime64[ns] values become epoch seconds; anything else is assumed
    # to already be numeric in `time_units`.
    if isinstance(time[0], datetime64):
        numeric_time = time.astype(float) / 1e9
    else:
        numeric_time = time

    # mean numeric time per dive, decoded back to datetime64
    mean_per_dive = Series(numeric_time).groupby(dives).mean()
    decoded = decode_cf_datetime(mean_per_dive, time_units)

    # broadcast each dive's mean back onto the full-length dives vector
    broadcast = Series(decoded, index=mean_per_dive.index.values)
    return broadcast.reindex(index=dives).values
Exemple #15
0
def test_time_units_with_timezone_roundtrip(calendar):
    # Regression test for GH 2649
    expected_units = 'days since 2000-01-01T00:00:00-05:00'
    expected_num_dates = np.array([1, 2, 3])
    dates = decode_cf_datetime(expected_num_dates, expected_units, calendar)

    # Decoding folds the -05:00 offset into UTC, so every decoded
    # timestamp should land on hour 5.
    assert_equal(DataArray(dates).dt.hour, DataArray([5, 5, 5]))

    # Re-encode and confirm the numeric values round-trip.
    result_num_dates, result_units, result_calendar = encode_cf_datetime(
        dates, expected_units, calendar)

    if calendar not in _STANDARD_CALENDARS:
        # cftime datetime arithmetic is not quite exact.
        np.testing.assert_allclose(result_num_dates, expected_num_dates)
    else:
        np.testing.assert_array_equal(result_num_dates, expected_num_dates)

    assert result_units == expected_units
    assert result_calendar == calendar
Exemple #16
0
def test_use_cftime_false_standard_calendar_out_of_range(calendar, units_year):
    # With cftime disallowed, out-of-range dates cannot be represented.
    units = f"days since {units_year}-01-01"
    with pytest.raises(OutOfBoundsDatetime):
        decode_cf_datetime([0, 1], units, calendar, use_cftime=False)
Exemple #17
0
def test_decode_cf_datetime_uint(dtype):
    # Unsigned integer inputs decode the same as signed ones.
    units = "seconds since 2018-08-22T03:23:03Z"
    expected = np.asarray(np.datetime64("2018-08-22T03:23:53", "ns"))
    decoded = decode_cf_datetime(dtype(50), units)
    np.testing.assert_equal(decoded, expected)
Exemple #18
0
def test_decode_cf_datetime_uint64_with_cftime_overflow_error():
    # 500,000 360-day years expressed in microseconds overflows decoding.
    num_dates = np.uint64(1_000_000 * 86_400 * 360 * 500_000)
    units = "microseconds since 1700-01-01"
    with pytest.raises(OverflowError):
        decode_cf_datetime(num_dates, units, "360_day")
Exemple #19
0
def test_decode_cf_datetime_uint64_with_cftime():
    # A uint64 day count within datetime64 range decodes normally.
    expected = np.asarray(np.datetime64("2200-01-01", "ns"))
    decoded = decode_cf_datetime(np.uint64(182621), "days since 1700-01-01")
    np.testing.assert_equal(decoded, expected)
Exemple #20
0
def test_use_cftime_false_non_standard_calendar(calendar, units_year):
    # Non-standard calendars cannot be represented as datetime64 at all,
    # so disallowing cftime must raise.
    units = 'days since {}-01-01'.format(units_year)
    with pytest.raises(OutOfBoundsDatetime):
        decode_cf_datetime([0, 1], units, calendar, use_cftime=False)