def test_astype_object(self, tz_naive_fixture):
    # astype('O') should produce an object-dtype ndarray whose elements
    # match the source DatetimeIndex one-for-one.
    index = pd.date_range('2016-01-01', periods=3, tz=tz_naive_fixture)
    result = DatetimeArray(index).astype('O')
    assert isinstance(result, np.ndarray)
    assert result.dtype == 'O'
    assert list(result) == list(index)
def test_repeat_preserves_tz(self):
    # repeat() keeps tz and values, but not freq
    index = pd.date_range('2000', periods=2, freq='D', tz='US/Central')
    arr = DatetimeArray(index)

    result = arr.repeat([1, 1])
    expected = DatetimeArray(arr.asi8, freq=None, tz=arr.tz)
    tm.assert_equal(result, expected)
def test_to_period(self, datetime_index, freqstr):
    # to_period on the array should agree with DatetimeIndex.to_period.
    arr = DatetimeArrayMixin(datetime_index)
    expected = datetime_index.to_period(freq=freqstr)

    result = arr.to_period(freq=freqstr)
    assert isinstance(result, PeriodArrayMixin)

    # placeholder until these become actual EA subclasses and we can use
    # an EA-specific tm.assert_ function
    tm.assert_index_equal(pd.Index(result), pd.Index(expected))
def test_to_period(self, datetime_index, freqstr):
    # Array-level to_period must match the Index-level conversion.
    expected = datetime_index.to_period(freq=freqstr)
    result = DatetimeArray(datetime_index).to_period(freq=freqstr)
    assert isinstance(result, PeriodArray)

    # placeholder until these become actual EA subclasses and we can use
    # an EA-specific tm.assert_ function
    tm.assert_index_equal(pd.Index(result), pd.Index(expected))
def test_concat_same_type_invalid(self, datetime_index):
    # concatenating arrays with different timezones must raise
    arr = DatetimeArray(datetime_index)
    other = (arr.tz_localize('UTC') if arr.tz is None
             else arr.tz_localize(None))

    with pytest.raises(AssertionError):
        arr._concat_same_type([arr, other])
def test_value_counts_preserves_tz(self):
    dti = pd.date_range('2000', periods=2, freq='D', tz='US/Central')
    arr = DatetimeArray(dti).repeat([4, 3])

    # Note: not tm.assert_index_equal, since `freq`s do not match
    assert arr.value_counts().index.equals(dti)

    # introduce a missing value and recount
    arr[-2] = pd.NaT
    result = arr.value_counts()
    expected = pd.Series([1, 4, 2], index=[pd.NaT, dti[0], dti[1]])
    tm.assert_series_equal(result, expected)
def _ensure_localized(self, arg, ambiguous='raise', nonexistent='raise',
                      from_utc=False):
    """
    Localize `arg` to this index's timezone when the index is tz-aware;
    otherwise return `arg` unchanged.

    See DatetimeLikeArrayMixin._ensure_localized.__doc__ for the full
    parameter semantics; this wrapper delegates to the array
    implementation and re-wraps the result as this Index type.
    """
    # See DatetimeLikeArrayMixin._ensure_localized.__doc__
    if getattr(self, 'tz', None):
        # ensure_localized is only relevant for tz-aware DTI
        from pandas.core.arrays import DatetimeArrayMixin as DatetimeArray
        dtarr = DatetimeArray(self)
        result = dtarr._ensure_localized(arg, ambiguous=ambiguous,
                                         nonexistent=nonexistent,
                                         from_utc=from_utc)
        # re-wrap in the same Index subclass, preserving the name
        return type(self)(result, name=self.name)
    # tz-naive (or no tz attribute at all): nothing to localize
    return arg
def test_fillna_preserves_tz(self, method):
    # fillna keeps the timezone and does not mutate its inputs.
    dti = pd.date_range('2000-01-01', periods=5, freq='D', tz='US/Central')
    arr = DatetimeArray(dti, copy=True)
    arr[2] = pd.NaT

    fill = dti[1] if method == 'pad' else dti[3]
    result = arr.fillna(method=method)
    expected = DatetimeArray([dti[0], dti[1], fill, dti[3], dti[4]],
                             freq=None, tz='US/Central')
    tm.assert_extension_array_equal(result, expected)

    # assert that arr and dti were not modified in-place
    assert arr[2] is pd.NaT
    assert dti[2] == pd.Timestamp('2000-01-03', tz='US/Central')
def test_min_max_empty(self, skipna, tz):
    # reductions over an empty array yield NaT for every skipna/tz combo
    arr = DatetimeArray._from_sequence([], tz=tz)
    assert arr.min(skipna=skipna) is pd.NaT
    assert arr.max(skipna=skipna) is pd.NaT
def test_from_pandas_array(self):
    # integer nanosecond offsets round-trip through _from_sequence
    # with frequency inference
    nanos = pd.array(np.arange(5, dtype=np.int64)) * 3600 * 10**9
    result = DatetimeArray._from_sequence(nanos, freq='infer')
    expected = pd.date_range('1970-01-01', periods=5, freq='H')._eadata
    tm.assert_datetime_array_equal(result, expected)
def _add_datetimelike_scalar(self, other):
    """
    Add a datetime-like scalar to this timedelta array, returning a
    datetime array (possibly tz-aware, taken from the scalar).
    """
    # adding a timedeltaindex to a datetimelike
    from pandas.core.arrays import DatetimeArrayMixin
    assert other is not NaT
    other = Timestamp(other)
    if other is NaT:
        # Timestamp(...) can itself produce NaT (e.g. from a
        # np.datetime64('NaT') input), so re-check after conversion.
        # In this case we specifically interpret NaT as a datetime, not
        # the timedelta interpretation we would get by returning self + NaT
        result = self.asi8.view('m8[ms]') + NaT.to_datetime64()
        return DatetimeArrayMixin(result)

    i8 = self.asi8
    # overflow-checked addition of the scalar's i8 value; positions that
    # are NaT in self are excluded from the overflow check via the mask
    result = checked_add_with_arr(i8, other.value, arr_mask=self._isnan)
    # restore NaT in the originally-missing positions
    result = self._maybe_mask_results(result)
    return DatetimeArrayMixin(result, tz=other.tz, freq=self.freq)
def test_unstack(self, obj):
    # GH-13287: can't use base test, since building the expected fails.
    data = DatetimeArray._from_sequence(['2000', '2001', '2002', '2003'],
                                        tz='US/Central')
    index = pd.MultiIndex.from_product([['A', 'B'], ['a', 'b']],
                                       names=['a', 'b'])

    first = data.take([0, 1])
    second = data.take([2, 3])
    inner = pd.Index(['a', 'b'], name='b')

    if obj == "series":
        ser = pd.Series(data, index=index)
        expected = pd.DataFrame({"A": first, "B": second}, index=inner)
        expected.columns.name = 'a'
    else:
        ser = pd.DataFrame({"A": data, "B": data}, index=index)
        expected = pd.DataFrame(
            {
                ("A", "A"): first,
                ("A", "B"): second,
                ("B", "A"): first,
                ("B", "B"): second,
            },
            index=inner)
        expected.columns.names = [None, 'a']

    result = ser.unstack(0)
    self.assert_equal(result, expected)
def test_int_properties(self, datetime_index, propname):
    # integer-valued properties on the array match the Index versions
    arr = DatetimeArrayMixin(datetime_index)
    result = getattr(arr, propname)
    expected = np.array(getattr(datetime_index, propname),
                        dtype=result.dtype)
    tm.assert_numpy_array_equal(result, expected)
def _add_datelike(self, other):
    """
    Add a datetime-like (scalar, DatetimeArray/Index, or
    ndarray[datetime64]) to this timedelta-like array, producing a
    datetime-like result.
    """
    # adding a timedeltaindex to a datetimelike
    from pandas.core.arrays import DatetimeArrayMixin
    if isinstance(other, (DatetimeArrayMixin, np.ndarray)):
        # if other is an ndarray, we assume it is datetime64-dtype
        # defer to implementation in DatetimeIndex
        if not isinstance(other, DatetimeArrayMixin):
            other = DatetimeArrayMixin(other)
        return other + self
    else:
        assert other is not NaT
        other = Timestamp(other)
        i8 = self.asi8
        # overflow-checked addition; NaT slots in self are masked out
        result = checked_add_with_arr(i8, other.value,
                                      arr_mask=self._isnan)
        # re-insert iNaT where the original had missing values
        result = self._maybe_mask_results(result, fill_value=iNaT)
        return DatetimeArrayMixin(result)
def _add_datetime_arraylike(self, other):
    """Add DatetimeArray/Index or ndarray[datetime64] to TimedeltaArray"""
    if isinstance(other, np.ndarray):
        # At this point we have already checked that dtype is datetime64
        from pandas.core.arrays import DatetimeArrayMixin
        other = DatetimeArrayMixin(other)

    # defer to implementation in DatetimeArray
    return other + self
def test_bool_properties(self, datetime_index, propname):
    # in this case _bool_ops is just `is_leap_year`
    arr = DatetimeArrayMixin(datetime_index)
    assert datetime_index.freq == arr.freq

    result = getattr(arr, propname)
    expected = np.array(getattr(datetime_index, propname),
                        dtype=result.dtype)
    tm.assert_numpy_array_equal(result, expected)
def test_from_dti(self, tz_naive_fixture):
    index = pd.date_range('2016-01-01', periods=3, tz=tz_naive_fixture)
    arr = DatetimeArrayMixin(index)
    assert list(index) == list(arr)

    # Check that Index.__new__ knows what to do with DatetimeArray
    roundtripped = pd.Index(arr)
    assert isinstance(roundtripped, pd.DatetimeIndex)
    assert list(roundtripped) == list(arr)
def test_to_timestamp(self, how, period_index):
    # array-level to_timestamp matches the Index-level conversion
    arr = PeriodArrayMixin(period_index)
    expected = DatetimeArrayMixin(period_index.to_timestamp(how=how))

    result = arr.to_timestamp(how=how)
    assert isinstance(result, DatetimeArrayMixin)

    # placeholder until these become actual EA subclasses and we can use
    # an EA-specific tm.assert_ function
    tm.assert_index_equal(pd.Index(result), pd.Index(expected))
def test_astype_int(self, dtype):
    # casting to any integer dtype lands on (u)int64
    arr = DatetimeArray._from_sequence(
        [pd.Timestamp('2000'), pd.Timestamp('2001')])
    result = arr.astype(dtype)

    expected_dtype = (np.dtype('uint64') if np.dtype(dtype).kind == 'u'
                      else np.dtype('int64'))
    expected = arr.astype(expected_dtype)

    assert result.dtype == expected_dtype
    tm.assert_numpy_array_equal(result, expected)
def test_astype_int(self, dtype):
    # all integer casts normalize to 64-bit width
    timestamps = [pd.Timestamp('2000'), pd.Timestamp('2001')]
    arr = DatetimeArray._from_sequence(timestamps)
    result = arr.astype(dtype)

    if np.dtype(dtype).kind == 'u':
        expected_dtype = np.dtype('uint64')
    else:
        expected_dtype = np.dtype('int64')
    expected = arr.astype(expected_dtype)

    assert result.dtype == expected_dtype
    tm.assert_numpy_array_equal(result, expected)
def test_concat_same_type_different_freq(self):
    # we *can* concatenate DTI with different freqs.
    daily = DatetimeArray(pd.date_range('2000', periods=2, freq='D',
                                        tz='US/Central'))
    hourly = DatetimeArray(pd.date_range('2000', periods=2, freq='H',
                                         tz='US/Central'))
    result = DatetimeArray._concat_same_type([daily, hourly])

    stamps = pd.to_datetime([
        '2000-01-01 00:00:00',
        '2000-01-02 00:00:00',
        '2000-01-01 00:00:00',
        '2000-01-01 01:00:00',
    ]).tz_localize("US/Central")
    expected = DatetimeArray(stamps)
    tm.assert_datetime_array_equal(result, expected)
def test_array(self, tz_naive_fixture):
    # GH#23524
    dti = pd.date_range('2016-01-01', periods=3, tz=tz_naive_fixture)
    arr = DatetimeArrayMixin(dti)
    expected = dti.asi8.view('M8[ns]')

    result = np.array(arr)
    tm.assert_numpy_array_equal(result, expected)

    # check that we are not making copies when setting copy=False
    result = np.array(arr, copy=False)
    assert result.base is expected.base
    assert result.base is not None
def test_array_object_dtype(self, tz_naive_fixture):
    # GH#23524
    dti = pd.date_range('2016-01-01', periods=3, tz=tz_naive_fixture)
    arr = DatetimeArrayMixin(dti)
    expected = np.array(list(dti))

    result = np.array(arr, dtype=object)
    tm.assert_numpy_array_equal(result, expected)

    # also test the DatetimeIndex method while we're at it
    result = np.array(dti, dtype=object)
    tm.assert_numpy_array_equal(result, expected)
def __rsub__(self, other):
    """Reflected subtraction: compute ``other - self``."""
    if is_datetime64_dtype(other) and is_timedelta64_dtype(self):
        # ndarray[datetime64] cannot be subtracted from self, so
        # we need to wrap in DatetimeArray/Index and flip the operation
        if not isinstance(other, DatetimeLikeArrayMixin):
            # Avoid down-casting DatetimeIndex
            from pandas.core.arrays import DatetimeArrayMixin
            other = DatetimeArrayMixin(other)
        return other - self
    elif (is_datetime64_any_dtype(self) and hasattr(other, 'dtype') and
          not is_datetime64_any_dtype(other)):
        # GH#19959 datetime - datetime is well-defined as timedelta,
        # but any other type - datetime is not well-defined.
        raise TypeError("cannot subtract {cls} from {typ}".format(
            cls=type(self).__name__, typ=type(other).__name__))
    # default: reflected subtraction is the negated forward subtraction
    return -(self - other)
def test_take_fill_valid(self, datetime_index, tz_naive_fixture):
    dti = datetime_index.tz_localize(tz_naive_fixture)
    arr = DatetimeArray(dti)

    now = pd.Timestamp.now().tz_localize(dti.tz)
    result = arr.take([-1, 1], allow_fill=True, fill_value=now)
    assert result[0] == now

    # fill_value Timedelta invalid
    with pytest.raises(ValueError):
        arr.take([-1, 1], allow_fill=True, fill_value=now - now)

    # fill_value Period invalid
    with pytest.raises(ValueError):
        arr.take([-1, 1], allow_fill=True, fill_value=pd.Period('2014Q1'))

    # Timestamp with mismatched tz-awareness
    mismatched_tz = None if dti.tz is not None else 'US/Eastern'
    bad_fill = pd.Timestamp.now().tz_localize(mismatched_tz)
    with pytest.raises(TypeError):
        arr.take([-1, 1], allow_fill=True, fill_value=bad_fill)
def to_timestamp(self, freq=None, how='start'):
    """
    Cast to DatetimeArray/Index

    Parameters
    ----------
    freq : string or DateOffset, optional
        Target frequency. The default is 'D' for week or longer,
        'S' otherwise
    how : {'s', 'e', 'start', 'end'}
        Whether to use the start or the end of each period.

    Returns
    -------
    DatetimeArray/Index
    """
    from pandas.core.arrays import DatetimeArrayMixin

    # normalize 's'/'e'/'start'/'end' to the canonical 'S'/'E'
    how = libperiod._validate_end_alias(how)

    end = how == 'E'
    if end:
        if freq == 'B':
            # roll forward to ensure we land on B date
            adjust = Timedelta(1, 'D') - Timedelta(1, 'ns')
            return self.to_timestamp(how='start') + adjust
        else:
            # end-of-period = start of the next period minus 1 ns
            adjust = Timedelta(1, 'ns')
            return (self + 1).to_timestamp(how='start') - adjust

    if freq is None:
        # derive the target frequency from this array's own freq
        base, mult = frequencies.get_freq_code(self.freq)
        freq = frequencies.get_to_timestamp_base(base)
    else:
        freq = Period._maybe_convert_freq(freq)
        base, mult = frequencies.get_freq_code(freq)

    # resample to the target frequency, then convert ordinals to dt64
    new_data = self.asfreq(freq, how=how)

    new_data = libperiod.periodarr_to_dt64arr(new_data._ndarray_values,
                                              base)
    return DatetimeArrayMixin(new_data, freq='infer')
def to_timestamp(self, freq=None, how='start'):
    """
    Cast to DatetimeArray/Index.

    Parameters
    ----------
    freq : string or DateOffset, optional
        Target frequency. The default is 'D' for week or longer,
        'S' otherwise
    how : {'s', 'e', 'start', 'end'}
        Whether to use the start or the end of each period.

    Returns
    -------
    DatetimeArray/Index
    """
    from pandas.core.arrays import DatetimeArrayMixin

    # normalize 's'/'e'/'start'/'end' to the canonical 'S'/'E'
    how = libperiod._validate_end_alias(how)

    end = how == 'E'
    if end:
        if freq == 'B':
            # roll forward to ensure we land on B date
            adjust = Timedelta(1, 'D') - Timedelta(1, 'ns')
            return self.to_timestamp(how='start') + adjust
        else:
            # end-of-period = start of the next period minus 1 ns
            adjust = Timedelta(1, 'ns')
            return (self + self.freq).to_timestamp(how='start') - adjust

    if freq is None:
        # derive the target frequency from this array's own freq
        base, mult = frequencies.get_freq_code(self.freq)
        freq = frequencies.get_to_timestamp_base(base)
    else:
        freq = Period._maybe_convert_freq(freq)
        base, mult = frequencies.get_freq_code(freq)

    # resample to the target frequency, then convert ordinals to dt64
    new_data = self.asfreq(freq, how=how)

    new_data = libperiod.periodarr_to_dt64arr(new_data.asi8, base)
    return DatetimeArrayMixin._from_sequence(new_data, freq='infer')
def test_min_max(self, tz):
    arr = DatetimeArray._from_sequence([
        '2000-01-03',
        '2000-01-03',
        'NaT',
        '2000-01-02',
        '2000-01-05',
        '2000-01-04',
    ], tz=tz)

    # NaT is skipped by default
    assert arr.min() == pd.Timestamp('2000-01-02', tz=tz)
    assert arr.max() == pd.Timestamp('2000-01-05', tz=tz)

    # with skipna=False the NaT poisons both reductions
    assert arr.min(skipna=False) is pd.NaT
    assert arr.max(skipna=False) is pd.NaT
def test_tz_dtype_matches(self):
    # passing a dtype with the same tz leaves the underlying data intact
    arr = DatetimeArray._from_sequence(['2000'], tz='US/Central')
    converted, _, _ = sequence_to_dt64ns(
        arr, dtype=DatetimeTZDtype(tz="US/Central"))
    tm.assert_numpy_array_equal(arr._data, converted)
def test_tz_dtype_mismatch_raises(self):
    # converting tz-aware data with a *different* tz-dtype is rejected
    arr = DatetimeArray._from_sequence(['2000'], tz='US/Central')
    msg = 'data is already tz-aware'
    with pytest.raises(TypeError, match=msg):
        sequence_to_dt64ns(arr, dtype=DatetimeTZDtype(tz="UTC"))
def test_astype_to_same(self):
    # astype to an identical dtype with copy=False returns self
    arr = DatetimeArray._from_sequence(['2000'], tz='US/Central')
    same = arr.astype(DatetimeTZDtype(tz="US/Central"), copy=False)
    assert same is arr
def test_tz_setter_raises(self):
    # tz is read-only; the error should point users at tz_localize
    arr = DatetimeArray._from_sequence(['2000'], tz='US/Central')
    with pytest.raises(AttributeError, match='tz_localize'):
        arr.tz = 'UTC'
def array(data,         # type: Sequence[object]
          dtype=None,   # type: Optional[Union[str, np.dtype, ExtensionDtype]]
          copy=True,    # type: bool
          ):
    # type: (...) -> ExtensionArray
    """
    Create an array.

    .. versionadded:: 0.24.0

    Parameters
    ----------
    data : Sequence of objects
        The scalars inside `data` should be instances of the
        scalar type for `dtype`. It's expected that `data`
        represents a 1-dimensional array of data.

        When `data` is an Index or Series, the underlying array
        will be extracted from `data`.
    dtype : str, np.dtype, or ExtensionDtype, optional
        The dtype to use for the array. This may be a NumPy
        dtype or an extension type registered with pandas using
        :meth:`pandas.api.extensions.register_extension_dtype`.

        If not specified, there are two possibilities:

        1. When `data` is a :class:`Series`, :class:`Index`, or
           :class:`ExtensionArray`, the `dtype` will be taken
           from the data.
        2. Otherwise, pandas will attempt to infer the `dtype`
           from the data.

        Note that when `data` is a NumPy array, ``data.dtype`` is
        *not* used for inferring the array type. This is because
        NumPy cannot represent all the types of data that can be
        held in extension arrays.

        Currently, pandas will infer an extension dtype for sequences of

        ============================== =====================================
        Scalar Type                    Array Type
        ============================== =====================================
        :class:`pandas.Interval`       :class:`pandas.IntervalArray`
        :class:`pandas.Period`         :class:`pandas.arrays.PeriodArray`
        :class:`datetime.datetime`     :class:`pandas.arrays.DatetimeArray`
        :class:`datetime.timedelta`    :class:`pandas.arrays.TimedeltaArray`
        ============================== =====================================

        For all other cases, NumPy's usual inference rules will be used.
    copy : bool, default True
        Whether to copy the data, even if not necessary. Depending
        on the type of `data`, creating the new array may require
        copying data, even if ``copy=False``.

    Returns
    -------
    ExtensionArray
        The newly created array.

    Raises
    ------
    ValueError
        When `data` is not 1-dimensional.

    See Also
    --------
    numpy.array : Construct a NumPy array.
    arrays.PandasArray : ExtensionArray wrapping a NumPy array.
    Series : Construct a pandas Series.
    Index : Construct a pandas Index.

    Notes
    -----
    Omitting the `dtype` argument means pandas will attempt to infer the
    best array type from the values in the data. As new array types are
    added by pandas and 3rd party libraries, the "best" array type may
    change. We recommend specifying `dtype` to ensure that

    1. the correct array type for the data is returned
    2. the returned array type doesn't change as new extension types
       are added by pandas and third-party libraries

    Additionally, if the underlying memory representation of the returned
    array matters, we recommend specifying the `dtype` as a concrete
    object rather than a string alias or allowing it to be inferred. For
    example, a future version of pandas or a 3rd-party library may
    include a dedicated ExtensionArray for string data. In this event,
    the following would no longer return a :class:`arrays.PandasArray`
    backed by a NumPy array.

    >>> pd.array(['a', 'b'], dtype=str)
    <PandasArray>
    ['a', 'b']
    Length: 2, dtype: str32

    This would instead return the new ExtensionArray dedicated for string
    data. If you really need the new array to be backed by a NumPy array,
    specify that in the dtype.

    >>> pd.array(['a', 'b'], dtype=np.dtype("<U1"))
    <PandasArray>
    ['a', 'b']
    Length: 2, dtype: str32

    Or use the dedicated constructor for the array you're expecting, and
    wrap that in a PandasArray

    >>> pd.array(np.array(['a', 'b'], dtype='<U1'))
    <PandasArray>
    ['a', 'b']
    Length: 2, dtype: str32

    Examples
    --------
    If a dtype is not specified, `data` is passed through to
    :meth:`numpy.array`, and a :class:`arrays.PandasArray` is returned.

    >>> pd.array([1, 2])
    <PandasArray>
    [1, 2]
    Length: 2, dtype: int64

    Or the NumPy dtype can be specified

    >>> pd.array([1, 2], dtype=np.dtype("int32"))
    <PandasArray>
    [1, 2]
    Length: 2, dtype: int32

    You can use the string alias for `dtype`

    >>> pd.array(['a', 'b', 'a'], dtype='category')
    [a, b, a]
    Categories (2, object): [a, b]

    Or specify the actual dtype

    >>> pd.array(['a', 'b', 'a'],
    ...          dtype=pd.CategoricalDtype(['a', 'b', 'c'], ordered=True))
    [a, b, a]
    Categories (3, object): [a < b < c]

    Because omitting the `dtype` passes the data through to NumPy,
    a mixture of valid integers and NA will return a floating-point
    NumPy array.

    >>> pd.array([1, 2, np.nan])
    <PandasArray>
    [1.0, 2.0, nan]
    Length: 3, dtype: float64

    To use pandas' nullable :class:`pandas.arrays.IntegerArray`, specify
    the dtype:

    >>> pd.array([1, 2, np.nan], dtype='Int64')
    <IntegerArray>
    [1, 2, NaN]
    Length: 3, dtype: Int64

    Pandas will infer an ExtensionArray for some types of data:

    >>> pd.array([pd.Period('2000', freq="D"), pd.Period("2000", freq="D")])
    <PeriodArray>
    ['2000-01-01', '2000-01-01']
    Length: 2, dtype: period[D]

    `data` must be 1-dimensional. A ValueError is raised when the input
    has the wrong dimensionality.

    >>> pd.array(1)
    Traceback (most recent call last):
      ...
    ValueError: Cannot pass scalar '1' to 'pandas.array'.
    """
    from pandas.core.arrays import (
        period_array, ExtensionArray, IntervalArray, PandasArray,
        DatetimeArrayMixin, TimedeltaArrayMixin,
    )
    from pandas.core.internals.arrays import extract_array

    # scalars are rejected outright; `data` must be 1-dimensional
    if lib.is_scalar(data):
        msg = (
            "Cannot pass scalar '{}' to 'pandas.array'."
        )
        raise ValueError(msg.format(data))

    # unwrap Series/Index (and PandasArray) down to the underlying array
    data = extract_array(data, extract_numpy=True)

    if dtype is None and isinstance(data, ExtensionArray):
        # take the dtype from the data when it is already an EA
        dtype = data.dtype

    # this returns None for not-found dtypes.
    if isinstance(dtype, compat.string_types):
        dtype = registry.find(dtype) or dtype

    if is_extension_array_dtype(dtype):
        # explicit extension dtype: delegate to its array type
        cls = dtype.construct_array_type()
        return cls._from_sequence(data, dtype=dtype, copy=copy)

    if dtype is None:
        # no dtype given: infer an extension type from the scalars,
        # falling back to PandasArray when inference is ambiguous
        inferred_dtype = lib.infer_dtype(data)
        if inferred_dtype == 'period':
            try:
                return period_array(data, copy=copy)
            except tslibs.IncompatibleFrequency:
                # We may have a mixture of frequencies.
                # We choose to return an ndarray, rather than raising.
                pass
        elif inferred_dtype == 'interval':
            try:
                return IntervalArray(data, copy=copy)
            except ValueError:
                # We may have a mixture of `closed` here.
                # We choose to return an ndarray, rather than raising.
                pass

        elif inferred_dtype.startswith('datetime'):
            # datetime, datetime64
            try:
                return DatetimeArrayMixin._from_sequence(data, copy=copy)
            except ValueError:
                # Mixture of timezones, fall back to PandasArray
                pass

        elif inferred_dtype.startswith('timedelta'):
            # timedelta, timedelta64
            return TimedeltaArrayMixin._from_sequence(data, copy=copy)

        # TODO(BooleanArray): handle this type

    # final fallback: a PandasArray over a plain ndarray
    result = PandasArray._from_sequence(data, dtype=dtype, copy=copy)
    return result