def shift(self, n, freq=None):
    """
    Specialized shift which produces a DatetimeIndex

    Parameters
    ----------
    n : int
        Periods to shift by
    freq : DateOffset or timedelta-like, optional

    Returns
    -------
    shifted : DatetimeIndex
    """
    # An explicit freq different from our own: defer to the generic
    # Index.shift, converting an alias string to an offset first.
    if freq is not None and freq != self.offset:
        if isinstance(freq, basestring):
            freq = datetools.to_offset(freq)
        return Index.shift(self, n, freq)

    if n == 0:
        # immutable so OK
        return self

    if self.offset is None:
        raise ValueError("Cannot shift with no offset")

    # Shift both endpoints by n offset steps and regenerate the range.
    step = n * self.offset
    first = self[0] + step
    last = self[-1] + step
    return DatetimeIndex(start=first, end=last, freq=self.offset,
                         name=self.name)
def convert(self, rule, method='pad', how='last', axis=0, as_index=True,
            closed='right', label='right'):
    """
    Convenience method for frequency conversion and resampling of regular
    time-series data.

    Parameters
    ----------
    rule : the offset string or object representing target conversion
    how : string, method for down- or re-sampling, default 'last'
    method : string, method for upsampling, default 'pad'
    axis : int, optional, default 0
    closed : {'right', 'left'}, default 'right'
        Which side of bin interval is closed
    label : {'right', 'left'}, default 'right'
        Which bin edge label to label bucket with
    as_index : see synonymous argument of groupby
    """
    from pandas.core.groupby import TimeGrouper, translate_grouping

    if isinstance(rule, basestring):
        rule = datetools.to_offset(rule)

    ax = self._get_axis(axis)
    if not isinstance(ax, DatetimeIndex):
        raise ValueError("Cannot call convert with non-DatetimeIndex")
    if not isinstance(rule, datetools.DateOffset):
        raise ValueError("Rule not a recognized offset")

    grouper = TimeGrouper(rule, label=label, closed=closed, _obj=self)

    n_source = len(ax)
    n_target = len(grouper.binner) - 2  # since binner extends endpoints

    if n_target <= n_source:
        # down- or re-sampling: aggregate the values falling in each bin
        grouped = self.groupby(grouper, axis=axis, as_index=as_index)
        if isinstance(how, basestring):
            how = translate_grouping(how)
        result = grouped.agg(how)
    else:
        # upsampling: reindex onto the finer grid and fill per `method`
        result = self.reindex(grouper.binner[1:-1].view('M8[us]'),
                              method=method)

    result.index.offset = rule
    return result
def snap(self, freq="S"):
    """
    Snap time stamps to the nearest occurring frequency point
    """
    # Superdumb, punting on any optimizing: visit every element.
    offset = datetools.to_offset(freq)

    snapped = np.empty(len(self), dtype="M8[us]")
    for pos, stamp in enumerate(self):
        if not offset.onOffset(stamp):
            before = offset.rollback(stamp)
            after = offset.rollforward(stamp)
            # pick the closer neighbor; ties resolve forward
            if abs(stamp - before) < abs(after - stamp):
                stamp = before
            else:
                stamp = after
        snapped[pos] = np.datetime64(stamp)

    # we know it conforms; skip check
    return DatetimeIndex(snapped, freq=offset, verify_integrity=False)
def _num_of_shown_days_to_timedelta(self):
    """Translate ``self.num_of_shown_days`` into a day-based offset."""
    # keep only the leading token (the numeric part) before any space
    count = str(self.num_of_shown_days).split(' ')[0]
    return datetools.to_offset(count + "d")
def test_rule_aliases(self):
    # the '10us' alias string should parse to a Micro(10) offset
    parsed = datetools.to_offset('10us')
    self.assertEqual(parsed, datetools.Micro(10))
def _num_of_shown_days_to_timedelta(self):
    """Convert ``self.num_of_shown_days`` into a "<n>d" frequency offset."""
    # the attribute may carry trailing text after a space; drop it
    leading = str(self.num_of_shown_days).split(' ')[0]
    return datetools.to_offset("%sd" % leading)
def test_rule_aliases(self):
    # '10us' alias should parse to a Micro(10) offset
    rule = datetools.to_offset("10us")
    # use assertEqual instead of the deprecated assert_(a == b):
    # richer failure output, and consistent with the sibling test
    self.assertEqual(rule, datetools.Micro(10))
def __new__(cls, data=None, freq=None, start=None, end=None, periods=None,
            dtype=None, copy=False, name=None, tz=None,
            verify_integrity=True, normalize=False, **kwds):
    """
    Create a DatetimeIndex.

    Two construction modes:

    * ``data is None`` -- generate a regular range from ``start``/``end``/
      ``periods`` using ``freq`` (required in this mode).
    * ``data`` given -- coerce the values to a datetime64[us] array.

    Parameters
    ----------
    data : array-like of datetime64 / datetime / string / int, optional
    freq : DateOffset or frequency alias string, optional
    start, end : datetime-like, optional
        Range endpoints used when ``data`` is None.
    periods : int, optional
        Number of periods for range generation.
    dtype : NOTE(review): accepted but never read in this body -- confirm
    copy : bool, default False
        Copy ``data`` when coercing to datetime64.
    name : object, optional
        Name attached to the resulting index.
    tz : tzinfo-like, optional
    verify_integrity : bool, default True
        When an offset is known, roll each non-conforming timestamp
        forward onto the offset.
    normalize : bool, default False
        Normalize ``start``/``end`` to midnight before generating.
    **kwds
        Supports the deprecated ``offset`` keyword as an alias for ``freq``.
    """
    # Back-compat: deprecated 'offset' keyword overrides 'freq'
    warn = False
    if "offset" in kwds and kwds["offset"]:
        freq = kwds["offset"]
        warn = True

    if not isinstance(freq, datetools.DateOffset):
        freq = datetools.to_offset(freq)

    if warn:
        import warnings
        warnings.warn("parameter 'offset' is deprecated, "
                      "please use 'freq' instead", FutureWarning)
        # NOTE(review): freq already went through to_offset above, so
        # these string checks look unreachable -- confirm intent
        if isinstance(freq, basestring):
            freq = datetools.get_offset(freq)
    else:
        if isinstance(freq, basestring):
            freq = datetools.to_offset(freq)

    offset = freq

    if data is None and offset is None:
        raise ValueError("Must provide freq argument if no data is "
                         "supplied")

    if data is None:
        # Range-generation mode: build the timestamps from endpoints.
        # _normalized tracks whether both endpoints fall on midnight,
        # which gates use of the normalized-range cache below.
        _normalized = True

        if start is not None:
            start = Timestamp(start)
            if not isinstance(start, Timestamp):
                raise ValueError("Failed to convert %s to timestamp"
                                 % start)

            if normalize:
                start = datetools.normalize_date(start)
                _normalized = True
            else:
                _normalized = _normalized and start.time() == _midnight

        if end is not None:
            end = Timestamp(end)
            if not isinstance(end, Timestamp):
                raise ValueError("Failed to convert %s to timestamp"
                                 % end)

            if normalize:
                end = datetools.normalize_date(end)
                _normalized = True
            else:
                _normalized = _normalized and end.time() == _midnight

        start, end, tz = tools._figure_out_timezone(start, end, tz)

        # Use the class-level range cache when the offset supports
        # caching and the naive endpoints fall inside the cache window
        if (offset._should_cache() and
            not (offset._normalize_cache and not _normalized) and
            datetools._naive_in_cache_range(start, end)):
            index = cls._cached_range(start, end, periods=periods,
                                      offset=offset, name=name)
        else:
            index = _generate_regular_range(start, end, periods, offset)

        index = index.view(cls)
        index.name = name
        index.offset = offset
        index.tz = tz
        return index

    # Data-coercion mode from here on
    if not isinstance(data, np.ndarray):
        if np.isscalar(data):
            raise ValueError("DatetimeIndex() must be called with a "
                             "collection of some kind, %s was passed"
                             % repr(data))

        # a lone datetime becomes a length-1 collection
        if isinstance(data, datetime):
            data = [data]

        # other iterable of some kind
        if not isinstance(data, (list, tuple)):
            data = list(data)

        data = np.asarray(data, dtype="O")

        # try a few ways to make it datetime64
        if lib.is_string_array(data):
            data = _str_to_dt_array(data)
        else:
            data = np.asarray(data, dtype="M8[us]")

    if issubclass(data.dtype.type, basestring):
        subarr = _str_to_dt_array(data)
    elif issubclass(data.dtype.type, np.integer):
        subarr = np.array(data, dtype="M8[us]", copy=copy)
    elif issubclass(data.dtype.type, np.datetime64):
        subarr = np.array(data, dtype="M8[us]", copy=copy)
    else:
        # NOTE(review): this fallback is identical to the two branches
        # above -- presumably kept for future differentiation; confirm
        subarr = np.array(data, dtype="M8[us]", copy=copy)

    # TODO: this is horribly inefficient. If user passes data + offset, we
    # need to make sure data points conform. Punting on this
    if verify_integrity:
        if offset is not None:
            for i, ts in enumerate(subarr):
                if not offset.onOffset(Timestamp(ts)):
                    val = Timestamp(offset.rollforward(ts)).value
                    subarr[i] = val

    subarr = subarr.view(cls)
    subarr.name = name
    subarr.offset = offset
    subarr.tz = tz
    return subarr