def _generate(cls, start, end, periods, name, offset,
              tz=None, normalize=False):
    _normalized = True

    if start is not None:
        start = Timestamp(start)
        if not isinstance(start, Timestamp):
            raise ValueError('Failed to convert %s to timestamp' % start)

        if normalize:
            start = normalize_date(start)
            _normalized = True
        else:
            _normalized = _normalized and start.time() == _midnight

    if end is not None:
        end = Timestamp(end)
        if not isinstance(end, Timestamp):
            raise ValueError('Failed to convert %s to timestamp' % end)

        if normalize:
            end = normalize_date(end)
            _normalized = True
        else:
            _normalized = _normalized and end.time() == _midnight

    start, end, tz = tools._figure_out_timezone(start, end, tz)

    if (offset._should_cache() and
            not (offset._normalize_cache and not _normalized) and
            _naive_in_cache_range(start, end)):
        index = cls._cached_range(start, end, periods=periods,
                                  offset=offset, name=name)
    else:
        index = _generate_regular_range(start, end, periods, offset)

    if tz is not None:
        # Convert local to UTC
        ints = index.view('i8')
        index = lib.tz_localize_to_utc(ints, tz)
        index = index.view(_NS_DTYPE)

    index = index.view(cls)
    index.name = name
    index.offset = offset
    index.tz = tz

    return index
def tz_localize(self, tz):
    """
    Localize tz-naive DatetimeIndex to given time zone (using pytz)

    Returns
    -------
    localized : DatetimeIndex
    """
    if self.tz is not None:
        raise ValueError("Already tz-aware, use tz_convert to convert.")
    tz = tools._maybe_get_tz(tz)

    # Convert to UTC
    new_dates = lib.tz_localize_to_utc(self.asi8, tz)
    new_dates = new_dates.view(_NS_DTYPE)
    return self._simple_new(new_dates, self.name, self.offset, tz)
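# Illustrative usage sketch (an assumption, not part of the code above): how
# tz_localize and tz_convert are typically exercised through the public
# pandas API. The dates are chosen away from any DST transition so that
# localization cannot hit a non-existent wall time.
import pandas as pd

naive = pd.date_range('2012-03-06 12:00', periods=3, freq='H')
eastern = naive.tz_localize('US/Eastern')   # interpret wall times as US/Eastern
utc = eastern.tz_convert('UTC')             # same instants, expressed in UTC

# Localizing an already tz-aware index trips the guard above:
# eastern.tz_localize('UTC')  ->  ValueError("Already tz-aware, ...")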
def _generate(cls, start, end, periods, name, offset,
              tz=None, normalize=False):
    _normalized = True

    if start is not None:
        start = Timestamp(start)
        if normalize:
            start = normalize_date(start)
            _normalized = True
        else:
            _normalized = _normalized and start.time() == _midnight

    if end is not None:
        end = Timestamp(end)
        if normalize:
            end = normalize_date(end)
            _normalized = True
        else:
            _normalized = _normalized and end.time() == _midnight

    start, end, tz = tools._figure_out_timezone(start, end, tz)

    if com._count_not_none(start, end, periods) < 2:
        raise ValueError("Must specify two of start, end, or periods")

    if (offset._should_cache() and
            not (offset._normalize_cache and not _normalized) and
            _naive_in_cache_range(start, end)):
        index = cls._cached_range(start, end, periods=periods,
                                  offset=offset, name=name)
    else:
        index = _generate_regular_range(start, end, periods, offset)

    if tz is not None:
        # Convert local to UTC
        ints = index.view("i8", type=np.ndarray)
        index = lib.tz_localize_to_utc(ints, tz)
        index = index.view(_NS_DTYPE)

    index = index.view(cls)
    index.name = name
    index.offset = offset
    index.tz = tz

    return index
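# Hedged sketch (assumed usage, not library code): the "two of start, end,
# periods" contract enforced above, exercised through the public date_range
# wrapper rather than by calling _generate directly.
import pandas as pd

pd.date_range(start='2012-01-01', end='2012-01-10', freq='D')   # start + end
pd.date_range(start='2012-01-01', periods=10, freq='D')         # start + periods
pd.date_range(end='2012-01-10', periods=10, freq='B')           # end + periods

# Supplying only one of the three is rejected:
# pd.date_range(start='2012-01-01')  ->  ValueError("Must specify two of ...")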
def __new__(cls, data=None, freq=None, start=None, end=None, periods=None,
            copy=False, name=None, tz=None, verify_integrity=True,
            normalize=False, **kwds):

    warn = False
    if 'offset' in kwds and kwds['offset']:
        freq = kwds['offset']
        warn = True

    freq_infer = False
    if not isinstance(freq, DateOffset):
        if freq != 'infer':
            freq = to_offset(freq)
        else:
            freq_infer = True
            freq = None

    if warn:
        import warnings
        warnings.warn("parameter 'offset' is deprecated, "
                      "please use 'freq' instead", FutureWarning)

    offset = freq

    if periods is not None:
        if com.is_float(periods):
            periods = int(periods)
        elif not com.is_integer(periods):
            raise ValueError('Periods must be a number, got %s' %
                             str(periods))

    if data is None and offset is None:
        raise ValueError("Must provide freq argument if no data is "
                         "supplied")

    if data is None:
        return cls._generate(start, end, periods, name, offset,
                             tz=tz, normalize=normalize)

    if not isinstance(data, np.ndarray):
        if np.isscalar(data):
            raise ValueError('DatetimeIndex() must be called with a '
                             'collection of some kind, %s was passed'
                             % repr(data))

        # other iterable of some kind
        if not isinstance(data, (list, tuple)):
            data = list(data)

        data = np.asarray(data, dtype='O')

        # try a few ways to make it datetime64
        if lib.is_string_array(data):
            data = _str_to_dt_array(data, offset)
        else:
            data = tools.to_datetime(data)
            data.offset = offset

    if issubclass(data.dtype.type, basestring):
        subarr = _str_to_dt_array(data, offset)
    elif issubclass(data.dtype.type, np.datetime64):
        if isinstance(data, DatetimeIndex):
            subarr = data.values
            if offset is None:
                offset = data.offset
                verify_integrity = False
        else:
            if data.dtype != _NS_DTYPE:
                subarr = lib.cast_to_nanoseconds(data)
            else:
                subarr = data
    elif data.dtype == _INT64_DTYPE:
        if copy:
            subarr = np.asarray(data, dtype=_NS_DTYPE)
        else:
            subarr = data.view(_NS_DTYPE)
    else:
        subarr = tools.to_datetime(data)
        if not np.issubdtype(subarr.dtype, np.datetime64):
            raise TypeError('Unable to convert %s to datetime dtype'
                            % str(data))

    if tz is not None:
        tz = tools._maybe_get_tz(tz)
        # Convert local to UTC
        ints = subarr.view('i8')
        subarr = lib.tz_localize_to_utc(ints, tz)
        subarr = subarr.view(_NS_DTYPE)

    subarr = subarr.view(cls)
    subarr.name = name
    subarr.offset = offset
    subarr.tz = tz

    if verify_integrity and len(subarr) > 0:
        if offset is not None and not freq_infer:
            inferred = subarr.inferred_freq
            if inferred != offset.freqstr:
                raise ValueError('Dates do not conform to passed '
                                 'frequency')

    if freq_infer:
        inferred = subarr.inferred_freq
        if inferred:
            subarr.offset = to_offset(inferred)

    return subarr
def _generate(cls, start, end, periods, name, offset,
              tz=None, normalize=False):
    if com._count_not_none(start, end, periods) < 2:
        raise ValueError('Must specify two of start, end, or periods')

    _normalized = True

    if start is not None:
        start = Timestamp(start)

    if end is not None:
        end = Timestamp(end)

    inferred_tz = tools._infer_tzinfo(start, end)

    if tz is not None and inferred_tz is not None:
        assert(inferred_tz == tz)
    elif inferred_tz is not None:
        tz = inferred_tz

    tz = tools._maybe_get_tz(tz)

    if start is not None:
        if normalize:
            start = normalize_date(start)
            _normalized = True
        else:
            _normalized = _normalized and start.time() == _midnight

    if end is not None:
        if normalize:
            end = normalize_date(end)
            _normalized = True
        else:
            _normalized = _normalized and end.time() == _midnight

    if hasattr(offset, 'delta') and offset != offsets.Day():
        if inferred_tz is None and tz is not None:
            # naive dates
            if start is not None and start.tz is None:
                start = start.tz_localize(tz)

            if end is not None and end.tz is None:
                end = end.tz_localize(tz)

        if start and end:
            if start.tz is None and end.tz is not None:
                start = start.tz_localize(end.tz)

            if end.tz is None and start.tz is not None:
                end = end.tz_localize(start.tz)

        if (offset._should_cache() and
                not (offset._normalize_cache and not _normalized) and
                _naive_in_cache_range(start, end)):
            index = cls._cached_range(start, end, periods=periods,
                                      offset=offset, name=name)
        else:
            index = _generate_regular_range(start, end, periods, offset)
    else:
        if inferred_tz is None and tz is not None:
            # naive dates
            if start is not None and start.tz is not None:
                start = start.replace(tzinfo=None)

            if end is not None and end.tz is not None:
                end = end.replace(tzinfo=None)

        if start and end:
            if start.tz is None and end.tz is not None:
                end = end.replace(tzinfo=None)

            if end.tz is None and start.tz is not None:
                start = start.replace(tzinfo=None)

        if (offset._should_cache() and
                not (offset._normalize_cache and not _normalized) and
                _naive_in_cache_range(start, end)):
            index = cls._cached_range(start, end, periods=periods,
                                      offset=offset, name=name)
        else:
            index = _generate_regular_range(start, end, periods, offset)

        if tz is not None and getattr(index, 'tz', None) is None:
            index = lib.tz_localize_to_utc(com._ensure_int64(index), tz)
            index = index.view(_NS_DTYPE)

    index = index.view(cls)
    index.name = name
    index.offset = offset
    index.tz = tz

    return index
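# Hedged sketch (assumed usage): the timezone inference implemented above,
# seen from the public API. A tz-aware start is enough for the generated
# index to pick up that zone; an explicit tz that conflicts with the
# inferred one trips the assert instead of being silently converted.
import pandas as pd

start = pd.Timestamp('2012-06-01 09:00', tz='Europe/Berlin')
rng = pd.date_range(start=start, periods=4, freq='D')
# rng.tz is Europe/Berlin, inferred from the tz-aware start

# pd.date_range(start=start, periods=4, freq='D', tz='US/Eastern')
#   -> fails on the assert(inferred_tz == tz) branch above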
def __new__(cls, data=None, freq=None, start=None, end=None, periods=None,
            copy=False, name=None, tz=None, verify_integrity=True,
            normalize=False, **kwds):

    dayfirst = kwds.pop('dayfirst', None)
    yearfirst = kwds.pop('yearfirst', None)

    warn = False
    if 'offset' in kwds and kwds['offset']:
        freq = kwds['offset']
        warn = True

    freq_infer = False
    if not isinstance(freq, DateOffset):
        if freq != 'infer':
            freq = to_offset(freq)
        else:
            freq_infer = True
            freq = None

    if warn:
        import warnings
        warnings.warn("parameter 'offset' is deprecated, "
                      "please use 'freq' instead", FutureWarning)

    offset = freq

    if periods is not None:
        if com.is_float(periods):
            periods = int(periods)
        elif not com.is_integer(periods):
            raise ValueError('Periods must be a number, got %s' %
                             str(periods))

    if data is None and offset is None:
        raise ValueError("Must provide freq argument if no data is "
                         "supplied")

    if data is None:
        return cls._generate(start, end, periods, name, offset,
                             tz=tz, normalize=normalize)

    if not isinstance(data, np.ndarray):
        if np.isscalar(data):
            raise ValueError('DatetimeIndex() must be called with a '
                             'collection of some kind, %s was passed'
                             % repr(data))

        # other iterable of some kind
        if not isinstance(data, (list, tuple)):
            data = list(data)

        data = np.asarray(data, dtype='O')

        # try a few ways to make it datetime64
        if lib.is_string_array(data):
            data = _str_to_dt_array(data, offset, dayfirst=dayfirst,
                                    yearfirst=yearfirst)
        else:
            data = tools.to_datetime(data)
            data.offset = offset

            if isinstance(data, DatetimeIndex):
                if name is not None:
                    data.name = name
                return data

    if issubclass(data.dtype.type, basestring):
        subarr = _str_to_dt_array(data, offset, dayfirst=dayfirst,
                                  yearfirst=yearfirst)
    elif issubclass(data.dtype.type, np.datetime64):
        if isinstance(data, DatetimeIndex):
            if tz is None:
                tz = data.tz

            subarr = data.values

            if offset is None:
                offset = data.offset
                verify_integrity = False
        else:
            if data.dtype != _NS_DTYPE:
                subarr = lib.cast_to_nanoseconds(data)
            else:
                subarr = data
    elif data.dtype == _INT64_DTYPE:
        if isinstance(data, Int64Index):
            raise TypeError('cannot convert Int64Index->DatetimeIndex')
        if copy:
            subarr = np.asarray(data, dtype=_NS_DTYPE)
        else:
            subarr = data.view(_NS_DTYPE)
    else:
        try:
            subarr = tools.to_datetime(data)
        except ValueError:
            # tz aware
            subarr = tools.to_datetime(data, utc=True)

        if not np.issubdtype(subarr.dtype, np.datetime64):
            raise TypeError('Unable to convert %s to datetime dtype'
                            % str(data))

    if isinstance(subarr, DatetimeIndex):
        if tz is None:
            tz = subarr.tz
    else:
        if tz is not None:
            tz = tools._maybe_get_tz(tz)

            if (not isinstance(data, DatetimeIndex) or
                    getattr(data, 'tz', None) is None):
                # Convert tz-naive to UTC
                ints = subarr.view('i8')
                subarr = lib.tz_localize_to_utc(ints, tz)

            subarr = subarr.view(_NS_DTYPE)

    subarr = subarr.view(cls)
    subarr.name = name
    subarr.offset = offset
    subarr.tz = tz

    if verify_integrity and len(subarr) > 0:
        if offset is not None and not freq_infer:
            inferred = subarr.inferred_freq
            if inferred != offset.freqstr:
                raise ValueError('Dates do not conform to passed '
                                 'frequency')

    if freq_infer:
        inferred = subarr.inferred_freq
        if inferred:
            subarr.offset = to_offset(inferred)

    return subarr
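# Hedged sketch (assumed usage): the main constructor branches handled
# above, driven through the public DatetimeIndex entry point. Each call
# takes a different path: string parsing, datetime64 passthrough, and the
# verify_integrity frequency check.
import pandas as pd

# strings -> parsed via _str_to_dt_array / to_datetime
idx1 = pd.DatetimeIndex(['2012-01-01', '2012-01-02', '2012-01-03'])

# an existing datetime64[ns] ndarray is taken as-is (cast to ns if needed)
idx2 = pd.DatetimeIndex(idx1.values, freq='D')

# dates that do not conform to the declared frequency are rejected:
# pd.DatetimeIndex(['2012-01-01', '2012-01-02', '2012-01-05'], freq='D')
#   -> ValueError('Dates do not conform to passed frequency')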