Example #1
0
 def from_date_range(cls,
         start: DateInitializer,
         stop: DateInitializer,
         step: int = 1,
         *,
         name: tp.Hashable = None
         ):
     '''
     Construct an IndexDate from a contiguous range of dates; both start and stop are included.
     '''
     first = to_datetime64(start, _DT64_DAY)
     # np.arange excludes its stop bound, so push one day past stop to include it
     bound = to_datetime64(stop, _DT64_DAY) + _TD64_DAY
     labels = np.arange(first, bound, np.timedelta64(step, 'D'))
     # the array is shared with the constructed index; make it immutable
     labels.flags.writeable = False
     return cls(labels, name=name)
Example #2
0
 def from_year_month_range(cls,
         start: YearMonthInitializer,
         stop: YearMonthInitializer,
         step: int = 1,
         *,
         name: tp.Hashable = None):
     '''
     Construct an index of daily labels covering whole months; the start and stop months are both included.
     '''
     first_month = to_datetime64(start, _DT64_MONTH)
     # one month past stop, so every day of the stop month falls inside the exclusive bound
     month_after_last = to_datetime64(stop, _DT64_MONTH) + _TD64_MONTH
     labels = np.arange(
             first_month,
             month_after_last,
             step=np.timedelta64(step, 'D'),
             dtype=_DT64_DAY)  # coerce month bounds to day-resolution labels
     labels.flags.writeable = False
     return cls(labels, name=name)
Example #3
0
    def _ufunc_binary_operator(self, *, operator: tp.Callable[..., tp.Any],
                               other: object) -> np.ndarray:
        '''Apply binary ``operator`` between this index's labels and ``other``; return an immutable array.'''
        if self._recache:
            self._update_array_cache()

        op_name = operator.__name__
        if op_name in ('matmul', 'rmatmul'):
            raise NotImplementedError('matrix multiplication not supported')

        if isinstance(other, Index):
            other = other.values  # operate on labels to labels
        elif isinstance(other, str):
            # parse without forcing this index's unit, so the string's own
            # resolution (day, month, year) is retained
            other = to_datetime64(other)

        if isinstance(other, np.datetime64):
            # cast labels to other's unit to enable matching on month, year, etc.
            result = operator(self._labels.astype(other.dtype), other)
        elif isinstance(other, datetime.timedelta):
            result = operator(self._labels, to_timedelta64(other))
        else:
            # np.timedelta64, arrays, and scalars work directly
            result = operator(self._labels, other)

        result.flags.writeable = False
        return result
Example #4
0
 def from_date_range(cls,
         start: DateInitializer,
         stop: DateInitializer,
         step: int = 1,
         *,
         name: tp.Hashable = None):
     '''
     Get an index of year labels (datetime64[Y]) over a range of dates, where start and stop are inclusive.

     NOTE(review): the original docstring said "IndexYearMonth", but the labels built here are year-typed (_DT64_YEAR, step unit 'Y') — confirm the owning class is the year-resolution index.
     '''
     labels = np.arange(
             to_datetime64(start, _DT64_DAY),
             # reduce stop to year resolution, then add one year so the stop
             # year is included in the exclusive arange bound
             to_datetime64(stop, _DT64_DAY).astype(_DT64_YEAR) + _TD64_YEAR,
             np.timedelta64(step, 'Y'),
             dtype=_DT64_YEAR)
     # the array is shared with the constructed index; make it immutable
     labels.flags.writeable = False
     return cls(labels, name=name)
Example #5
0
 def from_year_range(cls: tp.Type[I],
                     start: YearInitializer,
                     stop: YearInitializer,
                     step: int = 1,
                     *,
                     name: tp.Optional[tp.Hashable] = None) -> I:
     '''
     Construct an index of year labels (datetime64[Y]) over a range of years; start and stop are both included.
     '''
     first = to_datetime64(start, _DT64_YEAR)
     # arange stops short of its bound, so step one year past stop
     bound = to_datetime64(stop, _DT64_YEAR) + _TD64_YEAR
     labels = np.arange(first, bound, step=np.timedelta64(step, 'Y'))
     labels.flags.writeable = False
     return cls(labels, name=name)
Example #6
0
    def from_year_month_range(cls: tp.Type[I],
                              start: YearMonthInitializer,
                              stop: YearMonthInitializer,
                              step: int = 1,
                              *,
                              name: tp.Optional[tp.Hashable] = None) -> I:
        '''
        Construct an index of month labels (datetime64[M]) over a range of months; start and stop are both included.
        '''
        first = to_datetime64(start, DT64_MONTH)
        # extend the exclusive arange bound by one month so stop is included
        bound = to_datetime64(stop, DT64_MONTH) + TD64_MONTH
        labels = np.arange(first, bound, np.timedelta64(step, 'M'), dtype=DT64_MONTH)
        labels.flags.writeable = False
        return cls(labels, name=name)
Example #7
0
    def from_date_range(cls: tp.Type[I],
                        start: DateInitializer,
                        stop: DateInitializer,
                        step: int = 1,
                        *,
                        name: tp.Optional[tp.Hashable] = None) -> I:
        '''
        Construct an index of month labels (datetime64[M]) covering a range of dates; both start and stop are included.
        '''
        first = to_datetime64(start, _DT64_DAY)
        # reduce stop to month resolution, then step one month past it so the
        # month containing stop falls inside the exclusive arange bound
        bound = to_datetime64(stop, _DT64_DAY).astype(_DT64_MONTH) + _TD64_MONTH
        labels = np.arange(first, bound, np.timedelta64(step, 'M'), dtype=_DT64_MONTH)
        labels.flags.writeable = False
        return cls(labels, name=name)
Example #8
0
 def append(self, value: tp.Hashable) -> None:
     '''Specialize for fixed-typed indices: coerce ``value`` to this index's dtype up front; no per-append dtype resolution or _loc_is_iloc check is needed.
     '''
     value = to_datetime64(value, self._DTYPE)
     if value in self._map:
         raise KeyError(f'duplicate key append attempted: {value}')
     # the position of the new label is the current count
     new_pos = self._positions_mutable_count
     self._map[value] = new_pos
     self._labels_mutable.append(value)
     self._positions_mutable_count = new_pos + 1 #pylint: disable=E0237
     self._recache = True #pylint: disable=E0237
Example #9
0
 def append(self, value: tp.Hashable) -> None:
     '''Specialize for fixed-typed indices: convert `value` argument; do not need to resolve_dtype with each addition.

     Args:
         value: label to append; coerced to this index's datetime64 dtype.

     Raises:
         KeyError: if the (converted) value is already present.
     '''
     value = to_datetime64(value, self._DTYPE)
     if self._map is not None:  # _map can be None; presumably the loc_is_iloc case — confirm
         try:
             self._map.add(value)
         except ValueError as e:
             # the map signals a duplicate with ValueError; re-raise as KeyError
             # and chain the cause so the traceback shows the origin
             raise KeyError(f'duplicate key append attempted: {value}') from e
     self._labels_mutable.append(value)
     self._positions_mutable_count += 1  #pylint: disable=E0237
     self._recache = True  #pylint: disable=E0237
Example #10
0
    def _ufunc_binary_operator(self, *,
            operator: tp.Callable[..., tp.Any],
            other: object,
            fill_value: object = np.nan,
            ) -> np.ndarray:
        '''
        Apply binary ``operator`` between this index's labels and ``other``; return an immutable array.

        Args:
            operator: a binary callable (e.g. a NumPy ufunc or an operator-module function).
            other: an Index, array, string (parsed to datetime64), timedelta, or scalar.
            fill_value: accepted for interface compatibility; not referenced in this implementation.
        '''
        if self._recache:
            self._update_array_cache()

        if operator.__name__ == 'matmul' or operator.__name__ == 'rmatmul':
            raise NotImplementedError('matrix multiplication not supported')

        # normalize `other` and record whether it is an array, which drives
        # the broadcast-failure handling below
        if isinstance(other, Index):
            other = other.values # operate on labels to labels
            other_is_array = True
        elif isinstance(other, str):
            # do not pass dtype, as want to coerce to this parsed type, not the type of self
            other = to_datetime64(other)
            other_is_array = False
        elif other.__class__ is np.ndarray:
            other_is_array = True
        else:
            other_is_array = False

        if isinstance(other, np.datetime64):
            # convert labels to other's datetime64 type to enable matching on month, year, etc.
            result = operator(self._labels.astype(other.dtype), other)
        elif isinstance(other, datetime.timedelta):
            result = operator(self._labels, to_timedelta64(other))
        else: # np.timedelta64 should work fine here
            with WarningsSilent():
                result = operator(self._labels, other)

        # NOTE: similar branching as in container_util.apply_binary_operator
        # NOTE: all string will have been converted to dt64, or raise ValueError; comparison to same sized iterables (list, tuple) will result in an array when they are the same size
        if result is False: # will never be True
            if not other_is_array and hasattr(other, '__len__') and len(other) == len(self):
                # NOTE: equality comparisons of an array to same sized iterable normally return an array, but with dt64 types they just return False
                result = np.full(self.shape, result, dtype=DTYPE_BOOL)
            elif other_is_array and other.size == 1:
                # elements in arrays of 0 or more dimensions are acceptable; this is what NP does for arithmetic operators when the types are compatible
                result = np.full(self.shape, result, dtype=DTYPE_BOOL)
            else:
                raise ValueError('operands could not be broadcast together')
                # raise on unaligned shapes as is done for arithmetic operators

        result.flags.writeable = False
        return result
Example #11
0
    def _ufunc_binary_operator(self, *, operator: tp.Callable, other) -> np.ndarray:
        '''Apply binary ``operator`` between this index's labels and ``other``; return an immutable array.'''
        if self._recache:
            self._update_array_cache()

        if issubclass(other.__class__, Index):
            # compare label array to label array
            other = other.values
        elif isinstance(other, str):
            # parse without forcing this index's unit, retaining the string's resolution
            other = to_datetime64(other)

        labels = self._labels
        if isinstance(other, np.datetime64):
            # match on other's unit (month, year, etc.) by casting our labels
            labels = labels.astype(other.dtype)
        elif isinstance(other, datetime.timedelta):
            other = to_timedelta64(other)
        # np.timedelta64 and other scalars/arrays pass through unchanged

        array = operator(labels, other)
        array.flags.writeable = False
        return array
Example #12
0
 def __contains__(self, value) -> bool:
     '''Return True if value in the labels. Will only return True for an exact match to the type of dates stored within.
     '''
     try:
         # coerce to datetime64 so membership is tested on the stored unit
         key = to_datetime64(value)
     except ValueError:
         # a value that cannot be parsed as a date can never be a label;
         # __contains__ should report False rather than propagate the error
         return False
     return self._map.__contains__(key)
Example #13
0
    def __init__(self,
            labels: IndexInitializer,
            *,
            loc_is_iloc: bool = False,
            name: tp.Hashable = None,
            dtype: DtypeSpecifier = None
            ) -> None:
        '''
        Initialize this Index from ``labels``.

        Args:
            labels: an iterable of hashable labels, an array, another IndexBase subclass, or a Series-like object exposing ``values``.
            loc_is_iloc: when True, loc keys are assumed to equal iloc positions (no automatic discovery; see the final assignment below).
            name: optional hashable name for the index.
            dtype: optional dtype for the labels; must agree with the class-level _DTYPE when that is set.

        Raises:
            RuntimeError: on a dtype argument or resulting label dtype incompatible with _DTYPE.
            KeyError: when labels contain non-unique values.
        '''
        self._recache = False
        self._map = None
        positions = None

        # resolve the targeted labels dtype, by looking at the class attr _DTYPE and/or the passed dtype argument
        if dtype is None:
            dtype_extract = self._DTYPE # set in some specialized Index classes
        else: # passed dtype is not None
            if self._DTYPE is not None and dtype != self._DTYPE:
                raise RuntimeError('invalid dtype argument for this Index',
                        dtype, self._DTYPE)
            # self._DTYPE is None (or equals dtype); use the passed dtype
            dtype_extract = dtype

        # handle all Index subclasses
        # check isinstance(labels, IndexBase)
        if issubclass(labels.__class__, IndexBase):
            if labels._recache:
                labels._update_array_cache()
            if name is None and labels.name is not None:
                name = labels.name # immutable, so no copy necessary
            if labels.depth == 1: # not an IndexHierarchy
                if labels.STATIC: # can take the map
                    self._map = labels._map
                # get a reference to the immutable arrays, even if this is an IndexGO index, we can take the cached arrays, assuming they are up to date
                positions = labels._positions
                loc_is_iloc = labels._loc_is_iloc
                labels = labels._labels
            else: # IndexHierarchy
                # will be a generator of tuples; already updated caches
                labels = array2d_to_tuples(labels._labels)
        elif hasattr(labels, 'values'):
            # it is a Series or similar
            array = labels.values
            if array.ndim == 1:
                labels = array
            else:
                # 2D values become per-row tuples
                labels = array2d_to_tuples(array)

        if self._DTYPE is not None:
            # arrays are not converted here; _extract_labels checks them against dtype_extract
            if not isinstance(labels, np.ndarray):
                # for now, assume that if _DTYPE is defined, we have a date
                labels = (to_datetime64(v, dtype_extract) for v in labels)
            else: # coerce to target type
                labels = labels.astype(dtype_extract)

        self._name = name if name is None else name_filter(name)

        if self._map is None:
            # _map was not shared from another Index above; build it
            self._map = self._get_map(labels, positions)

        # this might be NP array, or a list, depending on if static or grow only; if an array, dtype will be compared with passed dtype_extract
        self._labels = self._extract_labels(self._map, labels, dtype_extract)
        self._positions = self._extract_positions(self._map, positions)

        if self._DTYPE and self._labels.dtype != self._DTYPE:
            raise RuntimeError('invalid label dtype for this Index',
                    self._labels.dtype, self._DTYPE)
        if len(self._map) != len(self._labels):
            # the map collapses duplicates, so a length mismatch means non-unique labels
            raise KeyError(f'labels ({len(self._labels)}) have non-unique values ({len(self._map)})')

        # NOTE: automatic discovery is possible, but not yet implemented
        self._loc_is_iloc = loc_is_iloc
Example #14
0
    def __init__(self,
                 labels: IndexInitializer,
                 *,
                 loc_is_iloc: bool = False,
                 name: NameType = NAME_DEFAULT,
                 dtype: DtypeSpecifier = None) -> None:
        '''
        Initialize this Index from ``labels``.

        Args:
            labels: an iterable of hashable labels, an array, another IndexBase subclass, or a ContainerOperand exposing ``values``.
            loc_is_iloc: when True, loc keys are assumed to equal iloc positions and no map is built.
            name: optional hashable name; NAME_DEFAULT distinguishes "not given" from an explicit None.
            dtype: optional dtype for the labels; must agree with the class-level _DTYPE when that is set.

        Raises:
            ErrorInitIndex: on an incompatible dtype, non-unique labels, or a resulting label dtype that does not match _DTYPE.
        '''
        self._recache: bool = False
        self._map: tp.Optional[FrozenAutoMap] = None

        positions = None
        is_typed = self._DTYPE is not None

        # resolve the targeted labels dtype, by looking at the class attr _DTYPE and/or the passed dtype argument
        if dtype is None:
            dtype_extract = self._DTYPE  # set in some specialized Index classes
        else:  # passed dtype is not None
            if is_typed and dtype != self._DTYPE:
                # NOTE: should never get to this branch, as derived Index classes that set _DTYPE remove dtype from __init__
                raise ErrorInitIndex('invalid dtype argument for this Index',
                                     dtype, self._DTYPE)  #pragma: no cover
            # self._DTYPE is None (or equals dtype); use the passed dtype
            dtype_extract = dtype

        #-----------------------------------------------------------------------
        # handle all Index subclasses
        if isinstance(labels, IndexBase):
            if labels._recache:
                labels._update_array_cache()
            if name is NAME_DEFAULT:
                name = labels.name  # immutable, so no copy necessary
            if isinstance(labels, Index):  # not an IndexHierarchy
                if (labels.STATIC and self.STATIC and dtype is None):
                    if not is_typed or (is_typed
                                        and self._DTYPE == labels.dtype):
                        # can take the map if static and if types in the dict are the same as those in the labels (or to become the labels after conversion)
                        self._map = labels._map
                # get a reference to the immutable arrays, even if this is an IndexGO index, we can take the cached arrays, assuming they are up to date; for datetime64 indices, we might need to translate to a different type
                positions = labels._positions
                loc_is_iloc = labels._map is None
                labels = labels._labels
            else:  # IndexHierarchy
                # will be a generator of tuples; already updated caches
                labels = array2d_to_tuples(labels.__iter__())
        elif isinstance(labels, ContainerOperand):
            # it is a Series or similar
            array = labels.values
            if array.ndim == 1:
                labels = array
            else:
                # 2D values become per-row tuples
                labels = array2d_to_tuples(array)
        # else: assume an iterable suitable for labels usage

        #-----------------------------------------------------------------------
        if is_typed:
            # arrays are not checked here; _extract_labels compares them to dtype_extract
            if not isinstance(labels, np.ndarray):
                # for now, assume that if _DTYPE is defined, we have a date
                labels = (to_datetime64(v, dtype_extract) for v in labels)
            # coerce to target type
            elif labels.dtype != dtype_extract:
                labels = labels.astype(dtype_extract)
                labels.flags.writeable = False  #type: ignore

        self._name = None if name is NAME_DEFAULT else name_filter(name)

        if self._map is None:  # if _map not shared from another Index
            if not loc_is_iloc:
                try:
                    self._map = FrozenAutoMap(
                        labels) if self.STATIC else AutoMap(labels)
                except ValueError:  # Automap will raise ValueError of non-unique values are encountered
                    pass
                if self._map is None:
                    # NOTE(review): if `labels` was a generator, it is exhausted by the failed
                    # map construction above, so these lengths may read as 0 — confirm callers
                    raise ErrorInitIndex(
                        f'labels ({len(tuple(labels))}) have non-unique values ({len(set(labels))})'
                    )
                size = len(self._map)
            else:  # must assume labels are unique
                # labels must not be a generator, but we assume that internal clients that provided loc_is_iloc will not give a generator
                size = len(labels)  #type: ignore
                if positions is None:
                    positions = PositionsAllocator.get(size)
        else:  # map shared from another Index
            size = len(self._map)

        # this might be NP array, or a list, depending on if static or grow only; if an array, dtype will be compared with passed dtype_extract
        self._labels = self._extract_labels(self._map, labels, dtype_extract)
        self._positions = self._extract_positions(size, positions)

        if self._DTYPE and self._labels.dtype != self._DTYPE:
            raise ErrorInitIndex(
                'invalid label dtype for this Index',  #pragma: no cover
                self._labels.dtype,
                self._DTYPE)