def test_index_base_slotted_a(self) -> None:
    # IndexBase declares __slots__: setting an arbitrary attribute must
    # fail, and no per-instance __dict__ is available.
    index = IndexBase()
    with self.assertRaises(AttributeError):
        setattr(index, 'g', 30)
    with self.assertRaises(AttributeError):
        getattr(index, '__dict__')
def from_pandas(cls, value, *, own_data: bool = False) -> 'Series':
    '''Given a Pandas Series, return a Series. Args: value: Pandas Series. {own_data} {own_index} Returns: :py:class:`static_frame.Series` '''
    raw = value.values
    if own_data:
        # Caller cedes ownership: lock the underlying array in place
        # rather than copying.
        raw.flags.writeable = False
        data = raw
    else:
        # Defensive path: produce an immutable array without mutating
        # the caller's data.
        data = immutable_filter(raw)
    return cls(data,
            index=IndexBase.from_pandas(value.index),
            name=value.name,
            own_index=True,
            )
def from_pandas(cls, value) -> 'IndexHierarchyGO':
    '''
    Given a Pandas index, return the appropriate IndexBase derived class.
    '''
    # Delegate to the shared base-class factory, requesting the
    # grow-only variant.
    post = IndexBase.from_pandas(value, is_go=True)
    return post
def pivot_index_map(*,
        index_src: IndexBase,
        depth_level: DepthLevelSpecifier,
        dtypes_src: tp.Optional[tp.Sequence[np.dtype]],
        ) -> PivotIndexMap:
    '''
    Build a PivotIndexMap that partitions the depths of ``index_src`` into
    target depths (selected by ``depth_level``, to be moved to the expand
    axis) and group depths (those remaining on the contract axis).

    Args:
        index_src: the source index on the contract axis.
        depth_level: depth position(s) in ``index_src`` to select as targets.
        dtypes_src: must be of length equal to axis
    '''
    # We are always moving levels from one axis to another; after application, the expanded axis will always be hierarchical, while the contracted axis may or may not be. From the contract axis, we need to divide the depths into two categories: targets (the depths to be moved and added to expand axis) and groups (unique combinations that remain on the contract axis after removing targets).
    # Unique target labels are added to labels on the expand axis; unique group labels become the new contract axis.
    # Boolean mask over depths: True marks a target depth.
    target_select = np.full(index_src.depth, False)
    target_select[depth_level] = True
    group_select = ~target_select

    # Partition the per-depth label arrays by the mask above.
    group_arrays = []
    target_arrays = []
    for i, v in enumerate(target_select):
        if v:
            target_arrays.append(index_src.values_at_depth(i))
        else:
            group_arrays.append(index_src.values_at_depth(i))

    group_depth = len(group_arrays)
    target_depth = len(target_arrays)
    group_to_dtype: tp.Dict[tp.Optional[tp.Hashable], np.dtype] = {}
    targets_unique: tp.Iterable[tp.Hashable]

    if group_depth == 0:
        # All depths were selected as targets; a single pseudo-group keyed
        # by None maps each target tuple to its positional axis index.
        # targets must be a tuple
        group_to_target_map = {
                None: {v: idx for idx, v in enumerate(zip(*target_arrays))}
                }
        # Dict insertion order preserves the order targets were observed.
        targets_unique = [k for k in group_to_target_map[None]]
        if dtypes_src is not None:
            # One combined dtype for the single pseudo-group.
            group_to_dtype[None] = resolve_dtype_iter(dtypes_src)
    else:
        # Map each group label to an (insertion-ordered) dict of
        # target -> original axis position.
        group_to_target_map = defaultdict(dict)
        targets_unique = dict() # Store targets in order observed
        # Iterate rows of the contract axis: each row yields its group
        # label tuple, target label tuple, and (optionally) a dtype.
        for axis_idx, (group, target, dtype) in enumerate(zip(
                zip(*group_arrays), # get tuples of len 1 to depth
                zip(*target_arrays),
                (dtypes_src if dtypes_src is not None else repeat(None)),
                )):
            if group_depth == 1:
                # Unwrap single-depth groups from their 1-tuples.
                group = group[0]
            # targets are transfered labels; groups are the new columns
            group_to_target_map[group][target] = axis_idx
            targets_unique[target] = None #type: ignore
            if dtypes_src is not None:
                if group in group_to_dtype:
                    # Widen the group's dtype to accommodate this row.
                    group_to_dtype[group] = resolve_dtype(group_to_dtype[group], dtype)
                else:
                    group_to_dtype[group] = dtype

    return PivotIndexMap( #pylint: disable=E1120
            targets_unique=targets_unique,
            target_depth=target_depth,
            target_select=target_select,
            group_to_target_map=group_to_target_map, #type: ignore
            group_depth=group_depth,
            group_select=group_select,
            group_to_dtype=group_to_dtype
            )
def test_index_base_not_implemented(self) -> None:
    '''Abstract-interface methods on a bare IndexBase raise NotImplementedError.'''
    # Fix: the original asserted idx1.copy() twice back-to-back; the
    # duplicate assertion is removed (coverage is unchanged).
    idx1 = IndexBase()
    with self.assertRaises(NotImplementedError):
        idx1._ufunc_axis_skipna(axis=0,
                skipna=False,
                ufunc=np.sum,
                ufunc_skipna=np.nansum,
                composable=True,
                dtypes=(),
                size_one_unity=True)
    with self.assertRaises(NotImplementedError):
        idx1._update_array_cache()
    with self.assertRaises(NotImplementedError):
        idx1.copy()
    with self.assertRaises(NotImplementedError):
        idx1.display()
    with self.assertRaises(NotImplementedError):
        idx1.from_labels(())
def tree_extractor(index: IndexBase) -> tp.Union[IndexBase, TreeNodeT]:
    # Apply the enclosing-scope extractor, then convert any
    # IndexHierarchy result to its tree representation.
    post = extractor(index)
    return post.to_tree() if isinstance(post, IndexHierarchy) else post