def patch_spaces_dict_Dict():
    """Graft the ``collections.abc.Mapping`` protocol onto gym's ``Dict`` space.

    Mapping has the following inheritance structure and requirements:
      Mapping: ``__getitem__``
        Collection:
          Sized: ``__len__``
          Iterable: ``__iter__``
          Container: ``__contains__``

    ``abc.Mapping`` expects ``__iter__`` to sweep over the keys of the
    mapping, and provides generic implementations of ``.keys``,
    ``.values``, ``.items`` and ``.get`` based on these methods.
    """
    space_cls = importlib.import_module('gym.spaces.dict').Dict

    # Patch the missing protocol methods into the class prototype, and
    # replace the stock __iter__ with the more direct implementation.
    for attr, impl in (
        ('__len__', __len__),
        ('__contains__', __contains__),
        ('keys', keys),
        ('items', items),
        ('values', values),
        ('__iter__', __iter__),
    ):
        setattr(space_cls, attr, impl)

    # Declare Mapping as a virtual base class of Dict.
    Mapping.register(space_cls)
def is_PDSgroup(self, group: abc.Mapping) -> bool:
    """Return True if the dict-like *group* qualifies as a PDS Group,
    False otherwise.

    PDS applies the following restrictions to GROUPS:

    1. The GROUP structure may only be used in a data product label
       which also contains one or more data OBJECT definitions.
    2. The GROUP statement must contain only attribute assignment
       statements, include pointers, or related information pointers
       (i.e., no data location pointers).  If there are multiple
       values, a single statement must be used with either sequence or
       set syntax; no attribute assignment statement or pointer may be
       repeated.
    3. GROUP statements may not be nested.
    4. GROUP statements may not contain OBJECT definitions.
    5. Only PSDD elements may appear within a GROUP statement.
       *PSDD* is not defined anywhere in the ODL PDS document, so
       don't know how to test for it.
    6. The keyword contents associated with a specific GROUP identifier
       must be identical across all labels of a single data set (with
       the exception of the "PARAMETERS" GROUP, as explained).  Use of
       the GROUP structure must be coordinated with the responsible
       PDS discipline Node.

    Items 1 & 6 and the final sentence above can't really be tested by
    examining a single group, but must be dealt with in a larger
    context.  The ODLEncoder.encode_module() handles #1, at least.
    You're on your own for the other two issues.
    """
    obj_count, grp_count = self.count_aggs(group)

    # Items 3 and 4: no nested GROUPs, no OBJECT definitions.
    if (obj_count, grp_count) != (0, 0):
        return False

    # Item 2: no data location pointers (either a bare int, or a
    # quantity object whose value property is an int).
    for key, value in group.items():
        if key.startswith("^"):
            if isinstance(value, int):
                return False
            for quant in self.quantities:
                if isinstance(value, quant.cls) and isinstance(
                        getattr(value, quant.value_prop), int):
                    return False

    # Item 2: no repeated keys.
    all_keys = list(group.keys())
    return len(all_keys) == len(set(all_keys))
def count_aggs(
    self, module: abc.Mapping, obj_count: int = 0, grp_count: int = 0
) -> "tuple[int, int]":
    """Return the count of OBJECT and GROUP aggregations that are
    contained within the *module* as a two-tuple in that order.

    :param module: the dict-like object whose top-level values are
        inspected.
    :param obj_count: starting OBJECT count to add to.
    :param grp_count: starting GROUP count to add to.

    This only counts the values of the passed-in module; it does not
    recurse if those aggregations themselves contain aggregations.
    """
    # Only the values matter here, so iterate .values() directly.
    for v in module.values():
        if isinstance(v, abc.Mapping):
            if isinstance(v, self.grpcls):
                grp_count += 1
            elif isinstance(v, self.objcls):
                obj_count += 1
            else:
                # We treat other dict-like Python objects as PVL
                # Objects for the purposes of this count, because
                # that is how they will be encoded.
                obj_count += 1
    return obj_count, grp_count
def serialize_mapping(self, obj: abc.Mapping):
    """Copy a mapping into a plain dict.

    Keys are carried over unchanged while every value is recursively
    run through :meth:`serialize`.
    """
    result = {}
    for key, value in obj.items():
        result[key] = self.serialize(value)
    return result
def __init__(self, configuration_file=None):
    """Load configuration, falling back to an empty one if no file is found.

    :param configuration_file: optional explicit path; otherwise the
        usual search locations are consulted via _find_config_file.
    """
    _Mapping.__init__(self)

    # Backing stores for configuration values.
    self._sections = {}
    self._cache = {}

    # Locate the configuration file to use.
    configuration_file = self._find_config_file(configuration_file)

    # Read the configuration file if one exists.  If not, keep this
    # Configuration empty, which forces the host and accelerator
    # classes to use default values.
    if configuration_file:
        self._sections = self._read_config_file(configuration_file)

    # AcceleratorAPI backward compatibility.
    self._legacy_backward_compatibility()
def __init__(self, data: abc.Mapping):
    """Build a read-only copy of *data*.

    Lists become ReadOnlyList and dicts become ReadOnlyDict,
    recursively; all other values are stored unchanged.

    :param data: the mapping to copy.
    """
    super(ReadOnlyDict, self).__init__()
    self._data = {}
    for key, value in data.items():
        # Wrap mutable containers in their read-only counterparts.
        if isinstance(value, list):
            value = ReadOnlyList(value)
        elif isinstance(value, dict):
            value = ReadOnlyDict(value)
        # Subscript assignment instead of calling __setitem__
        # explicitly; the effect is identical and idiomatic.
        self._data[key] = value
def __eq__(self, other):
    """Compare for equality (python.org: ``x == y`` calls ``x.__eq__(y)``).

    :param other: Object to be compared with
    :return: (bool) True if equal
    :since: v2.1.0
    """
    # Only another EnvironmentDict can compare equal; defer the actual
    # content comparison to the Mapping base implementation.
    if not isinstance(other, EnvironmentDict):
        return False
    return Mapping.__eq__(self, other)
def __init__(self, db_path: str, writeback: bool = False, new: bool = False):
    """Open (or create) a shelve database at *db_path*.

    :param db_path: path of the underlying shelf file.
    :param writeback: passed through to :func:`shelve.open`.
    :param new: when True, discard any existing database and start
        fresh; when False, the database must already exist.
    :raises FileNotFoundError: if *new* is False and *db_path* does
        not exist.
    """
    Mapping.__init__(self)
    Sized.__init__(self)
    self.__closed__ = False  # tag whether db is closed
    self.__path__ = db_path
    if os.path.exists(self.__path__):
        if new:
            # Start from scratch: drop the existing database file.
            os.remove(self.__path__)
    elif not new:
        raise FileNotFoundError(self.__path__)
    # Single open call instead of one duplicated per branch.
    self.__db__ = shelve.open(self.__path__, writeback=writeback,
                              protocol=None)
def encode_module(self, module: abc.Mapping, level: int = 0) -> str:
    """Return a ``str`` formatted as a PVL module based on the
    dict-like *module* object according to the rules of this encoder,
    with an indentation level of *level*.
    """
    # To align things on the equals sign, pad every non-aggregation
    # key to the length of the longest such key.
    longest = max(
        (len(key) for key, value in module.items()
         if not isinstance(value, abc.Mapping)),
        default=0,
    )

    lines = []
    for key, value in module.items():
        if isinstance(value, abc.Mapping):
            lines.append(self.encode_aggregation_block(key, value, level))
        else:
            lines.append(self.encode_assignment(key, value, level, longest))
    return self.newline.join(lines)
def __subclasshook__(cls, C):  # noqa: N803 (argument name should be lowercase)
    """Check if *C* is a :class:`~collections.abc.Mapping` that also
    provides an ``inverse`` attribute, thus conforming to the
    :class:`BidirectionalMapping` interface, in which case it will be
    considered a (virtual) C even if it doesn't explicitly extend it.
    """
    if cls is not BidirectionalMapping:  # lgtm [py/comparison-using-is]
        return NotImplemented
    if not Mapping.__subclasshook__(C):
        return NotImplemented
    # Walk the MRO looking for an `inverse` attribute declared on any
    # base; only then does C conform to the interface.
    if any(B.__dict__.get('inverse') for B in C.__mro__):
        return True
    return NotImplemented
def __eq__(self, other):
    """Generated by @autodict.

    When *other* is of the same type, use the dict comparison.
    Otherwise, fall back to super.

    :param self:
    :param other:
    :return:
    """
    # Same type: the dict comparison relies on the appropriate fields.
    if isinstance(other, cls):
        return dict(self) == dict(other)
    if isinstance(other, Mapping):
        return dict(self) == other
    # Fall back to inherited behaviour, whatever it is.
    try:
        inherited_eq = super(cls, self).__eq__
    except AttributeError:
        # Can happen in python 2 when adding Mapping inheritance failed.
        return Mapping.__eq__(dict(self), other)
    return inherited_eq(other)
def multi_map_do(m, proc, *args): for k in m.keys(): c = m[k] if isinstance(c,Mapping): multi_map_do(c,proc,*args,k) else: proc(*args,k,c) ## for python3.10 or later if (3,10) <= sys.version_info: def multi_map_do2(m, proc, *args): for k in m.keys(): match (c := m[k]): case Mapping(): multi_map_do2(c,proc,*args,k) case _: proc(*args,k,c) def test(): x = { "a" : 1, "b" : 2, "c" : 3 } multi_map_do(x, lambda *x : print(f"x = {x}") ) def func(*args):
Makes the *default* argument of the original :meth:`dict.setdefault` non-optional. Args: element: The element which is added if not already present. default: The default multiplicity to add the element with if not in the multiset. Returns: The multiplicity for *element* if it is in the multiset, else *default*. """ return self._elements.setdefault(element, default) def clear(self): """Empty the multiset.""" self._elements.clear() self._total = 0 class FrozenMultiset(BaseMultiset): """The frozen multiset variant that is immutable and hashable.""" __slots__ = () def __hash__(self): return hash(frozenset(self._elements.items())) Mapping.register(BaseMultiset) # type: ignore MutableMapping.register(Multiset) # type: ignore if __name__ == '__main__': import doctest doctest.testmod()
def __jclass_init__(cls):
    """Register each initialised Java class as a virtual Mapping subclass."""
    Mapping.register(cls)
def __init__(self):
    """Initialise an empty observable mapping."""
    Mapping.__init__(self)
    self._items = {}
    # Subjects emit notifications as entries are added, removed or
    # the collection is cleared.
    self.added = Subject()
    self.removed = Subject()
    self.cleared = Subject()
raise RuntimeError("Bunch is Frozen") def __setitem__(self, key, item): raise RuntimeError("Bunch is Frozen") def __setattr__(self, key, item): raise RuntimeError("Bunch is Frozen") def __delattr__(self, key): raise RuntimeError("Bunch is Frozen") def __deepcopy__(self, memo): return self.__class__(copy.deepcopy(self._mydict, memo)) MappingABC.register(Bunch) MappingABC.register(FrozenBunch) MappingABC.register(WriteCheckBunch) class HookBunch(Bunch): def __init__(self, inner_dict=None, insert_hook=None, replace_hook=None, delete_hook=None, *args, **kwds): super(HookBunch, self).__init__(inner_dict=inner_dict, *args, **kwds) self.__dict__['insert_hook'] = insert_hook self.__dict__['replace_hook'] = replace_hook
undefined. """ item = self.resolve_or_missing(key) if item is missing: raise KeyError(key) return item def __repr__(self): return '<%s %s of %r>' % (self.__class__.__name__, repr( self.get_all()), self.name) # register the context as mapping if possible try: from collections.abc import Mapping Mapping.register(Context) except ImportError: pass class BlockReference(object): """One block on a template reference.""" def __init__(self, name, context, stack, depth): self.name = name self._context = context self._stack = stack self._depth = depth @property def super(self): """Super the block."""
if item is missing: raise KeyError(key) return item def __repr__(self): return '<%s %s of %r>' % ( self.__class__.__name__, repr(self.get_all()), self.name ) # register the context as mapping if possible try: from collections.abc import Mapping Mapping.register(Context) except ImportError: pass class BlockReference(object): """One block on a template reference.""" def __init__(self, name, context, stack, depth): self.name = name self._context = context self._stack = stack self._depth = depth @property def super(self):
def __setattr__(self, key, value):
    """Delegate attribute assignment to the Mapping base class."""
    return Mapping.__setattr__(self, key, value)
def values(self): """D.values() -> list of D's values.""" return self._dict.values() def __iter__(self): """x.__iter__() <==> iter(x).""" return iter(self._dict) def __len__(self): """x.__len__() <==> len(x).""" return self._dict.__len__() def get(self, key, default=None): """D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.""" return self._dict.get(sympify(key), default) def __contains__(self, key): """D.__contains__(k) -> True if D has a key k, else False.""" return sympify(key) in self._dict def __lt__(self, other): return sympify(self.args < other.args) @property def _sorted_args(self): from ..utilities import default_sort_key return tuple(sorted(self.args, key=default_sort_key)) Mapping.register(Dict)
def __init__(self):
    """Initialise an empty mapping."""
    Mapping.__init__(self)
    self._items = {}
try:
    # `Mapping` lives in `collections.abc`; the plain `collections`
    # alias was removed in Python 3.10, so import it first.
    from collections.abc import Mapping as MappingABC
except ImportError:  # Python 2 fallback
    from collections import Mapping as MappingABC

from ..utilities.future_from_2 import str, unicode


class QuickBunch(dict):
    """A dict whose string keys are also reachable as attributes."""

    def __getattr__(self, name):
        return self[name]

    def __setattr__(self, name, val):
        self[name] = val
        return

    def __dir__(self):
        # Advertise string keys alongside the regular attributes so
        # interactive tab-completion sees bunch entries too.
        dir_lst = list(super(QuickBunch, self).__dir__())
        dir_lst = dir_lst + list(
            k for k in self.keys() if isinstance(k, (str, unicode)))
        dir_lst.sort()
        return dir_lst

    def __getstate__(self):
        return self.__dict__.copy()

    def __setstate__(self, state):
        self.__dict__.update(state)

    def get(self, name, *default):
        return super(QuickBunch, self).get(name, *default)


MappingABC.register(QuickBunch)
if hdf is None: return for key in list(hdf.keys()): del hdf[key] return def keys(self): hdf = self._resolve_hdf() if hdf is None: return iter(()) return iter(list(hdf.keys())) def values(self): hdf = self._resolve_hdf() if hdf is None: return for key in list(hdf.keys()): yield self[key] return def items(self): hdf = self._resolve_hdf() if hdf is None: return for key in list(hdf.keys()): yield key, self[key] return MappingABC.register(HDFDeepBunch)
def __delattr__(self, key):
    """Delegate attribute deletion to the Mapping base class."""
    return Mapping.__delattr__(self, key)
def __jclass_init__(self):
    """Register this class as a virtual subclass of Mapping."""
    Mapping.register(self)
for key, value in self.items(): try: if (value != other[key]): return False except KeyError: return False return True # Easier than making GenericMap actually inherit from Mapping keys = Mapping.keys values = Mapping.values items = Mapping.items GenericMap.register(str, GenericMapS) Mapping.register(GenericMapS) class MutableGenericMap(GenericMap): """An abstract `~collections.abc.MutableMapping` for use when sharing a map between C++ and Python. For compatibility with C++, ``MutableGenericMap`` has the following restrictions: - all keys must be of the same type - values must be built-in types or subclasses of `lsst.afw.typehandling.Storable`. Almost any user-defined class in C++ or Python can have `~lsst.afw.typehandling.Storable` as a mixin. As a safety precaution, `~lsst.afw.typehandling.Storable` objects that are
# MappingView.register(EmptyMapping)
KeysView.register(EmptySet)
ItemsView.register(EmptySet)
ValuesView.register(EmptyCollection)

# Sanity checks: the registrations above make the empty singletons
# pass the corresponding ABC subclass tests.
assert issubclass(EmptySet, KeysView)
assert issubclass(EmptySet, ItemsView)
assert issubclass(EmptySet, Set)
assert issubclass(EmptySet, MappingView)

assert issubclass(EmptyCollection, ValuesView)
assert issubclass(EmptyCollection, Collection)
assert issubclass(EmptyCollection, MappingView)

Mapping.register(EmptyMapping)
Set.register(EmptySet)
Sequence.register(EmptySequence)

assert issubclass(EmptyMapping, Mapping)
assert issubclass(EmptySet, Set)
assert issubclass(EmptySequence, Sequence)
def __init__(self, game_id):
    """Create an empty mapping for a single game.

    :param game_id: identifier kept only for logging purposes.
    """
    Mapping.__init__(self)
    self._game_id = game_id  # Just for logging
    self._back = {}
# This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <https://www.gnu.org/licenses/>. __all__ = [] from collections.abc import Mapping, MutableMapping from lsst.utils import continueClass from ._typehandling import GenericMapS, MutableGenericMapS Mapping.register(GenericMapS) @continueClass # noqa F811 class GenericMapS: def __repr__(self): className = type(self).__name__ return className + "({" + ", ".join("%r: %r" % (key, value) for key, value in self.items()) + "})" def __iter__(self): for key in self._keys(): yield key # Support equality with any Mapping, including dict # Not clear why Mapping.__eq__ doesn't work def __eq__(self, other): if len(self) != len(other):
__all__ = ["Awaitable", "Coroutine", "AsyncIterable", "AsyncIterator", "Hashable", "Iterable", "Iterator", "Generator", "Sized", "Container", "Callable", "Set", "MutableSet", "Mapping", "MutableMapping", **note these two** "MappingView", "KeysView", "ItemsView", "ValuesView", "Sequence", "MutableSequence", "ByteString", ] dict is a Mapping type; its source code follows """ from abc import abstractmethod from collections.abc import Mapping """ class Mapping(Sized, Iterable, Container): __slots__ = () A Mapping is a generic container for associating key/value pairs. This class provides concrete generic implementations of all methods except for __getitem__, __iter__, and __len__. @abstractmethod def __getitem__(self, key): raise KeyError
>>> m1 pmap({'b': 2, 'a': 1}) The changes are kept in the evolver. An updated pmap can be created using the persistent() function on the evolver. >>> m2 = e.persistent() >>> m2 pmap({'c': 3, 'b': 2}) The new pmap will share data with the original pmap in the same way that would have been done if only using operations on the pmap. """ return self._Evolver(self) Mapping.register(PMap) Hashable.register(PMap) def _turbo_mapping(initial, pre_size): if pre_size: size = pre_size else: try: size = 2 * len(initial) or 8 except Exception: # Guess we can't figure out the length. Give up on length hinting, # we can always reallocate later. size = 8 buckets = size * [None]
def __iter__(self):
    """x.__iter__() <==> iter(x)."""
    return iter(list(self.keys()))

def __len__(self):
    """x.__len__() <==> len(x)."""
    return self._dict.__len__()

def iterkeys(self):
    """Iterate over the raw keys of the backing dict."""
    return iter(list(self._dict.keys()))

def keys(self):
    """Return the backing dict's keys as a list."""
    return list(self._dict.keys())

def itervalues(self):
    # Yield via self[key] so values pass through __getitem__
    # (which presumably wraps them — TODO confirm against the class).
    for key in list(self.keys()):
        yield self[key]
    return

def values(self):
    # BUG FIX: this previously returned list(self.values()), calling
    # itself and recursing forever.  Delegate to itervalues() so the
    # values still go through __getitem__.
    return list(self.itervalues())

def iteritems(self):
    # Pair each key with its (possibly wrapped) value via self[key].
    for key in list(self.keys()):
        yield key, self[key]
    return

def items(self):
    # BUG FIX: previously returned list(self.items()) — infinite
    # recursion.  Delegate to iteritems() instead.
    return list(self.iteritems())

MappingABC.register(TagBunch)