def _generic_to_annotation(ann_node_type: type, node: NodeNG) -> TypeResult:
    """Convert a ``typing`` annotation object into a TypeResult.

    Bare generic aliases (e.g. ``Dict``, ``Tuple``) are filled in with
    ``Any`` parameters; parameterized generics are converted recursively;
    anything else is validated with ``typing._type_check``.
    """
    # A "generic" is either a typing._GenericAlias, or (3.9+) any object
    # exposing __origin__ (covers builtin generics such as list[int]).
    is_generic = isinstance(ann_node_type, _GenericAlias) or \
        (sys.version_info >= (3, 9) and hasattr(ann_node_type, '__origin__'))
    # Unsubscripted aliases are the exact objects the typing module exports
    # under their _name (e.g. typing.Dict itself).
    if (is_generic and ann_node_type is
            getattr(typing, getattr(ann_node_type, '_name', '') or '', None)):
        if ann_node_type == Dict:
            ann_type = wrap_container(ann_node_type, Any, Any)
        elif ann_node_type == Tuple:
            # TODO: Add proper support for multi-parameter Tuples
            ann_type = wrap_container(ann_node_type, Any)
        else:
            ann_type = wrap_container(ann_node_type, Any)
    elif is_generic:
        # Subscripted generic: convert each type argument recursively.
        # ``>>`` appears to be the TypeResult bind operator (append the
        # converted arg on success, propagate failure) — TODO confirm.
        parsed_args = []
        for arg in ann_node_type.__args__:
            _generic_to_annotation(arg, node) >> parsed_args.append
        ann_type = wrap_container(ann_node_type, *parsed_args)
    else:
        try:
            _type_check(ann_node_type, '')
        except TypeError:
            # Not a valid type expression (e.g. a bare literal value).
            return TypeFailAnnotationInvalid(node)
        else:
            ann_type = TypeInfo(ann_node_type)
    return ann_type
def __getitem__(self, param):
    """Parameterize a DataPipe class with a type hint.

    Builds a new subclass whose ``type`` attribute is the validated
    parameter; the new hint must be a subtype of the current one.
    """
    if param is None:
        raise TypeError('{}[t]: t can not be None'.format(self.__name__))
    # Strings are treated as forward references; bare tuples as Tuple[...].
    if isinstance(param, str):
        param = ForwardRef(param)
    if isinstance(param, tuple):
        param = Tuple[param]
    _type_check(param, msg="{}[t]: t must be a type".format(self.__name__))
    t = _DataPipeType(param)
    # Narrowing only: the new parameter must be a subtype of the current one.
    if not t.issubtype(self.type):
        raise TypeError(
            'Can not subclass a DataPipe[{}] from DataPipe[{}]'.format(
                t, self.type))
    # Types are equal, fast path for inheritance
    if self.type.issubtype(t):
        # _mro_subclass_init presumably decides whether this class can be
        # reused as-is instead of creating a new subclass — TODO confirm.
        if _mro_subclass_init(self):
            return self
    name = self.__name__ + '[' + str(t) + ']'
    bases = (self, ) + self.__bases__
    # The new class records the narrowed type and marks itself as a
    # type-parameterized class.
    return self.__class__(
        name, bases, {
            '__init_subclass__': _dp_init_subclass,
            '__type_class__': True,
            'type': t
        })
def is_valid_type(arg, message: str, is_argument: bool = True):
    """Validate *arg* as a type via typing's internal ``_type_check``.

    On newer typing versions the ``is_argument`` flag is forwarded
    directly; on older ones, a ClassVar annotation that is not an
    argument annotation is returned unchecked, since ``_type_check``
    there would reject it.
    """
    if NEW_TYPING:
        return _type_check(arg, message, is_argument)
    if is_classvar(arg):
        if not is_argument:
            return arg
    return _type_check(arg, message)
def __new__(cls, name, bases, ns, total=True, allow_extra=False):
    # Create new typed dict class object.
    # This method is called directly when TypedDict is subclassed,
    # or via _typeddict_new when TypedDict is instantiated. This way
    # TypedDict supports all three syntaxes described in its docstring.
    # Subclasses and instances of TypedDict return actual dictionaries
    # via _dict_new.
    ns['__new__'] = _key_typeddict_new if name == 'KeyTypedDict' else _dict_new
    # The declared bases are dropped; every KeyTypedDict is a dict subclass.
    tp_dict = super(_KeyTypedDictMeta, cls).__new__(cls, name, (dict, ), ns)
    try:
        # Setting correct module is necessary to make typed dict classes pickleable.
        tp_dict.__module__ = sys._getframe(2).f_globals.get(
            '__name__', '__main__')
    except (AttributeError, ValueError):
        pass
    anns = ns.get('__annotations__', {})
    msg = "KeyTypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
    # Validate every declared value type with typing's internal checker.
    anns = {n: typing._type_check(tp, msg) for n, tp in anns.items()}
    for base in bases:
        # NOTE(review): base annotations are merged *after* the class's own,
        # so a base wins over a redeclaration here — confirm intended order.
        anns.update(base.__dict__.get('__annotations__', {}))
    tp_dict.__annotations__ = anns
    # __total__ / __allow_extra__ are only set when not already inherited.
    if not hasattr(tp_dict, '__total__'):
        tp_dict.__total__ = total
    if not hasattr(tp_dict, '__allow_extra__'):
        tp_dict.__allow_extra__ = allow_extra
    return tp_dict
def __class_getitem__(cls, params):
    """Subscript a Generic/Protocol class with type parameters.

    Validates each parameter, enforces Protocol's unique-TypeVar rule,
    and returns a ``_GenericAlias`` wrapping the class.
    """
    if not isinstance(params, tuple):
        params = (params, )
    # Only Tuple may legally be subscripted with an empty parameter list.
    if not params and cls is not Tuple:
        raise TypeError(
            "Parameter list to {}[...] cannot be empty".format(
                cls.__qualname__))
    msg = "Parameters to generic types must be types."
    params = tuple(_type_check(p, msg) for p in params)
    if cls is Protocol:
        # Generic can only be subscripted with unique type variables.
        if not all(isinstance(p, TypeVar) for p in params):
            # Locate the first non-TypeVar for a precise error message.
            i = 0
            while isinstance(params[i], TypeVar):
                i += 1
            raise TypeError(
                "Parameters to Protocol[...] must all be type variables."
                " Parameter {} is {}".format(i + 1, params[i]))
        if len(set(params)) != len(params):
            raise TypeError(
                "Parameters to Protocol[...] must all be unique")
    else:
        # Subscripting a regular Generic subclass.
        _check_generic(cls, params)
    return _GenericAlias(cls, params)
def reinforce_type(self, expected_type):
    r"""
    Reinforce the type for DataPipe instance. And the 'expected_type' is
    required to be a subtype of the original type hint to restrict the
    type requirement of DataPipe instance.
    """
    # Normalize a bare tuple of types into a typing.Tuple[...] hint.
    if isinstance(expected_type, tuple):
        expected_type = Tuple[expected_type]
    _type_check(expected_type, msg="'expected_type' must be a type")
    narrows = issubtype(expected_type, self.type.param)
    if not narrows:
        raise TypeError(
            "Expected 'expected_type' as subtype of {}, but found {}"
            .format(self.type, _type_repr(expected_type)))
    self.type = _DataPipeType(expected_type)
    return self
def _make_structclass(name, types, readonly=False, use_dict=False, gc=False,
                      use_weakref=False, hashable=False):
    """Build a structclass named *name* from (field, type) pairs.

    Each declared type is validated with ``typing._type_check`` and the
    resulting mapping is attached to the class as ``__annotations__``.
    """
    msg = "StructClass('Name', [(f0, t0), (f1, t1), ...]); each t must be a type"
    checked = []
    for field_name, field_type in types:
        checked.append((field_name, _type_check(field_type, msg)))
    # Determine the caller's module so the generated class pickles correctly.
    try:
        module = _sys._getframe(2).f_globals.get('__name__', '__main__')
    except (AttributeError, ValueError):
        module = None
    struct_cls = structclass(
        name,
        [field_name for field_name, _ in checked],
        readonly=readonly,
        use_dict=use_dict,
        gc=gc,
        use_weakref=use_weakref,
        hashable=hashable,
        module=module,
    )
    struct_cls.__annotations__ = dict(checked)
    return struct_cls
def __getitem__(self, item):
    """Subscribe once with a single type; a second subscript is an error."""
    cls = type(self)
    display_name = cls.__name__[1:]  # strip the leading '_' for messages
    if self.__type__ is not None:
        raise TypeError('{} cannot be further subscripted'.format(display_name))
    checked = typing._type_check(
        item, '{} accepts only single type.'.format(display_name))
    return cls(checked, _root=True)
def __getitem__(self, item):
    """Parameterize with exactly one type; re-subscription raises TypeError."""
    cls = type(self)
    if self.__type__ is None:
        msg = '{} accepts only single type.'.format(cls.__name__[1:])
        return cls(typing._type_check(item, msg), _root=True)
    raise TypeError(
        '{} cannot be further subscripted'.format(cls.__name__[1:]))
def _resolver(forward_code, globalns, localns): if globalns is None and localns is None: globalns = localns = {} elif globalns is None: globalns = localns elif localns is None: localns = globalns return typing._type_check(eval(forward_code, globalns, localns), "Forward references must evaluate to types.")
def _make_recordclass(name, types):
    """Create a recordclass from (field, type) pairs, validating each type."""
    msg = "RecordClass('Name', [(f0, t0), (f1, t1), ...]); each t must be a type"
    checked = [(field, _type_check(tp, msg)) for field, tp in types]
    rec_cls = recordclass(name, [field for field, _ in checked])
    # Both attributes intentionally share the same dict object.
    annotations = dict(checked)
    rec_cls.__annotations__ = rec_cls._field_types = annotations
    try:
        # Point __module__ at the caller's module so pickling works.
        rec_cls.__module__ = _sys._getframe(2).f_globals.get('__name__', '__main__')
    except (AttributeError, ValueError):
        pass
    return rec_cls
def _make_recordclass(name, types, readonly=False, hashable=False):
    """Create a recordclass from (field, type) pairs; types are validated."""
    msg = "RecordClass('Name', [(f0, t0), (f1, t1), ...]); each t must be a type"
    checked = []
    for field, tp in types:
        checked.append((field, _type_check(tp, msg)))
    rec_cls = recordclass(name, [field for field, _ in checked],
                          readonly=readonly, hashable=hashable)
    rec_cls.__annotations__ = dict(checked)
    try:
        # Make the generated class pickleable by crediting the caller's module.
        rec_cls.__module__ = _sys._getframe(2).f_globals.get(
            '__name__', '__main__')
    except (AttributeError, ValueError):
        pass
    return rec_cls
def __getitem__(self, item):
    """Create a parameterized copy of this class; double subscription fails."""
    cls = type(self)
    short = cls.__name__[1:]  # drop leading '_' for user-facing messages
    if self.__type__ is not None:
        raise TypeError('{} cannot be further subscripted'.format(short))
    param = typing._type_check(
        item, '{} accepts only single type.'.format(short))
    return cls(self.__name__, self.__bases__, dict(self.__dict__),
               tp=param, _root=True)
def _make_trafaretrecord(name, types):
    """Build a trafaretrecord class from validated (field, type) pairs."""
    msg = ("TrafaretRecord('Name', [(f0, t0), (f1, t1), ...]); "
           "each t must be a type")
    checked = [(field, _type_check(tp, msg)) for field, tp in types]
    rec_cls = trafaretrecord(name, [field for field, _ in checked])
    rec_cls._field_types = dict(checked)
    try:
        # Credit the caller's module so the class can be pickled.
        rec_cls.__module__ = sys._getframe(2).f_globals.get(
            '__name__', '__main__')
    except (AttributeError, ValueError):
        pass
    return rec_cls
def __getitem__(self, params):
    """Parameterize a DataPipe class with type parameter(s).

    Strings become forward references; if the current type parameter is
    itself generic it is subscripted directly; otherwise a single
    parameter narrows the existing type.
    """
    if params is None:
        raise TypeError('{}[t]: t can not be None'.format(self.__name__))
    if isinstance(params, str):
        params = ForwardRef(params)
    if not isinstance(params, tuple):
        params = (params, )
    msg = "{}[t]: t must be a type".format(self.__name__)
    params = tuple(_type_check(p, msg) for p in params)
    if isinstance(self.type.param, _GenericAlias):
        orig = getattr(self.type.param, '__origin__', None)
        if isinstance(orig, type) and orig is not Generic:
            # The current parameter is itself generic: subscript it directly.
            p = self.type.param[params]  # type: ignore[index]
            t = _DataPipeType(p)
            # Strip the previous '[...]' suffix (its str length plus the
            # two brackets) before appending the new parameterization.
            l = len(str(self.type)) + 2
            name = self.__name__[:-l]
            name = name + '[' + str(t) + ']'
            bases = (self, ) + self.__bases__
            return self.__class__(
                name, bases, {
                    '__init_subclass__': _dp_init_subclass,
                    'type': t,
                    '__type_class__': True
                })
    if len(params) > 1:
        raise TypeError(
            'Too many parameters for {} actual {}, expected 1'.format(
                self, len(params)))
    t = _DataPipeType(params[0])
    # Narrowing only: the new type must be a subtype of the current one.
    if not t.issubtype(self.type):
        raise TypeError(
            'Can not subclass a DataPipe[{}] from DataPipe[{}]'.format(
                t, self.type))
    # Types are equal, fast path for inheritance
    if self.type == t:
        return self
    name = self.__name__ + '[' + str(t) + ']'
    bases = (self, ) + self.__bases__
    return self.__class__(
        name, bases, {
            '__init_subclass__': _dp_init_subclass,
            '__type_class__': True,
            'type': t
        })
def _make_structclass(name, types, readonly=False, usedict=False, gc=False,
                      weakref=False, hashable=False):
    """Build a structclass from (field, type) pairs with validated types."""
    msg = "StructClass('Name', [(f0, t0), (f1, t1), ...]); each t must be a type"
    checked = [(field, _type_check(tp, msg)) for field, tp in types]
    struct_cls = structclass(name, [field for field, _ in checked],
                             readonly=readonly, usedict=usedict, gc=gc,
                             weakref=weakref, hashable=hashable)
    struct_cls.__annotations__ = dict(checked)
    # Attribute the class to the caller's module so pickling works.
    try:
        struct_cls.__module__ = _sys._getframe(2).f_globals.get(
            '__name__', '__main__')
    except (AttributeError, ValueError):
        pass
    return struct_cls
def __getitem__(self, parameters):
    """Implement subscription for special typing forms.

    Handles ClassVar/Final (single type), Union (flatten, dedupe,
    collapse singletons), Optional (Union with None) and Literal
    (raw values, deliberately unchecked).
    """
    if self._name in ('ClassVar', 'Final'):
        item = _type_check(parameters, f'{self._name} accepts only single type.')
        return _GenericAlias(self, (item,))
    if self._name == 'Union':
        if parameters == ():
            raise TypeError("Cannot take a Union of no types.")
        if not isinstance(parameters, tuple):
            parameters = (parameters,)
        msg = "Union[arg, ...]: each arg must be a type."
        parameters = tuple(_type_check(p, msg) for p in parameters)
        # Flatten nested Unions and drop duplicate members.
        parameters = _remove_dups_flatten(parameters)
        if len(parameters) == 1:
            # A Union of one type is just that type.
            return parameters[0]
        return _GenericAlias(self, parameters)
    if self._name == 'Optional':
        arg = _type_check(parameters, "Optional[t] requires a single type.")
        return Union[arg, type(None)]
    if self._name == 'Literal':
        # There is no '_type_check' call because arguments to Literal[...] are
        # values, not types.
        return _GenericAlias(self, parameters)
    raise TypeError(f"{self} is not subscriptable")
def _evaluate(self, globalns, localns): if not self.__forward_evaluated__ or localns is not globalns: if globalns is None and localns is None: globalns = localns = {} elif globalns is None: globalns = localns elif localns is None: localns = globalns self.__forward_value__ = typing._type_check( eval(self.__forward_code__, globalns, localns), "Forward references must evaluate to types.", is_argument=self.__forward_is_argument__) self.__forward_evaluated__ = True return self.__forward_value__
def _generic_to_annotation(ann_node_type: type, node: NodeNG) -> TypeResult:
    """Convert a ``typing`` annotation object into a TypeResult."""
    # Unsubscripted typing aliases (module-level Dict, Tuple, ...) are
    # identified by being the exact attribute typing exports under _name.
    if (isinstance(ann_node_type, _GenericAlias) and
            ann_node_type is
            getattr(typing, getattr(ann_node_type, '_name', '') or '', None)):
        if ann_node_type == Dict:
            ann_type = wrap_container(ann_node_type, Any, Any)
        elif ann_node_type == Tuple:
            # TODO: Add proper support for multi-parameter Tuples
            ann_type = wrap_container(ann_node_type, Any)
        else:
            ann_type = wrap_container(ann_node_type, Any)
    elif isinstance(ann_node_type, _GenericAlias):
        # Parameterized generic: convert each type argument recursively.
        # ``>>`` appears to be the TypeResult bind operator (append the
        # converted arg on success, propagate failure) — TODO confirm.
        parsed_args = []
        for arg in ann_node_type.__args__:
            _generic_to_annotation(arg, node) >> parsed_args.append
        ann_type = wrap_container(ann_node_type, *parsed_args)
    else:
        try:
            _type_check(ann_node_type, '')
        except TypeError:
            # Not a valid type expression at all.
            return TypeFailAnnotationInvalid(node)
        else:
            ann_type = TypeInfo(ann_node_type)
    return ann_type
def __class_getitem__(
        cls, params: t.Tuple[t.Union[t.Type[t.Any], t.Any], ...]) -> HasMetadata:
    """Annotated-style subscription: ``(type, annotation, ...)``.

    Returns the type itself, after storing each annotation object on the
    type's ``__metadata__`` dict keyed by the annotation's class name.
    """
    if not isinstance(params, tuple) or len(params) < 2:
        raise TypeError("Annotated[...] should be used "
                        "with at least two arguments (a type and an "
                        "annotation).")
    msg = "Annotated[t, ...]: t must be a type."
    origin = t._type_check(params[0], msg)  # type: ignore
    # NOTE(review): hasattr sees inherited attributes, so subscripting a
    # subclass whose base already defines __metadata__ will mutate the
    # base's dict here — confirm whether per-class isolation is intended.
    if not hasattr(origin, "__metadata__"):
        origin.__metadata__ = {}
    for x in params[1:]:
        # Annotations are keyed by their class __name__; later duplicates win.
        origin.__metadata__[x.__name__] = x
    return t.cast(HasMetadata, origin)
def _ForwardRef_safe_eval(ref: ForwardRef,
                          globalns: Dict[str, Any] = None,
                          localns: Dict[str, Any] = None) -> Type:
    """Evaluate *ref* once, tolerating ClassVar results.

    On 3.6/3.7 ForwardRef._evaluate crashes if the string references
    ClassVar, so ClassVar values skip the _type_check validation.
    """
    if ref.__forward_evaluated__:
        return ref.__forward_value__
    if globalns is None and localns is None:
        shared = {}
        globalns = localns = shared
    elif globalns is None:
        globalns = localns
    elif localns is None:
        localns = globalns
    value = eval(ref.__forward_code__, globalns, localns)
    # ClassVar is not a "type" for _type_check purposes; keep it as-is.
    if not _is_class_var(value):
        value = _type_check(value, 'Forward references must evaluate to types.')
    ref.__forward_value__ = value
    return ref.__forward_value__
def __new__(mcls, class_name, bases, namespace, **kwargs):  # type: ignore
    """Metaclass hook: only methods and annotated attributes may appear
    on a Params class; annotation-only names become __params_items__."""
    for name, value in namespace.items():
        if name.startswith('_'):
            continue
        if utils.is_class_method_on_class(
                value) or utils.is_instance_method_on_class(value):
            continue
        # Anything else (plain data attributes, etc.) is rejected.
        raise TypeError(
            "Only methods and attribute type annotations can be defined on Params class, not '{name}'."
            .format(name=name))
    class_params_items = {}
    class_annotations = namespace.get('__annotations__', {})
    for name, value in class_annotations.items():
        # Validate each annotation with typing's internal checker.
        value = typing._type_check(value, "Each annotation must be a type.")
        if name in namespace:
            # Just update the annotation.
            class_annotations[name] = value
        else:
            # Extract annotation out.
            class_params_items[name] = value
    # Annotation-only names are removed from __annotations__ entirely.
    for name in class_params_items.keys():
        del class_annotations[name]
    # Set back updated annotations.
    namespace['__annotations__'] = class_annotations
    params_items = {}
    # Later bases override earlier ones; this class overrides all bases.
    for base in reversed(bases):
        params_items.update(base.__dict__.get('__params_items__', {}))
    params_items.update(class_params_items)
    namespace['__params_items__'] = params_items
    return super().__new__(mcls, class_name, bases, namespace, **kwargs)
def __new__(cls, name, bases, ns, total=True):
    # Create new typed dict class object.
    # This method is called directly when TypedDict is subclassed,
    # or via _typeddict_new when TypedDict is instantiated. This way
    # TypedDict supports all three syntaxes described in its docstring.
    # Subclasses and instances of TypedDict return actual dictionaries
    # via _dict_new.
    ns["__new__"] = _typeddict_new if name == "TypedDict" else _dict_new
    # Declared bases are dropped; every TypedDict is a plain dict subclass.
    tp_dict = super(_TypedDictMeta, cls).__new__(cls, name, (dict,), ns)
    anns = ns.get("__annotations__", {})
    msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
    # Validate every declared value type with typing's internal checker.
    anns = {n: _type_check(tp, msg) for n, tp in anns.items()}
    for base in bases:
        # NOTE(review): base annotations are merged *after* the class's own,
        # so a base wins over a redeclaration here — confirm intended order.
        anns.update(base.__dict__.get("__annotations__", {}))
    tp_dict.__annotations__ = anns
    # __total__ is only set when not already inherited from a base.
    if not hasattr(tp_dict, "__total__"):
        tp_dict.__total__ = total
    return tp_dict
def _evaluate(self, globalns, localns, recursive_guard): if self.__forward_arg__ in recursive_guard: return self if not self.__forward_evaluated__ or localns is not globalns: if globalns is None and localns is None: globalns = localns = {} elif globalns is None: globalns = localns elif localns is None: localns = globalns type_ = _type_check( eval(self.__forward_code__, globalns, localns), "Forward references must evaluate to types.", is_argument=self.__forward_is_argument__, ) self.__forward_value__ = _eval_type( type_, globalns, localns, recursive_guard | {self.__forward_arg__}) self.__forward_evaluated__ = True return self.__forward_value__
def __getitem__(self, params):
    """Subscript a generic class, mirroring GenericMeta.__getitem__ with
    special treatment of Protocol."""
    # We also need to copy this from GenericMeta.__getitem__ to get
    # special treatment of "Protocol". (Comments removed for brevity.)
    if not isinstance(params, tuple):
        params = (params, )
    # Only Tuple may be subscripted with an empty parameter list.
    if not params and _gorg(self) is not Tuple:
        raise TypeError(
            "Parameter list to %s[...] cannot be empty" % self.__qualname__)
    msg = "Parameters to generic types must be types."
    params = tuple(_type_check(p, msg) for p in params)
    if self in (Generic, Protocol):
        # Bare Generic/Protocol may only take unique type variables.
        if not all(isinstance(p, TypeVar) for p in params):
            raise TypeError(
                "Parameters to %r[...] must all be type variables" % self)
        if len(set(params)) != len(params):
            raise TypeError(
                "Parameters to %r[...] must all be unique" % self)
        tvars = params
        args = params
    elif self in (Tuple, Callable):
        tvars = _type_vars(params)
        args = params
    elif self.__origin__ in (Generic, Protocol):
        # e.g. Protocol[T][int] is not allowed.
        raise TypeError("Cannot subscript already-subscripted %s" %
                        repr(self))
    else:
        # Subscripting a user-defined generic: arity-check the parameters.
        _check_generic(self, params)
        tvars = _type_vars(params)
        args = params
    # A bare class records itself as the origin of the new alias.
    prepend = (self, ) if self.__origin__ is None else ()
    return self.__class__(self.__name__, prepend + self.__bases__,
                          _no_slots_copy(self.__dict__), tvars=tvars,
                          args=args, origin=self, extra=self.__extra__,
                          orig_bases=self.__orig_bases__)
def __getitem__(self, params):
    """Subscript Annotated: first parameter is the type, rest is metadata."""
    if not isinstance(params, tuple):
        params = (params, )
    if self.__origin__ is not None:  # specializing an instantiated type
        return super(AnnotatedMeta, self).__getitem__(params)
    # NOTE: params is always a tuple at this point, so this branch
    # effectively reduces to the len(params) < 2 check.
    elif not isinstance(params, tuple) or len(params) < 2:
        raise TypeError("Annotated[...] should be instantiated with at "
                        "least two arguments (a type and an annotation).")
    else:
        msg = "Annotated[t, ...]: t must be a type."
        # Only the first parameter is validated as a type.
        tp = typing._type_check(params[0], msg)
        metadata = tuple(params[1:])
        return self.__class__(
            self.__name__, self.__bases__, dict(self.__dict__),
            tvars=_type_vars((tp, )),
            # Metadata is a tuple so it won't be touched by _replace_args et al.
            args=(tp, metadata),
            origin=self,
        )
def __new__(cls, name, bases, ns):
    # Create new typed dict class object.
    # This method is called directly when TypedDict is subclassed,
    # or via _typeddict_new when TypedDict is instantiated. This way
    # TypedDict supports all three syntaxes described in its docstring.
    # Subclasses and instances of TypedDict return actual dictionaries
    # via _dict_new.
    ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new
    # Declared bases are dropped; the result is always a dict subclass.
    tp_dict = super(_TypedDictMeta, cls).__new__(cls, name, (dict,), ns)
    try:
        # Setting correct module is necessary to make typed dict classes pickleable.
        tp_dict.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__')
    except (AttributeError, ValueError):
        pass
    anns = ns.get('__annotations__', {})
    msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
    # Validate each declared value type.
    anns = {n: _type_check(tp, msg) for n, tp in anns.items()}
    for base in bases:
        # NOTE(review): base annotations are merged after the class's own,
        # so bases win on redeclared keys — confirm intended order.
        anns.update(base.__dict__.get('__annotations__', {}))
    tp_dict.__annotations__ = anns
    return tp_dict
def __getitem__(self, params):
    """Subscript a generic class, mirroring GenericMeta.__getitem__ with
    special treatment of Protocol.

    Validates parameters, enforces unique TypeVars on bare
    Generic/Protocol, and returns a new parameterized class object.
    """
    # We also need to copy this from GenericMeta.__getitem__ to get
    # special treatment of "Protocol". (Comments removed for brevity.)
    if not isinstance(params, tuple):
        params = (params,)
    if not params and _gorg(self) is not Tuple:
        raise TypeError(
            "Parameter list to %s[...] cannot be empty" % self.__qualname__)
    msg = "Parameters to generic types must be types."
    params = tuple(_type_check(p, msg) for p in params)
    if self in (Generic, Protocol):
        if not all(isinstance(p, TypeVar) for p in params):
            # Fixed: the message was previously passed as a second TypeError
            # argument instead of being %-formatted, so %r was never
            # substituted with the class.
            raise TypeError(
                "Parameters to %r[...] must all be type variables" % self)
        if len(set(params)) != len(params):
            # Fixed: same comma-instead-of-% formatting bug as above.
            raise TypeError(
                "Parameters to %r[...] must all be unique" % self)
        tvars = params
        args = params
    elif self in (Tuple, Callable):
        tvars = _type_vars(params)
        args = params
    elif self.__origin__ in (Generic, Protocol):
        # e.g. Protocol[T][int] is not allowed.
        raise TypeError("Cannot subscript already-subscripted %s" %
                        repr(self))
    else:
        # Subscripting a user-defined generic: arity-check the parameters.
        _check_generic(self, params)
        tvars = _type_vars(params)
        args = params
    # A bare class records itself as the origin of the new alias.
    prepend = (self,) if self.__origin__ is None else ()
    return self.__class__(self.__name__, prepend + self.__bases__,
                          dict(self.__dict__), tvars=tvars, args=args,
                          origin=self, extra=self.__extra__,
                          orig_bases=self.__orig_bases__)
def _make_recordclass(name, types, readonly=False, hashable=False):
    """Create a recordclass from (field, type) pairs with validated types."""
    msg = "RecordClass('Name', [(f0, t0), (f1, t1), ...]); each t must be a type"
    checked = [(field, _type_check(tp, msg)) for field, tp in types]
    # Resolve the caller's module so the generated class can be pickled.
    try:
        module = _sys._getframe(2).f_globals.get('__name__', '__main__')
    except (AttributeError, ValueError):
        module = None
    rec_cls = recordclass(name, [field for field, _ in checked],
                          readonly=readonly, hashable=hashable,
                          module=module)
    rec_cls.__annotations__ = dict(checked)
    return rec_cls
def is_consistent_types(arg_type, hint, covariant=True, contravariant=False):
    """Return whether *arg_type* is consistent with *hint* under the
    requested variance rules (equality, then co-/contra-variance)."""
    # TODO: add support for TypingMeta subclasses
    consistent = False
    hint = _type_check(hint, '`hint` argument is not an instance of `type`.')
    # None and NoneType are both treated as consistent with a NoneType hint.
    if hint == type(None):
        consistent = arg_type is None or arg_type is type(None)
    try:
        hint = evaluate_forward_reference(hint)
    except NameError:
        # The forward reference names something not defined yet.
        return bad_match(arg_type, hint, "Too early evaluation of {0}"
                         .format(hint))
    if not consistent:
        consistent = (arg_type == hint)
    # Covariant: arg_type may be a subclass of hint; contravariant: hint
    # may be a subclass of arg_type.
    # NOTE(review): issubclass raises TypeError when either side is not a
    # class (e.g. None or a generic alias) — presumably callers guarantee
    # classes here; confirm.
    if not consistent and covariant:
        consistent = issubclass(arg_type, hint)
    if not consistent and contravariant:
        consistent = issubclass(hint, arg_type)
    return consistent
def __new__(cls, name, bases, ns, total=True):
    """Create a TypedDict class: validate annotations and compute the
    required/optional key sets honoring each base's __total__."""
    ns["__new__"] = _typeddict_new if name == "TypedDict" else _dict_new
    # Declared bases are dropped; the result is always a dict subclass.
    tp_dict = super(_TypedDictMeta, cls).__new__(cls, name, (dict,), ns)
    anns = ns.get("__annotations__", {})
    msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
    anns = {n: _type_check(tp, msg) for n, tp in anns.items()}
    # Own keys are required iff this class is total.
    required = set(anns if total else ())
    optional = set(() if total else anns)
    for base in bases:
        base_anns = base.__dict__.get("__annotations__", {})
        anns.update(base_anns)
        # Base keys are required/optional according to the base's totality.
        if getattr(base, "__total__", True):
            required.update(base_anns)
        else:
            optional.update(base_anns)
    tp_dict.__annotations__ = anns
    tp_dict.__required_keys__ = frozenset(required)
    tp_dict.__optional_keys__ = frozenset(optional)
    # __total__ is only set when not already inherited.
    if not hasattr(tp_dict, "__total__"):
        tp_dict.__total__ = total
    return tp_dict
def __init__(self, name, *constraints): self.__name__ = name msg = "TypeVar(name, constraint, ...): constraints must be types." self.__constraints__ = tuple(_type_check(t, msg) for t in constraints)
# Demo: annotate a list and exercise typing's private _type_check helper.
import typing
from typing import *

lst: List[int] = [1, 2, 3, 4, 5]
print(type(lst))
# The third positional argument is `is_argument`; the message is only
# shown when validation fails.
print(typing._type_check(List[List[int]], "Es una lista de ints", False))
def structclass(typename, fields, rename=False, defaults=None, readonly=False,
                usedict=False, gc=False, weakref=False, hashable=False,
                assequence=True, module=None):
    """Returns a new subclass of array with named fields.

    >>> Point = structclass('Point', 'x y')
    >>> Point.__doc__                   # docstring for the new class
    'Point(x, y)'
    >>> p = Point(11, y=22)             # instantiate with positional args or keywords
    >>> p[0] + p[1]                     # indexable like a plain tuple
    33
    >>> x, y = p                        # unpack like a regular tuple
    >>> x, y
    (11, 22)
    >>> p.x + p.y                       # fields also accessable by name
    33
    >>> d = p._asdict()                 # convert to a dictionary
    >>> d.x
    11
    >>> d.x = 33                        # assign new value
    >>> Point(**d)                      # convert from a dictionary
    Point(x=33, y=22)
    >>> p._replace(x=100)               # _replace() is like str.replace() but targets named fields
    Point(x=100, y=22)
    """
    # Field specs: either a whitespace/comma separated string of names, or
    # an iterable of names and/or (name, type) pairs.
    if isinstance(fields, str):
        field_names = fields.replace(',', ' ').split()
        annotations = None
    else:
        msg = "recordclass('Name', [(f0, t0), (f1, t1), ...]); each t must be a type"
        annotations = {}
        field_names = []
        for fn in fields:
            if type(fn) is tuple:
                n, t = fn
                n = str(n)
                # _type_check may be unavailable (falsy); only validate
                # declared types when it is present.
                if _type_check:
                    t = _type_check(t, msg)
                annotations[n] = t
                field_names.append(n)
            else:
                field_names.append(str(fn))
    typename = _intern(str(typename))
    if rename:
        # Replace invalid/duplicate names with positional placeholders.
        seen = set()
        for index, name in enumerate(field_names):
            if (not _isidentifier(name) or _iskeyword(name)
                    or name.startswith('_') or name in seen):
                field_names[index] = '_%d' % index
            seen.add(name)
    for name in [typename] + field_names:
        if type(name) != str:
            raise TypeError('Type names and field names must be strings')
        if not _isidentifier(name):
            raise ValueError('Type names and field names must be valid '
                             'identifiers: %r' % name)
        if _iskeyword(name):
            raise ValueError('Type names and field names cannot be a '
                             'keyword: %r' % name)
    seen = set()
    for name in field_names:
        if name.startswith('_') and not rename:
            raise ValueError('Field names cannot start with an underscore: '
                             '%r' % name)
        if name in seen:
            raise ValueError('Encountered duplicate field name: %r' % name)
        seen.add(name)
    if defaults is not None:
        defaults = tuple(defaults)
        if len(defaults) > len(field_names):
            raise TypeError('Got more default values than field names')
    field_names = tuple(map(_intern, field_names))
    n_fields = len(field_names)
    arg_list = ', '.join(field_names)
    repr_fmt = ', '.join(_repr_template.format(name=name)
                         for name in field_names)
    # __new__ is generated from a source template so its signature carries
    # the actual field names; **kw is only accepted when usedict is on.
    if usedict:
        new_func_template = """\
def __new__(_cls, {1}, **kw):
    'Create new instance of {0}({1})'
    return _baseclass.__new__(_cls, {1}, **kw)
"""
    else:
        new_func_template = """\
def __new__(_cls, {1}):
    'Create new instance of {0}({1})'
    return _baseclass.__new__(_cls, {1})
"""
    new_func_def = new_func_template.format(typename, arg_list)
    #print(new_func_def)
    namespace = dict(_baseclass=recordobject)
    code = compile(new_func_def, "", "exec")
    eval(code, namespace)
    __new__ = namespace['__new__']
    if defaults is not None:
        __new__.__defaults__ = defaults
    #@classmethod
    def _make(_cls, iterable):
        ob = _cls(*iterable)
        if len(ob) != n_fields:
            raise TypeError('Expected %s arguments, got %s' %
                            (n_fields, len(ob)))
        return ob
    _make.__doc__ = 'Make a new %s object from a sequence or iterable' % typename
    def _replace(_self, **kwds):
        # NOTE: mutates in place and returns self (records are mutable).
        for name, val in kwds.items():
            setattr(_self, name, val)
        return _self
    _replace.__doc__ = 'Return a new %s object replacing specified fields with new values' % typename
    def __repr__(self):
        'Return a nicely formatted representation string'
        args_text = repr_fmt % tuple(self)
        # Instances created with usedict carry extra keywords in __dict__.
        try:
            kw = self.__dict__
        except AttributeError:
            kw = None
        if kw:
            kw_text = repr(kw)
            return self.__class__.__name__ + "(" + args_text + ", **" + kw_text + ")"
        else:
            return self.__class__.__name__ + "(" + args_text + ")"
    def _asdict(self):
        'Return a new OrderedDict which maps field names to their values.'
        return OrderedDict(zip(self.__attrs__, self))
    # Fix up qualnames so the helpers report themselves as members.
    for method in (__new__, _make, _replace, __repr__, _asdict,):
        method.__qualname__ = typename + "." + method.__name__
    _make = classmethod(_make)
    __options__ = {'readonly': readonly, 'usedict': usedict, 'gc': gc,
                   'weakref': weakref, 'hashable': hashable,
                   'assequence': assequence}
    class_namespace = {
        '__doc__': typename + '(' + arg_list + ')',
        '__attrs__': field_names,
        '__new__': __new__,
        '_make': _make,
        '_replace': _replace,
        '__repr__': __repr__,
        '_asdict': _asdict,
        '__options__': __options__,
    }
    _result = structclasstype(typename, (recordobject,), class_namespace)
    # For pickling to work, the __module__ variable needs to be set to the frame
    # where the class is created.
    if module is None:
        try:
            module = _sys._getframe(1).f_globals.get('__name__', '__main__')
        except (AttributeError, ValueError):
            pass
    if module is not None:
        _result.__module__ = module
    if annotations:
        _result.__annotations__ = annotations
    return _result
def __init__(self, name, *constraints): self.__name__ = name msg = "TypeVar(name, constraint, ...): constraints must be types." self.__constraints__ = tuple(_type_check(t, msg) for t in constraints)
def structclass(typename, fields, rename=False, defaults=None, readonly=False,
                usedict=False, gc=False, weakref=False, hashable=False,
                assequence=True, module=None):
    """Returns a new subclass of array with named fields.

    >>> Point = structclass('Point', 'x y')
    >>> Point.__doc__                   # docstring for the new class
    'Point(x, y)'
    >>> p = Point(11, y=22)             # instantiate with positional args or keywords
    >>> p[0] + p[1]                     # indexable like a plain tuple
    33
    >>> x, y = p                        # unpack like a regular tuple
    >>> x, y
    (11, 22)
    >>> p.x + p.y                       # fields also accessable by name
    33
    >>> d = p._asdict()                 # convert to a dictionary
    >>> d.x
    11
    >>> d.x = 33                        # assign new value
    >>> Point(**d)                      # convert from a dictionary
    Point(x=33, y=22)
    >>> p._replace(x=100)               # _replace() is like str.replace() but targets named fields
    Point(x=100, y=22)
    """
    # Field specs: either a whitespace/comma separated string of names, or
    # an iterable of names and/or (name, type) pairs.
    if isinstance(fields, str):
        field_names = fields.replace(',', ' ').split()
        annotations = None
    else:
        msg = "recordclass('Name', [(f0, t0), (f1, t1), ...]); each t must be a type"
        annotations = {}
        field_names = []
        for fn in fields:
            if type(fn) is tuple:
                n, t = fn
                n = str(n)
                # _type_check may be unavailable (falsy); only validate
                # declared types when it is present.
                if _type_check:
                    t = _type_check(t, msg)
                annotations[n] = t
                field_names.append(n)
            else:
                field_names.append(str(fn))
    typename = _intern(str(typename))
    if rename:
        # Replace invalid/duplicate names with positional placeholders.
        seen = set()
        for index, name in enumerate(field_names):
            if (not _isidentifier(name) or _iskeyword(name)
                    or name.startswith('_') or name in seen):
                field_names[index] = '_%d' % index
            seen.add(name)
    for name in [typename] + field_names:
        if type(name) != str:
            raise TypeError('Type names and field names must be strings')
        if not _isidentifier(name):
            raise ValueError('Type names and field names must be valid '
                             'identifiers: %r' % name)
        if _iskeyword(name):
            raise ValueError('Type names and field names cannot be a '
                             'keyword: %r' % name)
    seen = set()
    for name in field_names:
        if name.startswith('_') and not rename:
            raise ValueError('Field names cannot start with an underscore: '
                             '%r' % name)
        if name in seen:
            raise ValueError('Encountered duplicate field name: %r' % name)
        seen.add(name)
    if defaults is not None:
        defaults = tuple(defaults)
        if len(defaults) > len(field_names):
            raise TypeError('Got more default values than field names')
    field_names = tuple(map(_intern, field_names))
    n_fields = len(field_names)
    arg_list = ', '.join(field_names)
    repr_fmt = ', '.join(
        _repr_template.format(name=name) for name in field_names)
    # __new__ is generated from a source template so its signature carries
    # the actual field names; **kw is only accepted when usedict is on.
    if usedict:
        new_func_template = """\
def __new__(_cls, {1}, **kw):
    'Create new instance of {0}({1})'
    return _baseclass.__new__(_cls, {1}, **kw)
"""
    else:
        new_func_template = """\
def __new__(_cls, {1}):
    'Create new instance of {0}({1})'
    return _baseclass.__new__(_cls, {1})
"""
    new_func_def = new_func_template.format(typename, arg_list)
    #print(new_func_def)
    namespace = dict(_baseclass=recordobject)
    code = compile(new_func_def, "", "exec")
    eval(code, namespace)
    __new__ = namespace['__new__']
    if defaults is not None:
        __new__.__defaults__ = defaults
    #@classmethod
    def _make(_cls, iterable):
        ob = _cls(*iterable)
        if len(ob) != n_fields:
            raise TypeError('Expected %s arguments, got %s' %
                            (n_fields, len(ob)))
        return ob
    _make.__doc__ = 'Make a new %s object from a sequence or iterable' % typename
    def _replace(_self, **kwds):
        # NOTE: mutates in place and returns self (records are mutable).
        for name, val in kwds.items():
            setattr(_self, name, val)
        return _self
    _replace.__doc__ = 'Return a new %s object replacing specified fields with new values' % typename
    def __repr__(self):
        'Return a nicely formatted representation string'
        args_text = repr_fmt % tuple(self)
        # Instances created with usedict carry extra keywords in __dict__.
        try:
            kw = self.__dict__
        except AttributeError:
            kw = None
        if kw:
            kw_text = repr(kw)
            return self.__class__.__name__ + "(" + args_text + ", **" + kw_text + ")"
        else:
            return self.__class__.__name__ + "(" + args_text + ")"
    def _asdict(self):
        'Return a new OrderedDict which maps field names to their values.'
        return OrderedDict(zip(self.__attrs__, self))
    # Fix up qualnames so the helpers report themselves as members.
    for method in (
            __new__,
            _make,
            _replace,
            __repr__,
            _asdict,
    ):
        method.__qualname__ = typename + "." + method.__name__
    _make = classmethod(_make)
    __options__ = {
        'readonly': readonly,
        'usedict': usedict,
        'gc': gc,
        'weakref': weakref,
        'hashable': hashable,
        'assequence': assequence
    }
    class_namespace = {
        '__doc__': typename + '(' + arg_list + ')',
        '__attrs__': field_names,
        '__new__': __new__,
        '_make': _make,
        '_replace': _replace,
        '__repr__': __repr__,
        '_asdict': _asdict,
        '__options__': __options__,
    }
    _result = structclasstype(typename, (recordobject, ), class_namespace)
    # For pickling to work, the __module__ variable needs to be set to the frame
    # where the class is created.
    if module is None:
        try:
            module = _sys._getframe(1).f_globals.get('__name__', '__main__')
        except (AttributeError, ValueError):
            pass
    if module is not None:
        _result.__module__ = module
    if annotations:
        _result.__annotations__ = annotations
    return _result
def structclass(typename, fields=None, use_dict=False, use_weakref=False,
                hashable=True, sequence=True, mapping=False, readonly=False,
                defaults=None, module=None, gc=False):
    """Create a new ``dataobject``-based struct class named *typename*.

    *fields* may be a string of comma/space separated names, a mapping of
    ``{name: type}``, an iterable of names and/or ``(name, type)`` pairs,
    or None for a field-less class.  *defaults* supplies values for the
    rightmost fields.  When *module* is None the caller's ``__name__`` is
    used so the resulting class pickles correctly.

    Returns the newly created class.
    """
    from ._dataobject import _enable_gc
    from ._dataobject import dataobject
    from .datatype import datatype

    annotations = {}
    if fields is None:
        # Robustness: the default fields=None used to crash on iteration;
        # treat it as "no fields".
        field_names = []
    elif isinstance(fields, str):
        field_names = [fn.strip() for fn in fields.replace(',', ' ').split()]
    else:
        msg = "make_dataclass('Name', [(f0, t0), (f1, t1), ...]); each t must be a type"
        field_names = []
        if isinstance(fields, dict):
            for fn, tp in fields.items():
                tp = _type_check(tp, msg)
                check_name(fn)
                fn = _intern(fn)
                annotations[fn] = tp
                field_names.append(fn)
        else:
            for fn in fields:
                if type(fn) is tuple:
                    fn, tp = fn
                    tp = _type_check(tp, msg)
                    annotations[fn] = tp
                check_name(fn)
                fn = _intern(fn)
                field_names.append(fn)

    n_fields = len(field_names)
    typename = check_name(typename)

    if defaults is not None:
        defaults = tuple(defaults)
        n_defaults = len(defaults)
        if n_defaults > n_fields:
            raise TypeError('Got more default values than fields')

    def _make(_cls, iterable):
        ob = _cls(*iterable)
        if len(ob) != n_fields:
            raise TypeError('Expected %s arguments, got %s' % (n_fields, len(ob)))
        return ob

    _make.__doc__ = 'Make a new %s object from a sequence or iterable' % typename

    def _replace(_self, **kwds):
        for name, val in kwds.items():
            setattr(_self, name, val)
        return _self

    _replace.__doc__ = 'Return a new %s object replacing specified fields with new values' % typename

    def _asdict(self):
        'Return a new OrderedDict which maps field names to their values.'
        return OrderedDict(zip(self.__fields__, self))

    for method in (_make, _replace, _asdict):
        method.__qualname__ = typename + "." + method.__name__

    _make = classmethod(_make)

    # NOTE: the original dict literal listed 'readonly' twice; a Python dict
    # literal silently keeps the last occurrence, so a single entry is
    # equivalent and less confusing.
    options = {
        'readonly': readonly,
        'defaults': defaults,
        'argsonly': False,
        'sequence': sequence,
        'mapping': mapping,
        'iterable': sequence,
        'use_dict': use_dict,
        'use_weakref': use_weakref,
        'hashable': hashable,
        'gc': gc,
    }

    ns = {
        '_make': _make,
        '_replace': _replace,
        '_asdict': _asdict,
        '__doc__': typename + '(' + ', '.join(field_names) + ')',
        '__module__': module,
    }

    if defaults:
        # Expose each default on the class namespace under its field name
        # (rightmost fields only, as with ordinary function defaults).
        for i in range(-n_defaults, 0):
            ns[field_names[i]] = defaults[i]

    # __dict__/__weakref__ are modeled as trailing pseudo-fields when enabled.
    if use_dict and '__dict__' not in field_names:
        field_names.append('__dict__')
    if use_weakref and '__weakref__' not in field_names:
        field_names.append('__weakref__')

    ns['__options__'] = options
    ns['__fields__'] = field_names
    if annotations:
        ns['__annotations__'] = annotations

    bases = (dataobject,)

    # For pickling to work, __module__ must name the frame where the class
    # is created.
    if module is None:
        try:
            module = _sys._getframe(1).f_globals.get('__name__', '__main__')
        except (AttributeError, ValueError):
            pass
    ns['__module__'] = module

    cls = datatype(typename, bases, ns)
    if gc:
        _enable_gc(cls)
    return cls
def recordclass(typename, fields, rename=False, defaults=None,
                readonly=False, hashable=False, gc=False, module=None):
    """Returns a new subclass of array with named fields.

    >>> Point = recordclass('Point', 'x y')
    >>> Point.__doc__                   # docstring for the new class
    'Point(x, y)'
    >>> p = Point(11, y=22)             # instantiate with positional args or keywords
    >>> p[0] + p[1]                     # indexable like a plain tuple
    33
    >>> x, y = p                        # unpack like a regular tuple
    >>> x, y
    (11, 22)
    >>> p.x + p.y                       # fields also accessable by name
    33
    >>> d = p._asdict()                 # convert to a dictionary
    >>> d['x']
    11
    >>> Point(**d)                      # convert from a dictionary
    Point(x=11, y=22)
    >>> p._replace(x=100)               # _replace() is like str.replace() but targets named fields
    Point(x=100, y=22)
    """
    # Choose mutable vs. read-only backing storage.
    if readonly:
        baseclass = immutabletuple
    else:
        baseclass = mutabletuple

    # Validate the field names.  At the user's option, either generate an
    # error message or automatically replace the field name with a valid name.
    if isinstance(fields, str):
        field_names = fields.replace(',', ' ').split()
        annotations = None
    else:
        msg = "recordclass('Name', [(f0, t0), (f1, t1), ...]); each t must be a type"
        annotations = {}
        field_names = []
        for fn in fields:
            if type(fn) is tuple:
                n, t = fn
                n = str(n)
                if _type_check:
                    t = _type_check(t, msg)
                annotations[n] = t
                field_names.append(n)
            else:
                field_names.append(str(fn))

    typename = _intern(str(typename))

    if rename:
        seen = set()
        for index, name in enumerate(field_names):
            if (not _isidentifier(name) or _iskeyword(name)
                    or name.startswith('_') or name in seen):
                field_names[index] = '_%d' % index
            seen.add(name)

    for name in [typename] + field_names:
        if type(name) != str:
            raise TypeError('Type names and field names must be strings')
        if not _isidentifier(name):
            raise ValueError('Type names and field names must be valid '
                             'identifiers: %r' % name)
        if _iskeyword(name):
            raise ValueError('Type names and field names cannot be a '
                             'keyword: %r' % name)

    seen = set()
    for name in field_names:
        if name.startswith('_') and not rename:
            raise ValueError('Field names cannot start with an underscore: '
                             '%r' % name)
        if name in seen:
            raise ValueError('Encountered duplicate field name: %r' % name)
        seen.add(name)

    # (The old unused ``field_defaults`` mapping was dropped — it was never read.)
    if defaults is not None:
        defaults = tuple(defaults)
        if len(defaults) > len(field_names):
            raise TypeError('Got more default values than field names')

    field_names = tuple(map(_intern, field_names))
    n_fields = len(field_names)
    arg_list = ', '.join(field_names)
    repr_fmt = ', '.join(
        _repr_template.format(name=name) for name in field_names)

    if readonly:
        new_func_template = """\
def __new__(_cls, {1}):
    'Create new instance of {0}({1})'
    return _method_new(_cls, ({1}))
"""
        _method_new = immutabletuple.__new__
    else:
        new_func_template = """\
def __new__(_cls, {1}):
    'Create new instance: {0}({1})'
    return _method_new(_cls, {1})
"""
        _method_new = mutabletuple.__new__
    new_func_def = new_func_template.format(typename, arg_list)

    # Execute the template string in a temporary namespace and support
    # tracing utilities by setting a value for frame.f_globals['__name__']
    namespace = dict(_method_new=_method_new)
    code = compile(new_func_def, "", "exec")
    eval(code, namespace)
    __new__ = namespace['__new__']
    if defaults is not None:
        __new__.__defaults__ = defaults
    if annotations:
        __new__.__annotations__ = annotations

    def _make(_cls, iterable):
        ob = _method_new(_cls, *iterable)
        if len(ob) != n_fields:
            raise TypeError('Expected %s arguments, got %s' %
                            (n_fields, len(ob)))
        return ob

    _make.__doc__ = 'Make a new %s object from a sequence or iterable' % typename

    if readonly:
        def _replace(_self, **kwds):
            # BUGFIX: fall back to the field's current value so partial
            # replacement works (e.g. p._replace(x=100), as the docstring
            # advertises).  The old code did kwds.pop(name) with no default
            # and raised KeyError for any field not supplied.
            result = _self._make(kwds.pop(name, val)
                                 for name, val in zip(field_names, _self))
            if kwds:
                raise ValueError('Got unexpected field names: %r' % list(kwds))
            return result
    else:
        def _replace(_self, **kwds):
            for name, val in kwds.items():
                setattr(_self, name, val)
            return _self

    _replace.__doc__ = ('Return a new %s object replacing specified fields '
                        'with new values' % typename)

    def __repr__(self):
        'Return a nicely formatted representation string'
        return self.__class__.__name__ + "(" + (repr_fmt % tuple(self)) + ")"

    def _asdict(self):
        'Return a new OrderedDict which maps field names to their values.'
        return OrderedDict(zip(self.__fields__, self))

    def __getnewargs__(self):
        'Return self as a plain tuple. Used by copy and pickle.'
        return tuple(self)

    def __getstate__(self):
        'Exclude the OrderedDict from pickling'
        return None

    def __reduce__(self):
        'Reduce'
        return type(self), tuple(self)

    if not readonly and hashable:
        # NOTE(review): this __hash__ is defined but never inserted into
        # class_namespace below; presumably the metaclass consults
        # __options__['hashable'] instead — confirm before relying on it.
        def __hash__(self):
            return hash(tuple(self))
        __hash__.__qualname__ = typename + "." + "__hash__"

    for method in (__new__, _make, _replace, __repr__, _asdict,
                   __getnewargs__, __reduce__, __getstate__):
        method.__qualname__ = typename + "." + method.__name__

    _make = classmethod(_make)

    # Accessor descriptors are cached per index and shared between classes.
    if readonly:
        _cache = _itemgeters
    else:
        _cache = _itemgetseters

    class_namespace = {}
    for index, name in enumerate(field_names):
        try:
            item_object = _cache[index]
        except KeyError:
            if readonly:
                item_object = mutabletuple_itemget(index)
            else:
                item_object = mutabletuple_itemgetset(index)
            _cache[index] = item_object
        class_namespace[name] = item_object

    __options__ = {'hashable': hashable, 'gc': gc}
    if readonly:
        # Read-only records are always hashable, like plain tuples.
        __options__['hashable'] = True

    # For pickling to work, __module__ must name the frame where the class
    # is created.
    if module is None:
        try:
            module = _sys._getframe(1).f_globals.get('__name__', '__main__')
        except (AttributeError, ValueError):
            pass

    class_namespace.update({
        '__slots__': (),
        '__doc__': typename + '(' + arg_list + ')',
        '__fields__': field_names,
        '__new__': __new__,
        '_make': _make,
        '_replace': _replace,
        '__repr__': __repr__,
        '_asdict': _asdict,
        '__getnewargs__': __getnewargs__,
        '__getstate__': __getstate__,
        '__reduce__': __reduce__,
        '__dict__': property(_asdict),
        '__options__': __options__,
        '__module__': module,
    })

    _result = recordclasstype(typename, (baseclass, ), class_namespace)

    if annotations:
        _result.__annotations__ = annotations
    return _result
def __getitem__(self, parameters):
    """Parameterize ``Static`` with exactly one type, e.g. ``Static[int]``.

    Raises TypeError (via ``_type_check``) when *parameters* is not a
    single valid type.
    """
    checked = _type_check(parameters, "Static accepts only single type.")
    alias_args = (checked,)
    return _GenericAlias(self, alias_args)
def __getitem__(self, arg):
    """Return ``Union[arg, Tag]`` for ``Joint[arg]``.

    *arg* must be a single type; ``_type_check`` raises TypeError otherwise.
    The class-level ``Tag`` attribute is always joined into the union.
    """
    validated = _type_check(arg, "Joint[t] requires a single type.")
    return Union[validated, self.Tag]
def recordclass(typename, fields, rename=False, defaults=None,
                readonly=False, hashable=False, gc=False, module=None):
    """Returns a new subclass of array with named fields.

    >>> Point = recordclass('Point', 'x y')
    >>> Point.__doc__                   # docstring for the new class
    'Point(x, y)'
    >>> p = Point(11, y=22)             # instantiate with positional args or keywords
    >>> p[0] + p[1]                     # indexable like a plain tuple
    33
    >>> x, y = p                        # unpack like a regular tuple
    >>> x, y
    (11, 22)
    >>> p.x + p.y                       # fields also accessable by name
    33
    >>> d = p._asdict()                 # convert to a dictionary
    >>> d['x']
    11
    >>> Point(**d)                      # convert from a dictionary
    Point(x=11, y=22)
    >>> p._replace(x=100)               # _replace() is like str.replace() but targets named fields
    Point(x=100, y=22)
    """
    # Choose mutable vs. read-only backing storage.
    if readonly:
        baseclass = memoryslotsreadonly
    else:
        baseclass = memoryslots

    # Validate the field names.  At the user's option, either generate an
    # error message or automatically replace the field name with a valid name.
    if isinstance(fields, str):
        field_names = fields.replace(',', ' ').split()
        annotations = None
    else:
        msg = "recordclass('Name', [(f0, t0), (f1, t1), ...]); each t must be a type"
        annotations = {}
        field_names = []
        for fn in fields:
            if type(fn) is tuple:
                n, t = fn
                n = str(n)
                if _type_check:
                    t = _type_check(t, msg)
                annotations[n] = t
                field_names.append(n)
            else:
                field_names.append(str(fn))

    typename = _intern(str(typename))

    if rename:
        seen = set()
        for index, name in enumerate(field_names):
            if (not _isidentifier(name) or _iskeyword(name)
                    or name.startswith('_') or name in seen):
                field_names[index] = '_%d' % index
            seen.add(name)

    for name in [typename] + field_names:
        if type(name) != str:
            raise TypeError('Type names and field names must be strings')
        if not _isidentifier(name):
            raise ValueError('Type names and field names must be valid '
                             'identifiers: %r' % name)
        if _iskeyword(name):
            raise ValueError('Type names and field names cannot be a '
                             'keyword: %r' % name)

    seen = set()
    for name in field_names:
        if name.startswith('_') and not rename:
            raise ValueError('Field names cannot start with an underscore: '
                             '%r' % name)
        if name in seen:
            raise ValueError('Encountered duplicate field name: %r' % name)
        seen.add(name)

    # (The old unused ``field_defaults`` mapping was dropped — it was never read.)
    if defaults is not None:
        defaults = tuple(defaults)
        if len(defaults) > len(field_names):
            raise TypeError('Got more default values than field names')

    field_names = tuple(map(_intern, field_names))
    n_fields = len(field_names)
    arg_list = ', '.join(field_names)
    repr_fmt = ', '.join(
        _repr_template.format(name=name) for name in field_names)

    if readonly:
        new_func_template = """\
def __new__(_cls, {1}):
    'Create new instance of {0}({1})'
    return _method_new(_cls, ({1}))
"""
        _method_new = memoryslotsreadonly.__new__
    else:
        new_func_template = """\
def __new__(_cls, {1}):
    'Create new instance: {0}({1})'
    return _method_new(_cls, {1})
"""
        _method_new = memoryslots.__new__
    new_func_def = new_func_template.format(typename, arg_list)

    # Execute the template string in a temporary namespace and support
    # tracing utilities by setting a value for frame.f_globals['__name__']
    namespace = dict(_method_new=_method_new)
    code = compile(new_func_def, "", "exec")
    eval(code, namespace)
    __new__ = namespace['__new__']
    if defaults is not None:
        __new__.__defaults__ = defaults
    if annotations:
        __new__.__annotations__ = annotations

    def _make(_cls, iterable):
        ob = _method_new(_cls, *iterable)
        if len(ob) != n_fields:
            raise TypeError('Expected %s arguments, got %s' %
                            (n_fields, len(ob)))
        return ob

    _make.__doc__ = 'Make a new %s object from a sequence or iterable' % typename

    if readonly:
        def _replace(_self, **kwds):
            # BUGFIX: fall back to the field's current value so partial
            # replacement works (e.g. p._replace(x=100), as the docstring
            # advertises).  The old code did kwds.pop(name) with no default
            # and raised KeyError for any field not supplied.
            result = _self._make(kwds.pop(name, val)
                                 for name, val in zip(field_names, _self))
            if kwds:
                raise ValueError('Got unexpected field names: %r' % list(kwds))
            return result
    else:
        def _replace(_self, **kwds):
            for name, val in kwds.items():
                setattr(_self, name, val)
            return _self

    _replace.__doc__ = ('Return a new %s object replacing specified fields '
                        'with new values' % typename)

    def __repr__(self):
        'Return a nicely formatted representation string'
        return self.__class__.__name__ + "(" + (repr_fmt % tuple(self)) + ")"

    def _asdict(self):
        'Return a new OrderedDict which maps field names to their values.'
        return OrderedDict(zip(self.__attrs__, self))

    def __getnewargs__(self):
        'Return self as a plain tuple. Used by copy and pickle.'
        return tuple(self)

    def __getstate__(self):
        'Exclude the OrderedDict from pickling'
        return None

    def __reduce__(self):
        'Reduce'
        return type(self), tuple(self)

    if not readonly and hashable:
        # NOTE(review): this __hash__ is defined but never inserted into
        # class_namespace below; presumably the metaclass consults
        # __options__['hashable'] instead — confirm before relying on it.
        def __hash__(self):
            return hash(tuple(self))
        __hash__.__qualname__ = typename + "." + "__hash__"

    for method in (__new__, _make, _replace, __repr__, _asdict,
                   __getnewargs__, __reduce__, __getstate__):
        method.__qualname__ = typename + "." + method.__name__

    _make = classmethod(_make)

    # Accessor descriptors are cached per index and shared between classes.
    if readonly:
        cache = _itemgeters
    else:
        cache = _itemgetseters

    class_namespace = {}
    for index, name in enumerate(field_names):
        try:
            item_object = cache[index]
        except KeyError:
            if readonly:
                item_object = itemget(index)
            else:
                item_object = itemgetset(index)
            cache[index] = item_object
        class_namespace[name] = item_object

    __options__ = {'hashable': hashable, 'gc': gc}
    if readonly:
        # Read-only records are always hashable, like plain tuples.
        __options__['hashable'] = True

    class_namespace.update({
        '__slots__': (),
        '__doc__': typename + '(' + arg_list + ')',
        '__attrs__': field_names,
        '__new__': __new__,
        '_make': _make,
        '_replace': _replace,
        '__repr__': __repr__,
        '_asdict': _asdict,
        '__getnewargs__': __getnewargs__,
        '__getstate__': __getstate__,
        '__reduce__': __reduce__,
        '__dict__': property(_asdict),
        '__options__': __options__,
    })

    _result = recordclasstype(typename, (baseclass,), class_namespace)

    # For pickling to work, the __module__ variable needs to be set to the
    # frame where the class is created.
    if module is None:
        try:
            module = _sys._getframe(1).f_globals.get('__name__', '__main__')
        except (AttributeError, ValueError):
            pass
    if module is not None:
        _result.__module__ = module

    if annotations:
        _result.__annotations__ = annotations
    return _result