def _build_validator(
    self, func: gen.Block, context: ContextT, assertions: AssertionsT
) -> ContextT:
    """Emit validation code for an array-like constraint into ``func``.

    Extends the base validator with (a) optional de-duplication of the value
    and (b) per-item validation when item constraints (``self.values``) are
    declared. Returns the (possibly updated) namespace ``context`` used by
    the generated code.
    """
    # If we don't have a natively unique type and we're supposed to be unique, make it so.
    if self.unique is True and util.origin(self.type) not in {set, frozenset}:
        func.l(f"{self.VALUE} = unique({self.VALUE})", unique=unique)
    # Delegate the shared assertions/structure to the base implementation first.
    context = BaseConstraints._build_validator(self, func, context, assertions)
    # Validate the items if necessary.
    if self.values:
        o = util.origin(self.type)
        itval = "__item_validator"
        # Names injected into the generated function's namespace.
        # NOTE(review): assumes `func.l(..., **ctx)` merges these into the
        # compiled namespace — confirm against gen.Block.l.
        ctx = {
            "unique": unique,
            itval: self.values.validate,
            o.__name__: o,
            "_lazy_repr": util.collectionrepr,
        }
        # Sequences get an index-based field repr; other collections (e.g. sets)
        # fall back to the element itself.
        r = "i" if issubclass(self.type, Sequence) else "x"
        field = f"_lazy_repr({self.FNAME}, {r})"
        # Rebuild the collection, running every element through the item validator.
        func.l(
            f"{self.VALUE} = "
            f"{o.__name__}("
            f"({itval}(x, field={field}) for i, x in enumerate({self.VALUE}))"
            f")",
            **ctx,  # type: ignore
        )
    return context
def iscollectiontype(obj: Type[ObjectT]) -> TypeGuard[Type[Collection]]:
    """Whether the annotation is a :py:class:`typing.Collection` subtype.

    Builtin collections (``list``, ``dict``, ``str``, …) are included.

    Parameters
    ----------
    obj

    Examples
    --------
    >>> import typic
    >>> from typing import Collection, Mapping, NewType
    >>> typic.iscollectiontype(Collection)
    True
    >>> typic.iscollectiontype(Mapping[str, str])
    True
    >>> typic.iscollectiontype(str)
    True
    >>> typic.iscollectiontype(list)
    True
    >>> typic.iscollectiontype(NewType("Foo", dict))
    True
    >>> typic.iscollectiontype(int)
    False
    """
    resolved = util.origin(obj)
    if resolved in _COLLECTIONS:
        return True
    return builtins.issubclass(resolved, Collection)
def isoptionaltype(obj: Type[ObjectT]) -> TypeGuard[Optional]:
    """Whether the annotation is :py:class:`typing.Optional` or equivalent.

    :py:class:`typing.Optional` is an alias for ``typing.Union[<T>, None]``,
    so both spellings are treated as "optional".

    Parameters
    ----------
    obj

    Examples
    --------
    >>> import typic
    >>> from typing import Optional, Union, Dict
    >>> typic.isoptionaltype(Optional[str])
    True
    >>> typic.isoptionaltype(Union[str, None])
    True
    >>> typic.isoptionaltype(Dict[str, None])
    False
    """
    type_args = getattr(obj, "__args__", ())
    if len(type_args) < 2:
        return False
    # noqa: E721 - we don't know what the final arg is, so this is safer
    if type_args[-1] not in {type(None), None}:
        return False
    return util.get_name(util.origin(obj)) in {"Optional", "Union", "Literal"}
def _build_validator(
    self, func: gen.Block, context: ContextT, assertions: AssertionsT
) -> ContextT:
    """Emit validation code for a fixed-length (tuple-like) constraint.

    When positional item constraints are declared (``self.values`` is a
    Sequence), each index gets its own validator; positions without a
    declared constraint pass through unchanged. Otherwise, defers to the
    generic array validator.
    """
    if isinstance(self.values, Sequence):
        # De-duplicate first if requested; a tuple preserves ordering.
        if self.unique is True:
            func.l(
                f"{self.VALUE} = unique({self.VALUE}, ret_type=tuple)",
                unique=unique,
            )
        # Read-only index -> validator mapping for the generated code.
        item_validators = MappingProxyType(
            {i: c.validate for i, c in enumerate(self.values)}
        )
        o = util.origin(self.type)
        itval = "__item_validators"
        ctx = {
            "unique": unique,
            itval: item_validators,
            o.__name__: o,
            "_lazy_repr": util.collectionrepr,
        }
        field = f"_lazy_repr({self.FNAME}, i)"
        # Rebuild the tuple, validating only the positions that have a
        # declared constraint.
        func.l(
            f"{self.VALUE} = "
            f"{o.__name__}("
            f"({itval}[i](x, field={field}) if i in {itval} else x "
            f"for i, x in enumerate({self.VALUE}))"
            f")",
            **ctx,  # type: ignore
        )
        # NOTE(review): this branch returns the local ctx rather than the
        # incoming context — presumably intentional so the caller localizes
        # exactly these names; confirm against _compile_validator.
        return ctx
    # Homogeneous tuple (e.g. Tuple[int, ...]): use the generic array path.
    return ArrayConstraints._build_validator(
        self, func=func, context=context, assertions=assertions
    )
def __post_init__(self):
    """Derive convenience attributes after dataclass initialization."""
    # Whether the wrapped inspect.Parameter carries a real default.
    self.has_default = self.parameter.default is not self.EMPTY
    # Type arguments of the resolved annotation (e.g. (str, int) for Dict[str, int]).
    self.args = util.get_args(self.resolved)
    # The concrete runtime origin of the resolved annotation.
    self.resolved_origin = util.origin(self.resolved)
    # Prefer the typing-level __origin__ (e.g. Union) when present;
    # fall back to the runtime origin otherwise.
    self.generic = getattr(self.resolved, "__origin__", self.resolved_origin)
    # ClassVar detection is done on the *unresolved* annotation, before
    # supertype/NewType unwrapping.
    self.is_class_var = isclassvartype(self.un_resolved)
def ismappingtype(obj: Type[ObjectT]) -> TypeGuard[Type[Mapping]]:
    """Whether the annotation is a subtype of :py:class:`typing.Mapping`.

    Parameters
    ----------
    obj

    Examples
    --------
    >>> import typic
    >>> from typing import Mapping, Dict, DefaultDict, NewType
    >>> typic.ismappingtype(Mapping)
    True
    >>> typic.ismappingtype(Dict[str, str])
    True
    >>> typic.ismappingtype(DefaultDict)
    True
    >>> typic.ismappingtype(dict)
    True
    >>> class MyDict(dict): ...
    ...
    >>> typic.ismappingtype(MyDict)
    True
    >>> class MyMapping(Mapping): ...
    ...
    >>> typic.ismappingtype(MyMapping)
    True
    >>> typic.ismappingtype(NewType("Foo", dict))
    True
    """
    resolved = util.origin(obj)
    # dict-like concrete types (incl. DB row types) short-circuit the ABC check.
    if builtins.issubclass(resolved, (dict, Record, sqlite3.Row)):
        return True
    return builtins.issubclass(resolved, Mapping)
def _from_simple_type(
    t: Type[SimpleT], *, nullable: bool = False, name: str = None, cls: Type = None
) -> SimpleConstraintsT:
    """Build a naive constraints object for a simple (non-generic) type.

    The concrete constraints class is looked up by the runtime origin of *t*.
    ``cls`` is accepted for handler-signature parity but unused here.
    """
    ot = origin(t)
    constraints_cls = cast(
        Type[SimpleConstraintsT], _SIMPLE_CONSTRAINTS.get_by_parent(ot)
    )
    return constraints_cls(nullable=nullable, name=name)
def defname(cls, obj, name: str = None) -> Optional[str]:
    """Get the camelized definition name for an object, if any.

    An explicit ``name`` wins for dict-backed definitions even when the
    derived name would otherwise be ignored.
    """
    chosen = name or getattr(obj, "__name__", None)
    if chosen in cls._IGNORE_NAME:
        chosen = None
    # dict-based schemas keep their explicit name regardless of the filter above.
    if (obj is dict or origin(obj) is dict) and name:
        chosen = name
    if not chosen:
        return None
    return inflection.camelize(chosen)
def _get_constraint_cls(cls: Type) -> Optional[Type[c.ConstraintsT]]:
    """Resolve (and memoize) the constraints class for *cls*.

    Falls back to a subclass scan over the known constraint types; a hit is
    written back into the map so subsequent lookups are O(1). Returns ``None``
    when no constraint type matches.
    """
    try:
        return _CONSTRAINT_TYPE_MAP[cls]  # pragma: nocover
    except KeyError:
        pass
    origin_cls = origin(cls)
    for parent, constraints in _CONSTRAINT_TYPE_MAP.items():
        if issubclass(origin_cls, parent):
            # Memoize: safe because we return before the next iteration step.
            _CONSTRAINT_TYPE_MAP[cls] = constraints
            return constraints
    return None
def get_constraints(
    t: Type[VT],
    *,
    nullable: bool = False,
    name: str = None,
    cls: Optional[Type] = ...,  # type: ignore
) -> ConstraintsProtocolT[VT]:
    """Build (or defer) a constraints protocol for the annotation *t*.

    Handles wrapper unwrapping, recursion (via the module-level ``__stack``),
    forward references, pre-constrained types, enums, abstracts, and finally
    dispatches to a concrete builder by origin type.
    """
    # Strip wrappers (e.g. Optional/Annotated-style), tracking nullability.
    while should_unwrap(t):
        nullable = nullable or isoptionaltype(t)
        t = get_args(t)[0]
    # Self-reference or already in-flight: defer to avoid infinite recursion.
    if t is cls or t in __stack:
        dc = DelayedConstraints(
            t, nullable=nullable, name=name, factory=get_constraints
        )
        return cast(ConstraintsProtocolT, dc)
    if isforwardref(t):
        # Forward refs need an enclosing class for module/locals resolution.
        if cls is ...:  # pragma: nocover
            raise TypeError(
                f"Cannot build constraints for {t} without an enclosing class."
            )
        fdc = ForwardDelayedConstraints(
            t,  # type: ignore
            cls.__module__,
            localns=getattr(cls, "__dict__", {}).copy(),
            nullable=nullable,
            name=name,
            factory=get_constraints,
        )
        return cast(ConstraintsProtocolT, fdc)
    # Type already carries constraints; re-key them if name/nullable differ.
    if isconstrained(t):
        c: ConstraintsProtocolT = t.__constraints__  # type: ignore
        if (c.name, c.nullable) != (name, nullable):
            return dataclasses.replace(c, name=name, nullable=nullable)
        return c
    if isenumtype(t):
        ec = _from_enum_type(t, nullable=nullable, name=name)  # type: ignore
        return cast(ConstraintsProtocolT, ec)
    # Abstract types can only be strictly type-checked.
    if isabstract(t):
        return cast(
            ConstraintsProtocolT, _from_strict_type(t, nullable=nullable, name=name)
        )
    if isnamedtuple(t) or istypeddict(t):
        handler = _from_class
    else:
        ot = origin(t)
        if ot in {type, abc.Callable}:
            handler = _from_strict_type  # type: ignore
            t = ot
        else:
            handler = _CONSTRAINT_BUILDER_HANDLERS.get_by_parent(ot, _from_class)  # type: ignore
    # Guard against recursive loops while the handler runs.
    # NOTE(review): `__stack.clear()` (not `.remove(t)`) empties the guard for
    # sibling branches too — presumably intentional since this is the
    # top-level exit; confirm for nested invocations.
    __stack.add(t)
    c = handler(t, nullable=nullable, name=name, cls=cls)
    __stack.clear()
    return c
def isfinal(obj: Type[ObjectT]) -> bool:
    """Whether the annotation is :py:class:`typing.Final`.

    Examples
    --------
    >>> import typic
    >>> from typing import NewType
    >>> from typic.compat import Final
    >>> typic.isfinal(Final[str])
    True
    >>> typic.isfinal(NewType("Foo", Final[str]))
    True
    """
    resolved = util.origin(obj)
    return resolved is Final
def validator(self) -> ValidatorT:
    """Accessor for the generated multi-validator.

    Validators are keyed by the origin-type of :py:class:`BaseConstraints`
    inheritors. If a value does not match any origin-type, as reported by
    :py:func:`typic.origin`, then we will report the value as invalid.
    """
    func_name = self._get_validator_name()
    # One constraint per candidate type.
    vmap = util.TypeMap({c.type: c for c in self.constraints})
    # Globals for the generated function: the tag attribute name (if any)
    # and a sentinel for "no tag value present".
    ns = {"tag": self.tag and self.tag.tag, "empty": util.empty}
    with gen.Block(ns) as main:
        with self.define(main, func_name) as f:
            if not vmap:
                # No constraints at all -> everything validates.
                f.l(f"return True, {self.VALUE}")
            else:
                f.l(f"{self.VALTNAME} = {self.VALUE}.__class__")
                if self.nullable:
                    with f.b(f"if {self.VALUE} is None:") as b:
                        b.l(f"return True, {self.VALUE}")
                if self.tag:
                    # Tagged union: dispatch on the tag value rather than type.
                    validators = {
                        value: vmap[t] for value, t in self.tag.types_by_values
                    }
                    f.namespace.update(vmap=validators)
                    # Mappings read the tag via .get; objects via getattr.
                    with f.b(
                        f"if issubclass({self.VALTNAME}, Mapping):",
                        Mapping=collections.abc.Mapping,
                    ) as b:
                        b.l(f"tag_value = {self.VALUE}.get(tag, empty)")
                    with f.b("else:") as b:
                        b.l(f"tag_value = getattr({self.VALUE}, tag, empty)")
                    f.l(
                        f"valid, {self.VALUE} = "
                        f"(True, vmap[tag_value].validate({self.VALUE}, field=field)) "
                        f"if tag_value in vmap else (False, {self.VALUE})"
                    )
                else:
                    # Untagged union: dispatch on the value's (origin) type.
                    vmap = util.TypeMap(
                        {util.origin(t): v for t, v in vmap.items()}
                    )
                    f.namespace.update(vmap=vmap)
                    f.l(f"v = vmap.get_by_parent({self.VALTNAME}, None)")
                    f.l(
                        f"valid, {self.VALUE} = (True, v.validate(value, field=field)) "
                        f"if v else (False, value)"
                    )
                f.l(f"return valid, {self.VALUE}")
    validator = main.compile(name=func_name)
    return validator  # type: ignore
def _from_array_type(
    t: Type[Array], *, nullable: bool = False, name: str = None, cls: Type = None
) -> ArrayConstraintsT:
    """Build constraints for an array-like annotation (list, set, tuple, …).

    Un-parameterized arrays yield a naive constraint. Fixed-length tuples
    (no Ellipsis) resolve one constraint per position; all other arrays
    resolve a single multi-item constraint.
    """
    type_args = get_args(t)
    array_cls = cast(
        Type[ArrayConstraintsT], _ARRAY_CONSTRAINTS_BY_TYPE.get_by_parent(origin(t))
    )
    # No parameters -> nothing to constrain per-item.
    if not type_args:
        return array_cls(nullable=nullable, name=name)
    # Tuple[int, str] is positional; Tuple[int, ...] and other arrays are not.
    fixed_tuple = array_cls is TupleConstraints and ... not in type_args
    items = _resolve_args(
        *type_args, cls=cls, nullable=nullable, multi=not fixed_tuple
    )
    return array_cls(nullable=nullable, values=items, name=name)  # type: ignore
def isstrict(obj: Type[ObjectT]) -> TypeGuard[typic.Strict]:
    """Whether the annotation is marked as :py:class:`typic.Strict`.

    Parameters
    ----------
    obj

    Examples
    --------
    >>> import typic
    >>> from typing import NewType
    >>> typic.isstrict(typic.Strict[str])
    True
    >>> typic.isstrict(NewType("Foo", typic.Strict[str]))
    True
    """
    resolved = util.origin(obj)
    return resolved is strict.Strict
def isreadonly(obj: Type[ObjectT]) -> TypeGuard[typic.common.ReadOnly]:
    """Whether the annotation is marked as :py:class:`typic.ReadOnly`.

    Parameters
    ----------
    obj

    Examples
    --------
    >>> import typic
    >>> from typing import NewType
    >>> typic.isreadonly(typic.ReadOnly[str])
    True
    >>> typic.isreadonly(NewType("Foo", typic.ReadOnly[str]))
    True
    """
    resolved = util.origin(obj)
    return resolved is typic.common.ReadOnly
def isdecimaltype(obj: Type[ObjectT]) -> TypeGuard[Type[decimal.Decimal]]:
    """Whether the annotation is a :py:class:`decimal.Decimal` (sub)type.

    Parameters
    ----------
    obj

    Examples
    --------
    >>> import typic
    >>> import decimal
    >>> from typing import NewType
    >>> typic.isdecimaltype(decimal.Decimal)
    True
    >>> typic.isdecimaltype(NewType("Foo", decimal.Decimal))
    True
    """
    resolved = util.origin(obj)
    return builtins.issubclass(resolved, decimal.Decimal)
def istimedeltatype(obj: Type[ObjectT]) -> TypeGuard[Type[datetime.timedelta]]:
    """Whether the annotation is a :py:class:`datetime.timedelta` (sub)type.

    Parameters
    ----------
    obj

    Examples
    --------
    >>> import typic
    >>> import datetime
    >>> from typing import NewType
    >>> typic.istimedeltatype(datetime.timedelta)
    True
    >>> typic.istimedeltatype(NewType("Foo", datetime.timedelta))
    True
    """
    resolved = util.origin(obj)
    return builtins.issubclass(resolved, datetime.timedelta)
def _compile_validator(self) -> ValidatorT:
    """Compile the validator function for this constraint.

    Generates source that (1) short-circuits on Any/empty, (2) handles
    nullability and the instance check, then (3) defers to
    ``_build_validator`` for constraint-specific assertions before compiling
    the final callable.
    """
    func_name = self._get_validator_name()
    origin = util.origin(self.type)
    type_name = self.type_name
    self._check_syntax()
    assertions = self._get_assertions()
    # Seed namespace for the generated code: the concrete type by name.
    context: ContextT = {type_name: self.type}
    with gen.Block() as main:
        with self.define(main, func_name) as f:
            # This is a signal that -*-anything can happen...-*-
            if origin in {Any, Signature.empty}:
                f.l(f"return True, {self.VALUE}")
                # Nothing to validate; compile and bail out early.
                return main.compile(name=func_name)
            f.l(f"{self.VALTNAME} = {type_name!r}")
            # Field label defaults to the type name when the caller gave none.
            f.l(f"{self.FNAME} = {self.VALTNAME} if field is None else field")
            # Short-circuit validation if the value isn't the correct type.
            if self.instancecheck == InstanceCheck.IS:
                # IS-mode: a positive isinstance means immediately valid.
                line = f"if isinstance({self.VALUE}, {type_name}):"
                if self.nullable:
                    line = (
                        f"if {self.VALUE} in {self.NULLABLES} "
                        f"or isinstance({self.VALUE}, {type_name}):"
                    )
                with f.b(line, **context) as b:  # type: ignore
                    b.l(f"return True, {self.VALUE}")
            else:
                # NOT-mode: a failed isinstance means immediately invalid.
                if self.nullable:
                    with f.b(f"if {self.VALUE} in {self.NULLABLES}:") as b:
                        b.l(f"return True, {self.VALUE}")
                line = f"if not isinstance({self.VALUE}, {type_name}):"
                with f.b(line, **context) as b:  # type: ignore
                    b.l(f"return False, {self.VALUE}")
            # Constraint-specific checks; may extend the namespace.
            context = self._build_validator(
                f, context=context, assertions=assertions
            )
            f.namespace.update(context)
            f.localize_context(*context)
            f.l(f"return True, {self.VALUE}")
    return main.compile(name=func_name)
def isuuidtype(obj: Type[ObjectT]) -> TypeGuard[Type[uuid.UUID]]:
    """Whether the annotation is a :py:class:`uuid.UUID` (sub)type.

    Parameters
    ----------
    obj

    Examples
    --------
    >>> import typic
    >>> import uuid
    >>> from typing import NewType
    >>> typic.isuuidtype(uuid.UUID)
    True
    >>> class MyUUID(uuid.UUID): ...
    ...
    >>> typic.isuuidtype(MyUUID)
    True
    >>> typic.isuuidtype(NewType("Foo", uuid.UUID))
    True
    """
    resolved = util.origin(obj)
    return builtins.issubclass(resolved, uuid.UUID)
def protocols(self, obj, *, strict: bool = False) -> SerdeProtocolsT:
    """Get a mapping of param/attr name -> :py:class:`SerdeProtocol`

    Parameters
    ----------
    obj
        The class or callable object you wish to extract resolved annotations from.
    strict
        Whether to validate instead of coerce.

    Examples
    --------
    >>> import typic
    >>>
    >>> @typic.klass
    ... class Foo:
    ...     bar: str
    ...
    >>> protocols = typic.protocols(Foo)

    See Also
    --------
    :py:class:`SerdeProtocol`
    """
    # Instances are resolved via their class.
    if not any(
        (inspect.ismethod(obj), inspect.isfunction(obj), inspect.isclass(obj))
    ):
        obj = obj.__class__
    hints = util.cached_type_hints(obj)
    params = util.safe_get_params(obj)
    fields: Mapping[str, dataclasses.Field] = {}
    if dataclasses.is_dataclass(obj):
        fields = {f.name: f for f in dataclasses.fields(obj)}
    ann = {}
    # Union of signature parameters and type hints: attributes may appear
    # in one but not the other.
    for name in params.keys() | hints.keys():
        param = params.get(name)
        hint = hints.get(name)
        field = fields.get(name)
        annotation = hint or param.annotation  # type: ignore
        annotation = util.resolve_supertype(annotation)
        # Synthesize a parameter for hint-only attributes.
        param = param or inspect.Parameter(
            name,
            inspect.Parameter.POSITIONAL_OR_KEYWORD,
            default=EMPTY,
            annotation=hint or annotation,
        )
        # Dataclass default_factory sentinel has no useful repr — treat as empty.
        if repr(param.default) == "<factory>":
            param = param.replace(default=EMPTY)
        if checks.isclassvartype(annotation):
            val = getattr(obj, name)
            # Bare ClassVar: parameterize with the runtime value's type.
            if annotation is ClassVar:
                annotation = annotation[type(val)]
            default = val
            param = param.replace(default=default)
        # Prefer a dataclass field default when the parameter has none.
        if (
            field
            and field.default is not dataclasses.MISSING
            and param.default is EMPTY
        ):
            # init=False fields can't be set by callers -> mark read-only.
            if field.init is False and util.origin(annotation) is not ReadOnly:
                annotation = ReadOnly[annotation]  # type: ignore
            param = param.replace(default=field.default)
        # Unhashable defaults (e.g. lists) are replaced with the Ellipsis
        # sentinel so downstream caching keeps working.
        if not checks.ishashable(param.default):
            param = param.replace(default=...)
        resolved = self.resolve(
            annotation,
            parameter=param,
            name=name,
            is_strict=strict,
            namespace=obj,
        )
        ann[name] = resolved
    try:
        setattr(obj, TYPIC_ANNOS_NAME, ann)
    # We wrapped a bound method, or
    # are wrapping a static-/classmethod
    # after they were wrapped with @static/class
    except (AttributeError, TypeError):
        pass
    return ann
def annotation(
    self,
    annotation: Type[ObjectT],
    name: str = None,
    parameter: Optional[inspect.Parameter] = None,
    is_optional: bool = None,
    is_strict: StrictModeT = None,
    flags: "SerdeFlags" = None,
    default: Any = EMPTY,
    namespace: Type = None,
) -> AnnotationT:
    """Get a :py:class:`Annotation` for this type.

    Unlike a :py:class:`ResolvedAnnotation`, this does not provide access to a
    serializer/deserializer/validator protocol.

    May return a :py:class:`ForwardDelayedAnnotation` or
    :py:class:`DelayedAnnotation` for forward references and recursive types.
    """
    # Flags declared on the type itself win over the passed-in flags.
    flags = cast(
        "SerdeFlags", getattr(annotation, SERDE_FLAGS_ATTR, flags or SerdeFlags())
    )
    if parameter is None:
        parameter = inspect.Parameter(
            name or "_",
            inspect.Parameter.POSITIONAL_OR_KEYWORD,
            annotation=annotation,
            default=default if checks.ishashable(default) else ...,
        )
    # Check for the super-type
    non_super = util.resolve_supertype(annotation)
    # Note, this may be a generic, like Union.
    orig = util.origin(annotation)
    use = non_super
    # Get the unfiltered args
    args = getattr(non_super, "__args__", None)
    # Set whether this is optional/strict
    is_optional = (
        is_optional
        or checks.isoptionaltype(non_super)
        or parameter.default in self.OPTIONALS
    )
    is_strict = is_strict or checks.isstrict(non_super) or self.STRICT
    is_static = util.origin(use) not in self._DYNAMIC
    is_literal = checks.isliteral(use)
    # Determine whether we should use the first arg of the annotation
    while checks.should_unwrap(use) and args:
        is_optional = is_optional or checks.isoptionaltype(use)
        is_strict = is_strict or checks.isstrict(use)
        if is_optional and len(args) > 2:
            # We can't resolve this annotation.
            is_static = False
            use = Union[args[:-1]]
            break
        # Note that we don't re-assign `orig`.
        # This is intentional.
        # Special forms are needed for building the downstream validator.
        # Callers should be aware of this and perhaps use `util.origin` elsewhere.
        non_super = util.resolve_supertype(args[0])
        use = non_super
        args = util.get_args(use)
        is_static = util.origin(use) not in self._DYNAMIC
        is_literal = is_literal or checks.isliteral(use)
    # Only allow legal parameters at runtime, this has implementation implications.
    if is_literal:
        args = util.get_args(use)
        if any(not isinstance(a, self.LITERALS) for a in args):
            raise TypeError(
                f"PEP 586: Unsupported parameters for 'Literal' type: {args}. "
                "See https://www.python.org/dev/peps/pep-0586/"
                "#legal-parameters-for-literal-at-type-check-time "
                "for more information."
            )
    # The type definition doesn't exist yet.
    if use.__class__ is ForwardRef:
        module, localns = self.__module__, {}
        # Ideally we have a namespace from a parent class/function to the field
        if namespace:
            module = namespace.__module__
            localns = getattr(namespace, "__dict__", {})
        return ForwardDelayedAnnotation(
            ref=use,
            resolver=self,
            _name=name,
            parameter=parameter,
            is_optional=is_optional,
            is_strict=is_strict,
            flags=flags,
            default=default,
            module=module,
            localns=localns,
        )
    # The type definition is recursive or within a recursive loop.
    elif use is namespace or use in self.__stack:
        # If detected via stack, we can remove it now.
        # Otherwise we'll cause another recursive loop.
        if use in self.__stack:
            self.__stack.remove(use)
        return DelayedAnnotation(
            type=use,
            resolver=self,
            _name=name,
            parameter=parameter,
            is_optional=is_optional,
            is_strict=is_strict,
            flags=flags,
            default=default,
        )
    # Otherwise, add this type to the stack to prevent a recursive loop from elsewhere.
    if not checks.isstdlibtype(use):
        self.__stack.add(use)
    # Static, non-literal annotations get a full serde configuration;
    # dynamic/literal ones get a bare config from the flags.
    serde = (
        self._get_configuration(util.origin(use), flags)
        if is_static and not is_literal
        else SerdeConfig(flags)
    )
    anno = Annotation(
        resolved=use,
        origin=orig,
        un_resolved=annotation,
        parameter=parameter,
        optional=is_optional,
        strict=is_strict,
        static=is_static,
        serde=serde,
    )
    anno.translator = functools.partial(self.translator.factory, anno)  # type: ignore
    return anno
def get_field(
    self,
    protocol: SerdeProtocol,
    *,
    ro: bool = None,
    wo: bool = None,
    name: str = None,
    parent: Type = None,
) -> "SchemaFieldT":
    """Get a field definition for a JSON Schema.

    Results are memoized per-annotation; recursive definitions already on
    the stack are emitted as ``$ref`` pointers instead of being re-built.
    """
    # Recursion guard: emit a reference to the in-progress definition.
    if protocol.annotation in self.__stack:
        name = self.defname(protocol.annotation.resolved_origin, name)
        return self._check_optional(
            protocol.annotation, Ref(f"#/definitions/{name}"), ro, wo, name
        )
    anno = protocol.annotation
    if anno in self.__cache:
        return self.__cache[anno]
    # Get the default value
    # `None` gets filtered out down the line. this is okay.
    # If a field isn't required an empty default is functionally the same
    # as a default to None for the JSON schema.
    default = anno.parameter.default if anno.has_default else None
    # `use` is the based annotation we will use for building the schema
    use = getattr(anno.origin, "__parent__", anno.origin)
    # This is a flat optional, handle it separately from the Union block.
    use = anno.resolved if isuniontype(use) and not anno.args else use
    # If there's not a static annotation, short-circuit the rest of the checks.
    schema: SchemaFieldT
    if use in {Any, anno.EMPTY}:
        schema = self._check_optional(anno, UndeclaredSchemaField(), ro, wo, name)
        self.__cache[anno] = schema
        return schema
    # Unions are `anyOf`, get a new field for each arg and return.
    # {'type': ['string', 'integer']} ==
    # {'anyOf': [{'type': 'string'}, {'type': 'integer'}]}
    # We don't care about syntactic sugar if it's functionally the same.
    if isuniontype(use):
        return self._handle_union(
            anno=anno, ro=ro, wo=wo, name=name, parent=parent
        )
    self.__stack.add(anno)
    # Check if this should be ro/wo
    if use in {ReadOnly, WriteOnly, Final}:
        ro = (use in {ReadOnly, Final}) or None
        wo = (use is WriteOnly) or None
        use = origin(anno.resolved)
        use = getattr(use, "__parent__", use)
    # Check for an enumeration
    enum_ = None
    # Functionally, literals are enumerations.
    if isliteral(use):
        enum_ = (*(a for a in anno.args if a is not None),)
        ts = {a.__class__ for a in enum_}
        use = Literal
        # Homogeneous literal values collapse to their concrete type.
        if len(ts) == 1:
            use = ts.pop()
    elif issubclass(use, enum.Enum):
        use = cast(Type[enum.Enum], use)
        enum_ = tuple(x.value for x in use)
        use = getattr(use._member_type_, "__parent__", use._member_type_)  # type: ignore
    # If this is ro with a default, we can consider this a const
    # Which is an enum with a single value -
    # we don't currently honor `{'const': <val>}` since it's just syntactic sugar.
    if ro and default:
        enum_ = (default.value if isinstance(default, enum.Enum) else default,)
    schema = self._build_field(
        use=use,
        protocol=protocol,
        parent=parent,
        enum_=enum_,
        default=default,
        ro=ro,
        wo=wo,
        name=name,
    )
    self.__cache[anno] = schema
    # NOTE(review): clearing the whole stack (vs removing `anno`) assumes this
    # is the outermost frame — confirm for deeply nested definitions.
    self.__stack.clear()
    return schema
def isuniontype(obj: Type[ObjectT]) -> TypeGuard[Union]:
    """Whether the annotation is a union (``typing.Union`` or PEP 604 ``X | Y``)."""
    origin_name = util.get_name(util.origin(obj))
    return origin_name in {"Union", "UnionType"}
def istupletype(obj: Type[ObjectT]) -> TypeGuard[Type[tuple]]:
    """Test whether the annotation is a :py:class:`tuple` (sub)type.

    Parameters
    ----------
    obj

    Examples
    --------
    >>> import typic
    >>> from typing import NamedTuple, Tuple
    >>> typic.istupletype(tuple)
    True
    >>> typic.istupletype(Tuple[str, ...])
    True
    """
    obj = util.origin(obj)
    # Use builtins.issubclass explicitly, consistent with the other
    # predicates in this module; keep the identity fast-path for `tuple`.
    return obj is tuple or builtins.issubclass(obj, tuple)
def isiteratortype(obj: Type[ObjectT]) -> TypeGuard[Type[Iterator]]:
    """Whether the annotation is a subclass of :py:class:`typing.Iterator`."""
    resolved = util.origin(obj)
    return builtins.issubclass(resolved, Iterator)
def isliteral(obj: Type) -> TypeGuard[Literal]:
    """Whether the annotation is a :py:class:`typing.Literal`.

    Unresolved forward references are matched textually on their source string.
    """
    if util.origin(obj) is Literal:
        return True
    return (
        obj.__class__ is ForwardRef
        and obj.__forward_arg__.startswith("Literal")
    )