def _set_item_validator_pattern_constraints(self, loop: gen.Block, func_name: str):
    """Emit pattern-driven per-item validation lines into the generated loop.

    Two optional pieces are written: value validation for keys matching a
    constraint pattern, and a hard requirement that every key match
    ``self.key_pattern`` (breaking out of the loop on the first miss).
    """
    # Item constraints based upon key-pattern
    pattern_constr_name = f"{func_name}_pattern_constraints"
    if self.patterns:
        # Generated line re-validates the current value against the pattern
        # constraints keyed by the current key.
        loop.l(
            f"{self.RETY} = "
            f"validate_pattern_constraints"
            f"({pattern_constr_name}, {self.X}, {self.Y})",
            level=None,
            **{
                "validate_pattern_constraints": validate_pattern_constraints,
                pattern_constr_name: self.patterns,
            },
        )
    # Required key pattern
    if self.key_pattern:
        key_pattern_name = f"{func_name}_key_pattern"
        loop.l(
            f"valid = bool({key_pattern_name}.match({self.X}))",
            level=None,
            **{key_pattern_name: self.key_pattern},
        )
        # First non-matching key aborts the generated loop.
        with loop.b("if not valid:") as b:
            b.l("break")
def _build_collection_des(
    self,
    func: gen.Block,
    anno_name: str,
    annotation: "Annotation",
    namespace: Type = None,
):
    """Emit deserialization for a generic collection annotation.

    For a parametrized collection (e.g. ``List[int]``), resolve a
    deserializer for the item type and map it over the iterated input;
    otherwise simply coerce the iterable into the target type.
    """
    item_des = None
    it_name = f"{anno_name}_item_des"
    iterate = f"iterate({self.VNAME}, values=True)"
    line = f"{self.VNAME} = {anno_name}({iterate})"
    if annotation.args:
        item_type = annotation.args[0]
        item_des = self.resolver.resolve(item_type, flags=annotation.serde.flags, namespace=namespace)
        # NOTE(review): `parent` is referenced by the generated line but is
        # not added to the namespace below — presumably supplied elsewhere
        # in the generated function's scope; confirm.
        line = (f"{self.VNAME} = "
                f"{anno_name}({it_name}(x) for x in parent({iterate}))")
    else:
        # No item annotation: a plain isinstance short-circuit suffices.
        self._add_type_check(func, anno_name)
    func.l(
        line,
        level=None,
        **{
            it_name: item_des,
            "Collection": abc.Collection,
            "iterate": self.resolver.iterate,
        },
    )
def _build_validator(self, func: gen.Block, context: Dict[str, Any], assertions: AssertionsT) -> ContextT:
    """Build the mapping-specific portion of the generated validator.

    Seeds the validator namespace (key dependencies, required/defined
    keys, lazy repr helper), guards non-mapping input when the declared
    type is not itself a Mapping, then defers to the base builder and the
    per-item validator.
    """
    if self.key_dependencies:
        self._get_key_dependencies(assertions, context)
    # Mappings get key-indexed reprs; other types get joined reprs.
    _lazy_repr = (util.collectionrepr if issubclass(self.type, Mapping) else util.joinedrepr)
    context.update(Mapping=Mapping, _lazy_repr=_lazy_repr)
    if self.required_keys:
        context["required"] = self.required_keys
    defined_keys = (self.required_keys or set()) | (self.items or {}).keys()
    if defined_keys:
        context["defined"] = frozenset(defined_keys)
    # If the declared type isn't a Mapping, reject non-mapping input early.
    if not issubclass(self.type, Mapping):
        with func.b(f"if not isinstance({self.VALUE}, Mapping):") as b:
            b.l(f"return False, {self.VALUE}")
    func.l(f"valkeys = {{*{self.VALUE}}}")
    context = BaseConstraints._build_validator(self, func=func, context=context, assertions=assertions)
    items_context = self._build_item_validator(func)
    if items_context:
        context.update(items_context)
    return context
def _build_validator(self, func: gen.Block, context: ContextT, assertions: AssertionsT) -> ContextT:
    """Build the array-specific portion of the generated validator.

    De-duplicates the value when required (unless the type is natively
    unique), runs the base builder, then wraps every item in the item
    validator when one is configured.
    """
    # If we don't have a natively unique type and we're supposed to be unique, make it so.
    if self.unique is True and util.origin(
            self.type) not in {set, frozenset}:
        func.l(f"{self.VALUE} = unique({self.VALUE})", unique=unique)
    context = BaseConstraints._build_validator(self, func, context, assertions)
    # Validate the items if necessary.
    if self.values:
        o = util.origin(self.type)
        itval = "__item_validator"
        ctx = {
            "unique": unique,
            itval: self.values.validate,
            o.__name__: o,
            "_lazy_repr": util.collectionrepr,
        }
        # Sequences report the index in the field repr; other collections
        # report the item itself.
        r = "i" if issubclass(self.type, Sequence) else "x"
        field = f"_lazy_repr({self.FNAME}, {r})"
        func.l(
            f"{self.VALUE} = "
            f"{o.__name__}("
            f"({itval}(x, field={field}) for i, x in enumerate({self.VALUE}))"
            f")",
            **ctx,  # type: ignore
        )
    return context
def _build_tuple_des(
    self,
    func: gen.Block,
    anno_name: str,
    annotation: "Annotation",
    namespace: Type = None,
):
    """Emit deserialization for a fixed-length tuple annotation.

    A tuple with concrete per-position args (not ``Tuple[T, ...]``) gets a
    positional deserializer map; the variadic/unparametrized case falls
    back to generic collection deserialization.
    """
    if annotation.args and annotation.args[-1] is not ...:
        # One resolved deserializer per position.
        item_des = {
            ix: self.resolver.resolve(t, flags=annotation.serde.flags, namespace=namespace)
            for ix, t in enumerate(annotation.args)
        }
        item_des_name = "item_des"
        iterate = f"iterate({self.VNAME}, values=True)"
        # NOTE: the generated `)if` has no separating space — valid Python,
        # purely cosmetic in the generated code.
        line = (
            f"{anno_name}"
            f"({item_des_name}[ix](v) for ix, v in enumerate({iterate})"
            f"if ix in {item_des_name})")
        func.l(
            f"{self.VNAME} = {line}",
            level=None,
            **{
                item_des_name: item_des,
                "iterate": self.resolver.iterate,
            },
        )
    else:
        self._build_collection_des(
            func=func,
            anno_name=anno_name,
            annotation=annotation,
            namespace=namespace,
        )
def _build_pattern_des(self, func: gen.Block, anno_name: str):
    """Emit a line that coerces textual input into a compiled pattern.

    Values already of the target pattern type pass through untouched;
    anything else is handed to ``re.compile``.
    """
    line = (
        f"{self.VNAME} = {self.VNAME} "
        f"if issubclass({self.VTYPE}, {anno_name}) "
        f"else __re_compile({self.VNAME})"
    )
    func.l(line, __re_compile=re.compile)
def _build_validator(self, func: gen.Block, context: ContextT, assertions: AssertionsT) -> ContextT:
    """Build the validator for positionally-constrained (tuple-like) values.

    When ``self.values`` is a sequence of constraints, each position gets
    its own validator; otherwise validation is delegated to the array
    builder.
    """
    if isinstance(self.values, Sequence):
        if self.unique is True:
            func.l(
                f"{self.VALUE} = unique({self.VALUE}, ret_type=tuple)",
                unique=unique,
            )
        # Position -> validator, frozen so the generated code can't mutate it.
        item_validators = MappingProxyType(
            {i: c.validate for i, c in enumerate(self.values)})
        o = util.origin(self.type)
        itval = "__item_validators"
        ctx = {
            "unique": unique,
            itval: item_validators,
            o.__name__: o,
            "_lazy_repr": util.collectionrepr,
        }
        field = f"_lazy_repr({self.FNAME}, i)"
        # Items without a positional validator pass through unchanged.
        func.l(
            f"{self.VALUE} = "
            f"{o.__name__}("
            f"({itval}[i](x, field={field}) if i in {itval} else x "
            f"for i, x in enumerate({self.VALUE}))"
            f")",
            **ctx,  # type: ignore
        )
        # NOTE(review): returns the freshly-built ctx, discarding entries in
        # the incoming `context` — confirm callers don't rely on those here.
        return ctx
    return ArrayConstraints._build_validator(self, func=func, context=context, assertions=assertions)
def _add_eval(self, func: gen.Block):
    """Emit a guarded safe-eval of textual input, then refresh the vtype.

    The generated line only attempts evaluation for ``str``/``bytes``
    input; any other value passes through untouched.
    """
    eval_line = (
        f"_, {self.VNAME} = __eval({self.VNAME}) "
        f"if isinstance({self.VNAME}, (str, bytes)) "
        f"else (False, {self.VNAME})"
    )
    func.l(eval_line, __eval=safe_eval)
    # The value's class may have changed above — re-derive it.
    self._add_vtype(func)
def _build_assertions(self, func: gen.Block, assertions: AssertionsT):
    """Emit the size computation (when needed) ahead of the base assertions."""
    if not assertions:
        return
    # Only get the size if we have to.
    if (self.max_items, self.min_items) != (None, None):
        func.l(f"size = len({self.VALUE})")
    BaseConstraints._build_assertions(self, func=func, assertions=assertions)
def _build_mapping_des(
    self,
    func: gen.Block,
    anno_name: str,
    annotation: "Annotation",
    namespace: Type = None,
):
    """Emit deserialization for a mapping annotation.

    Resolves key/value deserializers for parametrized mappings, installs a
    default factory for ``defaultdict`` targets, and applies any field-name
    remapping (``fields_in``) while re-keying/re-valuing the input.
    """
    key_des, item_des = None, None
    args = annotation.args
    if args:
        args = cast(Tuple[Type, Type], args)
        key_type, item_type = args
        key_des = self.resolver.resolve(key_type, flags=annotation.serde.flags, namespace=namespace)
        item_des = self.resolver.resolve(item_type, flags=annotation.serde.flags, namespace=namespace)
    if issubclass(annotation.resolved_origin, defaultdict):
        # defaultdict can't be called with just an iterable of pairs —
        # bind the default factory up front.
        factory = self._get_default_factory(annotation)
        func.namespace[anno_name] = functools.partial(defaultdict, factory)
    kd_name = f"{anno_name}_key_des"
    it_name = f"{anno_name}_item_des"
    iterate = f"iterate({self.VNAME})"
    line = f"{anno_name}({iterate})"
    if args or annotation.serde.fields_in:
        x, y = "x", "y"
        # If there are args & field mapping, get the correct field name
        # AND deserialize both the key and the value.
        if args and annotation.serde.fields_in:
            x = f"{kd_name}(fields_in.get(x, x))"
            # BUGFIX: the value deserializer was only applied in the
            # args-only branch, so values escaped deserialization when a
            # field mapping was also present.
            y = f"{it_name}(y)"
        # If there is only a field mapping, get the correct name for the field.
        elif annotation.serde.fields_in:
            x = "fields_in.get(x, x)"
        # If there are only serializers, get the serialized value
        elif args:
            x = f"{kd_name}(x)"
            y = f"{it_name}(y)"
        line = f"{anno_name}({{{x}: {y} for x, y in {iterate}}})"
    # If we don't have nested annotations, we can short-circuit on valid inputs
    else:
        self._add_type_check(func, anno_name)
    # Write the lines.
    func.l(
        f"{self.VNAME} = {line}",
        level=None,
        **{
            kd_name: key_des,
            it_name: item_des,
            "Mapping": abc.Mapping,
            "iterate": self.resolver.iterate,
        },
    )
def _build_builtin_des(
    self,
    func: gen.Block,
    anno_name: str,
    annotation: "Annotation",
):
    """Dispatch builtin-type deserialization to the matching builder.

    Text types are checked first — str/bytes are themselves collections,
    so the order of these checks matters.
    """
    origin = annotation.resolved_origin
    if issubclass(origin, (str, bytes)):
        self._build_text_des(func, anno_name, annotation)
        return
    if checks.ismappingtype(origin):
        self._build_mapping_des(func, anno_name, annotation)
        return
    if checks.iscollectiontype(origin):
        self._build_collection_des(func, anno_name, annotation)
        return
    # bool, int, float...
    func.l(f"{self.VNAME} = {anno_name}({self.VNAME})")
def _build_text_des(
    self,
    func: gen.Block,
    anno_name: str,
    annotation: "Annotation",
):
    """Emit deserialization for text-like targets (str/bytes subclasses).

    Normalizes the encoding first — encoding str input for a bytes
    target, decoding bytes input for a str target — then coerces the
    value into the annotated type.
    """
    target = annotation.resolved_origin
    if issubclass(target, bytes):
        # Encode for bytes
        encode = (
            f"{self.VNAME} = {anno_name}("
            f"{self.VNAME}, encoding={DEFAULT_ENCODING!r})"
        )
        with func.b(f"if isinstance({self.VNAME}, str):") as block:
            block.l(encode)
    elif issubclass(target, str):
        # Decode for str
        with func.b(f"if isinstance({self.VNAME}, (bytes, bytearray)):") as block:
            block.l(f"{self.VNAME} = {self.VNAME}.decode({DEFAULT_ENCODING!r})")
    # Finally, coerce into the annotated type.
    func.l(f"{self.VNAME} = {anno_name}({self.VNAME})")
def _build_item_validator(self, func: gen.Block) -> Optional[ContextT]:
    """Emit per-item (key/value) validation as a dict comprehension.

    Returns the namespace additions required by the emitted code, or
    ``None`` when no item-level constraints are configured.
    """
    if any((
            self.items,
            self.patterns,
            self.key_pattern,
            self.keys,
            self.values,
    )):
        # Rebuild the mapping, validating each key/value pair in-line.
        with func.b(f"{self.VALUE} = {{") as loop:
            item_context = self._set_item_validator_loop_line(
                loop, func.name)
            loop.l(f"for {self.X}, {self.Y} in {self.VALUE}.items()")
        func.l("}")
        if self.key_pattern:
            # Every key must match the configured pattern.
            key_pattern_name = f"{func.name}_key_pattern"
            with func.b(f"if any((not {key_pattern_name}.match({self.X}) "
                        f"for {self.X} in {self.VALUE})):") as b:
                b.l(f"return False, {self.VALUE}")
            item_context[key_pattern_name] = self.key_pattern
        return item_context
    return None
def _build_validator(self, func: gen.Block, context: ContextT, assertions: AssertionsT) -> ContextT:
    """Build the decimal-specific portion of the generated validator.

    When digit-level constraints (``max_digits``/``decimal_places``) are
    configured, the value is first coerced to ``Decimal``, infinities are
    rejected, and the digit tuple is decomposed for the assertions; the
    numeric base builder and shared namespace entries run in either case.

    Refactor: the base-builder call and ``context.update`` tail were
    duplicated verbatim in both branches — behavior is unchanged.
    """
    if (self.max_digits, self.decimal_places) != (None, None):
        # Add setup/sanity checks for decimals.
        func.l(f"{self.VALUE} = decimal.Decimal({self.VALUE})")
        with func.b(
            f"if {self.VALUE}.is_infinite():",
            ConstraintValueError=ConstraintValueError,
        ) as b:
            b.l("raise ConstraintValueError('Cannot validate infinite values.')")
        func.l(f"tup = {self.VALUE}.as_tuple()")
        func.l(
            "whole, digits, decimals = _get_digits(tup)",
            _get_digits=_get_digits,
        )
    # Update the global namespace for the validator.
    context = NumberConstraints._build_validator(self, func, context=context, assertions=assertions)
    context.update(decimal=decimal, Decimal=decimal.Decimal, _get_digits=_get_digits)
    return context
def _build_union_des(self, func: gen.Block, annotation: "Annotation", namespace):
    """Emit a tagged-union deserializer when a discriminator can be found.

    Only builds when every union member is a custom (non-stdlib) type and
    a shared tag field exists; otherwise the emitted code is a no-op
    comment or nothing at all.
    """
    # Get all types which we may coerce to.
    args = (*(a for a in annotation.args
              if a not in {None, Ellipsis, type(None)}), )
    # Get all custom types, which may have discriminators
    targets = (*(a for a in args if not checks.isstdlibtype(a)), )
    # We can only build a tagged union deserializer if all args are valid
    if args and args == targets:
        # Try to collect the field which will be the discriminator.
        # First, get a mapping of Type -> Proto & Type -> Fields
        tagged = get_tag_for_types(targets)
        # Just bail out if we can't find a key.
        if not tagged:
            func.l("# No-op, couldn't locate a discriminator key.")
            return
        # If we got a key, re-map the protocols to the value for each type.
        deserializers = {
            value: self.resolver.resolve(t, namespace=namespace)
            for value, t in tagged.types_by_values
        }
        # Finally, build the deserializer
        func.namespace.update(
            tag=tagged.tag,
            desers=deserializers,
            empty=_empty,
        )
        # Pull the tag from a mapping key or an attribute, whichever fits.
        with func.b(f"if issubclass({self.VTYPE}, Mapping):",
                    Mapping=abc.Mapping) as b:
            b.l(f"tag_value = {self.VNAME}.get(tag, empty)")
        with func.b("else:") as b:
            b.l(f"tag_value = getattr({self.VNAME}, tag, empty)")
        with func.b("if tag_value in desers:") as b:
            b.l(f"{self.VNAME} = desers[tag_value].transmute({self.VNAME})")
        with func.b("else:") as b:
            b.l("raise ValueError("
                'f"Value is missing field {tag!r} with one of '
                '{(*desers,)}: {val!r}"'
                ")")
def _set_checks(self, func: gen.Block, anno_name: str, annotation: Annotation): _ctx = {} # run a safe eval if input is text and anno isn't if inspect.isclass(annotation.resolved_origin) and (issubclass( annotation.resolved_origin, (str, bytes)) or checks.isdecimaltype(annotation.resolved_origin)): self._add_vtype(func) else: self._add_eval(func) # Equality checks for defaults and optionals custom_equality = hasattr(annotation.resolved_origin, "equals") if custom_equality and (annotation.optional or annotation.has_default): func.l(f"custom_equality = hasattr({self.VNAME}, 'equals')") null = "" if annotation.optional: null = f"{self.VNAME} in {self.resolver.OPTIONALS}" if custom_equality: null = ( f"(any({self.VNAME}.equals(o) for o in {self.resolver.OPTIONALS}) " "if custom_equality " f"else {null})") eq = "" if (annotation.has_default and annotation.parameter.default not in self.resolver.OPTIONALS): eq = f"{self.VNAME} == __default" if custom_equality: if hasattr(annotation.parameter.default, "equals"): eq = f"__default.equals({self.VNAME})" eq = f"{self.VNAME}.equals(__default) if custom_equality else {eq}" _ctx["__default"] = annotation.parameter.default if eq or null: # Add a type-check for anything that isn't a builtin. if eq and not checks.isbuiltintype(annotation.resolved_origin): eq = f"{self.VTYPE} is {anno_name} and {eq}" check = " or ".join(c for c in (null, eq) if c) with func.b(f"if {check}:", **_ctx) as b: # type: ignore b.l(f"return {self.VNAME}")
def _set_item_validator_loop_line(self, loop: gen.Block, func_name: str) -> ContextT:
    """Emit the key/value expression for the item-validation comprehension.

    Builds the ``key: value`` line by successively wrapping the raw key
    and value expressions with the configured validators (values, keys,
    patterns, per-key items), collecting each validator into the returned
    namespace context.
    """
    names = ItemValidatorNames(
        item_validators_name=f"{func_name}_items",
        vals_validator_name=f"{func_name}_vals",
        keys_validator_name=f"{func_name}_keys",
        patterns_validators_name=f"{func_name}_patterns",
    )
    ctx: Dict[str, Any] = {}
    x = self.X
    y = self.Y
    field = f"_lazy_repr({self.FNAME}, {self.X})"
    if self.values:
        y = f"{names.vals_validator_name}({y}, field={field})"
        ctx[names.vals_validator_name] = self.values.validate
    if self.keys:
        x = f"{names.keys_validator_name}({self.X})"
        ctx[names.keys_validator_name] = self.keys.validate
    if self.patterns:
        y = ("validate_pattern_constraints"
             f"({names.patterns_validators_name}, {self.X}, {y})")
        ctx[names.patterns_validators_name] = self.patterns
        ctx["validate_pattern_constraints"] = validate_pattern_constraints
    if self.items:
        # The comprehension's x/y are scoped to the comprehension and do
        # not clobber the expression strings above.
        ctx.update({
            names.item_validators_name: MappingProxyType(
                {x: y.validate for x, y in self.items.items()}  # type: ignore
            )
        })
        y = (f"{names.item_validators_name}[{self.X}]({y}, field={field}) "
             f"if {self.X} in {names.item_validators_name} else {self.Y}")
    loop.l(f"{x}: {y}")
    return ctx
def _build_validator(
    self, func: gen.Block, context: ContextT, assertions: AssertionsT
) -> ContextT:
    """Build the string-specific portion of the generated validator.

    Applies curtailment and whitespace stripping before validation,
    precomputes the size when length bounds exist, then defers to the
    base builder and registers the regex pattern if configured.

    Consistency fixes: capture the base builder's returned context (as
    the sibling implementations do) and use the tuple comparison for the
    length bounds rather than a set literal (``{None, None}`` collapses
    to ``{None}``) — both behavior-preserving.
    """
    # Set up the local env.
    if self.curtail_length is not None:
        func.l(f"{self.VALUE} = {self.VALUE}[:{self.curtail_length}]")
    if self.strip_whitespace:
        func.l(f"{self.VALUE} = {self.VALUE}.strip()")
    # Only compute the size if a length bound will assert on it.
    if (self.min_length, self.max_length) != (None, None):
        func.l(f"size = len({self.VALUE})")
    context = BaseConstraints._build_validator(
        self, func, context=context, assertions=assertions
    )
    # Build the validation.
    if self.regex is not None:
        context.update(__pattern=self.regex)
    return context
def _add_vtype(self, func: gen.Block):
    """Emit a line binding the input value's class to the vtype local."""
    func.l(f"{self.VTYPE} = {self.VNAME}.__class__")
def _build_path_des(self, func: gen.Block, anno_name: str):
    """Emit deserialization for a path-like target.

    Short-circuits when the input is already the target type, otherwise
    passes the raw value to the type's constructor.
    """
    self._add_type_check(func, anno_name)
    func.l(f"{self.VNAME} = {anno_name}({self.VNAME})")
def _build_generic_des(
    self,
    func: gen.Block,
    anno_name: str,
    annotation: "Annotation",
    namespace: Type = None,
):
    """Emit deserialization for a user-defined class.

    Three generated branches: mapping input is remapped onto known fields
    and splatted into the constructor (resolving per-field deserializers
    when the class isn't already known to the resolver); builtin-subtype
    input is cast directly; any other user-defined input is handed to the
    resolver's ``translate``.
    """
    serde = annotation.serde
    resolved = annotation.resolved
    self._add_type_check(func, anno_name)
    # Main branch - we have a mapping for a user-defined class.
    # This is where the serde configuration comes in.
    # WINDY PATH AHEAD
    func.l("# Happy path - deserialize a mapping into the object.")
    with func.b(f"if issubclass({self.VTYPE}, Mapping):",
                Mapping=abc.Mapping) as b:
        # Universal line - transform input to known keys/values.
        # Specific values may change.
        def mainline(k, v):
            return f"{{{k}: {v} for x in fields_in.keys() & {self.VNAME}.keys()}}"

        # The "happy path" - e.g., no guesswork needed.
        def happypath(k, v, **ns):
            b.l(f"{self.VNAME} = {anno_name}(**{mainline(k, v)})", **ns)

        # Default X - translate given `x` to known input `x`
        x = "fields_in[x]"
        # No field name translation needs to happen.
        if {*serde.fields_in.keys()} == {*serde.fields_in.values()}:
            x = "x"
        # Default Y - get the given `y` with the given `x`
        y = f"{self.VNAME}[x]"
        # Get the intersection of known input fields and annotations.
        matched = {*serde.fields_in.values()} & serde.fields.keys()
        # Happy path! This is a `@typic.al` wrapped class.
        if self.resolver.known(resolved) or self.resolver.delayed(
                resolved):
            happypath(x, y)
        # Secondary happy path! We know how to deserialize already.
        else:
            fields_in = serde.fields_in
            # Per-field deserializers when annotations cover the inputs;
            # otherwise fall back to the resolver's protocols.
            if serde.fields and len(matched) == len(serde.fields_in):
                desers = {
                    f: self.resolver._resolve_from_annotation(
                        serde.fields[f], _namespace=namespace).transmute
                    for f in matched
                }
            else:
                protocols = self.resolver.protocols(
                    annotation.resolved_origin)
                fields_in = {x: x for x in protocols}
                desers = {f: p.transmute for f, p in protocols.items()}
            y = f"desers[{x}]({self.VNAME}[x])"
            happypath(x, y, desers=desers, fields_in=fields_in)
    # Secondary branch - we have some other input for a user-defined class
    func.l("# Unknown path, just try casting it directly.")
    with func.b(
        f"elif isbuiltinsubtype({self.VTYPE}):",
        isbuiltinsubtype=checks.isbuiltinsubtype,
    ) as b:
        b.l(f"{self.VNAME} = {anno_name}({self.VNAME})")
    # Final branch - user-defined class for another user-defined class
    func.l("# Two user-defined types, "
           "try to translate the input into the desired output.")
    with func.b("else:") as b:
        b.l(
            f"{self.VNAME} = translate({self.VNAME}, {anno_name})",
            translate=self.resolver.translate,
        )
def _build_fromdict_des(self, func: gen.Block, anno_name: str):
    """Emit deserialization for a type exposing a ``from_dict`` constructor.

    Short-circuits when the input is already the target type, otherwise
    delegates construction to the class's own ``from_dict``.
    """
    self._add_type_check(func, anno_name)
    func.l(f"{self.VNAME} = {anno_name}.from_dict({self.VNAME})")