def _build_validator(
    self, func: gen.Block, context: ContextT, assertions: AssertionsT
) -> ContextT:
    if isinstance(self.values, Sequence):
        if self.unique is True:
            func.l(
                f"{self.VALUE} = unique({self.VALUE}, ret_type=tuple)",
                unique=unique,
            )
        item_validators = MappingProxyType(
            {i: c.validate for i, c in enumerate(self.values)}
        )
        o = util.origin(self.type)
        itval = "__item_validators"
        ctx = {
            "unique": unique,
            itval: item_validators,
            o.__name__: o,
            "_lazy_repr": util.collectionrepr,
        }
        field = f"_lazy_repr({self.FNAME}, i)"
        func.l(
            f"{self.VALUE} = "
            f"{o.__name__}("
            f"({itval}[i](x, field={field}) if i in {itval} else x "
            f"for i, x in enumerate({self.VALUE}))"
            f")",
            **ctx,  # type: ignore
        )
        return ctx
    return ArrayConstraints._build_validator(
        self, func=func, context=context, assertions=assertions
    )

def iterator(self, type: Type, values: bool = False) -> "FieldIteratorT":
    """Get an iterator function for a given type, if possible."""
    if ismappingtype(type):
        iter = _valuescaller if values else _itemscaller
        return iter
    if isiterabletype(type):
        return _iter
    fields = self.get_fields(type, as_source=True) or {}
    if fields:
        func_name = get_defname("iterator", (type, values))
        oname = "o"
        ctx: dict = {}
        with Block(ctx) as main:
            with main.f(func_name, Block.p(oname)) as func:
                if values:
                    for f in fields:
                        func.l(f"{Keyword.YLD} {oname}.{f}")
                else:
                    for f in fields:
                        func.l(f"{Keyword.YLD} {f!r}, {oname}.{f}")
        return main.compile(name=func_name, ns=ctx)
    raise TranslatorTypeError(
        f"Cannot get iterator for type {type!r}, unable to determine fields."
    ) from None

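# Illustrative sketch only, not part of the library: roughly the shape of the
# function ``iterator`` compiles for a type whose fields are ``a`` and ``b``
# (the field names here are hypothetical). With ``values=False`` it yields
# ``(name, value)`` pairs, mirroring ``func.l(f"{Keyword.YLD} {f!r}, {oname}.{f}")``.
def _example_compiled_iterator(o):
    yield "a", o.a
    yield "b", o.b
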
def _build_validator(
    self, func: gen.Block, context: Dict[str, Any], assertions: AssertionsT
) -> ContextT:
    if self.key_dependencies:
        self._get_key_dependencies(assertions, context)
    _lazy_repr = (
        util.collectionrepr if issubclass(self.type, Mapping) else util.joinedrepr
    )
    context.update(Mapping=Mapping, _lazy_repr=_lazy_repr)
    if self.required_keys:
        context["required"] = self.required_keys
    defined_keys = (self.required_keys or set()) | (self.items or {}).keys()
    if defined_keys:
        context["defined"] = frozenset(defined_keys)
    if not issubclass(self.type, Mapping):
        with func.b(f"if not isinstance({self.VALUE}, Mapping):") as b:
            b.l(f"return False, {self.VALUE}")
    func.l(f"valkeys = {{*{self.VALUE}}}")
    context = BaseConstraints._build_validator(
        self, func=func, context=context, assertions=assertions
    )
    items_context = self._build_item_validator(func)
    if items_context:
        context.update(items_context)
    return context

def _build_collection_des(
    self,
    func: gen.Block,
    anno_name: str,
    annotation: "Annotation",
    namespace: Type = None,
):
    item_des = None
    it_name = f"{anno_name}_item_des"
    iterate = f"iterate({self.VNAME}, values=True)"
    line = f"{self.VNAME} = {anno_name}({iterate})"
    if annotation.args:
        item_type = annotation.args[0]
        item_des = self.resolver.resolve(
            item_type, flags=annotation.serde.flags, namespace=namespace
        )
        line = (
            f"{self.VNAME} = "
            f"{anno_name}({it_name}(x) for x in parent({iterate}))"
        )
    else:
        self._add_type_check(func, anno_name)
    func.l(
        line,
        level=None,
        **{
            it_name: item_des,
            "Collection": abc.Collection,
            "iterate": self.resolver.iterate,
        },
    )

def _build_typeddict_des(
    self,
    func: gen.Block,
    anno_name: str,
    annotation: "Annotation",
    *,
    total: bool = True,
    namespace: Type = None,
):
    with func.b(f"if issubclass({self.VTYPE}, Mapping):", Mapping=abc.Mapping) as b:
        fields_deser = {
            x: self.resolver._resolve_from_annotation(
                y, _namespace=namespace
            ).transmute
            for x, y in annotation.serde.fields.items()
        }
        x = "fields_in[x]"
        y = (
            f"fields_deser[x]({self.VNAME}[x])"
            if fields_deser
            else f"{self.VNAME}[x]"
        )
        line = f"{{{x}: {y} for x in fields_in.keys()"
        tail = "}" if total else f"& {self.VNAME}.keys()}}"
        b.l(
            f"{self.VNAME} = {anno_name}(**{line}{tail})",
            fields_deser=fields_deser,
        )
    with func.b("else:") as b:
        b.l(
            f"{self.VNAME} = translate({self.VNAME}, {anno_name})",
            translate=self.resolver.translate,
        )

def _build_tuple_des(
    self,
    func: gen.Block,
    anno_name: str,
    annotation: "Annotation",
    namespace: Type = None,
):
    if annotation.args and annotation.args[-1] is not ...:
        item_des = {
            ix: self.resolver.resolve(
                t, flags=annotation.serde.flags, namespace=namespace
            )
            for ix, t in enumerate(annotation.args)
        }
        item_des_name = "item_des"
        iterate = f"iterate({self.VNAME}, values=True)"
        line = (
            f"{anno_name}"
            f"({item_des_name}[ix](v) for ix, v in enumerate({iterate})"
            f"if ix in {item_des_name})"
        )
        func.l(
            f"{self.VNAME} = {line}",
            level=None,
            **{
                item_des_name: item_des,
                "iterate": self.resolver.iterate,
            },
        )
    else:
        self._build_collection_des(
            func=func,
            anno_name=anno_name,
            annotation=annotation,
            namespace=namespace,
        )

def _build_typedtuple_des(
    self,
    func: gen.Block,
    anno_name: str,
    annotation: "Annotation",
    namespace: Type = None,
):
    with func.b(f"if issubclass({self.VTYPE}, Mapping):", Mapping=abc.Mapping) as b:
        if annotation.serde.fields:
            self._build_typeddict_des(b, anno_name, annotation, namespace=namespace)
        else:
            b.l(f"{self.VNAME} = {anno_name}(**{self.VNAME})")
    with func.b(
        f"elif isinstance({self.VNAME}, (list, set, frozenset, tuple)):"
    ) as b:
        if annotation.serde.fields:
            b.l(
                f"{self.VNAME} = __bind({anno_name}, *{self.VNAME}).eval()",
                __bind=self.resolver.bind,
            )
        else:
            b.l(f"{self.VNAME} = {anno_name}(*{self.VNAME})")
    with func.b("else:") as b:
        b.l(
            f"{self.VNAME} = translate({self.VNAME}, {anno_name})",
            translate=self.resolver.translate,
        )

def _build_pattern_des(self, func: gen.Block, anno_name: str):
    func.l(
        f"{self.VNAME} = {self.VNAME} "
        f"if issubclass({self.VTYPE}, {anno_name}) "
        f"else __re_compile({self.VNAME})",
        __re_compile=re.compile,
    )

def _build_validator(
    self, func: gen.Block, context: ContextT, assertions: AssertionsT
) -> ContextT:
    # If we don't have a natively unique type and we're supposed to be unique, make it so.
    if self.unique is True and util.origin(self.type) not in {set, frozenset}:
        func.l(f"{self.VALUE} = unique({self.VALUE})", unique=unique)
    context = BaseConstraints._build_validator(self, func, context, assertions)
    # Validate the items if necessary.
    if self.values:
        o = util.origin(self.type)
        itval = "__item_validator"
        ctx = {
            "unique": unique,
            itval: self.values.validate,
            o.__name__: o,
            "_lazy_repr": util.collectionrepr,
        }
        r = "i" if issubclass(self.type, Sequence) else "x"
        field = f"_lazy_repr({self.FNAME}, {r})"
        func.l(
            f"{self.VALUE} = "
            f"{o.__name__}("
            f"({itval}(x, field={field}) for i, x in enumerate({self.VALUE}))"
            f")",
            **ctx,  # type: ignore
        )
    return context

def _set_item_validator_pattern_constraints(self, loop: gen.Block, func_name: str):
    # Item constraints based upon key-pattern
    pattern_constr_name = f"{func_name}_pattern_constraints"
    if self.patterns:
        loop.l(
            f"{self.RETY} = "
            f"validate_pattern_constraints"
            f"({pattern_constr_name}, {self.X}, {self.Y})",
            level=None,
            **{
                "validate_pattern_constraints": validate_pattern_constraints,
                pattern_constr_name: self.patterns,
            },
        )
    # Required key pattern
    if self.key_pattern:
        key_pattern_name = f"{func_name}_key_pattern"
        loop.l(
            f"valid = bool({key_pattern_name}.match({self.X}))",
            level=None,
            **{key_pattern_name: self.key_pattern},
        )
        with loop.b("if not valid:") as b:
            b.l("break")

def _add_eval(self, func: gen.Block):
    func.l(
        f"_, {self.VNAME} = __eval({self.VNAME}) "
        f"if isinstance({self.VNAME}, (str, bytes)) "
        f"else (False, {self.VNAME})",
        __eval=safe_eval,
    )
    self._add_vtype(func)

def _build_assertions(self, func: gen.Block, assertions: AssertionsT):
    # Only get the size if we have to.
    if assertions:
        if (self.max_items, self.min_items) != (None, None):
            func.l(f"size = len({self.VALUE})")
        BaseConstraints._build_assertions(self, func=func, assertions=assertions)

def _build_timedelta_des(
    self, func: gen.Block, anno_name: str, annotation: "Annotation"
):
    # From an int
    with func.b(f"if isinstance({self.VNAME}, (int, float)):") as b:
        b.l(f"{self.VNAME} = {anno_name}(int({self.VNAME}))")
    # From a string
    with func.b(f"elif isinstance({self.VNAME}, (str, bytes)):") as b:
        line = f"{self.VNAME} = dateparse({self.VNAME}, exact=True)"
        b.l(line, dateparse=dateparse)

def _define(main: gen.Block, name: str) -> gen.Function:
    return main.func(
        name,
        main.param("o"),
        main.param("lazy", default=False, kind=gen.ParameterKind.KEYWORD_ONLY),
        main.param("name", default=None, kind=gen.ParameterKind.KEYWORD_ONLY),
    )

def define(self, block: gen.Block, name: str) -> gen.Function:
    f: gen.Function = block.f(
        name,
        block.param(self.VALUE, annotation="VT"),
        block.param(
            "field",
            annotation=str,
            kind=gen.ParameterKind.KEYWORD_ONLY,  # type: ignore
            default=None,
        ),
    )
    return f

def _build_mapping_des(
    self,
    func: gen.Block,
    anno_name: str,
    annotation: "Annotation",
    namespace: Type = None,
):
    key_des, item_des = None, None
    args = annotation.args
    if args:
        args = cast(Tuple[Type, Type], args)
        key_type, item_type = args
        key_des = self.resolver.resolve(
            key_type, flags=annotation.serde.flags, namespace=namespace
        )
        item_des = self.resolver.resolve(
            item_type, flags=annotation.serde.flags, namespace=namespace
        )
    if issubclass(annotation.resolved_origin, defaultdict):
        factory = self._get_default_factory(annotation)
        func.namespace[anno_name] = functools.partial(defaultdict, factory)
    kd_name = f"{anno_name}_key_des"
    it_name = f"{anno_name}_item_des"
    iterate = f"iterate({self.VNAME})"
    line = f"{anno_name}({iterate})"
    if args or annotation.serde.fields_in:
        x, y = "x", "y"
        # If there are args & field mapping, get the correct field name
        # AND serialize the key.
        if args and annotation.serde.fields_in:
            x = f"{kd_name}(fields_in.get(x, x))"
        # If there is only a field mapping, get the correct name for the field.
        elif annotation.serde.fields_in:
            x = "fields_in.get(x, x)"
        # If there are only serializers, get the serialized value
        elif args:
            x = f"{kd_name}(x)"
            y = f"{it_name}(y)"
        line = f"{anno_name}({{{x}: {y} for x, y in {iterate}}})"
    # If we don't have nested annotations, we can short-circuit on valid inputs
    else:
        self._add_type_check(func, anno_name)
    # Write the lines.
    func.l(
        f"{self.VNAME} = {line}",
        level=None,
        **{
            kd_name: key_des,
            it_name: item_des,
            "Mapping": abc.Mapping,
            "iterate": self.resolver.iterate,
        },
    )

def iterator(
    self,
    type: Type,
    values: bool = False,
    relaxed: bool = False,
    exclude: Tuple[str, ...] = (),
) -> IteratorT:
    """Get an iterator function for a given type, if possible."""
    mapping, iterable, builtin, namedtuple, typicklass = (
        ismappingtype(type),
        isiterabletype(type),
        isbuiltinsubtype(type),
        isnamedtuple(type),
        istypicklass(type),
    )
    if mapping:
        return _valuescaller if values else _itemscaller
    if (iterable, namedtuple, typicklass) == (True, False, False):
        return iter if values else enumerate
    if (builtin, iterable) == (True, False):
        raise TranslatorTypeError(
            f"Cannot get iterator for type {type.__name__!r}."
        ) from None
    fields = self.get_fields(type, as_source=True, exclude=exclude) or {}
    if not fields and not relaxed:
        raise TranslatorTypeError(
            f"Cannot get iterator for type {type.__name__!r}, "
            f"unable to determine fields."
        ) from None
    func_name = get_defname("iterator", (type, values))
    oname = "o"
    ctx: dict = {}
    with Block(ctx) as main:
        with main.f(func_name, Block.p(oname)) as func:
            if fields:
                if values:
                    for f in fields:
                        func.l(f"{Keyword.YLD} {oname}.{f}")
                else:
                    for f in fields:
                        func.l(f"{Keyword.YLD} {f!r}, {oname}.{f}")
            else:
                func.l(f"{Keyword.YLD}")
    return main.compile(name=func_name, ns=ctx)

def _compile_iterable_translator(self, source: Type, target: Type) -> TranslatorT:
    func_name = self._get_name(source, target)
    target_name = get_name(target)
    oname = "o"
    ismapping = ismappingtype(target)
    iterator = self.iterator(source, not ismapping)
    ctx = {"iterator": iterator, target_name: target}
    with Block(ctx) as main:
        with main.f(func_name, Block.p(oname)) as func:
            retval = f"iterator({oname})"
            if not isiteratortype(target):
                retval = f"{target_name}({retval})"
            func.l(f"{Keyword.RET} {retval}")
    return main.compile(name=func_name)

def _compile_translator(
    self, source: Type, target: Type, exclude: Tuple[str, ...] = ()
) -> TranslatorT:
    if isliteral(target):
        raise TranslatorTypeError(
            f"Cannot translate to literal type: {target!r}. "
        ) from None
    if isliteral(source):
        raise TranslatorTypeError(
            f"Cannot translate from literal type: {source!r}. "
        ) from None
    # Get the target fields for translation.
    target_fields = self.get_fields(target)
    if target_fields is None:
        if isiterabletype(target):
            return self._compile_iterable_translator(source, target)
        raise TranslatorTypeError(
            f"Cannot translate to type {target!r}. "
            f"Unable to determine target fields."
        ) from None
    # Ensure that the target fields are a subset of the source fields.
    # We treat the target fields as the parameters for the target,
    # so this must be true.
    fields = self.get_fields(source, as_source=True, exclude=exclude) or {}
    fields_to_pass = {x: fields[x] for x in fields.keys() & target_fields.keys()}
    required = self.required_fields(target_fields)
    if not required.issubset(fields_to_pass.keys()):
        diff = (*(required - fields.keys()),)
        raise TranslatorValueError(
            f"{source!r} can't be translated to {target!r}. "
            f"Source is missing required fields: {diff}."
        ) from None
    protocols = self.resolver.protocols(target)
    # Build the translator.
    anno_name = get_unique_name(source)
    target_name = get_unique_name(target)
    func_name = self._get_name(source, target)
    oname = "o"
    ctx: Dict[str, Any] = {target_name: target, anno_name: source}
    with Block(ctx) as main:
        with main.f(func_name, Block.p(oname)) as func:
            args = ", ".join(
                self._iter_field_assigns(fields_to_pass, oname, protocols, ctx)
            )
            func.l(f"{Keyword.RET} {target_name}({args})")
    trans = main.compile(name=func_name, ns=ctx)
    return trans

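# Illustrative sketch only, not part of the library: the approximate shape of a
# translator compiled by ``_compile_translator`` for a source with fields ``a``
# and ``b`` whose target accepts the same parameters. The keyword-argument form
# of the call is an assumption about what ``_iter_field_assigns`` emits; the
# ``return Target(...)`` line mirrors ``func.l(f"{Keyword.RET} {target_name}({args})")``.
class _ExampleTarget:  # hypothetical target type for the sketch below
    def __init__(self, a, b):
        self.a, self.b = a, b


def _example_compiled_translator(o):
    return _ExampleTarget(a=o.a, b=o.b)
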
def _build_builtin_des(
    self,
    func: gen.Block,
    anno_name: str,
    annotation: "Annotation",
):
    origin = annotation.resolved_origin
    if issubclass(origin, (str, bytes)):
        self._build_text_des(func, anno_name, annotation)
    elif checks.ismappingtype(origin):
        self._build_mapping_des(func, anno_name, annotation)
    elif checks.iscollectiontype(origin):
        self._build_collection_des(func, anno_name, annotation)
    # bool, int, float...
    else:
        func.l(f"{self.VNAME} = {anno_name}({self.VNAME})")

def _build_validator(
    self, func: gen.Block, context: ContextT, assertions: AssertionsT
) -> ContextT:
    if (self.max_digits, self.decimal_places) == (None, None):
        context = NumberConstraints._build_validator(
            self, func, context=context, assertions=assertions
        )
        context.update(
            decimal=decimal, Decimal=decimal.Decimal, _get_digits=_get_digits
        )
        return context
    # Update the global namespace for the validator
    # Add setup/sanity checks for decimals.
    func.l(f"{self.VALUE} = decimal.Decimal({self.VALUE})")
    with func.b(
        f"if {self.VALUE}.is_infinite():",
        ConstraintValueError=ConstraintValueError,
    ) as b:
        b.l("raise ConstraintValueError('Cannot validate infinite values.')")
    func.l(f"tup = {self.VALUE}.as_tuple()")
    func.l(
        "whole, digits, decimals = _get_digits(tup)",
        _get_digits=_get_digits,
    )
    context = NumberConstraints._build_validator(
        self, func, context=context, assertions=assertions
    )
    context.update(
        decimal=decimal, Decimal=decimal.Decimal, _get_digits=_get_digits
    )
    return context

def _build_uuid_des(self, func: gen.Block, anno_name: str, annotation: "Annotation"):
    self._add_type_check(func, anno_name)
    with func.b(f"if issubclass({self.VTYPE}, UUID):", UUID=uuid.UUID) as b:
        b.l(f"{self.VNAME} = {anno_name}(int={self.VNAME}.int)")
    with func.b(f"elif isinstance({self.VNAME}, str):") as b:
        b.l(f"{self.VNAME} = {anno_name}({self.VNAME})")
    with func.b(f"elif isinstance({self.VNAME}, bytes):") as b:
        b.l(f"{self.VNAME} = {anno_name}(bytes={self.VNAME})")
    with func.b(f"elif isinstance({self.VNAME}, int):") as b:
        b.l(f"{self.VNAME} = {anno_name}(int={self.VNAME})")
    with func.b(f"elif isinstance({self.VNAME}, tuple):") as b:
        b.l(f"{self.VNAME} = {anno_name}(fields={self.VNAME})")

def _build_text_des(
    self,
    func: gen.Block,
    anno_name: str,
    annotation: "Annotation",
):
    origin = annotation.resolved_origin
    # Encode for bytes
    if issubclass(origin, bytes):
        with func.b(f"if isinstance({self.VNAME}, str):") as b:
            b.l(
                f"{self.VNAME} = {anno_name}("
                f"{self.VNAME}, encoding={DEFAULT_ENCODING!r})"
            )
    # Decode for str
    elif issubclass(origin, str):
        with func.b(f"if isinstance({self.VNAME}, (bytes, bytearray)):") as b:
            b.l(f"{self.VNAME} = {self.VNAME}.decode({DEFAULT_ENCODING!r})")
    func.l(f"{self.VNAME} = {anno_name}({self.VNAME})")

def _build_item_validator(self, func: gen.Block) -> Optional[ContextT]:
    if any((
        self.items,
        self.patterns,
        self.key_pattern,
        self.keys,
        self.values,
    )):
        with func.b(f"{self.VALUE} = {{") as loop:
            item_context = self._set_item_validator_loop_line(loop, func.name)
            loop.l(f"for {self.X}, {self.Y} in {self.VALUE}.items()")
        func.l("}")
        if self.key_pattern:
            key_pattern_name = f"{func.name}_key_pattern"
            with func.b(
                f"if any((not {key_pattern_name}.match({self.X}) "
                f"for {self.X} in {self.VALUE})):"
            ) as b:
                b.l(f"return False, {self.VALUE}")
            item_context[key_pattern_name] = self.key_pattern
        return item_context
    return None

def _set_checks(self, func: gen.Block, anno_name: str, annotation: Annotation):
    _ctx = {}
    # run a safe eval if input is text and anno isn't
    if inspect.isclass(annotation.resolved_origin) and (
        issubclass(annotation.resolved_origin, (str, bytes))
        or checks.isdecimaltype(annotation.resolved_origin)
    ):
        self._add_vtype(func)
    else:
        self._add_eval(func)
    # Equality checks for defaults and optionals
    custom_equality = hasattr(annotation.resolved_origin, "equals")
    if custom_equality and (annotation.optional or annotation.has_default):
        func.l(f"custom_equality = hasattr({self.VNAME}, 'equals')")
    null = ""
    if annotation.optional:
        null = f"{self.VNAME} in {self.resolver.OPTIONALS}"
        if custom_equality:
            null = (
                f"(any({self.VNAME}.equals(o) for o in {self.resolver.OPTIONALS}) "
                "if custom_equality "
                f"else {null})"
            )
    eq = ""
    if (
        annotation.has_default
        and annotation.parameter.default not in self.resolver.OPTIONALS
    ):
        eq = f"{self.VNAME} == __default"
        if custom_equality:
            if hasattr(annotation.parameter.default, "equals"):
                eq = f"__default.equals({self.VNAME})"
            eq = f"{self.VNAME}.equals(__default) if custom_equality else {eq}"
        _ctx["__default"] = annotation.parameter.default
    if eq or null:
        # Add a type-check for anything that isn't a builtin.
        if eq and not checks.isbuiltintype(annotation.resolved_origin):
            eq = f"{self.VTYPE} is {anno_name} and {eq}"
        check = " or ".join(c for c in (null, eq) if c)
        with func.b(f"if {check}:", **_ctx) as b:  # type: ignore
            b.l(f"return {self.VNAME}")

def _set_item_validator_loop_line(self, loop: gen.Block, func_name: str) -> ContextT:
    names = ItemValidatorNames(
        item_validators_name=f"{func_name}_items",
        vals_validator_name=f"{func_name}_vals",
        keys_validator_name=f"{func_name}_keys",
        patterns_validators_name=f"{func_name}_patterns",
    )
    ctx: Dict[str, Any] = {}
    x = self.X
    y = self.Y
    field = f"_lazy_repr({self.FNAME}, {self.X})"
    if self.values:
        y = f"{names.vals_validator_name}({y}, field={field})"
        ctx[names.vals_validator_name] = self.values.validate
    if self.keys:
        x = f"{names.keys_validator_name}({self.X})"
        ctx[names.keys_validator_name] = self.keys.validate
    if self.patterns:
        y = (
            "validate_pattern_constraints"
            f"({names.patterns_validators_name}, {self.X}, {y})"
        )
        ctx[names.patterns_validators_name] = self.patterns
        ctx["validate_pattern_constraints"] = validate_pattern_constraints
    if self.items:
        ctx.update({
            names.item_validators_name: MappingProxyType(
                {x: y.validate for x, y in self.items.items()}  # type: ignore
            )
        })
        y = (
            f"{names.item_validators_name}[{self.X}]({y}, field={field}) "
            f"if {self.X} in {names.item_validators_name} else {self.Y}"
        )
    loop.l(f"{x}: {y}")
    return ctx

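# Illustrative sketch only, not part of the library: the kind of dict-comprehension
# block that ``_build_item_validator`` and ``_set_item_validator_loop_line``
# assemble in the generated validator. The key/value validators below are
# hypothetical stand-ins for the compiled validators placed in the generated
# function's namespace, the field string is a simplified stand-in for
# ``_lazy_repr``, and the ``return True, value`` success path is an assumption.
def _example_generated_mapping_check(value, *, field=None):
    keys_validator = str  # stand-in key validator

    def vals_validator(v, *, field=None):  # stand-in value validator
        return v

    value = {
        keys_validator(x): vals_validator(y, field=f"{field}[{x!r}]")
        for x, y in value.items()
    }
    return True, value
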
def _build_date_des(self, func: gen.Block, anno_name: str, annotation: "Annotation"):
    origin = annotation.resolved_origin
    # From an int
    with func.b(f"if isinstance({self.VNAME}, (int, float)):") as b:
        b.l(f"{self.VNAME} = {anno_name}.fromtimestamp({self.VNAME})")
    # From a string
    with func.b(f"elif isinstance({self.VNAME}, (str, bytes)):") as b:
        line = f"{self.VNAME} = dateparse({self.VNAME})"
        b.l(line, dateparse=dateparse)
    if issubclass(origin, datetime.datetime):
        with func.b(
            f"if isinstance({self.VNAME}, datetime):", datetime=datetime.datetime
        ) as b:
            # Use pendulum's helper if possible.
            if origin is DateTime:
                b.l(f"{self.VNAME} = instance({self.VNAME})", instance=instance)
            else:
                b.l(
                    f"{self.VNAME} = "
                    f"{anno_name}("
                    f"{self.VNAME}.year, "
                    f"{self.VNAME}.month, "
                    f"{self.VNAME}.day, "
                    f"{self.VNAME}.hour, "
                    f"{self.VNAME}.minute, "
                    f"{self.VNAME}.second, "
                    f"{self.VNAME}.microsecond, "
                    f"{self.VNAME}.tzinfo"
                    f")",
                )
        with func.b(
            f"elif isinstance({self.VNAME}, date):", date=datetime.date
        ) as b:
            b.l(
                f"{self.VNAME} = "
                f"{anno_name}("
                f"{self.VNAME}.year, "
                f"{self.VNAME}.month, "
                f"{self.VNAME}.day"
                f")",
            )
    elif issubclass(origin, datetime.date):
        with func.b(
            f"if isinstance({self.VNAME}, datetime):", datetime=datetime.datetime
        ) as b:
            b.l(f"{self.VNAME} = {self.VNAME}.date()")
        with func.b(f"elif isinstance({self.VNAME}, (int, float)):") as b:
            b.l(f"{self.VNAME} = {anno_name}.fromtimestamp({self.VNAME})")
        with func.b(f"elif isinstance({self.VNAME}, (str, bytes)):") as b:
            line = f"{self.VNAME} = dateparse({self.VNAME}, exact=True)"
            b.l(line, dateparse=dateparse)

def _build_validator(
    self, func: gen.Block, context: ContextT, assertions: AssertionsT
) -> ContextT:
    # Set up the local env.
    if self.curtail_length is not None:
        func.l(f"{self.VALUE} = {self.VALUE}[:{self.curtail_length}]")
    if self.strip_whitespace:
        func.l(f"{self.VALUE} = {self.VALUE}.strip()")
    if {self.min_length, self.max_length} != {None, None}:
        func.l(f"size = len({self.VALUE})")
    BaseConstraints._build_validator(
        self, func, context=context, assertions=assertions
    )
    # Build the validation.
    if self.regex is not None:
        context.update(__pattern=self.regex)
    return context

def _build_union_des(self, func: gen.Block, annotation: "Annotation", namespace):
    # Get all types which we may coerce to.
    args = (*(a for a in annotation.args if a not in {None, Ellipsis, type(None)}),)
    # Get all custom types, which may have discriminators
    targets = (*(a for a in args if not checks.isstdlibtype(a)),)
    # We can only build a tagged union deserializer if all args are valid
    if args and args == targets:
        # Try to collect the field which will be the discriminator.
        # First, get a mapping of Type -> Proto & Type -> Fields
        tagged = get_tag_for_types(targets)
        # Just bail out if we can't find a key.
        if not tagged:
            func.l("# No-op, couldn't locate a discriminator key.")
            return
        # If we got a key, re-map the protocols to the value for each type.
        deserializers = {
            value: self.resolver.resolve(t, namespace=namespace)
            for value, t in tagged.types_by_values
        }
        # Finally, build the deserializer
        func.namespace.update(
            tag=tagged.tag,
            desers=deserializers,
            empty=_empty,
        )
        with func.b(
            f"if issubclass({self.VTYPE}, Mapping):", Mapping=abc.Mapping
        ) as b:
            b.l(f"tag_value = {self.VNAME}.get(tag, empty)")
        with func.b("else:") as b:
            b.l(f"tag_value = getattr({self.VNAME}, tag, empty)")
        with func.b("if tag_value in desers:") as b:
            b.l(f"{self.VNAME} = desers[tag_value].transmute({self.VNAME})")
        with func.b("else:") as b:
            b.l(
                "raise ValueError("
                'f"Value is missing field {tag!r} with one of '
                '{(*desers,)}: {val!r}"'
                ")"
            )

def _build_assertions(self, func: gen.Block, assertions: AssertionsT):
    check = " and ".join(assertions)
    with func.b(f"if not ({check}):") as b:
        b.l(f"return False, {self.VALUE}")

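# Illustrative sketch only, not part of the library: what the guard emitted by
# ``_build_assertions`` looks like once the assertion expressions are joined with
# ``and``. The particular expressions (``size >= 1``, ``size <= 5``) and the
# ``return True, value`` success path are hypothetical, chosen to show the shape
# of the generated ``if not (...): return False, value`` check.
def _example_generated_assertions(value, *, field=None):
    size = len(value)
    if not (size >= 1 and size <= 5):
        return False, value
    return True, value
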