def deserialization() -> Conversion:
    annotations: dict[str, Any] = {}
    deserialization_namespace: dict[str, Any] = {"__annotations__": annotations}
    for sub in rec_subclasses(cls):
        annotations[sub.__name__] = Tagged[sub]  # type: ignore
        # Add tagged fields for all its alternative constructors
        for constructor in _alternative_constructors.get(sub, ()):
            # Build the alias of the field
            alias = to_pascal_case(constructor.__name__)
            # object_deserialization uses get_type_hints, but the constructor
            # return type is stringified and the class not defined yet,
            # so it must be assigned manually
            constructor.__annotations__["return"] = sub
            # Add constructor tagged field with its conversion
            annotations[alias] = Tagged[sub]  # type: ignore
            deserialization_namespace[alias] = Tagged(
                conversion(
                    # Use object_deserialization to wrap constructor as deserializer
                    deserialization=object_deserialization(
                        constructor, type_name(alias)
                    )
                )
            )
    # Create the deserialization tagged union class
    deserialization_union = new_class(
        cls.__name__,
        (TaggedUnion,),
        exec_body=lambda ns: ns.update(deserialization_namespace),
    )
    return Conversion(
        lambda obj: get_tagged(obj)[1], source=deserialization_union, target=cls
    )
class A:
    a: Annotated[
        int,
        schema(max=10),
        schema(description="type description"),
        type_name("someInt"),
        schema(description="field description"),
    ] = field(metadata=schema(min=0))
def test_find_refs():
    refs = {}
    DeserializationSchemaBuilder.RefsExtractor(
        settings.deserialization.default_conversion, refs
    ).visit(D)
    DeserializationSchemaBuilder.RefsExtractor(
        settings.deserialization.default_conversion, refs
    ).visit(Recursive)
    assert refs == {
        "B": (B, 1),
        "DD": (D, 1),
        "Bs": (Collection[B], 1),
        "Bs2": (Annotated[List[B], type_name("Bs2")], 1),
        "Recursive": (Recursive, 2),
    }
from apischema import deserialize, deserializer, type_name
from apischema.json_schema import deserialization_schema
from apischema.objects import object_deserialization


def create_range(start: int, stop: int, step: int = 1) -> range:
    return range(start, stop, step)


range_conv = object_deserialization(create_range, type_name("Range"))
# Conversion can be registered
deserializer(range_conv)
assert deserialize(range, {"start": 0, "stop": 10}) == range(0, 10)
assert deserialization_schema(range) == {
    "$schema": "http://json-schema.org/draft/2019-09/schema#",
    "type": "object",
    "properties": {
        "start": {"type": "integer"},
        "stop": {"type": "integer"},
        "step": {"type": "integer", "default": 1},
    },
    "required": ["start", "stop"],
    "additionalProperties": False,
}
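# A minimal follow-up sketch, not part of the original example: the same
# Conversion is assumed to also work when passed dynamically through
# deserialize's `conversion` parameter, instead of being registered globally.
assert (
    deserialize(range, {"start": 0, "stop": 10, "step": 2}, conversion=range_conv)
    == range(0, 10, 2)
)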
def test_generic_ref_error(cls):
    with raises(TypeError):
        type_name("Data")(cls)
class D:
    bs: Annotated[List[B], type_name("Bs2")]  # noqa: F821
def test_collection_type_name():
    type_name("test")(Sequence[A])
    assert get_type_name(List[A]) == get_type_name(Collection[A]) == ("test", "test")
from dataclasses import dataclass
from typing import List, Optional

from apischema import type_name
from apischema.type_names import get_type_name
from apischema.typing import Annotated


@type_name(None)
@dataclass
class A:
    a: int


@dataclass
class B:
    a: Optional[A]


type_name("Bs")(List[B])


@type_name("DD")
@dataclass
class D:
    bs: Annotated[List[B], type_name("Bs2")]  # noqa: F821


@dataclass
class Recursive:
    rec: Optional["Recursive"]


def test_find_refs():
    refs = {}
import sys
from datetime import date, datetime

from apischema import deserializer, schema, serializer, type_name
from apischema.graphql import relay
from apischema.graphql.relay import global_identification

if sys.version_info < (3, 7):
    type_name("Datetime")(datetime)
    schema(format="date-time")(datetime)

    @deserializer
    def to_datetime(s: str) -> datetime:
        return datetime.strptime(s, "%Y-%m-%d")

    @serializer
    def from_datetime(obj: datetime) -> str:
        return obj.strftime("%Y-%m-%dT%H:%M:%S")

    type_name("Date")(date)
    schema(format="date")(date)

    @deserializer
    def to_date(s: str) -> date:
        # date has no strptime; parse with datetime and keep only the date part
        return datetime.strptime(s, "%Y-%m-%d").date()

    @serializer
    def from_date(obj: date) -> str:
        return obj.strftime("%Y-%m-%d")
        yield sub_cls
        yield from get_all_subclasses(sub_cls)


Cls = TypeVar("Cls", bound=type)


def _get_generic_name_factory(cls: type, *args: type):
    def _capitalized(name: str) -> str:
        return name[0].upper() + name[1:]

    return "".join((cls.__name__, *(_capitalized(arg.__name__) for arg in args)))


generic_name = type_name(_get_generic_name_factory)


def as_tagged_union(cls: Cls) -> Cls:
    """Tagged union decorator, to be used on a base class.

    Supports generics as well, with names generated by way of
    `_get_generic_name_factory`.
    """
    params = tuple(getattr(cls, "__parameters__", ()))
    tagged_union_bases: Tuple[type, ...] = (TaggedUnion,)
    # Generic handling is here:
    if params:
        tagged_union_bases = (TaggedUnion, Generic[params])
class BaseResource:
    id: int
    # or using typing.Annotated
    tags: Annotated[set[str], type_name("ResourceTags")]
                        },
                        {"type": "null"},
                    ]
                }
            },
            "required": ["foo"],
            "additionalProperties": False,
        }
    },
    "$schema": "http://json-schema.org/draft/2019-09/schema#",
}


MoreThanTwo = NewType("MoreThanTwo", int)
schema(min=0, extra=lambda s: s.update({"minimum": 2}))(type_name(None)(MoreThanTwo))


@dataclass
class WithSchema:
    attr1: MoreThanTwo = field(metadata=schema(min=3))
    attr2: MoreThanTwo = field(metadata=schema(min=1))


def test_flattened_schema():
    assert deserialization_schema(WithSchema) == {
        "$schema": "http://json-schema.org/draft/2019-09/schema#",
        "type": "object",
        "properties": {
            "attr1": {
                "type": "integer",
import sys
from base64 import b64decode, b64encode
from collections import deque
from datetime import date, datetime, time
from typing import Deque, List, TypeVar

from apischema import deserializer, schema, serializer, type_name
from apischema.conversions import Conversion, as_str

T = TypeVar("T")


# =================== bytes =====================

deserializer(Conversion(b64decode, source=str, target=bytes))


@serializer
def to_base64(b: bytes) -> str:
    return b64encode(b).decode()


type_name(graphql="Bytes")(bytes)
schema(encoding="base64")(bytes)

# ================ collections ==================

deserializer(Conversion(deque, source=List[T], target=Deque[T]))
serializer(Conversion(list, source=Deque[T], target=List[T]))
if sys.version_info < (3, 7):
    deserializer(Conversion(deque, source=List, target=deque))
    serializer(Conversion(list, source=deque, target=List))

# ================== datetime ===================

if sys.version_info >= (3, 7):  # pragma: no cover
    for cls, format in [(date, "date"), (datetime, "date-time"), (time, "time")]:
@dataclass
class Data:
    id: int
    content: str

    @property
    def size(self) -> int:
        return len(self.content)

    def get_details(self) -> Any:
        ...


# Serialization fields can be a str/field or a function/method/property
size_only = object_serialization(
    Data, [get_field(Data).id, Data.size], type_name("DataSize")
)
# ["id", Data.size] would also work


def complete_data():
    return [
        ...,  # shortcut to include all the fields
        Data.size,
        (Data.get_details, alias("details")),  # add/override metadata using tuple
    ]


# Serialization fields computation can be deferred in a function
# The serialization name will then be defaulted to the function name
complete = object_serialization(Data, complete_data)
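# A minimal usage sketch, not part of the original snippet: serializing with the
# size_only conversion is assumed to keep only the "id" field and the computed
# "size" property, with field names taken from the definitions above.
from apischema import serialize

assert serialize(Data, Data(id=1, content="foo"), conversion=size_only) == {
    "id": 1,
    "size": 3,
}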
@dataclass
class Foo:
    pass


@dataclass
class Bar:
    pass


def foo_to_bar(_: Foo) -> Bar:
    return Bar()


type_name("Bars")(list[Bar])

assert serialization_schema(list[Foo], conversion=foo_to_bar, all_refs=True) == {
    "$schema": "http://json-schema.org/draft/2019-09/schema#",
    "$ref": "#/$defs/Bars",
    "$defs": {
        # Bars is present because `list[Foo]` is dynamically converted to `list[Bar]`
        "Bars": {"type": "array", "items": {"$ref": "#/$defs/Bar"}},
        "Bar": {