def _fields_from_dict(cls: Type[JSONT], data: DictS, parent) -> DictS:
    """Collect constructor kwargs for *cls* from a raw ``data`` dict.

    Each dataclass field is looked up in ``data`` via ``cls.aliases`` (a
    string key or a tuple path of nested keys); missing entries are simply
    skipped.  Fields whose annotation marks them as the parent reference
    receive ``parent`` directly instead of being loaded from ``data``.

    Raises:
        JSONLoadError: if ``data`` is not a dict.
    """
    if not isinstance(data, dict):
        raise JSONLoadError(data, dict, "Expected dict")
    collected = {}
    for fld in get_fields(cls):
        if parent and annotation_is_parent(fld.type):
            collected[fld.name] = parent
            continue
        # Resolve the lookup path: a plain string becomes a one-step path.
        alias = cls.aliases.get(fld.name, fld.name)
        parts = (alias,) if isinstance(alias, str) else alias
        cursor = data
        for segment in parts:
            cursor = cursor.get(segment, _Missing)
            if cursor is _Missing:
                break
        if cursor is not _Missing:
            collected[fld.name] = cls._load(fld.type, cursor, parent, fld.name)
    return collected
def dataclass2json(self, schema: Any) -> Dict[str, Any]:
    """Extract the object representation of a dataclass schema.

    Builds a JSON-schema-style dict ("type": "object") from the dataclass
    fields: each field contributes its ``field2json`` property under every
    name produced by ``fields.field_ops``, and fields flagged with
    ``fields.REQUIRED`` metadata are listed under "required".

    Raises:
        InvalidSpecException: if *schema* is not a dataclass.
    """
    info = cast(TypingInfo, TypingInfo.get(schema))
    if not info or not info.is_dataclass:
        raise InvalidSpecException(
            "Schema must be a dataclass, got "
            f"{info.element if info else None}")
    props: Dict[str, Any] = {}
    mandatory = []
    for fld in get_fields(info.element):
        if fld.metadata.get(fields.REQUIRED, False):
            mandatory.append(fld.name)
        prop = self.field2json(fld)
        if prop:
            for alias in fields.field_ops(fld):
                props[alias] = prop
    schema_obj: Dict[str, Any] = {
        "type": "object",
        "description": trim_docstring(schema.__doc__ or ""),
        "properties": props,
        "additionalProperties": False,
    }
    if mandatory:
        schema_obj["required"] = mandatory
    return schema_obj
def __call__(self, klass):
    """Class decorator: attach a ``to_json`` method to the dataclass *klass*.

    ``to_json`` returns a dict mapping field names to their
    ``_extract_nested``-processed values.  In ``compat`` mode, fields still
    holding their default value are omitted.  Leading-underscore fields are
    excluded when no explicit field list was configured.

    Raises:
        TypeError: if *klass* is not a dataclass.
    """
    if not is_dataclass(klass):
        raise TypeError(
            f'{self.__class__.__name__} only accepts dataclasses, '
            f'got {klass.__name__}'
        )
    default_map = {
        f.name: f.default_factory() if callable(f.default_factory) else f.default
        for f in get_fields(klass)
    }
    # BUG FIX: the original wrote the computed tuple back to ``self.fields``,
    # so a single decorator instance applied to a second dataclass silently
    # reused the first class's field names (and the closures late-bound
    # ``self.fields``).  Keep the selection in a local instead — this also
    # matches the typed variant of this decorator elsewhere in the file.
    if self.fields is None:
        selected = tuple(k for k in default_map if not k.startswith('_'))
    else:
        selected = self.fields
    if self.compat:
        def to_json(data_obj):
            # compat mode: emit only fields that differ from their default
            return {
                k: self._extract_nested(getattr(data_obj, k))
                for k in selected
                if default_map[k] != getattr(data_obj, k)
            }
    else:
        def to_json(data_obj):
            return {
                k: self._extract_nested(getattr(data_obj, k))
                for k in selected
            }
    klass.to_json = to_json
    return klass
def __rich__(self):  # pragma: no cover
    """Return a copy of this dataclass with Mapping-valued init fields
    converted to plain ``dict``s, or ``self`` unchanged when ``rich_fix``
    is falsy.  (Hook used by rich's rendering protocol.)"""
    if not self.rich_fix:
        return self
    converted = {}
    for spec in get_fields(self):
        current = getattr(self, spec.name)
        if spec.init and isinstance(current, Mapping):
            converted[spec.name] = dict(current)
    return replace(self, **converted)
def products(delivery):
    """Build an xlsx export for *delivery*: one sheet listing its products,
    one sheet listing its producers, and return the serialized workbook
    bytes (via ``save_virtual_workbook``)."""
    workbook = Workbook()
    product_sheet = workbook.active
    product_sheet.title = f"{delivery.name} produits"
    # Header row is the Product dataclass field names, then one row per product.
    product_cols = [f.name for f in get_fields(Product)]
    product_sheet.append(product_cols)
    for item in delivery.products:
        product_sheet.append([getattr(item, col) for col in product_cols])
    producer_sheet = workbook.create_sheet(f"producteur⋅ice⋅s et référent⋅e⋅s")
    producer_cols = [f.name for f in get_fields(Producer)]
    producer_sheet.append(producer_cols)
    for producer in delivery.producers.values():
        producer_sheet.append(
            [getattr(producer, col) for col in producer_cols])
    return save_virtual_workbook(workbook)
def __call__(self, klass: t.Type[T]) -> t.Type[T]:
    """Class decorator: attach a ``to_json`` method to the dataclass *klass*.

    ``to_json`` maps field names to their ``_extract_nested``-processed
    values; in ``compat`` mode, fields still holding their default value
    are omitted.  Leading-underscore fields are excluded when no explicit
    field list was configured.

    Raises:
        TypeError: if *klass* is not a dataclass.
    """
    if not is_dataclass(klass):
        raise TypeError(
            f"{self.__class__.__name__} only accepts dataclasses, "
            f"got {klass.__name__}")
    defaults = {
        f.name: f.default_factory() if callable(f.default_factory) else f.default
        for f in get_fields(klass)
    }
    if self.fields is not None:
        selected = self.fields
    else:
        selected = tuple(name for name in defaults if not name.startswith("_"))
    if self.compat:
        def to_json_compat(data_obj: t.Any):
            # compat mode: emit only fields that differ from their default
            serialized = {}
            for name in selected:
                current = getattr(data_obj, name)
                if defaults[name] != current:
                    serialized[name] = self._extract_nested(current)
            return serialized
        klass.to_json = to_json_compat  # type: ignore
    else:
        def to_json(data_obj: t.Any):
            return {
                name: self._extract_nested(getattr(data_obj, name))
                for name in selected
            }
        klass.to_json = to_json  # type: ignore
    return klass
def _load(
    cls,
    annotation: Any,
    value: Any,
    parent: Optional["JSON"] = None,
    field_name: Optional[str] = None,
) -> Any:
    """Recursively coerce *value* into the Python shape described by *annotation*.

    Handles dataclasses (including JSON subclasses), Unions, mappings
    (with a special case for Counter), tuples and other collections;
    anything else is returned as-is after ``_apply_loader`` /
    ``_auto_cast`` processing.  ``parent`` is threaded through so nested
    dataclass fields flagged by ``annotation_is_parent`` receive a
    back-reference; ``field_name`` is forwarded to ``_apply_loader`` —
    presumably for per-field loader lookup (confirm against
    ``_apply_loader``).
    """
    # Strip typing.Annotated wrappers, then unwrap TypeVar bounds and
    # generic origins so `typo` is a plain runtime class where possible.
    typ = unwrap_annotated(annotation)
    typo = getattr(typ, "__bound__", typ)
    typo = getattr(typo, "__origin__", typo)
    # Give any registered custom loader first crack at the raw value.
    value = cls._apply_loader(typ, value, parent, field_name)
    if typo is Union:
        # Pick the Union member that can actually load this value and
        # re-derive typ/typo from it.
        loadable = cls._get_loadable_type(annotation, value)
        if loadable:
            typ = unwrap_annotated(loadable)
            typo = getattr(typ, "__bound__", typ)
            typo = getattr(typo, "__origin__", typo)
    datacls = is_dataclass(typ)
    # JSON-aware dataclasses know how to build themselves from a mapping.
    if datacls and is_subclass(typ, JSON) and isinstance(value, Mapping):
        return typ.from_dict(value, parent)
    # Plain dataclasses: recursively load each field present in the
    # mapping; parent-reference fields get `parent` directly.
    if datacls and isinstance(value, Mapping):
        return typ(**{
            f.name: parent if parent and annotation_is_parent(f.type)
            else cls._load(f.type, value[f.name], field_name=f.name)
            for f in get_fields(typ)
            if f.name in value
        })
    value = cls._auto_cast(typ, value)
    # Unresolved unions/literals and string/forward-ref annotations cannot
    # be instantiated — hand back the (possibly cast) value unchanged.
    if typo in (Union, Literal) or isinstance(typo, (str, ForwardRef)):
        return value
    if is_subclass(typo, Mapping) and isinstance(value, Mapping):
        if is_subclass(typo, Counter):
            # Counter[K] parameterises only the key; counts are always int.
            key_type = getattr(typ, "__args__", (Any,))[0]
            value_type = int
        else:
            key_type, value_type = getattr(typ, "__args__", (Any, Any))
        key_type_origin = getattr(key_type, "__origin__", key_type)
        dct = {
            # Non-str keys are assumed to have been JSON-encoded when the
            # mapping was serialized, so decode them before loading.
            cls._load(
                key_type,
                k if is_subclass(key_type_origin, str) else json.loads(k),
                parent,
            ): cls._load(value_type, v, parent)
            for k, v in value.items()
        }
        try:
            # Concrete mapping classes are reconstructed; abstract typing
            # aliases fall through to the raw dict below.
            return getattr(typ, "__origin__", typ)(dct)
        except TypeError:  # happens for typing.Mapping/Dict/etc
            return dct
    # str/bytes are Collections too — return them here before the generic
    # collection branch would iterate them element by element.
    if isinstance(value, (str, bytes)):
        return value
    if is_subclass(typo, Collection) and isinstance(value, Collection):
        items: Collection
        if is_subclass(typo, tuple):
            item_types = getattr(typ, "__args__", (Any, ...))
            # Tuple[X, ...] applies X to every element; a fixed-shape
            # tuple pairs each element with its own annotation by index.
            items = tuple(
                cls._load(
                    item_types[0 if ...
                               in item_types else i],
                    v,
                    parent,
                )
                for i, v in enumerate(value)
            )
        else:
            item_type = getattr(typ, "__args__", (Any,))[0]
            items = [cls._load(item_type, v, parent) for v in value]
        try:
            return getattr(typ, "__origin__", typ)(items)
        except TypeError:  # happens for typing.List/Tuple/etc
            return items
    return value
def fields(self):
    """Return `tuple` of the field names."""
    names = []
    for spec in get_fields(self):
        names.append(spec.name)
    return tuple(names)