def inner(type_, options):
    while True:
        if not _is_new_type(type_):
            break

        type_ = type_.__supertype__

    if is_dataclass(type_):
        if _issubclass_safe(type_, mixin):
            options['field_many'] = bool(
                _is_supported_generic(field.type) and _is_collection(field.type))
            return fields.Nested(type_.schema(), **options)
        else:
            warnings.warn(f"Nested dataclass field {field.name} of type "
                          f"{field.type} detected in "
                          f"{cls.__name__} that is not an instance of "
                          f"dataclass_json. Did you mean to recursively "
                          f"serialize this field? If so, make sure to "
                          f"augment {type_} with either the "
                          f"`dataclass_json` decorator or mixin.")
            return fields.Field(**options)

    origin = getattr(type_, '__origin__', type_)
    args = [inner(a, {}) for a in getattr(type_, '__args__', [])
            if a is not type(None)]

    if _is_optional(type_):
        options["allow_none"] = True

    if origin in TYPES:
        return TYPES[origin](*args, **options)

    if _issubclass_safe(origin, Enum):
        return EnumField(enum=origin, by_value=True, *args, **options)

    if is_union_type(type_):
        union_types = [a for a in getattr(type_, '__args__', [])
                       if a is not type(None)]
        union_desc = dict(zip(union_types, args))
        return _UnionField(union_desc, cls, field, **options)

    warnings.warn(
        f"Unknown type {type_} at {cls.__name__}.{field.name}: {field.type} "
        f"It's advised to pass the correct marshmallow type to `mm_field`.")
    return fields.Field(**options)

def _decode_generic(type_, value, infer_missing):
    if value is None:
        res = value
    elif _is_collection(type_):
        if _is_mapping(type_):
            k_type, v_type = type_.__args__
            # a mapping type has `.keys()` and `.values()`
            # (see collections.abc)
            ks = _decode_dict_keys(k_type, value.keys(), infer_missing)
            vs = _decode_items(v_type, value.values(), infer_missing)
            xs = zip(ks, vs)
        else:
            xs = _decode_items(type_.__args__[0], value, infer_missing)

        # get the constructor if using corresponding generic type in `typing`
        # otherwise fallback on constructing using type_ itself
        try:
            res = _get_type_cons(type_)(xs)
        except TypeError:
            res = type_(xs)
    elif _issubclass_safe(type_, Enum):
        # Convert to an Enum using the type as a constructor.
        # Assumes a direct match is found.
        res = type_(value)
    else:  # Optional
        type_arg = type_.__args__[0]
        if is_dataclass(type_arg) or is_dataclass(value):
            res = _decode_dataclass(type_arg, value, infer_missing)
        elif _is_supported_generic(type_arg):
            res = _decode_generic(type_arg, value, infer_missing)
        else:
            res = value
    return res

def _decode_generic(type_, value, infer_missing):
    if value is None:
        res = value
    elif _issubclass_safe(type_, Enum):
        # Convert to an Enum using the type as a constructor.
        # Assumes a direct match is found.
        res = type_(value)
    # FIXME this is a hack to fix a deeper underlying issue. A refactor is due.
    elif _is_collection(type_):
        if _is_mapping(type_):
            k_type, v_type = type_.__args__
            # a mapping type has `.keys()` and `.values()`
            # (see collections.abc)
            ks = _decode_dict_keys(k_type, value.keys(), infer_missing)
            vs = _decode_items(v_type, value.values(), infer_missing)
            xs = zip(ks, vs)
        else:
            xs = _decode_items(type_.__args__[0], value, infer_missing)

        # get the constructor if using corresponding generic type in `typing`
        # otherwise fallback on constructing using type_ itself
        try:
            res = _get_type_cons(type_)(xs)
        except TypeError:
            res = type_(xs)
    else:  # Optional or Union
        if _is_optional(type_) and len(type_.__args__) == 2:  # Optional
            type_arg = type_.__args__[0]
            if is_dataclass(type_arg) or is_dataclass(value):
                res = _decode_dataclass(type_arg, value, infer_missing)
            elif _is_supported_generic(type_arg):
                res = _decode_generic(type_arg, value, infer_missing)
            else:
                res = _support_extended_types(type_arg, value)
        else:  # Union (already decoded or unsupported 'from_json' used)
            res = value
    return res

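# Illustrative sketch (not part of the library source): how the branches above map
# onto field types of a decorated dataclass. The class and field names below are
# invented; this assumes the public dataclass_json API and a version whose
# _is_supported_generic treats Enum as supported (so enum members reach the Enum
# branch of _decode_generic).
from dataclasses import dataclass, field
from enum import Enum
from typing import List, Optional

from dataclasses_json import dataclass_json


class Color(Enum):
    RED = "red"
    BLUE = "blue"


@dataclass_json
@dataclass
class Palette:
    primary: Color                                 # Enum branch: Color("red")
    tags: List[str] = field(default_factory=list)  # collection branch
    weight: Optional[int] = None                   # Optional branch

palette = Palette.from_dict({"primary": "red", "tags": ["warm"], "weight": 3})
# palette.primary is Color.RED; tags stays a list; weight stays an int
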
def inner(type_, options):
    if is_dataclass(type_):
        if _issubclass_safe(type_, mixin):
            options['field_many'] = bool(
                _is_supported_generic(field.type) and _is_collection(field.type))
            return fields.Nested(type_.schema(), **options)
        else:
            warnings.warn(f"Nested dataclass field {field.name} of type "
                          f"{field.type} detected in "
                          f"{cls.__name__} that is not an instance of "
                          f"dataclass_json. Did you mean to recursively "
                          f"serialize this field? If so, make sure to "
                          f"augment {type_} with either the "
                          f"`dataclass_json` decorator or mixin.")
            return fields.Field(**options)

    origin = getattr(type_, '__origin__', type_)
    args = [inner(a, {}) for a in getattr(type_, '__args__', [])]

    if origin in TYPES:
        return TYPES[origin](*args, **options)
    warnings.warn(
        f"Unknown type {type_} at {cls.__name__}.{field.name}: {field.type} "
        f"It's advised to pass the correct marshmallow type to `mm_field`.")
    return fields.Field(**options)

def _support_extended_types(field_type, field_value):
    if _issubclass_safe(field_type, datetime):
        # FIXME this is a hack to deal with mm already decoding
        # the issue is we want to leverage mm fields' missing argument
        # but need this for the object creation hook
        if isinstance(field_value, datetime):
            res = field_value
        else:
            tz = datetime.now(timezone.utc).astimezone().tzinfo
            res = datetime.fromtimestamp(field_value, tz=tz)
    elif _issubclass_safe(field_type, Decimal):
        res = (field_value
               if isinstance(field_value, Decimal)
               else Decimal(field_value))
    elif _issubclass_safe(field_type, UUID):
        res = (field_value
               if isinstance(field_value, UUID)
               else UUID(field_value))
    else:
        res = field_value
    return res

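# Illustrative sketch (not part of the library source): the helper above coerces
# "extended" scalar types that arrive as plain JSON values. Optional fields reach
# it via the Optional branch of _decode_generic; required datetime/Decimal/UUID
# fields are coerced by the equivalent branches in _decode_dataclass below. The
# names and values here are invented; this assumes the public dataclass_json
# decorator API.
from dataclasses import dataclass
from datetime import datetime
from decimal import Decimal
from typing import Optional
from uuid import UUID

from dataclasses_json import dataclass_json


@dataclass_json
@dataclass
class Receipt:
    issued: datetime                  # POSIX timestamp -> timezone-aware datetime
    ref: UUID                         # str -> UUID
    amount: Optional[Decimal] = None  # str -> Decimal via _support_extended_types

receipt = Receipt.from_dict({
    "issued": 1609459200.0,
    "ref": "12345678-1234-5678-1234-567812345678",
    "amount": "19.99",
})
# receipt.issued is an aware datetime; receipt.amount == Decimal("19.99")
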
def _serialize(self, value, attr, obj, **kwargs):
    if self.allow_none and value is None:
        return None
    for type_, schema_ in self.desc.items():
        if _issubclass_safe(type(value), type_):
            if is_dataclass(value):
                res = schema_._serialize(value, attr, obj, **kwargs)
                res['__type'] = str(type_.__name__)
                return res
            break
        elif isinstance(value, _get_type_origin(type_)):
            return schema_._serialize(value, attr, obj, **kwargs)
    else:
        warnings.warn(
            f'The type "{type(value).__name__}" (value: "{value}") '
            f'is not in the list of possible types of typing.Union '
            f'(dataclass: {self.cls.__name__}, field: {self.field.name}). '
            f'Value cannot be serialized properly.')
    return super()._serialize(value, attr, obj, **kwargs)

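# Illustrative sketch (not part of the library source): how the Union path above
# shows up through the generated marshmallow schema. When the runtime value is a
# dataclass member of the Union, its dump is tagged with a "__type" key so the
# concrete class can be recovered on load. The class names are invented; this
# assumes a dataclasses_json version whose schema() wires Union fields to the
# _UnionField shown here.
from dataclasses import dataclass
from typing import Union

from dataclasses_json import dataclass_json


@dataclass_json
@dataclass
class Cat:
    lives: int


@dataclass_json
@dataclass
class Dog:
    breed: str


@dataclass_json
@dataclass
class Owner:
    pet: Union[Cat, Dog]

print(Owner.schema().dump(Owner(pet=Cat(lives=9))))
# e.g. {'pet': {'lives': 9, '__type': 'Cat'}}
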
def _make_nested_fields(fields_, dataclass_json_cls, infer_missing):
    nested_fields = {}
    for field, type_, field_many in _inspect_nested_fields(fields_):
        if _issubclass_safe(type_, dataclass_json_cls):
            if infer_missing and _is_optional(field.type):
                schema = fields.Nested(type_.schema(),
                                       many=field_many,
                                       missing=None)
            else:
                schema = fields.Nested(type_.schema(), many=field_many)
            nested_fields[field.name] = schema
        else:
            warnings.warn(f"Nested dataclass field {field.name} of type "
                          f"{field.type} detected in "
                          f"{cls.__name__} that is not an instance of "
                          f"dataclass_json. Did you mean to recursively "
                          f"serialize this field? If so, make sure to "
                          f"augment {field.type} with either the "
                          f"`dataclass_json` decorator or mixin.")
    return nested_fields

def _is_supported_generic(type_):
    not_str = not _issubclass_safe(type_, str)
    is_enum = _issubclass_safe(type_, Enum)
    return ((not_str and _is_collection(type_))
            or _is_optional(type_)
            or is_union_type(type_)
            or is_enum)

def _decode_dataclass(cls, kvs, infer_missing):
    overrides = _overrides(cls)
    kvs = {} if kvs is None and infer_missing else kvs
    missing_fields = {field for field in fields(cls) if field.name not in kvs}
    for field in missing_fields:
        if field.default is not MISSING:
            kvs[field.name] = field.default
        elif field.default_factory is not MISSING:
            kvs[field.name] = field.default_factory()
        elif infer_missing:
            kvs[field.name] = None

    init_kwargs = {}
    for field in fields(cls):
        field_value = kvs.get(field.name)
        if field_value is None and not _is_optional(field.type):
            warning = (f"value of non-optional type {field.name} detected "
                       f"when decoding {cls.__name__}")
            if infer_missing:
                warnings.warn(
                    f"Missing {warning} and was defaulted to None by "
                    f"infer_missing=True. "
                    f"Set infer_missing=False (the default) to prevent this "
                    f"behavior.", RuntimeWarning)
            else:
                warnings.warn(f"`NoneType` object {warning}.", RuntimeWarning)
            init_kwargs[field.name] = field_value
        elif (field.name in overrides
              and overrides[field.name].decoder is not None):
            # FIXME hack
            if field.type is type(field_value):
                init_kwargs[field.name] = field_value
            else:
                init_kwargs[field.name] = overrides[field.name].decoder(
                    field_value)
        elif is_dataclass(field.type):
            # FIXME this is a band-aid to deal with the value already being
            # serialized when handling nested marshmallow schema
            # proper fix is to investigate the marshmallow schema generation
            # code
            if is_dataclass(field_value):
                value = field_value
            else:
                value = _decode_dataclass(field.type, field_value,
                                          infer_missing)
            init_kwargs[field.name] = value
        elif _is_supported_generic(field.type) and field.type != str:
            init_kwargs[field.name] = _decode_generic(field.type,
                                                      field_value,
                                                      infer_missing)
        elif _issubclass_safe(field.type, datetime):
            # FIXME this is a hack to deal with mm already decoding
            # the issue is we want to leverage mm fields' missing argument
            # but need this for the object creation hook
            if isinstance(field_value, datetime):
                dt = field_value
            else:
                dt = date_parser.parse(field_value)
            init_kwargs[field.name] = dt
        elif _issubclass_safe(field.type, UUID):
            init_kwargs[field.name] = (field_value
                                       if isinstance(field_value, UUID)
                                       else UUID(field_value))
        elif _issubclass_safe(field.type, Decimal):
            # check against Decimal (the original compared against UUID here,
            # which would re-wrap existing Decimal values)
            init_kwargs[field.name] = (field_value
                                       if isinstance(field_value, Decimal)
                                       else Decimal(field_value))
        else:
            init_kwargs[field.name] = field_value
    return cls(**init_kwargs)

def _decode_dataclass(cls, kvs, infer_missing):
    if isinstance(kvs, cls):
        return kvs
    overrides = _user_overrides(cls)
    kvs = {} if kvs is None and infer_missing else kvs
    field_names = [field.name for field in fields(cls)]
    decode_names = _decode_letter_case_overrides(field_names, overrides)
    kvs = {decode_names.get(k, k): v for k, v in kvs.items()}
    missing_fields = {field for field in fields(cls) if field.name not in kvs}

    for field in missing_fields:
        if field.default is not MISSING:
            kvs[field.name] = field.default
        elif field.default_factory is not MISSING:
            kvs[field.name] = field.default_factory()
        elif infer_missing:
            kvs[field.name] = None

    init_kwargs = {}
    types = get_type_hints(cls)
    for field in fields(cls):
        # The field should be skipped from being added
        # to init_kwargs as it's not intended as a constructor argument.
        if not field.init:
            continue

        field_value = kvs[field.name]
        field_type = types[field.name]
        if field_value is None and not _is_optional(field_type):
            warning = (f"value of non-optional type {field.name} detected "
                       f"when decoding {cls.__name__}")
            if infer_missing:
                warnings.warn(
                    f"Missing {warning} and was defaulted to None by "
                    f"infer_missing=True. "
                    f"Set infer_missing=False (the default) to prevent this "
                    f"behavior.", RuntimeWarning)
            else:
                warnings.warn(f"`NoneType` object {warning}.", RuntimeWarning)
            init_kwargs[field.name] = field_value
            continue

        while True:
            if not _is_new_type(field_type):
                break

            field_type = field_type.__supertype__

        if (field.name in overrides
                and overrides[field.name].decoder is not None):
            # FIXME hack
            if field_type is type(field_value):
                init_kwargs[field.name] = field_value
            else:
                init_kwargs[field.name] = overrides[field.name].decoder(
                    field_value)
        elif is_dataclass(field_type):
            # FIXME this is a band-aid to deal with the value already being
            # serialized when handling nested marshmallow schema
            # proper fix is to investigate the marshmallow schema generation
            # code
            if is_dataclass(field_value):
                value = field_value
            else:
                value = _decode_dataclass(field_type, field_value,
                                          infer_missing)
            init_kwargs[field.name] = value
        elif _is_supported_generic(field_type) and field_type != str:
            init_kwargs[field.name] = _decode_generic(field_type,
                                                      field_value,
                                                      infer_missing)
        elif _issubclass_safe(field_type, datetime):
            # FIXME this is a hack to deal with mm already decoding
            # the issue is we want to leverage mm fields' missing argument
            # but need this for the object creation hook
            if isinstance(field_value, datetime):
                dt = field_value
            else:
                tz = datetime.now(timezone.utc).astimezone().tzinfo
                dt = datetime.fromtimestamp(field_value, tz=tz)
            init_kwargs[field.name] = dt
        elif _issubclass_safe(field_type, Decimal):
            init_kwargs[field.name] = (field_value
                                       if isinstance(field_value, Decimal)
                                       else Decimal(field_value))
        elif _issubclass_safe(field_type, UUID):
            init_kwargs[field.name] = (field_value
                                       if isinstance(field_value, UUID)
                                       else UUID(field_value))
        else:
            init_kwargs[field.name] = field_value

    return cls(**init_kwargs)

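# Illustrative sketch (not part of the library source): the decode entry point
# above is what from_dict()/from_json() ultimately call. Missing keys fall back to
# defaults or default_factory; with infer_missing=True any still-missing field is
# filled with None (and a RuntimeWarning is emitted if that field is not Optional).
# The names below are invented; this assumes the public dataclass_json decorator API.
from dataclasses import dataclass, field
from typing import List, Optional

from dataclasses_json import dataclass_json


@dataclass_json
@dataclass
class Address:
    city: str


@dataclass_json
@dataclass
class Person:
    name: str
    address: Address                                     # nested dataclass branch
    nicknames: List[str] = field(default_factory=list)  # default_factory fallback
    age: Optional[int] = None                            # Optional, so no warning

person = Person.from_dict({"name": "Alice", "address": {"city": "Oslo"}},
                          infer_missing=True)
# person.address is an Address instance; person.nicknames == []; person.age is None
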
def _is_supported_generic(type_):
    not_str = not _issubclass_safe(type_, str)
    return (not_str and _is_collection(type_)) or _is_optional(type_)