def inner(type_, options):
    # Resolve a field's type annotation to a marshmallow field instance.
    # Dataclass types carrying the mixin become Nested schemas; known
    # generics dispatch through TYPES; anything else degrades to a plain
    # Field with a warning.
    if is_dataclass(type_):
        if not _issubclass_safe(type_, mixin):
            # Nested dataclass without the serialization mixin — we cannot
            # build a schema for it, so fall back to an untyped Field.
            warnings.warn(f"Nested dataclass field {field.name} of type "
                          f"{field.type} detected in "
                          f"{cls.__name__} that is not an instance of "
                          f"dataclass_json. Did you mean to recursively "
                          f"serialize this field? If so, make sure to "
                          f"augment {type_} with either the "
                          f"`dataclass_json` decorator or mixin.")
            return fields.Field(**options)
        is_many = _is_supported_generic(field.type) and _is_collection(field.type)
        options['field_many'] = bool(is_many)
        return fields.Nested(type_.schema(), **options)
    base = getattr(type_, '__origin__', type_)
    # Recurse into the generic's type arguments (empty for plain types).
    child_fields = [inner(arg, {}) for arg in getattr(type_, '__args__', [])]
    if base in TYPES:
        return TYPES[base](*child_fields, **options)
    warnings.warn(
        f"Unknown type {type_} at {cls.__name__}.{field.name}: {field.type} "
        f"It's advised to pass the correct marshmallow type to `mm_field`."
    )
    return fields.Field(**options)
def _inspect_nested_fields(fields_):
    """Collect the nested-dataclass fields out of *fields_*.

    Returns a list of ``(field, nested_dataclass_type, is_many)`` triples:
    one per field whose type is a dataclass, either directly or as the
    first argument of a supported generic (``is_many`` is True when that
    generic is a collection).
    """
    result = []
    for fld in fields_:
        if _is_supported_generic(fld.type):
            inner_type = fld.type.__args__[0]
            if is_dataclass(inner_type):
                result.append((fld, inner_type, bool(_is_collection(fld.type))))
        elif is_dataclass(fld.type):
            result.append((fld, fld.type, False))
    return result
def __coerce_value(cls, typ: type, value):
    """Coerce *value* toward the annotated type *typ* for serialization.

    Handles numpy scalars/arrays, ``Base`` objects destined for dict slots,
    and supported generics; anything else passes through unchanged.
    """
    if isinstance(value, np.generic):
        # Handle numpy types
        return value.item()
    elif hasattr(value, 'tolist'):
        # tolist converts scalar or array to native python type if not already native.
        # BUG FIX: the original called value() — calling the object itself,
        # a TypeError for ndarrays — instead of invoking the tolist method.
        return value.tolist()
    elif typ in (DictBase, Optional[DictBase]) and isinstance(value, Base):
        return value.to_dict()
    if _is_supported_generic(typ):
        return _decode_generic(typ, value, False)
    else:
        return value
def inner(type_, options):
    # Map a (possibly NewType/Optional/Enum/Union/generic) annotation to a
    # marshmallow field, recursing into the type's arguments.
    # First unwrap NewType aliases down to the underlying supertype.
    while _is_new_type(type_):
        type_ = type_.__supertype__
    if is_dataclass(type_):
        if not _issubclass_safe(type_, mixin):
            # Nested dataclass without the serialization mixin — cannot
            # build a schema, so degrade to an untyped Field.
            warnings.warn(f"Nested dataclass field {field.name} of type "
                          f"{field.type} detected in "
                          f"{cls.__name__} that is not an instance of "
                          f"dataclass_json. Did you mean to recursively "
                          f"serialize this field? If so, make sure to "
                          f"augment {type_} with either the "
                          f"`dataclass_json` decorator or mixin.")
            return fields.Field(**options)
        options['field_many'] = bool(
            _is_supported_generic(field.type) and _is_collection(field.type))
        return fields.Nested(type_.schema(), **options)
    origin = getattr(type_, '__origin__', type_)
    type_args = getattr(type_, '__args__', [])
    # NoneType is represented by allow_none below, not as a child field.
    sub_fields = [inner(arg, {}) for arg in type_args if arg is not type(None)]
    if _is_optional(type_):
        options["allow_none"] = True
    if origin in TYPES:
        return TYPES[origin](*sub_fields, **options)
    if _issubclass_safe(origin, Enum):
        return EnumField(enum=origin, by_value=True, *sub_fields, **options)
    if is_union_type(type_):
        non_none = [arg for arg in type_args if arg is not type(None)]
        return _UnionField(dict(zip(non_none, sub_fields)), cls, field, **options)
    warnings.warn(
        f"Unknown type {type_} at {cls.__name__}.{field.name}: {field.type} "
        f"It's advised to pass the correct marshmallow type to `mm_field`."
    )
    return fields.Field(**options)
def _decode_generic_subsets(type_, value, infer_missing):
    """Decode *value* into an instance of the generic *type_*.

    Like ``_decode_generic`` but, for non-mapping collections, each element's
    concrete dataclass is resolved per item via ``getSubclass`` rather than
    taken from a fixed type argument — presumably to support polymorphic
    element types; verify against ``getSubclass``'s definition.
    """
    if value is None:
        res = value
    elif _issubclass_safe(type_, Enum):
        # Convert to an Enum using the type as a constructor.
        # Assumes a direct match is found.
        res = type_(value)
    # FIXME this is a hack to fix a deeper underlying issue. A refactor is due.
    elif _is_collection(type_):
        if _is_mapping(type_):
            # Default to (Any, Any) when the mapping carries no type args.
            k_type, v_type = getattr(type_, "__args__", (Any, Any))
            # a mapping type has `.keys()` and `.values()`
            # (see collections.abc)
            ks = _decode_dict_keys(k_type, value.keys(), infer_missing)
            vs = _decode_items(v_type, value.values(), infer_missing)
            xs = zip(ks, vs)
        else:
            # Lazily decode each element into the subclass chosen for it.
            xs = (_decode_dataclass(getSubclass(type_, v), v, infer_missing)
                  for v in value)

        # get the constructor if using corresponding generic type in `typing`
        # otherwise fallback on constructing using type_ itself
        try:
            res = _get_type_cons(type_)(xs)
        except (TypeError, AttributeError):
            res = type_(xs)
    else:  # Optional or Union
        if not hasattr(type_, "__args__"):
            # Any, just accept
            res = value
        elif _is_optional(type_) and len(type_.__args__) == 2:  # Optional
            # Exactly Optional[X]: decode against the single real argument.
            type_arg = type_.__args__[0]
            if is_dataclass(type_arg) or is_dataclass(value):
                res = _decode_dataclass(type_arg, value, infer_missing)
            elif _is_supported_generic(type_arg):
                res = _decode_generic(type_arg, value, infer_missing)
            else:
                res = _support_extended_types(type_arg, value)
        else:  # Union (already decoded or unsupported 'from_json' used)
            res = value
    return res
def _decode_dataclass(cls, kvs, infer_missing):
    """Build an instance of dataclass *cls* from the mapping *kvs*.

    Honors user decoder overrides, fills missing fields from defaults (or
    None with ``infer_missing``), recurses into nested dataclasses and
    supported generics, and supports a ``"__cls__"`` payload marker that
    redirects decoding to another dataclass.
    """
    if isinstance(kvs, cls):
        return kvs
    # BUG FIX: normalise a None payload *before* the "__cls__" membership
    # test below — the original ran `"__cls__" in kvs` first, raising
    # TypeError when kvs was None with infer_missing=True.
    kvs = {} if kvs is None and infer_missing else kvs
    # >>> OVERRIDE TYPE
    # A "__cls__" marker of the form "module:Name" redirects decoding to
    # another dataclass (e.g. a subclass recorded at encode time).
    if "__cls__" in kvs and kvs["__cls__"] != cls.__module__ + ":" + cls.__name__:
        module, name = kvs["__cls__"].split(":")
        cls = getattr(importlib.import_module(module), name)
    # <<< END
    overrides = _user_overrides_or_exts(cls)
    field_names = [field.name for field in fields(cls)]
    decode_names = _decode_letter_case_overrides(field_names, overrides)
    kvs = {decode_names.get(k, k): v for k, v in kvs.items()}
    missing_fields = {field for field in fields(cls) if field.name not in kvs}
    for field in missing_fields:
        if field.default is not MISSING:
            kvs[field.name] = field.default
        elif field.default_factory is not MISSING:
            kvs[field.name] = field.default_factory()
        elif infer_missing:
            kvs[field.name] = None

    # Perform undefined parameter action
    kvs = _handle_undefined_parameters_safe(cls, kvs, usage="from")

    init_kwargs = {}
    types = get_type_hints(cls)
    for field in fields(cls):
        # The field should be skipped from being added
        # to init_kwargs as it's not intended as a constructor argument.
        if not field.init:
            continue

        field_value = kvs[field.name]
        field_type = types[field.name]

        # >>> Support for Generic Types
        if isinstance(field_type, TypeVar):
            # BUG FIX: every TypeVar *has* a __bound__ attribute (it is None
            # when no bound was declared), so the original hasattr() test was
            # always True — the warning was dead code and unbounded TypeVars
            # silently became the type None. Test the value instead.
            if field_type.__bound__ is None:
                warnings.warn(f"If using TypeVars, set the bound field for obtaining the default type. ")
            else:
                field_type = field_type.__bound__
        # <<< Support for Generic Types

        if field_value is None and not _is_optional(field_type):
            warning = (f"value of non-optional type {field.name} detected "
                       f"when decoding {cls.__name__}")
            if infer_missing:
                warnings.warn(
                    f"Missing {warning} and was defaulted to None by "
                    f"infer_missing=True. \n"
                    f"Set infer_missing=False (the default) to prevent this "
                    f"behavior.",
                    RuntimeWarning,
                )
            else:
                warnings.warn(f"`NoneType` object {warning}.", RuntimeWarning)
            init_kwargs[field.name] = field_value
            continue

        # Unwrap NewType aliases down to the underlying supertype.
        while _is_new_type(field_type):
            field_type = field_type.__supertype__

        if field.name in overrides and overrides[field.name].decoder is not None:
            # FIXME hack
            if field_type is type(field_value):
                init_kwargs[field.name] = field_value
            else:
                init_kwargs[field.name] = overrides[field.name].decoder(field_value)
        elif is_dataclass(field_type):
            # FIXME this is a band-aid to deal with the value already being
            # serialized when handling nested marshmallow schema
            # proper fix is to investigate the marshmallow schema generation
            # code
            if is_dataclass(field_value):
                value = field_value
            else:
                value = _decode_dataclass(field_type, field_value, infer_missing)
            init_kwargs[field.name] = value
        elif _is_supported_generic(field_type) and field_type != str:
            init_kwargs[field.name] = _decode_generic(field_type, field_value,
                                                      infer_missing)
        else:
            init_kwargs[field.name] = _support_extended_types(field_type,
                                                              field_value)

    return cls(**init_kwargs)
def _decode_dataclass(cls, kvs, infer_missing):
    """Build an instance of dataclass *cls* from the mapping *kvs*.

    Variant that additionally detects a ``'type'`` discriminator field —
    either on the first argument of a generic field type or on *cls*
    itself — and routes such fields through ``_decode_generic_subsets``.
    """
    if isinstance(kvs, cls):
        return kvs
    overrides = _user_overrides_or_exts(cls)
    kvs = {} if kvs is None and infer_missing else kvs
    field_names = [field.name for field in fields(cls)]
    decode_names = _decode_letter_case_overrides(field_names, overrides)
    kvs = {decode_names.get(k, k): v for k, v in kvs.items()}
    missing_fields = {field for field in fields(cls) if field.name not in kvs}
    for field in missing_fields:
        if field.default is not MISSING:
            kvs[field.name] = field.default
        elif field.default_factory is not MISSING:
            kvs[field.name] = field.default_factory()
        elif infer_missing:
            kvs[field.name] = None

    # Perform undefined parameter action
    kvs = _handle_undefined_parameters_safe(cls, kvs, usage="from")

    init_kwargs = {}
    types = get_type_hints(cls)
    for field in fields(cls):
        # The field should be skipped from being added
        # to init_kwargs as it's not intended as a constructor argument.
        if not field.init:
            continue
        # BUG FIX: removed dead `from typing import GenericMeta` here —
        # typing.GenericMeta was removed in Python 3.7, so the (unused)
        # import raised ImportError on every modern interpreter.

        field_value = kvs[field.name]
        field_type = types[field.name]
        # Decide whether this field's payload carries a 'type' discriminator.
        if _is_supported_generic(field_type) and field_type.__args__[0] != str:
            arg_type = field_type.__args__[0]
            # BUG FIX: dataclasses.fields() raises TypeError for a
            # non-dataclass argument, so guard with is_dataclass first.
            type_param = (is_dataclass(arg_type) and
                          'type' in [f.name for f in fields(arg_type)])
        elif 'type' in field_names:
            type_param = True
        else:
            type_param = False
        if field_value is None and not _is_optional(field_type):
            warning = (f"value of non-optional type {field.name} detected "
                       f"when decoding {cls.__name__}")
            if infer_missing:
                warnings.warn(
                    f"Missing {warning} and was defaulted to None by "
                    f"infer_missing=True. \n"
                    f"Set infer_missing=False (the default) to prevent this "
                    f"behavior.", RuntimeWarning)
            else:
                # Deliberately silent in this variant — NOTE(review):
                # the sibling implementation warns here; confirm intent.
                pass
            init_kwargs[field.name] = field_value
            continue

        # Unwrap NewType aliases down to the underlying supertype.
        while _is_new_type(field_type):
            field_type = field_type.__supertype__

        if (field.name in overrides
                and overrides[field.name].decoder is not None):
            # FIXME hack
            if field_type is type(field_value):
                init_kwargs[field.name] = field_value
            else:
                init_kwargs[field.name] = overrides[field.name].decoder(
                    field_value)
        elif is_dataclass(field_type):
            # FIXME this is a band-aid to deal with the value already being
            # serialized when handling nested marshmallow schema
            # proper fix is to investigate the marshmallow schema generation
            # code
            if is_dataclass(field_value):
                value = field_value
            else:
                value = _decode_dataclass(field_type, field_value,
                                          infer_missing)
            init_kwargs[field.name] = value
        elif _is_supported_generic(
                field_type) and field_type != str and not type_param:
            init_kwargs[field.name] = _decode_generic(field_type, field_value,
                                                      infer_missing)
        elif _is_supported_generic(
                field_type) and field_type.__args__[0] != str and type_param:
            init_kwargs[field.name] = _decode_generic_subsets(
                field_type, field_value, infer_missing)
        else:
            init_kwargs[field.name] = _support_extended_types(
                field_type, field_value)

    return cls(**init_kwargs)