def _decode_generic(type_, value, infer_missing):
    """Decode ``value`` into an instance of the generic (or Enum) ``type_``.

    Args:
        type_: the target type — a typing generic (collection / Optional)
            or an ``Enum`` subclass.
        value: the raw, already JSON-decoded value to convert.
        infer_missing: forwarded to the nested decoding helpers.

    Returns:
        The decoded value, or ``value`` unchanged when no conversion applies.
    """
    if value is None:
        res = value
    elif _issubclass_safe(type_, Enum):
        # BUGFIX: check Enum *before* the collection branch. Mixin enums such
        # as ``class Color(str, Enum)`` also satisfy ``_is_collection`` (note
        # that `_is_supported_generic` must explicitly exclude `str` for the
        # same reason) and would otherwise be mis-decoded as collections.
        # Convert to an Enum using the type as a constructor.
        # Assumes a direct match is found.
        res = type_(value)
    elif _is_collection(type_):
        if _is_mapping(type_):
            k_type, v_type = type_.__args__
            # a mapping type has `.keys()` and `.values()`
            # (see collections.abc)
            ks = _decode_dict_keys(k_type, value.keys(), infer_missing)
            vs = _decode_items(v_type, value.values(), infer_missing)
            xs = zip(ks, vs)
        else:
            xs = _decode_items(type_.__args__[0], value, infer_missing)

        # get the constructor if using corresponding generic type in `typing`
        # otherwise fallback on constructing using type_ itself
        try:
            res = _get_type_cons(type_)(xs)
        except TypeError:
            res = type_(xs)
    else:  # Optional — assumes type_.__args__[0] is the wrapped type
        type_arg = type_.__args__[0]
        if is_dataclass(type_arg) or is_dataclass(value):
            res = _decode_dataclass(type_arg, value, infer_missing)
        elif _is_supported_generic(type_arg):
            res = _decode_generic(type_arg, value, infer_missing)
        else:
            res = value
    return res
def _decode_generic(type_, value, infer_missing):
    """Decode ``value`` into an instance of the generic (or Enum) ``type_``.

    Dispatch order matters: Enum first (mixin enums can also look like
    collections), then collections, then Optional/Union.

    Args:
        type_: the target type — an ``Enum`` subclass, a typing collection
            generic, or an Optional/Union.
        value: the raw, already JSON-decoded value to convert.
        infer_missing: forwarded to the nested decoding helpers.

    Returns:
        The decoded value, or ``value`` unchanged when no conversion applies.
    """
    if value is None:
        return value

    if _issubclass_safe(type_, Enum):
        # Convert to an Enum using the type as a constructor. Assumes a
        # direct match is found.
        # FIXME this is a hack to fix a deeper underlying issue.
        # A refactor is due.
        return type_(value)

    if _is_collection(type_):
        if _is_mapping(type_):
            key_type, val_type = type_.__args__
            # A mapping type has `.keys()` and `.values()`
            # (see collections.abc).
            decoded_keys = _decode_dict_keys(
                key_type, value.keys(), infer_missing)
            decoded_vals = _decode_items(
                val_type, value.values(), infer_missing)
            items = zip(decoded_keys, decoded_vals)
        else:
            items = _decode_items(type_.__args__[0], value, infer_missing)
        # Prefer the constructor of the concrete type behind the `typing`
        # generic; fall back to calling `type_` itself.
        try:
            return _get_type_cons(type_)(items)
        except TypeError:
            return type_(items)

    # Optional or Union
    if _is_optional(type_) and len(type_.__args__) == 2:  # Optional
        wrapped = type_.__args__[0]
        if is_dataclass(wrapped) or is_dataclass(value):
            return _decode_dataclass(wrapped, value, infer_missing)
        if _is_supported_generic(wrapped):
            return _decode_generic(wrapped, value, infer_missing)
        return _support_extended_types(wrapped, value)

    # Union (already decoded or unsupported 'from_json' used)
    return value
def inner(type_, options):
    """Build a marshmallow field for ``type_`` (closure over ``field``,
    ``cls``, ``mixin`` and ``TYPES`` from the enclosing scope).

    Nested dataclasses get a ``fields.Nested`` schema; known generics are
    looked up in ``TYPES``; anything else degrades to a plain
    ``fields.Field`` with a warning.
    """
    if is_dataclass(type_):
        # Guard: a nested dataclass that doesn't carry the mixin cannot be
        # recursively serialized — warn and fall back to a generic field.
        if not _issubclass_safe(type_, mixin):
            warnings.warn(f"Nested dataclass field {field.name} of type "
                          f"{field.type} detected in "
                          f"{cls.__name__} that is not an instance of "
                          f"dataclass_json. Did you mean to recursively "
                          f"serialize this field? If so, make sure to "
                          f"augment {type_} with either the "
                          f"`dataclass_json` decorator or mixin.")
            return fields.Field(**options)
        # Collection-of-dataclass fields are marked "many".
        options['field_many'] = bool(
            _is_supported_generic(field.type) and _is_collection(field.type))
        return fields.Nested(type_.schema(), **options)

    origin = getattr(type_, '__origin__', type_)
    # Recursively build fields for each type argument of the generic.
    child_fields = [inner(arg, {}) for arg in getattr(type_, '__args__', [])]
    if origin in TYPES:
        return TYPES[origin](*child_fields, **options)

    warnings.warn(
        f"Unknown type {type_} at {cls.__name__}.{field.name}: {field.type} "
        f"It's advised to pass the correct marshmallow type to `mm_field`."
    )
    return fields.Field(**options)
def _inspect_nested_fields(fields_):
    """Collect the dataclass-valued fields from ``fields_``.

    Args:
        fields_: iterable of dataclass field descriptors (each exposing
            ``.type``).

    Returns:
        A list of ``(field, dataclass_type, is_many)`` tuples, where
        ``is_many`` is True when the dataclass is wrapped in a collection
        generic (e.g. ``List[SomeDataclass]``).
    """
    nested = []
    for f in fields_:
        if _is_supported_generic(f.type):
            wrapped = f.type.__args__[0]
            if is_dataclass(wrapped):
                # Collection generics (List[...], etc.) are "many".
                nested.append((f, wrapped, bool(_is_collection(f.type))))
        elif is_dataclass(f.type):
            nested.append((f, f.type, False))
    return nested
def inner(type_, options):
    """Build a marshmallow field for ``type_`` (closure over ``field``,
    ``cls``, ``mixin``, ``TYPES`` from the enclosing scope).

    Handles, in order: NewType unwrapping, nested dataclasses, known
    generics from ``TYPES``, Enums, and Unions; anything else degrades to a
    plain ``fields.Field`` with a warning.
    """
    # Unwrap typing.NewType aliases down to the underlying supertype.
    while _is_new_type(type_):
        type_ = type_.__supertype__

    if is_dataclass(type_):
        if _issubclass_safe(type_, mixin):
            # Collection-of-dataclass fields are marked "many".
            options['field_many'] = bool(
                _is_supported_generic(field.type) and
                _is_collection(field.type))
            return fields.Nested(type_.schema(), **options)
        # Nested dataclass without the mixin cannot be recursively
        # serialized — warn and fall back to a generic field.
        warnings.warn(f"Nested dataclass field {field.name} of type "
                      f"{field.type} detected in "
                      f"{cls.__name__} that is not an instance of "
                      f"dataclass_json. Did you mean to recursively "
                      f"serialize this field? If so, make sure to "
                      f"augment {type_} with either the "
                      f"`dataclass_json` decorator or mixin.")
        return fields.Field(**options)

    origin = getattr(type_, '__origin__', type_)
    # Recurse into the type arguments, skipping NoneType — optionality is
    # expressed via `allow_none` below instead.
    child_fields = [
        inner(arg, {})
        for arg in getattr(type_, '__args__', [])
        if arg is not type(None)
    ]
    if _is_optional(type_):
        options["allow_none"] = True

    if origin in TYPES:
        return TYPES[origin](*child_fields, **options)

    if _issubclass_safe(origin, Enum):
        return EnumField(enum=origin, by_value=True, *child_fields, **options)

    if is_union_type(type_):
        member_types = [
            arg for arg in getattr(type_, '__args__', [])
            if arg is not type(None)
        ]
        return _UnionField(dict(zip(member_types, child_fields)),
                           cls, field, **options)

    warnings.warn(
        f"Unknown type {type_} at {cls.__name__}.{field.name}: {field.type} "
        f"It's advised to pass the correct marshmallow type to `mm_field`."
    )
    return fields.Field(**options)
def _is_supported_generic(type_):
    """Return truthy when ``type_`` is a generic this module can decode:
    a non-``str`` collection, an Optional, a Union, or an Enum subclass.

    ``str`` is excluded explicitly because it otherwise qualifies as a
    collection.
    """
    is_non_str_collection = (not _issubclass_safe(type_, str)
                             and _is_collection(type_))
    return (is_non_str_collection
            or _is_optional(type_)
            or is_union_type(type_)
            or _issubclass_safe(type_, Enum))
def _is_supported_generic(type_):
    """Return truthy when ``type_`` is a generic this module can decode:
    a non-``str`` collection or an Optional.

    ``str`` is excluded explicitly because it otherwise qualifies as a
    collection.
    """
    is_non_str_collection = (not _issubclass_safe(type_, str)
                             and _is_collection(type_))
    return is_non_str_collection or _is_optional(type_)