Example #1
def _decode_generic(type_, value, infer_missing):
    if value is None:
        res = value
    elif _issubclass_safe(type_, Enum):
        # Convert to an Enum using the type as a constructor. Assumes a direct match is found.
        res = type_(value)
    # FIXME this is a hack to fix a deeper underlying issue. A refactor is due.
    elif _is_collection(type_):
        if _is_mapping(type_):
            k_type, v_type = type_.__args__
            # a mapping type has `.keys()` and `.values()` (see collections.abc)
            ks = _decode_dict_keys(k_type, value.keys(), infer_missing)
            vs = _decode_items(v_type, value.values(), infer_missing)
            xs = zip(ks, vs)
        else:
            xs = _decode_items(type_.__args__[0], value, infer_missing)

        # get the constructor if using corresponding generic type in `typing`
        # otherwise fallback on constructing using type_ itself
        try:
            res = _get_type_cons(type_)(xs)
        except TypeError:
            res = type_(xs)
    else:  # Optional or Union
        if _is_optional(type_) and len(type_.__args__) == 2:  # Optional
            type_arg = type_.__args__[0]
            if is_dataclass(type_arg) or is_dataclass(value):
                res = _decode_dataclass(type_arg, value, infer_missing)
            elif _is_supported_generic(type_arg):
                res = _decode_generic(type_arg, value, infer_missing)
            else:
                res = _support_extended_types(type_arg, value)
        else:  # Union (already decoded or unsupported 'from_json' used)
            res = value
    return res
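
The helper above is exercised indirectly whenever a dataclass field is annotated with an Enum, a typed collection, or an Optional. A minimal usage sketch through the public API (the class names are illustrative, and it assumes a dataclasses_json version that exposes from_dict and treats Enum as a supported generic, as in Example #9 below):

from dataclasses import dataclass, field
from enum import Enum
from typing import Dict, Optional

from dataclasses_json import dataclass_json


class Color(Enum):
    RED = "red"
    BLUE = "blue"


@dataclass_json
@dataclass
class Palette:
    primary: Color                                         # Enum branch: Color(value)
    name: Optional[str] = None                             # Optional branch
    counts: Dict[str, int] = field(default_factory=dict)   # mapping branch


# Each value is routed through _decode_generic according to its annotation.
palette = Palette.from_dict({"primary": "red", "name": "warm", "counts": {"red": 2}})
assert palette.primary is Color.RED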
Example #2
def schema(cls, mixin, infer_missing):
    schema = {}
    for field in dc_fields(cls):
        metadata = (field.metadata or {}).get('dataclasses_json', {})
        if 'mm_field' in metadata:
            schema[field.name] = metadata['mm_field']
        else:
            type_ = field.type
            options = {}
            missing_key = 'missing' if infer_missing else 'default'
            if field.default is not MISSING:
                options[missing_key] = field.default
            elif field.default_factory is not MISSING:
                options[missing_key] = field.default_factory

            if options.get(missing_key, ...) is None:
                options['allow_none'] = True

            if _is_optional(type_):
                options.setdefault(missing_key, None)
                options['allow_none'] = True
                if len(type_.__args__) == 2:
                    # Union[str, int, None] is optional too, but it has more than 1 typed field.
                    type_ = type_.__args__[0]

            t = build_type(type_, options, mixin, field, cls)
            # if type(t) is not fields.Field:  # If we use `isinstance` we would return nothing.
            schema[field.name] = t
    return schema
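
When a field carries an explicit marshmallow field under the 'dataclasses_json' metadata key, the branch above uses it verbatim instead of deriving one from the type. A small sketch (the Event class and the ISO format choice are illustrative; the raw metadata form read by the code above is used so no extra helper is needed, assuming marshmallow is installed):

from dataclasses import dataclass, field
from datetime import datetime
from typing import Optional

from dataclasses_json import dataclass_json
from marshmallow import fields


@dataclass_json
@dataclass
class Event:
    name: str
    # The mm_field entry short-circuits schema generation for this field.
    start: Optional[datetime] = field(
        default=None,
        metadata={'dataclasses_json': {'mm_field': fields.DateTime(format='iso')}})


event = Event.schema().load({"name": "launch", "start": "2021-06-01T12:00:00"})
assert isinstance(event.start, datetime)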
Example #3
def schema(cls, mixin, infer_missing):
    schema = {}
    for field in dc_fields(cls):
        if 'dataclasses_json' in (field.metadata or {}):
            schema[field.name] = field.metadata['dataclasses_json'].get(
                'mm_field')
        else:
            type_ = field.type
            options = {}
            missing_key = 'missing' if infer_missing else 'default'
            if field.default is not MISSING:
                options[missing_key] = field.default
            elif field.default_factory is not MISSING:
                options[missing_key] = field.default_factory

            if options.get(missing_key, ...) is None:
                options['allow_none'] = True

            if _is_optional(type_):
                options.setdefault(missing_key, None)
                type_ = type_.__args__[0]
                options['allow_none'] = True

            t = build_type(type_, options, mixin, field, cls)
            #if type(t) is not fields.Field:  # If we use `isinstance` we would return nothing.
            schema[field.name] = t
    return schema
Example #4
def _make_default_fields(fields_, cls, infer_missing):
    default_fields = {}
    for field in fields_:
        if field.default is not MISSING:
            default_fields[field.name] = _make_default_field(
                field.type, field.default, cls)
        elif field.default_factory is not MISSING:
            default_fields[field.name] = _make_default_field(
                field.type, field.default_factory, cls)
        elif _is_optional(field.type) and infer_missing:
            default_fields[field.name] = _make_default_field(
                field.type, None, cls)
    return default_fields
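
At the schema level these per-field defaults become marshmallow missing= values, so keys absent from the payload are filled in during load. A sketch of the observable behaviour (Settings is an illustrative name, and it assumes the mixin's schema() accepts infer_missing, as these helpers suggest):

from dataclasses import dataclass, field
from typing import List, Optional

from dataclasses_json import dataclass_json


@dataclass_json
@dataclass
class Settings:
    retries: int = 3                               # field.default
    tags: List[str] = field(default_factory=list)  # field.default_factory
    comment: Optional[str] = None                  # Optional, inferred as None


# Keys missing from the payload fall back to the defaults captured above.
settings = Settings.schema(infer_missing=True).load({})
assert settings.retries == 3 and settings.tags == [] and settings.comment is None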
Example #5
    def inner(type_, options):
        while True:
            if not _is_new_type(type_):
                break

            type_ = type_.__supertype__

        if is_dataclass(type_):
            if _issubclass_safe(type_, mixin):
                options['field_many'] = bool(
                    _is_supported_generic(field.type)
                    and _is_collection(field.type))
                return fields.Nested(type_.schema(), **options)
            else:
                warnings.warn(f"Nested dataclass field {field.name} of type "
                              f"{field.type} detected in "
                              f"{cls.__name__} that is not an instance of "
                              f"dataclass_json. Did you mean to recursively "
                              f"serialize this field? If so, make sure to "
                              f"augment {type_} with either the "
                              f"`dataclass_json` decorator or mixin.")
                return fields.Field(**options)

        origin = getattr(type_, '__origin__', type_)
        args = [
            inner(a, {}) for a in getattr(type_, '__args__', [])
            if a is not type(None)
        ]

        if _is_optional(type_):
            options["allow_none"] = True

        if origin in TYPES:
            return TYPES[origin](*args, **options)

        if _issubclass_safe(origin, Enum):
            return EnumField(enum=origin, by_value=True, *args, **options)

        if is_union_type(type_):
            union_types = [
                a for a in getattr(type_, '__args__', [])
                if a is not type(None)
            ]
            union_desc = dict(zip(union_types, args))
            return _UnionField(union_desc, cls, field, **options)

        warnings.warn(
            f"Unknown type {type_} at {cls.__name__}.{field.name}: {field.type} "
            f"It's advised to pass the correct marshmallow type to `mm_field`."
        )
        return fields.Field(**options)
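
The nested-dataclass branch is what makes generated schemas recurse: when the inner type also carries the mixin, a fields.Nested(inner.schema()) is emitted (with many-style handling for collections); otherwise the warning above fires and a plain fields.Field is used. A sketch of the happy path (Point and Polygon are illustrative names):

from dataclasses import dataclass, field
from typing import List

from dataclasses_json import dataclass_json


@dataclass_json
@dataclass
class Point:
    x: int
    y: int


@dataclass_json
@dataclass
class Polygon:
    # Point also uses dataclass_json, so this becomes a nested schema
    # rather than an opaque fields.Field.
    vertices: List[Point] = field(default_factory=list)


poly = Polygon.schema().load({"vertices": [{"x": 0, "y": 0}, {"x": 1, "y": 2}]})
assert isinstance(poly.vertices[0], Point)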
Example #6
def _make_default_field(type_, default, cls):
    cons_type = type_
    cons_type = (list if _is_nonstr_collection(cons_type) else cons_type)
    cons_type = (dict if _is_mapping(cons_type) else cons_type)
    cons_type = (type_.__args__[0] if _is_optional(cons_type) else cons_type)
    cons = _type_to_cons[cons_type]
    if cons is fields.List:
        type_arg = type_.__args__[0]
        if type_arg not in _type_to_cons:
            raise TypeError(f"Unsupported {type_arg} detected. Is it "
                            f"a supported JSON type or dataclass_json "
                            f"instance?")
        arg_cons = _type_to_cons[type_arg]
        return cons(cls, arg_cons, missing=default)
    return cons(cls, missing=default)
Example #7
def _make_nested_fields(fields_, dataclass_json_cls, infer_missing):
    nested_fields = {}
    for field, type_, field_many in _inspect_nested_fields(fields_):
        if _issubclass_safe(type_, dataclass_json_cls):
            if infer_missing and _is_optional(field.type):
                schema = fields.Nested(type_.schema(),
                                       many=field_many,
                                       missing=None)
            else:
                schema = fields.Nested(type_.schema(), many=field_many)
            nested_fields[field.name] = schema
        else:
            warnings.warn(f"Nested dataclass field {field.name} of type "
                          f"{field.type} detected in "
                          f"{cls.__name__} that is not an instance of "
                          f"dataclass_json. Did you mean to recursively "
                          f"serialize this field? If so, make sure to "
                          f"augment {field.type} with either the "
                          f"`dataclass_json` decorator or mixin.")
    return nested_fields
Example #8
def schema(cls, mixin, infer_missing):
    schema = {}
    overrides = _user_overrides(cls)
    # TODO check the undefined parameters and add the proper schema action
    #  https://marshmallow.readthedocs.io/en/stable/quickstart.html
    for field in dc_fields(cls):
        metadata = overrides[field.name]
        if metadata.mm_field is not None:
            schema[field.name] = metadata.mm_field
        else:
            type_ = field.type
            options = {}
            missing_key = 'missing' if infer_missing else 'default'
            if field.default is not MISSING:
                options[missing_key] = field.default
            elif field.default_factory is not MISSING:
                options[missing_key] = field.default_factory

            if options.get(missing_key, ...) is None:
                options['allow_none'] = True

            if _is_optional(type_):
                options.setdefault(missing_key, None)
                options['allow_none'] = True
                if len(type_.__args__) == 2:
                    # Union[str, int, None] is optional too, but it has more than 1 typed field.
                    type_ = type_.__args__[0]

            if metadata.letter_case is not None:
                options['data_key'] = metadata.letter_case(field.name)

            t = build_type(type_, options, mixin, field, cls)
            # if type(t) is not fields.Field:  # If we use `isinstance` we would return nothing.
            if field.type != typing.Optional[CatchAllVar]:
                schema[field.name] = t

    return schema
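
The letter_case override in this version maps straight onto marshmallow's data_key, so the wire format can differ from the attribute names. A sketch using the LetterCase helper (assuming a release that exposes it alongside the decorator):

from dataclasses import dataclass

from dataclasses_json import LetterCase, dataclass_json


@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class Person:
    first_name: str
    last_name: str


# data_key makes the schema speak camelCase while the dataclass stays snake_case.
person = Person.schema().load({"firstName": "Ada", "lastName": "Lovelace"})
assert person.first_name == "Ada"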
Example #9
def _is_supported_generic(type_):
    not_str = not _issubclass_safe(type_, str)
    is_enum = _issubclass_safe(type_, Enum)
    return (not_str and _is_collection(type_)) or _is_optional(
        type_) or is_union_type(type_) or is_enum
Example #10
def _decode_dataclass(cls, kvs, infer_missing):
    if isinstance(kvs, cls):
        return kvs
    overrides = _user_overrides_or_exts(cls)
    kvs = {} if kvs is None and infer_missing else kvs
    field_names = [field.name for field in fields(cls)]
    decode_names = _decode_letter_case_overrides(field_names, overrides)
    kvs = {decode_names.get(k, k): v for k, v in kvs.items()}
    missing_fields = {field for field in fields(cls) if field.name not in kvs}

    for field in missing_fields:
        if field.default is not MISSING:
            kvs[field.name] = field.default
        elif field.default_factory is not MISSING:
            kvs[field.name] = field.default_factory()
        elif infer_missing:
            kvs[field.name] = None

    # Perform undefined parameter action
    kvs = _handle_undefined_parameters_safe(cls, kvs, usage="from")

    init_kwargs = {}
    types = get_type_hints(cls)
    for field in fields(cls):
        # The field should be skipped from being added
        # to init_kwargs as it's not intended as a constructor argument.
        if not field.init:
            continue

        field_value = kvs[field.name]
        field_type = types[field.name]
        if field_value is None and not _is_optional(field_type):
            warning = (f"value of non-optional type {field.name} detected "
                       f"when decoding {cls.__name__}")
            if infer_missing:
                warnings.warn(
                    f"Missing {warning} and was defaulted to None by "
                    f"infer_missing=True. "
                    f"Set infer_missing=False (the default) to prevent this "
                    f"behavior.", RuntimeWarning)
            else:
                warnings.warn(f"`NoneType` object {warning}.", RuntimeWarning)
            init_kwargs[field.name] = field_value
            continue

        while True:
            if not _is_new_type(field_type):
                break

            field_type = field_type.__supertype__

        if (field.name in overrides
                and overrides[field.name].decoder is not None):
            # FIXME hack
            if field_type is type(field_value):
                init_kwargs[field.name] = field_value
            else:
                init_kwargs[field.name] = overrides[field.name].decoder(
                    field_value)
        elif is_dataclass(field_type):
            # FIXME this is a band-aid to deal with the value already being
            # serialized when handling nested marshmallow schema
            # proper fix is to investigate the marshmallow schema generation
            # code
            if is_dataclass(field_value):
                value = field_value
            else:
                value = _decode_dataclass(field_type, field_value,
                                          infer_missing)
            init_kwargs[field.name] = value
        elif _is_supported_generic(field_type) and field_type != str:
            init_kwargs[field.name] = _decode_generic(field_type,
                                                      field_value,
                                                      infer_missing)
        else:
            init_kwargs[field.name] = _support_extended_types(field_type,
                                                              field_value)

    return cls(**init_kwargs)
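
The overrides branch is how per-field encoder/decoder hooks registered via config() take precedence over the generic decoding path. A sketch (LogEntry and the ISO-string format are illustrative):

from dataclasses import dataclass, field
from datetime import datetime

from dataclasses_json import config, dataclass_json


@dataclass_json
@dataclass
class LogEntry:
    message: str
    # The field-level decoder runs instead of the generic machinery,
    # turning the raw string into a datetime on from_dict/from_json.
    created: datetime = field(
        metadata=config(encoder=datetime.isoformat,
                        decoder=datetime.fromisoformat))


entry = LogEntry.from_dict({"message": "started", "created": "2021-06-01T12:00:00"})
assert entry.created.year == 2021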
Example #11
def _decode_dataclass(cls, kvs, infer_missing):
    overrides = _overrides(cls)
    kvs = {} if kvs is None and infer_missing else kvs
    missing_fields = {field for field in fields(cls) if field.name not in kvs}

    for field in missing_fields:
        if field.default is not MISSING:
            kvs[field.name] = field.default
        elif field.default_factory is not MISSING:
            kvs[field.name] = field.default_factory()
        elif infer_missing:
            kvs[field.name] = None

    init_kwargs = {}
    for field in fields(cls):

        field_value = kvs.get(field.name)

        if field_value is None and not _is_optional(field.type):
            warning = (f"value of non-optional type {field.name} detected "
                       f"when decoding {cls.__name__}")
            if infer_missing:
                warnings.warn(
                    f"Missing {warning} and was defaulted to None by "
                    f"infer_missing=True. "
                    f"Set infer_missing=False (the default) to prevent this "
                    f"behavior.", RuntimeWarning)
            else:
                warnings.warn(f"`NoneType` object {warning}.", RuntimeWarning)
            init_kwargs[field.name] = field_value
        elif (field.name in overrides
              and overrides[field.name].decoder is not None):
            # FIXME hack
            if field.type is type(field_value):
                init_kwargs[field.name] = field_value
            else:
                init_kwargs[field.name] = overrides[field.name].decoder(
                    field_value)
        elif is_dataclass(field.type):
            # FIXME this is a band-aid to deal with the value already being
            # serialized when handling nested marshmallow schema
            # proper fix is to investigate the marshmallow schema generation
            # code
            if is_dataclass(field_value):
                value = field_value
            else:
                value = _decode_dataclass(field.type, field_value,
                                          infer_missing)
            init_kwargs[field.name] = value

        elif _is_supported_generic(field.type) and field.type != str:
            init_kwargs[field.name] = _decode_generic(field.type, field_value,
                                                      infer_missing)
        elif _issubclass_safe(field.type, datetime):
            # FIXME this is a hack to deal with mm already decoding
            # the issue is we want to leverage mm fields' missing argument
            # but need this for the object creation hook
            if isinstance(field_value, datetime):
                dt = field_value
            else:
                dt = date_parser.parse(field_value)
            init_kwargs[field.name] = dt
        elif _issubclass_safe(field.type, UUID):
            init_kwargs[field.name] = (field_value if isinstance(
                field_value, UUID) else UUID(field_value))
        elif _issubclass_safe(field.type, Decimal):
            init_kwargs[field.name] = (field_value if isinstance(
                field_value, Decimal) else Decimal(field_value))
        else:
            init_kwargs[field.name] = field_value
    return cls(**init_kwargs)
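
This older decoder special-cases datetime, UUID, and Decimal directly; the datetime handling differs between Example #11 and Example #12, so the sketch below sticks to UUID and Decimal, which behave the same way in both: string values are coerced and already-typed values pass through (Invoice is an illustrative name):

from dataclasses import dataclass
from decimal import Decimal
from uuid import UUID

from dataclasses_json import dataclass_json


@dataclass_json
@dataclass
class Invoice:
    id: UUID        # a UUID string is coerced via UUID(field_value)
    total: Decimal  # a numeric string is coerced via Decimal(field_value)


invoice = Invoice.from_dict(
    {"id": "2f0b3a1e-6c5d-4b9a-8f3e-1d2c3b4a5e6f", "total": "19.99"})
assert isinstance(invoice.id, UUID) and invoice.total == Decimal("19.99")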
Example #12
def _decode_dataclass(cls, kvs, infer_missing):
    if isinstance(kvs, cls):
        return kvs
    overrides = _user_overrides(cls)
    kvs = {} if kvs is None and infer_missing else kvs
    field_names = [field.name for field in fields(cls)]
    decode_names = _decode_letter_case_overrides(field_names, overrides)
    kvs = {decode_names.get(k, k): v for k, v in kvs.items()}
    missing_fields = {field for field in fields(cls) if field.name not in kvs}
    for field in missing_fields:
        if field.default is not MISSING:
            kvs[field.name] = field.default
        elif field.default_factory is not MISSING:
            kvs[field.name] = field.default_factory()
        elif infer_missing:
            kvs[field.name] = None

    init_kwargs = {}
    types = get_type_hints(cls)
    for field in fields(cls):
        # The field should be skipped from being added
        # to init_kwargs as it's not intended as a constructor argument.
        if not field.init:
            continue

        field_value = kvs[field.name]
        field_type = types[field.name]
        if field_value is None and not _is_optional(field_type):
            warning = (f"value of non-optional type {field.name} detected "
                       f"when decoding {cls.__name__}")
            if infer_missing:
                warnings.warn(
                    f"Missing {warning} and was defaulted to None by "
                    f"infer_missing=True. "
                    f"Set infer_missing=False (the default) to prevent this "
                    f"behavior.", RuntimeWarning)
            else:
                warnings.warn(f"`NoneType` object {warning}.", RuntimeWarning)
            init_kwargs[field.name] = field_value
            continue

        while True:
            if not _is_new_type(field_type):
                break

            field_type = field_type.__supertype__

        if (field.name in overrides
                and overrides[field.name].decoder is not None):
            # FIXME hack
            if field_type is type(field_value):
                init_kwargs[field.name] = field_value
            else:
                init_kwargs[field.name] = overrides[field.name].decoder(
                    field_value)
        elif is_dataclass(field_type):
            # FIXME this is a band-aid to deal with the value already being
            # serialized when handling nested marshmallow schema
            # proper fix is to investigate the marshmallow schema generation
            # code
            if is_dataclass(field_value):
                value = field_value
            else:
                value = _decode_dataclass(field_type, field_value,
                                          infer_missing)
            init_kwargs[field.name] = value
        elif _is_supported_generic(field_type) and field_type != str:
            init_kwargs[field.name] = _decode_generic(field_type, field_value,
                                                      infer_missing)
        elif _issubclass_safe(field_type, datetime):
            # FIXME this is a hack to deal with mm already decoding
            # the issue is we want to leverage mm fields' missing argument
            # but need this for the object creation hook
            if isinstance(field_value, datetime):
                dt = field_value
            else:
                tz = datetime.now(timezone.utc).astimezone().tzinfo
                dt = datetime.fromtimestamp(field_value, tz=tz)
            init_kwargs[field.name] = dt
        elif _issubclass_safe(field_type, Decimal):
            init_kwargs[field.name] = (field_value if isinstance(
                field_value, Decimal) else Decimal(field_value))
        elif _issubclass_safe(field_type, UUID):
            init_kwargs[field.name] = (field_value if isinstance(
                field_value, UUID) else UUID(field_value))
        else:
            init_kwargs[field.name] = field_value
    return cls(**init_kwargs)
Example #13
def _is_supported_generic(type_):
    not_str = not _issubclass_safe(type_, str)
    return (not_str and _is_collection(type_)) or _is_optional(type_)