def _deserialize(self, value, attr, obj):
    if self.many:
        if not is_collection(value):
            raise ValidationError('Relationship is list-like')
        return [self.extract_value(item) for item in value]

    if is_collection(value):
        raise ValidationError('Relationship is not list-like')
    return self.extract_value(value)

def __init__(self, obj=None, extra=None, only=None, exclude=None,
             prefix='', strict=False, many=False, context=None):
    if not many and utils.is_collection(obj):
        warnings.warn('Implicit collection handling is deprecated. Set '
                      'many=True to serialize a collection.',
                      category=DeprecationWarning)
    # copy declared fields from metaclass
    self.declared_fields = copy.deepcopy(self._declared_fields)
    self.fields = OrderedDict()
    self.__data = None
    self.obj = obj
    self.many = many
    self.opts = self.OPTIONS_CLASS(self.Meta)
    self.only = only or ()
    self.exclude = exclude or ()
    self.prefix = prefix
    self.strict = strict or self.opts.strict
    #: Callable marshalling object
    self.marshal = fields.Marshaller(prefix=self.prefix, strict=self.strict)
    self.extra = extra
    self.context = context
    if isinstance(obj, types.GeneratorType):
        self.obj = list(obj)
    else:
        self.obj = obj
    self._update_fields(obj)
    # If object is passed in, marshal it immediately so that errors are stored
    if self.obj is not None:
        raw_data = self.marshal(self.obj, self.fields, many=self.many)
        if self.extra:
            raw_data.update(self.extra)
        self.__data = self.process_data(raw_data)

def dump(self, obj): """Serialize an object to native Python data types according to this Schema's fields. :param obj: The object to serialize. :return: A tuple of the form (``data``, ``errors``) :rtype: `MarshalResult`, a `collections.namedtuple` .. versionadded:: 1.0.0 """ if not self.many and utils.is_collection(obj) and not utils.is_keyed_tuple(obj): warnings.warn('Implicit collection handling is deprecated. Set ' 'many=True to serialize a collection.', category=DeprecationWarning) if isinstance(obj, types.GeneratorType): obj = list(obj) self._update_fields(obj) preresult = self._marshal( obj, self.fields, many=self.many, strict=self.strict ) result = self._postprocess(preresult, obj=obj) errors = self._marshal.errors return MarshalResult(result, errors)
def _serialize(self, value, attr, obj):
    if utils.is_collection(value):
        limit = self.metadata.get('limit')
        value = [item for item in value if item]
        if limit:
            return super(ListWithLimit, self)._serialize(value[:limit], attr, obj)
    return super(ListWithLimit, self)._serialize(value, attr, obj)

def marshal(self, data, fields_dict):
    """Takes the data (a dict, list, or object) and a dict of fields.
    Stores any errors that occur.

    :param data: The object(s) from which the fields are taken.
    :param dict fields_dict: A dict whose keys will make up the final
        serialized response output
    """
    if utils.is_collection(data):
        return [self.marshal(d, fields_dict) for d in data]
    items = []
    for attr_name, field_obj in iteritems(fields_dict):
        key = self.prefix + attr_name
        try:
            if isinstance(field_obj, dict):
                item = (key, self.marshal(data, field_obj))
            else:
                try:
                    item = (key, field_obj.output(attr_name, data))
                except TypeError:
                    # field declared as a class, not an instance
                    if issubclass(field_obj, base.FieldABC):
                        msg = ('Field for "{0}" must be declared as a '
                               "Field instance, not a class. "
                               'Did you mean "fields.{1}()"?'
                               .format(attr_name, field_obj.__name__))
                        raise TypeError(msg)
                    raise
        except exceptions.MarshallingError as err:  # Store errors
            if self.strict or self.opts.strict:
                raise err
            self.errors[key] = text_type(err)
            item = (key, None)
        items.append(item)
    return OrderedDict(items)

def _deserialize(self, value, attr, data):
    if self.many and not utils.is_collection(value):
        self.fail('type', input=value, type=value.__class__.__name__)
    data, errors = self.schema.load(value)
    if errors:
        raise ValidationError(errors, data=data)
    return data

def get_value(self, attr, obj, accessor=None):
    """Return the value for a given key from an object."""
    value = super(List, self).get_value(attr, obj, accessor=accessor)
    if self.container.attribute:
        if utils.is_collection(value):
            return [
                self.container.get_value(self.container.attribute, each)
                for each in value
            ]
        return self.container.get_value(self.container.attribute, value)
    return value

def dump(self, obj, many=None, update_fields=True, **kwargs):
    """Serialize an object to native Python data types according to this
    Schema's fields.

    :param obj: The object to serialize.
    :param bool many: Whether to serialize `obj` as a collection. If `None`,
        the value for `self.many` is used.
    :param bool update_fields: Whether to update the schema's field classes.
        Typically set to `True`, but may be `False` when serializing a
        homogeneous collection. This parameter is used by `fields.Nested` to
        avoid multiple updates.
    :return: A tuple of the form (``data``, ``errors``)
    :rtype: `MarshalResult`, a `collections.namedtuple`

    .. versionadded:: 1.0.0
    """
    many = self.many if many is None else bool(many)
    if not many and utils.is_collection(obj) and not utils.is_keyed_tuple(obj):
        warnings.warn('Implicit collection handling is deprecated. Set '
                      'many=True to serialize a collection.',
                      category=DeprecationWarning)
    if many and utils.is_iterable_but_not_string(obj):
        obj = list(obj)
    processed_obj = self._invoke_dump_processors(PRE_DUMP, obj, many, original_data=obj)
    if update_fields:
        self._update_fields(processed_obj, many=many)
    try:
        preresult = self._marshal(
            processed_obj,
            self.fields,
            many=many,
            # TODO: Remove self.__accessor__ in a later release
            accessor=self.get_attribute or self.__accessor__,
            dict_class=self.dict_class,
            index_errors=self.opts.index_errors,
            **kwargs
        )
    except ValidationError as error:
        errors = self._marshal.errors
        preresult = error.data
        if self.strict:
            raise error
    else:
        errors = {}
    result = self._postprocess(preresult, many, obj=obj)
    result = self._invoke_dump_processors(POST_DUMP, result, many, original_data=obj)
    return MarshalResult(result, errors)

def marshal(data, fields):
    """Takes raw data (in the form of a dict, list, or object) and a dict of
    fields to output and filters the data based on those fields.

    :param data: The object(s) from which the fields are taken.
    :param dict fields: A dict whose keys will make up the final serialized
        response output.
    """
    if utils.is_collection(data):
        return [marshal(d, fields) for d in data]
    items = ((k, marshal(data, v) if isinstance(v, dict) else v.output(k, data))
             for k, v in fields.items())
    return OrderedDict(items)

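# Usage sketch (assumption, not from the source): a minimal stand-in field
# exposing the `output(key, obj)` interface that marshal() above relies on,
# so the example is self-contained; real field classes would come from the
# library itself.
class _Raw(object):
    def output(self, key, obj):
        # Pull the value straight off a dict or an object attribute.
        return obj.get(key) if isinstance(obj, dict) else getattr(obj, key)

user = {'name': 'Monty', 'age': 42}
marshal(user, {'name': _Raw(), 'age': _Raw()})
# -> OrderedDict([('name', 'Monty'), ('age', 42)])
marshal([user, user], {'name': _Raw()})
# -> [OrderedDict([('name', 'Monty')]), OrderedDict([('name', 'Monty')])]
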
def build_result(query, schema, model=None):
    schema = select_and_omit(schema)
    if is_collection(query):
        query = apply_query_filters(query, model)
        result = paginate(query)
        return {
            'num_results': result.total,
            'page': result.page,
            'num_pages': result.pages,
            'objects': schema.dump(result.items, many=True).data,
        }
    else:
        return schema.dump(query, many=False).data

def _deserialize(self, value, attr, data):
    if not utils.is_collection(value):
        self.fail('invalid')

    result = []
    errors = {}
    for idx, each in enumerate(value):
        try:
            result.append(self.container.deserialize(each))
        except ValidationError as e:
            result.append(e.data)
            errors.update({idx: e.messages})

    if errors:
        raise ValidationError(errors, data=result)

    return result

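# Behavior sketch (assumption, not from the source): how the per-index error
# dict built above roughly surfaces through a marshmallow 2.x List field.
# `NumbersSchema` is a hypothetical schema for illustration.
from marshmallow import Schema, fields

class NumbersSchema(Schema):
    values = fields.List(fields.Integer())

data, errors = NumbersSchema().load({'values': [1, 'two', 3]})
# errors -> {'values': {1: ['Not a valid integer.']}}
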
def dump(self, obj, many=None, update_fields=True, **kwargs):
    """Serialize an object to native Python data types according to this
    Schema's fields.

    :param obj: The object to serialize.
    :param bool many: Whether to serialize `obj` as a collection. If `None`,
        the value for `self.many` is used.
    :param bool update_fields: Whether to update the schema's field classes.
        Typically set to `True`, but may be `False` when serializing a
        homogeneous collection. This parameter is used by `fields.Nested` to
        avoid multiple updates.
    :return: A tuple of the form (``data``, ``errors``)
    :rtype: `MarshalResult`, a `collections.namedtuple`

    .. versionadded:: 1.0.0
    """
    many = self.many if many is None else bool(many)
    if not many and utils.is_collection(obj) and not utils.is_keyed_tuple(obj):
        warnings.warn('Implicit collection handling is deprecated. Set '
                      'many=True to serialize a collection.',
                      category=DeprecationWarning)
    if isinstance(obj, types.GeneratorType):
        obj = list(obj)
    if update_fields:
        self._update_fields(obj, many=many)
    preresult = self._marshal(
        obj,
        self.fields,
        many=many,
        strict=self.strict,
        skip_missing=self.skip_missing,
        accessor=self.__accessor__,
        dict_class=self.dict_class,
        **kwargs
    )
    result = self._postprocess(preresult, many, obj=obj)
    errors = self._marshal.errors
    return MarshalResult(result, errors)

def _test_collection(self, value):
    if self.many and not is_collection(value):
        self.fail("type", input=value, type=value.__class__.__name__)

def test_is_collection(self):
    assert_true(utils.is_collection([1, 'foo', {}]))
    assert_true(utils.is_collection(('foo', 2.3)))
    assert_false(utils.is_collection({'foo': 'bar'}))

def _deserialize(self, data, fields_dict, *, error_store, many=False,
                 partial=False, unknown=RAISE, dict_class=dict,
                 index_errors=True, index=None):
    """Deserialize ``data`` based on the schema defined by ``fields_dict``.

    :param dict data: The data to deserialize.
    :param dict fields_dict: Mapping of field names to :class:`Field` objects.
    :param ErrorStore error_store: Structure to store errors.
    :param bool many: Set to `True` if ``data`` should be deserialized as
        a collection.
    :param bool|tuple partial: Whether to ignore missing fields and not require
        any fields declared. Propagates down to ``Nested`` fields as well. If
        its value is an iterable, only missing fields listed in that iterable
        will be ignored. Use dot delimiters to specify nested fields.
    :param unknown: Whether to exclude, include, or raise an error for unknown
        fields in the data. Use `EXCLUDE`, `INCLUDE` or `RAISE`.
    :param type dict_class: Dictionary class used to construct the output.
    :param bool index_errors: Whether to store the index of invalid items in
        ``self.errors`` when ``many=True``.
    :param int index: Index of the item being serialized (for storing errors) if
        serializing a collection, otherwise `None`.
    :return: A dictionary of the deserialized data.
    """
    index = index if index_errors else None
    if many:
        if not is_collection(data):
            error_store.store_error([self.error_messages['type']], index=index)
            ret = []
        else:
            self._pending = True
            ret = [
                self._deserialize(
                    d,
                    fields_dict,
                    error_store=error_store,
                    many=False,
                    partial=partial,
                    unknown=unknown,
                    dict_class=dict_class,
                    index=idx,
                    index_errors=index_errors,
                )
                for idx, d in enumerate(data)
            ]
            self._pending = False
        return ret
    ret = dict_class()
    # Check data is a dict
    if not isinstance(data, Mapping):
        error_store.store_error([self.error_messages['type']], index=index)
    else:
        partial_is_collection = is_collection(partial)
        for attr_name, field_obj in fields_dict.items():
            if field_obj.dump_only:
                continue
            field_name = attr_name
            if field_obj.data_key:
                field_name = field_obj.data_key
            raw_value = data.get(field_name, missing)
            if raw_value is missing:
                # Ignore missing field if we're allowed to.
                if (
                    partial is True or
                    (partial_is_collection and attr_name in partial)
                ):
                    continue
            d_kwargs = {}
            if isinstance(field_obj, Nested):
                # Allow partial loading of nested schemas.
                if partial_is_collection:
                    prefix = field_name + '.'
                    len_prefix = len(prefix)
                    sub_partial = [
                        f[len_prefix:] for f in partial
                        if f.startswith(prefix)
                    ]
                else:
                    sub_partial = partial
                d_kwargs['partial'] = sub_partial
            getter = lambda val: field_obj.deserialize(
                val, field_name, data, **d_kwargs)
            value = self._call_and_store(
                getter_func=getter,
                data=raw_value,
                field_name=field_name,
                error_store=error_store,
                index=index,
            )
            if value is not missing:
                key = fields_dict[attr_name].attribute or attr_name
                set_value(ret, key, value)
        if unknown != EXCLUDE:
            fields = {
                field_obj.data_key or field_name
                for field_name, field_obj in fields_dict.items()
                if not field_obj.dump_only
            }
            for key in set(data) - fields:
                value = data[key]
                if unknown == INCLUDE:
                    set_value(ret, key, value)
                elif unknown == RAISE:
                    error_store.store_error(
                        [self.error_messages['unknown']],
                        key,
                        (index if index_errors else None),
                    )
    return ret

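# Behavior sketch (assumption, not from the source): how the `unknown`
# handling above surfaces through Schema.load() in marshmallow 3.
# `ItemSchema` and `payload` are hypothetical names for illustration.
from marshmallow import Schema, fields, INCLUDE, EXCLUDE, ValidationError

class ItemSchema(Schema):
    name = fields.String()

payload = {'name': 'widget', 'color': 'red'}
ItemSchema(unknown=EXCLUDE).load(payload)   # -> {'name': 'widget'}
ItemSchema(unknown=INCLUDE).load(payload)   # -> {'name': 'widget', 'color': 'red'}
try:
    ItemSchema().load(payload)              # default is RAISE
except ValidationError as err:
    err.messages                            # -> {'color': ['Unknown field.']}
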
def _serialize(self, value, attr, obj):
    if value is None:
        return None
    if utils.is_collection(value):
        return [self.container._serialize(each, attr, obj) for each in value]
    return [self.container._serialize(value, attr, obj)]

def test_is_collection(): assert utils.is_collection([1, "foo", {}]) is True assert utils.is_collection(("foo", 2.3)) is True assert utils.is_collection({"foo": "bar"}) is False
def _test_collection(self, value):
    if self.many and not utils.is_collection(value):
        raise self.make_error("type", input=value, type=value.__class__.__name__)

def filter_hidden(dict_or_list):
    if utils.is_collection(dict_or_list):
        return [item for item in dict_or_list if not item.get("hidden")]
    return dict_or_list if not dict_or_list.get("hidden") else {}

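# Usage sketch (assumption, not from the source): filter_hidden drops hidden
# entries from a list and blanks a single hidden mapping; the sample data is
# illustrative only.
items = [{"id": 1}, {"id": 2, "hidden": True}]
filter_hidden(items)                        # -> [{"id": 1}]
filter_hidden({"id": 3, "hidden": True})    # -> {}
filter_hidden({"id": 4})                    # -> {"id": 4}
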
def dump_data(self, obj: Any) -> Any:
    attr: Any = super().get_attribute(obj, "data", None)
    many: bool = is_collection(attr)
    return self._data_schema.dump(attr, many=many)

def deserialize(self, data, fields_dict, many=False, partial=False, dict_class=dict, index_errors=True, index=None): """Deserialize ``data`` based on the schema defined by ``fields_dict``. :param dict data: The data to deserialize. :param dict fields_dict: Mapping of field names to :class:`Field` objects. :param bool many: Set to `True` if ``data`` should be deserialized as a collection. :param bool|tuple partial: Whether to ignore missing fields. If its value is an iterable, only missing fields listed in that iterable will be ignored. :param type dict_class: Dictionary class used to construct the output. :param bool index_errors: Whether to store the index of invalid items in ``self.errors`` when ``many=True``. :param int index: Index of the item being serialized (for storing errors) if serializing a collection, otherwise `None`. :return: A dictionary of the deserialized data. """ # Reset errors if not deserializing a collection if not self._pending: self.reset_errors() if many and data is not None: self._pending = True ret = [ self.deserialize(d, fields_dict, many=False, partial=partial, dict_class=dict_class, index=idx, index_errors=index_errors) for idx, d in enumerate(data) ] self._pending = False if self.errors: raise ValidationError( self.errors, field_names=self.error_field_names, fields=self.error_fields, data=ret, ) return ret if data is not None: partial_is_collection = is_collection(partial) ret = dict_class() for attr_name, field_obj in iteritems(fields_dict): if field_obj.dump_only: continue try: raw_value = data.get(attr_name, missing) except AttributeError: # Input data is not a dict errors = self.get_errors(index=index) msg = field_obj.error_messages['type'].format( input=data, input_type=data.__class__.__name__) self.error_field_names = [SCHEMA] self.error_fields = [] errors = self.get_errors() errors.setdefault(SCHEMA, []).append(msg) # Input data type is incorrect, so we can bail out early break field_name = attr_name if raw_value is missing and field_obj.load_from: field_name = field_obj.load_from raw_value = data.get(field_obj.load_from, missing) if raw_value is missing: # Ignore missing field if we're allowed to. if (partial is True or (partial_is_collection and attr_name in partial)): continue _miss = field_obj.missing raw_value = _miss() if callable(_miss) else _miss if raw_value is missing and not field_obj.required: continue getter = lambda val: field_obj.deserialize( val, field_obj.load_from or attr_name, data) value = self.call_and_store( getter_func=getter, data=raw_value, field_name=field_name, field_obj=field_obj, index=(index if index_errors else None)) if value is not missing: key = fields_dict[attr_name].attribute or attr_name set_value(ret, key, value) else: ret = None if self.errors and not self._pending: raise ValidationError( self.errors, field_names=self.error_field_names, fields=self.error_fields, data=ret, ) return ret
def _deserialize(
    self,
    data: (
        typing.Mapping[str, typing.Any]
        | typing.Iterable[typing.Mapping[str, typing.Any]]
    ),
    *,
    error_store: ErrorStore,
    many: bool = False,
    partial=False,
    unknown=RAISE,
    index=None,
) -> _T | list[_T]:
    """Deserialize ``data``.

    :param dict data: The data to deserialize.
    :param ErrorStore error_store: Structure to store errors.
    :param bool many: `True` if ``data`` should be deserialized as a collection.
    :param bool|tuple partial: Whether to ignore missing fields and not require
        any fields declared. Propagates down to ``Nested`` fields as well. If
        its value is an iterable, only missing fields listed in that iterable
        will be ignored. Use dot delimiters to specify nested fields.
    :param unknown: Whether to exclude, include, or raise an error for unknown
        fields in the data. Use `EXCLUDE`, `INCLUDE` or `RAISE`.
    :param int index: Index of the item being serialized (for storing errors) if
        serializing a collection, otherwise `None`.
    :return: A dictionary of the deserialized data.
    """
    index_errors = self.opts.index_errors
    index = index if index_errors else None
    if many:
        if not is_collection(data):
            error_store.store_error([self.error_messages["type"]], index=index)
            ret_l = []  # type: typing.List[_T]
        else:
            ret_l = [
                typing.cast(
                    _T,
                    self._deserialize(
                        typing.cast(typing.Mapping[str, typing.Any], d),
                        error_store=error_store,
                        many=False,
                        partial=partial,
                        unknown=unknown,
                        index=idx,
                    ),
                )
                for idx, d in enumerate(data)
            ]
        return ret_l
    ret_d = self.dict_class()
    # Check data is a dict
    if not isinstance(data, Mapping):
        error_store.store_error([self.error_messages["type"]], index=index)
    else:
        partial_is_collection = is_collection(partial)
        for attr_name, field_obj in self.load_fields.items():
            field_name = (
                field_obj.data_key if field_obj.data_key is not None else attr_name
            )
            raw_value = data.get(field_name, missing)
            if raw_value is missing:
                # Ignore missing field if we're allowed to.
                if partial is True or (
                    partial_is_collection and attr_name in partial
                ):
                    continue
            d_kwargs = {}
            # Allow partial loading of nested schemas.
            if partial_is_collection:
                prefix = field_name + "."
                len_prefix = len(prefix)
                sub_partial = [
                    f[len_prefix:] for f in partial if f.startswith(prefix)
                ]
                d_kwargs["partial"] = sub_partial
            else:
                d_kwargs["partial"] = partial
            getter = lambda val: field_obj.deserialize(
                val, field_name, data, **d_kwargs)
            value = self._call_and_store(
                getter_func=getter,
                data=raw_value,
                field_name=field_name,
                error_store=error_store,
                index=index,
            )
            if value is not missing:
                key = field_obj.attribute or attr_name
                set_value(ret_d, key, value)
        if unknown != EXCLUDE:
            fields = {
                field_obj.data_key if field_obj.data_key is not None else field_name
                for field_name, field_obj in self.load_fields.items()
            }
            for key in set(data) - fields:
                value = data[key]
                if unknown == INCLUDE:
                    ret_d[key] = value
                elif unknown == RAISE:
                    error_store.store_error(
                        [self.error_messages["unknown"]],
                        key,
                        (index if index_errors else None),
                    )
    return ret_d

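# Behavior sketch (assumption, not from the source): the dot-delimited
# `partial` handling above lets a nested field be partially loaded via
# Schema.load() in marshmallow 3. `AuthorSchema` and `BookSchema` are
# hypothetical schemas for illustration.
from marshmallow import Schema, fields

class AuthorSchema(Schema):
    name = fields.String(required=True)
    email = fields.Email(required=True)

class BookSchema(Schema):
    title = fields.String(required=True)
    author = fields.Nested(AuthorSchema, required=True)

# 'author.email' has its prefix stripped and is forwarded to AuthorSchema,
# so a missing email is tolerated while 'name' stays required.
BookSchema().load(
    {'title': 'Flatland', 'author': {'name': 'A. Square'}},
    partial=('author.email',),
)
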
def fields2jsonschema(fields, schema=None, spec=None, use_refs=True, dump=True, name=None):
    """Return the JSON Schema Object for a given marshmallow
    :class:`Schema <marshmallow.Schema>`. Schema may optionally provide the
    ``title`` and ``description`` class Meta options.

    https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#schemaObject

    Example: ::

        class UserSchema(Schema):
            _id = fields.Int()
            email = fields.Email(description='email address of the user')
            name = fields.Str()

            class Meta:
                title = 'User'
                description = 'A registered user'

        schema2jsonschema(UserSchema)
        # {
        #     'title': 'User', 'description': 'A registered user',
        #     'properties': {
        #         'name': {'required': False,
        #                  'description': '',
        #                  'type': 'string'},
        #         '_id': {'format': 'int32',
        #                 'required': False,
        #                 'description': '',
        #                 'type': 'integer'},
        #         'email': {'format': 'email',
        #                   'required': False,
        #                   'description': 'email address of the user',
        #                   'type': 'string'}
        #     }
        # }

    :param Schema schema: A marshmallow Schema instance or a class object
    :rtype: dict, a JSON Schema Object
    """
    Meta = getattr(schema, 'Meta', None)
    if getattr(Meta, 'fields', None) or getattr(Meta, 'additional', None):
        declared_fields = set(schema._declared_fields.keys())
        if (set(getattr(Meta, 'fields', set())) > declared_fields or
                set(getattr(Meta, 'additional', set())) > declared_fields):
            warnings.warn(
                "Only explicitly-declared fields will be included in the Schema Object. "
                "Fields defined in Meta.fields or Meta.additional are ignored."
            )
    jsonschema = {
        'type': 'object',
        'properties': OrderedLazyDict() if getattr(Meta, 'ordered', None) else LazyDict(),
    }
    exclude = set(getattr(Meta, 'exclude', []))
    for field_name, field_obj in iteritems(fields):
        if field_name in exclude or (field_obj.dump_only and not dump):
            continue
        observed_field_name = _observed_name(field_obj, field_name)
        prop_func = lambda field_obj=field_obj: field2property(  # flake8: noqa
            field_obj, spec=spec, use_refs=use_refs, dump=dump, name=name)
        jsonschema['properties'][observed_field_name] = prop_func
        partial = getattr(schema, 'partial', None)
        if field_obj.required:
            if not partial or (is_collection(partial) and field_name not in partial):
                jsonschema.setdefault('required', []).append(observed_field_name)
    if 'required' in jsonschema:
        jsonschema['required'].sort()
    if Meta is not None:
        if hasattr(Meta, 'title'):
            jsonschema['title'] = Meta.title
        if hasattr(Meta, 'description'):
            jsonschema['description'] = Meta.description
    if getattr(schema, 'many', False):
        jsonschema = {
            'type': 'array',
            'items': jsonschema,
        }
    return jsonschema

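# Usage sketch (assumption, not from the source): passing a schema instance's
# field dict, with `partial` suppressing the 'required' entry for the fields it
# names. `PetSchema` is a hypothetical schema; property values resolve lazily
# via the LazyDict used above.
from marshmallow import Schema, fields

class PetSchema(Schema):
    name = fields.Str(required=True)
    species = fields.Str(required=True)

schema = PetSchema(partial=('species',))
result = fields2jsonschema(schema.fields, schema=schema)
# result['required'] -> ['name']   (species omitted because of partial)
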
def _deserialize(self, value, attr, data):
    if utils.is_collection(value):
        # Convert all instances in typed list to container type
        return [self.container.deserialize(each) for each in value]
    else:
        self.fail('invalid')

def fields2jsonschema(self, fields, *, ordered=False, partial=None):
    schema_list = [field.parent for field in fields.values()]
    if schema_list:
        if not isinstance_or_subclass(schema_list[0], JSONAPISchema):
            return super().fields2jsonschema(fields, ordered=ordered, partial=partial)
    default_fields = [
        ('id', None),
        ('type', {'type': 'string'}),
    ]
    jsonschema = {
        'type': 'object',
        'properties': OrderedDict(default_fields) if ordered else dict(default_fields),
    }  # type: dict
    properties = jsonschema['properties']
    if schema_list:
        properties['type']['enum'] = list(
            {schema.Meta.type_ for schema in schema_list})
    for field_name, field_obj in fields.items():
        observed_field_name = field_obj.data_key or field_name
        prop = self.field2property(field_obj)
        if observed_field_name == 'id':
            properties[observed_field_name] = prop
        elif isinstance(field_obj, JSONAPIRelationship):
            properties.setdefault('relationships', {
                'type': 'object',
                'properties': {},
            })
            properties['relationships']['properties'][observed_field_name] = prop
        else:
            properties.setdefault('attributes', {
                'type': 'object',
                'properties': {},
            })
            properties['attributes']['properties'][observed_field_name] = prop
        # TODO: support meta fields
        if field_obj.required:
            if not partial or (is_collection(partial) and field_name not in partial):
                if isinstance(field_obj, JSONAPIRelationship):
                    properties['relationships'].setdefault(
                        'required', []).append(observed_field_name)
                else:
                    properties['attributes'].setdefault(
                        'required', []).append(observed_field_name)
    jsonschema['required'] = ['type']
    if 'attributes' in properties and 'required' in properties['attributes']:
        properties['attributes']['required'].sort()
        jsonschema['required'].append('attributes')
    if 'relationships' in properties and 'required' in properties['relationships']:
        properties['relationships']['required'].sort()
        jsonschema['required'].append('relationships')
    return jsonschema

def test_is_collection():
    assert utils.is_collection([1, 'foo', {}]) is True
    assert utils.is_collection(('foo', 2.3)) is True
    assert utils.is_collection({'foo': 'bar'}) is False

def _is_collection(self, value):
    return isinstance(value, Mapping) or utils.is_collection(value)

def _deserialize(self, value, attr, data):
    if not is_collection(value) and value is not None:
        value = [value]
    return super(List, self)._deserialize(value, attr, data)

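# Usage sketch (assumption, not from the source): wiring the override above
# into a custom List so a field accepts either a scalar or a list; assumes the
# marshmallow 2.x field/load API. `FlexibleList` and `TagSchema` are
# hypothetical names for illustration.
from marshmallow import Schema, fields
from marshmallow.utils import is_collection

class FlexibleList(fields.List):
    def _deserialize(self, value, attr, data):
        # Wrap a bare scalar so the stock List deserialization applies.
        if not is_collection(value) and value is not None:
            value = [value]
        return super(FlexibleList, self)._deserialize(value, attr, data)

class TagSchema(Schema):
    tags = FlexibleList(fields.String())

TagSchema().load({'tags': 'python'}).data           # -> {'tags': ['python']}
TagSchema().load({'tags': ['python', 'web']}).data  # -> {'tags': ['python', 'web']}
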
def _deserialize(self, value, *args, **kwargs):
    """Deserialize data into a nested attribute.

    In the case of a nested field with many items, the behavior of this
    field varies in a few key ways depending on whether the parent form
    has `partial` set to `True` or `False`.

    If `True`, items can be explicitly added or removed from a collection,
    but the rest of the collection will remain intact.

    If `False`, the collection will be set to an empty list, and only
    items included in the supplied data will be in the collection.

    Important to note also that updates to items contained in this
    collection will be done using ``partial=True``, regardless of what
    the value of the parent schema's ``partial`` attribute is. The only
    exception to this is the creation of a new item to be placed in the
    nested collection, in which case ``partial=False`` is always used.

    :param value: Data for this field.
    :type value: list of dict or dict
    :return: The deserialized form of this nested field. In the case of a
        value that doesn't use a list, this is a single object (or `None`).
        Otherwise a list of objects is returned.
    """
    permissions = self.permissions_cls(**self._get_permission_cls_kwargs())
    strict = self.parent.strict
    result = None
    parent = self.parent.instance
    if self.many:
        obj_datum = value
        if not is_collection(value):
            self.fail('type', input=value, type=value.__class__.__name__)
        else:
            if not self.parent.partial:
                setattr(parent, self.name, [])
    else:
        # Treat this like a list until it comes time to actually
        # modify the value.
        obj_datum = [value]
    errors = {}
    # each item in value is a sub instance
    for i, obj_data in enumerate(obj_datum):
        if not isinstance(obj_data, dict):
            self.fail('type', input=obj_data, type=obj_data.__class__.__name__)
        # check if there's an explicit operation included
        loaded_instance = None
        if hasattr(obj_data, "pop"):
            operation = obj_data.pop("$op", None)
        else:
            operation = None
        is_new_obj = False
        # check whether this data has value(s) for
        # the identifier columns.
        try:
            if self._has_identifier(obj_data):
                instance = self._get_identified_instance(obj_data)
            else:
                instance = None
        except TypeError:
            # Upon deserialization, UnprocessableEntity will get
            # raised.
            # TODO - Should shore this up.
            instance = None
        if instance is None:
            is_new_obj = True
        if operation is None:
            if self.many:
                operation = "add"
            else:
                operation = "set"
        if self._permissible(permissions=permissions, obj_data=obj_data,
                             operation=operation, index=i, errors=errors,
                             strict=strict, instance=instance):
            if is_new_obj:
                loaded_instance, sub_errors = self._load_new_instance(obj_data)
                instance = loaded_instance
            else:
                loaded_instance, sub_errors = self._load_existing_instance(
                    obj_data, instance)
            if sub_errors:
                if self.many:
                    errors[i] = sub_errors
                else:
                    errors = sub_errors
                if strict:
                    raise ValidationError(errors)
                else:
                    continue
            # TODO - not sure if this is appropriate error handling
            if (instance is None and self.many) or instance != loaded_instance:
                try:
                    self.fail("invalid_operation", **kwargs)
                except ValidationError as e:
                    errors[i] = e.messages
                    if strict:
                        raise ValidationError(errors)
            result = self._perform_operation(
                operation=operation,
                parent=parent,
                instance=loaded_instance,
                index=i,
                errors=errors,
                strict=strict)
    if errors:
        raise ValidationError(errors)
    return result

def dump(self, obj, many=None, update_fields=True, **kwargs):
    """Serialize an object to native Python data types according to this
    Schema's fields.

    :param obj: The object to serialize.
    :param bool many: Whether to serialize `obj` as a collection. If `None`,
        the value for `self.many` is used.
    :param bool update_fields: Whether to update the schema's field classes.
        Typically set to `True`, but may be `False` when serializing a
        homogeneous collection. This parameter is used by `fields.Nested` to
        avoid multiple updates.
    :return: A tuple of the form (``data``, ``errors``)
    :rtype: `MarshalResult`, a `collections.namedtuple`

    .. versionadded:: 1.0.0
    """
    errors = {}
    many = self.many if many is None else bool(many)
    if not many and utils.is_collection(obj) and not utils.is_keyed_tuple(obj):
        warnings.warn('Implicit collection handling is deprecated. Set '
                      'many=True to serialize a collection.',
                      category=DeprecationWarning)
    if many and utils.is_iterable_but_not_string(obj):
        obj = list(obj)
    try:
        processed_obj = self._invoke_dump_processors(
            PRE_DUMP,
            obj,
            many,
            original_data=obj)
    except ValidationError as error:
        errors = error.normalized_messages()
        result = None
    if not errors:
        if update_fields:
            self._update_fields(processed_obj, many=many)
        try:
            preresult = self._marshal(
                processed_obj,
                self.fields,
                many=many,
                # TODO: Remove self.__accessor__ in a later release
                accessor=self.get_attribute or self.__accessor__,
                dict_class=self.dict_class,
                index_errors=self.opts.index_errors,
                **kwargs)
        except ValidationError as error:
            errors = self._marshal.errors
            preresult = error.data
        result = self._postprocess(preresult, many, obj=obj)
    if not errors:
        try:
            result = self._invoke_dump_processors(
                POST_DUMP,
                result,
                many,
                original_data=obj)
        except ValidationError as error:
            errors = error.normalized_messages()
    if errors:
        # TODO: Remove self.__error_handler__ in a later release
        if self.__error_handler__ and callable(self.__error_handler__):
            self.__error_handler__(errors, obj)
        exc = ValidationError(
            errors,
            field_names=self._marshal.error_field_names,
            fields=self._marshal.error_fields,
            data=obj,
            **self._marshal.error_kwargs)
        self.handle_error(exc, obj)
        if self.strict:
            raise exc
    return MarshalResult(result, errors)

def _deserialize(self, value, attr, data, **kwargs):
    if self.many and not utils.is_collection(value):
        self.fail('type', input=value, type=value.__class__.__name__)

    if self.many:
        return [self._deserialize_item(item) for item in value]
    return self._deserialize_item(value)

def deserialize(self, data, fields_dict, many=False, partial=False, dict_class=dict, index_errors=True, index=None): """Deserialize ``data`` based on the schema defined by ``fields_dict``. :param dict data: The data to deserialize. :param dict fields_dict: Mapping of field names to :class:`Field` objects. :param bool many: Set to `True` if ``data`` should be deserialized as a collection. :param bool|tuple partial: Whether to ignore missing fields. If its value is an iterable, only missing fields listed in that iterable will be ignored. :param type dict_class: Dictionary class used to construct the output. :param bool index_errors: Whether to store the index of invalid items in ``self.errors`` when ``many=True``. :param int index: Index of the item being serialized (for storing errors) if serializing a collection, otherwise `None`. :return: A dictionary of the deserialized data. """ # Reset errors if not deserializing a collection if not self._pending: self.reset_errors() if many and data is not None: self._pending = True ret = [self.deserialize(d, fields_dict, many=False, partial=partial, dict_class=dict_class, index=idx, index_errors=index_errors) for idx, d in enumerate(data)] self._pending = False if self.errors: raise ValidationError( self.errors, field_names=self.error_field_names, fields=self.error_fields, data=ret, ) return ret if data is not None: items = [] partial_is_collection = is_collection(partial) for attr_name, field_obj in iteritems(fields_dict): if field_obj.dump_only: continue try: raw_value = data.get(attr_name, missing) except AttributeError: # Input data is not a dict errors = self.get_errors(index=index) msg = field_obj.error_messages['type'].format( input=data, input_type=data.__class__.__name__ ) self.error_field_names = [SCHEMA] self.error_fields = [] errors = self.get_errors() errors.setdefault(SCHEMA, []).append(msg) # Input data type is incorrect, so we can bail out early break field_name = attr_name if raw_value is missing and field_obj.load_from: field_name = field_obj.load_from raw_value = data.get(field_obj.load_from, missing) if raw_value is missing: # Ignore missing field if we're allowed to. if ( partial is True or (partial_is_collection and attr_name in partial) ): continue _miss = field_obj.missing raw_value = _miss() if callable(_miss) else _miss if raw_value is missing and not field_obj.required: continue getter = lambda val: field_obj.deserialize( val, field_obj.load_from or attr_name, data ) value = self.call_and_store( getter_func=getter, data=raw_value, field_name=field_name, field_obj=field_obj, index=(index if index_errors else None) ) if value is not missing: key = fields_dict[attr_name].attribute or attr_name items.append((key, value)) ret = dict_class(items) else: ret = None if self.errors and not self._pending: raise ValidationError( self.errors, field_names=self.error_field_names, fields=self.error_fields, data=ret, ) return ret
def deserialize(
    self,
    data,
    fields_dict,
    many=False,
    partial=False,
    unknown=RAISE,
    dict_class=dict,
    index_errors=True,
    index=None,
):
    """Deserialize ``data`` based on the schema defined by ``fields_dict``.

    :param dict data: The data to deserialize.
    :param dict fields_dict: Mapping of field names to :class:`Field` objects.
    :param bool many: Set to `True` if ``data`` should be deserialized as
        a collection.
    :param bool|tuple partial: Whether to ignore missing fields. If its value is
        an iterable, only missing fields listed in that iterable will be
        ignored.
    :param unknown: Whether to exclude, include, or raise an error for unknown
        fields in the data. Use `EXCLUDE`, `INCLUDE` or `RAISE`.
    :param type dict_class: Dictionary class used to construct the output.
    :param bool index_errors: Whether to store the index of invalid items in
        ``self.errors`` when ``many=True``.
    :param int index: Index of the item being serialized (for storing errors) if
        serializing a collection, otherwise `None`.
    :return: A dictionary of the deserialized data.
    """
    if many and data is not None:
        self._pending = True
        ret = [
            self.deserialize(
                d,
                fields_dict,
                many=False,
                partial=partial,
                unknown=unknown,
                dict_class=dict_class,
                index=idx,
                index_errors=index_errors,
            )
            for idx, d in enumerate(data)
        ]
        self._pending = False
        if self.errors:
            raise ValidationError(
                self.errors,
                field_names=self.error_field_names,
                data=ret,
            )
        return ret
    partial_is_collection = is_collection(partial)
    ret = dict_class()
    # Check data is a dict
    if not isinstance(data, collections.Mapping):
        self.store_error(SCHEMA, ('Invalid input type.',), index=index)
    else:
        for attr_name, field_obj in iteritems(fields_dict):
            if field_obj.dump_only:
                continue
            field_name = attr_name
            if field_obj.data_key:
                field_name = field_obj.data_key
            raw_value = data.get(field_name, missing)
            if raw_value is missing:
                # Ignore missing field if we're allowed to.
                if (
                    partial is True or
                    (partial_is_collection and attr_name in partial)
                ):
                    continue
            getter = lambda val: field_obj.deserialize(val, field_name, data)
            value = self.call_and_store(
                getter_func=getter,
                data=raw_value,
                field_name=field_name,
                index=(index if index_errors else None),
            )
            if value is not missing:
                key = fields_dict[attr_name].attribute or attr_name
                set_value(ret, key, value)
        if unknown != EXCLUDE:
            fields = {
                field_obj.data_key or field_name
                for field_name, field_obj in fields_dict.items()
                if not field_obj.dump_only
            }
            for key in set(data) - fields:
                value = data[key]
                if unknown == INCLUDE:
                    set_value(ret, key, value)
                elif unknown == RAISE:
                    self.store_validation_error(
                        key,
                        ValidationError('Unknown field.'),
                        (index if index_errors else None),
                    )
    if self.errors and not self._pending:
        raise ValidationError(
            self.errors,
            field_names=self.error_field_names,
            data=ret,
        )
    return ret

def load_data(self, value: Dict[str, Any]) -> Any:
    many: bool = is_collection(value)
    print(value, many)
    return self._data_schema.load(value, many=many)