def test_set_value():
    """``utils.set_value`` writes flat and dot-delimited keys into a dict.

    A dotted key creates (or reuses) nested dicts; a non-dict value already
    sitting on the path is an error.
    """
    cases = [
        ({}, "foo", {"foo": 42}),
        ({}, "foo.bar", {"foo": {"bar": 42}}),
        ({"foo": {}}, "foo.bar", {"foo": {"bar": 42}}),
    ]
    for target, key, expected in cases:
        utils.set_value(target, key, 42)
        assert target == expected
    # Traversing through an existing non-dict value must raise.
    blocked = {"foo": 42}
    with pytest.raises(ValueError):
        utils.set_value(blocked, "foo.bar", 42)
def test_set_value():
    """Exercise ``utils.set_value`` for flat keys, nested keys, and conflicts."""
    def check(initial, key, expected):
        utils.set_value(initial, key, 42)
        assert initial == expected

    check({}, 'foo', {'foo': 42})
    check({}, 'foo.bar', {'foo': {'bar': 42}})
    check({'foo': {}}, 'foo.bar', {'foo': {'bar': 42}})
    # A non-dict intermediate value on the dotted path is rejected.
    conflict = {'foo': 42}
    with pytest.raises(ValueError):
        utils.set_value(conflict, 'foo.bar', 42)
def _deserialize(
    self,
    data: typing.Union[
        typing.Mapping[str, typing.Any],
        typing.Iterable[typing.Mapping[str, typing.Any]],
    ],
    *,
    error_store: ErrorStore,
    many: bool = False,
    partial=False,
    unknown=RAISE,
    index=None
) -> typing.Union[_T, typing.List[_T]]:
    """Deserialize ``data``.

    :param dict data: The data to deserialize.
    :param ErrorStore error_store: Structure to store errors.
    :param bool many: `True` if ``data`` should be deserialized as a collection.
    :param bool|tuple partial: Whether to ignore missing fields and not require
        any fields declared. Propagates down to ``Nested`` fields as well. If
        its value is an iterable, only missing fields listed in that iterable
        will be ignored. Use dot delimiters to specify nested fields.
    :param unknown: Whether to exclude, include, or raise an error for unknown
        fields in the data. Use `EXCLUDE`, `INCLUDE` or `RAISE`.
    :param int index: Index of the item being serialized (for storing errors) if
        serializing a collection, otherwise `None`.
    :return: A dictionary of the deserialized data.
    """
    index_errors = self.opts.index_errors
    # When the schema opts out of indexed errors, drop the index entirely.
    index = index if index_errors else None
    if many:
        if not is_collection(data):
            error_store.store_error([self.error_messages["type"]], index=index)
            ret = []  # type: typing.List[_T]
        else:
            # Recurse once per item; each item records its errors under its
            # own enumeration index.
            ret = [
                typing.cast(
                    _T,
                    self._deserialize(
                        typing.cast(typing.Mapping[str, typing.Any], d),
                        error_store=error_store,
                        many=False,
                        partial=partial,
                        unknown=unknown,
                        index=idx,
                    ),
                )
                for idx, d in enumerate(data)
            ]
        return ret
    ret = self.dict_class()
    # Check data is a dict
    if not isinstance(data, Mapping):
        error_store.store_error([self.error_messages["type"]], index=index)
    else:
        partial_is_collection = is_collection(partial)
        for attr_name, field_obj in self.load_fields.items():
            # Key looked up in the input: data_key when set, else the
            # attribute name itself.
            field_name = (
                field_obj.data_key if field_obj.data_key is not None else attr_name
            )
            raw_value = data.get(field_name, missing)
            if raw_value is missing:
                # Ignore missing field if we're allowed to.
                if partial is True or (
                    partial_is_collection and attr_name in partial
                ):
                    continue
            d_kwargs = {}
            # Allow partial loading of nested schemas.
            if partial_is_collection:
                # Forward only the entries under "<field_name>." to the nested
                # field, with the prefix stripped off.
                prefix = field_name + "."
                len_prefix = len(prefix)
                sub_partial = [
                    f[len_prefix:] for f in partial if f.startswith(prefix)
                ]
                d_kwargs["partial"] = sub_partial
            else:
                d_kwargs["partial"] = partial
            # The lambda captures this iteration's loop variables and is handed
            # straight to _call_and_store for this field, so late binding is
            # not an issue here.
            getter = lambda val: field_obj.deserialize(
                val, field_name, data, **d_kwargs
            )
            value = self._call_and_store(
                getter_func=getter,
                data=raw_value,
                field_name=field_name,
                error_store=error_store,
                index=index,
            )
            if value is not missing:
                key = field_obj.attribute or attr_name
                # set_value supports dot-delimited keys (creates nested dicts).
                set_value(typing.cast(typing.Dict, ret), key, value)
        if unknown != EXCLUDE:
            # All keys the schema knows, expressed as they appear in the input.
            fields = {
                field_obj.data_key if field_obj.data_key is not None else field_name
                for field_name, field_obj in self.load_fields.items()
            }
            for key in set(data) - fields:
                value = data[key]
                if unknown == INCLUDE:
                    set_value(typing.cast(typing.Dict, ret), key, value)
                elif unknown == RAISE:
                    error_store.store_error(
                        [self.error_messages["unknown"]],
                        key,
                        (index if index_errors else None),
                    )
    return ret
def _deserialize(
    self,
    data: typing.Union[
        typing.Mapping[str, typing.Any],
        typing.Iterable[typing.Mapping[str, typing.Any]],
    ],
    *args,
    error_store: ErrorStore,
    many: bool = False,
    partial=False,
    unknown=RAISE,
    index=None,
) -> typing.Union[_T, typing.List[_T]]:
    """Deserialize ``data`` (JSON-LD aware variant of ``Schema._deserialize``).

    In addition to ordinary field deserialization this:

    * indexes a flattened JSON-LD graph by ``@id`` and keeps only the nodes
      whose ``@type`` matches ``self.opts.rdf_type``,
    * expands compacted JSON-LD input (detected by an ``@context`` key),
    * resolves ``reverse`` fields through the input's ``@reverse`` map or,
      for flattened graphs, via ``self.get_reverse_links``.

    :param data: The data to deserialize.
    :param error_store: Structure to store errors.
    :param many: `True` if ``data`` should be deserialized as a collection.
    :param partial: Whether to ignore missing fields; may be an iterable of
        field names (dot delimiters select nested fields).
    :param unknown: `EXCLUDE`, `INCLUDE` or `RAISE` for unknown input keys.
    :param index: Index of the item being deserialized when inside a
        collection, otherwise `None`.
    :return: The deserialized data (a list when ``many`` is true).
    """
    index_errors = self.opts.index_errors
    index = index if index_errors else None
    if self.flattened and is_collection(data) and not self._all_objects:
        # First pass over a flattened graph: index every node by @id so that
        # nested/reverse lookups can resolve references later, and keep only
        # the nodes whose @type matches this schema's rdf_type.
        new_data = []
        self._all_objects = {}
        for d in data:
            self._all_objects[d["@id"]] = d
            if self._compare_ids(d["@type"], self.opts.rdf_type):
                new_data.append(d)
        data = new_data
        if len(data) == 1:
            # Exactly one matching node: treat the input as scalar.
            data = data[0]
    if many:
        if not is_collection(data):
            error_store.store_error([self.error_messages["type"]], index=index)
            ret = []  # type: typing.List[_T]
        else:
            ret = [
                typing.cast(
                    _T,
                    self._deserialize(
                        typing.cast(typing.Mapping[str, typing.Any], d),
                        error_store=error_store,
                        many=False,
                        partial=partial,
                        unknown=unknown,
                        index=idx,
                    ),
                )
                for idx, d in enumerate(data)
            ]
        return ret
    ret = self.dict_class()
    # Check data is a dict
    if not isinstance(data, Mapping):
        error_store.store_error([self.error_messages["type"]], index=index)
    else:
        if data.get("@context", None):
            # we got compacted jsonld, expand it
            data = jsonld.expand(data)
        partial_is_collection = is_collection(partial)
        for attr_name, field_obj in self.load_fields.items():
            field_name = field_obj.data_key if field_obj.data_key is not None else attr_name
            if getattr(field_obj, "reverse", False):
                # Reverse property: the value lives under the input's
                # @reverse map rather than under the field name itself.
                raw_value = data.get("@reverse", missing)
                if raw_value is not missing:
                    raw_value = raw_value.get(field_name, missing)
                elif self.flattened:
                    # find an object that refers to this one with the same property
                    raw_value = self.get_reverse_links(data, field_name)
                    if not raw_value:
                        raw_value = missing
            else:
                raw_value = data.get(field_name, missing)
            if raw_value is missing:
                # Ignore missing field if we're allowed to.
                if partial is True or (partial_is_collection and attr_name in partial):
                    continue
            d_kwargs = {}
            # Allow partial loading of nested schemas.
            if partial_is_collection:
                # Forward only the "<field_name>."-prefixed entries, stripped.
                prefix = field_name + "."
                len_prefix = len(prefix)
                sub_partial = [
                    f[len_prefix:] for f in partial if f.startswith(prefix)
                ]
                d_kwargs["partial"] = sub_partial
            else:
                d_kwargs["partial"] = partial
            # Propagate the graph context so nested fields can resolve @id
            # references against the same object index.
            d_kwargs["_all_objects"] = self._all_objects
            d_kwargs["flattened"] = self.flattened
            # Captured loop variables are consumed immediately by
            # _call_and_store below, so the lambda is safe.
            getter = lambda val: field_obj.deserialize(
                val, field_name, data, **d_kwargs)
            value = self._call_and_store(
                getter_func=getter,
                data=raw_value,
                field_name=field_name,
                error_store=error_store,
                index=index,
            )
            if value is not missing:
                key = field_obj.attribute or attr_name
                set_value(typing.cast(typing.Dict, ret), key, value)
        if unknown != EXCLUDE:
            fields = {
                field_obj.data_key if field_obj.data_key is not None else field_name
                for field_name, field_obj in self.load_fields.items()
            }
            for key in set(data) - fields:
                if key in ["@type", "@reverse"]:
                    # ignore JsonLD meta fields
                    continue
                value = data[key]
                if unknown == INCLUDE:
                    set_value(typing.cast(typing.Dict, ret), key, value)
                elif unknown == RAISE:
                    error_store.store_error(
                        [self.error_messages["unknown"]],
                        key,
                        (index if index_errors else None),
                    )
    return ret
def _deserialize(self, data, fields_dict, *, error_store, many=False,
                 partial=False, unknown=RAISE, dict_class=dict,
                 index_errors=True, index=None):
    """Deserialize ``data`` based on the schema defined by ``fields_dict``.

    :param dict data: The data to deserialize.
    :param dict fields_dict: Mapping of field names to :class:`Field` objects.
    :param ErrorStore error_store: Structure to store errors.
    :param bool many: Set to `True` if ``data`` should be deserialized as a collection.
    :param bool|tuple partial: Whether to ignore missing fields and not require
        any fields declared. Propagates down to ``Nested`` fields as well. If
        its value is an iterable, only missing fields listed in that iterable
        will be ignored. Use dot delimiters to specify nested fields.
    :param unknown: Whether to exclude, include, or raise an error for unknown
        fields in the data. Use `EXCLUDE`, `INCLUDE` or `RAISE`.
    :param type dict_class: Dictionary class used to construct the output.
    :param bool index_errors: Whether to store the index of invalid items in
        ``self.errors`` when ``many=True``.
    :param int index: Index of the item being serialized (for storing errors) if
        serializing a collection, otherwise `None`.
    :return: A dictionary of the deserialized data.
    """
    index = index if index_errors else None
    if many:
        if not is_collection(data):
            error_store.store_error([self.error_messages['type']], index=index)
            ret = []
        else:
            # _pending marks that a collection-level pass is in progress.
            # NOTE(review): its consumer is not visible in this block — it is
            # presumably read elsewhere to defer raising; confirm.
            self._pending = True
            ret = [
                self._deserialize(
                    d,
                    fields_dict,
                    error_store=error_store,
                    many=False,
                    partial=partial,
                    unknown=unknown,
                    dict_class=dict_class,
                    index=idx,
                    index_errors=index_errors,
                )
                for idx, d in enumerate(data)
            ]
            self._pending = False
        return ret
    ret = dict_class()
    # Check data is a dict
    if not isinstance(data, Mapping):
        error_store.store_error([self.error_messages['type']], index=index)
    else:
        partial_is_collection = is_collection(partial)
        for attr_name, field_obj in fields_dict.items():
            # Load-time processing only; dump-only fields never consume input.
            if field_obj.dump_only:
                continue
            # Key looked up in the input: data_key when set, else attr name.
            field_name = attr_name
            if field_obj.data_key:
                field_name = field_obj.data_key
            raw_value = data.get(field_name, missing)
            if raw_value is missing:
                # Ignore missing field if we're allowed to.
                if (partial is True or
                        (partial_is_collection and attr_name in partial)):
                    continue
            d_kwargs = {}
            # Allow partial loading of nested schemas.
            if partial_is_collection:
                # Forward only the "<field_name>."-prefixed entries, stripped.
                prefix = field_name + '.'
                len_prefix = len(prefix)
                sub_partial = [
                    f[len_prefix:] for f in partial if f.startswith(prefix)
                ]
                d_kwargs['partial'] = sub_partial
            else:
                d_kwargs['partial'] = partial
            # The lambda is consumed immediately by _call_and_store, so the
            # captured loop variables are safe.
            getter = lambda val: field_obj.deserialize(
                val, field_name, data, **d_kwargs)
            value = self._call_and_store(
                getter_func=getter,
                data=raw_value,
                field_name=field_name,
                error_store=error_store,
                index=index,
            )
            if value is not missing:
                key = fields_dict[attr_name].attribute or attr_name
                set_value(ret, key, value)
        if unknown != EXCLUDE:
            # Known input keys, as they appear in the payload.
            fields = {
                field_obj.data_key or field_name
                for field_name, field_obj in fields_dict.items()
                if not field_obj.dump_only
            }
            for key in set(data) - fields:
                value = data[key]
                if unknown == INCLUDE:
                    set_value(ret, key, value)
                elif unknown == RAISE:
                    error_store.store_error(
                        [self.error_messages['unknown']],
                        key,
                        (index if index_errors else None),
                    )
    return ret
def deserialize(self, data, fields_dict, many=False, partial=False,
                dict_class=dict, index_errors=True, index=None):
    """Deserialize ``data`` based on the schema defined by ``fields_dict``.

    :param dict data: The data to deserialize.
    :param dict fields_dict: Mapping of field names to :class:`Field` objects.
    :param bool many: Set to `True` if ``data`` should be deserialized as a collection.
    :param bool|tuple partial: Whether to ignore missing fields. If its value is
        an iterable, only missing fields listed in that iterable will be
        ignored.
    :param type dict_class: Dictionary class used to construct the output.
    :param bool index_errors: Whether to store the index of invalid items in
        ``self.errors`` when ``many=True``.
    :param int index: Index of the item being serialized (for storing errors) if
        serializing a collection, otherwise `None`.
    :return: A dictionary of the deserialized data.
    """
    if many and data is not None:
        if not is_collection(data):
            errors = self.get_errors(index=index)
            self.error_field_names.append(SCHEMA)
            errors[SCHEMA] = ['Invalid input type.']
            ret = []
        else:
            # _pending suppresses the per-item raise below so that errors are
            # accumulated across the whole collection first.
            self._pending = True
            ret = [
                self.deserialize(d, fields_dict, many=False,
                                 partial=partial, dict_class=dict_class,
                                 index=idx, index_errors=index_errors)
                for idx, d in enumerate(data)
            ]
            self._pending = False
        if self.errors:
            raise ValidationError(
                self.errors,
                field_names=self.error_field_names,
                fields=self.error_fields,
                data=ret,
            )
        return ret
    if data is not None:
        partial_is_collection = is_collection(partial)
        ret = dict_class()
        for attr_name, field_obj in iteritems(fields_dict):
            # Dump-only fields never consume input.
            if field_obj.dump_only:
                continue
            try:
                raw_value = data.get(attr_name, missing)
            except AttributeError:  # Input data is not a dict
                errors = self.get_errors(index=index)
                msg = field_obj.error_messages['type'].format(
                    input=data, input_type=data.__class__.__name__)
                self.error_field_names = [SCHEMA]
                self.error_fields = []
                # NOTE(review): this second call discards the indexed `errors`
                # fetched above — possibly relying on get_errors(index=index)
                # for a side effect only; confirm against get_errors.
                errors = self.get_errors()
                errors.setdefault(SCHEMA, []).append(msg)
                # Input data type is incorrect, so we can bail out early
                break
            field_name = attr_name
            # Fall back to the field's alternate input key (load_from).
            if raw_value is missing and field_obj.load_from:
                field_name = field_obj.load_from
                raw_value = data.get(field_obj.load_from, missing)
            if raw_value is missing:
                # Ignore missing field if we're allowed to.
                if (partial is True or
                        (partial_is_collection and attr_name in partial)):
                    continue
                # Otherwise substitute the field's declared missing default.
                _miss = field_obj.missing
                raw_value = _miss() if callable(_miss) else _miss
            if raw_value is missing and not field_obj.required:
                continue
            # The lambda is consumed immediately by call_and_store, so the
            # captured loop variables are safe.
            getter = lambda val: deserialize_field(
                field_obj, val, field_obj.load_from or attr_name, data,
                skip_validation=True)
            value = self.call_and_store(
                getter_func=getter,
                data=raw_value,
                field_name=field_name,
                field_obj=field_obj,
                index=(index if index_errors else None))
            if value is not missing:
                key = fields_dict[attr_name].attribute or attr_name
                set_value(ret, key, value)
    else:
        ret = None
    if self.errors and not self._pending:
        raise ValidationError(
            self.errors,
            field_names=self.error_field_names,
            fields=self.error_fields,
            data=ret,
        )
    return ret
def deserialize(
    self,
    data,
    fields_dict,
    many=False,
    partial=False,
    unknown=EXCLUDE,
    dict_class=dict,
    index_errors=True,
    index=None,
):
    """Deserialize ``data`` based on the schema defined by ``fields_dict``.

    :param dict data: The data to deserialize.
    :param dict fields_dict: Mapping of field names to :class:`Field` objects.
    :param bool many: Set to `True` if ``data`` should be deserialized as a collection.
    :param bool|tuple partial: Whether to ignore missing fields. If its value is
        an iterable, only missing fields listed in that iterable will be
        ignored.
    :param unknown: Whether to exclude, include, or raise an error for unknown
        fields in the data. Use `EXCLUDE`, `INCLUDE` or `RAISE`.
    :param type dict_class: Dictionary class used to construct the output.
    :param bool index_errors: Whether to store the index of invalid items in
        ``self.errors`` when ``many=True``.
    :param int index: Index of the item being serialized (for storing errors) if
        serializing a collection, otherwise `None`.
    :return: A dictionary of the deserialized data.
    """
    if many and data is not None:
        # _pending suppresses the per-item raise below so errors accumulate
        # across the whole collection first.
        self._pending = True
        ret = [
            self.deserialize(
                d,
                fields_dict,
                many=False,
                partial=partial,
                unknown=unknown,
                dict_class=dict_class,
                index=idx,
                index_errors=index_errors,
            )
            for idx, d in enumerate(data)
        ]
        self._pending = False
        if self.errors:
            raise ValidationError(
                self.errors,
                field_names=self.error_field_names,
                data=ret,
            )
        return ret
    partial_is_collection = is_collection(partial)
    ret = dict_class()
    # Check data is a dict
    # NOTE(review): collections.Mapping was removed in Python 3.10; modern
    # code should use collections.abc.Mapping — confirm target Python version.
    if not isinstance(data, collections.Mapping):
        errors = self.get_errors(index=index)
        msg = 'Invalid input type.'
        self.error_field_names = [SCHEMA]
        # NOTE(review): this call overwrites the indexed `errors` fetched
        # above — possibly intentional for a side effect of
        # get_errors(index=index); confirm.
        errors = self.get_errors()
        errors.setdefault(SCHEMA, []).append(msg)
    else:
        for attr_name, field_obj in iteritems(fields_dict):
            # Dump-only fields never consume input.
            if field_obj.dump_only:
                continue
            # Key looked up in the input: data_key when set, else attr name.
            field_name = attr_name
            if field_obj.data_key:
                field_name = field_obj.data_key
            raw_value = data.get(field_name, missing)
            if raw_value is missing:
                # Ignore missing field if we're allowed to.
                if (partial is True or
                        (partial_is_collection and attr_name in partial)):
                    continue
            # The lambda is consumed immediately by call_and_store, so the
            # captured loop variables are safe.
            getter = lambda val: field_obj.deserialize(
                val, field_name, data)
            value = self.call_and_store(
                getter_func=getter,
                data=raw_value,
                field_name=field_name,
                index=(index if index_errors else None),
            )
            if value is not missing:
                key = fields_dict[attr_name].attribute or attr_name
                set_value(ret, key, value)
        if unknown != EXCLUDE:
            # Known input keys, as they appear in the payload.
            fields = {
                field_obj.data_key or field_name
                for field_name, field_obj in fields_dict.items()
                if not field_obj.dump_only
            }
            for key in set(data) - fields:
                value = data[key]
                if unknown == INCLUDE:
                    set_value(ret, key, value)
                elif unknown == RAISE:
                    self.store_error(
                        field_name=key,
                        error=ValidationError('Unknown field.'),
                        index=(index if index_errors else None),
                    )
    if self.errors and not self._pending:
        raise ValidationError(
            self.errors,
            field_names=self.error_field_names,
            data=ret,
        )
    return ret