def _serialize(self, value, attr, obj, **kwargs):
    """Serialize a mapping through the configured key/value container fields.

    Returns ``None`` for ``None`` input; returns the mapping unchanged when
    neither container is configured; otherwise returns an ``OrderedDict``
    whose keys/values have been passed through the respective container's
    ``_serialize``.
    """
    if value is None:
        return None
    if not self.value_container and not self.key_container:
        # No typed containers configured: pass the mapping through as-is.
        return value
    # NOTE(review): ``collections.Mapping`` moved to ``collections.abc`` and
    # is gone in Python >= 3.10 — confirm the py2/py3 compat story here.
    if not isinstance(value, collections.Mapping):
        self.fail('invalid')
    if self.key_container is None:
        # Identity mapping: keys are emitted unchanged.
        keys = {k: k for k in value.keys()}
    else:
        keys = {
            k: self.key_container._serialize(k, None, None, **kwargs)
            for k in value.keys()
        }
    if self.value_container is None:
        result = collections.OrderedDict([(keys[k], v)
                                          for k, v in iteritems(value)
                                          if k in keys])
    else:
        result = collections.OrderedDict([
            (keys[k], self.value_container._serialize(v, None, None, **kwargs))
            for k, v in iteritems(value)
        ])
    return result
def get_related_kwargs():
    """Build per-schema ``only``/``exclude`` kwargs from the request's
    field-selection arguments.

    Reads ``fields`` (whitelist) and ``except_`` (blacklist) from
    ``get_info_args()``, indexes every registered schema by its endpoint(s),
    and returns ``{schema: {'only': [...], 'exclude': [...]}}``, keeping only
    names the schema actually declares.
    """
    info_args = get_info_args()
    related_kwargs = {}
    register_schemas = {}
    if info_args.fields or info_args.except_:
        # Index registered schemas by every endpoint they serve.
        for schema in get_api_manager().register_schemas:
            for endpoint in _get_schema_endpoint(schema):
                register_schemas.setdefault(endpoint, []).append(schema)
        # Fields to include for the given collection type.
        for one_coll_name, fields in iteritems(info_args.fields):
            schemas = register_schemas.get(one_coll_name, [])
            for schema in schemas:
                related_kwargs.setdefault(schema, {})["only"] = [
                    field for field in fields
                    if field in schema._declared_fields
                ]
        # Fields to exclude for the given collection type.
        for one_coll_name, fields in iteritems(info_args.except_):
            schemas = register_schemas.get(one_coll_name, [])
            for schema in schemas:
                related_kwargs.setdefault(schema, {})["exclude"] = [
                    field for field in fields
                    if field in schema._declared_fields
                ]
    return related_kwargs
def unwrap_item(self, item):
    """Flatten a JSON API resource object into a plain payload dict.

    Validates the required ``type`` member, copies ``id``/``meta`` through,
    folds ``attributes`` and ``relationships`` into the top level and — when
    the document carried ``included`` data — replaces relationship linkage
    with the full included objects so whole objects can be deserialized.

    :raises ma.ValidationError: if ``type`` is missing.
    :raises IncorrectTypeError: if ``type`` does not match ``opts.type_``.
    """
    if 'type' not in item:
        raise ma.ValidationError([
            {
                'detail': '`data` object must include `type` key.',
                'source': {
                    'pointer': '/data',
                },
            },
        ])
    if item['type'] != self.opts.type_:
        raise IncorrectTypeError(actual=item['type'], expected=self.opts.type_)
    payload = self.dict_class()
    if 'id' in item:
        payload['id'] = item['id']
    if 'meta' in item:
        payload[_RESOURCE_META_LOAD_FROM] = item['meta']
    if self.document_meta:
        payload[_DOCUMENT_META_LOAD_FROM] = self.document_meta
    for key, value in iteritems(item.get('attributes', {})):
        payload[key] = value
    for key, value in iteritems(item.get('relationships', {})):
        # Fold included data related to this relationship into the item, so
        # that we can deserialize the whole objects instead of just IDs.
        if self.included_data:
            included_data = []
            inner_data = value.get('data', [])
            # Data may be ``None`` (for empty relationships), but we only
            # need to process it when it's present.
            if inner_data:
                if not is_collection(inner_data):
                    # To-one relationship: take the first matching included
                    # resource (``None`` when nothing matched).
                    included_data = next(
                        self._extract_from_included(inner_data),
                        None,
                    )
                else:
                    # To-many relationship: collect every match.
                    for data in inner_data:
                        included_data.extend(
                            self._extract_from_included(data),
                        )
            if included_data:
                value['data'] = included_data
        payload[key] = value
    return payload
def load_operations_from_docstring(docstring):
    """Return the path operations found in a docstring's YAML section.

    Only keys listed in ``PATH_KEYS`` are kept; returns ``None`` when the
    docstring carries no YAML data at all.
    """
    doc_data = load_yaml_from_docstring(docstring)
    if not doc_data:
        return None
    operations = {}
    for key, val in iteritems(doc_data):
        if key in PATH_KEYS:
            operations[key] = val
    return operations
def format_item(self, item):
    """Format a single datum as a Resource object.

    See: http://jsonapi.org/format/#document-resource-objects
    """
    resource = self.dict_class()
    resource[TYPE] = self.opts.type_

    # Map each serialized name (``dump_to`` override or the field name
    # itself) back to the declared attribute so `dump-to` values resolve.
    attributes = {}
    for field in self.fields:
        attributes[self.fields[field].dump_to or field] = field

    for field_name, value in iteritems(item):
        attribute = attributes[field_name]
        if attribute == ID:
            resource[ID] = value
        elif isinstance(self.fields[attribute], BaseRelationship):
            if 'relationships' not in resource:
                resource['relationships'] = self.dict_class()
            resource['relationships'][self.inflect(field_name)] = value
        else:
            if 'attributes' not in resource:
                resource['attributes'] = self.dict_class()
            resource['attributes'][self.inflect(field_name)] = value

    links = self.get_resource_links(item)
    if links:
        resource['links'] = links
    return resource
def get_field_names_for_argmap(argmap):
    """Return the set of loadable argument names declared by ``argmap``.

    :param argmap: Either a marshmallow ``Schema`` (whose ``dump_only``
        fields are skipped, since they are never loaded from a request) or a
        plain mapping of argument names to fields.
    :rtype: set
    """
    if isinstance(argmap, ma.Schema):
        # Set comprehension instead of set([...]) (flake8-comprehensions C403).
        all_field_names = {
            fname for fname, fobj in iteritems(argmap.fields)
            if not fobj.dump_only
        }
    else:
        all_field_names = set(argmap.keys())
    return all_field_names
def format_item(self, item):
    """Format a single datum as a Resource object.

    See: http://jsonapi.org/format/#document-resource-objects
    """
    ret = self.dict_class()
    ret[TYPE] = self.opts.type_
    # Get the schema attributes so we can confirm `dump-to` values exist
    attributes = {
        (self.fields[field].dump_to or field): field
        for field in self.fields
    }
    for field_name, value in iteritems(item):
        attribute = attributes[field_name]
        if attribute == ID:
            ret[ID] = value
        elif isinstance(self.fields[attribute], BaseRelationship):
            # Relationship values go under the top-level 'relationships' key.
            if 'relationships' not in ret:
                ret['relationships'] = self.dict_class()
            ret['relationships'][self.inflect(field_name)] = value
        else:
            if 'attributes' not in ret:
                ret['attributes'] = self.dict_class()
            ret['attributes'][self.inflect(field_name)] = value
    links = self.get_resource_links(item)
    if links:
        ret['links'] = links
    return ret
def _serialize(self, value, key, obj):
    """Output the URL for the endpoint, given the kwargs passed to
    ``__init__``.
    """
    param_values = {}
    for name, attr_tpl in iteritems(self.params):
        attr_name = _tpl(str(attr_tpl))
        if attr_name:
            # `<attr>` template: resolve against the object being serialized.
            attribute_value = utils.get_value(attr_name, obj, default=missing)
            if attribute_value is not missing:
                param_values[name] = attribute_value
            else:
                err = AttributeError('{attr_name!r} is not a valid '
                                     'attribute of {obj!r}'.format(
                                         attr_name=attr_name, obj=obj))
                # NOTE(review): ``has_forced_error`` is not defined in this
                # method's scope — presumably a module-level flag; confirm.
                if has_forced_error:
                    raise ForcedError(err)
                else:
                    raise err
        else:
            # Literal (non-template) parameter: pass through unchanged.
            param_values[name] = attr_tpl
    try:
        return url_for(self.endpoint, **param_values)
    except BuildError as err:
        # Make sure BuildErrors are raised
        if has_forced_error:
            raise ForcedError(err)
        else:
            raise err
def marshal(self, data, fields_dict, many=False):
    """Takes raw data (a dict, list, or other object) and a dict of fields to
    output and filters the data based on those fields.

    :param data: The actual object(s) from which the fields are taken from
    :param dict fields: A dict whose keys will make up the final serialized
        response output.
    :param bool many: Set to ``True`` if ``data`` is a collection object that
        is iterable.
    :returns: An OrderedDict of the marshalled data
    """
    if many and data is not None:
        # Marshal each member of the collection individually.
        return [self.marshal(d, fields_dict, many=False) for d in data]
    items = []
    for attr_name, field_obj in iteritems(fields_dict):
        key = self.prefix + attr_name
        try:
            item = (key, field_obj.output(attr_name, data))
        except MarshallingError as err:
            # Store errors; only abort when running in strict mode.
            if self.strict:
                raise err
            self.errors[key] = text_type(err)
            item = (key, None)
        except TypeError:
            # field declared as a class, not an instance
            if isinstance(field_obj, type) and \
                    issubclass(field_obj, FieldABC):
                msg = ('Field for "{0}" must be declared as a '
                       "Field instance, not a class. "
                       'Did you mean "fields.{1}()"?'.format(
                           attr_name, field_obj.__name__))
                raise TypeError(msg)
            # Unrelated TypeError from the field itself: re-raise as-is.
            raise
        items.append(item)
    return OrderedDict(items)
def _parse_request(self, schema, req, locations):
    """Coroutine: return a parsed arguments dictionary for the request.

    Handles both marshmallow 2 (``load_from``) and marshmallow 3
    (``data_key``) field-name overrides; ``schema.many`` is only supported
    for the JSON location.
    """
    if schema.many:
        assert 'json' in locations, 'schema.many=True is only supported for JSON location'
        # The ad hoc Nested field is more like a workaround or a helper, and it servers its
        # purpose fine. However, if somebody has a desire to re-design the support of
        # bulk-type arguments, go ahead.
        parsed = yield from self.parse_arg(name='json',
                                           field=ma.fields.Nested(schema, many=True),
                                           req=req,
                                           locations=locations)
        if parsed is missing:
            # No body at all: treat as an empty collection.
            parsed = []
    else:
        argdict = schema.fields
        parsed = {}
        for argname, field_obj in iteritems(argdict):
            if core.MARSHMALLOW_VERSION_INFO[0] < 3:
                parsed_value = yield from self.parse_arg(argname, field_obj, req, locations)
                # If load_from is specified on the field, try to parse from that key
                if parsed_value is missing and field_obj.load_from:
                    parsed_value = yield from self.parse_arg(field_obj.load_from,
                                                             field_obj, req, locations)
                    argname = field_obj.load_from
            else:
                # marshmallow 3: ``data_key`` replaces ``load_from``.
                argname = field_obj.data_key or argname
                parsed_value = yield from self.parse_arg(argname, field_obj, req, locations)
            if parsed_value is not missing:
                parsed[argname] = parsed_value
    return parsed
def _parse_request(self, schema, req, locations):
    """Return a parsed arguments dictionary for the current request."""
    if schema.many:
        assert 'json' in locations, 'schema.many=True is only supported for JSON location'
        # The ad hoc Nested field is more like a workaround or a helper, and it servers its
        # purpose fine. However, if somebody has a desire to re-design the support of
        # bulk-type arguments, go ahead.
        # NOTE(review): this call signature differs from the sibling
        # implementations (no name/field arguments) — confirm that
        # ``parse_arg`` really accepts ``need_list``.
        parsed = self.parse_arg(
            req=req,
            locations=locations,
            need_list=True
        )
    else:
        parsed = self.parse_arg(req, locations)
    argdict = schema.fields
    for argname, field_obj in iteritems(argdict):
        # A field may pin itself to a single location via its metadata.
        loc = field_obj.metadata.get('location')
        multiple = is_multiple(field_obj)
        if loc:
            locations_to_check = self._validated_locations([loc])
            parsed_value = self.parse_arg(req, locations_to_check)
            value = get_value(parsed_value, argname, field_obj, allow_many_nested=True)
            if value is not missing:
                parsed[argname] = value
        if isinstance(parsed, dict):
            val = parsed.get(argname)
            # Normalize a scalar into a list for multi-value fields.
            if multiple and val is not None and not isinstance(val, (list, tuple)):
                parsed[argname] = [val]
    return parsed
def marshal(self, data, fields_dict, many=False):
    """Filter ``data`` through ``fields_dict`` and return the result.

    :param data: The object(s) the field values are read from.
    :param dict fields_dict: Field-name to field-object mapping whose keys
        make up the serialized output.
    :param bool many: Set to ``True`` if ``data`` is an iterable collection.
    :returns: An OrderedDict of the marshalled data
    """
    if many and data is not None:
        return [self.marshal(each, fields_dict, many=False) for each in data]
    result = OrderedDict()
    for attr_name, field_obj in iteritems(fields_dict):
        key = self.prefix + attr_name
        try:
            result[key] = field_obj.output(attr_name, data)
        except MarshallingError as err:
            # Store errors unless running strict.
            if self.strict:
                raise err
            self.errors[key] = text_type(err)
            result[key] = None
        except TypeError:
            # field declared as a class, not an instance
            if isinstance(field_obj, type) and issubclass(field_obj, FieldABC):
                raise TypeError(
                    'Field for "{0}" must be declared as a '
                    "Field instance, not a class. "
                    'Did you mean "fields.{1}()"?'
                    .format(attr_name, field_obj.__name__)
                )
            raise
    return result
def __set_field_attrs(self, fields_dict):
    """Bind fields to the schema, setting any necessary attributes on the
    fields (e.g. parent and name).

    Also set field load_only and dump_only values if field_name was
    specified in ``class Meta``.
    """
    for field_name, field_obj in iteritems(fields_dict):
        try:
            if field_name in self.load_only:
                field_obj.load_only = True
            if field_name in self.dump_only:
                field_obj.dump_only = True
            field_obj._add_to_schema(field_name, self)
            self.on_bind_field(field_name, field_obj)
        except TypeError:
            # field declared as a class, not an instance
            if not (isinstance(field_obj, type) and
                    issubclass(field_obj, base.FieldABC)):
                continue
            raise TypeError(('Field for "{0}" must be declared as a '
                             'Field instance, not a class. '
                             'Did you mean "fields.{1}()"?').format(
                                 field_name, field_obj.__name__))
    return fields_dict
def fields2parameters(fields, schema_cls=None, spec=None, use_refs=True,
                      dump=True, default_in='body', name='body', required=False):
    """Return an array of Swagger parameters given a mapping between field names and
    :class:`Field <marshmallow.Field>` objects.

    If `default_in` is "body", then return an array of a single parameter; else return
    an array of a parameter for each included field in
    the :class:`Schema <marshmallow.Schema>`.

    https://github.com/wordnik/swagger-spec/blob/master/versions/2.0.md#parameterObject
    """
    Meta = getattr(schema_cls, 'Meta', None)
    if default_in == 'body':
        if schema_cls is not None:
            # Prevent circular import
            from apispec.ext.marshmallow import resolve_schema_dict
            prop = resolve_schema_dict(spec, schema_cls, dump=dump)
        else:
            prop = fields2jsonschema(
                fields, schema_cls=schema_cls, spec=spec, use_refs=use_refs, dump=dump
            )
        # A body location collapses the whole schema into one parameter.
        return [{
            'in': default_in,
            'required': required,
            'name': name,
            'schema': prop,
        }]
    # Non-body locations get one parameter per included, loadable field.
    return [
        field2parameter(field_obj,
                        name=_observed_name(field_obj, field_name),
                        spec=spec,
                        use_refs=use_refs,
                        dump=dump,
                        default_in=default_in)
        for field_name, field_obj in iteritems(fields)
        if (
            (field_name not in getattr(Meta, 'exclude', [])) and
            not (field_obj.dump_only and not dump)
        )
    ]
def dump(self, obj, many=None, expand=0, **kwargs):
    """Serialize ``obj``.

    :param obj: Object(s) to serialize.
    :param many: Whether ``obj`` is a collection.
    :param expand: When ``expand`` >= 1 or ``None``, ``Related`` fields take
        effect and child resources are expanded; otherwise every ``Related``
        field is excluded from the output.
    :param kwargs: Passed through to ``Schema.dump``.
    :return: The serialization result.
    """
    old_exclude = self.exclude
    old_only = self.only
    try:
        self._current_expand = expand
        if self._current_expand is not None and self._current_expand <= 0:
            # Expansion disabled: drop every Related field from the dump.
            exclude = list(old_exclude) if old_exclude else []
            only = list(old_only) if old_only else []
            for key, field in iteritems(self._declared_fields):
                if isinstance(field, Related):
                    if key not in exclude:
                        exclude.append(key)
                    if key in only:
                        only.remove(key)
            # NOTE(review): the adjusted lists are applied only when they are
            # non-empty — an ``only`` that becomes empty is silently ignored;
            # confirm this is intended.
            if exclude:
                self.exclude = exclude
            if only:
                self.only = only
        return super(ModelSchema, self).dump(obj, many=many, **kwargs)
    finally:
        # Always restore the schema's original field selection.
        self._current_expand = None
        self.exclude = old_exclude
        self.only = old_only
def _parse_request(self, argmap, req, locations):
    """Return a dict of parsed arguments for the current request.

    :param argmap: A marshmallow ``Schema`` (its declared fields are used)
        or a plain mapping of argument names to ``Field`` objects.
    :param req: The incoming request object.
    :param locations: Request locations to search, falling back to the
        parser's default ``self.locations`` when falsy.
    """
    argdict = argmap.fields if isinstance(argmap, ma.Schema) else argmap
    parsed = {}
    # Bug fix: the original loop body consisted of two bare ``pass``
    # statements, so every argument was silently dropped and the function
    # always returned {}. Parse each declared field, mirroring the sibling
    # ``_parse_request`` implementations.
    for argname, field_obj in iteritems(argdict):
        parsed[argname] = self.parse_arg(argname, field_obj, req,
                                         locations=locations or self.locations)
    return parsed
def _parse_request(self, schema, req, locations):
    """Return a parsed arguments dictionary for the current request."""
    if schema.many:
        assert 'json' in locations, "schema.many=True is only supported for JSON location"
        # The ad hoc Nested field is more like a workaround or a helper, and it servers its
        # purpose fine. However, if somebody has a desire to re-design the support of
        # bulk-type arguments, go ahead.
        parsed = self.parse_arg(name='json',
                                field=ma.fields.Nested(schema, many=True),
                                req=req,
                                locations=locations)
        if parsed is missing:
            # No body at all: treat as an empty collection.
            parsed = []
    else:
        argdict = schema.fields
        parsed = {}
        for argname, field_obj in iteritems(argdict):
            parsed_value = self.parse_arg(argname, field_obj, req, locations)
            # If load_from is specified on the field, try to parse from that key
            if parsed_value is missing and field_obj.load_from:
                parsed_value = self.parse_arg(field_obj.load_from, field_obj, req, locations)
                # Store the value under the alternate key it was found at.
                argname = field_obj.load_from
            if parsed_value is not missing:
                parsed[argname] = parsed_value
    return parsed
def fields2jsonschema(fields, schema_cls=None, spec=None, use_refs=True):
    """Return the JSON Schema Object for a given marshmallow
    :class:`Schema <marshmallow.Schema>`.

    Schema may optionally provide the ``title`` and ``description`` class
    Meta options.

    https://github.com/wordnik/swagger-spec/blob/master/versions/2.0.md#schemaObject

    Example: ::

        class UserSchema(Schema):
            _id = fields.Int()
            email = fields.Email(description='email address of the user')
            name = fields.Str()

            class Meta:
                title = 'User'
                description = 'A registered user'

        schema2jsonschema(UserSchema)
        # {
        #     'title': 'User', 'description': 'A registered user',
        #     'properties': {
        #         'name': {'required': False,
        #                  'description': '',
        #                  'type': 'string'},
        #         '_id': {'format': 'int32',
        #                 'required': False,
        #                 'description': '',
        #                 'type': 'integer'},
        #         'email': {'format': 'email',
        #                   'required': False,
        #                   'description': 'email address of the user',
        #                   'type': 'string'}
        #     }
        # }

    :param type schema_cls: A marshmallow :class:`Schema <marshmallow.Schema>`
    :rtype: dict, a JSON Schema Object
    """
    Meta = getattr(schema_cls, 'Meta', None)
    if getattr(Meta, 'fields', None) or getattr(Meta, 'additional', None):
        warnings.warn(
            'Only explicitly-declared fields will be included in the Schema Object. '
            'Fields defined in Meta.fields or Meta.additional are excluded.')
    ret = {'properties': {}}
    exclude = set(getattr(Meta, 'exclude', []))
    for field_name, field_obj in iteritems(fields):
        if field_name in exclude:
            continue
        ret['properties'][field_name] = field2property(field_obj, spec=spec, use_refs=use_refs)
        # Required fields are collected into a top-level 'required' array.
        if field_obj.required:
            ret.setdefault('required', []).append(field_name)
    if Meta is not None:
        if hasattr(Meta, 'title'):
            ret['title'] = Meta.title
        if hasattr(Meta, 'description'):
            ret['description'] = Meta.description
    return ret
def __set_field_attrs(self, fields_dict):
    """Bind each field to this schema: apply the ``load_only``/``dump_only``
    overrides declared in ``class Meta``, attach the field, and run the
    ``on_bind_field`` hook.
    """
    for name, field in iteritems(fields_dict):
        try:
            if name in self.load_only:
                field.load_only = True
            if name in self.dump_only:
                field.dump_only = True
            field._add_to_schema(name, self)
            self.on_bind_field(name, field)
        except TypeError:
            # field declared as a class, not an instance
            field_is_class = isinstance(field, type) and issubclass(field, base.FieldABC)
            if field_is_class:
                raise TypeError('Field for "{0}" must be declared as a '
                                'Field instance, not a class. '
                                'Did you mean "fields.{1}()"?'.format(
                                    name, field.__name__))
    return fields_dict
def _serialize(self, value, key, obj):
    """Output the URL for the endpoint, given the kwargs passed to
    ``__init__``.
    """
    param_values = {}
    for name, attr_tpl in iteritems(self.params):
        attr_name = _tpl(str(attr_tpl))
        if attr_name:
            # `<attr>` template: resolve against the object being serialized.
            attribute_value = utils.get_value(attr_name, obj, default=missing)
            if attribute_value is not missing:
                param_values[name] = attribute_value
            else:
                err = AttributeError(
                    '{attr_name!r} is not a valid '
                    'attribute of {obj!r}'.format(attr_name=attr_name, obj=obj)
                )
                # NOTE(review): ``has_forced_error`` is not defined in this
                # method's scope — presumably a module-level flag; confirm.
                if has_forced_error:
                    raise ForcedError(err)
                else:
                    raise err
        else:
            # Literal (non-template) parameter: pass through unchanged.
            param_values[name] = attr_tpl
    try:
        return url_for(self.endpoint, **param_values)
    except BuildError as err:
        # Make sure BuildErrors are raised
        if has_forced_error:
            raise ForcedError(err)
        else:
            raise err
def metadata2properties(self, field):
    """Return a dictionary of properties extracted from field Metadata.

    Will include field metadata that are valid properties of `OpenAPI schema
    objects
    <https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#schemaObject>`_
    (e.g. "description", "enum", "example").

    In addition, `specification extensions
    <https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#specification-extensions>`_
    are supported.  Prefix `x_` to the desired extension when passing the
    keyword argument to the field constructor. apispec will convert `x_` to
    `x-` to comply with OpenAPI.

    :param Field field: A marshmallow field.
    :rtype: dict
    """
    # Dasherize metadata that starts with x_
    metadata = {}
    for key, value in iteritems(field.metadata):
        if key.startswith("x_"):
            key = key.replace("_", "-")
        metadata[key] = value
    # Avoid validation error with "Additional properties not allowed"
    return {
        key: value
        for key, value in metadata.items()
        if key in _VALID_PROPERTIES or key.startswith(_VALID_PREFIX)
    }
def fields2jsonschema(self, fields, ordered=False, partial=None):
    """Return the JSON Schema Object given a mapping between field names and
    :class:`Field <marshmallow.Field>` objects.

    :param dict fields: A dictionary of field name field object pairs
    :param bool ordered: Whether to preserve the order in which fields were declared
    :param bool|tuple partial: Whether to override a field's required flag.
        If `True` no fields will be set as required. If an iterable fields
        in the iterable will not be marked as required.
    :rtype: dict, a JSON Schema Object
    """
    jsonschema = {
        "type": "object",
        "properties": OrderedDict() if ordered else {},
    }
    for field_name, field_obj in iteritems(fields):
        observed_field_name = self._observed_name(field_obj, field_name)
        # ``prop`` rather than ``property`` so the builtin is not shadowed.
        prop = self.field2property(field_obj)
        jsonschema["properties"][observed_field_name] = prop
        if field_obj.required:
            # ``partial`` suppresses the required flag globally (True) or
            # for the listed field names (iterable).
            if not partial or (is_collection(partial) and field_name not in partial):
                jsonschema.setdefault("required", []).append(observed_field_name)
    if "required" in jsonschema:
        jsonschema["required"].sort()
    return jsonschema
def _serialize(self, value, attr, obj):
    """Resolve any context-key templates in ``self.params`` from
    ``self.context``, then delegate serialization to the parent field.

    Bug fix: the loop variable previously shadowed the ``value`` argument,
    so the parent ``_serialize`` received the last parameter template
    instead of the value actually being serialized.
    """
    for name, attr_tpl in iteritems(self.params):
        attr_context = _key(str(attr_tpl))
        if attr_context:
            # Fall back to '' when the context key is absent.
            self.params[name] = self.context.get(attr_context, '')
    return super()._serialize(value, attr, obj)
def marshal(self, data, fields_dict):
    """Takes the data (a dict, list, or object) and a dict of fields.

    Stores any errors that occur.

    :param data: The actual object(s) from which the fields are taken from
    :param dict fields_dict: A dict whose keys will make up the final
        serialized response output
    """
    if utils.is_collection(data):
        # Marshal each member of the collection individually.
        return [self.marshal(d, fields_dict) for d in data]
    items = []
    for attr_name, field_obj in iteritems(fields_dict):
        key = self.prefix + attr_name
        try:
            if isinstance(field_obj, dict):
                # Nested dict of fields: marshal recursively.
                item = (key, self.marshal(data, field_obj))
            else:
                try:
                    item = (key, field_obj.output(attr_name, data))
                except TypeError:
                    # field declared as a class, not an instance
                    if issubclass(field_obj, base.FieldABC):
                        msg = ('Field for "{0}" must be declared as a '
                               "Field instance, not a class. "
                               'Did you mean "fields.{1}()"?'
                               .format(attr_name, field_obj.__name__))
                        raise TypeError(msg)
                    # Unrelated TypeError: re-raise as-is.
                    raise
        except exceptions.MarshallingError as err:
            # Store errors; only abort when strict mode is active.
            if self.strict or self.opts.strict:
                raise err
            self.errors[key] = text_type(err)
            item = (key, None)
        items.append(item)
    return OrderedDict(items)
def fields2parameters(fields, schema=None, spec=None, use_refs=True,
                      default_in='body', name='body', required=False,
                      use_instances=False, description=None, **kwargs):
    """Return an array of OpenAPI parameters given a mapping between field names and
    :class:`Field <marshmallow.Field>` objects.

    If `default_in` is "body", then return an array of a single parameter; else return
    an array of a parameter for each included field in
    the :class:`Schema <marshmallow.Schema>`.

    https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#parameterObject
    """
    swagger_default_in = __location_map__.get(default_in, default_in)
    if swagger_default_in == 'body':
        if schema is not None:
            # Prevent circular import
            from apispec.ext.marshmallow import resolve_schema_dict
            prop = resolve_schema_dict(spec, schema, dump=False, use_instances=use_instances)
        else:
            prop = fields2jsonschema(fields, spec=spec, use_refs=use_refs, dump=False)
        # A body location collapses the whole schema into one parameter.
        param = {
            'in': swagger_default_in,
            'required': required,
            'name': name,
            'schema': prop,
        }
        if description:
            param['description'] = description
        return [param]
    assert not getattr(schema, 'many', False), \
        "Schemas with many=True are only supported for 'json' location (aka 'in: body')"
    exclude_fields = getattr(getattr(schema, 'Meta', None), 'exclude', [])
    dump_only_fields = getattr(getattr(schema, 'Meta', None), 'dump_only', [])
    parameters = []
    body_param = None
    for field_name, field_obj in iteritems(fields):
        if (field_name in exclude_fields
                or field_obj.dump_only
                or field_name in dump_only_fields):
            continue
        param = field2parameter(field_obj,
                                name=_observed_name(field_obj, field_name),
                                spec=spec,
                                use_refs=use_refs,
                                default_in=default_in)
        if param['in'] == 'body' and body_param is not None:
            # Merge subsequent body fields into the first body parameter.
            body_param['schema']['properties'].update(param['schema']['properties'])
            required_fields = param['schema'].get('required', [])
            if required_fields:
                body_param['schema'].setdefault('required', []).extend(required_fields)
        else:
            if param['in'] == 'body':
                # Remember the first body parameter so later ones merge into it.
                body_param = param
            parameters.append(param)
    return parameters
def serialize(self, obj, fields_dict, many=False, strict=False, accessor=None, dict_class=dict, index_errors=True, index=None): """Takes raw data (a dict, list, or other object) and a dict of fields to output and serializes the data based on those fields. :param obj: The actual object(s) from which the fields are taken from :param dict fields_dict: Mapping of field names to :class:`Field` objects. :param bool many: Set to `True` if ``data`` should be serialized as a collection. :param bool strict: If `True`, raise errors if invalid data are passed in instead of failing silently and storing the errors. :param callable accessor: Function to use for getting values from ``obj``. :param type dict_class: Dictionary class used to construct the output. :param bool index_errors: Whether to store the index of invalid items in ``self.errors`` when ``many=True``. :param int index: Index of the item being serialized (for storing errors) if serializing a collection, otherwise `None`. :return: A dictionary of the marshalled data .. versionchanged:: 1.0.0 Renamed from ``marshal``. 
""" # Reset errors dict if not serializing a collection if not self._pending: self.reset_errors() if many and obj is not None: self._pending = True ret = [self.serialize(d, fields_dict, many=False, strict=strict, dict_class=dict_class, accessor=accessor, index=idx, index_errors=index_errors) for idx, d in enumerate(obj)] self._pending = False return ret items = [] for attr_name, field_obj in iteritems(fields_dict): if getattr(field_obj, 'load_only', False): continue if not self.prefix: key = attr_name else: key = ''.join([self.prefix, attr_name]) getter = lambda d: field_obj.serialize(attr_name, d, accessor=accessor) value = self.call_and_store( getter_func=getter, data=obj, field_name=key, field_obj=field_obj, index=(index if index_errors else None) ) if value is missing: continue items.append((key, value)) if self.errors and strict: raise ValidationError( self.errors, field_names=self.error_field_names, fields=self.error_fields ) return dict_class(items)
def _parse_request(self, argmap, req, locations):
    """Parse every declared argument out of ``req`` and return the result."""
    if isinstance(argmap, ma.Schema):
        argdict = argmap.fields
    else:
        argdict = argmap
    parsed = {}
    for argname, field_obj in iteritems(argdict):
        # Fall back to the parser's default locations when none are given.
        parsed[argname] = self.parse_arg(
            argname, field_obj, req, locations=locations or self.locations
        )
    return parsed
def make_instance(self, data):
    """Deserialize data to an instance of the model.

    Update an existing row if specified in `self.instance` or loaded by
    primary key(s) in the data; else create a new row.

    :param data: Data to deserialize.
    """
    instance = self.instance or self.get_instance(data)
    if instance is None:
        # No existing row: build a new model, applying association
        # attributes after construction.
        kwargs, association_attrs = self._split_model_kwargs_association(data)
        new_instance = self.opts.model(**kwargs)
        for attr, value in iteritems(association_attrs):
            setattr(new_instance, attr, value)
        return new_instance
    for key, value in iteritems(data):
        setattr(instance, key, value)
    return instance
def _parse_request(self, schema, req, locations):
    """Coroutine: parse each declared schema field from ``req``."""
    result = {}
    for name, field in iteritems(schema.fields):
        # Fall back to the parser's default locations when none are given.
        value = yield from self.parse_arg(
            name, field, req, locations=locations or self.locations
        )
        result[name] = value
    return result
def __init__(self, meta, *args, **kwargs):
    """Collect model options from ``meta``, falling back to defaults."""
    super(ModelSchemaOpts, self).__init__(meta, *args, **kwargs)
    for attr, default_value in iteritems(DEFAULT_MODEL_OPTS):
        # getattr with a default replaces the hasattr/getattr dance.
        setattr(self, attr, getattr(meta, attr, default_value))
    self.model = getattr(meta, 'model', None)
def unwrap_item(self, item):
    """Validate a JSON API resource object and flatten it into a payload dict.

    :raises ma.ValidationError: if the ``type`` member is missing.
    :raises IncorrectTypeError: if ``type`` does not match ``opts.type_``.
    """
    if 'type' not in item:
        raise ma.ValidationError([
            {
                'detail': '`data` object must include `type` key.',
                'pointer': '/data'
            }
        ])
    if item['type'] != self.opts.type_:
        raise IncorrectTypeError(actual=item['type'], expected=self.opts.type_)
    payload = self.dict_class()
    if 'id' in item:
        payload['id'] = item['id']
    # Fold attributes and relationships into the flat payload.
    payload.update(item.get('attributes', {}))
    payload.update(item.get('relationships', {}))
    return payload
def _serialize(
    self,
    obj,
    fields_dict,
    error_store,
    many=False,
    accessor=None,
    dict_class=dict,
    index_errors=True,
    index=None,
):
    """Takes raw data (a dict, list, or other object) and a dict of fields to
    output and serializes the data based on those fields.

    :param obj: The actual object(s) from which the fields are taken from
    :param dict fields_dict: Mapping of field names to :class:`Field` objects.
    :param ErrorStore error_store: Structure to store errors.
    :param bool many: Set to `True` if ``data`` should be serialized as a collection.
    :param callable accessor: Function to use for getting values from ``obj``.
    :param type dict_class: Dictionary class used to construct the output.
    :param bool index_errors: Whether to store the index of invalid items in
        ``self.errors`` when ``many=True``.
    :param int index: Index of the item being serialized (for storing errors) if
        serializing a collection, otherwise `None`.
    :return: A dictionary of the marshalled data

    .. versionchanged:: 1.0.0
        Renamed from ``marshal``.
    """
    index = index if index_errors else None
    if many and obj is not None:
        # Recurse once per item; _pending marks collection serialization.
        self._pending = True
        ret = [
            self._serialize(
                d,
                fields_dict,
                error_store,
                many=False,
                dict_class=dict_class,
                accessor=accessor,
                index=idx,
                index_errors=index_errors,
            )
            for idx, d in enumerate(obj)
        ]
        self._pending = False
        return ret
    items = []
    for attr_name, field_obj in iteritems(fields_dict):
        if getattr(field_obj, 'load_only', False):
            continue
        # ``data_key`` overrides the output key name when set.
        key = field_obj.data_key or attr_name
        # The lambda is invoked immediately by _call_and_store, so binding
        # the current field_obj/attr_name here is safe.
        getter = lambda d: field_obj.serialize(attr_name, d, accessor=accessor)
        value = self._call_and_store(
            getter_func=getter,
            data=obj,
            field_name=key,
            error_store=error_store,
            index=index,
        )
        if value is missing:
            continue
        items.append((key, value))
    ret = dict_class(items)
    return ret
def _rapply(d, func, *args, **kwargs):
    """Apply a function to all values in a dictionary or list of
    dictionaries, recursively."""
    if isinstance(d, (tuple, list)):
        # Sequences (tuples included) come back as lists of mapped members.
        return [_rapply(member, func, *args, **kwargs) for member in d]
    if not isinstance(d, dict):
        # Leaf value: apply the function directly.
        return func(d, *args, **kwargs)
    return {
        key: _rapply(value, func, *args, **kwargs)
        for key, value in iteritems(d)
    }
def format_errors(self, errors, many):
    """Format validation errors as JSON Error objects.

    :param errors: ``{field: [messages]}`` or, when ``many`` is true,
        ``{index: {field: [messages]}}``.
    :param bool many: Whether ``errors`` is keyed by item position.
    :return: ``{'errors': [...]}`` or ``{}`` when there is nothing to format.
    """
    if not errors:
        return {}
    formatted_errors = []
    if many:
        # Renamed the inner loop variable so the ``errors`` argument is no
        # longer shadowed while it is being iterated.
        for index, item_errors in iteritems(errors):
            for field_name, field_errors in iteritems(item_errors):
                formatted_errors.extend([
                    self.format_error(field_name, message, index=index)
                    for message in field_errors
                ])
    else:
        for field_name, field_errors in iteritems(errors):
            formatted_errors.extend([
                self.format_error(field_name, message)
                for message in field_errors
            ])
    return {'errors': formatted_errors}
def _parse_request(self, schema, req, locations):
    """Return a parsed arguments dictionary for the current request."""
    argdict = schema.fields
    parsed = {}
    for argname, field_obj in iteritems(argdict):
        # Fall back to the parser's default locations when none are given.
        parsed_value = self.parse_arg(argname, field_obj, req,
                                      locations=locations or self.locations)
        parsed[argname] = parsed_value
    return parsed
def __set_field_attrs(self, fields_dict):
    """Set the parents of all field objects in fields_dict to self, and
    set the dateformat specified in ``class Meta``, if necessary.
    """
    for name, field in iteritems(fields_dict):
        if not field.parent:
            field.parent = self
        if not field.name:
            field.name = name
        # DateTime fields inherit the schema-level dateformat when unset.
        is_datetime = isinstance(field, fields.DateTime)
        if is_datetime and field.dateformat is None:
            field.dateformat = self.opts.dateformat
    return fields_dict
def make_instance(self, data):
    """Deserialize data to an instance of the model.

    Update an existing row if specified in `self.instance` or loaded by
    primary key(s) in the data; else create a new row.

    :param data: Data to deserialize.
    """
    instance = self.instance or self.get_instance(data)
    if instance is None:
        # No existing row: construct a fresh model from the data.
        return self.opts.model(**data)
    for key, value in iteritems(data):
        setattr(instance, key, value)
    return instance
def _deserialize(self, value, attr, data, **kwargs):
    """Deserialize a mapping, validating keys and values with the configured
    container fields and accumulating per-key errors.

    :raises ValidationError: with ``{key: {'key': ..., 'value': ...}}``
        messages and any partially-deserialized data attached as
        ``valid_data``.
    """
    # NOTE(review): ``collections.Mapping`` moved to ``collections.abc`` and
    # is gone in Python >= 3.10 — confirm the py2/py3 compat story here.
    if not isinstance(value, collections.Mapping):
        self.fail('invalid')
    if not self.value_container and not self.key_container:
        # No typed containers configured: pass the mapping through as-is.
        return value
    errors = collections.defaultdict(dict)
    if self.key_container is None:
        # Identity mapping: keys are kept unchanged.
        keys = {k: k for k in value.keys()}
    else:
        keys = {}
        for key in value.keys():
            try:
                keys[key] = self.key_container.deserialize(key)
            except ValidationError as error:
                errors[key]['key'] = error.messages
    if self.value_container is None:
        # Only keep entries whose key deserialized successfully.
        result = collections.OrderedDict([(keys[k], v)
                                          for k, v in iteritems(value)
                                          if k in keys])
    else:
        result = collections.OrderedDict()
        for key, val in iteritems(value):
            try:
                deser_val = self.value_container.deserialize(val)
            except ValidationError as error:
                errors[key]['value'] = error.messages
                # Keep whatever partial data validated successfully.
                if error.valid_data is not None and key in keys:
                    result[keys[key]] = error.valid_data
            else:
                if key in keys:
                    result[keys[key]] = deser_val
    if errors:
        raise ValidationError(errors, valid_data=result)
    return result
def _get_fields(attrs, field_class, pop=False):
    """Get fields from a class, sorted by creation index.

    :param attrs: Mapping of class attributes
    :param type field_class: Base field class
    :param bool pop: Remove matching fields
    """
    extract = attrs.pop if pop else attrs.get
    # Snapshot the items first so popping during extraction is safe.
    matches = [
        (name, extract(name))
        for name, val in list(iteritems(attrs))
        if utils.is_instance_or_subclass(val, field_class)
    ]
    matches.sort(key=lambda pair: pair[1]._creation_index)
    return matches
def get_declared_fields(mcs, bases, attrs, field_class):
    """Return the declared fields of a class as an OrderedDict.

    :param tuple bases: Tuple of classes the class is subclassing.
    :param dict attrs: Dictionary of class attributes.
    :param type field_class: The base field class. Any class attribute that
        is an instance or subclass of this type will be returned.
    """
    # Pull the field attributes off the class namespace; snapshot first so
    # popping while iterating is safe.
    declared = [
        (name, attrs.pop(name))
        for name, val in list(iteritems(attrs))
        if utils.is_instance_or_subclass(val, field_class)
    ]
    # If subclassing another Serializer, inherit its fields. Walk bases in
    # reverse so fields from earlier bases end up first (correct MRO order).
    for base in reversed(bases):
        if hasattr(base, '_declared_fields'):
            declared = list(base._declared_fields.items()) + declared
    return OrderedDict(declared)
def resolve_params(obj, params):
    """Given a dictionary of keyword arguments, return the same dictionary
    except with values enclosed in `< >` resolved to attributes on `obj`.

    :raises AttributeError: if a templated name does not resolve to an
        attribute of ``obj``.
    """
    resolved = {}
    for name, raw_value in iteritems(params):
        attr_name = tpl(str(raw_value))
        if not attr_name:
            # Not a `< >` template: pass the original value through as-is.
            resolved[name] = raw_value
            continue
        attribute_value = get_value(attr_name, obj, default=missing)
        if attribute_value is missing:
            raise AttributeError(
                '{attr_name!r} is not a valid '
                'attribute of {obj!r}'.format(attr_name=attr_name, obj=obj)
            )
        resolved[name] = attribute_value
    return resolved
def format_item(self, item):
    """Format a single datum as a Resource object.

    See: http://jsonapi.org/format/#document-resource-objects
    """
    resource = self.dict_class()
    resource[TYPE] = self.opts.type_
    for name, value in iteritems(item):
        if name == ID:
            resource[ID] = value
        elif isinstance(self.fields[name], BaseRelationship):
            # Relationship fields already serialize to a mapping; merge it
            # under the shared "relationships" member.
            resource.setdefault('relationships', self.dict_class()).update(value)
        else:
            # Everything else is a plain attribute, inflected per the schema.
            resource.setdefault('attributes', self.dict_class())[self.inflect(name)] = value
    return resource
def __get_fields(self):
    '''Return the declared fields for the object as an OrderedDict.

    Resolution order: start from the class-level declared fields, narrow by
    the ``fields`` Meta option, then by the ``only`` constructor argument,
    and finally drop anything named by ``exclude`` (Meta option or argument).

    :raises AttributeError: if a name in ``only`` is not a declared field.
    :raises ValueError: if either ``exclude`` source is not a list or tuple.
    '''
    ret = OrderedDict()
    # Deep-copy so per-instance mutation (parent binding, etc.) cannot leak
    # back into the class-level _declared_fields mapping.
    declared_fields = copy.deepcopy(self._declared_fields)
    # Explicitly declared fields
    for field_name, field_obj in iteritems(declared_fields):
        ret[field_name] = field_obj
    # If "fields" option is specified, use those fields
    if self.opts.fields:
        ret = self.__get_opts_fields(ret)
    # If the ``only`` __init__ param is specified, return just those fields
    if self.only:
        filtered = OrderedDict()
        for field_name in self.only:
            if field_name not in ret:
                raise AttributeError(
                    '"{0}" is not a valid field for {1}.'
                    .format(field_name, self.obj))
            filtered[field_name] = ret[field_name]
        self.__set_parents(filtered)
        return filtered
    # If "exclude" option or param is specified, remove those fields
    if not isinstance(self.opts.exclude, (list, tuple)) or \
            not isinstance(self.exclude, (list, tuple)):
        raise ValueError("`exclude` must be a list or tuple.")
    # BUG FIX: the two sequences may be a list and a tuple (the guard above
    # allows mixing), and `self.opts.exclude + self.exclude` then raises
    # TypeError. A set union works for any list/tuple combination and also
    # de-duplicates.
    excludes = set(self.opts.exclude) | set(self.exclude)
    for field_name in excludes:
        ret.pop(field_name, None)
    # Set parents
    self.__set_parents(ret)
    return ret
def __set_parents(self, fields_dict):
    '''Set the parents of all field objects in fields_dict to self.

    Fields that already have a parent are left untouched. Returns the same
    mapping for chaining.
    '''
    for field_obj in fields_dict.values():
        if not field_obj.parent:
            field_obj.parent = self
    return fields_dict
def deserialize(self, data, fields_dict, many=False, validators=None,
                preprocess=None, postprocess=None, strict=False, dict_class=dict,
                index_errors=True, index=None):
    """Deserialize ``data`` based on the schema defined by ``fields_dict``.

    :param dict data: The data to deserialize.
    :param dict fields_dict: Mapping of field names to :class:`Field` objects.
    :param bool many: Set to `True` if ``data`` should be deserialized as
        a collection.
    :param list validators: List of validation functions to apply to the
        deserialized dictionary.
    :param list preprocess: List of pre-processing functions.
    :param list postprocess: List of post-processing functions.
    :param bool strict: If `True`, raise errors if invalid data are passed in
        instead of failing silently and storing the errors.
    :param type dict_class: Dictionary class used to construct the output.
    :param bool index_errors: Whether to store the index of invalid items in
        ``self.errors`` when ``many=True``.
    :param int index: Index of the item being deserialized (for storing errors)
        if deserializing a collection, otherwise `None`.
    :return: A dictionary of the deserialized data.
    """
    # Reset errors if not deserializing a collection
    if not self._pending:
        self.reset_errors()
    if many and data is not None:
        # Recurse per item; _pending keeps the accumulated errors alive
        # across the inner calls.
        self._pending = True
        ret = [self.deserialize(d, fields_dict, many=False,
                                validators=validators, preprocess=preprocess,
                                postprocess=postprocess, strict=strict,
                                dict_class=dict_class, index=idx,
                                index_errors=index_errors)
               for idx, d in enumerate(data)]
        self._pending = False
        return ret
    raw_data = data
    if data is not None:
        items = []
        for attr_name, field_obj in iteritems(fields_dict):
            if field_obj.dump_only:
                continue
            # Output key: the field's `attribute` override, else its name.
            key = fields_dict[attr_name].attribute or attr_name
            try:
                raw_value = data.get(attr_name, missing)
            except AttributeError:
                msg = 'Data must be a dict, got a {0}'.format(data.__class__.__name__)
                raise ValidationError(
                    msg,
                    field_names=[attr_name],
                    fields=[field_obj]
                )
            # Fall back to the field's alternate input key, if declared.
            if raw_value is missing and field_obj.load_from:
                raw_value = data.get(field_obj.load_from, missing)
            if raw_value is missing:
                _miss = field_obj.missing
                raw_value = _miss() if callable(_miss) else _miss
            if raw_value is missing and not field_obj.required:
                continue
            value = self.call_and_store(
                getter_func=field_obj.deserialize,
                data=raw_value,
                field_name=key,
                field_obj=field_obj,
                index=(index if index_errors else None)
            )
            if value is not missing:
                items.append((key, value))
        ret = dict_class(items)
    else:
        ret = None
    # BUG FIX (dead code): the original guarded each hook list with
    # `if x: x = x or []`, a no-op reassignment — `x or []` can never be
    # reached with a falsy `x` inside `if x:`. The guards alone suffice.
    if preprocess:
        for func in preprocess:
            ret = func(ret)
    if validators:
        ret = self._validate(validators, ret, raw_data, fields_dict=fields_dict,
                             strict=strict)
    if self.errors and strict:
        raise ValidationError(
            self.errors,
            field_names=self.error_field_names,
            fields=self.error_fields
        )
    if postprocess:
        for func in postprocess:
            ret = func(ret)
    return ret
def deserialize(self, data, fields_dict, many=False, partial=False,
                dict_class=dict, index_errors=True, index=None):
    """Deserialize ``data`` based on the schema defined by ``fields_dict``.

    :param dict data: The data to deserialize.
    :param dict fields_dict: Mapping of field names to :class:`Field` objects.
    :param bool many: Set to `True` if ``data`` should be deserialized as
        a collection.
    :param bool partial: If `True`, ignore missing fields.
    :param type dict_class: Dictionary class used to construct the output.
    :param bool index_errors: Whether to store the index of invalid items in
        ``self.errors`` when ``many=True``.
    :param int index: Index of the item being deserialized (for storing errors)
        if deserializing a collection, otherwise `None`.
    :return: A dictionary of the deserialized data.
    """
    # Reset errors if not deserializing a collection
    if not self._pending:
        self.reset_errors()
    if many and data is not None:
        # Recurse per item; _pending defers error raising to this level.
        self._pending = True
        ret = [self.deserialize(d, fields_dict, many=False, partial=partial,
                                dict_class=dict_class, index=idx,
                                index_errors=index_errors)
               for idx, d in enumerate(data)]
        self._pending = False
        if self.errors:
            raise ValidationError(
                self.errors,
                field_names=self.error_field_names,
                fields=self.error_fields,
                data=ret,
            )
        return ret
    if data is not None:
        items = []
        for attr_name, field_obj in iteritems(fields_dict):
            if field_obj.dump_only:
                continue
            try:
                raw_value = data.get(attr_name, missing)
            except AttributeError:
                # Input data is not a dict
                msg = field_obj.error_messages['type'].format(
                    input=data, input_type=data.__class__.__name__
                )
                self.error_field_names = [SCHEMA]
                self.error_fields = []
                # BUG FIX: the original fetched the index-aware errors dict,
                # then discarded it by re-fetching `self.get_errors()` with no
                # index, so the type error for an item in a collection lost
                # its index. Store the message in the index-aware dict.
                errors = self.get_errors(index=index)
                errors.setdefault(SCHEMA, []).append(msg)
                # Input data type is incorrect, so we can bail out early
                break
            field_name = attr_name
            # Fall back to the field's alternate input key, if declared.
            if raw_value is missing and field_obj.load_from:
                field_name = field_obj.load_from
                raw_value = data.get(field_obj.load_from, missing)
            if raw_value is missing:
                if partial:
                    continue
                _miss = field_obj.missing
                raw_value = _miss() if callable(_miss) else _miss
            if raw_value is missing and not field_obj.required:
                continue
            getter = lambda val: field_obj.deserialize(
                val, field_obj.load_from or attr_name, data
            )
            value = self.call_and_store(
                getter_func=getter,
                data=raw_value,
                field_name=field_name,
                field_obj=field_obj,
                index=(index if index_errors else None)
            )
            if value is not missing:
                # Output key: the field's `attribute` override, else its name.
                key = fields_dict[attr_name].attribute or attr_name
                items.append((key, value))
        ret = dict_class(items)
    else:
        ret = None
    if self.errors and not self._pending:
        raise ValidationError(
            self.errors,
            field_names=self.error_field_names,
            fields=self.error_fields,
            data=ret,
        )
    return ret
def serialize(self, obj, fields_dict, many=False, accessor=None, dict_class=dict,
              index_errors=True, index=None):
    """Takes raw data (a dict, list, or other object) and a dict of fields to
    output and serializes the data based on those fields.

    :param obj: The actual object(s) from which the fields are taken from
    :param dict fields_dict: Mapping of field names to :class:`Field` objects.
    :param bool many: Set to `True` if ``data`` should be serialized as
        a collection.
    :param callable accessor: Function to use for getting values from ``obj``.
    :param type dict_class: Dictionary class used to construct the output.
    :param bool index_errors: Whether to store the index of invalid items in
        ``self.errors`` when ``many=True``.
    :param int index: Index of the item being serialized (for storing errors)
        if serializing a collection, otherwise `None`.
    :return: A dictionary of the marshalled data

    .. versionchanged:: 1.0.0
        Renamed from ``marshal``.
    """
    # Only the outermost call clears previously accumulated errors.
    if not self._pending:
        self.reset_errors()
    if many and obj is not None:
        # Recurse per item; _pending defers error raising to this level.
        self._pending = True
        result = [
            self.serialize(item, fields_dict, many=False, dict_class=dict_class,
                           accessor=accessor, index=pos,
                           index_errors=index_errors)
            for pos, item in enumerate(obj)
        ]
        self._pending = False
        if self.errors:
            raise ValidationError(
                self.errors,
                field_names=self.error_field_names,
                fields=self.error_fields,
                data=result,
            )
        return result
    pairs = []
    for attr_name, field_obj in iteritems(fields_dict):
        if getattr(field_obj, 'load_only', False):
            continue
        # Output key: optional parser prefix + the field's dump_to override
        # (or its declared name).
        out_key = ''.join([self.prefix or '', field_obj.dump_to or attr_name])
        getter = lambda target: field_obj.serialize(attr_name, target,
                                                    accessor=accessor)
        value = self.call_and_store(
            getter_func=getter,
            data=obj,
            field_name=out_key,
            field_obj=field_obj,
            index=(index if index_errors else None),
        )
        if value is not missing:
            pairs.append((out_key, value))
    result = dict_class(pairs)
    if self.errors and not self._pending:
        raise ValidationError(
            self.errors,
            field_names=self.error_field_names,
            fields=self.error_fields,
            data=result,
        )
    return result