def get_relationship(data, related_resource):
    """
    Extracts the id and target type from a single relationship data object.

    Args:
        data: dict with JSON:API resource identifier members ('id', 'type').
        related_resource: relationship name, used only to build the error pointer.

    Returns:
        dict with keys 'id' (may be None) and 'target_type'.

    Raises:
        JSONAPIException: if the data object has no 'type' member.
    """
    target_type = data.get('type')
    if not target_type:
        raise JSONAPIException(
            source={
                'pointer': 'data/relationships/{}/data/type'.format(related_resource)
            },
            detail=NO_TYPE_ERROR,
        )
    # Renamed from ``id`` to avoid shadowing the builtin.
    related_id = data.get('id')
    return {'id': related_id, 'target_type': target_type}
def get_embargo_end_date_by_version(self, validated_data):
    """
    Old API versions should pass in "lift_embargo".
    New API versions should pass in "embargo_end_date"
    """
    # Legacy API versions: pass the old attribute straight through.
    if not self.expect_cleaner_attributes(self.context['request']):
        return validated_data.get('lift_embargo')
    # Newer versions must not use the deprecated attribute at all.
    if validated_data.get('lift_embargo'):
        raise JSONAPIException(
            source={'pointer': '/data/attributes/lift_embargo'},
            detail=f'Deprecated in version {CREATE_REGISTRATION_FIELD_CHANGE_VERSION}. Use embargo_end_date instead.',
        )
    return validated_data.get('embargo_end_date', None)
def get_registration_choice_by_version(self, validated_data):
    """
    Old API versions should pass in "immediate" or "embargo" under
    `registration_choice`. New API versions should pass in an
    "embargo_end_date" if it should be embargoed, else it will be None
    """
    # Legacy API versions: the explicit choice (default 'immediate') wins.
    if not self.expect_cleaner_attributes(self.context['request']):
        return validated_data.get('registration_choice', 'immediate')
    # Newer versions must not send the deprecated attribute.
    if validated_data.get('registration_choice'):
        raise JSONAPIException(
            source={'pointer': '/data/attributes/registration_choice'},
            detail=f'Deprecated in version {CREATE_REGISTRATION_FIELD_CHANGE_VERSION}. Use embargo_end_date instead.',
        )
    # Derive the choice from the presence of an embargo end date.
    if validated_data.get('embargo_end_date', None):
        return 'embargo'
    return 'immediate'
def run_validation(self, data=empty):
    """
    Overwrites run_validation.
    JSONAPIOnetoOneRelationshipParser parses data into {id: None, type: None}
    if data is null, which is what this endpoint expects.
    """
    if data == {}:
        raise JSONAPIException(source={'pointer': '/data'}, detail=NO_DATA_ERROR)
    # A fully-populated identifier means the caller is trying to *set* a
    # relationship here, which this endpoint does not support.
    has_type = data.get('type', None) is not None
    has_id = data.get('id', None) is not None
    if has_type and has_id:
        raise DRFValidationError(
            {'data': 'Data must be null. This endpoint can only be used to unset the supplemental project.'},
            400,
        )
    return data
def run_validation(self, data):
    """Enforce the bulk payload size limit, then delegate to DRF validation."""
    meta = getattr(self, 'Meta', None)
    # Per-serializer override of the limit, falling back to the global default.
    limit = getattr(meta, 'bulk_limit', BULK_SETTINGS['DEFAULT_BULK_LIMIT'])
    count = len(data)
    if count > limit:
        raise JSONAPIException(
            source={'pointer': '/data'},
            detail='Bulk operation limit is {}, got {}.'.format(limit, count),
        )
    return super(JSONAPIListSerializer, self).run_validation(data)
def parse(self, stream, media_type=None, parser_context=None):
    """
    Parses the incoming bytestream as JSON.

    Validates the 'signature' in the payload then returns the resulting data.
    """
    parsed = super(HMACSignedParser, self).parse(stream, media_type=media_type, parser_context=parser_context)

    try:
        signature = parsed['signature']
        payload = signing.unserialize_payload(parsed['payload'])
        expires_at = payload['time']
    except (KeyError, ValueError):
        raise JSONAPIException(detail='Invalid Payload')

    # Reject payloads whose HMAC does not verify, then expired ones.
    if not signing.default_signer.verify_payload(signature, payload):
        raise NotAuthenticated
    if time.time() > expires_at:
        raise JSONAPIException(detail='Signature has expired')
    return payload
def parse(self, stream, media_type=None, parser_context=None):
    """
    Parses a JSON:API to-one relationship body.

    A null/empty 'data' member is normalized to {'type': None, 'id': None};
    otherwise both 'id' and 'type' must be present.
    """
    body = super(JSONAPIOnetoOneRelationshipParser, self).parse(stream, media_type, parser_context)
    if not isinstance(body, dict):
        raise ParseError('Request body must be dictionary')
    data = body.get('data')
    if not data:
        # Null data unsets the relationship; hand back an empty identifier.
        return {'type': None, 'id': None}
    if data.get('id') is None:
        raise JSONAPIException(source={'pointer': '/data/id'}, detail=NO_ID_ERROR)
    if data.get('type') is None:
        raise JSONAPIException(source={'pointer': '/data/type'}, detail=NO_TYPE_ERROR)
    return data
def parse(self, stream, media_type=None, parser_context=None):
    """
    Parses the incoming bytestream as JSON and returns the resulting data.

    Flattens the resource object's 'attributes' to the same level as its
    'id' and 'type'.

    Raises:
        ParseError: if the body is not a JSON object.
        JSONAPIException: if 'data' or 'data.attributes' is missing.
    """
    result = super(JSONAPIParser, self).parse(stream, media_type=media_type, parser_context=parser_context)
    if not isinstance(result, dict):
        raise ParseError()
    data = result.get('data', {})
    # Guard clauses first so the happy path reads straight through.
    if not data:
        raise JSONAPIException(source={'pointer': '/data'}, detail=NO_DATA_ERROR)
    if 'attributes' not in data:
        raise JSONAPIException(source={'pointer': '/data/attributes'}, detail=NO_ATTRIBUTES_ERROR)
    # Renamed from ``id`` to avoid shadowing the builtin.
    object_id = data.get('id')
    object_type = data.get('type')
    parsed = {'id': object_id, 'type': object_type}
    parsed.update(data['attributes'])
    return parsed
def flatten_relationships(self, relationships):
    """
    Flattens relationships dictionary which has information needed to create
    related resource objects.

    Validates that formatting of relationships dictionary is correct.

    Raises:
        ParseError: if 'relationships' is not a well-formed dict.
        JSONAPIException: if the relationship's data or its type is missing.
    """
    if not isinstance(relationships, dict) or not relationships:
        raise ParseError()

    # Can only create one type of relationship.
    # BUG FIX: dict.keys() is not subscriptable in Python 3 — the original
    # ``relationships.keys()[0]`` raised TypeError; take the first key by
    # iteration instead.
    related_resource = next(iter(relationships))
    if not isinstance(relationships[related_resource], dict) or related_resource == 'data':
        raise ParseError()
    data = relationships[related_resource].get('data')
    if not data:
        raise JSONAPIException(
            source={'pointer': 'data/relationships/{}/data'.format(related_resource)},
            detail=NO_DATA_ERROR,
        )
    target_type = data.get('type')
    if not target_type:
        raise JSONAPIException(
            source={'pointer': 'data/relationships/{}/data/type'.format(related_resource)},
            detail=NO_TYPE_ERROR,
        )
    # Renamed from ``id`` to avoid shadowing the builtin.
    related_id = data.get('id')
    return {'id': related_id, 'target_type': target_type}
def parse(self, stream, media_type=None, parser_context=None):
    """
    Translates a flat search payload into an Elasticsearch bool query:
    'q' becomes a multi_match over the view's search fields, and every
    other non-None value becomes a term/terms filter.
    """
    try:
        view = parser_context['view']
    except KeyError:
        raise ImproperlyConfigured('SearchParser requires "view" context.')

    data = super(SearchParser, self).parse(stream, media_type=media_type, parser_context=parser_context)
    if not data:
        raise JSONAPIException(detail='Invalid Payload')

    bool_query = {}
    if 'q' in data:
        bool_query['must'] = {
            'multi_match': {
                'query': data.pop('q'),
                'fields': view.search_fields,
            },
        }
    if any(data.values()):
        filters = []
        for field, value in data.items():
            if value is None:
                continue
            # Lists map to 'terms', scalars to 'term'.
            clause = 'terms' if isinstance(value, list) else 'term'
            filters.append({clause: {field: value}})
        bool_query['filter'] = filters
    return {'query': {'bool': bool_query}}
def create(self, validated_data):
    """
    Creates an unregistered user on behalf of the requesting user.

    Requires 'fullname'; 'username' (lowercased email) is optional and, when
    present, is registered as an unconfirmed email. Optionally sends a
    confirmation email when the 'send_email' query parameter is set.
    """
    username = validated_data.get('username', '').lower() or None
    full_name = validated_data.get('fullname')
    if not full_name:
        raise JSONAPIException('A `full_name` is required to create a user.')

    user = User.create_unregistered(full_name, email=username)
    user.registered_by = self.context['request'].user
    if username:
        user.add_unconfirmed_email(user.username)

    try:
        user.save()
    except ValidationValueError:
        raise Conflict('User with specified username already exists.')

    if username and self.context['request'].GET.get('send_email', False):
        send_confirm_email(user, user.username)

    return user
def flatten_relationships(self, relationships):
    """
    Flattens relationships dictionary which has information needed to create
    related resource objects.

    Validates that formatting of relationships dictionary is correct.
    Returns one flattened identifier dict, or a list of them when the
    relationship's data member is a list.

    Raises:
        ParseError: if 'relationships' is not a well-formed dict.
        JSONAPIException: if the relationship's data member is missing.
    """
    if not isinstance(relationships, dict) or not relationships:
        raise ParseError()

    # Can only create one type of relationship.
    # BUG FIX: dict.keys() is not subscriptable in Python 3 — the original
    # ``relationships.keys()[0]`` raised TypeError; take the first key by
    # iteration instead.
    related_resource = next(iter(relationships))
    if not isinstance(relationships[related_resource], dict) or related_resource == 'data':
        raise ParseError()
    data = relationships[related_resource].get('data')
    if not data:
        raise JSONAPIException(
            source={'pointer': 'data/relationships/{}/data'.format(related_resource)},
            detail=NO_DATA_ERROR,
        )
    if isinstance(data, list):
        return [self.get_relationship(item, related_resource) for item in data]
    return self.get_relationship(data, related_resource)
def parse(self, stream, media_type=None, parser_context=None):
    """
    Parses the incoming bytestream as JSON and returns the resulting data.

    Bulk requests must supply a list under 'data'; all other requests must
    supply a single resource object (mapping), which is flattened via
    flatten_data.

    Raises:
        ParseError: on a non-object body, or on a data member of the wrong
            shape for the request kind.
        JSONAPIException: if 'data' is missing or empty.
    """
    # BUG FIX: the ``collections.Mapping`` alias was removed in Python 3.10;
    # the abstract base class lives in collections.abc.
    from collections.abc import Mapping

    result = super(JSONAPIParser, self).parse(stream, media_type=media_type, parser_context=parser_context)
    if not isinstance(result, dict):
        raise ParseError()
    data = result.get('data', {})
    if not data:
        raise JSONAPIException(source={'pointer': '/data'}, detail=NO_DATA_ERROR)

    if is_bulk_request(parser_context['request']):
        if not isinstance(data, list):
            raise ParseError('Expected a list of items but got type "dict".')
        return [
            self.flatten_data(data_object, parser_context, is_list=True)
            for data_object in data
        ]

    if not isinstance(data, Mapping):
        raise ParseError('Expected a dictionary of items.')
    return self.flatten_data(data, parser_context, is_list=False)
def bulk_destroy(self, request, *args, **kwargs):
    """
    Handles bulk destroy of resource objects.

    Handles some validation and enforces bulk limit.

    Identifiers may be supplied either via query parameters ('id' plus a
    required 'type') or as an array of resource identifier objects in the
    request body, but never both. Returns 204 on full success, or 200 with
    an 'errors' list when some resources were skipped as uneditable.
    """
    # Query-parameter form: ?id=a,b,c&type=... — a body alongside it is ambiguous.
    if hasattr(request, 'query_params') and 'id' in request.query_params:
        if hasattr(request, 'data') and len(request.data) > 0:
            raise Conflict(
                'A bulk DELETE can only have a body or query parameters, not both.'
            )

        ids = request.query_params['id'].split(',')

        if 'type' in request.query_params:
            request_type = request.query_params['type']
            # Rebuild resource identifier objects from the query parameters.
            data = []
            for id in ids:
                data.append({'type': request_type, 'id': id})
        else:
            raise ValidationError(
                'Type query parameter is also required for a bulk DELETE using query parameters.'
            )
    elif not request.data:
        raise ValidationError(
            'Request must contain array of resource identifier objects.')
    else:
        data = request.data

    # Enforce the bulk operation size limit.
    num_items = len(data)
    bulk_limit = BULK_SETTINGS['DEFAULT_BULK_LIMIT']

    if num_items > bulk_limit:
        raise JSONAPIException(
            source={'pointer': '/data'},
            detail='Bulk operation limit is {}, got {}.'.format(
                bulk_limit, num_items))

    user = self.request.user
    object_type = self.serializer_class.Meta.type_
    resource_object_list = self.get_requested_resources(request=request, request_data=data)

    # Every identifier's type must match the type served by this endpoint.
    for item in data:
        item_type = item[u'type']
        if item_type != object_type:
            raise Conflict(
                'Type needs to match type expected at this endpoint.')

    if not self.allow_bulk_destroy_resources(user, resource_object_list):
        raise PermissionDenied

    # Best-effort mode: destroy what the user can edit and report the rest.
    # NOTE(review): presumably returns {'skipped': [...], 'allowed': [...]}
    # or a falsy value — confirm against the hook's implementation.
    skip_uneditable = self.bulk_destroy_skip_uneditable(
        resource_object_list, user, object_type)
    if skip_uneditable:
        skipped = skip_uneditable['skipped']
        allowed = skip_uneditable['allowed']
        if skipped:
            self.perform_bulk_destroy(allowed)
            return Response(status=status.HTTP_200_OK, data={'errors': skipped})

    self.perform_bulk_destroy(resource_object_list)
    return Response(status=status.HTTP_204_NO_CONTENT)
def flatten_data(self, resource_object, parser_context, is_list):
    """
    Flattens data objects, making attributes and relationships fields the same
    level as id and type.

    Raises:
        JSONAPIException: when a required member ('relationships',
            'attributes', 'id', or 'type') is missing for the request kind.
    """
    relationships = resource_object.get('relationships')
    is_relationship = parser_context.get('is_relationship')
    attributes_required = parser_context.get('attributes_required', True)
    # allow skip type check for legacy api version
    legacy_type_allowed = parser_context.get('legacy_type_allowed', False)
    request_method = parser_context['request'].method

    # Request must include "relationships" or "attributes"
    if is_relationship and request_method == 'POST':
        if not relationships:
            raise JSONAPIException(
                source={'pointer': '/data/relationships'},
                detail=NO_RELATIONSHIPS_ERROR)
    else:
        if 'attributes' not in resource_object and attributes_required and request_method != 'DELETE':
            raise JSONAPIException(source={'pointer': '/data/attributes'}, detail=NO_ATTRIBUTES_ERROR)

    object_id = resource_object.get('id')
    object_type = resource_object.get('type')
    type_required = not (
        legacy_type_allowed and
        parser_context['request'].version < 2.7 and
        request_method == 'PATCH'
    )

    # For validating type and id for bulk delete:
    if is_list and request_method == 'DELETE':
        if object_id is None:
            raise JSONAPIException(source={'pointer': '/data/id'}, detail=NO_ID_ERROR)
        if type_required and object_type is None:
            raise JSONAPIException(source={'pointer': '/data/type'}, detail=NO_TYPE_ERROR)

    attributes = resource_object.get('attributes')
    parsed = {'id': object_id, 'type': object_type}
    if attributes:
        parsed.update(attributes)

    if relationships:
        relationships = self.flatten_relationships(relationships)
        if isinstance(relationships, list):
            # Collapse the list of flattened relationship dicts into a single
            # key -> list-of-values entry.
            # BUG FIX: dict.iteritems() does not exist in Python 3; use
            # items() instead.
            relationship_values = []
            relationship_key = None
            for relationship in relationships:
                for key, value in relationship.items():
                    relationship_values.append(value)
                    relationship_key = key
            parsed.update({relationship_key: relationship_values})
        else:
            parsed.update(relationships)

    return parsed