def update_registration_tags(self, registration, validated_data, auth):
    new_tags = validated_data.pop('tags', [])
    try:
        registration.update_tags(new_tags, auth=auth)
    except NodeStateError as err:
        raise Conflict(str(err))
def update(self, preprint, validated_data):
    assert isinstance(preprint, Preprint), 'You must specify a valid preprint to be updated'

    auth = get_user_auth(self.context['request'])
    if not preprint.has_permission(auth.user, osf_permissions.WRITE):
        raise exceptions.PermissionDenied(
            detail='User must have admin or write permissions to update a preprint.')

    published = validated_data.pop('is_published', None)
    if published and preprint.provider.is_reviewed:
        url = absolute_reverse(
            'preprints:preprint-review-action-list',
            kwargs={
                'version': self.context['request'].parser_context['kwargs']['version'],
                'preprint_id': preprint._id,
            },
        )
        raise Conflict(
            '{} uses a moderation workflow, so preprints must be submitted for review instead of '
            'published directly. Submit a preprint by creating a `submit` Action at {}'.format(
                preprint.provider.name, url))

    save_preprint = False
    recently_published = False

    primary_file = validated_data.pop('primary_file', None)
    if primary_file:
        self.set_field(preprint.set_primary_file, primary_file, auth)
        save_preprint = True

    old_tags = set(preprint.tags.values_list('name', flat=True))
    # Membership test rather than .get(): an explicit empty tag list must clear the
    # tags instead of being treated as "tags not provided" on a partial update.
    if 'tags' in validated_data:
        current_tags = set(validated_data.pop('tags', []))
    elif self.partial:
        current_tags = set(old_tags)
    else:
        current_tags = set()

    for new_tag in (current_tags - old_tags):
        preprint.add_tag(new_tag, auth=auth)
    for deleted_tag in (old_tags - current_tags):
        preprint.remove_tag(deleted_tag, auth=auth)

    if 'node' in validated_data:
        node = validated_data.pop('node', None)
        self.set_field(preprint.set_supplemental_node, node, auth)
        save_preprint = True

    if 'subjects' in validated_data:
        subjects = validated_data.pop('subjects', None)
        self.set_field(preprint.set_subjects, subjects, auth)
        save_preprint = True

    if 'title' in validated_data:
        title = validated_data['title']
        self.set_field(preprint.set_title, title, auth)
        save_preprint = True

    if 'description' in validated_data:
        description = validated_data['description']
        self.set_field(preprint.set_description, description, auth)
        save_preprint = True

    if 'article_doi' in validated_data:
        preprint.article_doi = validated_data['article_doi']
        save_preprint = True

    if 'license_type' in validated_data or 'license' in validated_data:
        license_details = get_license_details(preprint, validated_data)
        self.set_field(preprint.set_preprint_license, license_details, auth)
        save_preprint = True

    if 'original_publication_date' in validated_data:
        preprint.original_publication_date = validated_data['original_publication_date'] or None
        save_preprint = True

    if published is not None:
        if not preprint.primary_file:
            raise exceptions.ValidationError(
                detail='A valid primary_file must be set before publishing a preprint.')
        self.set_field(preprint.set_published, published, auth)
        save_preprint = True
        recently_published = published
        preprint.set_privacy('public', log=False, save=True)

    if save_preprint:
        preprint.save()

    if recently_published:
        for author in preprint.contributors:
            if author != auth.user:
                project_signals.contributor_added.send(
                    preprint, contributor=author, auth=auth, email_template='preprint')

    return preprint
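# update() above delegates most attribute changes to a set_field helper (the
# PreprintService-era update() further below uses the same pattern). That helper is
# not shown in this section; a minimal sketch of what it plausibly does -- call the
# model-level setter and translate model-layer errors into API-layer errors. The
# exact signature and exception list here are assumptions, not taken from this code:
def set_field(self, func, val, auth, save=False):
    try:
        func(val, auth=auth)
    except PermissionsError as e:  # model-layer permission failure -> HTTP 403
        raise exceptions.PermissionDenied(detail=str(e))
    except (ValueError, ValidationError, NodeStateError) as e:  # bad value -> HTTP 400
        raise exceptions.ValidationError(detail=str(e))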
def perform_update(self, serializer):
    if serializer.instance.is_reviewed:
        raise Conflict(
            'Reviews settings may be set only once. Contact [email protected] if you need to update them.')
    super(PreprintProviderDetail, self).perform_update(serializer)
def perform_destroy(self, instance):
    if instance.is_published:
        raise Conflict('Published preprints cannot be deleted.')
    PreprintService.remove_one(instance)
def check_for_update_errors(self, node_settings, folder_info, external_account_id):
    if (not node_settings.has_auth and folder_info and not external_account_id):
        raise Conflict('Cannot set folder without authorization')
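# The next update() is the earlier, PreprintService-era variant of the Preprint-model
# update() at the top of this section: there the preprint wraps a Node, so tags,
# privacy, and the article DOI live on preprint.node (saved via save_node), while
# subjects, license, and publication state live on the preprint itself (save_preprint).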
def update(self, preprint, validated_data):
    assert isinstance(preprint, PreprintService), 'You must specify a valid preprint to be updated'
    assert isinstance(preprint.node, Node), 'You must specify a preprint with a valid node to be updated.'

    auth = get_user_auth(self.context['request'])
    if not preprint.node.has_permission(auth.user, 'admin'):
        raise exceptions.PermissionDenied(detail='User must be an admin to update a preprint.')

    published = validated_data.pop('is_published', None)
    if published and preprint.provider.is_reviewed:
        url = absolute_reverse(
            'preprints:preprint-review-action-list',
            kwargs={
                'version': self.context['request'].parser_context['kwargs']['version'],
                'preprint_id': preprint._id,
            },
        )
        raise Conflict(
            '{} uses a moderation workflow, so preprints must be submitted for review instead of '
            'published directly. Submit a preprint by creating a `submit` Action at {}'.format(
                preprint.provider.name, url))

    save_node = False
    save_preprint = False
    recently_published = False

    primary_file = validated_data.pop('primary_file', None)
    if primary_file:
        self.set_field(preprint.set_primary_file, primary_file, auth)
        save_node = True

    old_tags = set(preprint.node.tags.values_list('name', flat=True))
    if validated_data.get('node') and 'tags' in validated_data['node']:
        current_tags = set(validated_data['node'].pop('tags', []))
    elif self.partial:
        current_tags = set(old_tags)
    else:
        current_tags = set()

    for new_tag in (current_tags - old_tags):
        preprint.node.add_tag(new_tag, auth=auth)
    for deleted_tag in (old_tags - current_tags):
        preprint.node.remove_tag(deleted_tag, auth=auth)

    if 'node' in validated_data:
        preprint.node.update(fields=validated_data.pop('node'))
        save_node = True

    if 'subjects' in validated_data:
        subjects = validated_data.pop('subjects', None)
        self.set_field(preprint.set_subjects, subjects, auth)
        save_preprint = True

    if 'article_doi' in validated_data:
        preprint.node.preprint_article_doi = validated_data['article_doi']
        save_node = True

    if 'license_type' in validated_data or 'license' in validated_data:
        license_details = get_license_details(preprint, validated_data)
        self.set_field(preprint.set_preprint_license, license_details, auth)
        save_preprint = True

    if 'original_publication_date' in validated_data:
        preprint.original_publication_date = validated_data['original_publication_date']
        save_preprint = True

    if published is not None:
        if not preprint.primary_file:
            raise exceptions.ValidationError(
                detail='A valid primary_file must be set before publishing a preprint.')
        self.set_field(preprint.set_published, published, auth)
        save_preprint = True
        recently_published = published
        preprint.node.set_privacy('public')
        save_node = True

    if save_node:
        try:
            preprint.node.save()
        except ValidationError as e:
            # Raised from invalid DOI
            raise exceptions.ValidationError(detail=e.messages[0])

    if save_preprint:
        preprint.save()

    # Send preprint confirmation email signal to new authors on preprint! -- only when published
    # TODO: Some more thought might be required on this; preprints made from existing
    # nodes will send emails making it seem like a new node.
    if recently_published:
        for author in preprint.node.contributors:
            if author != auth.user:
                project_signals.contributor_added.send(
                    preprint.node, contributor=author, auth=auth, email_template='preprint')

    return preprint
def bulk_destroy(self, request, *args, **kwargs):
    """
    Handles bulk destroy of resource objects.

    Handles some validation and enforces the bulk limit.
    """
    if hasattr(request, 'query_params') and 'id' in request.query_params:
        if hasattr(request, 'data') and len(request.data) > 0:
            raise Conflict('A bulk DELETE can only have a body or query parameters, not both.')

        ids = request.query_params['id'].split(',')

        if 'type' in request.query_params:
            request_type = request.query_params['type']
            data = []
            for id in ids:
                data.append({'type': request_type, 'id': id})
        else:
            raise ValidationError(
                'Type query parameter is also required for a bulk DELETE using query parameters.')
    elif not request.data:
        raise ValidationError('Request must contain array of resource identifier objects.')
    else:
        data = request.data

    num_items = len(data)
    bulk_limit = BULK_SETTINGS['DEFAULT_BULK_LIMIT']

    if num_items > bulk_limit:
        raise JSONAPIException(
            source={'pointer': '/data'},
            detail='Bulk operation limit is {}, got {}.'.format(bulk_limit, num_items),
        )

    user = self.request.user
    object_type = self.serializer_class.Meta.type_

    resource_object_list = self.get_requested_resources(request=request, request_data=data)

    for item in data:
        item_type = item[u'type']
        if item_type != object_type:
            raise Conflict('Type needs to match type expected at this endpoint.')

    if not self.allow_bulk_destroy_resources(user, resource_object_list):
        raise PermissionDenied

    skip_uneditable = self.bulk_destroy_skip_uneditable(resource_object_list, user, object_type)
    if skip_uneditable:
        skipped = skip_uneditable['skipped']
        allowed = skip_uneditable['allowed']

        if skipped:
            self.perform_bulk_destroy(allowed)
            return Response(status=status.HTTP_200_OK, data={'errors': skipped})

    self.perform_bulk_destroy(resource_object_list)
    return Response(status=status.HTTP_204_NO_CONTENT)
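# A sketch of the two request shapes bulk_destroy accepts (the endpoint URL and the
# 'nodes' type below are hypothetical examples, not taken from this code):
#
#   1. Query-parameter form -- 'id' holds comma-separated ids, 'type' is required,
#      and the request body must be empty:
#
#        DELETE /v2/nodes/?id=abc12,def34&type=nodes
#
#   2. JSON:API body form -- an array of resource identifier objects:
#
#        DELETE /v2/nodes/
#        {"data": [{"type": "nodes", "id": "abc12"},
#                  {"type": "nodes", "id": "def34"}]}
#
# Sending both a body and query parameters raises Conflict, a 'type' that does not
# match the endpoint's serializer type raises Conflict, and more identifiers than
# BULK_SETTINGS['DEFAULT_BULK_LIMIT'] raises a JSONAPIException.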
def to_internal_value(self, data):
    if self.root.Meta.type_ != data:
        raise Conflict()
    return super(TypeField, self).to_internal_value(data)
def to_internal_value(self, data):
    if self.target_type != data:
        raise Conflict()
    return super(TargetTypeField, self).to_internal_value(data)
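# A minimal sketch of how these two fields behave, assuming a JSONAPISerializer
# subclass whose Meta.type_ is 'preprints' (the serializer and payload below are
# hypothetical illustrations, not part of this section):
#
#     class PreprintSerializer(JSONAPISerializer):
#         type = TypeField()
#
#         class Meta:
#             type_ = 'preprints'
#
# Deserializing {'type': 'nodes', ...} against this serializer makes
# TypeField.to_internal_value raise Conflict (HTTP 409), because JSON:API requires a
# payload's `type` to match the resource type of the endpoint. TargetTypeField
# applies the same check to the `type` inside a relationship payload.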