class FileSerializer(BulkModelSerializer):
    contentnode = UserFilteredPrimaryKeyRelatedField(
        queryset=ContentNode.objects.all(), required=False
    )
    assessment_item = UserFilteredPrimaryKeyRelatedField(
        queryset=AssessmentItem.objects.all(), required=False
    )

    def update(self, instance, validated_data):
        if "contentnode" in validated_data:
            # if we're updating the file's related node, we'll trigger a reset for the
            # old channel's cache modified date
            update_node = validated_data.get("contentnode", None)
            if not update_node or update_node.id != instance.contentnode_id:
                ResourceSizeCache.reset_modified_for_file(instance)

        results = super(FileSerializer, self).update(instance, validated_data)
        if instance.uploaded_by_id:
            calculate_user_storage(instance.uploaded_by_id)
        return results

    class Meta:
        model = File
        fields = (
            "id",
            "language",
            "contentnode",
            "assessment_item",
            "preset",
        )
        list_serializer_class = BulkListSerializer

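# Illustrative sketch (not part of the original module): reassigning a file to a
# different content node with FileSerializer above. The names "file_instance",
# "new_node", and "request" are hypothetical placeholders.
def _example_move_file_to_new_node(file_instance, new_node, request):
    serializer = FileSerializer(
        file_instance,
        data={"contentnode": new_node.id},
        partial=True,
        context={"request": request},
    )
    serializer.is_valid(raise_exception=True)
    # update() above resets the old channel's ResourceSizeCache modified date and
    # recalculates the uploader's storage usage.
    return serializer.save()
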
class InvitationSerializer(BulkModelSerializer):
    # These fields are `read_only` by default, but get set to writable
    # in the `get_fields` method under appropriate conditions
    revoked = serializers.BooleanField(read_only=True)
    accepted = serializers.BooleanField(read_only=True)
    declined = serializers.BooleanField(read_only=True)

    channel = UserFilteredPrimaryKeyRelatedField(queryset=Channel.objects.all())

    class Meta:
        model = Invitation
        fields = (
            "id",
            "accepted",
            "declined",
            "revoked",
            "email",
            "channel",
            "share_mode",
            "first_name",
            "last_name",
        )
        list_serializer_class = BulkListSerializer

    def create(self, validated_data):
        # Need to remove default values for these non-model fields here
        if "request" in self.context:
            # If this has been newly created add the current user as the sender
            self.validated_data["sender"] = self.context["request"].user
        return super(InvitationSerializer, self).create(validated_data)

    def update(self, instance, validated_data):
        instance = super(InvitationSerializer, self).update(instance, validated_data)
        accepted = self.initial_data.get("accepted") or instance.accepted
        revoked = self.initial_data.get("revoked") or instance.revoked
        if accepted and not revoked:
            instance.accept()
        return instance

    def get_fields(self):
        fields = super().get_fields()
        request = self.context.get("request", None)

        # allow invitation state to be modified under the right conditions
        if request and request.user and self.instance:
            if self.instance.invited == request.user:
                fields["accepted"].read_only = self.instance.revoked
                fields["declined"].read_only = False
            if self.instance.sender == request.user:
                fields["revoked"].read_only = False

        return fields

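# Illustrative sketch (not part of the original module): accepting an invitation as the
# invited user. get_fields() above only makes "accepted"/"declined" writable when
# request.user matches instance.invited, so the same payload is ignored for other users.
# "invitation" and "request" are hypothetical placeholders.
def _example_accept_invitation(invitation, request):
    serializer = InvitationSerializer(
        invitation,
        data={"accepted": True},
        partial=True,
        context={"request": request},
    )
    serializer.is_valid(raise_exception=True)
    # update() above calls instance.accept() once accepted is set and the
    # invitation has not been revoked.
    return serializer.save()
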
class ChannelSetSerializer(BulkModelSerializer):
    channels = UserFilteredPrimaryKeyRelatedField(
        many=True,
        queryset=Channel.objects.all(),
        edit=False,
        required=False,
    )

    def create(self, validated_data):
        channels = validated_data.pop("channels", [])
        instance = super(ChannelSetSerializer, self).create(validated_data)
        for channel in channels:
            instance.secret_token.channels.add(channel)
        if "request" in self.context:
            user = self.context["request"].user
            # This has been newly created so add the current user as an editor
            instance.editors.add(user)
        self.changes.append(
            generate_update_event(
                instance.id,
                CHANNELSET,
                {"secret_token": instance.secret_token.token},
            )
        )
        return instance

    def update(self, instance, validated_data):
        channels = validated_data.pop("channels", {})
        for channel, value in channels.items():
            if value:
                instance.secret_token.channels.add(channel)
            else:
                instance.secret_token.channels.remove(channel)
        return super(ChannelSetSerializer, self).update(instance, validated_data)

    class Meta:
        model = ChannelSet
        fields = ("id", "name", "description", "channels")
        list_serializer_class = BulkListSerializer

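# Illustrative sketch (not part of the original module): creating a channel set. On
# create, "channels" is a list of channel ids attached to the set's secret token; on
# update, update() above instead expects a mapping of channel -> boolean to add or
# remove channels. "request", the name, and "channel_ids" are hypothetical placeholders.
def _example_create_channel_set(request, channel_ids):
    serializer = ChannelSetSerializer(
        data={"name": "My collection", "channels": channel_ids},
        context={"request": request},
    )
    serializer.is_valid(raise_exception=True)
    # create() above also adds request.user as an editor and records an update
    # event carrying the new secret token.
    return serializer.save()
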
class ContentNodeSerializer(BulkModelSerializer):
    """
    This is a write only serializer - we leverage it to do create and update
    operations, but read operations are handled by the Viewset.
    """

    parent = UserFilteredPrimaryKeyRelatedField(
        queryset=ContentNode.objects.all(), required=False
    )
    extra_fields = ExtraFieldsSerializer(required=False)
    tags = TagField(required=False)

    class Meta:
        model = ContentNode
        fields = (
            "id",
            "title",
            "description",
            "kind",
            "language",
            "license",
            "license_description",
            "copyright_holder",
            "author",
            "role_visibility",
            "aggregator",
            "provider",
            "extra_fields",
            "thumbnail_encoding",
            "parent",
            "complete",
            "changed",
            "tags",
        )
        list_serializer_class = ContentNodeListSerializer
        nested_writes = True

    def create(self, validated_data):
        # Creating a new node, by default put it in the orphanage on initial creation.
        if "parent" not in validated_data:
            validated_data["parent_id"] = settings.ORPHANAGE_ROOT_ID

        tags = None
        if "tags" in validated_data:
            tags = validated_data.pop("tags")

        instance = super(ContentNodeSerializer, self).create(validated_data)

        if tags:
            set_tags({instance.id: tags})

        return instance

    def update(self, instance, validated_data):
        if "parent" in validated_data:
            raise ValidationError(
                {"parent": "This field should only be changed by a move operation"}
            )

        extra_fields = validated_data.pop("extra_fields", None)
        if extra_fields is not None:
            validated_data["extra_fields"] = self.fields["extra_fields"].update(
                instance.extra_fields, extra_fields
            )

        if "tags" in validated_data:
            tags = validated_data.pop("tags")
            set_tags({instance.id: tags})

        return super(ContentNodeSerializer, self).update(instance, validated_data)

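# Illustrative sketch (not part of the original module): creating a node without an
# explicit parent, which create() above places under settings.ORPHANAGE_ROOT_ID.
# "request", "node_id", and the field values are hypothetical placeholders.
def _example_create_orphaned_node(request, node_id):
    serializer = ContentNodeSerializer(
        data={"id": node_id, "title": "Draft exercise", "kind": "exercise"},
        context={"request": request},
    )
    serializer.is_valid(raise_exception=True)
    return serializer.save()
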
class AssessmentItemSerializer(BulkModelSerializer):
    # This is set as editable=False on the model so by default DRF does not allow us
    # to set it.
    assessment_id = UUIDRegexField()

    contentnode = UserFilteredPrimaryKeyRelatedField(
        queryset=ContentNode.objects.all(), required=False
    )

    class Meta:
        model = AssessmentItem
        fields = (
            "question",
            "type",
            "answers",
            "contentnode",
            "assessment_id",
            "hints",
            "raw_data",
            "order",
            "source_url",
            "randomize",
            "deleted",
        )
        list_serializer_class = AssessmentListSerializer
        # Use the contentnode and assessment_id as the lookup field for updates
        update_lookup_field = ("contentnode", "assessment_id")

    def set_files(self, all_objects, all_validated_data=None):  # noqa C901
        files_to_delete = []
        files_to_update = {}
        current_files_by_aitem = {}

        # Create a set of assessment item ids that have had markdown fields modified.
        if all_validated_data:
            # If this is an update operation, check the validated data for which items
            # have had these fields modified.
            md_fields_modified = set(
                [
                    self.id_value_lookup(ai)
                    for ai in all_validated_data
                    if "question" in ai or "hints" in ai or "answers" in ai
                ]
            )
        else:
            # If this is a create operation, just check if these fields are not null.
            md_fields_modified = set(
                [
                    self.id_value_lookup(ai)
                    for ai in all_objects
                    if ai.question or ai.hints or ai.answers
                ]
            )

        all_objects = [
            ai for ai in all_objects if self.id_value_lookup(ai) in md_fields_modified
        ]

        for file in File.objects.filter(assessment_item__in=all_objects):
            if file.assessment_item_id not in current_files_by_aitem:
                current_files_by_aitem[file.assessment_item_id] = []
            current_files_by_aitem[file.assessment_item_id].append(file)

        for aitem in all_objects:
            current_files = current_files_by_aitem.get(aitem.id, [])
            filenames = get_filenames_from_assessment(aitem)
            set_checksums = set([filename.split(".")[0] for filename in filenames])
            current_checksums = set([f.checksum for f in current_files])

            missing_checksums = set_checksums.difference(current_checksums)
            for filename in filenames:
                checksum = filename.split(".")[0]
                if checksum in missing_checksums:
                    if checksum not in files_to_update:
                        files_to_update[checksum] = []
                    files_to_update[checksum].append(aitem)

            redundant_checksums = current_checksums.difference(set_checksums)
            files_to_delete.extend(
                [f.id for f in current_files if f.checksum in redundant_checksums]
            )

        if files_to_delete:
            File.objects.filter(id__in=files_to_delete).delete()

        if files_to_update:
            # Query file objects that this user has uploaded to set the assessment_item attribute
            source_files = list(
                File.objects.filter(
                    checksum__in=files_to_update.keys(),
                    uploaded_by=self.context["request"].user,
                    contentnode__isnull=True,
                    assessment_item__isnull=True,
                )
            )
            updated_files = []
            for file in source_files:
                if file.checksum in files_to_update and files_to_update[file.checksum]:
                    aitem = files_to_update[file.checksum].pop()
                    file.assessment_item = aitem
                    updated_files.append(file)

            if any(files_to_update.values()):
                # Not all the files to update had a file, raise an error
                raise ValidationError(
                    "Attempted to set files to an assessment item that do not have a file on the server"
                )

            bulk_update(source_files)

    def create(self, validated_data):
        with transaction.atomic():
            instance = super(AssessmentItemSerializer, self).create(validated_data)
            self.set_files([instance])
            return instance

    def update(self, instance, validated_data):
        with transaction.atomic():
            instance = super(AssessmentItemSerializer, self).update(instance, validated_data)
            self.set_id_values(instance, validated_data)
            self.set_files([instance], [validated_data])
            return instance

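# Illustrative sketch (not part of the original module) of the checksum bookkeeping in
# set_files() above: filenames referenced by an assessment item's markdown are reduced
# to checksums and diffed against the File rows already attached to the item. The
# example values are made up.
def _example_checksum_diff():
    filenames = ["abc123.png", "def456.mp3"]      # e.g. from get_filenames_from_assessment(aitem)
    set_checksums = {name.split(".")[0] for name in filenames}
    current_checksums = {"abc123", "old999"}      # checksums of Files already attached to the item
    missing = set_checksums - current_checksums   # {"def456"}: a source File must be attached
    redundant = current_checksums - set_checksums  # {"old999"}: these Files get deleted
    return missing, redundant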