class ObservationRecordSerializer(serializers.ModelSerializer):
    groups = GroupSerializer(many=True, required=False)  # TODO: return groups in detail and list
    observation_groups = ObservationGroupField(many=True, read_only=True, source='observationgroup_set')
    status = serializers.CharField(required=False)

    class Meta:
        model = ObservationRecord
        fields = '__all__'

    def create(self, validated_data):
        groups = validated_data.pop('groups', [])

        obsr = ObservationRecord.objects.create(**validated_data)

        group_serializer = GroupSerializer(data=groups, many=True)
        if group_serializer.is_valid() and settings.TARGET_PERMISSIONS_ONLY is False:
            for group in groups:
                group_instance = Group.objects.get(pk=group['id'])
                assign_perm('tom_observations.view_observationrecord', group_instance, obsr)
                assign_perm('tom_observations.change_observationrecord', group_instance, obsr)
                assign_perm('tom_observations.delete_observationrecord', group_instance, obsr)

        return obsr
    def to_representation(self, instance):
        representation = super().to_representation(instance)
        groups = []
        for group in get_groups_with_perms(instance):
            groups.append(GroupSerializer(group).data)
        representation['groups'] = groups
        return representation
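# Illustrative usage sketch (not part of this module): how a client payload maps through
# ObservationRecordSerializer.create(). The ObservationRecord field names ('target',
# 'facility', 'parameters', 'status') and the group pk are assumptions for the example;
# the 'groups' entries only grant per-record permissions when
# settings.TARGET_PERMISSIONS_ONLY is False.
#
#   serializer = ObservationRecordSerializer(data={
#       'target': 1,
#       'facility': 'LCO',
#       'parameters': {'exposure_time': 60},
#       'status': 'PENDING',
#       'groups': [{'id': 1, 'name': 'Public'}],
#   })
#   serializer.is_valid(raise_exception=True)
#   obs_record = serializer.save()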
class TargetSerializer(serializers.ModelSerializer):
    """Target serializer responsible for transforming models to/from json (or other representations).
    See https://www.django-rest-framework.org/api-guide/serializers/#modelserializer
    """
    targetextra_set = TargetExtraSerializer(many=True)
    aliases = TargetNameSerializer(many=True)
    groups = GroupSerializer(many=True, required=False)  # TODO: return groups in detail and list

    class Meta:
        model = Target
        fields = '__all__'
        # TODO: We should investigate if this validator logic can be reused in the forms to reduce code duplication.
        # TODO: Try to put validators in settings to allow user changes
        validators = [
            RequiredFieldsTogetherValidator('type', 'SIDEREAL', 'ra', 'dec'),
            RequiredFieldsTogetherValidator('type', 'NON_SIDEREAL', 'epoch_of_elements', 'inclination',
                                            'lng_asc_node', 'arg_of_perihelion', 'eccentricity'),
            RequiredFieldsTogetherValidator('scheme', 'MPC_COMET', 'perihdist', 'epoch_of_perihelion'),
            RequiredFieldsTogetherValidator('scheme', 'MPC_MINOR_PLANET', 'mean_anomaly', 'semimajor_axis'),
            RequiredFieldsTogetherValidator('scheme', 'JPL_MAJOR_PLANET', 'mean_daily_motion', 'mean_anomaly',
                                            'semimajor_axis')
        ]

    def create(self, validated_data):
        """DRF requires explicitly handling writeable nested serializers,
        here we pop the alias/tag/group data and save it using their respective serializers
        """
        aliases = validated_data.pop('aliases', [])
        targetextras = validated_data.pop('targetextra_set', [])
        groups = validated_data.pop('groups', [])

        target = Target.objects.create(**validated_data)

        # Save groups for this target
        group_serializer = GroupSerializer(data=groups, many=True)
        if group_serializer.is_valid():
            for group in groups:
                group_instance = Group.objects.get(pk=group['id'])
                assign_perm('tom_targets.view_target', group_instance, target)
                assign_perm('tom_targets.change_target', group_instance, target)
                assign_perm('tom_targets.delete_target', group_instance, target)

        tns = TargetNameSerializer(data=aliases, many=True)
        if tns.is_valid():
            for alias in aliases:
                if alias['name'] == target.name:
                    target.delete()
                    alias_value = alias['name']
                    raise serializers.ValidationError(
                        f'Alias \'{alias_value}\' conflicts with Target name \'{target.name}\'.')
            tns.save(target=target)

        tes = TargetExtraSerializer(data=targetextras, many=True)
        if tes.is_valid():
            tes.save(target=target)

        return target

    def to_representation(self, instance):
        representation = super().to_representation(instance)
        groups = []
        for group in get_groups_with_perms(instance):
            groups.append(GroupSerializer(group).data)
        representation['groups'] = groups
        return representation

    def update(self, instance, validated_data):
        """
        For TargetExtra and TargetName objects, if the ID is present, it will update the corresponding row.
        If the ID is not present, it will attempt to create a new TargetExtra or TargetName associated with this Target.
""" aliases = validated_data.pop('aliases', []) targetextras = validated_data.pop('targetextra_set', []) groups = validated_data.pop('groups', []) # Save groups for this target group_serializer = GroupSerializer(data=groups, many=True) if group_serializer.is_valid(): for group in groups: group_instance = Group.objects.get(pk=group['id']) assign_perm('tom_targets.view_target', group_instance, instance) assign_perm('tom_targets.change_target', group_instance, instance) assign_perm('tom_targets.delete_target', group_instance, instance) # TODO: add tests for alias_data in aliases: alias = dict(alias_data) if alias['name'] == instance.name: # Alias shouldn't conflict with target name alias_name = alias['name'] raise serializers.ValidationError( f'Alias \'{alias_name}\' conflicts with Target name \'{instance.name}\'.') if alias.get('id'): tn_instance = TargetName.objects.get(pk=alias['id']) if tn_instance.target != instance: # Alias should correspond with target to be updated raise serializers.ValidationError(f'''TargetName identified by id \'{tn_instance.id}\' is not an alias of Target \'{instance.name}\'''') elif alias['name'] == tn_instance.name: break # Don't update if value doesn't change, because it will throw an error tns = TargetNameSerializer(tn_instance, data=alias_data) else: tns = TargetNameSerializer(data=alias_data) if tns.is_valid(): tns.save(target=instance) for te_data in targetextras: te = dict(te_data) if te_data.get('id'): te_instance = TargetExtra.objects.get(pk=te['id']) tes = TargetExtraSerializer(te_instance, data=te_data) else: tes = TargetExtraSerializer(data=te_data) if tes.is_valid(): tes.save(target=instance) fields_to_validate = ['name', 'type', 'ra', 'dec', 'epoch', 'parallax', 'pm_ra', 'pm_dec', 'galactic_lng', 'galactic_lat', 'distance', 'distance_err', 'scheme', 'epoch_of_elements', 'mean_anomaly', 'arg_of_perihelion', 'eccentricity', 'lng_asc_node', 'inclination', 'mean_daily_motion', 'semimajor_axis', 'epoch_of_perihelion', 'ephemeris_period', 'ephemeris_period_err', 'ephemeris_epoch', 'ephemeris_epoch_err', 'perihdist'] for field in fields_to_validate: setattr(instance, field, validated_data.get(field, getattr(instance, field))) instance.save() return instance
class DataProductSerializer(serializers.ModelSerializer):
    target = TargetFilteredPrimaryKeyRelatedField(queryset=Target.objects.all())
    observation_record = ObservationRecordFilteredPrimaryKeyRelatedField(queryset=ObservationRecord.objects.all(),
                                                                         required=False)
    groups = GroupSerializer(many=True, required=False)
    data_product_group = DataProductGroupSerializer(many=True, required=False)
    reduceddatum_set = ReducedDatumSerializer(many=True, required=False)
    data_product_type = serializers.CharField(allow_blank=False)

    class Meta:
        model = DataProduct
        fields = ('id', 'product_id', 'target', 'observation_record', 'data', 'extra_data', 'data_product_type',
                  'groups', 'data_product_group', 'reduceddatum_set')

    def create(self, validated_data):
        """DRF requires explicitly handling writeable nested serializers,
        here we pop the groups data and save it using its serializer.
        """
        groups = validated_data.pop('groups', [])

        dp = DataProduct.objects.create(**validated_data)

        # Save groups for this data product
        group_serializer = GroupSerializer(data=groups, many=True)
        if group_serializer.is_valid() and not settings.TARGET_PERMISSIONS_ONLY:
            for group in groups:
                group_instance = Group.objects.get(pk=group['id'])
                assign_perm('tom_dataproducts.view_dataproduct', group_instance, dp)
                assign_perm('tom_dataproducts.change_dataproduct', group_instance, dp)
                assign_perm('tom_dataproducts.delete_dataproduct', group_instance, dp)

        return dp

    def to_representation(self, instance):
        representation = super().to_representation(instance)
        groups = []
        for group in get_groups_with_perms(instance):
            groups.append(GroupSerializer(group).data)
        representation['groups'] = groups
        return representation

    def update(self, instance, validated_data):
        groups = validated_data.pop('groups', [])

        instance = super().update(instance, validated_data)

        # Save groups for this dataproduct
        group_serializer = GroupSerializer(data=groups, many=True)
        if group_serializer.is_valid() and not settings.TARGET_PERMISSIONS_ONLY:
            for group in groups:
                group_instance = Group.objects.get(pk=group['id'])
                assign_perm('tom_dataproducts.view_dataproduct', group_instance, instance)
                assign_perm('tom_dataproducts.change_dataproduct', group_instance, instance)
                assign_perm('tom_dataproducts.delete_dataproduct', group_instance, instance)

        return instance

    def validate_data_product_type(self, value):
        for dp_type in settings.DATA_PRODUCT_TYPES.keys():
            if not value or value == dp_type:
                break
        else:
            raise serializers.ValidationError(
                'Not a valid data_product_type. Valid data_product_types are {0}.'.format(
                    ', '.join(k for k in settings.DATA_PRODUCT_TYPES.keys())))
        return value
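# Illustrative usage sketch (not part of this module): uploading a data product for an
# existing Target via DataProductSerializer.create(). The file contents, the
# 'spectroscopy' type and the group pk are assumptions; data_product_type must match a
# key configured in settings.DATA_PRODUCT_TYPES, and the filtered related fields
# typically expect a request in the serializer context.
#
#   from django.core.files.uploadedfile import SimpleUploadedFile
#
#   serializer = DataProductSerializer(
#       data={
#           'product_id': 'example-spectrum-001',
#           'target': 1,
#           'data': SimpleUploadedFile('spectrum.fits', b'...file contents...'),
#           'data_product_type': 'spectroscopy',
#           'groups': [{'id': 1, 'name': 'Public'}],
#       },
#       context={'request': request},
#   )
#   serializer.is_valid(raise_exception=True)
#   dp = serializer.save()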