class DayDashboardJobSerializer(BaseDashboardJobSerializer):
    """Dashboard job serializer for day-granularity buckets.

    Extends the shared base serializer with a ``day`` timestamp marking
    the day bucket the aggregated job data belongs to.
    """
    day = serializers.DateTimeField()
class NewsSerializer(ImagableModelSerializer, serializers.ModelSerializer):
    """ News serializer - API endpoint for news Backbone application. """
    id = serializers.Field(source='pk')
    title = serializers.CharField(max_length=64)
    slug = serializers.SlugField(required=False)
    link = serializers.Field(source='get_absolute_url')
    content = serializers.Field(source='get_entry_introtext')
    date_created = serializers.DateTimeField(required=False)
    date_edited = serializers.DateTimeField(required=False)
    username = serializers.Field(source='creator.username')
    user_id = serializers.Field(source='creator.pk')
    user_full_name = serializers.Field(source='creator.get_full_name')
    avatar = serializers.Field(source='creator.profile.avatar.url')
    creator_url = serializers.Field(source='creator.profile.get_absolute_url')
    location = serializers.PrimaryKeyRelatedField(read_only=True)
    category = serializers.RelatedField()
    category_url = serializers.SerializerMethodField('category_search_url')
    edited = serializers.BooleanField()
    tags = serializers.SerializerMethodField('get_tags')
    comment_count = serializers.SerializerMethodField('get_comment_count')
    comment_meta = serializers.SerializerMethodField('get_comment_meta')

    class Meta:
        model = News
        fields = (
            'id', 'title', 'slug', 'link', 'content', 'date_created',
            'date_edited', 'username', 'user_id', 'avatar', 'location',
            'category', 'category_url', 'edited', 'tags', 'comment_count',
            'user_full_name', 'creator_url', 'comment_meta', 'image',
        )

    def get_tags(self, obj):
        """Return the entry's tags as a list of name/search-URL dicts."""
        tags = []
        for tag in obj.tags.all():
            tags.append({
                'name': tag.name,
                'url': reverse('locations:tag_search', kwargs={
                    'slug': obj.location.slug,
                    'tag': tag.slug
                })
            })
        return tags

    def get_comment_meta(self, obj):
        """Return content-type metadata needed by the comment widget."""
        return {
            'content-type': ContentType.objects.get_for_model(News).pk,
            'content-label': 'blog',
        }

    def get_comment_count(self, obj):
        """Return the number of comments attached to this news entry.

        Fixed: the original fetched every matching comment row and called
        ``len()`` on the queryset; a single filtered ``COUNT`` query is
        issued instead, with both conditions merged into one filter.
        """
        content_type = ContentType.objects.get_for_model(obj)
        return CustomComment.objects.filter(
            content_type=content_type, object_pk=obj.pk).count()

    def category_search_url(self, obj):
        """Return the category search URL, or an empty string if no category."""
        if obj.category is not None:
            return reverse('locations:category_search', kwargs={
                'slug': obj.location.slug,
                'app': 'blog',
                'model': 'news',
                'category': obj.category.pk
            })
        return u""
class IdeaSerializer(serializers.ModelSerializer):
    """ Idea serializer. """
    id = serializers.Field(source='pk')
    name = serializers.CharField()
    url = serializers.Field(source='get_absolute_url')
    description = serializers.CharField()
    creator_id = serializers.Field(source='creator.pk')
    creator_url = serializers.Field(source='creator.profile.get_absolute_url')
    creator_username = serializers.Field(source='creator.username')
    creator_fullname = serializers.Field(source='creator.get_full_name')
    creator_avatar = serializers.Field(source='creator.profile.avatar.url')
    date_created = serializers.DateTimeField(required=False)
    date_edited = serializers.DateTimeField(required=False)
    category_name = serializers.SerializerMethodField('get_category_name')
    category_url = serializers.SerializerMethodField('category_search_url')
    total_comments = serializers.Field(source='get_comment_count')
    total_votes = serializers.Field(source='get_votes')
    edited = serializers.BooleanField()
    tags = serializers.SerializerMethodField('get_tags')
    comment_meta = serializers.SerializerMethodField('get_comment_meta')
    image = serializers.Field(source='image_url')

    class Meta:
        model = Idea
        fields = ('id', 'name', 'description', 'creator_id',
                  'creator_username', 'creator_fullname', 'creator_avatar',
                  'date_created', 'date_edited', 'edited', 'tags',
                  'category_name', 'category_url', 'total_comments',
                  'total_votes', 'url', 'creator_url', 'comment_meta',
                  'image')

    def get_comment_meta(self, obj):
        """Content-type metadata consumed by the comment widget."""
        return {
            'content-type': ContentType.objects.get_for_model(Idea).pk,
            'content-label': 'ideas',
        }

    def get_tags(self, obj):
        """Serialize the idea's tags as name/search-URL pairs."""
        return [
            {
                'name': tag.name,
                'url': reverse('locations:tag_search',
                               kwargs={'slug': obj.location.slug,
                                       'tag': tag.slug}),
            }
            for tag in obj.tags.all()
        ]

    def category_search_url(self, obj):
        """URL searching this idea's category, or empty string if none."""
        if not obj.category:
            return ''
        return reverse('locations:category_search',
                       kwargs={'slug': obj.location.slug,
                               'app': 'topics',
                               'model': 'discussion',
                               'category': obj.category.pk})

    def get_category_name(self, obj):
        """Display name of the idea's category, or empty string if none."""
        return obj.category.name if obj.category else u''
class AppAccountSerializer(AppSerializerMixin, AuthSerializerMixin,
                           BulkOrgResourceModelSerializer):
    """Serializer for application accounts with category/type display helpers.

    Read-only choice fields plus human-readable ``*_display`` companions
    resolved through class-level choice maps.
    """
    category = serializers.ChoiceField(label=_('Category'),
                                       choices=const.AppCategory.choices,
                                       read_only=True)
    category_display = serializers.SerializerMethodField(
        label=_('Category display'))
    type = serializers.ChoiceField(label=_('Type'),
                                   choices=const.AppType.choices,
                                   read_only=True)
    type_display = serializers.SerializerMethodField(label=_('Type display'))
    date_created = serializers.DateTimeField(label=_('Date created'),
                                             format="%Y/%m/%d %H:%M:%S",
                                             read_only=True)
    date_updated = serializers.DateTimeField(label=_('Date updated'),
                                             format="%Y/%m/%d %H:%M:%S",
                                             read_only=True)

    # Class-level lookup tables: choice value -> display label.
    category_mapper = dict(const.AppCategory.choices)
    type_mapper = dict(const.AppType.choices)

    class Meta:
        model = models.Account
        fields_mini = ['id', 'username', 'version']
        # Secrets are accepted on write but never echoed back.
        fields_write_only = [
            'password', 'private_key', 'public_key', 'passphrase'
        ]
        fields_other = ['date_created', 'date_updated']
        fields_fk = ['systemuser', 'systemuser_display', 'app', 'app_display']
        fields = fields_mini + fields_fk + fields_write_only + fields_other + [
            'type', 'type_display', 'category', 'category_display', 'attrs'
        ]
        extra_kwargs = {
            'username': {
                'default': '',
                'required': False
            },
            'password': {
                'write_only': True
            },
            'app_display': {
                'label': _('Application display')
            },
            'systemuser_display': {
                'label': _('System User')
            }
        }
        # Bulk creation goes through the model manager and silently skips
        # rows that would violate a unique constraint.
        use_model_bulk_create = True
        model_bulk_create_kwargs = {'ignore_conflicts': True}

    @property
    def app(self):
        # Only resolvable when serializing an existing Account instance.
        if isinstance(self.instance, models.Account):
            instance = self.instance.app
        else:
            instance = None
        return instance

    def get_category_display(self, obj):
        """Human-readable label for the account's category."""
        return self.category_mapper.get(obj.category)

    def get_type_display(self, obj):
        """Human-readable label for the account's type."""
        return self.type_mapper.get(obj.type)

    @classmethod
    def setup_eager_loading(cls, queryset):
        """ Perform necessary eager loading of data. """
        queryset = queryset.prefetch_related('systemuser', 'app')
        return queryset

    def to_representation(self, instance):
        # Decrypt/load auth fields before serializing.
        instance.load_auth()
        return super().to_representation(instance)
class FileSerializer(JSONAPISerializer):
    """JSON-API serializer for files/folders stored on OSF or add-on providers.

    Size, dates and extra metadata are derived differently depending on
    whether the provider is ``osfstorage`` (version objects) or an external
    add-on (history entries).
    """
    filterable_fields = frozenset([
        'id',
        'name',
        'node',
        'kind',
        'path',
        'materialized_path',
        'size',
        'provider',
        'last_touched',
    ])
    id = IDField(source='_id', read_only=True)
    type = TypeField()
    checkout = CheckoutField()
    name = ser.CharField(read_only=True, help_text='Display name used in the general user interface')
    kind = ser.CharField(read_only=True, help_text='Either folder or file')
    path = ser.CharField(read_only=True, help_text='The unique path used to reference this object')
    size = ser.SerializerMethodField(read_only=True, help_text='The size of this file at this version')
    provider = ser.CharField(read_only=True, help_text='The Add-on service this file originates from')
    materialized_path = ser.CharField(
        read_only=True, help_text='The Unix-style path of this object relative to the provider root')
    last_touched = ser.DateTimeField(read_only=True, help_text='The last time this file had information fetched about it via the OSF')
    date_modified = ser.SerializerMethodField(read_only=True, help_text='Timestamp when the file was last modified')
    date_created = ser.SerializerMethodField(read_only=True, help_text='Timestamp when the file was created')
    extra = ser.SerializerMethodField(read_only=True, help_text='Additional metadata about this file')
    files = NodeFileHyperLinkField(
        related_view='nodes:node-files',
        related_view_kwargs={'node_id': '<node_id>', 'path': '<path>', 'provider': '<provider>'},
        kind='folder'
    )
    versions = NodeFileHyperLinkField(
        related_view='files:file-versions',
        related_view_kwargs={'file_id': '<_id>'},
        kind='file'
    )
    comments = FileCommentRelationshipField(related_view='nodes:node-comments',
                                            related_view_kwargs={'node_id': '<node._id>'},
                                            related_meta={'unread': 'get_unread_comments_count'},
                                            filter={'target': '<_id>'})
    links = LinksField({
        'info': Link('files:file-detail', kwargs={'file_id': '<_id>'}),
        'move': WaterbutlerLink(),
        'upload': WaterbutlerLink(),
        'delete': WaterbutlerLink(),
        'download': WaterbutlerLink(must_be_file=True),
        'new_folder': WaterbutlerLink(must_be_folder=True, kind='folder'),
    })

    class Meta:
        type_ = 'files'

    def get_size(self, obj):
        """Size (bytes) of the latest version, or None if no versions exist."""
        if obj.versions:
            return obj.versions[-1].size
        return None

    def get_date_modified(self, obj):
        """Last-modified timestamp (UTC-aware), or None if unknown."""
        mod_dt = None
        if obj.provider == 'osfstorage' and obj.versions:
            # Each time an osfstorage file is added or uploaded, a new version object is created with its
            # date_created equal to the time of the update. The date_modified is the modified date
            # from the backend the file is stored on. This field refers to the modified date on osfstorage,
            # so prefer to use the date_created of the latest version.
            mod_dt = obj.versions[-1].date_created
        elif obj.provider != 'osfstorage' and obj.history:
            mod_dt = obj.history[-1].get('modified', None)
        # `x and y` returns None when mod_dt is None, else the tz-aware copy.
        return mod_dt and mod_dt.replace(tzinfo=pytz.utc)

    def get_date_created(self, obj):
        """Creation timestamp (UTC-aware), or None if unknown."""
        creat_dt = None
        if obj.provider == 'osfstorage' and obj.versions:
            creat_dt = obj.versions[0].date_created
        elif obj.provider != 'osfstorage' and obj.history:
            # Non-osfstorage files don't store a created date, so instead get the modified date of the
            # earliest entry in the file history.
            creat_dt = obj.history[0].get('modified', None)
        return creat_dt and creat_dt.replace(tzinfo=pytz.utc)

    def get_extra(self, obj):
        """Return provider metadata reduced to md5/sha256 hashes."""
        metadata = {}
        if obj.provider == 'osfstorage' and obj.versions:
            metadata = obj.versions[-1].metadata
        elif obj.provider != 'osfstorage' and obj.history:
            metadata = obj.history[-1].get('extra', {})
        extras = {}
        extras['hashes'] = {  # mimic waterbutler response
            'md5': metadata.get('md5', None),
            'sha256': metadata.get('sha256', None),
        }
        return extras

    def get_unread_comments_count(self, obj):
        """Unread comment count on this file for the requesting user (0 if anonymous)."""
        user = self.context['request'].user
        # NOTE(review): `is_anonymous()` is called as a method — valid on
        # older Django; newer Django makes it a property. Confirm version.
        if user.is_anonymous():
            return 0
        return Comment.find_n_unread(user=user, node=obj.node, page='files',
                                     root_id=obj._id)

    def user_id(self, obj):
        # NOTE: obj is the user here, the meta field for
        # Hyperlinks is weird
        if obj:
            return obj._id
        return None

    def update(self, instance, validated_data):
        """Apply validated fields to the FileNode and persist it."""
        assert isinstance(instance, FileNode), 'Instance must be a FileNode'
        for attr, value in validated_data.items():
            setattr(instance, attr, value)
        instance.save()
        return instance

    def is_valid(self, **kwargs):
        # Skip HTML cleaning during validation for file payloads.
        return super(FileSerializer, self).is_valid(clean_html=False, **kwargs)
class LastSubmissionDateTimeSerializer(serializers.Serializer):
    """Serializer exposing only the timestamp of the most recent submission."""
    last_submission_datetime = serializers.DateTimeField()
class OrdersAssignPostResponse(serializers.Serializer):
    """Response body for the orders/assign endpoint.

    Contains the list of assigned order ids and the assignment time,
    rendered in ISO-8601 UTC ("Z") format.
    """
    orders = OrdersIdSchema(many=True)
    assign_time = serializers.DateTimeField(
        label="Время доставки", format="%Y-%m-%dT%H:%M:%SZ"
    )
class ClientSerializer(serializers.Serializer):
    """Serializer for client request metadata: country code and request time."""
    country = serializers.CharField()
    request_time = serializers.DateTimeField()
class GetVersionHistorySerializerParam(serializers.ModelSerializer):
    """Read serializer for version-change-history records.

    Exposes every model field; ``create_time`` is read-only and rendered
    as "YYYY-MM-DD HH:MM:SS".
    """
    create_time = serializers.DateTimeField(label="create_time",
                                            format="%Y-%m-%d %H:%M:%S",
                                            required=False,
                                            read_only=True)

    class Meta:
        model = VersionChangeHistory
        fields = '__all__'
class DeviceBaiscSerializer(NatrixSerializer):
    """Terminal-device serializer.

    Validates a device SN and its register organizations, renders device
    details together with per-terminal statistics, and updates alert /
    register-organization settings atomically.

    NOTE: the class name typo ("Baisc") is preserved for backward
    compatibility with existing imports.
    """
    sn = serializers.CharField(max_length=64)
    hostname = serializers.CharField(max_length=64, allow_blank=True, required=False)
    type = serializers.CharField(max_length=128, allow_blank=True, required=False)
    os_type = serializers.CharField(max_length=64, allow_blank=True, required=False)
    os_version = serializers.CharField(max_length=128, allow_blank=True, required=False)
    client_version = serializers.CharField(max_length=16, allow_blank=True, required=False)
    status = serializers.ChoiceField(choices=device_status_choice, required=False)
    detect_orgs = serializers.ListField(
        child=serializers.IntegerField(min_value=1),
        allow_empty=True, required=False)
    reg_orgs = serializers.ListSerializer(
        child=serializers.IntegerField(min_value=1), allow_empty=True)
    comment = serializers.CharField(max_length=512, allow_blank=True, required=False)
    update_time = serializers.DateTimeField(allow_null=True, required=False)
    device_alert = serializers.BooleanField(default=True)
    terminal_alert = serializers.BooleanField(default=False)

    def validate_sn(self, value):
        """Validate SN and generate instance.

        :param value: device serial number
        :return: the validated SN
        :raises serializers.ValidationError: if no device exists with this SN
        """
        try:
            self.instance = TerminalDevice.objects.get(sn=value)
        except TerminalDevice.DoesNotExist:
            raise serializers.ValidationError(
                'The device is not exist for sn({})'.format(value))
        return value

    def validate_reg_orgs(self, value):
        """Validate reg_orgs.

        The conditions, includes:
         - All organization ids is exist in DB
         - There are not repeat id
         - All organization's regions are consistent.

        Side effect: stores the resolved organizations and their common
        region on ``self`` for use by ``update()``.

        :param value: list of organization ids
        :return: the validated list
        """
        orgs_query = list(Organization.objects.filter(id__in=value))
        if len(orgs_query) != len(value):
            raise serializers.ValidationError(
                'There are some inavailable organization id(repeat or non-existent)!'
            )
        consistent_region = None
        for org in orgs_query:
            if org.region is None:
                continue
            if consistent_region is None:
                consistent_region = org.region
            elif consistent_region != org.region:
                raise serializers.ValidationError(
                    'The register organizations with more than one organizations'
                )
        # generate register organizations
        self.reg_organizations = orgs_query
        self.reg_region = consistent_region
        return value

    def to_representation(self, instance):
        """Render a TerminalDevice with per-terminal statistics."""
        if not isinstance(instance, TerminalDevice):
            raise natrix_exception.ParameterInvalidException(
                parameter='instance')
        try:
            ret = OrderedDict()
            terminals = instance.terminal_set.all()
            total = 0
            active = 0
            alive = 0
            for t in terminals:
                total += 1
                if t.is_valid():
                    active += 1
                if t.is_alive():
                    alive += 1
            ret['sn'] = instance.sn
            ret['hostname'] = instance.hostname
            ret['type'] = instance.product
            ret['os_type'] = instance.os_type
            ret['os_version'] = '[{}]-[{}]'.format(instance.os_major_version,
                                                   instance.os_minor_version)
            ret['client_version'] = instance.natrixclient_version
            ret['status'] = instance.status
            ret['update_time'] = instance.last_online_time
            ret['comment'] = instance.comment
            ret['device_alert'] = instance.device_alert
            ret['terminal_alert'] = instance.terminal_alert

            register = instance.register
            # FIX: materialise as lists — on Python 3, a bare map() object is
            # a lazy iterator and is not JSON-serializable in the response.
            ret['reg_orgs'] = [
                {'id': item.id, 'name': item.name, 'desc': item.get_full_name()}
                for item in (register.organizations.all() if register else [])
            ]
            ret['detect_orgs'] = [
                {'id': item.id, 'name': item.name, 'desc': item.get_full_name()}
                for item in instance.organizations.all()
            ]
            segments = map(lambda t: t.get_segment(), instance.terminal_set.all())
            ret['segments'] = [s for s in segments if s]
            ret['terminal_total'] = total
            ret['terminal_active'] = active
            ret['terminal_alive'] = alive
            return ret
        except Exception as e:
            logger.error('Serializer Terminal Device ERROR: {}'.format(e))
            raise natrix_exception.ClassInsideException(
                message=u'{}'.format(e))

    def update(self, instance, validated_data):
        """Update alerts and register organizations in one transaction.

        Relies on ``self.reg_organizations`` / ``self.reg_region`` set by
        ``validate_reg_orgs``.
        """
        try:
            with transaction.atomic():
                instance.device_alert = validated_data.get(
                    'device_alert', self.instance.device_alert)
                instance.terminal_alert = validated_data.get(
                    'terminal_alert', self.instance.terminal_alert)
                if not instance.register:
                    instance.register = RegisterOrganization.objects.create()
                if self.reg_region:
                    if instance.register.address:
                        instance.register.address.region = self.reg_region
                        instance.register.address.save()
                    else:
                        instance.register.address = Address.objects.create(
                            region=self.reg_region)
                instance.register.organizations.clear()
                for item in self.reg_organizations:
                    instance.register.organizations.add(item)
                instance.register.save()
                instance.save()
            return instance
        except Exception as e:
            # FIX: previously the exception was swallowed and None was
            # returned, so callers saw a "successful" update that never
            # happened. Log, then propagate.
            logger.error('Update terminal device error, {}'.format(e))
            raise
class CostModelSerializer(serializers.Serializer):
    """Serializer for a list of tiered rates."""

    class Meta:
        """Metadata for the serializer."""
        model = CostModel

    uuid = serializers.UUIDField(read_only=True)
    name = serializers.CharField(allow_blank=True)
    description = serializers.CharField(allow_blank=True)
    source_type = serializers.CharField(required=True)
    provider_uuids = serializers.ListField(child=UUIDKeyRelatedField(
        queryset=Provider.objects.all(), pk_field='uuid'),
                                           required=False)
    created_timestamp = serializers.DateTimeField(read_only=True)
    updated_timestamp = serializers.DateTimeField(read_only=True)
    rates = RateSerializer(required=False, many=True)
    markup = MarkupSerializer(required=False)

    @property
    def metric_map(self):
        """Map metrics and display names.

        Returns a dict of dicts: source_type -> metric -> metrics-map row.
        """
        metric_map_by_source = defaultdict(dict)
        metric_map = CostModelMetricsMap.objects.all()
        for metric in metric_map:
            metric_map_by_source[metric.source_type][metric.metric] = metric
        return metric_map_by_source

    @property
    def source_type_internal_value_map(self):
        """Map display name to internal source type."""
        internal_map = {}
        for key, value in SOURCE_TYPE_MAP.items():
            internal_map[value] = key
        return internal_map

    def validate(self, data):
        """Validate that the source type is acceptable."""
        # The cost model has markup, no rates, and is for a valid non-OpenShift source type
        source_type = data.get('source_type')
        # Normalize casing via the provider mapping before further checks.
        if source_type and Provider.PROVIDER_CASE_MAPPING.get(
                source_type.lower()):
            data['source_type'] = Provider.PROVIDER_CASE_MAPPING.get(
                source_type.lower())
        if (data.get('markup') and not data.get('rates')
                and data['source_type'] != Provider.PROVIDER_OCP
                and data['source_type'] in SOURCE_TYPE_MAP.keys()):
            return data
        if data['source_type'] not in self.metric_map.keys():
            raise serializers.ValidationError(
                '{} is not a valid source.'.format(data['source_type']))
        return data

    def _get_metric_display_data(self, source_type, metric):
        """Return API display metadata.

        NOTE(review): returns None when (source_type, metric) is absent
        from the metric map; to_representation dereferences the result
        without a None check — confirm inputs are always mapped.
        """
        return self.metric_map.get(source_type, {}).get(metric)

    def _check_for_duplicate_metrics(self, rates):
        """Check for duplicate metric/rate combinations within a cost model."""
        rate_type_by_metric = defaultdict(dict)
        for rate in rates:
            metric = rate.get('metric', {}).get('name')
            for key in rate:
                if key in RateSerializer.RATE_TYPES:
                    if key in rate_type_by_metric[metric]:
                        rate_type_by_metric[metric][key] += 1
                    else:
                        rate_type_by_metric[metric][key] = 1
        for metric in rate_type_by_metric:
            for rate_type, count in rate_type_by_metric[metric].items():
                if count > 1:
                    err_msg = 'Duplicate {} entry found for {}'.format(
                        rate_type, metric)
                    raise serializers.ValidationError(err_msg)

    def validate_provider_uuids(self, provider_uuids):
        """Check that uuids in provider_uuids are valid identifiers."""
        valid_uuids = []
        invalid_uuids = []
        for uuid in provider_uuids:
            if Provider.objects.filter(uuid=uuid).count() == 1:
                valid_uuids.append(uuid)
            else:
                invalid_uuids.append(uuid)
        if invalid_uuids:
            err_msg = 'Provider object does not exist with following uuid(s): {}.'.format(
                invalid_uuids)
            raise serializers.ValidationError(err_msg)
        return valid_uuids

    def validate_rates(self, rates):
        """Run validation for rates."""
        self._check_for_duplicate_metrics(rates)
        validated_rates = []
        for rate in rates:
            serializer = RateSerializer(data=rate)
            serializer.is_valid(raise_exception=True)
            validated_rates.append(serializer.validated_data)
        return validated_rates

    def create(self, validated_data):
        """Create the cost model object in the database."""
        return CostModelManager().create(**validated_data)

    def update(self, instance, validated_data, *args, **kwargs):
        """Update the rate object in the database."""
        provider_uuids = validated_data.pop('provider_uuids', [])
        new_providers_for_instance = []
        for uuid in provider_uuids:
            new_providers_for_instance.append(
                str(Provider.objects.filter(uuid=uuid).first().uuid))
        manager = CostModelManager(cost_model_uuid=instance.uuid)
        manager.update_provider_uuids(new_providers_for_instance)
        manager.update(**validated_data)
        return manager.instance

    def to_representation(self, cost_model_obj):
        """Add provider UUIDs to the returned model."""
        rep = super().to_representation(cost_model_obj)
        rates = rep['rates']
        for rate in rates:
            metric = rate.get('metric', {})
            display_data = self._get_metric_display_data(
                cost_model_obj.source_type, metric.get('name'))
            metric.update({
                'label_metric': display_data.label_metric,
                'label_measurement': display_data.label_measurement,
                'label_measurement_unit': display_data.label_measurement_unit,
            })
        rep['rates'] = rates
        # Translate the internal source type to its display name.
        source_type = rep.get('source_type')
        if source_type in SOURCE_TYPE_MAP:
            source_type = SOURCE_TYPE_MAP[source_type]
        rep['source_type'] = source_type
        cm_uuid = cost_model_obj.uuid
        provider_uuids = CostModelManager(cm_uuid).get_provider_names_uuids()
        rep.update({'providers': provider_uuids})
        return rep
class LeavingMessageSerializer(serializers.ModelSerializer):
    """Serializer for user leaving-messages.

    ``userlm`` is hidden and auto-filled with the current request user;
    ``add_time`` is read-only and rendered as "YYYY-MM-DD HH:MM:SS".
    """
    userlm = serializers.HiddenField(default=serializers.CurrentUserDefault())
    # FIX: the format string used "%H:%M:%s" — lowercase %s is not the
    # strftime seconds directive (it is undefined/platform-dependent);
    # "%S" is the zero-padded second.
    add_time = serializers.DateTimeField(read_only=True,
                                         format="%Y-%m-%d %H:%M:%S")

    class Meta:
        model = UserLeavingMessage
        fields = ("userlm", "msg_type", "subject", "message", "file", "id",
                  "add_time")
class YearDashboardJobSerializer(BaseDashboardJobSerializer):
    """Dashboard job serializer for year-granularity buckets.

    Extends the shared base serializer with a ``year`` timestamp marking
    the year bucket the aggregated job data belongs to.
    """
    year = serializers.DateTimeField()
class MonthDashboardJobSerializer(BaseDashboardJobSerializer):
    """Dashboard job serializer for month-granularity buckets.

    Extends the shared base serializer with a ``month`` timestamp marking
    the month bucket the aggregated job data belongs to.
    """
    month = serializers.DateTimeField()
class ParkingSpaceSerializer(serializers.ModelSerializer):
    """Model serializer for parking spaces exposing every model field.

    ``booked_till`` is declared explicitly as a writable datetime.
    """
    booked_till = serializers.DateTimeField()

    class Meta:
        model = ParkingSpace
        fields = "__all__"
class GetVersionDesignsStageFilesSerializerParam(serializers.ModelSerializer):
    """Serializer for querying design-stage files of a specific version.

    ``version_id`` is required on input; ``create_time`` is read-only and
    rendered as "YYYY-MM-DD HH:MM:SS".
    """
    version_id = serializers.CharField(label="version_id", required=True,
                                       allow_blank=False)
    create_time = serializers.DateTimeField(label="create_time",
                                            format="%Y-%m-%d %H:%M:%S",
                                            required=False,
                                            read_only=True)

    class Meta:
        model = VersionDesignStageFile
        fields = '__all__'
class EventCreateAndUpdateSerializer(TagSerializerMixin, BasisModelSerializer):
    """Create/update serializer for events with nested registration pools.

    Pools are created, updated or deleted to match the submitted list,
    inside a transaction, with special handling for TBA / OPEN / INFINITE
    event status types.
    """
    cover = ImageField(required=False, options={"height": 500})
    responsible_group = AbakusGroupField(queryset=AbakusGroup.objects.all(),
                                         required=False, allow_null=True)
    pools = PoolCreateAndUpdateSerializer(many=True, required=False)
    text = ContentSerializerField()
    is_abakom_only = BooleanField(required=False, default=False)
    registration_close_time = serializers.DateTimeField(read_only=True)

    class Meta:
        model = Event
        fields = (
            "id", "title", "cover", "description", "text", "company",
            "responsible_group", "feedback_description", "feedback_required",
            "event_type", "event_status_type", "location", "is_priced",
            "price_member", "price_guest", "use_stripe", "payment_due_date",
            "start_time", "end_time", "merge_time", "use_captcha", "tags",
            "pools", "unregistration_deadline", "pinned", "use_consent",
            "heed_penalties", "is_abakom_only", "registration_deadline_hours",
            "registration_close_time", "youtube_url",
        )

    def validate(self, data):
        """ Check that start is before finish. """
        # FIX: the original used hasattr() on the validated-data dict.
        # Dicts expose keys via __contains__, not attributes, so
        # hasattr(data, "start_time") was always False and the check
        # below never executed.
        if "start_time" in data and "end_time" in data:
            if data["start_time"] > data["end_time"]:
                raise serializers.ValidationError({
                    "end_time":
                    "User does not have the required permissions for time travel"
                })
        return data

    def create(self, validated_data):
        """Create the event and its pools atomically."""
        pools = validated_data.pop("pools", [])
        is_abakom_only = validated_data.pop("is_abakom_only", False)
        event_status_type = validated_data.get(
            "event_status_type",
            Event._meta.get_field("event_status_type").default)
        if event_status_type == constants.TBA:
            pools = []
            validated_data["location"] = "TBA"
        elif event_status_type == constants.OPEN:
            pools = []
        # FIX: guard against an empty pool list — the original indexed
        # pools[0] unconditionally and raised IndexError.
        elif event_status_type == constants.INFINITE and pools:
            pools = pools[:1]
            pools[0]["capacity"] = 0
        with transaction.atomic():
            event = super().create(validated_data)
            for pool in pools:
                permission_groups = pool.pop("permission_groups")
                created_pool = Pool.objects.create(event=event, **pool)
                created_pool.permission_groups.set(permission_groups)
            event.set_abakom_only(is_abakom_only)
            return event

    def update(self, instance, validated_data):
        """Update the event; sync pools (create/update/delete) when provided."""
        pools = validated_data.pop("pools", None)
        is_abakom_only = validated_data.pop("is_abakom_only", False)
        event_status_type = validated_data.get(
            "event_status_type",
            Event._meta.get_field("event_status_type").default)
        if event_status_type == constants.TBA:
            pools = []
            validated_data["location"] = "TBA"
        elif event_status_type == constants.OPEN:
            pools = []
        # FIX: pools may be None (not submitted) or empty; the original
        # indexed pools[0] unconditionally and crashed.
        elif event_status_type == constants.INFINITE and pools:
            pools = pools[:1]
            pools[0]["capacity"] = 0
        with transaction.atomic():
            if pools is not None:
                # Pools not re-submitted are deleted below.
                existing_pools = list(instance.pools.all().values_list(
                    "id", flat=True))
                for pool in pools:
                    pool_id = pool.get("id", None)
                    if pool_id in existing_pools:
                        existing_pools.remove(pool_id)
                    permission_groups = pool.pop("permission_groups")
                    created_pool = Pool.objects.update_or_create(
                        event=instance,
                        id=pool_id,
                        defaults={
                            "name": pool.get("name"),
                            "capacity": pool.get("capacity", 0),
                            "activation_date": pool.get("activation_date"),
                        },
                    )[0]
                    created_pool.permission_groups.set(permission_groups)
                for pool_id in existing_pools:
                    Pool.objects.get(id=pool_id).delete()
            instance.set_abakom_only(is_abakom_only)
            return super().update(instance, validated_data)
class BaseSerializer(serializers.HyperlinkedModelSerializer):
    """Shared base serializer adding read-only created/updated timestamps."""
    created = serializers.DateTimeField(read_only=True)
    updated = serializers.DateTimeField(read_only=True)
class AssignTime(serializers.Serializer):
    """Serializer carrying a single assignment timestamp in ISO-8601 UTC ("Z") format."""
    assign_time = serializers.DateTimeField(
        label="Время доставки", format="%Y-%m-%dT%H:%M:%SZ"
    )
class ServerSerializer(serializers.Serializer):
    """Serializer syncing cloud server instances and their IP addresses.

    ``create`` upserts by ``instanceId``; inner/public IPs are reconciled
    against the submitted lists (missing ones are created, stale ones
    deleted). ``cloud`` is submitted as a provider code and translated to
    the Cloud primary key in ``to_internal_value``.
    """
    id = serializers.ReadOnlyField()
    cloud = serializers.PrimaryKeyRelatedField(queryset=Cloud.objects.all(),
                                               many=False)
    instanceId = serializers.CharField(required=True)
    instanceType = serializers.CharField(required=True)
    cpu = serializers.CharField(required=True)
    memory = serializers.CharField(required=True)
    instanceName = serializers.CharField(required=True)
    createdTime = serializers.DateTimeField(required=True,
                                            format="%Y-%m-%d %H:%M:%S")
    expiredTime = serializers.DateTimeField(required=True,
                                            format="%Y-%m-%d %H:%M:%S")
    hostname = serializers.CharField(required=True)
    publicIps = serializers.ListField(required=True, write_only=True)
    innerIps = serializers.ListField(required=True, write_only=True)

    class Meta:
        model = Server
        fields = "__all__"

    # Translate a cloud provider code into the corresponding Cloud pk.
    def getCloudPk(self, code):
        try:
            obj = Cloud.objects.get(code__exact=code)
            return obj.id
        except Cloud.DoesNotExist:
            # The lookup key is the provider code, not the pk; a missing
            # (or non-unique) code ends up here.
            logger.error("云厂商不存在: {}".format(code))
            raise serializers.ValidationError("云厂商不存在")
        except Exception as e:
            # FIX: the format string had no placeholder, so the exception
            # details were silently dropped from the log.
            logger.error("云厂商错误: {}".format(e.args))
            raise serializers.ValidationError("云厂商错误")

    # Replace the incoming cloud code with its pk before field validation.
    def to_internal_value(self, data):
        data["cloud"] = self.getCloudPk(data["cloud"])
        return super(ServerSerializer, self).to_internal_value(data)

    def getInstance(self, instanceId):
        """Return the existing Server for instanceId, or None."""
        try:
            return Server.objects.get(instanceId__exact=instanceId)
        except Server.DoesNotExist:
            return None
        except Exception as e:
            # FIX: same missing-placeholder bug as getCloudPk.
            logger.error("服务器错误: {}".format(e.args))
            raise serializers.ValidationError("服务器错误")

    def create(self, validated_data):
        """Create the server, or delegate to update() if it already exists."""
        instance = self.getInstance(validated_data["instanceId"])
        if instance is not None:
            return self.update(instance, validated_data)
        innerIps = validated_data.pop("innerIps")
        publicIps = validated_data.pop("publicIps")
        instance = Server.objects.create(**validated_data)
        self.check_inner_ip(instance, innerIps)
        self.check_public_ip(instance, publicIps)
        return instance

    def update(self, instance, validated_data):
        """Refresh cpu and reconcile both IP lists, then save."""
        instance.cpu = validated_data.get("cpu", "")
        self.check_inner_ip(instance, validated_data['innerIps'])
        self.check_public_ip(instance, validated_data['publicIps'])
        instance.save()
        return instance

    def check_inner_ip(self, instance, innerIps):
        """Ensure the inner-IP rows match innerIps exactly."""
        ip_queryset = instance.innerIpAddress.all()
        current_ip_objs = []
        for ip in innerIps:
            try:
                ip_obj = ip_queryset.get(ip__exact=ip)
            except Ip.DoesNotExist:
                ip_obj = Ip.objects.create(ip=ip, inner=instance)
            current_ip_objs.append(ip_obj)
        self.cleanip(ip_queryset, current_ip_objs)

    def check_public_ip(self, instance, publicIps):
        """Ensure the public-IP rows match publicIps exactly."""
        ip_queryset = instance.publicIpAddress.all()
        current_ip_objs = []
        for ip in publicIps:
            try:
                ip_obj = ip_queryset.get(ip__exact=ip)
            except Ip.DoesNotExist:
                ip_obj = Ip.objects.create(ip=ip, public=instance)
            current_ip_objs.append(ip_obj)
        self.cleanip(ip_queryset, current_ip_objs)

    def cleanip(self, ip_queryset, current_ip_objs):
        """Delete IP rows that are no longer in the submitted list."""
        not_exists_ip = set(ip_queryset) - set(current_ip_objs)
        for obj in not_exists_ip:
            obj.delete()

    def to_representation(self, instance):
        """Render cloud as {id, name} and expand both IP lists."""
        ret = super(ServerSerializer, self).to_representation(instance)
        ret['cloud'] = {'id': instance.cloud.id, 'name': instance.cloud.name}
        # FIX: removed leftover debug print() calls from the loops.
        ret["publicIps"] = [ip.ip for ip in instance.publicIpAddress.all()]
        ret["innerIps"] = [ip.ip for ip in instance.innerIpAddress.all()]
        return ret
class OrdersCompletePostResponse(serializers.Serializer):
    """Response body for the orders/complete endpoint.

    Identifies the courier and order and records the completion time in
    ISO-8601 UTC ("Z") format.
    """
    courier_id = serializers.IntegerField(
        label="Уникальный идентификатор курьера")
    order_id = serializers.IntegerField(label="Уникальный идентификатор заказа")
    complete_time = serializers.DateTimeField(
        label="Время доставки", format="%Y-%m-%dT%H:%M:%SZ"
    )
class SessionSerializer(serializers.Serializer):
    """Serializer for pipeline processing sessions.

    Handles creation and field-by-field updates of Session records,
    including staging/transfer request ids and pipeline state.
    """
    id = serializers.IntegerField(read_only=True)
    email = serializers.CharField(required=False, allow_blank=True,
                                  max_length=100)
    description = serializers.CharField(max_length=1000, default="")
    pipeline = serializers.CharField(max_length=100)
    config = serializers.JSONField()
    observation = serializers.CharField(max_length=100000, default="")
    observation2 = serializers.CharField(max_length=100000, default="")
    status = serializers.CharField(max_length=20, default="Staging")
    staging = serializers.CharField(max_length=20, default="new")
    pipeline_version = serializers.CharField(max_length=100, default="",
                                             read_only=True)
    pipeline_response = serializers.CharField(max_length=1000, default="")
    date_created = serializers.DateTimeField(read_only=True)
    date_modified = serializers.DateTimeField(read_only=True)
    # di_image = serializers.ImageField(required=False)
    di_fits = serializers.CharField(max_length=100, default="")
    rw_fits = serializers.CharField(max_length=100, default="")
    # stageid = serializers.CharField(max_length=30, default = "")
    stage_reqid = serializers.IntegerField(default=0)
    stage2_reqid = serializers.IntegerField(default=0)
    transfer_id = serializers.IntegerField(default=0)
    transfer2_id = serializers.IntegerField(default=0)

    def create(self, validated_data):
        """Persist a new Session from the validated payload."""
        return Session.objects.create(**validated_data)

    def update(self, instance, validated_data):
        """Copy each submitted field onto the instance, then save.

        Fields absent from validated_data keep their current value.
        """
        updatable = (
            'email', 'description', 'pipeline', 'config', 'observation',
            'observation2', 'pipeline_version', 'pipeline_response',
            'status', 'staging', 'date_created', 'date_modified',
            'di_fits', 'rw_fits',
            # 'stageid',
            'stage_reqid', 'stage2_reqid', 'transfer_id', 'transfer2_id',
        )
        for attr in updatable:
            setattr(instance, attr,
                    validated_data.get(attr, getattr(instance, attr)))
        instance.save()
        return instance
class DiscoverQuerySerializer(serializers.Serializer):
    """
    Validates a Discover query payload: project scoping, time window,
    selected fields, filter conditions, and aggregations.
    """
    projects = ListField(
        child=serializers.IntegerField(),
        required=True,
        allow_null=False,
    )
    start = serializers.DateTimeField(required=False)
    end = serializers.DateTimeField(required=False)
    range = serializers.CharField(required=False)
    fields = ListField(
        child=serializers.CharField(),
        required=False,
        allow_null=True,
    )
    limit = serializers.IntegerField(min_value=0, max_value=1000, required=False)
    rollup = serializers.IntegerField(required=False)
    orderby = serializers.CharField(required=False)
    conditions = ListField(
        child=ListField(),
        required=False,
        allow_null=True,
    )
    # NOTE(review): `default=[]` is a single shared list across serializer
    # instances — consider a callable default; verify before changing.
    aggregations = ListField(child=ListField(), required=False, allow_null=True, default=[])
    groupby = ListField(
        child=serializers.CharField(),
        required=False,
        allow_null=True,
    )

    def __init__(self, *args, **kwargs):
        super(DiscoverQuerySerializer, self).__init__(*args, **kwargs)
        # Resolve the requesting user's membership once; reused by
        # validate_projects() for the access check.
        self.member = OrganizationMember.objects.get(
            user=self.context['user'], organization=self.context['organization'])
        # NOTE(review): assumes the serializer is always constructed with a
        # `data` keyword argument — KeyError otherwise; confirm with callers.
        data = kwargs['data']
        fields = data.get('fields') or []
        # Remember the first requested `error.*` / `stack.*` field: these
        # array columns require an arrayjoin in the generated query, and
        # get_condition() must not double-wrap them with `has`.
        match = next(
            (self.get_array_field(field).group(1)
             for field in fields
             if self.get_array_field(field) is not None),
            None)
        self.arrayjoin = match if match else None

    def validate(self, data):
        # Propagate the arrayjoin computed in __init__ into validated data.
        data['arrayjoin'] = self.arrayjoin
        return data

    def validate_range(self, attrs, source):
        has_start = bool(attrs.get('start'))
        has_end = bool(attrs.get('end'))
        has_range = bool(attrs.get('range'))
        # Require exactly one of: (start AND end) or (range).
        if has_start != has_end or has_range == has_start:
            raise serializers.ValidationError(
                'Either start and end dates or range is required')

        # Populate start and end if only range is provided
        if (attrs.get(source)):
            delta = parse_stats_period(attrs[source])
            if (delta is None):
                raise serializers.ValidationError('Invalid range')
            # NOTE(review): timezone.now() is evaluated twice, so start/end
            # are anchored microseconds apart — harmless, but not identical.
            attrs['start'] = timezone.now() - delta
            attrs['end'] = timezone.now()
        return attrs

    def validate_projects(self, attrs, source):
        organization = self.context['organization']
        member = self.member
        projects = attrs[source]
        # context['projects'] is iterated as rows whose first element is the
        # project id — presumably (id, ...) tuples; verify against caller.
        org_projects = set(project[0] for project in self.context['projects'])
        # Every requested project must belong to the organization AND be
        # visible to this member.
        if not set(projects).issubset(
                org_projects) or not self.has_projects_access(
                member, organization, projects):
            raise PermissionDenied
        return attrs

    def validate_conditions(self, attrs, source):
        # Handle error (exception_stacks), stack(exception_frames)
        if attrs.get(source):
            conditions = [
                self.get_condition(condition)
                for condition in attrs[source]
            ]
            attrs[source] = conditions
        return attrs

    def get_array_field(self, field):
        # Matches array columns like `error.type` or `stack.function`;
        # group(1) is the column family (error|stack), group(0) the full name.
        pattern = r"^(error|stack)\..+"
        return re.search(pattern, field)

    def get_condition(self, condition):
        # `condition` is a [lhs, operator, rhs] triple.
        array_field = self.get_array_field(condition[0])
        has_equality_operator = condition[1] in ('=', '!=')

        # Cast boolean values to 1 / 0
        # NOTE(review): mutates the caller's condition list in place.
        if isinstance(condition[2], bool):
            condition[2] = int(condition[2])

        # Apply has function to any array field if it's = / != and not part of arrayjoin
        if array_field and has_equality_operator and (array_field.group(1) != self.arrayjoin):
            value = condition[2]
            if (isinstance(value, six.string_types)):
                # Quote string literals for the generated `has` expression.
                value = u"'{}'".format(value)
            bool_value = 1 if condition[1] == '=' else 0
            return [['has', [array_field.group(0), value]], '=', bool_value]
        return condition

    def has_projects_access(self, member, organization, requested_projects):
        # Members with a global role may query every project.
        has_global_access = roles.get(member.role).is_global
        if has_global_access:
            return True
        # Otherwise, restrict to projects reachable via the member's teams.
        member_project_list = Project.objects.filter(
            organization=organization,
            teams__in=OrganizationMemberTeam.objects.filter(
                organizationmember=member,
            ).values('team'),
        ).values_list('id', flat=True)
        return set(requested_projects).issubset(set(member_project_list))
class CreateUserSerializer(UserCreateSerializer):
    """!
    Class that overwrites the metadata of the class

    @author Ing. Leonel P. Hernandez M. (leonelphm at gmail.com)
    @date 25-09-2018
    @version 1.0.0
    """
    # Nested profile data, consumed on create only (never rendered back).
    user_profile = UserProfileSerializer(many=False, write_only=True)
    # Primary key of the Group the new user will be added to.
    group_id = serializers.CharField(write_only=True)
    groups = GroupSerializer(many=True, read_only=True)
    date_joined = serializers.DateTimeField(
        format=settings.DATETIME_FORMAT['DATETIME_USER'], read_only=True)
    last_login = serializers.DateTimeField(
        format=settings.DATETIME_FORMAT['DATETIME_USER'], read_only=True)

    class Meta:
        model = User
        fields = tuple(User.REQUIRED_FIELDS) + (
            User.USERNAME_FIELD, User._meta.pk.name, 'password', 'first_name',
            'last_name', 'user_profile', 'group_id', 'groups', 'is_active',
            'last_login', 'date_joined')
        read_only_fields = (
            'last_login', 'date_joined',
        )

    def validate(self, attrs):
        """
        Function that allows to validate the serializer fields.

        Checks email uniqueness, existence of the requested group, and
        password strength. Raises serializers.ValidationError keyed by the
        offending field.
        """
        password = attrs.get('password')
        group_id = attrs.get('group_id')
        email = attrs.get('email')
        # Build an unsaved User carrying the submitted attributes so the
        # password validators can check similarity against them.
        # FIX: the previous code passed these values positionally, which
        # mapped them onto the wrong model fields (first arg bound to pk).
        user = User(username=attrs.get('username'),
                    first_name=attrs.get('first_name'),
                    last_name=attrs.get('last_name'),
                    email=email)
        if User.objects.filter(email=email).exists():
            msg = "There is already a user with this email %s" % (email)
            raise serializers.ValidationError({'email': msg})
        try:
            Group.objects.get(pk=group_id)
        except (Group.DoesNotExist, ValueError):
            # ValueError covers a non-numeric pk string; anything else is a
            # real error and should propagate.
            raise serializers.ValidationError(
                {'group_id': "This group is not registered"})
        try:
            validate_password(password, user)
        except django_exceptions.ValidationError as e:
            raise serializers.ValidationError({'password': list(e.messages)})
        return attrs

    def create(self, validated_data):
        """
        Function that allows to create a record of the serialized object:
        the User itself, its group membership, and its UserProfile row.
        """
        try:
            user = self.perform_create(validated_data)
            # `add` accepts a pk; the group was validated in validate().
            user.groups.add(validated_data.get("group_id"))
        except IntegrityError:
            self.fail('cannot_create_user')
        fk_user = validated_data.get('user_profile')
        fk_user_create = validated_data.get('fk_user_create')
        #: load the value fk_user_create to the OrderedDict
        fk_user['fk_user_create'] = fk_user_create
        UserProfile.objects.create(fk_user=user, **fk_user)
        return user

    def perform_create(self, validated_data):
        """
        Function that allows to create object registry User.

        Runs inside a transaction; members of the ADMINS group are created
        with superuser/staff rights.
        """
        with transaction.atomic():
            group_id = validated_data.get('group_id')
            group = Group.objects.get(pk=group_id)
            admin = group.name == ADMINS
            user = User.objects.create_user(
                username=validated_data.get("username"),
                password=validated_data.get("password"),
                first_name=validated_data.get("first_name"),
                last_name=validated_data.get("last_name"),
                email=validated_data.get("email"),
                is_superuser=admin,
                is_staff=admin,
                is_active=validated_data.get('is_active'),
            )
            if settings.DJOSER['SEND_ACTIVATION_EMAIL']:
                # Keep the account inactive until the activation email
                # has been confirmed.
                user.is_active = False
                user.save(update_fields=['is_active'])
        return user
class ObjectTypeSerializer(serializers.Serializer): timestamp = serializers.DateTimeField() name = serializers.CharField()
class serializer_class(SubtitleLanguageSerializer): created = serializers.DateTimeField(read_only=True)
class DiscussionSerializer(ImagableModelSerializer, serializers.ModelSerializer): """ Basic serializer for discussions. """ id = serializers.Field(source='pk') question = serializers.CharField() intro = serializers.CharField() location = serializers.PrimaryKeyRelatedField() url = serializers.Field(source='get_absolute_url') creator_id = serializers.Field(source='creator.pk') creator_username = serializers.Field(source='creator.username') creator_fullname = serializers.Field(source='creator.get_full_name') creator_url = serializers.Field(source='creator.profile.get_absolute_url') creator_avatar = serializers.Field(source='creator.profile.avatar.url') date_created = serializers.DateTimeField(required=False) date_edited = serializers.DateTimeField(required=False) status = serializers.BooleanField() category_name = serializers.Field(source='category.name') category_url = serializers.SerializerMethodField('category_search_url') tags = serializers.SerializerMethodField('get_tags') answers = serializers.SerializerMethodField('get_answer_count') class Meta: model = Discussion fields = ( 'id', 'question', 'intro', 'url', 'creator_id', 'answers', 'creator_fullname', 'creator_url', 'creator_avatar', 'category_name', 'date_created', 'date_edited', 'status', 'category_url', 'tags', 'creator_username', 'location', 'image', ) def get_tags(self, obj): tags = [] for tag in obj.tags.all(): tags.append({ 'name': tag.name, 'url': reverse('locations:tag_search', kwargs={ 'slug': obj.location.slug, 'tag': tag.slug }) }) return tags def category_search_url(self, obj): if obj.category is not None: return reverse('locations:category_search', kwargs={ 'slug': obj.location.slug, 'app': 'topics', 'model': 'discussion', 'category': obj.category.pk }) return u"" def get_answer_count(self, obj): return obj.entry_set.count()
class serializer_class(NotesSerializer): created = serializers.DateTimeField(read_only=True)
class NestedTrackingSummarySerializer(serializers.Serializer): opened = serializers.DateTimeField(source='first_open') open_time = serializers.IntegerField() clicked = serializers.ListField(source='clicks', child=serializers.DictField())
class AssetSnapshotSerializer(serializers.HyperlinkedModelSerializer):
    """
    Serializer for AssetSnapshot objects: an XML rendering of an asset
    (or of raw source) that is exposed to Enketo for preview.
    """
    url = serializers.HyperlinkedIdentityField(
        lookup_field='uid', view_name='assetsnapshot-detail')
    uid = serializers.ReadOnlyField()
    # Computed fields — see get_xml() / get_enketopreviewlink() below.
    xml = serializers.SerializerMethodField()
    enketopreviewlink = serializers.SerializerMethodField()
    details = WritableJSONField(required=False)
    asset = RelativePrefixHyperlinkedRelatedField(
        queryset=Asset.objects.all(),
        view_name='asset-detail',
        lookup_field='uid',
        required=False,
        allow_null=True,
        style={'base_template': 'input.html'}  # Render as a simple text box
    )
    owner = RelativePrefixHyperlinkedRelatedField(view_name='user-detail',
                                                  lookup_field='username',
                                                  read_only=True)
    asset_version_id = serializers.ReadOnlyField()
    date_created = serializers.DateTimeField(read_only=True)
    source = WritableJSONField(required=False)

    def get_xml(self, obj):
        '''
        There's too much magic in HyperlinkedIdentityField. When format is
        unspecified by the request, HyperlinkedIdentityField.to_representation()
        refuses to append format to the url. We want to *unconditionally*
        include the xml format suffix.
        '''
        return reverse(viewname='assetsnapshot-detail',
                       format='xml',
                       kwargs={'uid': obj.uid},
                       request=self.context.get('request', None))

    def get_enketopreviewlink(self, obj):
        # Fully-qualified link to the Enketo preview for this snapshot.
        return reverse(viewname='assetsnapshot-preview',
                       kwargs={'uid': obj.uid},
                       request=self.context.get('request', None))

    def create(self, validated_data):
        '''
        Create a snapshot of an asset, either by copying an existing
        asset's content or by accepting the source directly in the
        request. Transform the source into XML that's then exposed to
        Enketo (and the www).
        '''
        asset = validated_data.get('asset', None)
        source = validated_data.get('source', None)
        # Force owner to be the requesting user
        # NB: validated_data is not used when linking to an existing asset
        # without specifying source; in that case, the snapshot owner is the
        # asset's owner, even if a different user makes the request
        validated_data['owner'] = self.context['request'].user
        # TODO: Move to a validator?
        if asset and source:
            # Both given: snapshot the provided source against the asset,
            # but only if the requester may view that asset.
            if not self.context['request'].user.has_perm('view_asset', asset):
                # The client is not allowed to snapshot this asset
                raise exceptions.PermissionDenied
            validated_data['source'] = source
            snapshot = AssetSnapshot.objects.create(**validated_data)
        elif asset:
            # The client provided an existing asset; read source from it
            if not self.context['request'].user.has_perm('view_asset', asset):
                # The client is not allowed to snapshot this asset
                raise exceptions.PermissionDenied
            # asset.snapshot pulls, by default, a snapshot for the latest
            # version.
            snapshot = asset.snapshot
        elif source:
            # The client provided source directly; no need to copy anything
            # For tidiness, pop off unused fields. `None` avoids KeyError
            validated_data.pop('asset', None)
            validated_data.pop('asset_version', None)
            snapshot = AssetSnapshot.objects.create(**validated_data)
        else:
            raise serializers.ValidationError(
                'Specify an asset and/or a source')
        if not snapshot.xml:
            # XML generation failed; surface the failure details to the client.
            raise serializers.ValidationError(snapshot.details)
        return snapshot

    class Meta:
        model = AssetSnapshot
        lookup_field = 'uid'
        fields = (
            'url',
            'uid',
            'owner',
            'date_created',
            'xml',
            'enketopreviewlink',
            'asset',
            'asset_version_id',
            'details',
            'source',
        )