class ChallengeTestSerializer(serializers.ModelSerializer):
    questions_saved = serializers.ListField(child=DictField(), required=False)
    courses_id = serializers.ListField(child=DictField(), write_only=True)
    questions_requested = serializers.ListField(
        child=DictField(), required=False, write_only=True)
    questions = serializers.SerializerMethodField(read_only=True)

    def get_questions(self, obj):
        serializer = []
        for course in obj.questions_requested:
            uid = course.get('course')
            decoded = settings.HASHIDS.decode(uid)
            course_id = decoded[0] if decoded else None
            serializer.append({
                "course": ListCoursesWithQuestionsSerializer(
                    Course.objects.filter(id=course_id).first()).data,
                "questions": ListQuestionSerializer(
                    Question.objects.filter(id__in=course.get('questions_id')),
                    many=True,
                    context={
                        'user_university': obj.user_university
                    }).data
            })
        return serializer

    class Meta:
        model = ChallengeTest
        fields = ('uid', 'created_at', 'questions_saved', 'questions_requested',
                  'courses_id', 'time', 'questions')
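# A minimal sketch (not from the original project) of the input shape that
# get_questions() above appears to expect for `questions_requested`: each
# entry carries a hashid-encoded course uid under "course" and the question
# ids to load under "questions_id". The literal values are invented.
questions_requested = [
    {"course": "k9J3xZ", "questions_id": [101, 102, 103]},
    {"course": "Qw7Lp2", "questions_id": [210, 215]},
]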
class ResourceSerializer(EmbeddedDocumentSerializer):
    is_all = serializers.BooleanField()
    actions = DictField(child=CharField(), read_only=False, required=False)

    class Meta:
        model = Resource
        fields = ("name", "is_all", "actions")
def get_fields(self):
    """ Return the field_mapping needed for serializing the data."""
    # Re-implement the logic from the superclass methods, but make sure to
    # handle field and query facets properly.
    # https://github.com/edx/course-discovery/blob/master/course_discovery/apps/api/serializers.py#L950
    # https://github.com/inonit/drf-haystack/blob/master/drf_haystack/serializers.py#L373
    field_data = self.instance.pop('fields', {})
    query_data = self.format_query_facet_data(self.instance.pop('queries', {}))

    field_mapping = super(DistinctCountsAggregateFacetSearchSerializer, self).get_fields()

    field_mapping['fields'] = FacetDictField(
        child=FacetListField(child=DistinctCountsFacetFieldSerializer(field_data), required=False))

    field_mapping['queries'] = DictField(
        query_data, child=DistinctCountsQueryFacetFieldSerializer(), required=False)

    if self.serialize_objects:
        field_mapping.move_to_end('objects')

    self.instance['fields'] = field_data
    self.instance['queries'] = query_data

    return field_mapping
def data_for_update(self, data):
    validate = {
        ADD: self.validate_add_list,
        CREATE: self.validate_create_list,
        REMOVE: self.validate_remove_list,
        UPDATE: self.validate_update_list,
    }
    DictField().run_validation(data)
    for operation, values in data.items():
        try:
            validate[operation](values)
        except ValidationError as e:
            detail = {operation: e.detail}
            code = e.get_codes()
            raise ValidationError(detail, code) from None
        except KeyError:
            wrap_quotes = lambda op: "`" + op + "`"
            op_list = list(map(wrap_quotes, update_ops))
            msg = ("`%s` is not a valid operation, valid operations "
                   "for this request are %s" % (operation, ', '.join(op_list)))
            code = 'invalid_operation'
            raise ValidationError(msg, code=code) from None
    return data
def run_data_validation(self, data, allowed_ops):
    DictField().run_validation(data)
    operation_2_validation_method = {
        ADD: self.run_add_list_validation,
        CREATE: self.run_create_list_validation,
        REMOVE: self.run_remove_list_validation,
        UPDATE: self.run_update_list_validation,
    }
    allowed_operation_2_validation_method = {
        operation: operation_2_validation_method[operation]
        for operation in allowed_ops
    }
    for operation, values in data.items():
        try:
            allowed_operation_2_validation_method[operation](values)
        except ValidationError as e:
            detail = {operation: e.detail}
            code = e.get_codes()
            raise ValidationError(detail, code=code) from None
        except KeyError:
            msg = (
                "`%s` is not a valid operation, valid operation(s) "
                "for this request are %s"
                % (operation, join_words(allowed_ops))
            )
            code = 'invalid_operation'
            raise ValidationError(msg, code=code) from None
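# A hypothetical sketch of the payload shape these bulk validators accept: a
# dict keyed by operation name, each value holding the items that operation
# should act on. It assumes the ADD/UPDATE constants resolve to "add"/"update",
# that `serializer` is an instance of the class defining run_data_validation,
# and the pks and field values below are invented for illustration.
payload = {
    "add": [17, 23],
    "update": {"42": {"name": "renamed"}},
}
serializer.run_data_validation(payload, allowed_ops=[ADD, UPDATE])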
class ContainerSerializer(serializers.Serializer):
    id = serializers.IntegerField(read_only=True)
    version = serializers.IntegerField(read_only=True)
    name = serializers.CharField()
    properties = DictField(child=ExtensiblePropertySerializer(read_only=True))
    type_full_name = serializers.CharField()
    global_id = serializers.CharField(read_only=True)
class LedgerCompositeTraintupleSerializer(serializers.Serializer):
    key = serializers.UUIDField()
    algo_key = serializers.UUIDField()
    data_manager_key = serializers.UUIDField()
    rank = serializers.IntegerField(allow_null=True, required=False, default=0)
    compute_plan_key = serializers.UUIDField(required=False, allow_null=True)
    in_head_model_key = serializers.UUIDField(required=False, allow_null=True)
    in_trunk_model_key = serializers.UUIDField(required=False, allow_null=True)
    out_trunk_model_permissions = PrivatePermissionsSerializer()
    train_data_sample_keys = serializers.ListField(
        child=serializers.UUIDField(), min_length=1)
    tag = serializers.CharField(min_length=0, max_length=64,
                                allow_blank=True, required=False, allow_null=True)
    metadata = DictField(child=CharField(), required=False, allow_null=True)

    def get_args(self, validated_data):
        key = validated_data.get('key')
        algo_key = validated_data.get('algo_key')
        data_manager_key = validated_data.get('data_manager_key')
        rank = validated_data.get('rank', '')
        rank = '' if rank is None else str(rank)
        compute_plan_key = validated_data.get('compute_plan_key')
        train_data_sample_keys = validated_data.get('train_data_sample_keys', [])
        in_head_model_key = validated_data.get('in_head_model_key')
        in_trunk_model_key = validated_data.get('in_trunk_model_key')
        out_trunk_model_permissions = validated_data.get('out_trunk_model_permissions')
        tag = validated_data.get('tag', '')
        metadata = validated_data.get('metadata')

        args = {
            'key': key,
            'algo_key': algo_key,
            'in_head_model_key': in_head_model_key,
            'in_trunk_model_key': in_trunk_model_key,
            'out_trunk_model_permissions': {
                'process': {
                    'authorized_ids': out_trunk_model_permissions.get('authorized_ids'),
                }
            },
            'data_manager_key': data_manager_key,
            'data_sample_keys': train_data_sample_keys,
            'compute_plan_key': compute_plan_key,
            'rank': rank,
            'tag': tag,
            'metadata': metadata
        }

        return args

    def create(self, channel_name, validated_data):
        args = self.get_args(validated_data)
        return ledger.assets.create_compositetraintuple(channel_name, args)
class SubstanceSerializer(serializers.Serializer):
    id = serializers.IntegerField(read_only=True)
    version = serializers.IntegerField(read_only=True)
    name = serializers.CharField()
    properties = DictField(child=ExtensiblePropertySerializer(read_only=True))
    type_full_name = serializers.CharField()
    location = LocationField(read_only=True)
    global_id = serializers.CharField(read_only=True)
class ProfileSerializer(DocumentSerializer):
    default_state = CharField(required=False)
    permissions = DictField(child=ResourceSerializer(), read_only=False, required=False)

    class Meta:
        model = Profile
        fields = ('id', 'is_root', 'name', 'permissions', 'default_state')
def run_update_list_validation(self, data):
    DictField().run_validation(data)
    pks = list(data.keys())
    self.run_pk_list_validation(pks)
    values = list(data.values())
    self.run_data_list_validation(
        values, partial=self.is_partial(True)
    )
class ProductStatSerializer(Serializer):
    """
    Not tied to a specific model.
    Read only; no create or update.
    """
    stats = DictField(
        child=ListField(
            child=IntegerField(),
        )
    )
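# A minimal usage sketch (not from the original project): because `stats` is a
# DictField of integer lists, the serializer can render a plain mapping of
# pre-aggregated counts. The key names and numbers below are invented.
stats_payload = {"stats": {"widget-a": [12, 7, 3], "widget-b": [4, 0, 9]}}
serializer = ProductStatSerializer(instance=stats_payload)
serializer.data  # {'stats': {'widget-a': [12, 7, 3], 'widget-b': [4, 0, 9]}}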
class AssociationSerializer(serializers.HyperlinkedModelSerializer):
    id = IntegerField(read_only=True)
    publication = DictField(source='publication.to_dict', read_only=True)
    experiment = DictField(source='experiment.to_dict', read_only=True)

    # Unused helpers: `publication` and `experiment` are already rendered via
    # DictField(source='....to_dict'), so DRF never calls these methods.
    def get_publication(self, obj):
        return obj.publication.to_dict()

    def get_experiment(self, obj):
        return obj.experiment.to_dict()

    # experiment = serializers.StringRelatedField(many=False, read_only=False)
    ass_id = serializers.IntegerField(read_only=True)

    class Meta:
        model = models.Association
        read_only_fields = ('publication', 'experiment')
class EventTopPerUserSerializer(Serializer):
    """Response object of Event's top_per_user"""

    application = DictField()
    counted_events = IntegerField()
    unique_users = IntegerField()

    def create(self, request: Request) -> Response:
        raise NotImplementedError

    def update(self, request: Request) -> Response:
        raise NotImplementedError
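# A hypothetical sketch of feeding this read-only response serializer
# pre-computed aggregation rows. The keys inside the application dict and the
# numbers are assumptions used only for illustration.
rows = [
    {"application": {"name": "web", "slug": "web"},
     "counted_events": 42, "unique_users": 7},
]
top_per_user = EventTopPerUserSerializer(rows, many=True).data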
class ComputePlanTesttupleSerializer(serializers.Serializer):
    traintuple_id = serializers.CharField(min_length=1, max_length=64)
    objective_key = serializers.UUIDField()
    data_manager_key = serializers.UUIDField(required=False, allow_null=True)
    test_data_sample_keys = serializers.ListField(
        child=serializers.UUIDField(), min_length=0, required=False)
    tag = serializers.CharField(min_length=0, max_length=64,
                                allow_blank=True, required=False, allow_null=True)
    metadata = DictField(child=CharField(), required=False, allow_null=True)
class LedgerObjectiveSerializer(serializers.Serializer):
    test_data_sample_keys = serializers.ListField(
        child=serializers.UUIDField(), min_length=0, required=False)
    name = serializers.CharField(min_length=1, max_length=100)
    test_data_manager_key = serializers.UUIDField(required=False, allow_null=True)
    permissions = PermissionsSerializer()
    metrics_name = serializers.CharField(min_length=1, max_length=100)
    metadata = DictField(child=CharField(), required=False, allow_null=True)

    def create(self, channel_name, validated_data):
        instance = self.initial_data.get('instance')
        name = validated_data.get('name')
        metrics_name = validated_data.get('metrics_name')
        permissions = validated_data.get('permissions')
        test_data_manager_key = validated_data.get('test_data_manager_key')
        test_data_sample_keys = validated_data.get('test_data_sample_keys', [])
        metadata = validated_data.get('metadata')

        # TODO: create a data migration with the new Site domain name once the
        # final website name is known.
        current_site = getattr(settings, "DEFAULT_DOMAIN")

        args = {
            'key': instance.key,
            'name': name,
            'description_checksum': get_hash(instance.description),
            'description_storage_address': current_site + reverse('substrapp:objective-description', args=[instance.key]),  # noqa
            'metrics_name': metrics_name,
            'metrics_checksum': get_hash(instance.metrics),
            'metrics_storage_address': current_site + reverse('substrapp:objective-metrics', args=[instance.key]),
            'test_dataset': {
                'data_manager_key': test_data_manager_key,
                'data_sample_keys': test_data_sample_keys,
            },
            'permissions': {
                'process': {
                    'public': permissions.get('public'),
                    'authorized_ids': permissions.get('authorized_ids'),
                }
            },
            'metadata': metadata
        }

        return ledger.assets.create_objective(channel_name, args, instance.key)
class Challenge(PassiveSerializer):
    """Challenge that gets sent to the client based on which stage
    is currently active"""

    type = ChoiceField(choices=[(x.value, x.name) for x in ChallengeTypes])
    component = CharField(required=False)
    title = CharField(required=False)
    background = CharField(required=False)
    response_errors = DictField(
        child=ErrorDetailSerializer(many=True), allow_empty=True, required=False)
class Challenge(PassiveSerializer):
    """Challenge that gets sent to the client based on which stage
    is currently active"""

    type = ChoiceField(
        choices=[(x.value, x.name) for x in ChallengeTypes],
    )
    flow_info = ContextualFlowInfo(required=False)
    component = CharField(default="")
    response_errors = DictField(
        child=ErrorDetailSerializer(many=True), allow_empty=True, required=False
    )
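# A hypothetical payload showing the shape `response_errors` expects: a dict
# mapping field names to lists of error details. The ChallengeTypes member,
# the component name, and the error-detail keys ("string"/"code") are
# assumptions about this codebase, used only for illustration.
challenge = Challenge(data={
    "type": ChallengeTypes.NATIVE.value,
    "component": "ak-stage-password",
    "response_errors": {
        "password": [{"string": "Invalid password", "code": "invalid"}],
    },
})
challenge.is_valid(raise_exception=True)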
def get_fields(self):
    query_facet_counts = self.instance.pop('queries', {})

    field_mapping = super().get_fields()

    query_data = self.format_query_facet_data(query_facet_counts)

    field_mapping['queries'] = DictField(
        query_data, child=QueryFacetFieldSerializer(), required=False)

    if self.serialize_objects:
        field_mapping.move_to_end('objects')

    self.instance['queries'] = query_data

    return field_mapping
class ComputePlanAggregatetupleSerializer(serializers.Serializer):
    aggregatetuple_id = serializers.CharField(min_length=1, max_length=64)
    algo_key = serializers.UUIDField()
    worker = serializers.CharField()
    in_models_ids = serializers.ListField(
        child=serializers.CharField(min_length=1, max_length=64),
        min_length=0, required=False)
    tag = serializers.CharField(min_length=0, max_length=64,
                                allow_blank=True, required=False, allow_null=True)
    metadata = DictField(child=CharField(), required=False, allow_null=True)
class LedgerDataManagerSerializer(serializers.Serializer):
    name = serializers.CharField(max_length=100)
    type = serializers.CharField(max_length=30)
    objective_key = serializers.UUIDField(required=False, allow_null=True)
    permissions = PermissionsSerializer()
    metadata = DictField(child=CharField(), required=False, allow_null=True)

    def create(self, channel_name, validated_data):
        instance = self.initial_data.get('instance')
        name = validated_data.get('name')
        data_type = validated_data.get('type')
        permissions = validated_data.get('permissions')
        objective_key = validated_data.get('objective_key')
        metadata = validated_data.get('metadata')

        # TODO: create a data migration with the new Site domain name once the
        # final website name is known.
        current_site = getattr(settings, "DEFAULT_DOMAIN")

        args = {
            'key': instance.key,
            'name': name,
            'opener_checksum': get_hash(instance.data_opener),
            'opener_storage_address': current_site + reverse('substrapp:data_manager-opener', args=[instance.key]),
            'type': data_type,
            'description_checksum': get_hash(instance.description),
            'description_storage_address': current_site + reverse('substrapp:data_manager-description', args=[instance.key]),
            'objective_key': objective_key,
            'permissions': {
                'process': {
                    'public': permissions.get('public'),
                    'authorized_ids': permissions.get('authorized_ids'),
                }
            },
            'metadata': metadata
        }

        return ledger.assets.create_datamanager(channel_name, args, instance.key)
class ComputePlanTraintupleSerializer(serializers.Serializer):
    algo_key = serializers.UUIDField()
    data_manager_key = serializers.UUIDField()
    train_data_sample_keys = serializers.ListField(
        child=serializers.UUIDField(), min_length=1)
    traintuple_id = serializers.CharField(min_length=1, max_length=64)
    in_models_ids = serializers.ListField(
        child=serializers.CharField(min_length=1, max_length=64),
        min_length=0, required=False)
    tag = serializers.CharField(min_length=0, max_length=64,
                                allow_blank=True, required=False, allow_null=True)
    metadata = DictField(child=CharField(), required=False, allow_null=True)
class ChallengeTimeOutSerializer(serializers.ModelSerializer):
    questions_saved = serializers.ListField(child=DictField(), required=False)
    page_questions = serializers.SerializerMethodField(read_only=True)

    def get_page_questions(self, obj):
        page = self.context.get("page", 1)
        questions = obj.questions_requested[(page - 1) * 20:page * 20]
        return ListQuestionSerializer(
            Question.objects.filter(id__in=questions),
            many=True,
            context={
                'user_university': obj.user_university
            }).data

    class Meta:
        model = ChallengeTimeOut
        fields = ('uid', 'created_at', 'page_questions', 'questions_saved',
                  'quantity_questions')
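# A small usage sketch: the page number is passed through the serializer
# context, and get_page_questions() slices 20 requested question ids per page.
# The `timeout` instance below is hypothetical.
page_two = ChallengeTimeOutSerializer(timeout, context={"page": 2}).data
page_two["page_questions"]  # questions 21-40 of the challenge time-out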
class LedgerAggregateTupleSerializer(serializers.Serializer):
    key = serializers.UUIDField()
    algo_key = serializers.UUIDField()
    rank = serializers.IntegerField(allow_null=True, required=False, default=0)
    worker = serializers.CharField()
    compute_plan_key = serializers.UUIDField(required=False, allow_null=True)
    in_models_keys = serializers.ListField(
        child=serializers.UUIDField(), min_length=0, required=False, allow_null=True)
    tag = serializers.CharField(min_length=0, max_length=64,
                                allow_blank=True, required=False, allow_null=True)
    metadata = DictField(child=CharField(), required=False, allow_null=True)

    def get_args(self, validated_data):
        key = validated_data.get('key')
        algo_key = validated_data.get('algo_key')
        rank = validated_data.get('rank', '')
        rank = '' if rank is None else str(rank)
        worker = validated_data.get('worker')
        compute_plan_key = validated_data.get('compute_plan_key')
        in_models_keys = validated_data.get('in_models_keys', [])
        tag = validated_data.get('tag', '')
        metadata = validated_data.get('metadata')

        args = {
            'key': key,
            'algo_key': algo_key,
            'in_models': in_models_keys,
            'compute_plan_key': compute_plan_key,
            'rank': rank,
            'worker': worker,
            'tag': tag,
            'metadata': metadata
        }

        return args

    def create(self, channel_name, validated_data):
        args = self.get_args(validated_data)
        return ledger.assets.create_aggregatetuple(channel_name, args)
def data_for_update(self, data):
    validation_methods = self.get_operation_validation_methods(update_ops)
    DictField().run_validation(data)
    for operation, values in data.items():
        try:
            validation_methods[operation](values)
        except ValidationError as e:
            detail = {operation: e.detail}
            code = e.get_codes()
            raise ValidationError(detail, code) from None
        except KeyError:
            ops_list = ("`" + op + "`" for op in update_ops)
            msg = ("`%s` is not a valid operation, valid operations "
                   "for this request are %s" % (operation, ', '.join(ops_list)))
            code = 'invalid_operation'
            raise ValidationError(msg, code=code) from None
    return data
def data_for_create(self, data):
    validation_methods = self.get_operation_validation_methods(create_ops)
    DictField().run_validation(data)
    for operation, values in data.items():
        try:
            validation_methods[operation](values)
        except ValidationError as e:
            detail = {operation: e.detail}
            code = e.get_codes()
            raise ValidationError(detail, code=code) from None
        except KeyError:
            msg = (
                "`%s` is not a valid operation, valid operation(s) "
                "for this request are %s"
                % (operation, join_words(create_ops))
            )
            code = 'invalid_operation'
            raise ValidationError(msg, code=code) from None
    return data
class LedgerTestTupleSerializer(serializers.Serializer):
    key = serializers.UUIDField()
    traintuple_key = serializers.UUIDField()
    objective_key = serializers.UUIDField(required=False)
    data_manager_key = serializers.UUIDField(required=False, allow_null=True)
    test_data_sample_keys = serializers.ListField(
        child=serializers.UUIDField(), min_length=0, required=False, allow_null=True)
    tag = serializers.CharField(min_length=0, max_length=64,
                                allow_blank=True, required=False, allow_null=True)
    metadata = DictField(child=CharField(), required=False, allow_null=True)

    def get_args(self, validated_data):
        key = validated_data.get('key')
        traintuple_key = validated_data.get('traintuple_key')
        objective_key = validated_data.get('objective_key')
        data_manager_key = validated_data.get('data_manager_key')
        test_data_sample_keys = validated_data.get('test_data_sample_keys', [])
        tag = validated_data.get('tag', '')
        metadata = validated_data.get('metadata')

        args = {
            'key': key,
            'traintuple_key': traintuple_key,
            'objective_key': objective_key,
            'data_manager_key': data_manager_key,
            'data_sample_keys': test_data_sample_keys,
            'tag': tag,
            'metadata': metadata
        }

        return args

    def create(self, channel_name, validated_data):
        args = self.get_args(validated_data)
        return ledger.assets.create_testtuple(channel_name, args)
def get_fields(self):
    """ Return the field_mapping needed for serializing the data."""
    # Re-implement the logic from the superclass methods, but make sure to
    # handle field and query facets properly.
    field_data = self.instance.pop('fields', {})
    query_data = self.format_query_facet_data(self.instance.pop('queries', {}))

    field_mapping = super().get_fields()

    field_mapping['fields'] = FacetDictField(
        child=FacetListField(child=DistinctCountsFacetFieldSerializer(field_data), required=False)
    )

    field_mapping['queries'] = DictField(
        query_data, child=DistinctCountsQueryFacetFieldSerializer(), required=False
    )

    if self.serialize_objects:
        field_mapping.move_to_end('objects')

    self.instance['fields'] = field_data
    self.instance['queries'] = query_data

    return field_mapping
class LedgerCompositeAlgoSerializer(serializers.Serializer):
    name = serializers.CharField(min_length=1, max_length=100)
    permissions = PermissionsSerializer()
    metadata = DictField(child=CharField(), required=False, allow_null=True)

    def create(self, channel_name, validated_data):
        instance = self.initial_data.get('instance')
        name = validated_data.get('name')
        permissions = validated_data.get('permissions')
        metadata = validated_data.get('metadata')

        # TODO: create a data migration with the new Site domain name once the
        # final website name is known.
        current_site = getattr(settings, "DEFAULT_DOMAIN")

        args = {
            'key': instance.key,
            'name': name,
            'checksum': get_hash(instance.file),
            'storage_address': current_site + reverse('substrapp:composite_algo-file', args=[instance.key]),
            'description_checksum': get_hash(instance.description),
            'description_storage_address': current_site + reverse('substrapp:composite_algo-description', args=[instance.key]),
            'permissions': {
                'process': {
                    'public': permissions.get('public'),
                    'authorized_ids': permissions.get('authorized_ids'),
                }
            },
            'metadata': metadata
        }

        return ledger.assets.create_compositealgo(channel_name, args, instance.key)
class FixtureDetailSerializer(serializers.ModelSerializer):
    opponent_name = serializers.ReadOnlyField(source='opponent.not_so_long_name')
    opponent_crest = serializers.SerializerMethodField()
    opponent_short_name = serializers.ReadOnlyField(source='opponent.short_name')
    competition_name = serializers.ReadOnlyField(source='competition_year.competition.name')
    goals = GoalSerializer(many=True)
    data = DictField()

    class Meta:
        model = Fixture
        depth = 2
        fields = ('id', 'is_home_game', 'opponent_name', 'mufc_score', 'venue',
                  'opponent_score', 'opponent_crest', 'opponent_short_name',
                  'competition_name', 'datetime', 'data', 'broadcast_on', 'goals')

    def get_opponent_crest(self, obj):
        if obj.opponent.crest:
            return obj.opponent.crest.url
        return None
def get_fields(self):
    """
    This returns a dictionary containing the top most fields,
    ``dates``, ``fields`` and ``queries``.
    """
    field_mapping = OrderedDict()
    for field, data in self.instance.items():
        if field == "heatmaps":
            # heatmaps is a special facet, so we can't use facet_dict_field_class
            field_mapping.update({field: DictField(data)})
        else:
            field_mapping.update({
                field: self.facet_dict_field_class(
                    child=self.facet_list_field_class(
                        child=self.facet_field_serializer_class(data)),
                    required=False,
                )
            })

    if self.serialize_objects is True:
        field_mapping["objects"] = serializers.SerializerMethodField()

    return field_mapping